Commit a6e9e0c8 authored by Dr.李

added xgb trainer models

parent c0e2023c
@@ -33,6 +33,7 @@ from alphamind.model import RandomForestRegressor
from alphamind.model import RandomForestClassifier
from alphamind.model import XGBRegressor
from alphamind.model import XGBClassifier
from alphamind.model import XGBTrainer
from alphamind.model import load_model
from alphamind.model.data_preparing import fetch_data_package
from alphamind.model.data_preparing import fetch_train_phase
@@ -74,6 +75,7 @@ __all__ = [
'RandomForestClassifier',
'XGBRegressor',
'XGBClassifier',
'XGBTrainer',
'load_model',
'NaiveExecutor',
'ThresholdExecutor',
......
@@ -1954,5 +1954,5 @@ class OutrightTmp(Base):
if __name__ == '__main__':
from sqlalchemy import create_engine
engine = create_engine('postgres+psycopg2://postgres:A12345678!@10.63.6.220/alpha')
engine = create_engine('postgres+psycopg2://postgres:we083826@101.132.104.118/alpha')
Base.metadata.create_all(engine)
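Side note on the hunk above: one hard-coded connection string is swapped for another. A minimal sketch of supplying it through an environment variable instead is shown below; the variable name ALPHA_DB_URL and the fallback URL are assumptions, not part of this commit.

import os
from sqlalchemy import create_engine

# ALPHA_DB_URL is a hypothetical variable name; falls back to a local default for development.
db_url = os.environ.get('ALPHA_DB_URL', 'postgresql+psycopg2://postgres:postgres@localhost/alpha')
engine = create_engine(db_url)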
@@ -14,6 +14,7 @@ from alphamind.model.treemodel import RandomForestRegressor
from alphamind.model.treemodel import RandomForestClassifier
from alphamind.model.treemodel import XGBRegressor
from alphamind.model.treemodel import XGBClassifier
from alphamind.model.treemodel import XGBTrainer
from alphamind.model.loader import load_model
@@ -26,4 +27,5 @@ __all__ = ['LinearRegression',
'RandomForestClassifier',
'XGBRegressor',
'XGBClassifier',
'XGBTrainer',
'load_model']
\ No newline at end of file
@@ -14,6 +14,7 @@ from alphamind.model.treemodel import RandomForestRegressor
from alphamind.model.treemodel import RandomForestClassifier
from alphamind.model.treemodel import XGBRegressor
from alphamind.model.treemodel import XGBClassifier
from alphamind.model.treemodel import XGBTrainer
def load_model(model_desc: dict) -> ModelBase:
@@ -37,5 +38,7 @@ def load_model(model_desc: dict) -> ModelBase:
return XGBRegressor.load(model_desc)
elif 'XGBClassifier' in model_name_parts:
return XGBClassifier.load(model_desc)
elif 'XGBTrainer' in model_name_parts:
return XGBTrainer.load(model_desc)
else:
raise ValueError('{0} is not currently supported in model loader.'.format(model_name))
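For reference, the round trip this new loader branch enables looks roughly like the sketch below, mirroring the unit test added further down; the parameter values are illustrative and not verified against the repository.

import numpy as np
from alphamind.model import XGBTrainer, load_model

# Random fixtures in the same shape the unit tests use.
x = np.random.randn(1000, 10)
y = np.where(np.random.randn(1000) > 0, 1, 0)

model = XGBTrainer(features=list(range(10)), objective='binary:logistic')
model.fit(x, y)

desc = model.save()            # dict description of the fitted model
restored = load_model(desc)    # dispatched to XGBTrainer.load by class name
print(restored.predict(np.random.randn(5, 10)))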
@@ -162,7 +162,7 @@ class XGBClassifier(ModelBase):
class XGBTrainer(ModelBase):
def __init__(self,
objective,
objective='binary:logistic',
booster='gbtree',
tree_method='hist',
n_estimators: int=100,
......
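The hunk above only shows the constructor signature, where objective gains a default. For intuition, here is a minimal sketch of how a trainer like this could wrap xgboost's native API; the class body, the num_boost_round mapping, and the name SimpleXGBTrainer are assumptions, not the repository's implementation.

import numpy as np
import xgboost as xgb

class SimpleXGBTrainer:
    """Illustrative stand-in only; NOT alphamind's XGBTrainer."""

    def __init__(self, features=None, objective='binary:logistic',
                 booster='gbtree', tree_method='hist', n_estimators: int = 100):
        self.features = features
        self.params = {'objective': objective,
                       'booster': booster,
                       'tree_method': tree_method}
        self.n_estimators = n_estimators
        self.impl = None

    def fit(self, x: np.ndarray, y: np.ndarray):
        dtrain = xgb.DMatrix(x, label=y)
        # n_estimators plays the role of num_boost_round in the native training API.
        self.impl = xgb.train(self.params, dtrain, num_boost_round=self.n_estimators)

    def predict(self, x: np.ndarray) -> np.ndarray:
        return self.impl.predict(xgb.DMatrix(x))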
@@ -12,16 +12,18 @@ from alphamind.model.treemodel import RandomForestRegressor
from alphamind.model.treemodel import RandomForestClassifier
from alphamind.model.treemodel import XGBRegressor
from alphamind.model.treemodel import XGBClassifier
from alphamind.model.treemodel import XGBTrainer
class TestTreeModel(unittest.TestCase):
def setUp(self):
self.x = np.random.randn(1000, 10)
self.y = np.random.randn(1000)
def test_random_forest_regress_persistence(self):
model = RandomForestRegressor(features=list(range(10)))
x = np.random.randn(1000, 10)
y = np.random.randn(1000)
model.fit(x, y)
model.fit(self.x, self.y)
desc = model.save()
new_model = load_model(desc)
@@ -32,11 +34,8 @@ class TestTreeModel(unittest.TestCase):
def test_random_forest_classify_persistence(self):
model = RandomForestClassifier(features=list(range(10)))
x = np.random.randn(1000, 10)
y = np.random.randn(1000)
y = np.where(y > 0, 1, 0)
model.fit(x, y)
y = np.where(self.y > 0, 1, 0)
model.fit(self.x, y)
desc = model.save()
new_model = load_model(desc)
@@ -47,10 +46,7 @@ class TestTreeModel(unittest.TestCase):
def test_xgb_regress_persistence(self):
model = XGBRegressor(features=list(range(10)))
x = np.random.randn(1000, 10)
y = np.random.randn(1000)
model.fit(x, y)
model.fit(self.x, self.y)
desc = model.save()
new_model = load_model(desc)
@@ -61,11 +57,24 @@ class TestTreeModel(unittest.TestCase):
def test_xgb_classify_persistence(self):
model = XGBClassifier(features=list(range(10)))
x = np.random.randn(1000, 10)
y = np.random.randn(1000)
y = np.where(y > 0, 1, 0)
model.fit(x, y)
y = np.where(self.y > 0, 1, 0)
model.fit(self.x, y)
desc = model.save()
new_model = load_model(desc)
self.assertEqual(model.features, new_model.features)
sample_x = np.random.randn(100, 10)
np.testing.assert_array_almost_equal(model.predict(sample_x), new_model.predict(sample_x))
def test_xgb_trainer_persistence(self):
model = XGBTrainer(features=list(range(10)),
objective='binary:logistic',
booster='gbtree',
tree_method='hist',
n_estimators=200)
y = np.where(self.y > 0, 1, 0)
model.fit(self.x, y)
desc = model.save()
new_model = load_model(desc)
......
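The test refactor above moves the random fixtures into setUp. If reproducible runs were wanted, a seeded generator could be used there; the following is a sketch of that variation, not part of the commit.

import unittest
import numpy as np

class TestTreeModel(unittest.TestCase):
    def setUp(self):
        # A fixed seed keeps the persistence tests deterministic across runs.
        rng = np.random.RandomState(42)
        self.x = rng.randn(1000, 10)
        self.y = rng.randn(1000)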