Commit 43779306 authored by Dr.李

update models

parent 139bddec
......@@ -5,6 +5,7 @@ Created on 2017-8-23
@author: cheng.li
"""
import copy
import pandas as pd
from PyFin.api import pyFinAssert
from PyFin.Analysis.SecurityValueHolders import SecurityValueHolder
......@@ -58,7 +59,7 @@ class Transformer(object):
def __init__(self,
expressions):
expression_dict, expression_dependency = \
factor_translator(expressions)
factor_translator(copy.deepcopy(expressions))
if expression_dict:
self.names = sorted(expression_dict.keys())
......
......@@ -32,9 +32,9 @@ class ConstLinearModel(ModelBase):
def __init__(self,
features: list = None,
weights: np.ndarray = None,
**kwargs):
super().__init__(features, **kwargs)
formulas: dict = None,
weights: np.ndarray = None):
super().__init__(features, formulas=formulas)
if features is not None and weights is not None:
pyFinAssert(len(features) == len(weights),
ValueError,
......@@ -57,8 +57,8 @@ class ConstLinearModel(ModelBase):
class LinearRegression(ModelBase):
def __init__(self, features: list = None, fit_intercept: bool = False, **kwargs):
super().__init__(features, **kwargs)
def __init__(self, features: list = None, formulas: dict = None, fit_intercept: bool = False, **kwargs):
super().__init__(features, formulas=formulas)
self.impl = LinearRegressionImpl(fit_intercept=fit_intercept, **kwargs)
self.trained_time = None
......@@ -85,8 +85,8 @@ class LinearRegression(ModelBase):
class LassoRegression(ModelBase):
def __init__(self, alpha=0.01, features: list = None, fit_intercept: bool = False, **kwargs):
super().__init__(features, **kwargs)
def __init__(self, alpha=0.01, features: list = None, formulas: dict = None, fit_intercept: bool = False, **kwargs):
super().__init__(features, formulas=formulas)
self.impl = Lasso(alpha=alpha, fit_intercept=fit_intercept, **kwargs)
self.trained_time = None
......@@ -113,8 +113,8 @@ class LassoRegression(ModelBase):
class LogisticRegression(ModelBase):
def __init__(self, features: list = None, fit_intercept: bool = False, **kwargs):
super().__init__(features, **kwargs)
def __init__(self, features: list = None, formulas: dict = None, fit_intercept: bool = False, **kwargs):
super().__init__(features, formulas=formulas)
self.impl = LogisticRegressionImpl(fit_intercept=fit_intercept, **kwargs)
def save(self) -> dict:
......
......@@ -6,6 +6,7 @@ Created on 2017-9-4
"""
import abc
import copy
import arrow
import numpy as np
from alphamind.utilities import alpha_logger
......@@ -15,13 +16,13 @@ from alphamind.utilities import decode
class ModelBase(metaclass=abc.ABCMeta):
def __init__(self, features: list=None, formulas: dict=None, **kwargs):
def __init__(self, features: list=None, formulas: dict=None):
if features is not None:
self.features = list(features)
else:
self.features = formulas
self.features = None
self.impl = None
self.formulas = formulas
self.formulas = copy.deepcopy(formulas)
self.trained_time = None
def fit(self, x: np.ndarray, y: np.ndarray):
......
......@@ -61,8 +61,9 @@ class RandomForestClassifier(ModelBase):
n_estimators: int=100,
max_features: str='auto',
features: List = None,
formulas: dict = None,
**kwargs):
super().__init__(features, **kwargs)
super().__init__(features, formulas=formulas)
self.impl = RandomForestClassifierImpl(n_estimators=n_estimators,
max_features=max_features,
**kwargs)
......@@ -96,8 +97,9 @@ class XGBRegressor(ModelBase):
learning_rate: float=0.1,
max_depth: int=3,
features: List=None,
formulas: dict = None,
**kwargs):
super().__init__(features, **kwargs)
super().__init__(features, formulas=formulas)
self.impl = XGBRegressorImpl(n_estimators=n_estimators,
learning_rate=learning_rate,
max_depth=max_depth,
......@@ -131,8 +133,9 @@ class XGBClassifier(ModelBase):
learning_rate: float=0.1,
max_depth: int=3,
features: List = None,
formulas: dict = None,
**kwargs):
super().__init__(features, **kwargs)
super().__init__(features, formulas=formulas)
self.impl = XGBClassifierImpl(n_estimators=n_estimators,
learning_rate=learning_rate,
max_depth=max_depth,
......@@ -173,9 +176,10 @@ class XGBTrainer(ModelBase):
subsample=1.,
colsample_bytree=1.,
features: List = None,
formulas: dict = None,
random_state=0,
**kwargs):
super().__init__(features)
super().__init__(features, formulas=formulas)
self.params = {
'silent': 1,
'objective': objective,
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment