Unverified commit ed3131b1 authored by iLampard, committed by GitHub

Merge pull request #1 from alpha-miner/master

merge update
parents d52e93db e84b1fbf
......@@ -43,6 +43,7 @@ from alphamind.model import RandomForestClassifier
from alphamind.model import XGBRegressor
from alphamind.model import XGBClassifier
from alphamind.model import XGBTrainer
from alphamind.model import NvSVRModel
from alphamind.model import load_model
from alphamind.model.data_preparing import fetch_data_package
from alphamind.model.data_preparing import fetch_train_phase
......@@ -104,6 +105,7 @@ __all__ = [
'XGBRegressor',
'XGBClassifier',
'XGBTrainer',
'NvSVRModel',
'load_model',
'NaiveExecutor',
'ThresholdExecutor',
......
......@@ -72,11 +72,11 @@ cdef extern from "tvoptimizer.hpp" namespace "pfopt":
double*,
double*,
double,
double,
int,
double*,
double*,
double*) except +
double*,
string) except +
vector[double] xValue()
double feval()
int status()
......@@ -96,11 +96,11 @@ cdef class CVOptimizer:
cnp.ndarray[double, ndim=2] cons_matrix=None,
double[:] clbound=None,
double[:] cubound=None,
double target_low=0.0,
double target_high=1.0,
double target_vol=1.0,
cnp.ndarray[double, ndim=2] factor_cov_matrix=None,
cnp.ndarray[double, ndim=2] factor_loading_matrix=None,
double[:] idsync_risk=None):
double[:] idsync_risk=None,
str linear_solver="ma27"):
self.n = lbound.shape[0]
self.m = 0
......@@ -123,12 +123,12 @@ cdef class CVOptimizer:
&cons[0],
&clbound[0],
&cubound[0],
target_low,
target_high,
target_vol,
self.f,
&factor_cov[0] if factor_cov is not None else NULL,
&factor_loading[0] if factor_loading is not None else NULL,
&idsync_risk[0] if idsync_risk is not None else NULL)
&idsync_risk[0] if idsync_risk is not None else NULL,
bytes(linear_solver, encoding='utf8'))
else:
self.cobj = new TVOptimizer(self.n,
&expected_return[0],
......@@ -139,12 +139,12 @@ cdef class CVOptimizer:
NULL,
NULL,
NULL,
target_low,
target_high,
target_vol,
self.f,
&factor_cov[0] if factor_cov is not None else NULL,
&factor_loading[0] if factor_loading is not None else NULL,
&idsync_risk[0] if idsync_risk is not None else NULL)
&idsync_risk[0] if idsync_risk is not None else NULL,
bytes(linear_solver, encoding='utf8'))
def __dealloc__(self):
del self.cobj
......@@ -174,7 +174,8 @@ cdef extern from "mvoptimizer.hpp" namespace "pfopt":
int,
double*,
double*,
double*) except +
double*,
string) except +
vector[double] xValue()
double feval()
int status()
......@@ -213,7 +214,8 @@ cdef class QPOptimizer:
double risk_aversion=1.0,
cnp.ndarray[double, ndim=2] factor_cov_matrix=None,
cnp.ndarray[double, ndim=2] factor_loading_matrix=None,
double[:] idsync_risk=None):
double[:] idsync_risk=None,
str linear_solver='ma27'):
self.n = lbound.shape[0]
self.m = 0
......@@ -243,7 +245,8 @@ cdef class QPOptimizer:
self.f,
&factor_cov[0] if factor_cov is not None else NULL,
&factor_loading[0] if factor_loading is not None else NULL,
&idsync_risk[0] if idsync_risk is not None else NULL)
&idsync_risk[0] if idsync_risk is not None else NULL,
bytes(linear_solver, encoding='utf8'))
else:
self.cobj = new MVOptimizer(self.n,
&expected_return[0],
......@@ -258,7 +261,8 @@ cdef class QPOptimizer:
self.f,
&factor_cov[0] if factor_cov is not None else NULL,
&factor_loading[0] if factor_loading is not None else NULL,
&idsync_risk[0] if idsync_risk is not None else NULL)
&idsync_risk[0] if idsync_risk is not None else NULL,
bytes(linear_solver, encoding='utf8'))
def __dealloc__(self):
del self.cobj
......
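
The net effect of the optimizer changes above: TVOptimizer drops the separate target_low/target_high pair in favour of a single target_vol, and both CVOptimizer and QPOptimizer gain a linear_solver keyword that is forwarded to the C++ layer as a UTF-8 encoded string ('ma27' stays the default). A minimal sketch of the new keyword from the Python side; the module path and the leading positional arguments are assumptions, since the full signatures are cut off in this diff:

import numpy as np
from alphamind.cython.optimizers import QPOptimizer  # module path assumed

er = np.array([0.01, 0.02, 0.03])   # expected returns
cov = np.diag([0.02, 0.03, 0.04])   # toy covariance matrix
lbound = np.zeros(3)
ubound = np.ones(3)

# Only the linear_solver keyword is new; pass whichever linear solver
# the underlying solver build supports (commonly 'ma27', 'ma57', 'mumps').
opt = QPOptimizer(er, cov, lbound, ubound,
                  risk_aversion=1.0,
                  linear_solver='ma27')
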
......@@ -16,6 +16,8 @@ from alphamind.model.treemodel import XGBRegressor
from alphamind.model.treemodel import XGBClassifier
from alphamind.model.treemodel import XGBTrainer
from alphamind.model.svm import NvSVRModel
from alphamind.model.loader import load_model
......@@ -28,4 +30,5 @@ __all__ = ['LinearRegression',
'XGBRegressor',
'XGBClassifier',
'XGBTrainer',
'NvSVRModel',
'load_model']
\ No newline at end of file
......@@ -6,14 +6,11 @@ Created on 2017-5-10
"""
import numpy as np
from distutils.version import LooseVersion
from sklearn import __version__ as sklearn_version
from sklearn.linear_model import LinearRegression as LinearRegressionImpl
from sklearn.linear_model import Lasso
from sklearn.linear_model import LogisticRegression as LogisticRegressionImpl
from PyFin.api import pyFinAssert
from alphamind.model.modelbase import ModelBase
from alphamind.utilities import alpha_logger
from alphamind.model.modelbase import create_model_base
class ConstLinearModelImpl(object):
......@@ -35,7 +32,7 @@ class ConstLinearModelImpl(object):
return 1. - sse / ssto
class ConstLinearModel(ModelBase):
class ConstLinearModel(create_model_base()):
def __init__(self,
features=None,
......@@ -63,7 +60,7 @@ class ConstLinearModel(ModelBase):
return self.impl.weights.tolist()
class LinearRegression(ModelBase):
class LinearRegression(create_model_base('sklearn')):
def __init__(self, features=None, fit_intercept: bool = False, fit_target=None, **kwargs):
super().__init__(features=features, fit_target=fit_target)
......@@ -71,26 +68,15 @@ class LinearRegression(ModelBase):
def save(self) -> dict:
model_desc = super().save()
model_desc['sklearn_version'] = sklearn_version
model_desc['weight'] = self.impl.coef_.tolist()
return model_desc
@classmethod
def load(cls, model_desc: dict):
obj_layout = super().load(model_desc)
if LooseVersion(sklearn_version) < LooseVersion(model_desc['sklearn_version']):
alpha_logger.warning('Current sklearn version {0} is lower than the model version {1}. '
'Loaded model may work incorrectly.'.format(sklearn_version,
model_desc['sklearn_version']))
return obj_layout
@property
def weights(self):
return self.impl.coef_.tolist()
class LassoRegression(ModelBase):
class LassoRegression(create_model_base('sklearn')):
def __init__(self, alpha=0.01, features=None, fit_intercept: bool = False, fit_target=None, **kwargs):
super().__init__(features=features, fit_target=fit_target)
......@@ -98,26 +84,15 @@ class LassoRegression(ModelBase):
def save(self) -> dict:
model_desc = super().save()
model_desc['sklearn_version'] = sklearn_version
model_desc['weight'] = self.impl.coef_.tolist()
return model_desc
@classmethod
def load(cls, model_desc: dict):
obj_layout = super().load(model_desc)
if LooseVersion(sklearn_version) < LooseVersion(model_desc['sklearn_version']):
alpha_logger.warning('Current sklearn version {0} is lower than the model version {1}. '
'Loaded model may work incorrectly.'.format(sklearn_version,
model_desc['sklearn_version']))
return obj_layout
@property
def weights(self):
return self.impl.coef_.tolist()
class LogisticRegression(ModelBase):
class LogisticRegression(create_model_base('sklearn')):
def __init__(self, features=None, fit_intercept: bool = False, fit_target=None, **kwargs):
super().__init__(features=features, fit_target=fit_target)
......@@ -125,20 +100,9 @@ class LogisticRegression(ModelBase):
def save(self) -> dict:
model_desc = super().save()
model_desc['sklearn_version'] = sklearn_version
model_desc['weight'] = self.impl.coef_.tolist()
return model_desc
@classmethod
def load(cls, model_desc: dict):
obj_layout = super().load(model_desc)
if LooseVersion(sklearn_version) < LooseVersion(model_desc['sklearn_version']):
alpha_logger.warning('Current sklearn version {0} is lower than the model version {1}. '
'Loaded model may work incorrectly.'.format(sklearn_version,
model_desc['sklearn_version']))
return obj_layout
@property
def weights(self):
return self.impl.coef_.tolist()
......
......@@ -15,6 +15,7 @@ from alphamind.model.treemodel import RandomForestClassifier
from alphamind.model.treemodel import XGBRegressor
from alphamind.model.treemodel import XGBClassifier
from alphamind.model.treemodel import XGBTrainer
from alphamind.model.svm import NvSVRModel
def load_model(model_desc: dict) -> ModelBase:
......@@ -40,5 +41,7 @@ def load_model(model_desc: dict) -> ModelBase:
return XGBClassifier.load(model_desc)
elif 'XGBTrainer' in model_name_parts:
return XGBTrainer.load(model_desc)
elif 'NvSVR' in model_name_parts:
return NvSVRModel.load(model_desc)
else:
raise ValueError('{0} is not currently supported in model loader.'.format(model_name))
......@@ -6,10 +6,13 @@ Created on 2017-9-4
"""
import abc
from distutils.version import LooseVersion
import arrow
import numpy as np
import pandas as pd
from simpleutils.miscellaneous import list_eq
from sklearn import __version__ as sklearn_version
from xgboost import __version__ as xgbboot_version
from alphamind.utilities import alpha_logger
from alphamind.utilities import encode
from alphamind.utilities import decode
......@@ -84,3 +87,39 @@ class ModelBase(metaclass=abc.ABCMeta):
obj_layout.fit_target = None
return obj_layout
def create_model_base(party_name=None):
if not party_name:
return ModelBase
else:
class ExternalLibBase(ModelBase):
_lib_name = party_name
def save(self) -> dict:
model_desc = super().save()
if self._lib_name == 'sklearn':
model_desc[self._lib_name + "_version"] = sklearn_version
elif self._lib_name == 'xgboost':
model_desc[self._lib_name + "_version"] = xgbboot_version
else:
raise ValueError("3rd party lib name ({0}) is not recognized".format(self._lib_name))
return model_desc
@classmethod
def load(cls, model_desc: dict):
obj_layout = super().load(model_desc)
if cls._lib_name == 'sklearn':
current_version = sklearn_version
elif cls._lib_name == 'xgboost':
current_version = xgbboot_version
else:
raise ValueError("3rd party lib name ({0}) is not recognized".format(cls._lib_name))
if LooseVersion(current_version) < LooseVersion(model_desc[cls._lib_name + "_version"]):
alpha_logger.warning('Current {2} version {0} is lower than the model version {1}. '
'Loaded model may work incorrectly.'.format(current_version,
model_desc[cls._lib_name + "_version"],
cls._lib_name))
return obj_layout
return ExternalLibBase
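
The create_model_base factory replaces the per-class save/load boilerplate deleted from linearmodel.py and treemodel.py below: models now inherit version stamping and the downgrade warning from the generated base class. A minimal sketch of the intended pattern, using a hypothetical Ridge-based model purely for illustration:

from sklearn.linear_model import Ridge
from alphamind.model.modelbase import create_model_base


class RidgeModel(create_model_base('sklearn')):
    """Hypothetical example: any sklearn-backed model picks up the
    sklearn_version stamp in save() and the version check in load()
    from ExternalLibBase, with no per-class boilerplate."""

    def __init__(self, features=None, fit_target=None, **kwargs):
        super().__init__(features=features, fit_target=fit_target)
        self.impl = Ridge(**kwargs)
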
# -*- coding: utf-8 -*-
"""
Created on 2018-7-9
@author: cheng.li
"""
from sklearn.svm import NuSVR
from alphamind.model.modelbase import create_model_base
class NvSVRModel(create_model_base('sklearn')):
def __init__(self,
features=None,
fit_target=None,
**kwargs):
super().__init__(features=features, fit_target=fit_target)
self.impl = NuSVR(**kwargs)
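
A hedged round-trip sketch for the new NvSVRModel (it wraps sklearn's NuSVR, so extra keyword arguments are forwarded to it); the feature layout and the fit/predict/save calls are assumed to follow the ModelBase conventions used elsewhere in this diff:

import numpy as np
import pandas as pd
from alphamind.model import NvSVRModel, load_model

x = pd.DataFrame(np.random.randn(100, 2), columns=['f1', 'f2'])
y = np.random.randn(100)

model = NvSVRModel(features=['f1', 'f2'], nu=0.5, C=1.0)  # nu/C go straight to NuSVR
model.fit(x, y)

desc = model.save()           # now stamped with sklearn_version by ExternalLibBase
restored = load_model(desc)   # expected to hit the new 'NvSVR' branch in the loader
print(np.allclose(model.predict(x), restored.predict(x)))
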
......@@ -9,7 +9,6 @@ from distutils.version import LooseVersion
import arrow
import numpy as np
import pandas as pd
from sklearn import __version__ as sklearn_version
from sklearn.ensemble import RandomForestRegressor as RandomForestRegressorImpl
from sklearn.ensemble import RandomForestClassifier as RandomForestClassifierImpl
from sklearn.model_selection import train_test_split
......@@ -17,11 +16,11 @@ import xgboost as xgb
from xgboost import __version__ as xgbboot_version
from xgboost import XGBRegressor as XGBRegressorImpl
from xgboost import XGBClassifier as XGBClassifierImpl
from alphamind.model.modelbase import ModelBase
from alphamind.model.modelbase import create_model_base
from alphamind.utilities import alpha_logger
class RandomForestRegressor(ModelBase):
class RandomForestRegressor(create_model_base('sklearn')):
def __init__(self,
n_estimators: int=100,
......@@ -34,27 +33,12 @@ class RandomForestRegressor(ModelBase):
max_features=max_features,
**kwargs)
def save(self) -> dict:
model_desc = super().save()
model_desc['sklearn_version'] = sklearn_version
return model_desc
@classmethod
def load(cls, model_desc: dict):
obj_layout = super().load(model_desc)
if LooseVersion(sklearn_version) < LooseVersion(model_desc['sklearn_version']):
alpha_logger.warning('Current sklearn version {0} is lower than the model version {1}. '
'Loaded model may work incorrectly.'.format(sklearn_version,
model_desc['sklearn_version']))
return obj_layout
@property
def importances(self):
return self.impl.feature_importances_.tolist()
class RandomForestClassifier(ModelBase):
class RandomForestClassifier(create_model_base('sklearn')):
def __init__(self,
n_estimators: int=100,
......@@ -67,27 +51,12 @@ class RandomForestClassifier(ModelBase):
max_features=max_features,
**kwargs)
def save(self) -> dict:
model_desc = super().save()
model_desc['sklearn_version'] = sklearn_version
return model_desc
@classmethod
def load(cls, model_desc: dict):
obj_layout = super().load(model_desc)
if LooseVersion(sklearn_version) < LooseVersion(model_desc['sklearn_version']):
alpha_logger.warning('Current sklearn version {0} is lower than the model version {1}. '
'Loaded model may work incorrectly.'.format(sklearn_version,
model_desc['sklearn_version']))
return obj_layout
@property
def importances(self):
return self.impl.feature_importances_.tolist()
class XGBRegressor(ModelBase):
class XGBRegressor(create_model_base('xgboost')):
def __init__(self,
n_estimators: int=100,
......@@ -104,27 +73,12 @@ class XGBRegressor(ModelBase):
n_jobs=n_jobs,
**kwargs)
def save(self) -> dict:
model_desc = super().save()
model_desc['xgbboot_version'] = xgbboot_version
return model_desc
@classmethod
def load(cls, model_desc: dict):
obj_layout = super().load(model_desc)
if LooseVersion(xgbboot_version) < LooseVersion(model_desc['xgbboot_version']):
alpha_logger.warning('Current xgboost version {0} is lower than the model version {1}. '
'Loaded model may work incorrectly.'.format(xgbboot_version,
model_desc['xgbboot_version']))
return obj_layout
@property
def importances(self):
return self.impl.feature_importances_.tolist()
class XGBClassifier(ModelBase):
class XGBClassifier(create_model_base('xgboost')):
def __init__(self,
n_estimators: int=100,
......@@ -141,27 +95,12 @@ class XGBClassifier(ModelBase):
n_jobs=n_jobs,
**kwargs)
def save(self) -> dict:
model_desc = super().save()
model_desc['xgbboot_version'] = xgbboot_version
return model_desc
@classmethod
def load(cls, model_desc: dict):
obj_layout = super().load(model_desc)
if LooseVersion(xgbboot_version) < LooseVersion(model_desc['xgbboot_version']):
alpha_logger.warning('Current xgboost version {0} is lower than the model version {1}. '
'Loaded model may work incorrectly.'.format(xgbboot_version,
model_desc['xgbboot_version']))
return obj_layout
@property
def importances(self):
return self.impl.feature_importances_.tolist()
class XGBTrainer(ModelBase):
class XGBTrainer(create_model_base('xgboost')):
def __init__(self,
objective='binary:logistic',
......@@ -226,21 +165,6 @@ class XGBTrainer(ModelBase):
d_predict = xgb.DMatrix(x[self.features].values)
return self.impl.predict(d_predict)
def save(self) -> dict:
model_desc = super().save()
model_desc['xgbboot_version'] = xgbboot_version
return model_desc
@classmethod
def load(cls, model_desc: dict):
obj_layout = super().load(model_desc)
if LooseVersion(xgbboot_version) < LooseVersion(model_desc['xgbboot_version']):
alpha_logger.warning('Current xgboost version {0} is lower than the model version {1}. '
'Loaded model may work incorrectly.'.format(xgbboot_version,
model_desc['xgbboot_version']))
return obj_layout
@property
def importances(self):
imps = self.impl.get_fscore().items()
......
Subproject commit ffaf2153dfdce380c3d8aa1a69b328ab77665ad3
Subproject commit 148fd8eedca5aeff85d90e6374658d96316e7f66
......@@ -109,7 +109,6 @@ def target_vol_builder(er: np.ndarray,
cons_mat,
clbound,
cubound,
0.,
vol_target,
risk_model['factor_cov'],
risk_model['factor_loading'],
......
......@@ -23,6 +23,7 @@ from alphamind.data.engines.sqlengine import industry_styles
from alphamind.data.engines.sqlengine import macro_styles
from alphamind.data.processing import factor_processing
from alphamind.analysis.factoranalysis import er_portfolio_analysis
from alphamind.exceptions.exceptions import PortfolioBuilderException
all_styles = risk_styles + industry_styles + macro_styles
......@@ -73,6 +74,7 @@ class Strategy(object):
self.total_data = None
self.index_return = None
self.risk_models = None
self.alpha_models = None
def prepare_backtest_data(self):
total_factors = self.engine.fetch_factor_range(self.universe,
......@@ -120,6 +122,25 @@ class Strategy(object):
offset=1).set_index('trade_date')
self.total_data = total_data
def prepare_backtest_models(self):
if self.total_data is None:
self.prepare_backtest_data()
total_data_groups = self.total_data.groupby('trade_date')
if self.dask_client is None:
models = {}
for ref_date, _ in total_data_groups:
models[ref_date] = train_model(ref_date.strftime('%Y-%m-%d'), self.alpha_model, self.data_meta)
else:
def worker(parameters):
new_model = train_model(parameters[0].strftime('%Y-%m-%d'), parameters[1], parameters[2])
return parameters[0], new_model
l = self.dask_client.map(worker, [(d[0], self.alpha_model, self.data_meta) for d in total_data_groups])
results = self.dask_client.gather(l)
models = dict(results)
self.alpha_models = models
alpha_logger.info("alpha models training finished ...")
@staticmethod
def _create_lu_bounds(running_setting, codes, benchmark_w):
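
The new prepare_backtest_models hook factors per-trade-date model training out of run(): models are trained once (serially, or through dask_client when one is configured), cached on self.alpha_models, and reused by run(). A minimal usage sketch, assuming the alpha_model, data_meta, universe, benchmark_code and running_setting objects defined in the script at the end of this diff:

# Hypothetical usage of the new hook; constructor arguments beyond those
# visible in this diff (dates, frequency, dask client) are omitted.
strategy = Strategy(alpha_model,
                    data_meta,
                    universe=universe,
                    benchmark=benchmark_code)

strategy.prepare_backtest_data()     # loads total_data if it is not already present
strategy.prepare_backtest_models()   # one trained model per trade date, cached on alpha_models
ret_df, positions = strategy.run(running_setting=running_setting)
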
......@@ -168,29 +189,19 @@ class Strategy(object):
executor = copy.deepcopy(running_setting.executor)
positions = pd.DataFrame()
if self.dask_client is None:
models = {}
for ref_date, _ in total_data_groups:
models[ref_date] = train_model(ref_date.strftime('%Y-%m-%d'), self.alpha_model, self.data_meta)
else:
def worker(parameters):
new_model = train_model(parameters[0].strftime('%Y-%m-%d'), parameters[1], parameters[2])
return parameters[0], new_model
l = self.dask_client.map(worker, [(d[0], self.alpha_model, self.data_meta) for d in total_data_groups])
results = self.dask_client.gather(l)
models = dict(results)
if self.alpha_models is None:
self.prepare_backtest_models()
for ref_date, this_data in total_data_groups:
risk_model = self.risk_models[ref_date]
new_model = models[ref_date]
new_model = self.alpha_models[ref_date]
codes = this_data.code.values.tolist()
if previous_pos.empty:
current_position = None
else:
previous_pos.set_index('code', inplace=True)
remained_pos = previous_pos.loc[codes]
remained_pos = previous_pos.reindex(codes)
remained_pos.fillna(0., inplace=True)
current_position = remained_pos.weight.values
......@@ -248,25 +259,38 @@ class Strategy(object):
ret_df = ret_df.shift(1)
ret_df.iloc[0] = 0.
ret_df['excess_return'] = ret_df['returns'] - ret_df['benchmark_returns'] * ret_df['leverage']
return ret_df, positions
def _calculate_pos(self, running_setting, er, data, constraints, benchmark_w, lbound, ubound, risk_model,
current_position):
more_opts = running_setting.more_opts
target_pos, _ = er_portfolio_analysis(er,
industry=data.industry_name.values,
dx_return=None,
constraints=constraints,
detail_analysis=False,
benchmark=benchmark_w,
method=running_setting.rebalance_method,
lbound=lbound,
ubound=ubound,
current_position=current_position,
target_vol=more_opts.get('target_vol'),
risk_model=risk_model,
turn_over_target=more_opts.get('turn_over_target'))
try:
target_pos, _ = er_portfolio_analysis(er,
industry=data.industry_name.values,
dx_return=None,
constraints=constraints,
detail_analysis=False,
benchmark=benchmark_w,
method=running_setting.rebalance_method,
lbound=lbound,
ubound=ubound,
current_position=current_position,
target_vol=more_opts.get('target_vol'),
risk_model=risk_model,
turn_over_target=more_opts.get('turn_over_target'))
except PortfolioBuilderException:
alpha_logger.warning("Not able to fit the constraints. Using full re-balance.")
target_pos, _ = er_portfolio_analysis(er,
industry=data.industry_name.values,
dx_return=None,
constraints=constraints,
detail_analysis=False,
benchmark=benchmark_w,
method=running_setting.rebalance_method,
lbound=lbound,
ubound=ubound,
target_vol=more_opts.get('target_vol'),
risk_model=risk_model)
return target_pos
......@@ -291,44 +315,23 @@ if __name__ == '__main__':
mpl.rcParams['font.sans-serif'] = ['SimHei']
mpl.rcParams['axes.unicode_minus'] = False
# Back test parameter settings
start_date = '2016-01-01'
end_date = '2018-06-11'
"""
Back test parameter settings
"""
benchmark_code = 905
universe = Universe('zz800') + Universe('cyb')
start_date = '2011-01-01'
end_date = '2011-05-04'
freq = '10b'
industry_name = 'sw_adj'
industry_level = 1
turn_over_target = 0.4
batch = 1
horizon = map_freq(freq)
weights_bandwidth = 0.02
universe = Universe('zz800')
data_source = os.environ['DB_URI']
benchmark_code = 300
method = 'risk_neutral'
# Model settings
neutralized_risk = None
alpha_factors = {
'ep_q_cs': CSQuantiles(LAST('ep_q'), groups='sw1_adj'),
'roe_q_cs': CSQuantiles(LAST('roe_q'), groups='sw1_adj'),
'SGRO_cs': CSQuantiles(LAST('SGRO'), groups='sw1_adj'),
'GREV_cs': CSQuantiles(LAST('GREV'), groups='sw1_adj'),
'con_peg_rolling_cs': CSQuantiles(LAST('con_peg_rolling'), groups='sw1_adj'),
'con_pe_rolling_order_cs': CSQuantiles(LAST('con_pe_rolling_order'), groups='sw1_adj'),
'IVR_cs': CSQuantiles(LAST('IVR'), groups='sw1_adj'),
'ILLIQUIDITY_cs': CSQuantiles(LAST('ILLIQUIDITY') * LAST('NegMktValue'), groups='sw1_adj'),
'DividendPaidRatio_cs': CSQuantiles(LAST('DividendPaidRatio'), groups='sw1_adj'),
'ep_q_cs': CSQuantiles(LAST('ep_q'), groups='sw1_adj')
}
weights = dict(ep_q_cs=1.,
roe_q_cs=1.,
SGRO_cs=0.0,
GREV_cs=0.0,
con_peg_rolling_cs=-0.25,
con_pe_rolling_order_cs=-0.25,
IVR_cs=0.5,
ILLIQUIDITY_cs=0.5,
DividendPaidRatio_cs=0.5)
weights = dict(ep_q_cs=1.)
alpha_model = ConstLinearModel(features=alpha_factors, weights=weights)
......@@ -338,79 +341,8 @@ if __name__ == '__main__':
neutralized_risk=None,
pre_process=None,
post_process=None,
data_source=data_source)
# Constraints settings
industry_names = industry_list(industry_name, industry_level)
constraint_risk = ['SIZE', 'BETA']
total_risk_names = constraint_risk + ['benchmark', 'total']
all_styles = risk_styles + industry_names + macro_styles
b_type = []
l_val = []
u_val = []
previous_pos = pd.DataFrame()
rets = []
turn_overs = []
leverags = []
for name in total_risk_names:
if name == 'benchmark':
b_type.append(BoundaryType.RELATIVE)
l_val.append(0.8)
u_val.append(1.0)
elif name == 'total':
b_type.append(BoundaryType.ABSOLUTE)
l_val.append(.0)
u_val.append(.0)
elif name == 'EARNYILD':
b_type.append(BoundaryType.ABSOLUTE)
l_val.append(0.00)
u_val.append(0.60)
elif name == 'GROWTH':
b_type.append(BoundaryType.ABSOLUTE)
l_val.append(-0.05)
u_val.append(0.05)
elif name == 'MOMENTUM':
b_type.append(BoundaryType.ABSOLUTE)
l_val.append(0.20)
u_val.append(0.20)
elif name == 'SIZE':
b_type.append(BoundaryType.ABSOLUTE)
l_val.append(-0.05)
u_val.append(0.05)
elif name == 'LIQUIDTY':
b_type.append(BoundaryType.ABSOLUTE)
l_val.append(-0.40)
u_val.append(-0.0)
elif benchmark_code == 905 and name not in ["计算机", "医药生物", "国防军工", "信息服务", "机械设备"] and name in industry_names:
b_type.append(BoundaryType.RELATIVE)
l_val.append(0.8)
u_val.append(1.0)
elif benchmark_code == 300 and name in ["银行", "保险", "证券", "多元金融"]:
b_type.append(BoundaryType.RELATIVE)
l_val.append(0.70)
u_val.append(0.90)
elif name in ["计算机", "医药生物", "国防军工", "信息服务", "机械设备"]:
b_type.append(BoundaryType.ABSOLUTE)
l_val.append(0.0)
u_val.append(0.05)
else:
b_type.append(BoundaryType.ABSOLUTE)
l_val.append(-0.002)
u_val.append(0.002)
data_source=os.environ['DB_URI'])
bounds = create_box_bounds(total_risk_names, b_type, l_val, u_val)
# Running settings
running_setting = RunningSetting(weights_bandwidth=weights_bandwidth,
rebalance_method=method,
bounds=bounds,
turn_over_target=turn_over_target)
# Strategy
strategy = Strategy(alpha_model,
data_meta,
universe=universe,
......@@ -420,4 +352,60 @@ if __name__ == '__main__':
benchmark=benchmark_code)
strategy.prepare_backtest_data()
ret_df, positions = strategy.run(running_setting=running_setting)
\ No newline at end of file
def create_scenario(weights_bandwidth=0.02, target_vol=0.01, method='risk_neutral'):
industry_names = industry_list('sw_adj', 1)
constraint_risk = ['EARNYILD', 'LIQUIDTY', 'GROWTH', 'SIZE', 'BETA', 'MOMENTUM']
total_risk_names = constraint_risk + industry_names + ['benchmark', 'total']
b_type = []
l_val = []
u_val = []
for name in total_risk_names:
if name == 'benchmark':
b_type.append(BoundaryType.RELATIVE)
l_val.append(0.8)
u_val.append(1.001)
elif name == 'total':
b_type.append(BoundaryType.ABSOLUTE)
l_val.append(-0.001)
u_val.append(.001)
elif name == 'EARNYILD':
b_type.append(BoundaryType.ABSOLUTE)
l_val.append(-0.001)
u_val.append(0.60)
elif name == 'GROWTH':
b_type.append(BoundaryType.ABSOLUTE)
l_val.append(-0.20)
u_val.append(0.20)
elif name == 'MOMENTUM':
b_type.append(BoundaryType.ABSOLUTE)
l_val.append(-0.10)
u_val.append(0.20)
elif name == 'SIZE':
b_type.append(BoundaryType.ABSOLUTE)
l_val.append(-0.20)
u_val.append(0.20)
elif name == 'LIQUIDTY':
b_type.append(BoundaryType.ABSOLUTE)
l_val.append(-0.25)
u_val.append(0.25)
else:
b_type.append(BoundaryType.ABSOLUTE)
l_val.append(-0.01)
u_val.append(0.01)
bounds = create_box_bounds(total_risk_names, b_type, l_val, u_val)
running_setting = RunningSetting(weights_bandwidth=weights_bandwidth,
rebalance_method=method,
bounds=bounds,
target_vol=target_vol,
turn_over_target=0.4)
ret_df, positions = strategy.run(running_setting)
return ret_df
create_scenario(0.01, target_vol=0.01, method='tv')
\ No newline at end of file
......@@ -116,7 +116,6 @@ class TestOptimizers(unittest.TestCase):
None,
None,
None,
target_vol,
target_vol)
# check against known good result
......@@ -144,7 +143,6 @@ class TestOptimizers(unittest.TestCase):
cons,
clbound,
cubound,
target_vol,
target_vol)
# check against known good result
......@@ -173,7 +171,6 @@ class TestOptimizers(unittest.TestCase):
cons,
clbound,
cubound,
0.,
target_vol,
factor_var,
factor_load,
......@@ -204,7 +201,6 @@ class TestOptimizers(unittest.TestCase):
cons,
clbound,
cubound,
0.,
target_vol)
# check against known good result
......
......@@ -57,6 +57,9 @@ class TestMeanVarianceBuild(unittest.TestCase):
status, _, x = mean_variance_builder(er, model, bm, lbound, ubound, None, None, lam=1)
np.testing.assert_array_almost_equal(x, np.linalg.inv(cov) @ er)
def test_mean_variance_builder_without_constraints_with_factor_model(self):
pass
def test_mean_variance_builder_with_none_unity_lambda(self):
er = np.array([0.01, 0.02, 0.03])
cov = np.array([[0.02, 0.01, 0.02],
......
Subproject commit 5cd851ccef8cf0a0a71094d0d0e33a9d102f1f55
Subproject commit a187ed6c8f3aa40b47d5be80667cbbe6a6fd563d