alpha-mind · Commit 6e01cdaa
Authored Sep 22, 2017 by Dr.李
Parent: dca3a11a

fixed models ref_date bug

Showing 2 changed files with 338 additions and 2 deletions:
.gitignore  +1 -2
notebooks/executor_example.ipynb  +337 -0
.gitignore
@@ -9,5 +9,4 @@ Alpha_Mind.egg-info/*
 *.html
 *.nbc
 *.nbi
-/notebooks/.ipynb_checkpoints
-notebooks/*
\ No newline at end of file
+/notebooks/.ipynb_checkpoints
\ No newline at end of file
notebooks/executor_example.ipynb
new file (mode 0 → 100644)
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"%matplotlib inline\n",
"import numpy as np\n",
"import pandas as pd\n",
"from alphamind.api import *\n",
"from PyFin.api import *\n",
"from matplotlib import pyplot as plt\n",
"plt.style.use('fivethirtyeight')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Parameter Setting\n",
"----------------------"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"def _map_freq(freq):\n",
"\n",
" if freq == '1m':\n",
" horizon = 21\n",
" elif freq == '1w':\n",
" horizon = 4\n",
" elif freq == '2w':\n",
" horizon = 8\n",
" elif freq == '3w':\n",
" horizon = 12\n",
" elif freq == '1d':\n",
" horizon = 0\n",
" else:\n",
" raise ValueError(\"Unrecognized freq: {0}\".format(freq))\n",
" return horizon"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"factors = ['VAL', 'RVOL', 'ROEDiluted', 'GREV', 'EPS', 'CHV', 'CFinc1', 'BDTO']\n",
"factor_weights = np.array([0.034129344,\n",
" 0.015881607,\n",
" 0.048765746,\n",
" 0.042747382,\n",
" -0.015900173,\n",
" 0.019044573,\n",
" -0.001792638,\n",
" 0.014277867,\n",
" ])\n",
"\n",
"engine = SqlEngine()\n",
"universe = Universe('custom', ['zz500'])\n",
"benchmark_code = 905\n",
"neutralize_risk = ['SIZE'] + industry_styles\n",
"constraint_risk = ['SIZE'] + industry_styles\n",
"start_date = '2012-01-01'\n",
"end_date = '2017-09-20'\n",
"\n",
"freq = '1w'\n",
"horizon = _map_freq(freq)\n",
"dates = makeSchedule(start_date, end_date, tenor=freq, calendar='china.sse', dateGenerationRule=DateGeneration.Backward)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"all_data = engine.fetch_data_range(universe, factors, dates=dates, benchmark=905)\n",
"factor_all_data = all_data['factor']\n",
"factor_groups = factor_all_data.groupby('trade_date')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Naive Executor Strategy\n",
"---------------------------------"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"rets = []\n",
"turn_overs = []\n",
"executor = NaiveExecutor()\n",
"\n",
"for i, value in enumerate(factor_groups):\n",
" date = value[0]\n",
" data = value[1]\n",
" codes = data.code.tolist()\n",
" ref_date = date.strftime('%Y-%m-%d')\n",
" total_data = data.dropna()\n",
" dx_return = None\n",
" risk_exp = total_data[neutralize_risk].values.astype(float)\n",
" industry = total_data.industry.values\n",
" benchmark_w = total_data.weight.values\n",
" \n",
" constraint_exp = total_data[constraint_risk].values\n",
" risk_exp_expand = np.concatenate((constraint_exp, np.ones((len(risk_exp), 1))), axis=1).astype(float)\n",
" risk_names = constraint_risk + ['total']\n",
" risk_target = risk_exp_expand.T @ benchmark_w\n",
" lbound = np.zeros(len(total_data))\n",
" ubound = 0.01 + benchmark_w\n",
"\n",
" constraint = Constraints(risk_exp_expand, risk_names)\n",
" for i, name in enumerate(risk_names):\n",
" constraint.set_constraints(name, lower_bound=risk_target[i], upper_bound=risk_target[i])\n",
" \n",
" er = factor_processing(total_data[factors].values,\n",
" pre_process=[winsorize_normal, standardize],\n",
" post_process=[standardize]) @ factor_weights\n",
" \n",
" target_pos, _ = er_portfolio_analysis(er,\n",
" industry,\n",
" dx_return,\n",
" constraint,\n",
" False,\n",
" benchmark_w)\n",
" target_pos['code'] = total_data['code'].values\n",
" \n",
" turn_over, executed_pos = executor.execute(target_pos=target_pos)\n",
" \n",
" executed_codes = executed_pos.code.tolist()\n",
" dx_retuns = engine.fetch_dx_return(date, executed_codes, horizon=horizon)\n",
" \n",
" result = pd.merge(executed_pos, total_data, on=['code'], how='inner')\n",
" result = pd.merge(result, dx_retuns, on=['code'])\n",
" \n",
" ret = (result.weight_x - result.weight_y).values @ result.dx.values\n",
" rets.append(ret)\n",
" turn_overs.append(turn_over)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"ret_df = pd.DataFrame({'returns': rets, 'turn_over': turn_overs}, index=dates)\n",
"ret_df.loc[advanceDateByCalendar('china.sse', dates[-1], freq)] = 0.\n",
"ret_df = ret_df.shift(1)\n",
"ret_df.iloc[0] = 0.\n",
"ret_df['tc_cost'] = ret_df.turn_over * 0.002"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"ret_df[['returns', 'tc_cost']].cumsum().plot(figsize=(12, 6), title='Fixed frequency rebalanced: {0}'.format(freq), secondary_y='tc_cost')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"ret_df.turn_over.sum()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Threshold Turn Over Strategy\n",
"------------------------"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"freq = '1d'\n",
"horizon = _map_freq(freq)\n",
"dates = makeSchedule(start_date, end_date, tenor=freq, calendar='china.sse', dateGenerationRule=DateGeneration.Backward)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"all_data = engine.fetch_data_range(universe, factors, dates=dates, benchmark=905)\n",
"factor_all_data = all_data['factor']\n",
"factor_groups = factor_all_data.groupby('trade_date')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"rets = []\n",
"turn_overs = []\n",
"target_turn_over = 1.\n",
"executor = ThresholdExecutor(target_turn_over)\n",
"\n",
"for i, value in enumerate(factor_groups):\n",
" date = value[0]\n",
" data = value[1]\n",
" codes = data.code.tolist()\n",
" ref_date = date.strftime('%Y-%m-%d')\n",
" total_data = data.dropna()\n",
" dx_return = None\n",
" risk_exp = total_data[neutralize_risk].values.astype(float)\n",
" industry = total_data.industry.values\n",
" benchmark_w = total_data.weight.values\n",
" \n",
" constraint_exp = total_data[constraint_risk].values\n",
" risk_exp_expand = np.concatenate((constraint_exp, np.ones((len(risk_exp), 1))), axis=1).astype(float)\n",
" risk_names = constraint_risk + ['total']\n",
" risk_target = risk_exp_expand.T @ benchmark_w\n",
" lbound = np.zeros(len(total_data))\n",
" ubound = 0.01 + benchmark_w\n",
"\n",
" constraint = Constraints(risk_exp_expand, risk_names)\n",
" for i, name in enumerate(risk_names):\n",
" constraint.set_constraints(name, lower_bound=risk_target[i], upper_bound=risk_target[i])\n",
" \n",
" er = factor_processing(total_data[factors].values,\n",
" pre_process=[winsorize_normal, standardize],\n",
" post_process=[standardize]) @ factor_weights\n",
" \n",
" target_pos, _ = er_portfolio_analysis(er,\n",
" industry,\n",
" dx_return,\n",
" constraint,\n",
" False,\n",
" benchmark_w)\n",
" target_pos['code'] = total_data['code'].values\n",
" \n",
" turn_over, executed_pos = executor.execute(target_pos=target_pos)\n",
" \n",
" executed_codes = executed_pos.code.tolist()\n",
" dx_retuns = engine.fetch_dx_return(date, executed_codes, horizon=horizon)\n",
" \n",
" result = pd.merge(executed_pos, total_data, on=['code'], how='inner')\n",
" result = pd.merge(result, dx_retuns, on=['code'])\n",
" \n",
" ret = (result.weight_x - result.weight_y).values @ result.dx.values\n",
" rets.append(ret)\n",
" turn_overs.append(turn_over)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"ret_df = pd.DataFrame({'returns': rets, 'turn_over': turn_overs}, index=dates)\n",
"ret_df.loc[advanceDateByCalendar('china.sse', dates[-1], freq)] = 0.\n",
"ret_df = ret_df.shift(1)\n",
"ret_df.iloc[0] = 0.\n",
"ret_df['tc_cost'] = ret_df.turn_over * 0.002"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"ret_df[['returns', 'tc_cost']].cumsum().plot(figsize=(12, 6),\n",
" title='Targeted turn over rebalanced: Monitored freq {0}, {1} target'.format(freq,\n",
" target_turn_over),\n",
" secondary_y='tc_cost')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
}
},
"nbformat": 4,
"nbformat_minor": 2
}