Dr.李 / alpha-mind · Commit b7fa9cc6
Authored Oct 11, 2017 by Dr.李

Merge branch 'master' of https://github.com/wegamekinglc/alpha-mind

Parents: 6fb8a24d, 7e8778af
1 changed file with 341 additions and 0 deletions:

notebooks/turn_over_control_strategy.ipynb (new file, +341 -0)
```python
%matplotlib inline
import numpy as np
import pandas as pd
from PyFin.api import *
from alphamind.api import *
from matplotlib import pyplot as plt
plt.style.use('fivethirtyeight')
```
# Parameter Setting
----------------------
```python
#factors = ['RVOL', 'EPS', 'DROEAfterNonRecurring', 'DivP', 'CFinc1', 'BDTO']
#factor_weights = np.array([0.05, 0.3, 0.35, 0.075, 0.15, 0.05])

factors = ['VAL', 'RVOL', 'ROEDiluted', 'GREV', 'EPS', 'CHV', 'CFinc1', 'BDTO']
factor_weights = np.array([0.034129344,
                           0.015881607,
                           0.048765746,
                           0.042747382,
                           -0.015900173,
                           0.019044573,
                           -0.001792638,
                           0.014277867])

engine = SqlEngine()
universe = Universe('custom', ['zz500'])
benchmark_code = 905
neutralize_risk = ['SIZE'] + industry_styles
constraint_risk = ['SIZE'] + industry_styles
freq = '1w'

# Map the rebalance frequency to the return horizon used below.
if freq == '1m':
    horizon = 21
elif freq == '1w':
    horizon = 4
elif freq == '2w':
    horizon = 8
elif freq == '3w':
    horizon = 12
elif freq == '1d':
    horizon = 0
else:
    raise ValueError("Unrecognized freq: {0}".format(freq))

dates = makeSchedule('2012-01-01', '2017-09-19',
                     tenor=freq,
                     calendar='china.sse',
                     dateGenerationRule=DateGeneration.Backward)
```
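The same if/elif ladder reappears verbatim in the targeted-turnover section below; a dictionary lookup keeps the mapping in one place. A minimal sketch with the same mapping as above (the helper name is ours, not part of the alphamind API):

```python
# Hypothetical helper: same freq -> horizon mapping as the if/elif chain above.
FREQ_TO_HORIZON = {'1d': 0, '1w': 4, '2w': 8, '3w': 12, '1m': 21}

def map_freq(freq):
    try:
        return FREQ_TO_HORIZON[freq]
    except KeyError:
        raise ValueError("Unrecognized freq: {0}".format(freq))
```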
```python
all_data = engine.fetch_data_range(universe, factors, dates=dates, benchmark=benchmark_code)
factor_all_data = all_data['factor']
factor_groups = factor_all_data.groupby('trade_date')
```
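A quick shape check before looping catches missing columns early. The column names below are inferred from how the backtest loops use `factor_all_data`; this check is a sketch, not part of the original notebook:

```python
# Sanity check: the loops below assume these columns are present.
expected = set(factors) | {'code', 'trade_date', 'industry', 'weight'}
missing = expected - set(factor_all_data.columns)
assert not missing, 'missing columns: {0}'.format(missing)
print(factor_all_data.shape)
```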
# Fixed Frequency Rebalance Strategy
-------------------------------------
```python
rets = []
turn_overs = []
previous_pos = pd.DataFrame()

for i, value in enumerate(factor_groups):
    date = value[0]
    data = value[1]
    codes = data.code.tolist()
    ref_date = date.strftime('%Y-%m-%d')
    returns = engine.fetch_dx_return(ref_date, codes, horizon=horizon)
    total_data = pd.merge(data, returns, on=['code']).dropna()
    risk_exp = total_data[neutralize_risk].values.astype(float)
    industry = total_data.industry.values
    dx_return = total_data.dx.values
    benchmark_w = total_data.weight.values

    constraint_exp = total_data[constraint_risk].values
    risk_exp_expand = np.concatenate((constraint_exp, np.ones((len(risk_exp), 1))), axis=1).astype(float)
    risk_names = constraint_risk + ['total']
    risk_target = risk_exp_expand.T @ benchmark_w
    lbound = np.zeros(len(total_data))  # note: bounds are computed but not passed to the optimizer below
    ubound = 0.01 + benchmark_w

    # Pin every risk exposure (and the total weight) to the benchmark level.
    constraint = Constraints(risk_exp_expand, risk_names)
    for j, name in enumerate(risk_names):
        constraint.set_constraints(name, lower_bound=risk_target[j], upper_bound=risk_target[j])

    er = factor_processing(total_data[factors].values,
                           pre_process=[winsorize_normal, standardize],
                           post_process=[standardize]) @ factor_weights

    pos, _ = er_portfolio_analysis(er,
                                   industry,
                                   dx_return,
                                   constraint,
                                   False,
                                   benchmark_w)
    pos['code'] = total_data['code']

    ret = (pos.weight - benchmark_w) @ dx_return
    rets.append(ret)

    if previous_pos.empty:
        turn_over = 0.
    else:
        pos_merged = pd.merge(pos, previous_pos, on=['code'], how='outer')
        pos_merged.fillna(0, inplace=True)
        turn_over = np.abs(pos_merged.weight_x - pos_merged.weight_y).sum()

    turn_overs.append(turn_over)

    previous_pos = pos
```
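Turnover here is the total absolute weight change, sum over i of |w_new,i - w_old,i|, with names present on only one side counted at weight 0 via the outer join. Pulling the computation into a helper makes the two backtest loops easier to compare; a sketch with the same convention as above (the function name is ours):

```python
def compute_turn_over(cur_pos, prev_pos):
    # Total absolute weight change between two positions, matched on 'code';
    # names absent from one side count with weight 0 (outer join), as above.
    if prev_pos.empty:
        return 0.
    merged = pd.merge(cur_pos, prev_pos, on=['code'], how='outer').fillna(0)
    return np.abs(merged.weight_x - merged.weight_y).sum()
```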
```python
ret_df = pd.DataFrame({'returns': rets, 'turn_over': turn_overs}, index=dates)
# Shift by one period so each decision's return and turnover are booked on the
# date they are realized; append one extra date so the last decision survives the shift.
ret_df.loc[advanceDateByCalendar('china.sse', dates[-1], freq)] = 0.
ret_df = ret_df.shift(1)
ret_df.iloc[0] = 0.
ret_df['tc_cost'] = ret_df.turn_over * 0.002  # 0.002 = 20 bps cost per unit of turnover
```
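To make the alignment concrete: a position decided at date t earns its return over the following period, so the return row decided at t shows up at t+1 after the shift. A toy illustration with made-up dates and values:

```python
# Toy illustration of the shift: the return decided at d1 shows up at d2.
demo = pd.DataFrame({'returns': [0.01, -0.02]},
                    index=pd.to_datetime(['2017-01-06', '2017-01-13']))
demo.loc[pd.Timestamp('2017-01-20')] = 0.   # extra date appended, as above
demo = demo.shift(1)
demo.iloc[0] = 0.
print(demo)  # 0.0 at d1, 0.01 at d2, -0.02 at d3
```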
```python
ret_df[['returns', 'tc_cost']].cumsum().plot(figsize=(12, 6),
                                             title='Fixed frequency rebalanced: {0}'.format(freq),
                                             secondary_y='tc_cost')
```
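Beyond the cumulative plot, a few summary numbers make the cost drag concrete. A sketch, assuming roughly 52 periods per year for the '1w' run above (the computation below is ours, not part of the alphamind API):

```python
# Rough annualized summary for the net-of-cost series (assumes ~52 periods/year).
net = ret_df.returns - ret_df.tc_cost
periods_per_year = 52
ann_ret = net.mean() * periods_per_year
ann_vol = net.std() * np.sqrt(periods_per_year)
print('annualized return: {0:.2%}, vol: {1:.2%}, IR: {2:.2f}'.format(
    ann_ret, ann_vol, ann_ret / ann_vol))
```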
# Targeted Turn Over Strategy
-------------------------------
```python
freq = '1d'
dates = makeSchedule('2012-01-01', '2017-09-19',
                     tenor=freq,
                     calendar='china.sse',
                     dateGenerationRule=DateGeneration.Backward)
all_data = engine.fetch_data_range(universe, factors, dates=dates, benchmark=benchmark_code)
factor_all_data = all_data['factor']
factor_groups = factor_all_data.groupby('trade_date')

# Same freq -> horizon mapping as in the parameter-setting section.
if freq == '1m':
    horizon = 21
elif freq == '1w':
    horizon = 4
elif freq == '2w':
    horizon = 8
elif freq == '3w':
    horizon = 12
elif freq == '1d':
    horizon = 0
else:
    raise ValueError("Unrecognized freq: {0}".format(freq))
```
```python
target_turn_over = 0.5  # trade only when the would-be turnover reaches 50%
```
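The targeted strategy monitors the portfolio daily: each day it computes the turnover that trading into the newly optimized position would incur, and it trades only when that figure reaches `target_turn_over`; otherwise it carries the previous position forward. The decision rule in isolation, as a sketch mirroring the loop below (the helper name is ours):

```python
# Rebalance rule: trade into the new position only when the would-be
# turnover reaches the target; otherwise keep the old position.
def apply_turn_over_target(new_pos, old_pos, target):
    if old_pos.empty:
        return new_pos, 0.
    merged = pd.merge(new_pos, old_pos, on=['code'], how='left').fillna(0)
    turn_over = np.abs(merged.weight_x - merged.weight_y).sum()
    if turn_over < target:
        kept = merged[['weight_y', 'code']].rename(columns={'weight_y': 'weight'})
        return kept, 0.
    return new_pos, turn_over
```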
```python
rets = []
turn_overs = []
previous_pos = pd.DataFrame()

for i, value in enumerate(factor_groups):
    date = value[0]
    data = value[1]
    codes = data.code.tolist()
    ref_date = date.strftime('%Y-%m-%d')
    returns = engine.fetch_dx_return(ref_date, codes, horizon=horizon)
    total_data = pd.merge(data, returns, on=['code']).dropna()
    risk_exp = total_data[neutralize_risk].values.astype(float)
    industry = total_data.industry.values
    dx_return = total_data.dx.values
    benchmark_w = total_data.weight.values

    constraint_exp = total_data[constraint_risk].values
    risk_exp_expand = np.concatenate((constraint_exp, np.ones((len(risk_exp), 1))), axis=1).astype(float)
    risk_names = constraint_risk + ['total']
    risk_target = risk_exp_expand.T @ benchmark_w
    lbound = np.zeros(len(total_data))  # note: bounds are computed but not passed to the optimizer below
    ubound = 0.01 + benchmark_w

    constraint = Constraints(risk_exp_expand, risk_names)
    for j, name in enumerate(risk_names):
        constraint.set_constraints(name, lower_bound=risk_target[j], upper_bound=risk_target[j])

    er = factor_processing(total_data[factors].values,
                           pre_process=[winsorize_normal, standardize],
                           post_process=[standardize]) @ factor_weights

    pos, _ = er_portfolio_analysis(er,
                                   industry,
                                   dx_return,
                                   constraint,
                                   False,
                                   benchmark_w)
    pos['code'] = total_data['code'].astype(int).values

    if previous_pos.empty:
        turn_over = 0.
    else:
        # Left join: names that dropped out of the current universe are ignored.
        pos_merged = pd.merge(pos, previous_pos, on=['code'], how='left')
        pos_merged.fillna(0, inplace=True)
        turn_over = np.abs(pos_merged.weight_x - pos_merged.weight_y).sum()

    # Below the target, skip the trade and carry the old position forward.
    if turn_over < target_turn_over and not previous_pos.empty:
        turn_over = 0.
        previous_pos = pos_merged[['weight_y', 'code']].rename(columns={'weight_y': 'weight'})
        pos = previous_pos.copy()

    turn_overs.append(turn_over)

    ret = (pos.weight - benchmark_w) @ dx_return
    rets.append(ret)

    previous_pos = pos.copy()
    print('{0} is finished'.format(date))
```
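Once the daily run finishes, the realized trading frequency shows how binding the 50% target is. A quick diagnostic over the `turn_overs` list from the cell above (days with zero turnover include both skipped rebalances and the first day):

```python
# Fraction of monitored days on which the strategy actually traded,
# and the average turnover on those trade days.
trades = np.array(turn_overs)
trade_days = trades > 0
print('rebalanced on {0:.1%} of days; avg turnover when trading: {1:.3f}'.format(
    trade_days.mean(), trades[trade_days].mean() if trade_days.any() else 0.))
```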
```python
ret_df = pd.DataFrame({'returns': rets, 'turn_over': turn_overs}, index=dates)
ret_df.loc[advanceDateByCalendar('china.sse', dates[-1], freq)] = 0.
ret_df = ret_df.shift(1)
ret_df.iloc[0] = 0.
ret_df['tc_cost'] = ret_df.turn_over * 0.002
```
```python
ret_df[['returns', 'tc_cost']].cumsum().plot(figsize=(12, 6),
                                             title='Targeted turn over rebalanced: Monitored freq {0}, {1} target'.format(freq, target_turn_over),
                                             secondary_y='tc_cost')
```