Dr.李 / alpha-mind · Commits

Commit 877d8434 authored Oct 20, 2017 by Dr.李
added more doc

parent 9ba3fc03
Showing 2 changed files with 56 additions and 94 deletions (+56 −94)
doc/2017-10-20_备选策略以及生产策略比较.xlsx (comparison of candidate vs. production strategies)  +0 −0
notebooks/2017-10-18_new_strategy.ipynb  +56 −94
doc/2017-10-20_备选策略以及生产策略比较.xlsx  0 → 100644
File added
notebooks/2017-10-18_new_strategy.ipynb
@@ -29,18 +29,20 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {},
+   "metadata": {
+    "collapsed": true
+   },
    "outputs": [],
    "source": [
-    "engine = SqlEngine()\n",
+    "engine = SqlEngine('postgres+psycopg2://postgres:A12345678!@10.63.6.220/alpha')\n",
     "universe = Universe('custom', ['zz500'])\n",
     "benchmark_code = 905\n",
     "neutralize_risk = ['SIZE'] + industry_styles\n",
     "constraint_risk = ['SIZE'] + industry_styles\n",
     "start_date = '2015-01-01'\n",
-    "end_date = '2017-10-16'\n",
-    "industry_lower = 0.5\n",
-    "industry_upper = 1.5\n",
+    "end_date = '2017-10-17'\n",
+    "industry_lower = 0.75\n",
+    "industry_upper = 1.25\n",
     "\n",
     "freq = '1d'\n",
     "horizon = map_freq(freq)\n",
@@ -106,6 +108,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
+    "%%time\n",
     "all_data = engine.fetch_data_range(universe, factors, dates=dates, benchmark=905)\n",
     "factor_all_data = all_data['factor']\n",
     "factor_groups = factor_all_data.groupby('trade_date')"
@@ -114,7 +117,9 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {},
+   "metadata": {
+    "collapsed": true
+   },
    "outputs": [],
    "source": [
     "rets = []\n",
@@ -151,10 +156,12 @@
     "    \n",
     "    er1 = factor_processing(total_data[factor_group1].values,\n",
     "                            pre_process=[winsorize_normal, standardize],\n",
+    "                            risk_factors=risk_exp,\n",
     "                            post_process=[winsorize_normal, standardize]) @ factor_weight1\n",
     "    \n",
     "    er2 = factor_processing(total_data[factor_group2].values,\n",
     "                            pre_process=[winsorize_normal, standardize],\n",
+    "                            risk_factors=risk_exp,\n",
     "                            post_process=[winsorize_normal, standardize]) @ factor_weight2\n",
     "    \n",
     "    er = (er1 / er1.std() + er2 / er2.std())\n",
@@ -277,10 +284,12 @@
     "    \n",
     "    er1 = factor_processing(total_data[factor_group1].values,\n",
     "                            pre_process=[winsorize_normal, standardize],\n",
+    "                            risk_factors=risk_exp,\n",
     "                            post_process=[winsorize_normal, standardize]) @ factor_weight1\n",
     "    \n",
     "    er2 = factor_processing(total_data[factor_group2].values,\n",
     "                            pre_process=[winsorize_normal, standardize],\n",
+    "                            risk_factors=risk_exp,\n",
     "                            post_process=[winsorize_normal, standardize]) @ factor_weight2\n",
     "    \n",
     "    er = (er1 / er1.std() + er2 / er2.std())\n",
@@ -415,30 +424,20 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
+   "metadata": {},
    "outputs": [],
    "source": [
     "factor_group1 = ['RVOL', 'EPS', 'DROEAfterNonRecurring', 'DivP', 'CFinc1', 'BDTO']\n",
     "factor_weight1 = [0.05, 0.3, 0.35, 0.075, 0.15, 0.05]\n",
     "group_weight1 = 1.\n",
     "\n",
-    "factor_group2 = ['VAL', 'RVOL', 'ROEDiluted', 'GREV', 'EPS', 'CHV', 'CFinc1', 'BDTO']\n",
-    "factor_weight2 = [0.034129344, 0.015881607, 0.048765746, 0.042747382, -0.015900173, 0.019044573, -0.001792638, 0.014277867]\n",
+    "factor_group2 = ['VAL', 'IVR', 'ROEDiluted', 'GREV', 'EPS', 'CHV', 'CFinc1', 'BDTO']\n",
+    "factor_weight2 = [0.034129344, 0.02, 0.048765746, 0.042747382, -0.015900173, 0.019044573, -0.001792638, 0.014277867]\n",
     "group_weight2 = 1.\n",
     "\n",
-    "factor_group3 = ['con_pe_rolling', 'con_pb_rolling', 'con_eps', 'con_target_price']\n",
-    "factor_weight3 = [-0.6, -0.6, 0.6, 1.2]\n",
-    "group_weight3 = 0.5\n",
-    "\n",
-    "factor_group4 = ['IVR']\n",
-    "factor_weight4 = [1.]\n",
-    "group_weight4 = 0.2\n",
-    "\n",
-    "factor_group5 = ['BBIC']\n",
-    "factor_weight5 = [1.]\n",
-    "group_weight5 = 0."
+    "factor_group3 = ['cs_pe', 'cs_pb', 'cs_eps']\n",
+    "factor_weight3 = [-0.6, -0.6, 0.6]\n",
+    "group_weight3 = 0.25"
    ]
   },
   {
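This cell carries the main strategy change: five factor groups become three. 'IVR' replaces 'RVOL' inside factor_group2 (that slot's weight moves from 0.015881607 to 0.02), factor_group3 is rewritten against the cs_* aliases defined in the next cell with 'con_target_price' dropped, group_weight3 falls from 0.5 to 0.25, and the standalone IVR and BBIC groups disappear. The groups are still blended by the rule used further down: each group's expected-return vector is scaled to unit standard deviation and then weighted. A small self-contained NumPy sketch of that blending rule, with made-up vectors standing in for er1, er2 and er3:

import numpy as np

# Made-up per-group expected returns for a five-asset universe.
er1 = np.array([0.8, -0.2, 0.1, 0.5, -1.0])
er2 = np.array([0.3, 0.4, -0.6, 0.2, 0.1])
er3 = np.array([-0.1, 0.9, 0.2, -0.4, 0.3])

group_weight1, group_weight2, group_weight3 = 1., 1., 0.25

# Blending rule from the notebook: scale each group score to unit standard
# deviation, weight it by its group weight, and sum.
er = (group_weight1 * er1 / er1.std() +
      group_weight2 * er2 / er2.std() +
      group_weight3 * er3 / er3.std())

print(er)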
@@ -449,7 +448,29 @@
    },
    "outputs": [],
    "source": [
-    "factors = list(set(factor_group1 + factor_group2 + factor_group3 + factor_group4 + factor_group5))\n",
+    "factors = {'RVOL': LAST('RVOL'),\n",
+    "           'EPS': LAST('EPS'),\n",
+    "           'DROEAfterNonRecurring': LAST('DROEAfterNonRecurring'),\n",
+    "           'DivP': LAST('DivP'),\n",
+    "           'CFinc1': LAST('CFinc1'),\n",
+    "           'BDTO': LAST('BDTO'),\n",
+    "           'VAL': LAST('VAL'),\n",
+    "           'ROEDiluted': LAST('ROEDiluted'),\n",
+    "           'GREV': LAST('GREV'),\n",
+    "           'CHV': LAST('CHV'),\n",
+    "           'cs_pe': LAST('con_pe_rolling'),\n",
+    "           'cs_pb': LAST('con_pb_rolling'),\n",
+    "           'cs_eps': LAST('con_eps_rolling'),\n",
+    "           'IVR': LAST('IVR')}"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "%%time\n",
     "all_data = engine.fetch_data_range(universe, factors, dates=dates, benchmark=905)\n",
     "factor_all_data = all_data['factor']\n",
     "factor_groups = factor_all_data.groupby('trade_date')"
@@ -495,29 +516,22 @@
     "    \n",
     "    er1 = factor_processing(total_data[factor_group1].values,\n",
     "                            pre_process=[winsorize_normal, standardize],\n",
+    "                            risk_factors=risk_exp,\n",
     "                            post_process=[winsorize_normal, standardize]) @ factor_weight1\n",
     "    \n",
     "    er2 = factor_processing(total_data[factor_group2].values,\n",
     "                            pre_process=[winsorize_normal, standardize],\n",
+    "                            risk_factors=risk_exp,\n",
     "                            post_process=[winsorize_normal, standardize]) @ factor_weight2\n",
     "    \n",
-    "    er3 = factor_processing(total_data[factor_group3].values,\n",
-    "                            pre_process=[winsorize_normal, standardize],\n",
-    "                            post_process=[winsorize_normal, standardize]) @ factor_weight3\n",
-    "    \n",
-    "    er4 = factor_processing(total_data[factor_group4].values,\n",
-    "                            pre_process=[winsorize_normal, standardize],\n",
-    "                            post_process=[winsorize_normal, standardize]) @ factor_weight4\n",
-    "    \n",
-    "    er5 = factor_processing(total_data[factor_group4].values,\n",
-    "                            pre_process=[winsorize_normal, standardize],\n",
-    "                            post_process=[winsorize_normal, standardize]) @ factor_weight5\n",
+    "    er3 = factor_processing((total_data[factor_group3].values @ factor_weight3).reshape((-1,1)),\n",
+    "                            pre_process=[winsorize_normal, standardize],\n",
+    "                            risk_factors=risk_exp,\n",
+    "                            post_process=[winsorize_normal, standardize]).flatten()\n",
     "    \n",
     "    er = (group_weight1* er1 / er1.std() + \n",
     "          group_weight2 * er2 / er2.std() + \n",
-    "          group_weight3 * er3 / er3.std() + \n",
-    "          group_weight4 * er4 / er4.std() +\n",
-    "          group_weight5 * er5 / er5.std())\n",
+    "          group_weight3 * er3 / er3.std())\n",
     "    \n",
     "    target_pos, _ = er_portfolio_analysis(er,\n",
     "                                          industry,\n",
@@ -639,29 +653,22 @@
     "    \n",
     "    er1 = factor_processing(total_data[factor_group1].values,\n",
     "                            pre_process=[winsorize_normal, standardize],\n",
+    "                            risk_factors=risk_exp,\n",
     "                            post_process=[winsorize_normal, standardize]) @ factor_weight1\n",
     "    \n",
     "    er2 = factor_processing(total_data[factor_group2].values,\n",
     "                            pre_process=[winsorize_normal, standardize],\n",
+    "                            risk_factors=risk_exp,\n",
     "                            post_process=[winsorize_normal, standardize]) @ factor_weight2\n",
     "    \n",
-    "    er3 = factor_processing(total_data[factor_group3].values,\n",
-    "                            pre_process=[winsorize_normal, standardize],\n",
-    "                            post_process=[winsorize_normal, standardize]) @ factor_weight3\n",
-    "    \n",
-    "    er4 = factor_processing(total_data[factor_group4].values,\n",
-    "                            pre_process=[winsorize_normal, standardize],\n",
-    "                            post_process=[winsorize_normal, standardize]) @ factor_weight4\n",
-    "    \n",
-    "    er5 = factor_processing(total_data[factor_group4].values,\n",
-    "                            pre_process=[winsorize_normal, standardize],\n",
-    "                            post_process=[winsorize_normal, standardize]) @ factor_weight5\n",
+    "    er3 = factor_processing((total_data[factor_group3].values @ factor_weight3).reshape((-1,1)),\n",
+    "                            pre_process=[winsorize_normal, standardize],\n",
+    "                            risk_factors=risk_exp,\n",
+    "                            post_process=[winsorize_normal, standardize]).flatten()\n",
     "    \n",
     "    er = (group_weight1* er1 / er1.std() + \n",
     "          group_weight2 * er2 / er2.std() + \n",
-    "          group_weight3 * er3 / er3.std() + \n",
-    "          group_weight4 * er4 / er4.std() +\n",
-    "          group_weight5 * er5 / er5.std())\n",
+    "          group_weight3 * er3 / er3.std())\n",
     "    \n",
     "    codes = total_data['code'].values\n",
     "    \n",
@@ -809,51 +816,6 @@
    "outputs": [],
    "source": []
   },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": []
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": []
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": []
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": []
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": []
-  },
   {
    "cell_type": "code",
    "execution_count": null,