Commit 0626982d authored Nov 20, 2017 by Dr.李

added index return fetcher

parent c5ff7386
Showing 2 changed files with 89 additions and 12 deletions (+89 -12)

alphamind/data/engines/sqlengine.py             +72 -3
alphamind/examples/combined_model_training.py   +17 -9
alphamind/data/engines/sqlengine.py
@@ -20,7 +20,6 @@ from alphamind.data.engines.universe import Universe
 from alphamind.data.dbmodel.models import FactorMaster
 from alphamind.data.dbmodel.models import FactorLog
 from alphamind.data.dbmodel.models import Strategy
-from alphamind.data.dbmodel.models import DailyReturn
 from alphamind.data.dbmodel.models import IndexComponent
 from alphamind.data.dbmodel.models import Industry
 from alphamind.data.dbmodel.models import Experimental
@@ -31,6 +30,7 @@ from alphamind.data.dbmodel.models import RiskCovLong
 from alphamind.data.dbmodel.models import FullFactor
 from alphamind.data.dbmodel.models import Models
 from alphamind.data.dbmodel.models import Market
+from alphamind.data.dbmodel.models import IndexMarket
 from alphamind.data.dbmodel.models import Universe as UniverseTable
 from alphamind.data.transformer import Transformer
 from alphamind.model.loader import load_model
@@ -191,7 +191,8 @@ class SqlEngine(object):
         start_date = ref_date

         if not expiry_date:
             end_date = advanceDateByCalendar('china.sse', ref_date,
                                              str(1 + horizon + offset + DAILY_RETURN_OFFSET) + 'b').strftime('%Y%m%d')
         else:
             end_date = expiry_date
@@ -224,7 +225,8 @@ class SqlEngine(object):
             start_date = dates[0]
             end_date = dates[-1]

         end_date = advanceDateByCalendar('china.sse', end_date,
                                          str(1 + horizon + offset + DAILY_RETURN_OFFSET) + 'b').strftime('%Y-%m-%d')

         cond = universe.query_range(start_date, end_date)
         big_table = join(Market, UniverseTable,
@@ -248,6 +250,73 @@ class SqlEngine(object):
         df['dx'] = np.exp(df['dx']) - 1.
         return df

+    def fetch_dx_return_index(self,
+                              ref_date: str,
+                              index_code: int,
+                              expiry_date: str = None,
+                              horizon: int = 0,
+                              offset: int = 0) -> pd.DataFrame:
+        start_date = ref_date
+
+        if not expiry_date:
+            end_date = advanceDateByCalendar('china.sse', ref_date,
+                                             str(1 + horizon + offset + DAILY_RETURN_OFFSET) + 'b').strftime('%Y%m%d')
+        else:
+            end_date = expiry_date
+
+        stats = func.sum(self.ln_func(1. + IndexMarket.chgPct)).over(
+            partition_by=IndexMarket.indexCode,
+            order_by=IndexMarket.trade_date,
+            rows=(1 + DAILY_RETURN_OFFSET + offset,
+                  1 + horizon + DAILY_RETURN_OFFSET + offset)).label('dx')
+
+        query = select([IndexMarket.trade_date,
+                        IndexMarket.indexCode.label('code'),
+                        stats]).where(
+            and_(
+                IndexMarket.trade_date.between(start_date, end_date),
+                IndexMarket.indexCode == index_code
+            )
+        )
+
+        df = pd.read_sql(query, self.session.bind).dropna()
+        df = df[df.trade_date == ref_date]
+
+        df['dx'] = np.exp(df['dx']) - 1.
+        return df[['code', 'dx']]
+
+    def fetch_dx_return_index_range(self,
+                                    index_code,
+                                    start_date: str = None,
+                                    end_date: str = None,
+                                    dates: Iterable[str] = None,
+                                    horizon: int = 0,
+                                    offset: int = 0) -> pd.DataFrame:
+        if dates:
+            start_date = dates[0]
+            end_date = dates[-1]
+
+        end_date = advanceDateByCalendar('china.sse', end_date,
+                                         str(1 + horizon + offset + DAILY_RETURN_OFFSET) + 'b').strftime('%Y-%m-%d')
+
+        stats = func.sum(self.ln_func(1. + IndexMarket.chgPct)).over(
+            partition_by=IndexMarket.indexCode,
+            order_by=IndexMarket.trade_date,
+            rows=(1 + offset + DAILY_RETURN_OFFSET,
+                  1 + horizon + offset + DAILY_RETURN_OFFSET)).label('dx')
+
+        query = select([IndexMarket.trade_date,
+                        IndexMarket.indexCode.label('code'),
+                        stats]) \
+            .where(
+            and_(
+                IndexMarket.trade_date.between(start_date, end_date),
+                IndexMarket.indexCode == index_code
+            )
+        )
+
+        df = pd.read_sql(query, self.session.bind).dropna()
+
+        if dates:
+            df = df[df.trade_date.isin(dates)]
+
+        df['dx'] = np.exp(df['dx']) - 1.
+        return df
+
     def fetch_factor(self,
                      ref_date: str,
                      factors: Iterable[object],
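The two new methods mirror the existing per-stock fetch_dx_return / fetch_dx_return_range pair, but sum log daily changes of IndexMarket.chgPct with a SQL window function partitioned by indexCode, so the returned 'dx' column is a forward return for one index. A minimal usage sketch follows; it assumes SqlEngine is constructed from a database URL as in the project's examples. The URL is a placeholder, index code 300 is simply the benchmark_code used in combined_model_training.py, and the horizon and offset values are illustrative:

    from alphamind.data.engines.sqlengine import SqlEngine

    # Placeholder connection string; substitute a real alpha-mind database URL.
    engine = SqlEngine('postgresql+psycopg2://user:pwd@localhost/alpha')

    # Forward return of one index at a single reference date (horizon/offset values are placeholders).
    single = engine.fetch_dx_return_index('2017-11-15', index_code=300, horizon=4, offset=1)

    # The same quantity over a date range, as combined_model_training.py now calls it.
    index_rets = engine.fetch_dx_return_index_range(300,
                                                    start_date='2012-01-01',
                                                    end_date='2012-11-15',
                                                    horizon=4,
                                                    offset=1).set_index('trade_date')

    # Both return a 'dx' column already mapped back from log space: dx = exp(sum(ln(1 + chgPct))) - 1.
    print(index_rets['dx'].head())

Because the window's rows bounds are positive, it covers only trading days after each reference date, so 'dx' is the realized return over the next horizon + 1 days, shifted by offset (plus DAILY_RETURN_OFFSET) business days.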
alphamind/examples/combined_model_training.py
@@ -21,11 +21,11 @@ Back test parameter settings
 """
 start_date = '2012-01-01'
-end_date = '2017-11-15'
+end_date = '2012-11-15'
 benchmark_code = 300
 universe_name = ['zz500', 'hs300']
 universe = Universe(universe_name, universe_name)
-frequency = '2b'
+frequency = '5b'
 batch = 8
 method = 'risk_neutral'
 use_rank = 100
@@ -35,12 +35,12 @@ neutralize_risk = ['SIZE'] + industry_styles
 constraint_risk = ['SIZE'] + industry_styles
 size_risk_lower = 0
 size_risk_upper = 0
-turn_over_target_base = 0.1
+turn_over_target_base = 0.25
 weight_gaps = [0.01, 0.02, 0.03, 0.04]
 benchmark_total_lower = 0.8
 benchmark_total_upper = 1.
 horizon = map_freq(frequency)
-hedging_ratio = 1.
+hedging_ratio = 0.
 executor = NaiveExecutor()
@@ -101,7 +101,7 @@ for ref_date in ref_dates:
     alpha_logger.info('trade_date: {0} training finished'.format(ref_date))

-frequency = '2b'
+frequency = '5b'
 ref_dates = makeSchedule(start_date, end_date, frequency, 'china.sse')

 const_model_factor_data = engine.fetch_data_range(universe,
@@ -144,8 +144,8 @@ for weight_gap in weight_gaps:
     risk_names = constraint_risk + ['total']
     risk_target = risk_exp_expand.T @ benchmark_w

-    lbound = np.maximum(0., hedging_ratio * benchmark_w - weight_gap)  # np.zeros(len(total_data))
-    ubound = weight_gap + hedging_ratio * benchmark_w
+    lbound = np.maximum(0., benchmark_w - weight_gap)  # np.zeros(len(total_data))
+    ubound = weight_gap + benchmark_w

     is_in_benchmark = (benchmark_w > 0.).astype(float)
@@ -256,7 +256,7 @@ for weight_gap in weight_gaps:
         leverage = result.weight_x.abs().sum()

         ret = (result.weight_x - hedging_ratio * result.weight_y * leverage / result.weight_y.sum()).values @ result.dx.values
-        rets.append(ret)
+        rets.append(np.log(1. + ret))
         executor.set_current(executed_pos)
         turn_overs.append(turn_over)
         leverags.append(leverage)
@@ -265,10 +265,17 @@ for weight_gap in weight_gaps:
         alpha_logger.info('{0} is finished'.format(date))

     ret_df = pd.DataFrame({'returns': rets, 'turn_over': turn_overs, 'leverage': leverage}, index=index_dates)
+
+    # index return
+    index_return = engine.fetch_dx_return_index_range(benchmark_code, start_date, end_date, horizon=horizon,
+                                                      offset=1).set_index('trade_date')
+    ret_df['index'] = np.log(index_return['dx'] + 1.)
+
     ret_df.loc[advanceDateByCalendar('china.sse', ref_dates[-1], frequency)] = 0.
     ret_df = ret_df.shift(1)
     ret_df.iloc[0] = 0.
     ret_df['tc_cost'] = ret_df.turn_over * 0.002
+    ret_df['returns'] = ret_df['leverage'] * (ret_df['returns'] - ret_df['index'])

     ret_df[['returns', 'tc_cost']].cumsum().plot(figsize=(12, 6),
                                                  title='Fixed frequency rebalanced: {0}'.format(frequency),
@@ -280,7 +287,7 @@ for weight_gap in weight_gaps:
     drawdown_calc = MovingMaxDrawdown(49)
     max_drawdown_calc = MovingMaxDrawdown(len(ret_df))

-    res_df = pd.DataFrame(columns=['daily_return', 'cum_ret', 'sharp', 'drawdown', 'max_drawn'])
+    res_df = pd.DataFrame(columns=['daily_return', 'cum_ret', 'sharp', 'drawdown', 'max_drawn', 'leverage'])
     total_returns = 0.
@@ -295,6 +302,7 @@ for weight_gap in weight_gaps:
         res_df.loc[date, 'cum_ret'] = total_returns
         res_df.loc[date, 'drawdown'] = drawdown_calc.result()[0]
         res_df.loc[date, 'max_drawn'] = max_drawdown_calc.result()[0]
+        res_df.loc[date, 'leverage'] = ret_df.loc[date, 'leverage']

         if i < 10:
             res_df.loc[date, 'sharp'] = 0.
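With hedging_ratio set to 0, the benchmark leg drops out of the optimizer bounds and of the raw portfolio return; instead the example now stores per-period log returns and subtracts the fetched index log return afterwards, scaled by leverage. A small numeric sketch of that bookkeeping, with made-up figures (none of these numbers come from the backtest output):

    import numpy as np
    import pandas as pd

    # Made-up per-period figures for illustration only.
    raw_long = np.array([0.012, -0.004, 0.007])     # simple returns of the long portfolio
    raw_index = np.array([0.010, -0.006, 0.005])    # simple returns of the benchmark index

    ret_df = pd.DataFrame({
        'returns': np.log(1. + raw_long),    # stored as log returns, as rets.append(np.log(1. + ret)) now does
        'index': np.log(1. + raw_index),     # mirrors ret_df['index'] = np.log(index_return['dx'] + 1.)
        'leverage': [1.0, 1.0, 1.0],
        'turn_over': [0.15, 0.12, 0.10],
    })

    ret_df['tc_cost'] = ret_df.turn_over * 0.002
    # Hedged return = leverage * (portfolio log return - index log return), as in the new line of the example.
    ret_df['returns'] = ret_df['leverage'] * (ret_df['returns'] - ret_df['index'])
    print(ret_df[['returns', 'tc_cost']].cumsum())

Because both legs are kept in log space, the cumulative sum of the hedged column is a consistent cumulative log excess return over the benchmark.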