Unverified commit 02e028fd authored by iLampard, committed by GitHub

Merge pull request #4 from alpha-miner/master

merge update
parents f1709d16 cd3087a0
# -*- coding: utf-8 -*-
"""
Created on 2017-5-17
@author: cheng.li
"""
@@ -76,10 +76,7 @@ def cross_section_analysis(ref_date,
 if __name__ == '__main__':
     import numpy as np
     import pandas as pd
     import statsmodels.api as sm
-    from alphamind.api import *
+    from alphamind.api import SqlEngine, Universe, risk_styles, industry_styles
     factor_name = 'SIZE'
     data_source = 'postgres+psycopg2://postgres:A12345678!@10.63.6.220/alpha'
......
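For readers skimming the hunk above: it replaces the star import with named imports. Below is a minimal sketch of how those names get wired together; the `SqlEngine(data_source)` signature is an assumption, the `Universe` usage mirrors the turnover script later in this diff, and the credentials are placeholders.

```python
# Sketch only, not part of the commit.
from alphamind.api import SqlEngine, Universe, risk_styles, industry_styles

data_source = 'postgres+psycopg2://user:password@host/alpha'  # placeholder DSN
engine = SqlEngine(data_source)     # assumed to accept a SQLAlchemy-style URL
universe = Universe('custom', ['zz500'])
styles = risk_styles + industry_styles  # style lists used as risk-model columns
```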
@@ -14,7 +14,7 @@ from alphamind.data.standardize import standardize
 from alphamind.data.winsorize import winsorize_normal
 from alphamind.portfolio.constraints import Constraints
 from alphamind.portfolio.constraints import LinearConstraints
-from alphamind.portfolio.longshortbulder import long_short_build
+from alphamind.portfolio.longshortbulder import long_short_builder
 from alphamind.portfolio.rankbuilder import rank_build
 from alphamind.portfolio.linearbuilder import linear_builder
 from alphamind.portfolio.meanvariancebuilder import mean_variance_builder
@@ -109,7 +109,7 @@ def er_portfolio_analysis(er: np.ndarray,
         weights = rank_build(er, use_rank=kwargs['use_rank'], masks=is_tradable).flatten() * benchmark.sum() / kwargs[
             'use_rank']
     elif method == 'ls' or method == 'long_short':
-        weights = long_short_build(er).flatten()
+        weights = long_short_builder(er).flatten()
     elif method == 'mv' or method == 'mean_variance':
         lbound, ubound, cons_exp, risk_lbound, risk_ubound = create_constraints(benchmark, **kwargs)
         cov = kwargs['cov']
......
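The `ls`/`long_short` branch above delegates entirely to the renamed builder. Judging from what the tests later in this commit assert, with no masks or groups the builder simply scales the score vector to unit gross leverage; a tiny numpy illustration with made-up numbers:

```python
import numpy as np

er = np.array([0.5, -1.0, 1.5, -2.0])   # hypothetical expected returns
weights = er / np.abs(er).sum()          # what long_short_builder(er).flatten() yields at leverage=1
print(weights)                           # positive scores become longs, negatives become shorts
print(np.abs(weights).sum())             # gross exposure normalised to 1.0
```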
# -*- coding: utf-8 -*-
"""
Created on 2018-1-15
@author: cheng.li
"""

import numpy as np

from alphamind.data.standardize import standardize


def factor_turn_over(factor_values: np.ndarray,
                     trade_dates: np.ndarray,
                     codes: np.ndarray,
                     use_standardize: bool = True):
    if use_standardize:
        factor_values = standardize(factor_values, trade_dates)
if __name__ == '__main__':
    from alphamind.api import *

    engine = SqlEngine()
    factor = 'ep_q'
    freq = '5b'
    start_date = '2017-06-01'
    end_date = '2017-08-01'
    universe = Universe('custom', ['zz500'])
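Note that the committed `factor_turn_over` body stops after the optional standardisation and does not yet return anything. The continuation below is purely a sketch of one conventional turnover definition (absolute change of standardised scores between consecutive trade dates); it is an assumption, not the author's implementation:

```python
import numpy as np
import pandas as pd

def factor_turn_over_sketch(factor_values: np.ndarray,
                            trade_dates: np.ndarray,
                            codes: np.ndarray) -> pd.Series:
    # hypothetical completion: pivot to a dates x codes table of scores,
    # then measure how much the score vector moves from one date to the next
    df = pd.DataFrame({'trade_date': trade_dates, 'code': codes, 'factor': factor_values})
    pivot = df.pivot(index='trade_date', columns='code', values='factor')
    return pivot.diff().abs().sum(axis=1)  # turnover proxy per rebalance date
```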
@@ -11,15 +11,15 @@ from alphamind.utilities import simple_abssum
 from alphamind.utilities import transform


-def long_short_build(er: np.ndarray,
-                     leverage: float=1.,
-                     groups: np.ndarray=None,
-                     masks: np.ndarray=None) -> np.ndarray:
+def long_short_builder(er: np.ndarray,
+                       leverage: float = 1.,
+                       groups: np.ndarray = None,
+                       masks: np.ndarray = None) -> np.ndarray:
     er = er.copy()

     if masks is not None:
-        er[~masks] = 0.
+        er[masks] = 0.
+        er[~masks] = er[~masks] - er[~masks].mean()

     if er.ndim == 1:
         er = er.reshape((-1, 1))
......
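A standalone numpy sketch of the behaviour change above, mirroring what the updated test in the next file asserts: under the new semantics `masks` marks names to exclude, the surviving scores are demeaned so the book nets to zero, and the result is scaled to the requested gross leverage. The helper name is hypothetical, and the real builder additionally handles `groups` via `transform`/`simple_abssum`.

```python
import numpy as np

def long_short_sketch(er: np.ndarray,
                      leverage: float = 1.,
                      masks: np.ndarray = None) -> np.ndarray:
    er = er.copy()
    if masks is not None:
        er[masks] = 0.                                # True now means "exclude this name"
        er[~masks] = er[~masks] - er[~masks].mean()   # demean survivors -> weights net to zero
    return er / np.abs(er).sum() * leverage           # scale gross exposure to `leverage`

er = np.array([1.0, -2.0, 3.0, 0.5])
masks = np.array([False, True, False, False])
w = long_short_sketch(er, masks=masks)
print(round(w.sum(), 12))   # 0.0: dollar neutral by construction
print(np.abs(w).sum())      # 1.0: unit gross leverage
```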
@@ -8,7 +8,7 @@ Created on 2017-5-9
 import unittest
 import numpy as np
 import pandas as pd
-from alphamind.portfolio.longshortbulder import long_short_build
+from alphamind.portfolio.longshortbulder import long_short_builder

 class TestLongShortBuild(unittest.TestCase):
@@ -17,37 +17,38 @@ class TestLongShortBuild(unittest.TestCase):
         self.x = np.random.randn(3000, 10)
         self.groups = np.random.randint(10, 40, size=3000)
         choices = np.random.choice(3000, 100, replace=False)
-        self.masks = np.full(3000, True, dtype=bool)
-        self.masks[choices] = False
+        self.masks = np.full(3000, False, dtype=bool)
+        self.masks[choices] = True

     def test_long_short_build(self):
         x = self.x[:, 0].flatten()
-        calc_weights = long_short_build(x).flatten()
+        calc_weights = long_short_builder(x).flatten()
         expected_weights = x / np.abs(x).sum()
         np.testing.assert_array_almost_equal(calc_weights, expected_weights)

-        calc_weights = long_short_build(self.x, leverage=2)
+        calc_weights = long_short_builder(self.x, leverage=2)
         expected_weights = self.x / np.abs(self.x).sum(axis=0) * 2
         np.testing.assert_array_almost_equal(calc_weights, expected_weights)

     def test_long_short_build_with_group(self):
         x = self.x[:, 0].flatten()
-        calc_weights = long_short_build(x, groups=self.groups).flatten()
+        calc_weights = long_short_builder(x, groups=self.groups).flatten()
         expected_weights = pd.Series(x).groupby(self.groups).apply(lambda s: s / np.abs(s).sum())
         np.testing.assert_array_almost_equal(calc_weights, expected_weights)

-        calc_weights = long_short_build(self.x, groups=self.groups)
+        calc_weights = long_short_builder(self.x, groups=self.groups)
         expected_weights = pd.DataFrame(self.x).groupby(self.groups).apply(lambda s: s / np.abs(s).sum(axis=0))
         np.testing.assert_array_almost_equal(calc_weights, expected_weights)

     def test_long_short_build_with_masks(self):
         x = self.x[:, 0].flatten()
+        calc_weights = long_short_builder(x, masks=self.masks, leverage=1.).flatten()
+        self.assertAlmostEqual(calc_weights.sum(), 0.)
+
         masked_x = x.copy()
-        masked_x[~self.masks] = 0.
-        leverage = np.abs(masked_x).sum()
-        calc_weights = long_short_build(x, masks=self.masks, leverage=leverage).flatten()
-        expected_weights = x.copy()
-        expected_weights[~self.masks] = 0.
+        masked_x[self.masks] = 0.
+        masked_x[~self.masks] = masked_x[~self.masks] - masked_x[~self.masks].mean()
+        expected_weights = masked_x / np.abs(masked_x).sum()
         np.testing.assert_array_almost_equal(calc_weights, expected_weights)
......
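A tiny worked example of the grouped normalisation these tests expect: within each group, weights are the scores divided by that group's absolute sum, so every group carries unit gross exposure.

```python
import numpy as np
import pandas as pd

x = np.array([1.0, -1.0, 2.0, 2.0])
groups = np.array([0, 0, 1, 1])
weights = pd.Series(x).groupby(groups).apply(lambda s: s / np.abs(s).sum())
print(weights.values)  # [ 0.5 -0.5  0.5  0.5]: each group's |weights| sum to 1
```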
@@ -42,7 +42,7 @@
 "all_styles = risk_styles + industry_styles + macro_styles\n",
 "\n",
 "risk_exposure_values = total_data[all_styles].values.astype(float)\n",
-"special_risk_values = total_data['s_srisk'].values.astype(float)\n",
+"special_risk_values = total_data['srisk'].values.astype(float)\n",
 "risk_cov_values = risk_cov[all_styles].values\n",
 "\n",
 "sec_cov_values_full = risk_exposure_values @ risk_cov_values @ risk_exposure_values.T / 10000 + np.diag(special_risk_values ** 2) / 10000\n",
@@ -293,6 +293,20 @@
     " print(\"{0:<8}{1:>12.2f}{2:>12.2f}{3:>12.2f}\".format(n, cvxpy_times[i], cvxopt_times[i], ipopt_times[i]))"
    ]
   },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  },
   {
    "cell_type": "code",
    "execution_count": null,
......