Commit d50e2048 authored by 李煜

code update

parent 74ec7081
@@ -13,8 +13,8 @@
- factor_earning # earnings quality
- factor_per_share_indicators # per-share indicators
### client
Program entry point
### /client
Program entry point; this directory contains the single-category factor calculations as well as the combined calculation
### sumbit
Distributed computation, job submission entry point
......@@ -22,7 +22,8 @@
### init
Initialize the Redis database used for distributed computation
### q5_cluster_work
Distributed engine startup
### cluster_work
Distributed engine node startup program
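A typical invocation of the entry scripts under /client looks like the following (illustrative only; the flag names come from each script's argparse setup, and the exact paths may differ in your checkout):

python client/cash_flow.py --start_date 20070101 --end_date 20190716 --rebuild True
python client/cash_flow.py --update True

Note that the scripts declare their boolean flags with type=bool, so any non-empty value (including "False") enables them; see the note after cash_flow.py below.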
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
@version: ??
@author: li
@file: __init__.py.py
@time: 2019-07-16 20:00
"""
\ No newline at end of file
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
@version: ??
@author: li
@file: cash_flow.py
@time: 2019-07-16 17:31
"""
import sys
sys.path.append('../')
sys.path.append('../../')
sys.path.append('../../../')
import time
import collections
import argparse
from datetime import datetime, timedelta
from factor import factor_cash_flow
from factor.ttm_fundamental import *
from vision.file_unit.balance import Balance
from vision.file_unit.cash_flow import CashFlow
from vision.file_unit.income import Income
from vision.file_unit.valuation import Valuation
from factor.utillities.trade_date import TradeDate
from ultron.cluster.invoke.cache_data import cache_data
def get_trade_date(trade_date, n):
"""
Get the date n years before the given trading date; the result must be a trading day, otherwise step back to the most recent preceding trading day.
:param trade_date: current trading date
:param n:
:return:
"""
_trade_date = TradeDate()
trade_date_sets = collections.OrderedDict(
sorted(_trade_date._trade_date_sets.items(), key=lambda t: t[0], reverse=False))
time_array = datetime.strptime(str(trade_date), "%Y%m%d")
time_array = time_array - timedelta(days=365) * n
date_time = int(datetime.strftime(time_array, "%Y%m%d"))
if date_time < min(trade_date_sets.keys()):
# print('date_time %s is outof trade_date_sets' % date_time)
return date_time
else:
while date_time not in trade_date_sets:
date_time = date_time - 1
# print('trade_date pre %s year %s' % (n, date_time))
return date_time
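As a minimal, self-contained sketch of the lookback logic in get_trade_date above (the trading-day set here is hypothetical toy data; the real implementation reads it from TradeDate):

from datetime import datetime, timedelta

def previous_year_trade_date(trade_date, n, trade_days):
    """Step back n*365 days, then walk back to the nearest earlier trading day."""
    dt = datetime.strptime(str(trade_date), "%Y%m%d") - timedelta(days=365) * n
    date_time = int(dt.strftime("%Y%m%d"))
    if date_time < min(trade_days):
        return date_time  # older than the available calendar; returned unchanged, as above
    while date_time not in trade_days:
        date_time -= 1  # walk back day by day on the integer YYYYMMDD value
    return date_time

print(previous_year_trade_date(20190716, 1, {20180713, 20180716, 20180717}))  # 20180716

The walk-back decrements the integer date directly, so it passes through values that are not valid calendar dates; it stops only when it hits a member of the trading-day set, which matches the behaviour of the original.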
def get_basic_cash_flow(trade_date):
"""
Fetch the underlying data needed for the cash flow factors
:param trade_date:
:return:
"""
cash_flow_sets = get_fundamentals(add_filter_trade(query(CashFlow.__name__,
[CashFlow.symbol,
CashFlow.net_operate_cash_flow,
CashFlow.goods_sale_and_service_render_cash])
, [trade_date]))
income_sets = get_fundamentals(add_filter_trade(query(Income.__name__,
[Income.symbol,
Income.operating_revenue,
Income.total_operating_cost,
Income.total_operating_revenue]), [trade_date]))
valuation_sets = get_fundamentals(add_filter_trade(query(Valuation.__name__,
[Valuation.symbol,
Valuation.market_cap,
Valuation.circulating_market_cap]), [trade_date]))
# merge
tp_cash_flow = pd.merge(cash_flow_sets, income_sets, on="symbol")
tp_cash_flow = tp_cash_flow[-tp_cash_flow.duplicated()]
ttm_factors = {Balance.__name__: [Balance.symbol,
Balance.total_liability,
Balance.shortterm_loan,
Balance.longterm_loan,
Balance.total_current_liability,
Balance.net_liability,
Balance.total_current_assets,
Balance.interest_bearing_liability,
Balance.total_assets],
CashFlow.__name__: [CashFlow.symbol,
CashFlow.net_operate_cash_flow,
CashFlow.goods_sale_and_service_render_cash,
CashFlow.cash_and_equivalents_at_end],
Income.__name__: [Income.symbol,
Income.operating_revenue,
Income.total_operating_revenue,
Income.total_operating_cost,
Income.net_profit,
Income.np_parent_company_owners]
}
ttm_cash_flow_sets = get_ttm_fundamental([], ttm_factors, trade_date).reset_index()
ttm_cash_flow_sets = ttm_cash_flow_sets[-ttm_cash_flow_sets.duplicated()]
# merge
ttm_cash_flow_sets = pd.merge(ttm_cash_flow_sets, valuation_sets, on="symbol")
return tp_cash_flow, ttm_cash_flow_sets
def prepare_calculate(trade_date):
# cash flow
tp_cash_flow, ttm_cash_flow_sets = get_basic_cash_flow(trade_date)
if len(tp_cash_flow) <= 0 or len(ttm_cash_flow_sets) <=0:
print("%s has no data" % trade_date)
return
else:
tic = time.time()
session = str(int(time.time() * 1000000 + datetime.now().microsecond))
cache_data.set_cache(session + str(trade_date) + "1", trade_date, tp_cash_flow.to_json(orient='records'))
cache_data.set_cache(session + str(trade_date) + "2", trade_date, ttm_cash_flow_sets.to_json(orient='records'))
factor_cash_flow.factor_calculate.delay(date_index=trade_date, session=session)
time4 = time.time()
print('cash_flow_cal_time:{}'.format(time4 - tic))
def do_update(start_date, end_date, count):
# load the local trading calendar
_trade_date = TradeDate()
trade_date_sets = _trade_date.trade_date_sets_ago(start_date, end_date, count)
for trade_date in trade_date_sets:
print('Factor calculation date: %s' % trade_date)
prepare_calculate(trade_date)
print('----->')
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--start_date', type=int, default=20070101)
parser.add_argument('--end_date', type=int, default=0)
parser.add_argument('--count', type=int, default=1)
parser.add_argument('--rebuild', type=bool, default=False)
parser.add_argument('--update', type=bool, default=False)
parser.add_argument('--schedule', type=bool, default=False)
args = parser.parse_args()
if args.end_date == 0:
end_date = int(datetime.now().date().strftime('%Y%m%d'))
else:
end_date = args.end_date
if args.rebuild:
processor = factor_cash_flow.FactorCashFlow('factor_cash_flow')
processor.create_dest_tables()
do_update(args.start_date, end_date, args.count)
if args.update:
do_update(args.start_date, end_date, args.count)
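One caveat about the argparse setup shared by these entry scripts: type=bool converts any non-empty string to True, so --rebuild False still triggers the rebuild branch. A small self-contained demonstration plus a commonly used str2bool workaround (this is generic argparse behaviour, not something specific to this repository):

import argparse

def str2bool(value):
    # interpret the usual textual spellings of booleans explicitly
    if isinstance(value, bool):
        return value
    if value.lower() in ('true', '1', 'yes'):
        return True
    if value.lower() in ('false', '0', 'no'):
        return False
    raise argparse.ArgumentTypeError('boolean value expected')

parser = argparse.ArgumentParser()
parser.add_argument('--rebuild', type=bool, default=False)     # pitfall: bool('False') is True
parser.add_argument('--update', type=str2bool, default=False)  # explicit parsing avoids the surprise
args = parser.parse_args(['--rebuild', 'False', '--update', 'False'])
print(args.rebuild, args.update)  # True False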
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
@version: ??
@author: li
@file: constrain.py
@time: 2019-07-16 19:22
"""
import sys
sys.path.append('../')
sys.path.append('../../')
sys.path.append('../../../')
import time
import collections
import argparse
from datetime import datetime, timedelta
from factor import factor_constrain
from factor.ttm_fundamental import *
from vision.file_unit.balance import Balance
from vision.file_unit.income import Income
from factor.utillities.trade_date import TradeDate
from ultron.cluster.invoke.cache_data import cache_data
def get_trade_date(trade_date, n):
"""
Get the date n years before the given trading date; the result must be a trading day, otherwise step back to the most recent preceding trading day.
:param trade_date: current trading date
:param n:
:return:
"""
_trade_date = TradeDate()
trade_date_sets = collections.OrderedDict(
sorted(_trade_date._trade_date_sets.items(), key=lambda t: t[0], reverse=False))
time_array = datetime.strptime(str(trade_date), "%Y%m%d")
time_array = time_array - timedelta(days=365) * n
date_time = int(datetime.strftime(time_array, "%Y%m%d"))
if date_time < min(trade_date_sets.keys()):
# print('date_time %s is outof trade_date_sets' % date_time)
return date_time
else:
while date_time not in trade_date_sets:
date_time = date_time - 1
# print('trade_date pre %s year %s' % (n, date_time))
return date_time
def get_basic_constrain(trade_date):
# load the current-period factors
# balance sheet
balance_sets = get_fundamentals(add_filter_trade(query(Balance.__name__,
[Balance.symbol,
Balance.total_current_liability,
Balance.total_liability,
Balance.total_assets,
Balance.total_current_assets,
Balance.fixed_assets,
Balance.interest_bearing_liability
]), [trade_date]))
balance_sets = balance_sets[-balance_sets.duplicated()]
# TTM calculation
ttm_factors = {Income.__name__: [Income.symbol,
Income.operating_cost,
Income.operating_revenue,
Income.operating_tax_surcharges,
Income.total_operating_revenue,
Income.total_operating_cost,
Income.financial_expense,
Income.sale_expense,
Income.administration_expense
]}
ttm_constrain_sets = get_ttm_fundamental([], ttm_factors, trade_date).reset_index()
ttm_constrain_sets = ttm_constrain_sets[-ttm_constrain_sets.duplicated()]
return balance_sets, ttm_constrain_sets
def prepare_calculate(trade_date):
# factor_constrain
balance_sets, ttm_factors_sets = get_basic_constrain(trade_date)
if len(balance_sets) <= 0 or len(ttm_factors_sets) <=0:
print("%s has no data" % trade_date)
return
else:
tic = time.time()
session = str(int(time.time() * 1000000 + datetime.now().microsecond))
cache_data.set_cache(session + str(trade_date) + '1', trade_date, balance_sets.to_json(orient='records'))
cache_data.set_cache(session + str(trade_date) + '2', trade_date, ttm_factors_sets.to_json(orient='records'))
factor_constrain.factor_calculate.delay(date_index=trade_date, session=session)
time5 = time.time()
print('constrain_cal_time:{}'.format(time5 - tic))
def do_update(start_date, end_date, count):
# load the local trading calendar
_trade_date = TradeDate()
trade_date_sets = _trade_date.trade_date_sets_ago(start_date, end_date, count)
for trade_date in trade_date_sets:
print('Factor calculation date: %s' % trade_date)
prepare_calculate(trade_date)
print('----->')
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--start_date', type=int, default=20070101)
parser.add_argument('--end_date', type=int, default=0)
parser.add_argument('--count', type=int, default=1)
parser.add_argument('--rebuild', type=bool, default=False)
parser.add_argument('--update', type=bool, default=False)
parser.add_argument('--schedule', type=bool, default=False)
args = parser.parse_args()
if args.end_date == 0:
end_date = int(datetime.now().date().strftime('%Y%m%d'))
else:
end_date = args.end_date
if args.rebuild:
processor = factor_constrain.FactorConstrain('factor_constrain')
processor.create_dest_tables()
do_update(args.start_date, end_date, args.count)
if args.update:
do_update(args.start_date, end_date, args.count)
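The prepare/calculate split above serializes each DataFrame to JSON records, stores it under a session-scoped key, and lets the worker task rebuild the frame. A minimal sketch of that round trip in plain pandas (a dict stands in for the Redis-backed cache_data store; the key suffixes '1' and '2' mirror prepare_calculate above):

import json
import pandas as pd

fake_cache = {}  # stand-in for cache_data's Redis store

balance_sets = pd.DataFrame({'symbol': ['000001.XSHE', '600000.XSHG'], 'total_liability': [1.0, 2.0]})
session, trade_date = '1563270000000000', 20190716

# producer side: one key per dataset
fake_cache[session + str(trade_date) + '1'] = balance_sets.to_json(orient='records')

# worker side: rebuild the frame from the JSON records
restored = pd.DataFrame(json.loads(fake_cache[session + str(trade_date) + '1']))
print(restored)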
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
@version: ??
@author: li
@file: earning.py
@time: 2019-07-16 19:44
"""
import sys
sys.path.append('../')
sys.path.append('../../')
sys.path.append('../../../')
import time
import collections
import argparse
from datetime import datetime, timedelta
from factor import factor_earning
from factor.ttm_fundamental import *
from vision.file_unit.balance import Balance
from vision.file_unit.cash_flow import CashFlow
from vision.file_unit.income import Income
from vision.file_unit.valuation import Valuation
from vision.file_unit.indicator import Indicator
from factor.utillities.trade_date import TradeDate
from ultron.cluster.invoke.cache_data import cache_data
def get_trade_date(trade_date, n):
"""
Get the date n years before the given trading date; the result must be a trading day, otherwise step back to the most recent preceding trading day.
:param trade_date: current trading date
:param n:
:return:
"""
_trade_date = TradeDate()
trade_date_sets = collections.OrderedDict(
sorted(_trade_date._trade_date_sets.items(), key=lambda t: t[0], reverse=False))
time_array = datetime.strptime(str(trade_date), "%Y%m%d")
time_array = time_array - timedelta(days=365) * n
date_time = int(datetime.strftime(time_array, "%Y%m%d"))
if date_time < min(trade_date_sets.keys()):
# print('date_time %s is outof trade_date_sets' % date_time)
return date_time
else:
while date_time not in trade_date_sets:
date_time = date_time - 1
# print('trade_date pre %s year %s' % (n, date_time))
return date_time
def get_basic_earning(trade_date):
# load the factors currently involved
# current-period data
# pdb.set_trace()
balance_sets = get_fundamentals(add_filter_trade(query(Balance.__name__,
[Balance.symbol,
Balance.equities_parent_company_owners])
, [trade_date]))
cash_flow_sets = get_fundamentals(add_filter_trade(query(CashFlow.__name__,
[CashFlow.symbol,
CashFlow.goods_sale_and_service_render_cash])
, [trade_date]))
income_sets = get_fundamentals(add_filter_trade(query(Income.__name__,
[Income.symbol,
Income.total_operating_revenue,
Income.total_operating_cost,
Income.invest_income_associates,
Income.non_operating_revenue,
Income.non_operating_expense,
Income.total_profit,
Income.net_profit,
Income.np_parent_company_owners
])
, [trade_date]))
valuation_sets = get_fundamentals(add_filter_trade(query(Valuation.__name__,
[Valuation.symbol,
Valuation.circulating_market_cap])
, [trade_date]))
indicator_sets = get_fundamentals(add_filter_trade(query(Indicator.__name__,
[Indicator.symbol,
Indicator.adjusted_profit])
, [trade_date]))
# merge
tp_earning = pd.merge(cash_flow_sets, balance_sets, on="symbol")
tp_earning = pd.merge(tp_earning, income_sets, on="symbol")
tp_earning = pd.merge(tp_earning, valuation_sets, on="symbol")
tp_earning = pd.merge(tp_earning, indicator_sets, on="symbol")
tp_earning = tp_earning[-tp_earning.duplicated()]
# tp_earning.set_index('symbol', inplace=True)
# TTM data
ttm_factors = {Balance.__name__: [Balance.symbol,
Balance.total_assets,
Balance.equities_parent_company_owners,
Balance.total_owner_equities
],
CashFlow.__name__: [CashFlow.symbol,
CashFlow.cash_and_equivalents_at_end],
Income.__name__: [Income.symbol,
Income.total_operating_revenue,
Income.operating_revenue,
Income.interest_income,
Income.total_operating_cost,
Income.operating_cost,
Income.financial_expense,
Income.invest_income_associates,
Income.operating_profit,
Income.non_operating_revenue,
Income.non_operating_expense,
Income.total_profit,
Income.net_profit,
Income.np_parent_company_owners
]
}
ttm_earning = get_ttm_fundamental([], ttm_factors, trade_date).reset_index()
ttm_earning = ttm_earning[-ttm_earning.duplicated()]
## 5-year TTM data
ttm_factors = {Balance.__name__: [Balance.symbol,
Balance.total_assets,
Balance.total_owner_equities],
CashFlow.__name__: [CashFlow.symbol,
CashFlow.cash_and_equivalents_at_end],
Income.__name__: [Income.symbol,
Income.net_profit,]
}
# passing data through cache_data.set_cache loses the index name
ttm_earning_5y = get_ttm_fundamental([], ttm_factors, trade_date, year=5).reset_index()
ttm_earning_5y = ttm_earning_5y[-ttm_earning_5y.duplicated()]
return tp_earning, ttm_earning_5y, ttm_earning
def prepare_calculate(trade_date):
# earning
tp_earning, ttm_earning_5y, ttm_earning = get_basic_earning(trade_date)
if len(tp_earning) <= 0 or len(ttm_earning_5y) <= 0 or len(ttm_earning) <= 0:
print("%s has no data" % trade_date)
return
else:
tic = time.time()
session = str(int(time.time() * 1000000 + datetime.now().microsecond))
cache_data.set_cache(session + str(trade_date) + "1", trade_date, tp_earning.to_json(orient='records'))
cache_data.set_cache(session + str(trade_date) + "2", trade_date, ttm_earning_5y.to_json(orient='records'))
cache_data.set_cache(session + str(trade_date) + "3", trade_date, ttm_earning.to_json(orient='records'))
factor_earning.factor_calculate.delay(date_index=trade_date, session=session)
time6 = time.time()
print('earning_cal_time:{}'.format(time6 - tic))
def do_update(start_date, end_date, count):
# load the local trading calendar
_trade_date = TradeDate()
trade_date_sets = _trade_date.trade_date_sets_ago(start_date, end_date, count)
for trade_date in trade_date_sets:
print('Factor calculation date: %s' % trade_date)
prepare_calculate(trade_date)
print('----->')
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--start_date', type=int, default=20070101)
parser.add_argument('--end_date', type=int, default=0)
parser.add_argument('--count', type=int, default=1)
parser.add_argument('--rebuild', type=bool, default=False)
parser.add_argument('--update', type=bool, default=False)
parser.add_argument('--schedule', type=bool, default=False)
args = parser.parse_args()
if args.end_date == 0:
end_date = int(datetime.now().date().strftime('%Y%m%d'))
else:
end_date = args.end_date
if args.rebuild:
processor = factor_earning.FactorEarning('factor_earning')
processor.create_dest_tables()
do_update(args.start_date, end_date, args.count)
if args.update:
do_update(args.start_date, end_date, args.count)
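get_basic_earning's comment notes that going through cache_data.set_cache loses the index name, which is why the TTM frames call reset_index() before being cached. A small illustration of that effect and the reset_index() remedy (plain pandas, toy data):

import json
import pandas as pd

df = pd.DataFrame({'symbol': ['000001.XSHE'], 'net_profit': [1.5]}).set_index('symbol')

# to_json(orient='records') drops the index entirely, so 'symbol' would be lost
lost = pd.DataFrame(json.loads(df.to_json(orient='records')))
print(list(lost.columns))  # ['net_profit']

# reset_index() first keeps 'symbol' as an ordinary column through the JSON round trip
kept = pd.DataFrame(json.loads(df.reset_index().to_json(orient='records')))
print(list(kept.columns))  # ['symbol', 'net_profit']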
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
@version: 0.1
@author: zzh
@file: factor_scale_value.py
@time: 2019-01-28 11:33
"""
import sys
sys.path.append("../")
sys.path.append("../../")
sys.path.append("../../../")
from vision.file_unit.balance import Balance
from jpy.factor.ttm_fundamental import get_ttm_fundamental
import argparse
import time
import collections
import pandas as pd
from datetime import datetime, timedelta
from jpy.factor.factor_base import FactorBase
from vision.fm.signletion_engine import *
from vision.file_unit.income import Income
from vision.file_unit.valuation import Valuation
from ultron.cluster.invoke.cache_data import cache_data
from factor import factor_scale_value_task
from factor.utillities.trade_date import TradeDate
class FactorScaleValue(FactorBase):
def __init__(self, name):
super(FactorScaleValue, self).__init__(name)
self._trade_date = TradeDate()
# build the factor table
def create_dest_tables(self):
"""
Create the database table
:return:
"""
drop_sql = """drop table if exists `{0}`""".format(self._name)
create_sql = """create table `{0}`(
`id` varchar(32) NOT NULL,
`symbol` varchar(24) NOT NULL,
`trade_date` date NOT NULL,
`mkt_value` decimal(19,4) NOT NULL,
`cir_mkt_value` decimal(19,4),
`sales_ttm` decimal(19,4),
`total_assets` decimal(19,4),
`log_of_mkt_value` decimal(19, 4),
`log_of_neg_mkt_value` decimal(19,4),
`nl_size` decimal(19,4),
`log_sales_ttm` decimal(19,4),
`log_total_last_qua_assets` decimal(19,4),
PRIMARY KEY(`id`,`trade_date`,`symbol`)
)ENGINE=InnoDB DEFAULT CHARSET=utf8;""".format(self._name)
super(FactorScaleValue, self)._create_tables(create_sql, drop_sql)
def get_trade_date(self, trade_date, n):
"""
Get the date n years before the given trading date; the result must be a trading day, otherwise step back to the most recent preceding trading day.
:param trade_date: current trading date
:param n:
:return:
"""
# print("trade_date %s" % trade_date)
trade_date_sets = collections.OrderedDict(
sorted(self._trade_date._trade_date_sets.items(), key=lambda t: t[0], reverse=False))
time_array = datetime.strptime(str(trade_date), "%Y%m%d")
time_array = time_array - timedelta(days=365) * n
date_time = int(datetime.strftime(time_array, "%Y%m%d"))
if date_time < min(trade_date_sets.keys()):
# print('date_time %s is outof trade_date_sets' % date_time)
return date_time
else:
while date_time not in trade_date_sets:
date_time = date_time - 1
# print('trade_date pre %s year %s' % (n, date_time))
return date_time
def get_basic_data(self, trade_date):
"""
Fetch basic data
Fetch the basic data of every stock on the given trading day
:param trade_date: trading date
:return:
"""
# market_cap,circulating_market_cap,total_operating_revenue
valuation_sets = get_fundamentals(add_filter_trade(query(Valuation.__name__,
[Valuation.symbol,
Valuation.market_cap,
Valuation.circulating_market_cap]), [trade_date]))
income_sets = get_fundamentals(add_filter_trade(query(Income.__name__,
[Income.symbol,
Income.total_operating_revenue]), [trade_date]))
balance_set = get_fundamentals(add_filter_trade(query(Balance.__name__,
[Balance.symbol,
Balance.total_assets]), [trade_date]))
# TTM calculation
ttm_factors = {Income.__name__: [Income.symbol,
Income.total_operating_revenue]
}
ttm_factor_sets = get_ttm_fundamental([], ttm_factors, trade_date).reset_index()
# the TTM window calculation still needs optimization
# ttm_factor_sets_sum = get_ttm_fundamental([], ttm_factors_sum_list, trade_date, 5).reset_index()
ttm_factor_sets = ttm_factor_sets.drop(columns={"trade_date"})
return valuation_sets, ttm_factor_sets, income_sets, balance_set
def prepaer_calculate(self, trade_date):
valuation_sets, ttm_factor_sets, income_sets, balance_set = self.get_basic_data(trade_date)
# valuation_sets = pd.merge(valuation_sets, income_sets, on='symbol')
valuation_sets = pd.merge(valuation_sets, ttm_factor_sets, on='symbol')
valuation_sets = pd.merge(valuation_sets, balance_set, on='symbol')
if len(valuation_sets) <= 0:
print("%s has no data" % trade_date)
return
else:
session = str(int(time.time() * 1000000 + datetime.now().microsecond))
cache_data.set_cache(session, 'scale' + str(trade_date), valuation_sets.to_json(orient='records'))
factor_scale_value_task.calculate.delay(factor_name='scale' + str(trade_date), trade_date=trade_date,
session=session)
def do_update(self, start_date, end_date, count):
# load the local trading calendar
trade_date_sets = self._trade_date.trade_date_sets_ago(start_date, end_date, count)
for trade_date in trade_date_sets:
print('Factor calculation date: %s' % trade_date)
self.prepaer_calculate(trade_date)
print('----->')
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--start_date', type=int, default=20070101)
parser.add_argument('--end_date', type=int, default=0)
parser.add_argument('--count', type=int, default=1)
parser.add_argument('--rebuild', type=bool, default=False)
parser.add_argument('--update', type=bool, default=False)
parser.add_argument('--schedule', type=bool, default=False)
args = parser.parse_args()
if args.end_date == 0:
end_date = int(datetime.now().date().strftime('%Y%m%d'))
else:
end_date = args.end_date
if args.rebuild:
processor = FactorScaleValue('factor_scale_value')
processor.create_dest_tables()
processor.do_update(args.start_date, end_date, args.count)
if args.update:
processor = FactorScaleValue('factor_scale_value')
processor.do_update(args.start_date, end_date, args.count)
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
@version: 0.1
@author: zzh
@file: factor_volatility_value.py
@time: 2019-01-28 11:33
"""
import sys
sys.path.append("../")
sys.path.append("../../")
sys.path.append("../../../")
import argparse
import time
import collections
from datetime import datetime, timedelta
from factor.factor_base import FactorBase
from vision.fm.signletion_engine import *
from vision.file_unit.sk_daily_price import SKDailyPrice
from ultron.cluster.invoke.cache_data import cache_data
from factor import factor_volatility_value_task
import json
from factor.utillities.trade_date import TradeDate
class FactorVolatilityValue(FactorBase):
def __init__(self, name):
super(FactorVolatilityValue, self).__init__(name)
self._trade_date = TradeDate()
# build the factor table
def create_dest_tables(self):
"""
Create the database table
:return:
"""
drop_sql = """drop table if exists `{0}`""".format(self._name)
create_sql = """create table `{0}`(
`id` varchar(32) NOT NULL,
`symbol` varchar(24) NOT NULL,
`trade_date` date NOT NULL,
`variance_20d` decimal(19,4) NOT NULL,
`variance_60d` decimal(19,4) NOT NULL,
`variance_120d` decimal(19,4) NOT NULL,
`kurtosis_20d` decimal(19,4) NOT NULL,
`kurtosis_60d` decimal(19,4) NOT NULL,
`kurtosis_120d` decimal(19,4) NOT NULL,
`alpha_20d` decimal(19,4) NOT NULL,
`alpha_60d` decimal(19,4) NOT NULL,
`alpha_120d` decimal(19,4) NOT NULL,
`beta_20d` decimal(19,4) NOT NULL,
`beta_60d` decimal(19,4) NOT NULL,
`beta_120d` decimal(19,4) NOT NULL,
`sharp_20d` decimal(19,4) NOT NULL,
`sharp_60d` decimal(19,4) NOT NULL,
`sharp_120d` decimal(19,4) NOT NULL,
`tr_20d` decimal(19,4) NOT NULL,
`tr_60d` decimal(19,4) NOT NULL,
`tr_120d` decimal(19,4) NOT NULL,
`ir_20d` decimal(19,4) NOT NULL,
`ir_60d` decimal(19,4) NOT NULL,
`ir_120d` decimal(19,4) NOT NULL,
`gain_variance_20d` decimal(19,4) NOT NULL,
`gain_variance_60d` decimal(19,4) NOT NULL,
`gain_variance_120d` decimal(19,4) NOT NULL,
`loss_variance_20d` decimal(19,4) NOT NULL,
`loss_variance_60d` decimal(19,4) NOT NULL,
`loss_variance_120d` decimal(19,4) NOT NULL,
`gain_loss_variance_ratio_20d` decimal(19,4) NOT NULL,
`gain_loss_variance_ratio_60d` decimal(19,4) NOT NULL,
`gain_loss_variance_ratio_120d` decimal(19,4) NOT NULL,
`dastd_252d` decimal(19,4) NOT NULL,
`ddnsr_12m` decimal(19,4) NOT NULL,
`ddncr_12m` decimal(19,4) NOT NULL,
`dvrat` decimal(19,4) NOT NULL,
PRIMARY KEY(`id`,`trade_date`,`symbol`)
)ENGINE=InnoDB DEFAULT CHARSET=utf8;""".format(self._name)
super(FactorVolatilityValue, self)._create_tables(create_sql, drop_sql)
def get_trade_date(self, trade_date, n):
"""
Get the date n years before the given trading date; the result must be a trading day, otherwise step back to the most recent preceding trading day.
:param trade_date: current trading date
:param n:
:return:
"""
# print("trade_date %s" % trade_date)
trade_date_sets = collections.OrderedDict(
sorted(self._trade_date._trade_date_sets.items(), key=lambda t: t[0], reverse=False))
time_array = datetime.strptime(str(trade_date), "%Y%m%d")
time_array = time_array - timedelta(days=365) * n
date_time = int(datetime.strftime(time_array, "%Y%m%d"))
if date_time < min(trade_date_sets.keys()):
# print('date_time %s is outof trade_date_sets' % date_time)
return date_time
else:
while date_time not in trade_date_sets:
date_time = date_time - 1
# print('trade_date pre %s year %s' % (n, date_time))
return date_time
def get_basic_data(self, trade_date):
"""
Fetch basic data
Fetch the basic data of every stock on the given trading day
:param trade_date: trading date
:return:
"""
# daily stock prices plus the benchmark index (000300.XSHG) price history
count = 300
sk_daily_price_sets = get_sk_history_price([], trade_date, count, [SKDailyPrice.symbol,
SKDailyPrice.trade_date, SKDailyPrice.open,
SKDailyPrice.close, SKDailyPrice.high,
SKDailyPrice.low])
index_daily_price_sets = get_index_history_price(["000300.XSHG"], trade_date, count,
["symbol", "trade_date", "close"])
temp_price_sets = index_daily_price_sets[index_daily_price_sets.trade_date <= trade_date]
return sk_daily_price_sets, temp_price_sets[:count]
def prepaer_calculate(self, trade_date):
self.trade_date = trade_date
tp_price_return, temp_price_sets = self.get_basic_data(trade_date)
# tp_price_return.set_index('symbol', inplace=True)
# tp_price_return['symbol'] = tp_price_return.index
# symbol_sets = list(set(tp_price_return['symbol']))
# tp_price_return_list = pd.DataFrame()
#
# for symbol in symbol_sets:
# if len(tp_price_return[tp_price_return['symbol'] == symbol]) < 3:
# continue
# tp_price_return_list = tp_price_return_list.append(
# tp_price_return.loc[symbol].sort_values(by='trade_date', ascending=True))
if len(tp_price_return) <= 0:
print("%s has no data" % trade_date)
return
else:
session = str(int(time.time() * 1000000 + datetime.now().microsecond))
data = {
'total_data': tp_price_return.to_json(orient='records'),
'index_daily_price_sets': temp_price_sets.to_json(orient='records')
}
cache_data.set_cache(session, 'volatility' + str(trade_date),
json.dumps(data))
# cache_data.set_cache(session, 'volatility' + str(trade_date) + '_a',
# tp_price_return_list.to_json(orient='records'))
# cache_data.set_cache(session, 'volatility' + str(trade_date) + '_b',
# temp_price_sets.to_json(orient='records'))
factor_volatility_value_task.calculate.delay(factor_name='volatility' + str(trade_date), trade_date=trade_date,
session=session)
def do_update(self, start_date, end_date, count):
# load the local trading calendar
trade_date_sets = self._trade_date.trade_date_sets_ago(start_date, end_date, count)
for trade_date in trade_date_sets:
print('Factor calculation date: %s' % trade_date)
self.prepaer_calculate(trade_date)
print('----->')
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--start_date', type=int, default=20070101)
parser.add_argument('--end_date', type=int, default=0)
parser.add_argument('--count', type=int, default=1)
parser.add_argument('--rebuild', type=bool, default=False)
parser.add_argument('--update', type=bool, default=False)
parser.add_argument('--schedule', type=bool, default=False)
args = parser.parse_args()
if args.end_date == 0:
end_date = int(datetime.now().date().strftime('%Y%m%d'))
else:
end_date = args.end_date
if args.rebuild:
processor = FactorVolatilityValue('factor_volatility_value')
processor.create_dest_tables()
processor.do_update(args.start_date, end_date, args.count)
if args.update:
processor = FactorVolatilityValue('factor_volatility_value')
processor.do_update(args.start_date, end_date, args.count)
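The table created above stores rolling risk measures (variance_20d, beta_20d, sharp_20d and so on); the actual formulas live in factor_volatility_value_task, which is not shown in this diff. As a rough, purely illustrative sketch of the kind of computation involved, here is a per-symbol 20-day return variance on toy price data:

import pandas as pd

prices = pd.DataFrame({
    'symbol': ['000001.XSHE'] * 25 + ['600000.XSHG'] * 25,
    'trade_date': list(range(1, 26)) * 2,
    'close': [10 + 0.1 * i for i in range(25)] + [20 - 0.05 * i for i in range(25)],
})

prices = prices.sort_values(['symbol', 'trade_date'])
prices['ret'] = prices.groupby('symbol')['close'].pct_change()  # daily simple returns

# variance of the last 20 daily returns per symbol
variance_20d = prices.groupby('symbol')['ret'].apply(lambda r: r.tail(20).var())
print(variance_20d)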
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
@version: ??
@author: li
@file: historical_value.py
@time: 2019-07-16 19:48
"""
import sys
sys.path.append('../')
sys.path.append('../../')
sys.path.append('../../../')
import time
import collections
import argparse
from datetime import datetime, timedelta
from factor import historical_value
from factor.ttm_fundamental import *
from vision.file_unit.balance import Balance
from vision.file_unit.cash_flow import CashFlow
from vision.file_unit.income import Income
from vision.file_unit.valuation import Valuation
from vision.file_unit.industry import Industry
from vision.file_unit.indicator import Indicator
from factor.utillities.trade_date import TradeDate
from ultron.cluster.invoke.cache_data import cache_data
def get_trade_date(trade_date, n):
"""
Get the date n years before the given trading date; the result must be a trading day, otherwise step back to the most recent preceding trading day.
:param trade_date: current trading date
:param n:
:return:
"""
_trade_date = TradeDate()
trade_date_sets = collections.OrderedDict(
sorted(_trade_date._trade_date_sets.items(), key=lambda t: t[0], reverse=False))
time_array = datetime.strptime(str(trade_date), "%Y%m%d")
time_array = time_array - timedelta(days=365) * n
date_time = int(datetime.strftime(time_array, "%Y%m%d"))
if date_time < min(trade_date_sets.keys()):
# print('date_time %s is outof trade_date_sets' % date_time)
return date_time
else:
while date_time not in trade_date_sets:
date_time = date_time - 1
# print('trade_date pre %s year %s' % (n, date_time))
return date_time
def get_basic_history_value_data(trade_date):
"""
Fetch basic data
Fetch the basic data of every stock on the given trading day
:param trade_date: trading date
:return:
"""
# PS, PE, PB, PCF
valuation_sets = get_fundamentals(add_filter_trade(query(Valuation.__name__,
[Valuation.symbol,
Valuation.pe,
Valuation.ps,
Valuation.pb,
Valuation.pcf,
Valuation.market_cap,
Valuation.circulating_market_cap]), [trade_date]))
cash_flow_sets = get_fundamentals(add_filter_trade(query(CashFlow.__name__,
[CashFlow.symbol,
CashFlow.goods_sale_and_service_render_cash]), [trade_date]))
income_sets = get_fundamentals(add_filter_trade(query(Income.__name__,
[Income.symbol,
Income.net_profit]), [trade_date]))
industry_set = ['801010', '801020', '801030', '801040', '801050', '801080', '801110', '801120', '801130',
'801140', '801150', '801160', '801170', '801180', '801200', '801210', '801230', '801710',
'801720', '801730', '801740', '801750', '801760', '801770', '801780', '801790', '801880',
'801890']
sw_industry = get_fundamentals(add_filter_trade(query(Industry.__name__,
[Industry.symbol,
Industry.isymbol]), [trade_date]))
# TTM calculation
ttm_factors = {Income.__name__: [Income.symbol,
Income.np_parent_company_owners],
CashFlow.__name__: [CashFlow.symbol,
CashFlow.net_operate_cash_flow]
}
ttm_factors_sum_list = {Income.__name__: [Income.symbol,
Income.net_profit,  # net profit
],}
trade_date_2y = get_trade_date(trade_date, 2)
trade_date_3y = get_trade_date(trade_date, 3)
trade_date_4y = get_trade_date(trade_date, 4)
trade_date_5y = get_trade_date(trade_date, 5)
# print(trade_date_2y, trade_date_3y, trade_date_4y, trade_date_5y)
ttm_factor_sets = get_ttm_fundamental([], ttm_factors, trade_date).reset_index()
ttm_factor_sets_3 = get_ttm_fundamental([], ttm_factors, trade_date_3y).reset_index()
ttm_factor_sets_5 = get_ttm_fundamental([], ttm_factors, trade_date_5y).reset_index()
# the TTM window calculation still needs optimization
# ttm_factor_sets_sum = get_ttm_fundamental([], ttm_factors_sum_list, trade_date, 5).reset_index()
factor_sets_sum = get_fundamentals(add_filter_trade(query(Valuation.__name__,
[Valuation.symbol,
Valuation.market_cap,
Valuation.circulating_market_cap,
Valuation.trade_date]),
[trade_date_2y, trade_date_3y, trade_date_4y, trade_date_5y]))
factor_sets_sum_1 = factor_sets_sum.groupby('symbol')['market_cap'].sum().reset_index().rename(columns={"market_cap": "market_cap_sum",})
factor_sets_sum_2 = factor_sets_sum.groupby('symbol')['circulating_market_cap'].sum().reset_index().rename(columns={"circulating_market_cap": "circulating_market_cap_sum",})
# print(factor_sets_sum_1)
# filter by Shenwan (SW) level-1 industry codes
sw_industry = sw_industry[sw_industry['isymbol'].isin(industry_set)]
# merge the valuation data with SW level-1 industries
valuation_sets = pd.merge(valuation_sets, sw_industry, on='symbol')
# valuation_sets = pd.merge(valuation_sets, sw_industry, on='symbol', how="outer")
ttm_factor_sets = ttm_factor_sets.drop(columns={"trade_date"})
ttm_factor_sets_3 = ttm_factor_sets_3.rename(columns={"np_parent_company_owners": "np_parent_company_owners_3"})
ttm_factor_sets_3 = ttm_factor_sets_3.drop(columns={"trade_date"})
ttm_factor_sets_5 = ttm_factor_sets_5.rename(columns={"np_parent_company_owners": "np_parent_company_owners_5"})
ttm_factor_sets_5 = ttm_factor_sets_5.drop(columns={"trade_date"})
# ttm_factor_sets_sum = ttm_factor_sets_sum.rename(columns={"net_profit": "net_profit_5"})
ttm_factor_sets = pd.merge(ttm_factor_sets, ttm_factor_sets_3, on='symbol')
ttm_factor_sets = pd.merge(ttm_factor_sets, ttm_factor_sets_5, on='symbol')
# ttm_factor_sets = pd.merge(ttm_factor_sets, ttm_factor_sets_sum, on='symbol')
ttm_factor_sets = pd.merge(ttm_factor_sets, factor_sets_sum_1, on='symbol')
ttm_factor_sets = pd.merge(ttm_factor_sets, factor_sets_sum_2, on='symbol')
# ttm_factor_sets = pd.merge(ttm_factor_sets, ttm_factor_sets_3, on='symbol', how='outer')
# ttm_factor_sets = pd.merge(ttm_factor_sets, ttm_factor_sets_5, on='symbol', how='outer')
return valuation_sets, ttm_factor_sets, cash_flow_sets, income_sets
def prepare_calculate(trade_date):
# history_value
valuation_sets, ttm_factor_sets, cash_flow_sets, income_sets = get_basic_history_value_data(trade_date)
valuation_sets = pd.merge(valuation_sets, income_sets, on='symbol')
valuation_sets = pd.merge(valuation_sets, ttm_factor_sets, on='symbol')
valuation_sets = pd.merge(valuation_sets, cash_flow_sets, on='symbol')
if len(valuation_sets) <= 0:
print("%s has no data" % trade_date)
return
else:
tic = time.time()
session = str(int(time.time() * 1000000 + datetime.now().microsecond))
cache_data.set_cache(session + str(trade_date), trade_date, valuation_sets.to_json(orient='records'))
historical_value.factor_calculate.delay(date_index=trade_date, session=session)
time2 = time.time()
print('history_cal_time:{}'.format(time2 - tic))
def do_update(start_date, end_date, count):
# load the local trading calendar
_trade_date = TradeDate()
trade_date_sets = _trade_date.trade_date_sets_ago(start_date, end_date, count)
for trade_date in trade_date_sets:
print('Factor calculation date: %s' % trade_date)
prepare_calculate(trade_date)
print('----->')
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--start_date', type=int, default=20070101)
parser.add_argument('--end_date', type=int, default=0)
parser.add_argument('--count', type=int, default=1)
parser.add_argument('--rebuild', type=bool, default=False)
parser.add_argument('--update', type=bool, default=False)
parser.add_argument('--schedule', type=bool, default=False)
args = parser.parse_args()
if args.end_date == 0:
end_date = int(datetime.now().date().strftime('%Y%m%d'))
else:
end_date = args.end_date
if args.rebuild:
processor = historical_value.HistoricalValue('factor_historical_value')
processor.create_dest_tables()
do_update(args.start_date, end_date, args.count)
if args.update:
do_update(args.start_date, end_date, args.count)
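get_basic_history_value_data sums market_cap and circulating_market_cap across the 2-, 3-, 4- and 5-year-ago snapshots with a groupby on symbol. A toy illustration of that aggregation step (assumed data; the real frame comes from get_fundamentals):

import pandas as pd

factor_sets_sum = pd.DataFrame({
    'symbol': ['000001.XSHE', '000001.XSHE', '600000.XSHG', '600000.XSHG'],
    'trade_date': [20170714, 20140714, 20170714, 20140714],
    'market_cap': [100.0, 80.0, 300.0, 250.0],
})

factor_sets_sum_1 = (factor_sets_sum.groupby('symbol')['market_cap']
                     .sum().reset_index()
                     .rename(columns={'market_cap': 'market_cap_sum'}))
print(factor_sets_sum_1)
#         symbol  market_cap_sum
# 0  000001.XSHE           180.0
# 1  600000.XSHG           550.0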
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
@version: ??
@author: li
@file: per_share_indicator.py
@time: 2019-07-16 19:51
"""
import sys
sys.path.append('../')
sys.path.append('../../')
sys.path.append('../../../')
import time
import collections
import argparse
from datetime import datetime, timedelta
from factor import factor_per_share_indicators
from factor.ttm_fundamental import *
from vision.file_unit.balance import Balance
from vision.file_unit.cash_flow import CashFlow
from vision.file_unit.income import Income
from vision.file_unit.valuation import Valuation
from vision.file_unit.industry import Industry
from vision.file_unit.indicator import Indicator
from factor.utillities.trade_date import TradeDate
from ultron.cluster.invoke.cache_data import cache_data
def get_trade_date(trade_date, n):
"""
Get the date n years before the given trading date; the result must be a trading day, otherwise step back to the most recent preceding trading day.
:param trade_date: current trading date
:param n:
:return:
"""
_trade_date = TradeDate()
trade_date_sets = collections.OrderedDict(
sorted(_trade_date._trade_date_sets.items(), key=lambda t: t[0], reverse=False))
time_array = datetime.strptime(str(trade_date), "%Y%m%d")
time_array = time_array - timedelta(days=365) * n
date_time = int(datetime.strftime(time_array, "%Y%m%d"))
if date_time < min(trade_date_sets.keys()):
# print('date_time %s is outof trade_date_sets' % date_time)
return date_time
else:
while date_time not in trade_date_sets:
date_time = date_time - 1
# print('trade_date pre %s year %s' % (n, date_time))
return date_time
def get_basic_scale_data(trade_date):
"""
Fetch basic data
Fetch the basic data of every stock on the given trading day
:param trade_date: trading date
:return:
"""
valuation_sets = get_fundamentals(add_filter_trade(query(Valuation.__name__,
[Valuation.symbol,
Valuation.market_cap,
Valuation.capitalization,  # total share capital
Valuation.circulating_market_cap]),
[trade_date]))
cash_flow_sets = get_fundamentals(add_filter_trade(query(CashFlow.__name__,
[CashFlow.symbol,
CashFlow.cash_and_equivalents_at_end,  # cash and cash equivalents at period end
CashFlow.cash_equivalent_increase]),  # net increase in cash and cash equivalents
[trade_date]))
income_sets = get_fundamentals(add_filter_trade(query(Income.__name__,
[Income.symbol,
Income.basic_eps,  # basic earnings per share
Income.diluted_eps,  # diluted earnings per share
Income.net_profit,
Income.operating_revenue,  # operating revenue
Income.operating_profit,  # operating profit
Income.total_operating_revenue]),  # total operating revenue
[trade_date]))
balance_sets = get_fundamentals(add_filter_trade(query(Balance.__name__,
[Balance.symbol,
Balance.capital_reserve_fund,  # capital reserve
Balance.surplus_reserve_fund,  # surplus reserve
Balance.total_assets,  # total assets
Balance.dividend_receivable,  # dividends receivable
Balance.retained_profit,  # retained (undistributed) profit
Balance.total_owner_equities]),  # equity attributable to owners of the parent company
[trade_date]))
# TTM calculation
ttm_factors = {Income.__name__: [Income.symbol,
Income.operating_revenue,  # operating revenue
Income.operating_profit,  # operating profit
Income.np_parent_company_owners,  # net profit attributable to shareholders of the parent company
Income.total_operating_revenue],  # total operating revenue
CashFlow.__name__: [CashFlow.symbol,
CashFlow.net_operate_cash_flow]  # net cash flow from operating activities
}
ttm_factor_sets = get_ttm_fundamental([], ttm_factors, trade_date).reset_index()
ttm_factor_sets = ttm_factor_sets.rename(columns={"np_parent_company_owners": "np_parent_company_owners_ttm"})
ttm_factor_sets = ttm_factor_sets.rename(columns={"net_operate_cash_flow": "net_operate_cash_flow_ttm"})
ttm_factor_sets = ttm_factor_sets.rename(columns={"operating_revenue": "operating_revenue_ttm"})
ttm_factor_sets = ttm_factor_sets.rename(columns={"operating_profit": "operating_profit_ttm"})
ttm_factor_sets = ttm_factor_sets.rename(columns={"total_operating_revenue": "total_operating_revenue_ttm"})
ttm_factor_sets = ttm_factor_sets.drop(columns={"trade_date"})
return valuation_sets, ttm_factor_sets, cash_flow_sets, income_sets, balance_sets
def prepare_calculate(trade_date):
# per share indicators
valuation_sets, ttm_factor_sets, cash_flow_sets, income_sets, balance_sets = get_basic_scale_data(trade_date)
valuation_sets = pd.merge(valuation_sets, income_sets, on='symbol')
valuation_sets = pd.merge(valuation_sets, ttm_factor_sets, on='symbol')
valuation_sets = pd.merge(valuation_sets, cash_flow_sets, on='symbol')
valuation_sets = pd.merge(valuation_sets, balance_sets, on='symbol')
if len(valuation_sets) <= 0 :
print("%s has no data" % trade_date)
return
else:
tic = time.time()
session = str(int(time.time() * 1000000 + datetime.now().microsecond))
cache_data.set_cache(session + str(trade_date), trade_date, valuation_sets.to_json(orient='records'))
factor_per_share_indicators.factor_calculate.delay(date_index=trade_date, session=session)
time3 = time.time()
print('per_share_cal_time:{}'.format(time3 - tic))
def do_update(start_date, end_date, count):
# load the local trading calendar
_trade_date = TradeDate()
trade_date_sets = _trade_date.trade_date_sets_ago(start_date, end_date, count)
for trade_date in trade_date_sets:
print('Factor calculation date: %s' % trade_date)
prepare_calculate(trade_date)
print('----->')
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--start_date', type=int, default=20070101)
parser.add_argument('--end_date', type=int, default=0)
parser.add_argument('--count', type=int, default=1)
parser.add_argument('--rebuild', type=bool, default=False)
parser.add_argument('--update', type=bool, default=False)
parser.add_argument('--schedule', type=bool, default=False)
args = parser.parse_args()
if args.end_date == 0:
end_date = int(datetime.now().date().strftime('%Y%m%d'))
else:
end_date = args.end_date
if args.rebuild:
processor = factor_per_share_indicators.PerShareIndicators('factor_per_share')
processor.create_dest_tables()
do_update(args.start_date, end_date, args.count)
if args.update:
do_update(args.start_date, end_date, args.count)
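prepare_calculate chains several pd.merge calls on 'symbol', and each defaults to an inner join, so a symbol missing from any one input silently drops out of the final frame. A small sketch of that behaviour and one way to surface the gaps (toy data; how='left' plus a null check is just one option):

import pandas as pd

valuation_sets = pd.DataFrame({'symbol': ['000001.XSHE', '600000.XSHG'], 'market_cap': [100.0, 300.0]})
income_sets = pd.DataFrame({'symbol': ['000001.XSHE'], 'basic_eps': [0.5]})  # 600000.XSHG missing

inner = pd.merge(valuation_sets, income_sets, on='symbol')  # default inner join
print(len(inner))  # 1 -- the missing symbol is dropped without warning

left = pd.merge(valuation_sets, income_sets, on='symbol', how='left')
print(left[left['basic_eps'].isna()]['symbol'].tolist())  # ['600000.XSHG']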
@@ -14,4 +14,6 @@ app = create_app('factor', ['factor.factor_growth',
'factor.factor_cash_flow',
'factor.factor_constrain',
'factor.factor_earning',
'factor.factor_per_share_indicators'])
'factor.factor_per_share_indicators',
'factor.factor_volatility_value_task',
'factor.factor_scale_value_task'])
# coding=utf-8
import time
from pandas.io.json import json_normalize, json
import pandas as pd
import math
import sys
sys.path.append("../")
sys.path.append("../../")
sys.path.append("../../../")
from factor import app
from factor.factor_base import FactorBase
from ultron.cluster.invoke.cache_data import cache_data
def lcap(tp_historical_value, factor_scale_value):
"""
:param tp_historical_value:
:param factor_scale_value:
:return:
"""
columns_lists = ['symbol', 'market_cap']
historical_value = tp_historical_value.loc[:, columns_lists]
historical_value['log_of_mkt_value'] = historical_value['market_cap'].map(lambda x: math.log(abs(x)))
historical_value = historical_value.drop(columns=['market_cap'], axis=1)
factor_scale_value = pd.merge(factor_scale_value, historical_value, on="symbol")
return factor_scale_value
def lflo(tp_historical_value, factor_scale_value):
"""
:param tp_historical_value:
:param factor_scale_value:
:return:
"""
columns_lists = ['symbol', 'circulating_market_cap']
historical_value = tp_historical_value.loc[:, columns_lists]
historical_value['log_of_neg_mkt_value'] = historical_value['circulating_market_cap'].map(
lambda x: math.log(abs(x)))
historical_value = historical_value.drop(columns=['circulating_market_cap'], axis=1)
factor_scale_value = pd.merge(factor_scale_value, historical_value, on="symbol")
return factor_scale_value
def nlsize(tp_historical_value, factor_scale_value):
"""
:param tp_historical_value:
:param factor_scale_value:
:return:
"""
columns_lists = ['symbol', 'log_of_mkt_value']
historical_value = tp_historical_value.loc[:, columns_lists]
historical_value['nl_size'] = historical_value['log_of_mkt_value'].map(
lambda x: pow(x, 3))
historical_value = historical_value.drop(columns=['log_of_mkt_value'], axis=1)
factor_scale_value = pd.merge(factor_scale_value, historical_value, on="symbol")
return factor_scale_value
def lst(tp_historical_value, factor_scale_value):
"""
:param tp_historical_value:
:param factor_scale_value:
:return:
"""
columns_lists = ['symbol', 'total_operating_revenue']
historical_value = tp_historical_value.loc[:, columns_lists]
historical_value['log_sales_ttm'] = historical_value['total_operating_revenue'].map(
lambda x: math.log(abs(x)))
historical_value = historical_value.drop(columns=['total_operating_revenue'], axis=1)
factor_scale_value = pd.merge(factor_scale_value, historical_value, on="symbol")
return factor_scale_value
def ltlqa(tp_historical_value, factor_scale_value):
"""
:param tp_historical_value:
:param factor_scale_value:
:return:
"""
columns_lists = ['symbol', 'total_assets']
historical_value = tp_historical_value.loc[:, columns_lists]
historical_value['log_total_last_qua_assets'] = historical_value['total_assets'].map(
lambda x: math.log(abs(x)))
historical_value = historical_value.drop(columns=['total_assets'], axis=1)
factor_scale_value = pd.merge(factor_scale_value, historical_value, on="symbol")
return factor_scale_value
@app.task(ignore_result=True)
def calculate(**kwargs):
"""
:param trade_date:
:return:
"""
fb = FactorBase('factor_scale_value')
print(kwargs)
factor_name = kwargs['factor_name']
session = kwargs['session']
trade_date = kwargs['trade_date']
content = cache_data.get_cache(session, factor_name)
total_data = json_normalize(json.loads(content))
print(len(total_data))
factor_scale_value = lcap(total_data, total_data)
factor_scale_value = lflo(factor_scale_value, factor_scale_value)
factor_scale_value = nlsize(factor_scale_value, factor_scale_value)
factor_scale_value = lst(factor_scale_value, factor_scale_value)
factor_scale_value = ltlqa(factor_scale_value, factor_scale_value)
factor_scale_value.rename(columns={'market_cap': 'mkt_value',
'circulating_market_cap': 'cir_mkt_value',
'total_operating_revenue': 'sales_ttm'},
inplace=True)
factor_scale_value = factor_scale_value[['symbol',
'mkt_value',
'cir_mkt_value',
'sales_ttm',
'total_assets',
'log_of_mkt_value',
'log_of_neg_mkt_value',
'nl_size',
'log_total_last_qua_assets',
'log_sales_ttm'
]]
factor_scale_value['id'] = factor_scale_value['symbol'] + str(trade_date)
factor_scale_value['trade_date'] = str(trade_date)
# super(HistoricalValue, self)._storage_data(factor_scale_value, trade_date)
fb._storage_data(factor_scale_value, trade_date)
# calculate(factor_name='scale20180202', trade_date=20180202, session='1562054216473773')
# calculate(factor_name='scale20180202', trade_date=20180202, session='1562901137622956')
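calculate() above passes the same frame as both arguments to lcap, lflo, nlsize, lst and ltlqa: each helper projects one source column, derives its log or power feature, drops the source column, and merges the result back on 'symbol'. A toy run of two of those helpers under the same convention (illustrative data; the functions are restated here with a .copy() to avoid pandas' chained-assignment warning, otherwise mirroring the ones above):

import math
import pandas as pd

def lcap(tp, factor):
    hv = tp.loc[:, ['symbol', 'market_cap']].copy()
    hv['log_of_mkt_value'] = hv['market_cap'].map(lambda x: math.log(abs(x)))
    return pd.merge(factor, hv.drop(columns=['market_cap']), on='symbol')

def nlsize(tp, factor):
    hv = tp.loc[:, ['symbol', 'log_of_mkt_value']].copy()
    hv['nl_size'] = hv['log_of_mkt_value'].map(lambda x: pow(x, 3))
    return pd.merge(factor, hv.drop(columns=['log_of_mkt_value']), on='symbol')

total_data = pd.DataFrame({'symbol': ['000001.XSHE'], 'market_cap': [1.0e11]})
out = lcap(total_data, total_data)  # same frame as both arguments, as in calculate()
out = nlsize(out, out)
print(out[['symbol', 'log_of_mkt_value', 'nl_size']])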