Commit 80567726 authored by 吕先亚

ai

parent bfec2eee
@@ -15,22 +15,21 @@ from api import DataSync
# 截止日期
max_date = None
# max_date = '2024-03-20'
# max_date = '2024-01-11'
toForecast = True # False means test, True means forecast
syncData = True # 开启会同步数据库指数及基金数据
syncData = False # 开启会同步数据库指数及基金数据
uploadData = True # 开启会上传预测结果
uploadData = False # 开启会上传预测结果
doReport = True # 开启会生成Excel报告
doReport = False # 开启会生成Excel报告
# 待预测指数
# PREDICT_LIST = [67, 121, 122, 123]
PREDICT_LIST = [156]
PREDICT_LIST = [67, 121, 122, 123, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163,
                164, 165, 166, 167, 168, 169, 170, 171, 174, 175, 177, 178]
# PREDICT_LIST = [67, 121, 122, 123, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163,
#                 164, 165, 166, 167, 168, 169, 170, 171, 174, 175, 177, 178]
eco = [65, 66, 74, 134]
eco = [65, 66, 74, 134, 191]
index = [67, 68, 69, 70, 71, 72, 73, 75, 76, 77, 105, 106, 116, 117, 138, 139, 142, 143, 140, 141, 144, 145, 146]
fund = [121, 122, 123, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165,
        166, 167, 168, 169, 170, 171, 174, 175, 177, 178]
# fund = [121, 122, 123, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165,
#         166, 167, 168, 169, 170, 171, 174, 175, 177, 178]
fund = [156]
@autowired
@@ -119,6 +118,7 @@ if __name__ == '__main__':
FDTRData = data_access.get_fdtr(ecoData)
# 新增指标 NAPMPMI :美國的ISM製造業指數 (Monthly)
NAPMPMIData = data_access.get_napmpmi(ecoData)
TTM = data_access.get_jifu_spx_opeps_currq_ttm(ecoData)
builder = TrainingDataBuilder(index, eco, fund, indexDict, toForecast, win1W, win1M, win1Q, numForecastDays,
                              theThreshold)
@@ -126,7 +126,7 @@ if __name__ == '__main__':
print(f'{indexDict[pid]} start '.center(50, '='))
t_data = indexData if pid in index else fundData
X_train, X_test, y_train, y_test, scaledX_forecast, forecastDay = \
    builder.build_train_test(pid, t_data, vixData, indexOtherData, cpiData, FDTRData, NAPMPMIData)
    builder.build_train_test(pid, t_data, vixData, indexOtherData, cpiData, FDTRData, NAPMPMIData, TTM)
trainer = ModelTrainer(toForecast)
rf_model = trainer.train_random_forest(X_train, y_train, X_test, y_test)
gbt_model = trainer.train_GBT(X_train, y_train, X_test, y_test)
...
@@ -36,8 +36,8 @@ class DataAccess(ABC):
def get_eco_datas(self):
ecoData = pd.DataFrame(
    get_eco_list(eco_ids=self._eco, max_date=self._max_date))
ecoData = ecoData[["red_eco_id", "red_date", "red_indicator"]]
ecoData = ecoData[["red_eco_id", "red_release_date", "red_indicator"]]
ecoData.rename(columns={"red_date": 'date'}, inplace=True) # please use 'date'
ecoData.rename(columns={"red_release_date": 'date'}, inplace=True) # please use 'date'
ecoData["red_eco_id"] = ecoData["red_eco_id"].map(self._indexDict)
return ecoData
@@ -118,3 +118,12 @@ class DataAccess(ABC):
NAPMPMIData.set_index('date', inplace=True)
NAPMPMIData.index = pd.to_datetime(NAPMPMIData.index)
return NAPMPMIData
def get_jifu_spx_opeps_currq_ttm(self, ecoData):
# 新增指标 SP500 Operating EPS Current Quarter TTM
ttm = ecoData[ecoData['red_eco_id'] == "JIFU_SPX_OPEPS_CURRQ_TTM"].copy()
del (ttm['red_eco_id'])
ttm.rename(columns={"red_indicator": 'JIFU_SPX_OPEPS_CURRQ_TTM'}, inplace=True)
ttm.set_index('date', inplace=True)
ttm.index = pd.to_datetime(ttm.index)
return ttm
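A minimal sketch (toy values only) of what this helper returns, assuming ecoData already carries indicator names in red_eco_id as produced by get_eco_datas():
import pandas as pd

eco_toy = pd.DataFrame({
    'red_eco_id': ['JIFU_SPX_OPEPS_CURRQ_TTM', 'FDTR'],
    'date': ['2024-06-30', '2024-06-12'],
    'red_indicator': [225.0, 5.5],  # toy numbers, not real data
})
ttm_toy = eco_toy[eco_toy['red_eco_id'] == 'JIFU_SPX_OPEPS_CURRQ_TTM'].copy()
del ttm_toy['red_eco_id']
ttm_toy.rename(columns={'red_indicator': 'JIFU_SPX_OPEPS_CURRQ_TTM'}, inplace=True)
ttm_toy.set_index('date', inplace=True)
ttm_toy.index = pd.to_datetime(ttm_toy.index)
# ttm_toy now has a DatetimeIndex and a single 'JIFU_SPX_OPEPS_CURRQ_TTM' column,
# ready to be merged into the feature table on 'date' in build_train_test below.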
@@ -124,7 +124,7 @@ class TrainingDataBuilder(ABC):
del (predictData['close'])
return predictData
def build_train_test(self, pid, indexData, vixData, indexOtherData, cpiData, FDTRData, NAPMPMIData):
def build_train_test(self, pid, indexData, vixData, indexOtherData, cpiData, FDTRData, NAPMPMIData, TTM):
###### Merge Data to one table
predictData = self.build_predict_data(indexData, pid)
forecastDay = None
@@ -135,26 +135,12 @@ class TrainingDataBuilder(ABC):
DataAll = pd.merge(DataAll, cpiData, how='outer', on='date')
DataAll = pd.merge(DataAll, FDTRData, how='outer', on='date')
DataAll = pd.merge(DataAll, NAPMPMIData, how='outer', on='date')
DataAll = pd.merge(DataAll, TTM, how='outer', on='date')
DataAll.set_index('date', inplace=True)
DataAll.sort_index(inplace=True)
DataAll.reset_index(inplace=True)
DataAll.ffill(inplace=True)
###### fill eco data
for col in ['CPI_YOY', 'CPURNSA', 'CPI_MOM', 'CPI_MOM_Diff']:
DataAll[col].bfill(inplace=True)
for col in ['FDTR']:
DataAll[col].ffill(inplace=True)
# 新增指数NAPMPMI :美國的ISM製造業指數 (Monthly)
for col in ['NAPMPMI']:
DataAll[col].bfill(inplace=True)
DataAll[col].ffill(inplace=True)
for col in DataAll.columns:
if col not in ['CPI_YOY', 'CPURNSA', 'CPI_MOM', 'CPI_MOM_Diff', 'futureR', 'yLabel']:
DataAll[col].ffill(inplace=True)
if (self._toForecast):
# 处理CPI_YOY:美国城镇消费物价指数同比未经季 CPURNSA:美国消费者物价指数未经季调
DataAllCopy = DataAll.copy()
...
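The change above drops the per-column bfill/ffill loops in favour of a single DataAll.ffill(). A minimal sketch (toy frame, column names assumed from the removed code) of the practical difference:
import pandas as pd

toy = pd.DataFrame({
    'date': pd.to_datetime(['2024-01-01', '2024-01-02', '2024-01-03']),
    'CPI_YOY': [None, 3.1, None],  # monthly indicator, sparse on daily rows
    'close': [4700.0, 4710.0, 4695.0],
})
toy.ffill(inplace=True)  # what the new code does for every column
print(toy['CPI_YOY'].tolist())  # [nan, 3.1, 3.1] -- the leading NaN remains
# The removed loop additionally back-filled CPI/NAPMPMI columns, which would have
# filled that leading NaN as well.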
@@ -3,26 +3,12 @@ import sys
from abc import ABC, abstractmethod
from datetime import datetime as dt
from enum import Enum, unique
from typing import List
from py_jftech import get_config, parse_date
logger = logging.getLogger(__name__)
@unique
class BacktestStep(Enum):
ASSET_POOL = 1
NORMAL_PORTFOLIO = 2
HOLD_PORTFOLIO = 3
def within(self, step: Enum):
return self.value <= step.value
def without(self, step: Enum):
return self.value >= step.value
@unique
class DatumType(Enum):
FUND = 'FUND'
@@ -30,43 +16,11 @@ class DatumType(Enum):
ECO = 'ECO'
@unique
class AssetPoolType(Enum):
OPTIMIZE = 1
@unique
class PortfoliosRisk(Enum):
FT3 = 3
@unique
class PortfoliosType(Enum):
NORMAL = 'normal'
CUSTOM = 'custom'
@unique
class SolveType(Enum):
INFEASIBLE = 0
MPT = 1
POEM = 2
RISK_PARITY = 3
@unique
class LoggerType(Enum):
SIGNAL = 'signal'
@unique
class SignalType(Enum):
NORMAL = 1
SignalType.NORMAL.p_type = PortfoliosType.NORMAL
class DataSync(ABC):
'''
数据同步服务,需要同步数据的服务,可以实现该接口
@@ -80,37 +34,6 @@ class DataSync(ABC):
pass
class Cleanable(ABC):
'''
可清除服务
'''
@property
@abstractmethod
def clean_name(self):
'''
清除数据的名称
'''
pass
@property
@abstractmethod
def clean_step(self):
'''
清除数据所属的步骤
'''
pass
@abstractmethod
def clean_up(self, min_date=None, risk: PortfoliosRisk = None):
'''
清理指定的数据
:param min_date: 指定的起始时间
:param risk: 指定的风险等级
'''
pass
class Datum(ABC):
'''
基础资料服务,基金资料数据,各种指数,指标资料数据
@@ -223,322 +146,6 @@ class Navs(ABC):
:return: 如果存在,则返回指定日期最后count个指标项(查询日期,指标,公告日期),否则返回None
'''
pass
class AssetOptimize(ABC):
'''
优选相关服务ABC
'''
@abstractmethod
def find_optimize(self, fund_ids, day):
'''
从多id中,选出指定日期最优的id
:param fund_ids: 待选id列表
:param day: 指定日期
:return: 最优的id
'''
pass
@abstractmethod
def get_optimize_pool(self, day):
'''
根据优选规则获取指定日期的优选池
:param day: 指定日期
:return: 优选id列表
'''
pass
class AssetPool(ABC):
'''
资产池相关服务
'''
@abstractmethod
def get_pool(self, day):
'''
返回指定日期的可用资产池
:param day: 指定日期
:return: 资产id列表
'''
pass
@abstractmethod
def clear(self, day=None):
'''
清除指定日期之后的资产池数据,如果没有给日期,则全部清空
:param day: 指定清除的开始日期,可选
'''
pass
class PortfoliosBuilder(ABC):
'''
投组组合构建器
'''
@abstractmethod
def get_portfolios(self, day, risk: PortfoliosRisk, type: PortfoliosType = PortfoliosType.NORMAL):
'''
获取指定日期,指定风险等级,指定类型的投资组合
:param type: 投组的类型
:param day: 指定日期
:param risk: 风险等级
:return: 资产组合字典{id: weight}
'''
pass
@abstractmethod
def build_portfolio(self, day, type: PortfoliosType):
'''
构建指定日期,指定类型的投资组合
:param day: 指定日期
:param type: 指定类型
:return 投资组合数据{risk: {...}},计算明细数据 {...}
'''
pass
@abstractmethod
def clear(self, day=None, risk: PortfoliosRisk = None):
'''
清除指定风险等级,指定日期之后的最优投组
:param day: 指定清除的开始日期,可选,如果没给,则清除全部日期
:param risk: 指定风险等级,如果没给,则清除全部风险等级
'''
pass
class PortfoliosChecker(ABC):
'''
投组组合检测器
'''
@abstractmethod
def check(self, day=None, portfolios=None):
"""
检测避免出现最优投组同时出现全部是ft或美盛基金的情况,增加一步替换动作。
@param day:
@param portfolios:
@return:
"""
pass
class Solver(ABC):
'''
解算器
'''
@abstractmethod
def solve_max_rtn(self):
'''
:return: max_rtn, max_var, minCVaR_whenMaxR
'''
pass
@abstractmethod
def solve_min_rtn(self):
'''
:return: min_rtn, min_var, maxCVaR_whenMinR
'''
pass
@abstractmethod
def solve_mpt(self, min_rtn, max_rtn):
'''
常规mpt计算
:param min_rtn: 最小回报率
:param max_rtn: 最大回报率
:return: 投组,cvar
'''
pass
@abstractmethod
def solve_poem(self, min_rtn, max_rtn, base_cvar, max_cvar):
'''
poem方式的mpt计算
:param min_rtn: 最小回报率
:param max_rtn: 最大回报率
:param base_cvar: 基础cvar
:param max_cvar: 最大cvar
:return: 投组,cvar
'''
pass
@abstractmethod
def solve_risk_parity(self):
'''
risk_parity计算
:return: 投组
'''
pass
@abstractmethod
def reset_navs(self, day):
'''
根据指定的日期,重置当前解算器,其他计算,全部依赖这里重置后的基金净值数据
:param day: 指定的日期
:return: 根据指定日期获取的,基金净值数据
'''
pass
@abstractmethod
def set_navs(self, navs):
'''
根据指定的navs,重置当前解算器
:param navs: 指定的navs
'''
pass
@abstractmethod
def set_category(self, category):
'''
根据指定的category,重置当前解算器
:param category: 指定的category
'''
pass
@property
@abstractmethod
def category(self):
pass
@property
@abstractmethod
def navs(self):
'''
:return: 当前解算器使用的基金净值
'''
pass
@property
@abstractmethod
def transfer_type(self):
"""
得出调仓类型
@return:
"""
pass
class SolverFactory(ABC):
'''
解算器工厂
'''
@abstractmethod
def create_solver(self, risk: PortfoliosRisk, type: PortfoliosType = PortfoliosType.NORMAL) -> Solver:
'''
根据指定的投组风险等级,以及投组类型,创建解算器
:param risk: 投组风险等级
:param type: 投组类型
:return: 解算器
'''
pass
class PortfoliosHolder(ABC):
'''
投资组合持仓器
'''
@abstractmethod
def get_portfolio_type(self, day, risk: PortfoliosRisk) -> PortfoliosType:
'''
获取指定日期指定风险等级持仓投组的类型
:param day: 指定日期
:param risk: 指定风险等级
:return: 持仓投组类型
'''
pass
@abstractmethod
def get_portfolios_weight(self, day, risk: PortfoliosRisk):
'''
获取指定日期指定风险等级的持仓投组比重
:param day: 指定日期
:param risk: 指定风险等级
:return: 持仓投组占比
'''
pass
@abstractmethod
def has_hold(self, risk: PortfoliosRisk) -> bool:
'''
是否存在指定分线等级的投组持仓
:param risk: 指定风险等级
:return: 如果已经存在持仓,则返回True, 否则返回False
'''
pass
@abstractmethod
def build_hold_portfolio(self, day, risk: PortfoliosRisk, force_mpt=False):
'''
构建指定日期,指定风险等级的持仓投组,以day为截止日期,会持续补满
:param day: 指定日期
:param risk: 指定风险等级
:param force_mpt: 如果为True,则强制计算当天mpt,否则不强制计算
:return:
'''
pass
@abstractmethod
def get_last_rebalance_date(self, risk: PortfoliosRisk, max_date=None):
'''
获取最后一次实际调仓的时间
:param risk: 持仓风险等级类型,必须
:param max_date: 指定日期之前的最后一次,可选
:return: 最后一次实际调仓的日期
'''
pass
@abstractmethod
def get_rebalance_date_by_signal(self, signal_id):
'''
获取指定调仓信号触发的实际调仓日期
:param signal_id: 指定的调仓信号
:return: 实际调仓日期
'''
pass
@property
@abstractmethod
def interval_days(self):
'''
返回实际交易的最小间隔交易日数
:return: 实际交易的最小间隔交易日数
'''
pass
@abstractmethod
def is_dividend_date(self, day):
"""
是否为配息日
:param day: 日期
:return: 是否为配息日
"""
pass
@abstractmethod
def clear(self, day=None, risk: PortfoliosRisk = None):
'''
清除指定风险等级,指定日期之后的持仓投组
:param day: 指定清除的开始日期,可选,如果没给,则清除全部日期
:param risk: 指定风险等级,如果没给,则清除全部风险等级
'''
pass
@property
@abstractmethod
def month_dividend(self):
"""
获取当月配息
"""
pass
class RoboExecutor(ABC):
'''
ROBO执行器,整合以上逻辑,进行实盘或回测
@@ -575,112 +182,3 @@ class RoboExecutor(ABC):
logger.warning(f'get curt date from argv failure.', e)
return dt.combine(dt.today().date(), dt.min.time())
class RoboReportor(ABC):
'''
投组报告器
'''
@property
@abstractmethod
def report_name(self) -> str:
'''
返回报告名称
:return: 报告名称
'''
pass
@abstractmethod
def load_report(self, max_date=dt.today(), min_date=None) -> List[dict]:
'''
获取指定日期的报告
:param max_date: 指定截止日期
:param min_date: 指定开始日期
:return: 报告数据
'''
pass
class RoboExportor(ABC):
'''
投组导出器
'''
@abstractmethod
def export(self, max_date=dt.today(), min_date=None):
'''
根据参数以及配置信息执行导出相关操作
:param max_date: 指定截止日期
:param min_date: 指定开始日期
'''
pass
class DataLogger(ABC):
@abstractmethod
def save_record(self, date: dt, risk: PortfoliosRisk, type: LoggerType, datas: dict, exist_merge=True):
'''
保存数据日志记录
:param date: 要保存的数据记录日期
:param risk: 要保存的数据记录风险等级
:param type: 要保存的数据记录类型
:param datas: 要保存的数据记录
:param exist_merge: 如果要保存的记录存在,需要处理的方式,与之前的数据做合并处理,则为Ture, 否则会直接覆盖
'''
pass
@abstractmethod
def load_records(self, max_date=None, min_date=None, risk: PortfoliosRisk = None, type: LoggerType = None):
'''
获取数据日志记录
:param max_date: 截止日期
:param min_date: 起始日期
:param risk: 风险等级
:param type: 日志类型
:return: 日志数据列表
'''
pass
class RebalanceSignal(ABC):
'''
控制信号,发起是否调仓服务
'''
@abstractmethod
def get_signal(self, day, risk: PortfoliosRisk):
'''
根据日期和风险等级,返回当天的调仓信号,如果没有则返回None
:param day: 指定的日期,净值日
:param risk: 指定的风险等级
:return: 如果有信号,则返回信号数据,否则返回None
'''
pass
@property
@abstractmethod
def signal_type(self) -> SignalType:
'''
返回信号类型
:return: 信号类型
'''
pass
@abstractmethod
def get_last_signal(self, day, risk: PortfoliosRisk):
'''
根据日期和风险等级,返回最近的调仓信号,如果没有则返回None
:param day: 指定的日期,净值日
:param risk: 指定的风险等级
:return: 如果有信号,则返回信号数据,否则返回None
'''
@abstractmethod
def clear(self, min_date=None, risk: PortfoliosRisk = None):
'''
清理指定的数据
:param min_date: 指定的起始时间
:param risk: 指定的风险等级
'''
pass
import json
from abc import ABC, abstractmethod
import numpy as np
import pandas as pd
from dateutil.relativedelta import relativedelta
from empyrical import sortino_ratio, annual_volatility, downside_risk, annual_return, tail_ratio
from py_jftech import filter_weekend, dict_remove, get_config, component, autowired, next_workday, \
is_workday
from api import AssetOptimize, Navs, Datum, AssetPoolType, DatumType
from asset_pool.dao import robo_assets_pool as rop, robo_indicator
class SortinoAssetOptimize(AssetOptimize, ABC):
def __init__(self):
optimize_config = get_config(__name__)
self._config = [{
**x,
'name': [f"sortino_{y[1]}_{y[0]}" for y in x.items() if y[0] != 'weight'][0]
} for x in optimize_config['sortino-weight']] if 'sortino-weight' in optimize_config else []
@property
def delta_kwargs(self):
result = []
for item in self._config:
delta_kwargs = item.copy()
del delta_kwargs['weight'], delta_kwargs['name']
result.append(delta_kwargs)
return result
def find_optimize(self, fund_ids, day):
pass
def get_optimize_pool(self, day):
pass
@property
@abstractmethod
def nav_min_dates(self) -> dict:
pass
@abstractmethod
def get_groups(self, day=None):
'''
:return: 返回待处理的id数组
'''
pass
@abstractmethod
def get_pct_change(self, fund_ids, day):
'''
根据id数组,返回指定日期的收益率
:param fund_ids: id数组
:param day: 指定的日期
:return: 收益率
'''
pass
@abstractmethod
def has_change(self, day):
return False
@component(bean_name='dividend')
class FundDividendSortinoAssetOptimize(SortinoAssetOptimize):
"""
根据索提诺比率计算基金优选的优选实现
以美国资产为主:US_STOCK、US_HY_BOND、US_IG_BOND
Sortino ratio对资产进行排序,选出排名靠前的资产(非一类选一只)
"""
@autowired
def __init__(self, navs: Navs = None, datum: Datum = None):
super().__init__()
self._navs = navs
self._datum = datum
self._conf = get_config(__name__)
@property
def annual_volatility_section(self):
return self._conf['annual-volatility-section']
@property
def annual_volatility_filter(self):
return self._conf['annual-volatility-filter']
@property
def asset_include(self):
return self._conf['asset-include']
@property
def asset_filter(self):
return self._conf.get('asset-filter')
@property
def optimize_count(self):
return self._conf['optimize-count']
@property
def nav_min_dates(self) -> dict:
return self._navs.get_nav_start_date()
def has_change(self, day):
return self._datum.update_change(day)
def find_optimize(self, fund_ids, day):
assert self._config, "find optimize, but not found sortino config."
pct_change = pd.DataFrame(self.get_pct_change(fund_ids, day))
pct_change.set_index('date', inplace=True)
sortino = pd.DataFrame()
for item in self._config:
ratio = dict(sortino_ratio(
pct_change.truncate(before=(day - relativedelta(**dict_remove(item, ('weight', 'name')))))))
sortino = pd.concat([sortino, pd.DataFrame([ratio], index=[item['name']])])
sortino = sortino.T
sortino['score'] = sortino.apply(lambda r: sum([x['weight'] * r[x['name']] for x in self._config]), axis=1)
sortino.sort_values('score', ascending=False, inplace=True)
records = sortino.to_dict(orient='index')
data = {fund_ids[k]: v for k, v in records.items()}
self.save_sortino(day, data)
# 取得分数高的前optimize_count个
return pct_change.columns[sortino.index[0:self.optimize_count]].values, sortino['score']
def save_sortino(self, day, datas):
for key, record in datas.items():
record = {k: v for k, v in record.items() if not (np.isnan(v) or np.isinf(v))}
robo_indicator.update_sortino(key, day, json.dumps(record))
def get_optimize_pool(self, day):
opt_pool = rop.get_one(day=day, type=AssetPoolType.OPTIMIZE)
if opt_pool is not None:
return json.loads(opt_pool['asset_ids'])
last_one = rop.get_last_one(day=day, type=AssetPoolType.OPTIMIZE)
if not last_one or day > last_one['date']:
pool = []
min_dates = self.nav_min_dates
max_incept_date = sorted([(day - relativedelta(**x)) for x in self.delta_kwargs])[0]
max_incept_date = max_incept_date if is_workday(max_incept_date) else next_workday(max_incept_date)
for fund_group in self.get_groups(day):
fund_group = [x for x in fund_group if min_dates[x] <= max_incept_date]
if len(fund_group) > self.optimize_count:
pool.extend(self.find_optimize(tuple(fund_group), day)[0])
elif len(fund_group) <= self.optimize_count:
pool.extend(fund_group)
if len(pool) < get_config('portfolios.solver.asset-count')[0]:
raise ValueError(f"基金优选个数小于{get_config('portfolios.solver.asset-count')[0]},请调整参数")
rop.insert(day, AssetPoolType.OPTIMIZE, sorted(pool))
last_one = rop.get_last_one(day=day, type=AssetPoolType.OPTIMIZE)
return json.loads(last_one['asset_ids'])
def do_annual_volatility_filter(self, day, funds):
"""
年化波动率过滤器
@return:
"""
filtered = []
fund_ids = [fund['id'] for fund in funds]
pct_change = pd.DataFrame(self.get_pct_change(fund_ids, day))
pct_change.set_index('date', inplace=True)
pct_change = pct_change.truncate(before=(day - relativedelta(**self.annual_volatility_section[0])))
# 时间未够计算年化波动的直接剔除
funds = [fund for fund in funds if fund['id'] in pct_change.columns]
ratio = downside_risk(pct_change) # annual_volatility / downside_risk / tail_ratio
ratio = pd.Series(ratio).to_dict()
annual = dict(zip(pct_change.columns, ratio.values()))
self.save_annual(day, annual)
filters = self.annual_volatility_filter
for f in filters:
customType = f.get('customType')
exclude = f.get('exclude')
volatility = f.get('volatility')
retain = f.get('min-retain')
records = [fund for fund in funds if fund['customType'] == customType and fund['id'] in annual.keys()]
records = sorted(records, key=lambda x: annual[x['id']])
max_exclude = len(records) - retain
if exclude is not None:
exclude = exclude if len(records) > exclude else len(records)
exclude = max_exclude if (len(records) - exclude) < retain else exclude
if exclude > 0:
filtered.extend(records[-exclude:])
records = records[:- exclude]
if volatility is not None and len(records) > retain:
max_exclude = max_exclude - len(records)
records = [record for record in records if annual.get(record['id']) > volatility][:max_exclude]
filtered.extend(records)
for f in filtered:
funds.remove(f)
return funds
def save_annual(self, day, annual):
datas = []
for key, record in annual.items():
data = {
"id": key,
"date": day,
"annual": record,
}
datas.append(data)
robo_indicator.insert(datas)
def get_filtered_funds(self, day):
funds = self._datum.get_datums(type=DatumType.FUND)
if get_config('portfolios.checker.month-fund-filter'):
# 如果有按月剔除
filters = get_config('portfolios.checker.month-fund-filter')
excludes = filters.get(day.month)
if excludes:
for f in funds[:]:
if f['bloombergTicker'] in excludes:
funds.remove(f)
if self.asset_filter:
filters = list(self.asset_filter.keys())[0]
funds_in = []
for fund in funds:
if fund[filters] in self.asset_filter[filters]:
funds_in.append(fund)
return funds_in
funds = self.do_annual_volatility_filter(day, funds)
return funds
def get_groups(self, day=None):
funds = pd.DataFrame(self.get_filtered_funds(day))
if len(funds) < get_config('portfolios.solver.asset-count')[0]:
raise ValueError(f"{day}==基金优选个数小于{get_config('portfolios.solver.asset-count')[0]},请调整参数")
result = []
if self.asset_include:
include = list(self.asset_include.keys())[0]
for key, fund_group in funds.groupby(by=include):
if key in self.asset_include[include]:
result.append(tuple(fund_group['id']))
else:
for (category, asset_type), fund_group in funds.groupby(by=['category', 'assetType']):
result.append(tuple(fund_group['id']))
return result
def get_pct_change(self, fund_ids, day):
if not self._config:
raise Exception("find optimize, but not found sortino config.")
days = [day - relativedelta(days=7, **dict_remove(x, ('weight', 'name'))) for x in self._config]
days.append(day - relativedelta(days=7, **self.annual_volatility_section[0]))
start = filter_weekend(sorted(days)[0])
fund_navs = pd.DataFrame(self._navs.get_fund_navs(fund_ids=tuple(fund_ids), min_date=start, max_date=day))
if not fund_navs.empty:
fund_navs.sort_values('nav_date', inplace=True)
fund_navs = fund_navs.pivot_table(index='nav_date', columns='fund_id', values='nav_cal')
fund_navs.fillna(method='ffill', inplace=True)
fund_navs = fund_navs.loc[fund_navs.index >= start + relativedelta(days=6)]
fund_navs.dropna(axis=1, inplace=True)
result = round(fund_navs.pct_change().dropna(), 4)
result.reset_index(inplace=True)
result.rename(columns={'nav_date': 'date'}, inplace=True)
return result.to_dict('records')
return []
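For reference, find_optimize() above scores each fund as a weighted sum of per-window Sortino ratios, with window names built as sortino_<value>_<unit>. A minimal sketch using the sortino-weight settings from the configs below and toy ratios:
config = [
    {'months': 3, 'weight': 0.5, 'name': 'sortino_3_months'},
    {'months': 6, 'weight': 0.3, 'name': 'sortino_6_months'},
    {'years': 1, 'weight': 0.2, 'name': 'sortino_1_years'},
]
ratios = {'sortino_3_months': 1.2, 'sortino_6_months': 0.8, 'sortino_1_years': 0.5}  # toy values
score = sum(item['weight'] * ratios[item['name']] for item in config)
print(round(score, 3))  # 0.94 -- funds are then ranked by this score, highest first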
CREATE TABLE IF NOT EXISTS robo_assets_pool
(
rap_id BIGINT UNSIGNED NOT NULL AUTO_INCREMENT,
rap_date DATETIME NOT NULL COMMENT '数据日期',
rap_type TINYINT NOT NULL COMMENT '资产池类别',
rap_asset_ids JSON DEFAULT NULL COMMENT '基金ID',
rap_create_time DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
rap_update_time DATETIME DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (rap_id),
UNIQUE INDEX (rap_date, rap_type),
INDEX (rap_type)
) ENGINE = InnoDB
AUTO_INCREMENT = 0
DEFAULT CHARSET = utf8mb4 COMMENT '资产池';
CREATE TABLE IF NOT EXISTS robo_indicator
(
`ri_rbd_id` bigint(20) NOT NULL,
`ri_date` datetime NOT NULL,
`ri_annual` double NOT NULL,
`ri_sortino` json NULL,
`ri_create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
`ri_update_time` datetime NULL DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP,
UNIQUE INDEX `ri_rbd_id`(`ri_rbd_id`, `ri_date`) USING BTREE
) ENGINE = InnoDB CHARACTER SET = utf8mb4 COLLATE = utf8mb4_general_ci ROW_FORMAT = Dynamic;
\ No newline at end of file
import json
from py_jftech import read, write, where, format_date
from api import AssetPoolType
__COLUMNS__ = {
'rap_id': 'id',
'rap_date': 'date',
'rap_type': 'type',
'rap_asset_ids': 'asset_ids',
}
@read
def get_list(max_date=None, min_date=None, type: AssetPoolType = None):
sqls = []
if max_date:
sqls.append(f"rap_date <= '{format_date(max_date)}'")
if min_date:
sqls.append(f"rap_date >= '{format_date(min_date)}'")
return f'''
select {','.join([f"`{x[0]}` as `{x[1]}`" for x in __COLUMNS__.items()])} from robo_assets_pool
{where(*sqls, rap_type=type)} order by rap_type, rap_date
'''
@read(one=True)
def get_one(day, type: AssetPoolType):
return f'''select {','.join([f"`{x[0]}` as `{x[1]}`" for x in __COLUMNS__.items()])} from robo_assets_pool {where(rap_date=day, rap_type=type)}'''
@read(one=True)
def get_last_one(type: AssetPoolType = None, day=None):
sql = f"rap_date <= '{format_date(day)}'" if day else None
return f'''select {','.join([f"`{x[0]}` as `{x[1]}`" for x in __COLUMNS__.items()])} from robo_assets_pool {where(sql, rap_type=type)} order by rap_date desc limit 1'''
@write
def insert(day, type: AssetPoolType, pool: list):
return f'''
insert into robo_assets_pool(rap_date, rap_type, rap_asset_ids)
values ('{format_date(day)}', {type.value},'{json.dumps(pool)}')
'''
@write
def delete(day=None):
if day:
return f"delete from robo_assets_pool where rap_date >= '{format_date(day)}'"
else:
return 'truncate table robo_assets_pool'
from py_jftech import write, mapper_columns
__COLUMNS__ = {
'ri_rbd_id': 'id',
'ri_date': 'date',
'ri_annual': 'annual',
'ri_sortino': 'sortino',
}
@write
def insert(datas):
datas = [mapper_columns(datas=x, columns=__COLUMNS__, ignore_none=False) for x in datas]
values = ','.join(
[f'''({','.join([(f"'{x[j]}'" if j in x and x[j] is not None else 'null') for j in __COLUMNS__.keys()])})''' for
x in datas])
return f'''insert into robo_indicator({','.join(__COLUMNS__.keys())}) values {values}'''
@write
def update_sortino(id, date, sortino):
return f'''update robo_indicator set ri_sortino='{sortino}' where ri_rbd_id={id} and ri_date='{date}' '''
@write
def clear():
return 'TRUNCATE robo_indicator'
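A minimal sketch of the VALUES string insert() builds for one toy record, assuming mapper_columns() has already renamed the friendly keys ('id', 'date', ...) back to the ri_* column names:
columns = {'ri_rbd_id': 'id', 'ri_date': 'date', 'ri_annual': 'annual', 'ri_sortino': 'sortino'}
record = {'ri_rbd_id': 156, 'ri_date': '2024-03-20', 'ri_annual': 0.12, 'ri_sortino': None}
values = '(' + ','.join(f"'{record[k]}'" if record.get(k) is not None else 'null' for k in columns) + ')'
print(values)  # ('156','2024-03-20','0.12',null)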
from datetime import datetime as dt
from py_jftech import component, autowired
from api import AssetPool, AssetOptimize
from asset_pool.dao import robo_assets_pool as rap, robo_indicator
@component
class FundAssetPool(AssetPool):
@autowired
def __init__(self, optimize: AssetOptimize = None):
self._optimize = optimize
def get_pool(self, day=dt.today()):
return self._optimize.get_optimize_pool(day)
def clear(self, day=None):
rap.delete(day)
robo_indicator.clear()
import logging
import unittest
from py_jftech import autowired, parse_date
from api import AssetOptimize
logger = logging.getLogger(__name__)
class AssetPoolTest(unittest.TestCase):
@autowired(names={'asset': 'dividend'})
def test_dividend_asset_optimize(self, asset: AssetOptimize = None):
asset.get_optimize_pool(parse_date('2023-03-01'))
if __name__ == '__main__':
unittest.main()
import os
from datetime import datetime, timedelta
from pathlib import Path
import pandas as pd
import pytz
import requests
from openpyxl.reader.excel import load_workbook
def is_dst():
"""
判断当前时区是否实行夏令时
@return:
"""
tz = pytz.timezone('America/New_York')
now = datetime.now(tz)
return now.dst() != timedelta(0)
def usa_close_day():
"""
美股收盘时间,收盘后,日期+1天
@return:
"""
tz = pytz.timezone('America/New_York')
now = datetime.now(tz)
if is_dst():
# 夏令时
if now.hour > 16:
now = now + timedelta(1)
else:
# 冬令时
if now.hour > 17:
now = now + timedelta(1)
return now.strftime("%Y%m%d")
def get_quarter_end_date(date=None):
"""
@return: 当前日期所在季度的最后一天日期
"""
# 获取当前日期
if date is None:
date = datetime.now()
# 计算当前季度
current_quarter = (date.month - 1) // 3 + 1
# 计算季度的最后一个月份
if current_quarter == 1:
quarter_end_month = 3
elif current_quarter == 2:
quarter_end_month = 6
elif current_quarter == 3:
quarter_end_month = 9
else:
quarter_end_month = 12
# 计算季度末的日期
next_month_day = datetime(date.year, quarter_end_month, 1) + timedelta(days=31)
quarter_end_date = next_month_day - timedelta(days=next_month_day.day)
return quarter_end_date
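A quick illustrative check of the quarter-end computation (toy dates):
from datetime import datetime

for d in [datetime(2024, 2, 10), datetime(2024, 5, 1), datetime(2024, 8, 31), datetime(2024, 11, 30)]:
    print(d.date(), '->', get_quarter_end_date(d).date())
# 2024-02-10 -> 2024-03-31
# 2024-05-01 -> 2024-06-30
# 2024-08-31 -> 2024-09-30
# 2024-11-30 -> 2024-12-31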
def list_files_sorted_by_name(directory, max_day=None):
"""
文件排序
@param directory: 所在目录
@param max_day: 期望最大日期
@return: 返回日期小于max_day的所有文件
"""
files = []
for root, dirs, filenames in os.walk(directory):
for filename in filenames:
files.append(os.path.join(root, filename))
files.sort() # 默认是按照字典序排序
if max_day:
files = [f for f in files[:-1] if str(f)[-13:-5] >= max_day.strftime("%Y%m%d")]
return files
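Note that the [-13:-5] slice relies on the resource filename convention used by save_sp500(), e.g. 'sp-500-eps-est_USA20241014.xlsx'. An illustrative check:
name = 'resources/sp-500-eps-est_USA20241014.xlsx'
print(name[-13:-5])  # 20241014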
def fetch_sp500():
temp_file = Path(__file__).parent/'resources/sp-500.xlsx'
response = requests.get("https://www.spglobal.com/spdji/en/documents/additional-material/sp-500-eps-est.xlsx")
# 确保请求成功
if response.status_code == 200:
# 保存临时文件
with open(temp_file, 'wb') as f:
f.write(response.content)
else:
print(f"Failed to retrieve file: {response.status_code}")
def save_sp500():
fetch_sp500()
files = list_files_sorted_by_name(Path(__file__).parent/'resources')[-2:]
compare_day = None
for file in files:
# 使用openpyxl加载Excel文件
wb = load_workbook(filename=file, data_only=True)
ws = wb['ESTIMATES&PEs']
# 读取特定单元格的值
report_day = ws['A2'].value
if compare_day is None:
compare_day = report_day
else:
if compare_day != report_day:
wb.save(Path(__file__).parent/f'resources/sp-500-eps-est_USA{usa_close_day()}.xlsx')
# 关闭工作簿
wb.close()
def sync_sp500(day):
file = Path(__file__).parent/'resources/sp-500-eps-est_USA20241014.xlsx'
if day:
files = list_files_sorted_by_name(Path(__file__).parent / 'resources', day)
if files:
file = files[-1]
else:
return []
wb = load_workbook(filename=file, data_only=True)
ws = wb['ESTIMATES&PEs']
estimates = "ESTIMATES"
estimates_row = 0
actuals = "ACTUALS"
actuals_row = 0
datas = []
# 遍历A列
for row in range(100, 300):
cell_value = ws[f'A{row}'].value
if cell_value and estimates == str(cell_value):
estimates_row = row
if cell_value and actuals == str(cell_value):
actuals_row = row
break
report_day = ws['A2'].value
for i in range(estimates_row + 1, actuals_row):
if ws[f'A{i}'].value is None:
break
date_value = datetime.strptime(str(ws[f'A{i}'].value).split(' ')[0].strip(), '%m/%d/%Y') if type(
ws[f'A{i}'].value) == str else ws[f'A{i}'].value
if date_value < report_day:
data = {'date': date_value,
'eps': ws[f'C{i}'].value}
data["releaseDate"] = data['date'] + timedelta(days=1)
datas.append(data)
elif date_value == get_quarter_end_date(report_day):
data = {'date': report_day,
'eps': ws[f'C{i}'].value,
'releaseDate': datetime.strptime(str(file)[-13:-5], "%Y%m%d")}
datas.append(data)
for i in range(actuals_row + 1, ws.max_row):
if ws[f'A{i}'].value is None:
break
data = {'date': datetime.strptime(str(ws[f'A{i}'].value).strip(), '%m/%d/%Y') if type(
ws[f'A{i}'].value) == str else ws[f'A{i}'].value,
'eps': ws[f'C{i}'].value}
data["releaseDate"] = data['date'] + timedelta(days=1)
datas.append(data)
wb.close()
datas = pd.DataFrame(datas[::-1])
datas['close'] = datas['eps'].rolling(window=4).sum().round(2)
datas.dropna(inplace=True)
return datas.to_dict(orient="records")[-1::] if day else datas.to_dict(orient="records")
if __name__ == '__main__':
# print(list_files_sorted_by_name(Path(__file__).parent / 'resources'))
# save_sp500()
sync_sp500(day=None)
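sync_sp500() turns the quarterly EPS column into a trailing-twelve-month figure by summing the last four quarters. A minimal sketch with toy numbers:
import pandas as pd

eps = pd.DataFrame({'eps': [54.0, 55.5, 57.0, 58.5, 60.0]})  # five quarters, toy values
eps['close'] = eps['eps'].rolling(window=4).sum().round(2)
print(eps['close'].tolist())  # [nan, nan, nan, 225.0, 231.0] -- the first full TTM needs four quarters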
@@ -13,6 +13,7 @@ from py_jftech import format_date, is_workday, component, autowired, get_config,
from api import DatumType, DataSync, Datum
from basic.dao import robo_index_datas as rid, robo_eco_datas as red, robo_fund_navs as rfn, robo_exrate as re
from basic.sp500 import sync_sp500
logger = logging.getLogger(__name__)
@@ -208,7 +209,7 @@ class EcoSync(JDCDataSync):
@property
def start_date(self):
return super(EcoSync, self).start_date - relativedelta(years=4)
return super().start_date - relativedelta(years=4)
@property
def datum_type(self) -> DatumType:
@@ -219,6 +220,8 @@ class EcoSync(JDCDataSync):
return next_workday(last['date']) if last else self.start_date
def build_urls(self, datum, start_date, page=0) -> str:
if datum.get("source") == "calculating":
return None
return f'http://jdcprod.thiztech.com/api/datas/eco-value?page={page}&size=200&sourceCode={quote(datum["bloombergTicker"])}&sourceType=BLOOMBERG&startDate={format_date(start_date)}'
def store_date(self, datumid, datas: List[dict]):
@@ -232,6 +235,33 @@ class EcoSync(JDCDataSync):
red.batch_insert(save_datas)
@component(bean_name='eco-sync-calculating')
class EcoSync(EcoSync):
def datum_start_date(self, datum_id):
last = red.get_last_one(eco_id=datum_id)
return next_workday(last['release_date']) if last else None
def do_sync(self, max_date=dt.today()):
logger.info(f'start sync datas for type[{self.datum_type}]')
for datum in self._datum.get_datums(type=self.datum_type):
if datum.get("source") == "calculating":
logger.debug(f'start sync ticker[{datum["bloombergTicker"]}]')
start_date = self.datum_start_date(datum['id'])
datas = sync_sp500(start_date)
self.store_date(datum['id'], datas)
def store_date(self, datumid, datas: List[dict]):
save_datas = [{
'eco_id': datumid,
'date': x['date'],
'indicator': x['close'],
'release_date': x['releaseDate'],
} for x in datas]
if save_datas:
red.batch_insert(save_datas)
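A minimal sketch (toy record; the eco id is assumed, not taken from the commit) of how one sync_sp500() row is reshaped by store_date() before red.batch_insert():
from datetime import datetime

row = {'date': datetime(2024, 9, 30), 'eps': 57.0, 'close': 225.0,
       'releaseDate': datetime(2024, 10, 14)}  # shape as produced by sync_sp500(), toy values
save_data = {'eco_id': 191, 'date': row['date'], 'indicator': row['close'],
             'release_date': row['releaseDate']}  # 191 is only an assumed example id
print(save_data)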
@component(bean_name='navs-sync')
class FundNavSync(JDCDataSync):
...
py-jftech:
logger:
version: 1
formatters:
brief:
format: "%(asctime)s - %(levelname)s - %(message)s"
simple:
format: "%(asctime)s - %(filename)s - %(levelname)s - %(message)s"
handlers:
console:
class: logging.StreamHandler
formatter: simple
level: DEBUG
stream: ext://sys.stdout
file:
class: logging.handlers.TimedRotatingFileHandler
level: INFO
formatter: brief
filename: ${LOG_FILE:logs/info.log}
interval: 1
backupCount: 30
encoding: utf8
when: D
# loggers:
# basic.sync:
# level: DEBUG
# handlers: [console]
# propagate: no
root:
level: ${LOG_LEVEL:INFO}
handlers: ${LOG_HANDLERS:[ console ]}
database:
host: ${MYSQL_HOST:192.168.68.85}
port: ${MYSQL_PORT:3306}
user: ${MYSQL_USER:root}
password: ${MYSQL_PWD:changeit}
dbname: ${MYSQL_DBNAME:industryfund} # industryfund
injectable:
names:
backtest: robo_executor.BacktestExecutor
datum: basic.datum.DefaultDatum
hold-report: portfolios.holder.DivHoldReportor
mpt: portfolios.builder.PoemARCPortfoliosBuilder # PoemARCPortfoliosBuilder, RiskParityARCPortfoliosBuilder
dividend-holder: portfolios.holder.InvTrustPortfoliosHolder
navs-sync: basic.sync.FundNavSync
email:
server: smtphz.qiye.163.com
user: jft-ra@thizgroup.com
password: 5dbb#30ec6d3
mulit-process:
max-workers: ${MAX_PROCESS:1}
basic: # 基础信息模块
sync:
start-date: 1990-01-01 # 同步数据开始日期
datum: # 资料模块
change:
date: ${DATUM_CHANGE_DATE}
file: ${DATUM_CHANGE_FILE}
excludes: # 排除的资料彭博ticker
# backtest:
# - 'FKRCX US Equity' # 富蘭克林黃金基金 美元 A(Ydis)
real:
- 'XXX LX Equity'
navs: # 净值模块
exrate: # 汇率,如果不开启,整个这块注释掉
- from: EUR # 需要转换的货币类型
ticker: EURUSD BGN Curncy # 汇率值的彭博ticker
asset-pool: # 资产池模块
asset-optimize: # 资产优选模块
sortino-weight: # sortino计算需要的权重,下面每一条为一次计算,e.g. months: 3, weight: 0.5 表示 3个月数据使用权重0.5来计算分值
- months: 3
weight: 0.5
- months: 6
weight: 0.3
- years: 1
weight: 0.2
asset-include: {'customType':[1,2,3,4]}
optimize-count: 3 #基金优选个数
annual-volatility-filter: #1各资产年化波动率末exclude位 2各资产年化波动率大于volatility
# - customType: 1
# min-retain: 4
# exclude: 0
# volatility: 1000
- customType: 3
min-retain: 4
exclude: 0
volatility: 1000
annual-volatility-section: # 波动率时间区间
- years: 1
portfolios: # 投组模块
holder: # 持仓投组相关
init-nav: 100 # 初始金额
min-interval-days: 10 # 两次实际调仓最小间隔期,单位交易日
dividend-rate: 0.0 #设定年化配息率
dividend-date: 15 #配息日,每月15号
dividend-adjust-day: [1,4,7,10] #每年的首个季度调整配息
warehouse-frequency: 1 #每隔1个月调一次仓
warehouse-transfer-date: 1 #调仓日
redeem-list: [ 'TEUSAAU LX Equity', 'LIGTRAA ID Equity', 'TEMFHAC LX Equity', 'LUSHUAA ID Equity' ] #从持仓中的低风险资产“直接”按序赎回
solver: # 解算器相关
model: prr # 结算模型 ARC ,PRR, ~ 标准解算器
arc: on #是否开启ARC
brr: 0.0 #误差补偿值
trr: 5
tol: 1E-10 # 误差满足条件
navs: # 净值要求
range: # 需要净值数据的区间, days: 90 表示90自然日,months: 3 表示3个自然月
days: 90
max-nan: # 最大缺失净值条件
asset: 8 # 单一资产最多缺少多少交易日数据,则踢出资产池
day: 0.5 # 单一交易日最多缺少百分之多少净值,则删除该交易日
risk: [] # 资产风险等级要求,可分开写也可以合并写,e.g. risk:[ 2, 3 ] 则表示 所有投组资产风险等级都是 2 或 3
LARC: [0.50, 0.00] #低阈值
UARC: [1.00, 1.00] #高阈值
matrix-rtn-days: 20 # 计算回报率矩阵时,回报率滚动天数
asset-count: [3,4] # 投组资产个数。e.g. count 或 [min, max] 分别表示 最大最小都为count 或 最小为min 最大为max,另外这里也可以类似上面给不同风险等级分别配置
mpt: # mpt计算相关
cvar-beta: 0.2 # 计算Kbeta 需要用到
quantile: 0.9 # 分位点,也可以给不同风险等级分别配置
low-weight: 0.05 # 最低权重
high-weight: [ 0.50 ] # 最高权重比例,可给一个值,也可以给多个值,当多个值时,第一个表示只有一个资产时权重,第二个表示只有两个资产时权重,以此类推,最后一个表示其他资产个数时的权重
poem: # poem相关
cvar-scale-factor: 0.1 # 计算时用到的系数
checker: #投组检测模块
switch: off #是否开启检查
custom-type-priority: [3,2,1,4] # 检测优先级
month-fund-filter: {4:['XXX ID Equity']} # 'LMAOMPU ID Equity' 根据月份删除某几档基金,51勞動節:美盛西方資產亞洲機會債券基金 A 增益配息 (M) 美元
reports: # 报告模块相关
navs:
type: FUND
tickers:
- TEMTECI LX Equity
- TEPLX US Equity
- FRDPX US Equity
- FKRCX US Equity
- FTNRACU LX Equity
benchmark: # benchmark报告
ft:
init-amount: 100 # 初始金额
stock-rate: # stock型基金比例
RR3: 0.3
RR4: 0.5
RR5: 0.7
fixed-range: # 固定区间收益率
range-dates: # 固定起始截止日期
- start: 2008-01-01
end: 2008-10-27
- start: 2011-05-02
end: 2011-10-04
- start: 2013-05-08
end: 2013-06-24
- start: 2014-09-03
end: 2014-12-16
- start: 2015-04-28
end: 2016-01-21
- start: 2018-01-26
end: 2018-10-29
- start: 2020-01-20
end: 2020-03-23
relative-range: # 相对区间收益率
range-dates: # 相对时间周期
- days: 1
name: '一天'
- weeks: 1
name: '一周'
- months: 1
name: '一月'
- months: 3
name: '三月'
- months: 6
name: '六月'
- years: 1
name: '一年'
- years: 2
name: '两年'
- years: 3
name: '三年'
- years: 5
name: '五年'
- years: 10
name: '十年'
- dates: ~
name: '成立以来'
exports:
backtest: # 回测导出曹策略
save-path: ${EXPORT_PATH:excels} # 导出报告文件存放路径,如果以./或者../开头,则会以执行python文件为根目录,如果以/开头,则为系统绝对路径,否则,以项目目录为根目录
file-name: ${EXPORT_FILENAME:real} # 导出报告的文件名
save-config: ${EXPORT_CONFIG:off} # 是否保存配置文件
include-report: # 需要导出的报告类型列表,下面的顺序,也代表了excel中sheet的顺序
# - funds-report # 基金资料
# - navs-report # 净值报告
- hold-report # 持仓报告
- signal-report # 信号报告
# - benckmark-report # benckmark报告
# - combo-report # 持仓对比
- indicators-report # 各种特殊指标报告
- fixed-range-report # 固定区间收益报告
- relative-range-report # 相对区间收益报告
- year-range-report # 单年区间业绩报告
# - month-div-rate-report # 月度配息率比较
# - year-div-rate-report # 年度配息率比较
real-daily:
file-name: IndustryFoF_prr5(實盤)-每月投組推薦
include-report:
# - daily-hold-report
- daily-signal-report
email:
receives:
- brody_wu@chifufund.com
copies: ${DAILY_EMAIL_COPIES}
subject:
# default: "ROBO6_TAIBEI-实盘版-每日投組推薦_{today}"
rebalance: "IndustryFoF_prr5(實盤)-每月投組推薦_{today}"
content:
# default: "Dear All: 附件是今天生成的推薦組合,請驗收,謝謝! 注>:該郵件為自動發送,如有問題請聯繫矽谷團隊 telan_qian@chifufund.com"
rebalance: "Dear All: 附檔為每月投資組合推薦,請驗收,謝謝! 注>:該郵件為自動發送,如有問題請聯繫矽谷團隊 brody_wu@chifufund.com"
daily-monitor:
file-name: svROBO6_monitor
include-report:
- name: relative-range-report # 相对区间收益报告
min-date: ~
- name: contribution-report # 贡献率报告
min-date: {days: 30}
- name: high-weight-report # 高风险资产占比
min-date: {days: 30}
- name: asset-pool-report # 基金池
min-date: {days: 30}
- name: combo-report # 持仓报告
min-date: {days: 40}
- name: mpt-report
min-date: {days: 30}
- name: signal-report
min-date: ~
- name: crisis-one-report
min-date: {days: 30}
- name: crisis-two-report
min-date: {days: 30}
- name: market-right-report
min-date: {days: 30}
- name: drift-buy-report
min-date: {days: 30}
email:
receives:
- wenwen.tang@thizgroup.com
copies: ${MONITOR_EMAIL_COPIES}
subject: "SVROBO6-实盘版-每日监测_{today}"
content: "Dear All: 附件是今天生成的监测数据,請驗收,謝謝! 注>:該郵件為自動發送,如有問題請聯繫矽谷團隊 telan_qian@chifufund.com"
robo-executor: # 执行器相关
use: ${ROBO_EXECUTOR:backtest} # 执行哪个执行器,优先取系统环境变量ROBO_EXECUTOR的值,默认backtest
sync-data: ${SYNC_DATA:on} # 是否开启同步资料数据
backtest: # 回测执行器相关
start-date: 2013-01-02 # 回测起始日期
end-date: 2023-12-29 # 回测截止日期
sealing-period: 10 #调仓封闭期
start-step: ${BACKTEST_START_STEP:1} # 回测从哪一步开始执行 1:计算资产池;2:计算最优投组:3:计算再平衡信号以及持仓投组
end-step: ${BACKTEST_END_STEP:3} # 回测从哪一步执行完成后结束执行 1:计算资产池;2:计算最优投组:3:计算再平衡信号以及持仓投组
clean-up: off
real: # 实盘执行器
export: ${EXPORT_ENABLE:on} # 是否开启报告
start-date: 2023-05-08 # 实盘开始时间
include-date: []
py-jftech:
logger:
version: 1
formatters:
brief:
format: "%(asctime)s - %(levelname)s - %(message)s"
simple:
format: "%(asctime)s - %(filename)s - %(levelname)s - %(message)s"
handlers:
console:
class: logging.StreamHandler
formatter: simple
level: DEBUG
stream: ext://sys.stdout
file:
class: logging.handlers.TimedRotatingFileHandler
level: INFO
formatter: brief
filename: ${LOG_FILE:logs/info.log}
interval: 1
backupCount: 30
encoding: utf8
when: D
# loggers:
# basic.sync:
# level: DEBUG
# handlers: [console]
# propagate: no
root:
level: ${LOG_LEVEL:INFO}
handlers: ${LOG_HANDLERS:[ console ]}
database:
host: ${MYSQL_HOST:192.168.68.85}
port: ${MYSQL_PORT:3306}
user: ${MYSQL_USER:root}
password: ${MYSQL_PWD:changeit}
dbname: ${MYSQL_DBNAME:mdis_prr3} # mdis_prr3
injectable:
names:
backtest: robo_executor.BacktestExecutor
datum: basic.datum.DefaultDatum
hold-report: portfolios.holder.DivHoldReportor
mpt: portfolios.builder.PoemARCPortfoliosBuilder # RiskParityARCPortfoliosBuilder/PoemARCPortfoliosBuilder
dividend-holder: portfolios.holder.InvTrustPortfoliosHolder
navs-sync: basic.sync.FundNavSync
email:
server: smtphz.qiye.163.com
user: jft-ra@thizgroup.com
password: 5dbb#30ec6d3
mulit-process:
max-workers: ${MAX_PROCESS:1}
basic: # 基础信息模块
sync:
start-date: 1990-01-01 # 同步数据开始日期
datum: # 资料模块
change:
date: ${DATUM_CHANGE_DATE}
file: ${DATUM_CHANGE_FILE}
excludes: # 排除的资料彭博ticker
backtest:
- 'FRSTAMP LX Equity' # 富蘭克林坦伯頓全球投資系列-精選收益基金 美元A(Mdis-pc)股
# - 'TEMASAD LX Equity' # 富蘭克林坦伯頓全球投資系列-亞洲債券基金 美元A(Mdis)股
# - 'TEMEAUS LX Equity' # 富蘭克林坦伯頓全球投資系列-新興國家固定收益基金 美元A(Mdis)股
# - 'TEMDAMU LX Equity' # 富蘭克林坦伯頓全球投資系列-新興市場月收益基金美元A穩定月配股
# - 'TEMGINI LX Equity' # 富蘭克林坦伯頓全球投資系列-全球債券基金 美元A(Mdis)股
# - 'TGTRFAD LX Equity' # 富蘭克林坦伯頓全球投資系列-全球債券總報酬基金 美元A(Mdis)股
# - 'LMBADMU ID Equity' # 美盛布蘭迪全球固定收益基金 A 美元 配息
# - 'LGBOAUI ID Equity' # 美盛布蘭迪全球機會固定收益基金 A 美元 配息 (M)
# - 'FTEAMUH LX Equity' # 富蘭克林坦伯頓全球投資系列-歐洲股票收益基金美元避險A(Mdis)股-H1
# - 'TEMHYAD LX Equity' # 富蘭克林坦伯頓全球投資系列-歐洲非投資等級債券基金 歐元A(Mdis)股
real:
- 'FRSTAMP LX Equity' # 富蘭克林坦伯頓全球投資系列-精選收益基金 美元A(Mdis-pc)股
# - 'FTSIADU LX Equity' # 富蘭克林坦伯頓全球投資系列-精選收益基金 美元A(Mdis)股
navs: # 净值模块
exrate: # 汇率,如果不开启,整个这块注释掉
- from: EUR # 需要转换的货币类型
ticker: EURUSD BGN Curncy # 汇率值的彭博ticker
asset-pool: # 资产池模块
asset-optimize: # 资产优选模块
sortino-weight: # sortino计算需要的权重,下面每一条为一次计算,e.g. months: 3, weight: 0.5 表示 3个月数据使用权重0.5来计算分值
- months: 3
weight: 0.5
- months: 6
weight: 0.3
- years: 1
weight: 0.2
asset-include: {'customType':[1,2,3,4]}
optimize-count: 3 #基金优选个数
annual-volatility-filter: #1各资产年化波动率末exclude位 2各资产年化波动率大于volatility
# - customType: 1
# min-retain: 2
# exclude: 1
# volatility: 1000
- customType: 2
min-retain: 2
exclude: 5
# volatility: 1000
# - customType: 3
# exclude: 0
# volatility: 1000
- customType: 4
min-retain: 1
exclude: 2
# volatility: 1000
annual-volatility-section: # 波动率时间区间
- years: 1
portfolios: # 投组模块
holder: # 持仓投组相关
init-nav: 100 # 初始金额
min-interval-days: 10 # 两次实际调仓最小间隔期,单位交易日
dividend-rate: 0.0 #设定年化配息率
dividend-date: 15 #配息日,每月15号
dividend-adjust-day: [1,4,7,10] #每年的首个季度调整配息
warehouse-frequency: 1 #每隔1个月调一次仓
warehouse-transfer-date: 1 #调仓日
redeem-list: [ 'TEUSAAU LX Equity', 'LIGTRAA ID Equity', 'TEMFHAC LX Equity', 'LUSHUAA ID Equity' ] #从持仓中的低风险资产“直接”按序赎回
solver: # 解算器相关
model: prr # 结算模型 ARC ,PRR, ~ 标准解算器
arc: on #是否开启ARC
brr: 0.0 #误差补偿值
trr: 3
tol: 1E-10 # 误差满足条件
navs: # 净值要求
range: # 需要净值数据的区间, days: 90 表示90自然日,months: 3 表示3个自然月
days: 90
max-nan: # 最大缺失净值条件
asset: 8 # 单一资产最多缺少多少交易日数据,则踢出资产池
day: 0.5 # 单一交易日最多缺少百分之多少净值,则删除该交易日
risk: [] # 资产风险等级要求,可分开写也可以合并写,e.g. risk:[ 2, 3 ] 则表示 所有投组资产风险等级都是 2 或 3
LARC: [0.35, 0.10, 0.05, 0.00] #低阈值
UARC: [0.70, 0.70, 0.70, 0.35] #高阈值
matrix-rtn-days: 20 # 计算回报率矩阵时,回报率滚动天数
asset-count: [5,5] # 投组资产个数。e.g. count 或 [min, max] 分别表示 最大最小都为count 或 最小为min 最大为max,另外这里也可以类似上面给不同风险等级分别配置
mpt: # mpt计算相关
cvar-beta: 0.2 # 计算Kbeta 需要用到
quantile: 0.9 # 分位点,也可以给不同风险等级分别配置
low-weight: 0.05 # 最低权重
high-weight: [ 0.35 ] # 最高权重比例,可给一个值,也可以给多个值,当多个值时,第一个表示只有一个资产时权重,第二个表示只有两个资产时权重,以此类推,最后一个表示其他资产个数时的权重
poem: # poem相关
cvar-scale-factor: 0.1 # 计算时用到的系数
checker: #投组检测模块
switch: on #是否开启检查
custom-type-priority: [3,2,1,4] # 检测优先级
month-fund-filter: {4:['XXX ID Equity']} # 'LMAOMPU ID Equity' 根据月份删除某几档基金,51勞動節:美盛西方資產亞洲機會債券基金 A 增益配息 (M) 美元
reports: # 报告模块相关
navs:
type: FUND
tickers:
- TEMTECI LX Equity
- TEPLX US Equity
- FRDPX US Equity
- FKRCX US Equity
- FTNRACU LX Equity
benchmark: # benchmark报告
ft:
init-amount: 100 # 初始金额
stock-rate: # stock型基金比例
RR3: 0.3
RR4: 0.5
RR5: 0.7
fixed-range: # 固定区间收益率
range-dates: # 固定起始截止日期
- start: 2008-01-01
end: 2008-10-27
- start: 2011-05-02
end: 2011-10-04
- start: 2013-05-08
end: 2013-06-24
- start: 2014-09-03
end: 2014-12-16
- start: 2015-05-21
end: 2016-02-11
- start: 2018-09-20
end: 2018-12-24
- start: 2020-02-19
end: 2020-03-23
- start: 2022-01-03
end: 2022-10-12
relative-range: # 相对区间收益率
range-dates: # 相对时间周期
- days: 1
name: '一天'
- weeks: 1
name: '一周'
- months: 1
name: '一月'
- months: 3
name: '三月'
- months: 6
name: '六月'
- years: 1
name: '一年'
- years: 2
name: '两年'
- years: 3
name: '三年'
- years: 5
name: '五年'
- years: 10
name: '十年'
- dates: ~
name: '成立以来'
exports:
backtest: # 回测导出曹策略
save-path: ${EXPORT_PATH:excels} # 导出报告文件存放路径,如果以./或者../开头,则会以执行python文件为根目录,如果以/开头,则为系统绝对路径,否则,以项目目录为根目录
file-name: ${EXPORT_FILENAME:real} # 导出报告的文件名
save-config: ${EXPORT_CONFIG:off} # 是否保存配置文件
include-report: # 需要导出的报告类型列表,下面的顺序,也代表了excel中sheet的顺序
# - funds-report # 基金资料
# - navs-report # 净值报告
- hold-report # 持仓报告
- signal-report # 信号报告
# - benckmark-report # benckmark报告
# - combo-report # 持仓对比
- indicators-report # 各种特殊指标报告
- fixed-range-report # 固定区间收益报告
- relative-range-report # 相对区间收益报告
- year-range-report # 单年区间业绩报告
# - month-div-rate-report # 月度配息率比较
# - year-div-rate-report # 年度配息率比较
real-daily:
file-name: MdisFoF_prr3(實盤)-每月投組推薦
include-report:
# - daily-hold-report
- daily-signal-report
email:
receives:
- brody_wu@chifufund.com
copies: ${DAILY_EMAIL_COPIES}
subject:
# default: "ROBO6_TAIBEI-实盘版-每日投組推薦_{today}"
rebalance: "MdisFoF_prr3(實盤)-每月投組推薦_{today}"
content:
# default: "Dear All: 附件是今天生成的推薦組合,請驗收,謝謝! 注>:該郵件為自動發送,如有問題請聯繫矽谷團隊 brody_wu@chifufund.com"
rebalance: "Dear All: 附檔為每月投資組合推薦,請驗收,謝謝! 注>:該郵件為自動發送,如有問題請聯繫矽谷團隊 brody_wu@chifufund.com"
daily-monitor:
file-name: svROBO6_monitor
include-report:
- name: relative-range-report # 相对区间收益报告
min-date: ~
- name: contribution-report # 贡献率报告
min-date: {days: 30}
- name: high-weight-report # 高风险资产占比
min-date: {days: 30}
- name: asset-pool-report # 基金池
min-date: {days: 30}
- name: combo-report # 持仓报告
min-date: {days: 40}
- name: mpt-report
min-date: {days: 30}
- name: signal-report
min-date: ~
- name: crisis-one-report
min-date: {days: 30}
- name: crisis-two-report
min-date: {days: 30}
- name: market-right-report
min-date: {days: 30}
- name: drift-buy-report
min-date: {days: 30}
email:
receives:
- wenwen.tang@thizgroup.com
copies: ${MONITOR_EMAIL_COPIES}
subject: "SVROBO6-实盘版-每日监测_{today}"
content: "Dear All: 附件是今天生成的监测数据,請驗收,謝謝! 注>:該郵件為自動發送,如有問題請聯繫矽谷團隊 telan_qian@chifufund.com"
robo-executor: # 执行器相关
use: ${ROBO_EXECUTOR:backtest} # 执行哪个执行器,优先取系统环境变量ROBO_EXECUTOR的值,默认backtest
sync-data: ${SYNC_DATA:on} # 是否开启同步资料数据
backtest: # 回测执行器相关
start-date: 2013-01-02 # 回测起始日期
end-date: 2023-12-29 # 回测截止日期
sealing-period: 10 #调仓封闭期
start-step: ${BACKTEST_START_STEP:1} # 回测从哪一步开始执行 1:计算资产池;2:计算最优投组:3:计算再平衡信号以及持仓投组
end-step: ${BACKTEST_END_STEP:3} # 回测从哪一步执行完成后结束执行 1:计算资产池;2:计算最优投组:3:计算再平衡信号以及持仓投组
clean-up: on
real: # 实盘执行器
export: ${EXPORT_ENABLE:on} # 是否开启报告
start-date: 2023-05-08 # 实盘开始时间
include-date: []
py-jftech:
logger:
version: 1
formatters:
brief:
format: "%(asctime)s - %(levelname)s - %(message)s"
simple:
format: "%(asctime)s - %(filename)s - %(levelname)s - %(message)s"
handlers:
console:
class: logging.StreamHandler
formatter: simple
level: DEBUG
stream: ext://sys.stdout
file:
class: logging.handlers.TimedRotatingFileHandler
level: INFO
formatter: brief
filename: ${LOG_FILE:logs/info.log}
interval: 1
backupCount: 30
encoding: utf8
when: D
# loggers:
# basic.sync:
# level: DEBUG
# handlers: [console]
# propagate: no
root:
level: ${LOG_LEVEL:INFO}
handlers: ${LOG_HANDLERS:[ console ]}
database:
host: ${MYSQL_HOST:192.168.68.85}
port: ${MYSQL_PORT:3306}
user: ${MYSQL_USER:root}
password: ${MYSQL_PWD:changeit}
dbname: ${MYSQL_DBNAME:stable_prr3} # stable_prr3
injectable:
names:
backtest: robo_executor.BacktestExecutor
datum: basic.datum.DefaultDatum
hold-report: portfolios.holder.DivHoldReportor
mpt: portfolios.builder.RiskParityARCPortfoliosBuilder # PoemARCPortfoliosBuilder
dividend-holder: portfolios.holder.InvTrustPortfoliosHolder
navs-sync: basic.sync.FundNavSync
email:
server: smtphz.qiye.163.com
user: jft-ra@thizgroup.com
password: 5dbb#30ec6d3
mulit-process:
max-workers: ${MAX_PROCESS:1}
basic: # 基础信息模块
sync:
start-date: 1990-01-01 # 同步数据开始日期
datum: # 资料模块
change:
date: ${DATUM_CHANGE_DATE}
file: ${DATUM_CHANGE_FILE}
excludes: # 排除的资料彭博ticker
backtest:
- 'LCUAGAA ID Equity' # 美盛凱利美國積極成長基金 A 美元 累積
# - 'TEMHYAH LX Equity'
# - 'FKRCX US Equity' # 富蘭克林黃金基金 美元 A(Ydis)
# - 'TEMHYAH LX Equity' # 富蘭克林坦伯頓全球投資系列-歐洲非投資等級債券基金 美元避險A(Mdis)股-H1
real:
- 'XXXX LX Equity'
# - 'TEMHYAH LX Equity' # 富蘭克林坦伯頓全球投資系列-歐洲非投資等級債券基金 美元避險A(Mdis)股-H1
navs: # 净值模块
exrate: # 汇率,如果不开启,整个这块注释掉
- from: EUR # 需要转换的货币类型
ticker: EURUSD BGN Curncy # 汇率值的彭博ticker
asset-pool: # 资产池模块
asset-optimize: # 资产优选模块
sortino-weight: # sortino计算需要的权重,下面每一条为一次计算,e.g. months: 3, weight: 0.5 表示 3个月数据使用权重0.5来计算分值
- months: 3
weight: 0.5
- months: 6
weight: 0.3
- years: 1
weight: 0.2
asset-include: {'customType':[1,2,3,4]}
optimize-count: 4 #基金优选个数
annual-volatility-filter: #1各资产年化波动率末exclude位 2各资产年化波动率大于volatility
- customType: 2 # none
min-retain: 4
exclude: 0
volatility: 1000
- customType: 3 # none
min-retain: 4
exclude: 0
volatility: 1000
annual-volatility-section: # 波动率时间区间
- years: 1
portfolios: # 投组模块
holder: # 持仓投组相关
init-nav: 100 # 初始金额
min-interval-days: 10 # 两次实际调仓最小间隔期,单位交易日
dividend-rate: 0.0 #设定年化配息率
dividend-date: 15 #配息日,每月15号
dividend-adjust-day: [1,4,7,10] #每年的首个季度调整配息
warehouse-frequency: 1 #每隔1个月调一次仓
warehouse-transfer-date: 1 #调仓日
redeem-list: [ 'TEUSAAU LX Equity', 'LIGTRAA ID Equity', 'TEMFHAC LX Equity', 'LUSHUAA ID Equity' ] #从持仓中的低风险资产“直接”按序赎回
solver: # 解算器相关
model: prr # 结算模型 ARC ,PRR, ~ 标准解算器
arc: on #是否开启ARC
brr: 0.0 #误差补偿值
trr: 3
tol: 1E-10 # 误差满足条件
navs: # 净值要求
range: # 需要净值数据的区间, days: 90 表示90自然日,months: 3 表示3个自然月
days: 90
max-nan: # 最大缺失净值条件
asset: 8 # 单一资产最多缺少多少交易日数据,则踢出资产池
day: 0.5 # 单一交易日最多缺少百分之多少净值,则删除该交易日
risk: [] # 资产风险等级要求,可分开写也可以合并写,e.g. risk:[ 2, 3 ] 则表示 所有投组资产风险等级都是 2 或 3
LARC: [0.15, 0.00, 0.00] #低阈值
UARC: [0.35, 0.75, 0.75] #高阈值
matrix-rtn-days: 20 # 计算回报率矩阵时,回报率滚动天数
asset-count: [5,5] # 投组资产个数。e.g. count 或 [min, max] 分别表示 最大最小都为count 或 最小为min 最大为max,另外这里也可以类似上面给不同风险等级分别配置
mpt: # mpt计算相关
cvar-beta: 0.2 # 计算Kbeta 需要用到
quantile: 0.9 # 分位点,也可以给不同风险等级分别配置
low-weight: 0.05 # 最低权重
high-weight: [ 0.50 ] # 最高权重比例,可给一个值,也可以给多个值,当多个值时,第一个表示只有一个资产时权重,第二个表示只有两个资产时权重,以此类推,最后一个表示其他资产个数时的权重
poem: # poem相关
cvar-scale-factor: 0.1 # 计算时用到的系数
checker: #投组检测模块
switch: off #是否开启检查
custom-type-priority: [3,2,1,4] # 检测优先级
month-fund-filter: {4:['XXX ID Equity']} # 'LMAOMPU ID Equity' 根据月份删除某几档基金,51勞動節:美盛西方資產亞洲機會債券基金 A 增益配息 (M) 美元
reports: # 报告模块相关
navs:
type: FUND
tickers:
- TEMTECI LX Equity
- TEPLX US Equity
- FRDPX US Equity
- FKRCX US Equity
- FTNRACU LX Equity
benchmark: # benchmark报告
ft:
init-amount: 100 # 初始金额
stock-rate: # stock型基金比例
RR3: 0.3
RR4: 0.5
RR5: 0.7
fixed-range: # 固定区间收益率
range-dates: # 固定起始截止日期
- start: 2008-01-01
end: 2008-10-27
- start: 2011-05-02
end: 2011-10-04
- start: 2013-05-08
end: 2013-06-24
- start: 2014-09-03
end: 2014-12-16
- start: 2015-05-21
end: 2016-02-11
- start: 2018-09-20
end: 2018-12-24
- start: 2020-02-19
end: 2020-03-23
- start: 2022-01-03
end: 2022-10-12
relative-range: # 相对区间收益率
range-dates: # 相对时间周期
- days: 1
name: '一天'
- weeks: 1
name: '一周'
- months: 1
name: '一月'
- months: 3
name: '三月'
- months: 6
name: '六月'
- years: 1
name: '一年'
- years: 2
name: '两年'
- years: 3
name: '三年'
- years: 5
name: '五年'
- years: 10
name: '十年'
- dates: ~
name: '成立以来'
exports:
backtest: # 回测导出曹策略
save-path: ${EXPORT_PATH:excels} # 导出报告文件存放路径,如果以./或者../开头,则会以执行python文件为根目录,如果以/开头,则为系统绝对路径,否则,以项目目录为根目录
file-name: ${EXPORT_FILENAME:real} # 导出报告的文件名
save-config: ${EXPORT_CONFIG:off} # 是否保存配置文件
include-report: # 需要导出的报告类型列表,下面的顺序,也代表了excel中sheet的顺序
# - funds-report # 基金资料
# - navs-report # 净值报告
- hold-report # 持仓报告
- signal-report # 信号报告
# - benckmark-report # benckmark报告
# - combo-report # 持仓对比
- indicators-report # 各种特殊指标报告
- fixed-range-report # 固定区间收益报告
- relative-range-report # 相对区间收益报告
- year-range-report # 单年区间业绩报告
# - month-div-rate-report # 月度配息率比较
# - year-div-rate-report # 年度配息率比较
real-daily:
file-name: SteadyFoF_prr3(實盤)-每月投組推薦
include-report:
# - daily-hold-report
- daily-signal-report
email:
receives:
- brody_wu@chifufund.com
copies: ${DAILY_EMAIL_COPIES}
subject:
# default: "ROBO6_TAIBEI-实盘版-每日投組推薦_{today}"
rebalance: "SteadyFoF_prr3(實盤)-每月投組推薦_{today}"
content:
# default: "Dear All: 附件是今天生成的推薦組合,請驗收,謝謝! 注>:該郵件為自動發送,如有問題請聯繫矽谷團隊 telan_qian@chifufund.com"
rebalance: "Dear All: 附檔為每月投資組合推薦,請驗收,謝謝! 注>:該郵件為自動發送,如有問題請聯繫矽谷團隊 brody_wu@chifufund.com"
daily-monitor:
file-name: svROBO6_monitor
include-report:
- name: relative-range-report # 相对区间收益报告
min-date: ~
- name: contribution-report # 贡献率报告
min-date: {days: 30}
- name: high-weight-report # 高风险资产占比
min-date: {days: 30}
- name: asset-pool-report # 基金池
min-date: {days: 30}
- name: combo-report # 持仓报告
min-date: {days: 40}
- name: mpt-report
min-date: {days: 30}
- name: signal-report
min-date: ~
- name: crisis-one-report
min-date: {days: 30}
- name: crisis-two-report
min-date: {days: 30}
- name: market-right-report
min-date: {days: 30}
- name: drift-buy-report
min-date: {days: 30}
email:
receives:
- wenwen.tang@thizgroup.com
copies: ${MONITOR_EMAIL_COPIES}
subject: "SVROBO6-实盘版-每日监测_{today}"
content: "Dear All: 附件是今天生成的监测数据,請驗收,謝謝! 注>:該郵件為自動發送,如有問題請聯繫矽谷團隊 telan_qian@chifufund.com"
robo-executor: # 执行器相关
use: ${ROBO_EXECUTOR:backtest} # 执行哪个执行器,优先取系统环境变量ROBO_EXECUTOR的值,默认backtest
sync-data: ${SYNC_DATA:on} # 是否开启同步资料数据
backtest: # 回测执行器相关
start-date: 2013-01-02 # 回测起始日期
end-date: 2023-12-29 # 回测截止日期
sealing-period: 10 #调仓封闭期
start-step: ${BACKTEST_START_STEP:1} # 回测从哪一步开始执行 1:计算资产池;2:计算最优投组:3:计算再平衡信号以及持仓投组
end-step: ${BACKTEST_END_STEP:3} # 回测从哪一步执行完成后结束执行 1:计算资产池;2:计算最优投组:3:计算再平衡信号以及持仓投组
clean-up: on
real: # 实盘执行器
export: ${EXPORT_ENABLE:on} # 是否开启报告
start-date: 2023-05-08 # 实盘开始时间
include-date: []
py-jftech:
logger:
version: 1
formatters:
brief:
format: "%(asctime)s - %(levelname)s - %(message)s"
simple:
format: "%(asctime)s - %(filename)s - %(levelname)s - %(message)s"
handlers:
console:
class: logging.StreamHandler
formatter: simple
level: DEBUG
stream: ext://sys.stdout
file:
class: logging.handlers.TimedRotatingFileHandler
level: INFO
formatter: brief
filename: ${LOG_FILE:logs/info.log}
interval: 1
backupCount: 30
encoding: utf8
when: D
# loggers:
# basic.sync:
# level: DEBUG
# handlers: [console]
# propagate: no
root:
level: ${LOG_LEVEL:INFO}
handlers: ${LOG_HANDLERS:[ console ]}
database:
host: ${MYSQL_HOST:192.168.68.85}
port: ${MYSQL_PORT:3306}
user: ${MYSQL_USER:root}
password: ${MYSQL_PWD:changeit}
dbname: ${MYSQL_DBNAME:jftech_robo}
injectable:
names:
backtest: robo_executor.BacktestExecutor
datum: basic.datum.DefaultDatum
hold-report: portfolios.holder.DivHoldReportor
mpt: portfolios.builder.PoemPortfoliosBuilder
dividend-holder: portfolios.holder.DividendPortfoliosHolder
navs-sync: basic.sync.FundNavSync
email:
server: smtphz.qiye.163.com
user: jft-ra@thizgroup.com
password: 5dbb#30ec6d3
mulit-process:
max-workers: ${MAX_PROCESS:4}
basic: # 基础信息模块
sync:
start-date: 2007-01-01 # 同步数据开始日期
datum: # 资料模块
change:
date: ${DATUM_CHANGE_DATE}
file: ${DATUM_CHANGE_FILE}
excludes: # 排除的资料彭博ticker
backtest:
- 'TEMUSGI LX Equity'
real:
- 'FGFSACU LX Equity'
- 'TEMUSGI LX Equity'
# navs: # 净值模块
# exrate: # 汇率,如果不开启,整个这块注释掉
# - from: EUR # 需要转换的货币类型
# ticker: EURUSD BGN Curncy # 汇率值的彭博ticker
asset-pool: # 资产池模块
asset-optimize: # 资产优选模块
sortino-weight: # sortino计算需要的权重,下面每一条为一次计算,e.g. months: 3, weight: 0.5 表示 3个月数据使用权重0.5来计算分值
- months: 3
weight: 0.5
- months: 6
weight: 0.3
- years: 1
weight: 0.2
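The sortino-weight list above implies a blended score: one Sortino ratio per lookback window, combined with the listed weights (0.5/0.3/0.2). The actual scoring lives in the asset-optimize module, which is not part of this dump; the sketch below is an assumed reading of that comment, with the Sortino ratio computed from daily returns.

import numpy as np


def sortino(daily_returns: np.ndarray, periods_per_year: int = 252) -> float:
    # annualised mean return divided by annualised downside deviation
    downside = daily_returns[daily_returns < 0]
    if downside.size == 0:
        return float('inf')
    dd = downside.std(ddof=0) * np.sqrt(periods_per_year)
    return daily_returns.mean() * periods_per_year / dd


def blended_score(returns_by_window: dict) -> float:
    # returns_by_window: {3: <3M daily returns>, 6: <6M daily returns>, 12: <1Y daily returns>}
    weights = {3: 0.5, 6: 0.3, 12: 0.2}   # mirrors the sortino-weight entries above
    return sum(w * sortino(returns_by_window[m]) for m, w in weights.items())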
asset-include: {'category':['US_STOCK','US_IG_BOND','US_HY_BOND']}
optimize-count: 3 #基金优选个数
portfolios: # 投组模块
holder: # 持仓投组相关
init-nav: 100 # 初始金额
min-interval-days: 10 # 两次实际调仓最小间隔期,单位交易日
dividend-rate: 0.09 #设定年化配息率
dividend-drift-rate: 0.1 #超过基准配息率上下10%触发配息率重置
dividend-date: 15 #配息日,每月15号
dividend-adjust-day: [1,4,7,10] #每年的首个季度调整配息
warehouse-frequency: 1 #每隔1个月调一次仓
warehouse-transfer-date: 1 #调仓日
solver: # 解算器相关
tol: 1E-10 # 误差满足条件
navs: # 净值要求
range: # 需要净值数据的区间, days: 90 表示90自然日,months: 3 表示3个自然月
days: 90
max-nan: # 最大缺失净值条件
asset: 8 # 单一资产最多缺少多少交易日数据,则踢出资产池
day: 0.5 # 单一交易日最多缺少百分之多少净值,则删除该交易日
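The max-nan rules read as a two-pass cleaning of the NAV matrix: drop any fund missing more than 8 trading days in the 90-day window, then drop any trading day on which more than half of the remaining funds have no NAV. The solver's actual implementation is not shown in this dump; below is a minimal pandas sketch of that reading.

import pandas as pd


def clean_navs(navs: pd.DataFrame, max_nan_asset: int = 8, max_nan_day: float = 0.5) -> pd.DataFrame:
    # navs: index = trading day, columns = fund id, NaN = missing NAV
    navs = navs.loc[:, navs.isna().sum() <= max_nan_asset]   # kick funds with too many gaps
    navs = navs[navs.isna().mean(axis=1) <= max_nan_day]     # drop days missing too many funds
    return navs.ffill()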
normal-ratio: #US_STOCK:US_HY_BOND:US_IG_BOND三者分别对应低中高风险所占比率
US_STOCK: [ 0.5, 0.5, 0.7 ]
US_HY_BOND: [ 0.4, 0.4, 0.2 ]
US_IG_BOND: [ 0.1, 0.1, 0.1 ]
riskctl-ratio:
US_STOCK: [ 0.2, 0.4, 0.6 ]
US_HY_BOND: [ 0.5, 0.3, 0.1 ]
US_IG_BOND: [ 0.3, 0.3, 0.3 ]
matrix-rtn-days: 20 # 计算回报率矩阵时,回报率滚动天数
asset-count: [3,3] # 投组资产个数。e.g. count 或 [min, max] 分别表示 最大最小都为count 或 最小为min 最大为max,另外这里也可以类似上面给不同风险等级分别配置
mpt: # mpt计算相关
cvar-beta: 0.2 # 计算Kbeta 需要用到
quantile: 0.9 # 分位点,也可以给不同风险等级分别配置
low-weight: 0.05 # 最低权重
# high-weight: [ 1 ] # 最高权重比例,可给一个值,也可以给多个值,当多个值时,第一个表示只有一个资产时权重,第二个表示只有两个资产时权重,以此类推,最后一个表示其他资产个数时的权重
poem: # poem相关
cvar-scale-factor: 0.1 # 计算时用到的系数
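The quantile and cvar-scale-factor values feed the target-setting in portfolios.solver further below: solve_mpt interpolates a return target between the min-variance and max-return solutions, and solve_poem shifts the CVaR floor away from the MPT CVaR by the scale factor. With illustrative solver outputs:

quantile, cvar_scale_factor = 0.9, 0.1
min_rtn, max_rtn = 0.02, 0.10               # illustrative solver outputs
base_cvar, max_cvar = -0.08, -0.03
big_y = min_rtn + quantile * (max_rtn - min_rtn)                               # 0.092, target return
small_y = base_cvar + (max_cvar - base_cvar) * cvar_scale_factor * quantile    # -0.0755, target CVaR floor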
checker: #投组检测模块
switch: off #是否开启检查
custom-type-priority: [ 3,2,1,4 ] # 检测优先级
reports: # 报告模块相关
navs:
type: FUND
tickers:
- TEMTECI LX Equity
- TEPLX US Equity
- FRDPX US Equity
- FKRCX US Equity
- FTNRACU LX Equity
benchmark: # benchmark报告
ft:
init-amount: 100 # 初始金额
stock-rate: # stock型基金比例
RR3: 0.3
RR4: 0.5
RR5: 0.7
fixed-range: # 固定区间收益率
range-dates: # 固定起始截止日期
- start: 2008-01-01
end: 2008-10-27
- start: 2011-05-02
end: 2011-10-04
- start: 2013-05-08
end: 2013-06-24
- start: 2014-09-03
end: 2014-12-16
- start: 2015-04-28
end: 2016-01-21
- start: 2018-01-26
end: 2018-10-29
- start: 2020-01-20
end: 2020-03-23
relative-range: # 相对区间收益率
range-dates: # 相对时间周期
- days: 1
name: '一天'
- weeks: 1
name: '一周'
- months: 1
name: '一月'
- months: 3
name: '三月'
- months: 6
name: '六月'
- years: 1
name: '一年'
- years: 2
name: '两年'
- years: 3
name: '三年'
- years: 5
name: '五年'
- years: 10
name: '十年'
- dates: ~
name: '成立以来'
exports:
backtest: # 回测导出策略
save-path: ${EXPORT_PATH:excels} # 导出报告文件存放路径,如果以./或者../开头,则会以执行python文件为根目录,如果以/开头,则为系统绝对路径,否则,以项目目录为根目录
file-name: ${EXPORT_FILENAME:real} # 导出报告的文件名
save-config: ${EXPORT_CONFIG:off} # 是否保存配置文件
include-report: # 需要导出的报告类型列表,下面的顺序,也代表了excel中sheet的顺序
# - funds-report # 基金资料
# - navs-report # 净值报告
- hold-report # 持仓报告
- signal-report # 信号报告
- benckmark-report # benckmark报告
- combo-report # 持仓对比
- indicators-report # 各种特殊指标报告
- fixed-range-report # 固定区间收益报告
- relative-range-report # 相对区间收益报告
- year-range-report # 单年区间业绩报告
- month-div-rate-report # 月度配息率比较
- year-div-rate-report # 年度配息率比较
real-daily:
file-name: svROBO5_portfolios
include-report:
- daily-hold-report
- daily-signal-report
email:
receives:
- wenwen.tang@thizgroup.com
copies: ${DAILY_EMAIL_COPIES}
subject:
default: "ROBO5_TAIBEI-实盘版-每日投組推薦_{today}"
rebalance: "ROBO5_TAIBEI-实盘版-每日投組推薦_{today}_今日有調倉信號!!!"
content:
default: "Dear All: 附件是今天生成的推薦組合,請驗收,謝謝! 注>:該郵件為自動發送,如有問題請聯繫矽谷團隊 telan_qian@chifufund.com"
rebalance: "Dear All: 附件是今天生成的推薦組合以及調倉信號,請驗收,謝謝! 注>:該郵件為自動發送,如有問題請聯繫矽谷團隊 telan_qian@chifufund.com"
daily-monitor:
file-name: svROBO5_monitor
include-report:
- name: relative-range-report # 相对区间收益报告
min-date: ~
- name: contribution-report # 贡献率报告
min-date: {days: 30}
- name: high-weight-report # 高风险资产占比
min-date: {days: 30}
- name: asset-pool-report # 基金池
min-date: {days: 30}
- name: combo-report # 持仓报告
min-date: {days: 40}
- name: mpt-report
min-date: {days: 30}
- name: signal-report
min-date: ~
- name: crisis-one-report
min-date: {days: 30}
- name: crisis-two-report
min-date: {days: 30}
- name: market-right-report
min-date: {days: 30}
- name: drift-buy-report
min-date: {days: 30}
email:
receives:
- wenwen.tang@thizgroup.com
copies: ${MONITOR_EMAIL_COPIES}
subject: "SVROBO5-实盘版-每日监测_{today}"
content: "Dear All: 附件是今天生成的监测数据,請驗收,謝謝! 注>:該郵件為自動發送,如有問題請聯繫矽谷團隊 telan_qian@chifufund.com"
robo-executor: # 执行器相关
use: ${ROBO_EXECUTOR:backtest} # 执行哪个执行器,优先取系统环境变量ROBO_EXECUTOR的值,默认backtest
sync-data: ${SYNC_DATA:off} # 是否开启同步资料数据
backtest: # 回测执行器相关
start-date: 2022-10-25 # 回测起始日期
end-date: 2023-06-01 # 回测截止日期
sealing-period: 10 #调仓封闭期
start-step: ${BACKTEST_START_STEP:1} # 回测从哪一步开始执行 1:计算资产池;2:计算最优投组:3:计算再平衡信号以及持仓投组
end-step: ${BACKTEST_END_STEP:3} # 回测从哪一步执行完成后结束执行 1:计算资产池;2:计算最优投组:3:计算再平衡信号以及持仓投组
clean-up: true
real: # 实盘执行器
export: ${EXPORT_ENABLE:on} # 是否开启报告
start-date: 2023-01-01 # 实盘开始时间
include-date: []
py-jftech:
logger:
version: 1
formatters:
brief:
format: "%(asctime)s - %(levelname)s - %(message)s"
simple:
format: "%(asctime)s - %(filename)s - %(levelname)s - %(message)s"
handlers:
console:
class: logging.StreamHandler
formatter: simple
level: DEBUG
stream: ext://sys.stdout
file:
class: logging.handlers.TimedRotatingFileHandler
level: INFO
formatter: brief
filename: ${LOG_FILE:logs/info.log}
interval: 1
backupCount: 30
encoding: utf8
when: D
# loggers:
# basic.sync:
# level: DEBUG
# handlers: [console]
# propagate: no
root:
level: ${LOG_LEVEL:INFO}
handlers: ${LOG_HANDLERS:[ console ]}
database:
host: ${MYSQL_HOST:192.168.68.85}
port: ${MYSQL_PORT:3306}
user: ${MYSQL_USER:root}
password: ${MYSQL_PWD:changeit}
dbname: ${MYSQL_DBNAME:j_robo} # mdiv_prr3
injectable:
names:
backtest: robo_executor.BacktestExecutor
datum: basic.datum.DefaultDatum
hold-report: portfolios.holder.DivHoldReportor
mpt: portfolios.builder.RiskParityARCPortfoliosBuilder
dividend-holder: portfolios.holder.InvTrustPortfoliosHolder
navs-sync: basic.sync.FundNavSync
email:
server: smtphz.qiye.163.com
user: jft-ra@thizgroup.com
password: 5dbb#30ec6d3
mulit-process:
max-workers: ${MAX_PROCESS:1}
basic: # 基础信息模块
sync:
start-date: 1990-01-01 # 同步数据开始日期
datum: # 资料模块
change:
date: ${DATUM_CHANGE_DATE}
file: ${DATUM_CHANGE_FILE}
excludes: # 排除的资料彭博ticker
# backtest:
# - 'TEMDLRI LX Equity' # 富蘭克林坦伯頓全球投資系列-美元短期票券基金 美元A(Mdis)股
# - 'LMWADMU ID Equity' # 美盛西方資產全球藍籌債券基金 A 美元 配息
# - 'TGEIADI LX Equity' # 富蘭克林坦伯頓全球投資系列-全球股票收益基金 美元A穩定月配息股
real:
- 'FGFSACU LX Equity'
navs: # 净值模块
exrate: # 汇率,如果不开启,整个这块注释掉
- from: EUR # 需要转换的货币类型
ticker: EURUSD BGN Curncy # 汇率值的彭博ticker
asset-pool: # 资产池模块
asset-optimize: # 资产优选模块
sortino-weight: # sortino计算需要的权重,下面每一条为一次计算,e.g. months: 3, weight: 0.5 表示 3个月数据使用权重0.5来计算分值
- months: 3
weight: 0.5
- months: 6
weight: 0.3
- years: 1
weight: 0.2
asset-include: {'customType':[1,2,3,4]}
optimize-count: 3 #基金优选个数
annual-volatility-filter: #1各资产年化波动率末exclude位 2各资产年化波动率大于volatility
- customType: 1
exclude: 2
min-retain: 2
# volatility: 100
- customType: 4
exclude: 5
min-retain: 2
# volatility: 111
annual-volatility-section: # 波动率时间区间
- years: 1
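Read together with the comment above, the filter appears to rank each customType bucket by 1-year annualised volatility and drop either the `exclude` most volatile names or anything above `volatility`, while never shrinking the bucket below `min-retain`. The asset-pool implementation is not part of this dump, so the following is only a sketch of that reading.

import pandas as pd


def volatility_filter(vols: pd.Series, exclude: int, min_retain: int, volatility: float = None) -> list:
    # vols: 1Y annualised volatility per fund id, one customType bucket at a time
    keep = vols.sort_values()                                   # lowest volatility first
    if volatility is not None:
        keep = keep[keep <= volatility]                         # hard volatility cap
    if exclude:
        keep = keep.iloc[:-exclude] if len(keep) > exclude else keep.iloc[:0]
    if len(keep) < min_retain:                                  # never shrink below min-retain
        keep = vols.sort_values().iloc[:min_retain]
    return list(keep.index)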
portfolios: # 投组模块
holder: # 持仓投组相关
init-nav: 100 # 初始金额
min-interval-days: 10 # 两次实际调仓最小间隔期,单位交易日
dividend-rate: 0.0 #设定年化配息率
dividend-date: 15 #配息日,每月15号
dividend-adjust-day: [1,4,7,10] #每年的首个季度调整配息
warehouse-frequency: 1 #每隔1个月调一次仓
warehouse-transfer-date: 1 #调仓日
redeem-list: [ 'TEUSAAU LX Equity', 'LIGTRAA ID Equity', 'TEMFHAC LX Equity', 'LUSHUAA ID Equity' ] #从持仓中的低风险资产“直接”按序赎回
solver: # 解算器相关
model: prr # 结算模型 ARC ,PRR, ~ 标准解算器
arc: on #是否开启ARC
brr: 0.02 #误差补偿值
trr: 3
tol: 1E-10 # 误差满足条件
navs: # 净值要求
range: # 需要净值数据的区间, days: 90 表示90自然日,months: 3 表示3个自然月
days: 90
max-nan: # 最大缺失净值条件
asset: 8 # 单一资产最多缺少多少交易日数据,则踢出资产池
day: 0.5 # 单一交易日最多缺少百分之多少净值,则删除该交易日
risk: [] # 资产风险等级要求,可分开写也可以合并写,e.g. risk:[ 2, 3 ] 则表示 所有投组资产风险等级都是 2 或 3
LARC: [0.30, 0.00, 0.00] #低阈值
UARC: [0.70, 0.70, 0.70] #高阈值
matrix-rtn-days: 20 # 计算回报率矩阵时,回报率滚动天数
asset-count: [5,5] # 投组资产个数。e.g. count 或 [min, max] 分别表示 最大最小都为count 或 最小为min 最大为max,另外这里也可以类似上面给不同风险等级分别配置
mpt: # mpt计算相关
cvar-beta: 0.2 # 计算Kbeta 需要用到
quantile: 0.9 # 分位点,也可以给不同风险等级分别配置
low-weight: 0.05 # 最低权重
high-weight: [ 0.35 ] # 最高权重比例,可给一个值,也可以给多个值,当多个值时,第一个表示只有一个资产时权重,第二个表示只有两个资产时权重,以此类推,最后一个表示其他资产个数时的权重
poem: # poem相关
cvar-scale-factor: 0.1 # 计算时用到的系数
checker: #投组检测模块
switch: on #是否开启检查
custom-type-priority: [3,2,1,4] # 检测优先级
month-fund-filter: {} # 根据月份删除某几档基金
reports: # 报告模块相关
navs:
type: FUND
tickers:
- TEMTECI LX Equity
- TEPLX US Equity
- FRDPX US Equity
- FKRCX US Equity
- FTNRACU LX Equity
benchmark: # benchmark报告
ft:
init-amount: 100 # 初始金额
stock-rate: # stock型基金比例
RR3: 0.3
RR4: 0.5
RR5: 0.7
fixed-range: # 固定区间收益率
range-dates: # 固定起始截止日期
- start: 2008-01-01
end: 2008-10-27
- start: 2011-05-02
end: 2011-10-04
- start: 2013-05-08
end: 2013-06-24
- start: 2014-09-03
end: 2014-12-16
- start: 2015-04-28
end: 2016-01-21
- start: 2018-01-26
end: 2018-10-29
- start: 2020-01-20
end: 2020-03-23
relative-range: # 相对区间收益率
range-dates: # 相对时间周期
- days: 1
name: '一天'
- weeks: 1
name: '一周'
- months: 1
name: '一月'
- months: 3
name: '三月'
- months: 6
name: '六月'
- years: 1
name: '一年'
- years: 2
name: '两年'
- years: 3
name: '三年'
- years: 5
name: '五年'
- years: 10
name: '十年'
- dates: ~
name: '成立以来'
exports:
backtest: # 回测导出策略
save-path: ${EXPORT_PATH:excels} # 导出报告文件存放路径,如果以./或者../开头,则会以执行python文件为根目录,如果以/开头,则为系统绝对路径,否则,以项目目录为根目录
file-name: ${EXPORT_FILENAME:real} # 导出报告的文件名
save-config: ${EXPORT_CONFIG:off} # 是否保存配置文件
include-report: # 需要导出的报告类型列表,下面的顺序,也代表了excel中sheet的顺序
# - funds-report # 基金资料
# - navs-report # 净值报告
- hold-report # 持仓报告
- signal-report # 信号报告
- benckmark-report # benckmark报告
- combo-report # 持仓对比
- indicators-report # 各种特殊指标报告
- fixed-range-report # 固定区间收益报告
- relative-range-report # 相对区间收益报告
- year-range-report # 单年区间业绩报告
- month-div-rate-report # 月度配息率比较
- year-div-rate-report # 年度配息率比较
real-daily:
file-name: svROBO6_portfolios
include-report:
- daily-hold-report
- daily-signal-report
email:
receives:
- wenwen.tang@thizgroup.com
copies: ${DAILY_EMAIL_COPIES}
subject:
default: "ROBO6_TAIBEI-实盘版-每日投組推薦_{today}"
rebalance: "ROBO6_TAIBEI-实盘版-每日投組推薦_{today}_今日有調倉信號!!!"
content:
default: "Dear All: 附件是今天生成的推薦組合,請驗收,謝謝! 注>:該郵件為自動發送,如有問題請聯繫矽谷團隊 telan_qian@chifufund.com"
rebalance: "Dear All: 附件是今天生成的推薦組合以及調倉信號,請驗收,謝謝! 注>:該郵件為自動發送,如有問題請聯繫矽谷團隊 telan_qian@chifufund.com"
daily-monitor:
file-name: svROBO6_monitor
include-report:
- name: relative-range-report # 相对区间收益报告
min-date: ~
- name: contribution-report # 贡献率报告
min-date: {days: 30}
- name: high-weight-report # 高风险资产占比
min-date: {days: 30}
- name: asset-pool-report # 基金池
min-date: {days: 30}
- name: combo-report # 持仓报告
min-date: {days: 40}
- name: mpt-report
min-date: {days: 30}
- name: signal-report
min-date: ~
- name: crisis-one-report
min-date: {days: 30}
- name: crisis-two-report
min-date: {days: 30}
- name: market-right-report
min-date: {days: 30}
- name: drift-buy-report
min-date: {days: 30}
email:
receives:
- wenwen.tang@thizgroup.com
copies: ${MONITOR_EMAIL_COPIES}
subject: "SVROBO6-实盘版-每日监测_{today}"
content: "Dear All: 附件是今天生成的监测数据,請驗收,謝謝! 注>:該郵件為自動發送,如有問題請聯繫矽谷團隊 telan_qian@chifufund.com"
robo-executor: # 执行器相关
use: ${ROBO_EXECUTOR:real} # 执行哪个执行器,优先取系统环境变量ROBO_EXECUTOR的值,默认backtest
sync-data: ${SYNC_DATA:on} # 是否开启同步资料数据
backtest: # 回测执行器相关
start-date: 2024-03-02 # 回测起始日期
end-date: 2024-04-11 # 回测截止日期
sealing-period: 10 #调仓封闭期
start-step: ${BACKTEST_START_STEP:1} # 回测从哪一步开始执行 1:计算资产池;2:计算最优投组:3:计算再平衡信号以及持仓投组
end-step: ${BACKTEST_END_STEP:3} # 回测从哪一步执行完成后结束执行 1:计算资产池;2:计算最优投组:3:计算再平衡信号以及持仓投组
clean-up: on
real: # 实盘执行器
export: ${EXPORT_ENABLE:on} # 是否开启报告
start-date: 2023-05-08 # 实盘开始时间
include-date: []
py-jftech:
logger:
version: 1
formatters:
brief:
format: "%(asctime)s - %(levelname)s - %(message)s"
simple:
format: "%(asctime)s - %(filename)s - %(levelname)s - %(message)s"
handlers:
console:
class: logging.StreamHandler
formatter: simple
level: DEBUG
stream: ext://sys.stdout
file:
class: logging.handlers.TimedRotatingFileHandler
level: INFO
formatter: brief
filename: ${LOG_FILE:logs/info.log}
interval: 1
backupCount: 30
encoding: utf8
when: D
# loggers:
# basic.sync:
# level: DEBUG
# handlers: [console]
# propagate: no
root:
level: ${LOG_LEVEL:INFO}
handlers: ${LOG_HANDLERS:[ console ]}
database:
host: ${MYSQL_HOST:192.168.68.85}
port: ${MYSQL_PORT:3306}
user: ${MYSQL_USER:root}
password: ${MYSQL_PWD:changeit}
dbname: ${MYSQL_DBNAME:usMarket_PRR4}
injectable:
names:
backtest: robo_executor.BacktestExecutor
datum: basic.datum.DefaultDatum
hold-report: portfolios.holder.DivHoldReportor
mpt: portfolios.builder.PoemARCPortfoliosBuilder
dividend-holder: portfolios.holder.InvTrustPortfoliosHolder
navs-sync: basic.sync.FundNavSync
email:
server: smtphz.qiye.163.com
user: jft-ra@thizgroup.com
password: 5dbb#30ec6d3
mulit-process:
max-workers: ${MAX_PROCESS:4}
basic: # 基础信息模块
sync:
start-date: 1990-01-01 # 同步数据开始日期
datum: # 资料模块
change:
date: ${DATUM_CHANGE_DATE}
file: ${DATUM_CHANGE_FILE}
excludes: # 排除的资料彭博ticker
backtest:
- 'LCUAGAA ID Equity' # 美盛凱利美國積極成長基金 A 美元 累積
# - 'FKRCX US Equity' # 富蘭克林黃金基金 美元 A(Ydis)
# - 'FTNRACU LX Equity' # 富蘭克林坦伯頓全球投資系列-天然資源基金 美元A(acc)股
# - 'TEMBDAI LX Equity' # 富蘭克林坦伯頓全球投資系列-生技領航基金 A(acc)股
real:
- 'XXXXXXX LX Equity'
- 'ZZZZZZZ LX Equity'
navs: # 净值模块
exrate: # 汇率,如果不开启,整个这块注释掉
- from: EUR # 需要转换的货币类型
ticker: EURUSD BGN Curncy # 汇率值的彭博ticker
asset-pool: # 资产池模块
asset-optimize: # 资产优选模块
sortino-weight: # sortino计算需要的权重,下面每一条为一次计算,e.g. months: 3, weight: 0.5 表示 3个月数据使用权重0.5来计算分值
- months: 3
weight: 0.5
- months: 6
weight: 0.3
- years: 1
weight: 0.2
asset-include: {'customType':[1,2,3,4]}
optimize-count: 3 #基金优选个数
annual-volatility-filter: #1各资产年化波动率末exclude位 2各资产年化波动率大于volatility
- customType: 1
min-retain: 4
exclude: 0
volatility: 1000
- customType: 2
min-retain: 4
exclude: 0
volatility: 1000
annual-volatility-section: # 波动率时间区间
- years: 1
portfolios: # 投组模块
holder: # 持仓投组相关
init-nav: 100 # 初始金额
min-interval-days: 10 # 两次实际调仓最小间隔期,单位交易日
dividend-rate: 0.0 #设定年化配息率
dividend-date: 15 #配息日,每月15号
dividend-adjust-day: [1,4,7,10] #每年的首个季度调整配息
warehouse-frequency: 1 #每隔1个月调一次仓
warehouse-transfer-date: 1 #调仓日
redeem-list: [ 'TEUSAAU LX Equity', 'LIGTRAA ID Equity', 'TEMFHAC LX Equity', 'LUSHUAA ID Equity' ] #从持仓中的低风险资产“直接”按序赎回
solver: # 解算器相关
model: prr # 结算模型 ARC ,PRR, ~ 标准解算器
arc: on #是否开启ARC
brr: 0.00 #误差补偿值
trr: 4
tol: 1E-10 # 误差满足条件
navs: # 净值要求
range: # 需要净值数据的区间, days: 90 表示90自然日,months: 3 表示3个自然月
days: 90
max-nan: # 最大缺失净值条件
asset: 8 # 单一资产最多缺少多少交易日数据,则踢出资产池
day: 0.5 # 单一交易日最多缺少百分之多少净值,则删除该交易日
risk: [] # 资产风险等级要求,可分开写也可以合并写,e.g. risk:[ 2, 3 ] 则表示 所有投组资产风险等级都是 2 或 3
LARC: [0.40, 0.00, 0.00, 0.00] #低阈值
UARC: [0.60, 0.50, 0.50, 0.20] #高阈值
matrix-rtn-days: 20 # 计算回报率矩阵时,回报率滚动天数
asset-count: [5,5] # 投组资产个数。e.g. count 或 [min, max] 分别表示 最大最小都为count 或 最小为min 最大为max,另外这里也可以类似上面给不同风险等级分别配置
mpt: # mpt计算相关
cvar-beta: 0.2 # 计算Kbeta 需要用到
quantile: 0.9 # 分位点,也可以给不同风险等级分别配置
low-weight: 0.05 # 最低权重
high-weight: [ 0.35 ] # 最高权重比例,可给一个值,也可以给多个值,当多个值时,第一个表示只有一个资产时权重,第二个表示只有两个资产时权重,以此类推,最后一个表示其他资产个数时的权重
poem: # poem相关
cvar-scale-factor: 0.1 # 计算时用到的系数
checker: # 投组检测模块,设计原因:期望投组同时出现FT&LM,实现每月配息2次
switch: off # 是否开启检查
custom-type-priority: [ 3,2,1,4 ] # 检测优先级
reports: # 报告模块相关
navs:
type: FUND
tickers:
- TEMTECI LX Equity
- TEPLX US Equity
- FRDPX US Equity
- FKRCX US Equity
- FTNRACU LX Equity
benchmark: # benchmark报告
ft:
init-amount: 100 # 初始金额
stock-rate: # stock型基金比例
RR3: 0.3
RR4: 0.5
RR5: 0.7
fixed-range: # 固定区间收益率
range-dates: # 固定起始截止日期
- start: 2008-01-01
end: 2008-10-27
- start: 2011-05-02
end: 2011-10-04
- start: 2013-05-08
end: 2013-06-24
- start: 2014-09-03
end: 2014-12-16
- start: 2015-04-28
end: 2016-01-21
- start: 2018-01-26
end: 2018-10-29
- start: 2020-01-20
end: 2020-03-23
relative-range: # 相对区间收益率
range-dates: # 相对时间周期
- days: 1
name: '一天'
- weeks: 1
name: '一周'
- months: 1
name: '一月'
- months: 3
name: '三月'
- months: 6
name: '六月'
- years: 1
name: '一年'
- years: 2
name: '两年'
- years: 3
name: '三年'
- years: 5
name: '五年'
- years: 10
name: '十年'
- dates: ~
name: '成立以来'
exports:
backtest: # 回测导出策略
save-path: ${EXPORT_PATH:excels} # 导出报告文件存放路径,如果以./或者../开头,则会以执行python文件为根目录,如果以/开头,则为系统绝对路径,否则,以项目目录为根目录
file-name: ${EXPORT_FILENAME:real} # 导出报告的文件名
save-config: ${EXPORT_CONFIG:off} # 是否保存配置文件
include-report: # 需要导出的报告类型列表,下面的顺序,也代表了excel中sheet的顺序
# - funds-report # 基金资料
# - navs-report # 净值报告
- hold-report # 持仓报告
- signal-report # 信号报告
# - benckmark-report # benckmark报告
- combo-report # 持仓对比
- indicators-report # 各种特殊指标报告
- fixed-range-report # 固定区间收益报告
- relative-range-report # 相对区间收益报告
- year-range-report # 单年区间业绩报告
# - month-div-rate-report # 月度配息率比较
# - year-div-rate-report # 年度配息率比较
real-daily:
file-name: USFoF_prr4(實盤)-每月投組推薦
include-report:
# - daily-hold-report
- daily-signal-report
email:
receives:
- brody_wu@chifufund.com
copies: ${DAILY_EMAIL_COPIES}
subject:
# default: "USFoF-PRR4-實盤版-每月投組推薦_{today}"
rebalance: "USFoF_prr4(實盤)-每月投組推薦_{today}"
content:
# default: "Dear All: 附件是每月生成的推薦組合,請驗收,謝謝! 注>:該郵件為自動發送,如有問題請聯繫矽谷團隊 telan_qian@chifufund.com"
rebalance: "Dear All: 附檔為每月投資組合推薦,請驗收,謝謝! 注>:該郵件為自動發送,如有問題請聯繫矽谷團隊 brody_wu@chifufund.com"
daily-monitor:
file-name: svROBO6_monitor
include-report:
- name: relative-range-report # 相对区间收益报告
min-date: ~
- name: contribution-report # 贡献率报告
min-date: {days: 30}
- name: high-weight-report # 高风险资产占比
min-date: {days: 30}
- name: asset-pool-report # 基金池
min-date: {days: 30}
- name: combo-report # 持仓报告
min-date: {days: 40}
- name: mpt-report
min-date: {days: 30}
- name: signal-report
min-date: ~
- name: crisis-one-report
min-date: {days: 30}
- name: crisis-two-report
min-date: {days: 30}
- name: market-right-report
min-date: {days: 30}
- name: drift-buy-report
min-date: {days: 30}
email:
receives:
- wenwen.tang@thizgroup.com
copies: ${MONITOR_EMAIL_COPIES}
subject: "SVROBO6-实盘版-每日监测_{today}"
content: "Dear All: 附件是今天生成的监测数据,請驗收,謝謝! 注>:該郵件為自動發送,如有問題請聯繫矽谷團隊 telan_qian@chifufund.com"
robo-executor: # 执行器相关
use: ${ROBO_EXECUTOR:backtest} # 执行哪个执行器,优先取系统环境变量ROBO_EXECUTOR的值,默认backtest
sync-data: ${SYNC_DATA:on} # 是否开启同步资料数据
backtest: # 回测执行器相关
start-date: 2013-01-02 # 回测起始日期
end-date: 2023-12-29 # 回测截止日期
sealing-period: 10 #调仓封闭期
start-step: ${BACKTEST_START_STEP:1} # 回测从哪一步开始执行 1:计算资产池;2:计算最优投组:3:计算再平衡信号以及持仓投组
end-step: ${BACKTEST_END_STEP:3} # 回测从哪一步执行完成后结束执行 1:计算资产池;2:计算最优投组:3:计算再平衡信号以及持仓投组
clean-up: on
real: # 实盘执行器
export: ${EXPORT_ENABLE:on} # 是否开启报告
start-date: 2023-05-08 # 实盘开始时间
include-date: []
from typing import List
from apscheduler.schedulers.blocking import BlockingScheduler
from py_jftech import autowired
from api import DataSync
from basic.sp500 import save_sp500


@autowired
def sync(syncs: List[DataSync] = None):
    for s in syncs:
        s.do_sync()


if __name__ == '__main__':
    sync()
    scheduler = BlockingScheduler()
    # 开启定时任务,每日抓取sp500数据
    scheduler.add_job(save_sp500, 'cron', day_of_week='0-6', hour=3, minute=55)
    scheduler.add_job(save_sp500, 'cron', day_of_week='0-6', hour=4, minute=0)
    scheduler.add_job(save_sp500, 'cron', day_of_week='0-6', hour=4, minute=55)
    scheduler.add_job(save_sp500, 'cron', day_of_week='0-6', hour=5, minute=0)
    scheduler.add_job(sync, 'cron', day_of_week='0-6', hour=8, minute=0)
    scheduler.start()
import json
import logging
from py_jftech import component, autowired, format_date
from pymysql import IntegrityError, constants
from api import PortfoliosBuilder, PortfoliosRisk, AssetPool, Navs, PortfoliosType, Datum, SolveType, SolverFactory, \
PortfoliosChecker
from portfolios.dao import robo_mpt_portfolios as rmp
logger = logging.getLogger(__name__)
@component(bean_name='mpt')
class MptPortfoliosBuilder(PortfoliosBuilder):
@autowired
def __init__(self, assets: AssetPool = None, navs: Navs = None, datum: Datum = None, factory: SolverFactory = None,
checker: PortfoliosChecker = None):
self._assets = assets
self._navs = navs
self._datum = datum
self._factory = factory
self._checker = checker
def get_portfolios(self, day, risk: PortfoliosRisk, type: PortfoliosType = PortfoliosType.NORMAL):
try:
portfolio = rmp.get_one(day, type, risk)
if not portfolio:
result = self.build_portfolio(day, type)
for build_risk, datas in result.items():
datas['portfolio'] = self._checker.check(day, json.loads(datas['portfolio']))
try:
rmp.insert({
**datas,
'risk': build_risk,
'type': type,
'date': day
})
except IntegrityError as e:
code, msg = e.args
if code != constants.ER.DUP_ENTRY:
raise e
portfolio = rmp.get_one(day, type, risk)
if SolveType(portfolio['solve']) is not SolveType.INFEASIBLE:
result = json.loads(portfolio['portfolio'])
return {int(x[0]): x[1] for x in result.items()}
return None
except Exception as e:
logger.exception(
f"build portfolio of type[{type.name}] and risk[{risk.name}] with date[{format_date(day)}] failure.", e)
raise e
def build_portfolio(self, day, type: PortfoliosType):
result = {}
portfolios = {}
for risk in PortfoliosRisk:
logger.info(
f"start to build protfolio of type[{type.name}] and risk[{risk.name}] with date[{format_date(day)}]")
solver = self._factory.create_solver(risk, type)
navs_group = solver.reset_navs(day)
for category, navs in navs_group.items():
# count = solver.get_config('asset-count')[0]
# nav_count = len(navs.columns)
# if count <= nav_count:
# pass
solver.set_navs(navs)
solver.set_category(category)
logger.debug({
'Khist': len(solver.rtn_history),
'beta': solver.get_config('mpt.cvar-beta'),
'Kbeta': solver.k_beta,
})
max_rtn, max_var, minCVaR_whenMaxR = solver.solve_max_rtn()
min_rtn, min_var, maxCVaR_whenMinV = solver.solve_min_rtn()
portfolio, cvar = solver.solve_mpt(min_rtn, max_rtn)
portfolios = {**portfolios, **portfolio}
result[risk] = {
'solve': SolveType.MPT,
'portfolio': json.dumps(portfolios),
} if portfolios else {
'solve': SolveType.INFEASIBLE
}
return result
def clear(self, day=None, risk: PortfoliosRisk = None):
rmp.delete(min_date=day, risk=risk)
@component(bean_name='mpt')
class PoemPortfoliosBuilder(MptPortfoliosBuilder):
def build_portfolio(self, day, type: PortfoliosType):
result = {}
portfolios = {}
for risk in PortfoliosRisk:
solver = self._factory.create_solver(risk, type)
navs_group = solver.reset_navs(day)
for category, navs in navs_group.items():
solver.set_navs(navs)
solver.set_category(category)
max_rtn, max_var, minCVaR_whenMaxR = solver.solve_max_rtn()
min_rtn, min_var, maxCVaR_whenMinV = solver.solve_min_rtn()
mpt_portfolio, mpt_cvar = solver.solve_mpt(min_rtn, max_rtn)
portfolio, cvar = solver.solve_poem(min_rtn, max_rtn, mpt_cvar, maxCVaR_whenMinV)
if not portfolio:
portfolio = mpt_portfolio
portfolios = {**portfolios, **portfolio}
if portfolios:
result[risk] = {
'solve': SolveType.POEM,
'portfolio': json.dumps(portfolios),
}
return result
@component(bean_name='mpt')
class MptARCPortfoliosBuilder(MptPortfoliosBuilder):
def get_portfolios(self, day, risk: PortfoliosRisk, type: PortfoliosType = PortfoliosType.NORMAL):
try:
portfolio = rmp.get_one(day, type, risk)
if not portfolio:
result, detail = self.build_portfolio(day, type)
for build_risk, datas in result.items():
datas['portfolio'] = self._checker.check(day, json.loads(datas['portfolio']))
try:
rmp.insert({
**datas,
'risk': build_risk,
'type': type,
'date': day
})
except IntegrityError as e:
code, msg = e.args
if code != constants.ER.DUP_ENTRY:
raise e
portfolio = rmp.get_one(day, type, risk)
if SolveType(portfolio['solve']) is not SolveType.INFEASIBLE:
result = json.loads(portfolio['portfolio'])
return {int(x[0]): x[1] for x in result.items()}
return None
except Exception as e:
logger.exception(
f"build protfolio of type[{type.name}] and risk[{risk.name}] with date[{format_date(day)}] failure.",
exc_info=e)
raise e
def build_portfolio(self, day, type: PortfoliosType):
result = {}
detail = {}
risk = PortfoliosRisk.FT3
logger.info(
f"start to build protfolio of type[{type.name}] and risk[{risk.name}] with date[{format_date(day)}]")
solver = self._factory.create_solver(risk, type)
solver.reset_navs(day)
logger.debug({
'Khist': len(solver.rtn_history),
'beta': solver.get_config('mpt.cvar-beta'),
'Kbeta': solver.k_beta,
})
max_rtn, max_var, minCVaR_whenMaxR = solver.solve_max_rtn()
min_rtn, min_var, maxCVaR_whenMinV = solver.solve_min_rtn()
portfolio, cvar = solver.solve_mpt(min_rtn, max_rtn)
result[risk] = {
'solve': SolveType.MPT,
'portfolio': json.dumps(portfolio),
'cvar': cvar
} if portfolio else {
'solve': SolveType.INFEASIBLE
}
detail[risk] = {
'max_rtn': max_rtn,
'max_var': max_var,
'minCVaR_whenMaxR': minCVaR_whenMaxR,
'min_rtn': min_rtn,
'min_var': min_var,
'maxCVaR_whenMinV': maxCVaR_whenMinV,
}
return result, detail
@component(bean_name='mpt')
class PoemARCPortfoliosBuilder(MptARCPortfoliosBuilder):
def build_portfolio(self, day, type: PortfoliosType):
result, detail = super(PoemARCPortfoliosBuilder, self).build_portfolio(day, type)
risk = PortfoliosRisk.FT3
# if result[risk]['solve'] is SolveType.INFEASIBLE:
# continue
solver = self._factory.create_solver(risk, type)
solver.reset_navs(day)
min_rtn = detail[risk]['min_rtn']
max_rtn = detail[risk]['max_rtn']
mpt_cvar = result[risk]['cvar']
maxCVaR_whenMinV = detail[risk]['maxCVaR_whenMinV']
portfolio, cvar = solver.solve_poem(min_rtn, max_rtn, mpt_cvar, maxCVaR_whenMinV)
if portfolio:
result[risk] = {
'solve': SolveType.POEM,
'portfolio': json.dumps(portfolio),
'cvar': cvar
}
detail[risk]['mpt_cvar'] = mpt_cvar
return result, detail
@component(bean_name='mpt')
class RiskParityARCPortfoliosBuilder(MptPortfoliosBuilder):
def build_portfolio(self, day, type: PortfoliosType):
result = {}
risk = PortfoliosRisk.FT3
logger.info(
f"start to build protfolio of type[{type.name}] and risk[{risk.name}] with date[{format_date(day)}]")
solver = self._factory.create_solver(risk, type)
solver.reset_navs(day)
portfolio = solver.solve_risk_parity()
result[risk] = {
'solve': SolveType.RISK_PARITY,
'portfolio': json.dumps(portfolio),
} if portfolio else {
'solve': SolveType.INFEASIBLE
}
return result
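For orientation, a hypothetical caller of the builders above, assuming py_jftech injects the configured `mpt` bean for PortfoliosBuilder; the date is illustrative. The returned mapping is fund id to weight, exactly as stored in the rmp_portfolio JSON column.

from datetime import date

from py_jftech import autowired

from api import PortfoliosBuilder, PortfoliosRisk, PortfoliosType


@autowired
def show_weights(builder: PortfoliosBuilder = None):
    weights = builder.get_portfolios(date(2023, 12, 29), PortfoliosRisk.FT3, PortfoliosType.NORMAL)
    # e.g. {121: 0.35, 122: 0.35, 123: 0.30}, or None when the day's solve was INFEASIBLE
    print(weights)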
import logging
from py_jftech import autowired, component, get_config
from api import AssetOptimize, PortfoliosChecker, Datum, Navs, DatumType
logger = logging.getLogger(__name__)
@component(bean_name='checker')
class DefaultPortfoliosChecker(PortfoliosChecker):
@autowired
def __init__(self, asset: AssetOptimize = None, navs: Navs = None, datum: Datum = None):
self._asset = asset
self._navs = navs
self._datum = datum
self._config = get_config(__name__)
def check(self, day=None, portfolios: dict = None):
if not self._config.get('switch'):
return portfolios
funds = self._datum.get_datums(type=DatumType.FUND)
company = {f"{fund['id']}": fund['companyType'] for fund in funds}
customType = {f"{fund['id']}": fund['customType'] for fund in funds}
companies = set(company[key] for key in portfolios.keys())
# 同时出现全部是ft或美盛基金的情况
if len(companies) == 1:
# step1: 检查原始投组的customType。检查顺序用列表呈现,依序进行
priority = self._config.get('custom-type-priority')
for p in priority:
# 找出对应优先级序列的基金列表
keys = [key for key in portfolios.keys() if customType[key] == p]
# 若存在匹配值则执行后跳出循环
if len(keys) > 0:
# 选取非同公司的、风险等级小于等于原基金的 基金
min_risk = min(fund['risk'] for fund in funds if str(fund['id']) in keys)
ids = [fund['id'] for fund in funds if fund['companyType'] != list(companies)[0] and
fund['risk'] <= min_risk]
if len(ids) == 0:
continue
best = self.find_highest_score(ids, day)
# 若刚好有一个匹配,直接替换
if len(keys) == 1:
portfolios[best] = portfolios[keys[0]]
# 删除原始键
del portfolios[keys[0]]
else:
# 算分,把分低的替换掉
scores = self.do_score(keys, day)
weight_scores = {key: scores[key] * portfolios[key] for key in keys}
lowest = min(scores, key=lambda k: weight_scores[k])
portfolios[best] = portfolios[lowest]
# 删除原始键
del portfolios[lowest]
break
return portfolios
def do_score(self, ids, day):
optimize = self._asset.find_optimize(fund_ids=ids, day=day)
scores = optimize[1].to_dict()
id_score = {}
for k, v in scores.items():
id_score[f'{ids[k]}'] = v
return id_score
def find_highest_score(self, ids, day):
optimize = self._asset.find_optimize(fund_ids=ids, day=day)
return optimize[0][0]
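A hypothetical example of what the checker above does when every holding comes from the same company: the fund matching the highest-priority customType is swapped for the best-scoring fund from another company with equal or lower risk, keeping the original weight. The ids and weights are invented for illustration.

# portfolios passed to check(): fund id -> weight, all funds here from one company
portfolios = {'101': 0.4, '102': 0.35, '103': 0.25}
# if customType priority [3, 2, 1, 4] matches fund 102 first, and fund 205 (another
# company, risk <= fund 102) has the highest optimize score, check() would return:
# {'101': 0.4, '205': 0.35, '103': 0.25}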
CREATE TABLE IF NOT EXISTS robo_mpt_portfolios
(
rmp_id BIGINT UNSIGNED NOT NULL AUTO_INCREMENT,
rmp_date DATETIME NOT NULL COMMENT '日期',
rmp_risk TINYINT NOT NULL COMMENT '风险等级',
rmp_type VARCHAR(255) NOT NULL COMMENT '投组类型',
rmp_rolve TINYINT NOT NULL COMMENT '求解方式',
rmp_portfolio JSON DEFAULT NULL COMMENT '投组权重',
rmp_cvar DOUBLE DEFAULT NULL COMMENT '投组cvar',
rmp_create_time DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
rmp_update_time DATETIME DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (rmp_id),
UNIQUE INDEX (rmp_date, rmp_risk, rmp_type),
INDEX (rmp_risk),
INDEX (rmp_type)
) ENGINE = InnoDB
AUTO_INCREMENT = 0
DEFAULT CHARSET = utf8mb4 COMMENT '最优投组表';
CREATE TABLE IF NOT EXISTS robo_hold_portfolios
(
`rhp_id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT,
`rhp_date` datetime NOT NULL COMMENT '日期',
`rhp_risk` tinyint(4) NOT NULL COMMENT '风险等级',
`rhp_rrs_id` bigint(20) UNSIGNED NULL DEFAULT NULL COMMENT '调仓信号id',
`rhp_rebalance` tinyint(4) NOT NULL DEFAULT 0 COMMENT '是否调仓',
`rhp_portfolios` json NOT NULL COMMENT '投组信息',
`rhp_fund_av` double(12, 4) NOT NULL COMMENT '投组原始净值,sum(个股原始净值*对应份额)',
`rhp_fund_nav` double(12, 4) NOT NULL DEFAULT 0.0000 COMMENT '基金被动配息做配股',
`rhp_nav` double(12, 4) NOT NULL COMMENT '复权净值',
`rhp_asset_nav` double(12, 4) NOT NULL COMMENT '产品净值,投顾模式:fund_av',
`rhp_div_forecast` double(12, 4) NOT NULL DEFAULT 0.0000 COMMENT '预配息金额',
`rhp_div_acc` double(12, 4) NOT NULL COMMENT '累计配息金额,投顾:acc(port_div + fund_div)',
`rhp_port_div` double(12, 4) NOT NULL COMMENT '主动配息',
`rhp_cash` double(12, 4) NOT NULL DEFAULT 0.0000 COMMENT '现金(产品的现金账户)',
`rhp_fund_div` double(12, 4) NOT NULL COMMENT '持有基金配息sum(个股每股配息*对应份额)',
`rhp_create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
`rhp_update_time` datetime NULL DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP,
`v_nav_div_acc` double(12, 4) GENERATED ALWAYS AS ((`rhp_asset_nav` + `rhp_div_acc`)) VIRTUAL COMMENT '产品累计净值 asset_nav+ acc_div' NOT NULL,
PRIMARY KEY (`rhp_id`) USING BTREE,
UNIQUE INDEX `rhp_date`(`rhp_date`, `rhp_risk`) USING BTREE,
INDEX `rhp_risk`(`rhp_risk`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 1 CHARACTER SET = utf8mb4 COLLATE = utf8mb4_general_ci COMMENT = '持仓投组表' ROW_FORMAT = Dynamic;
from py_jftech import read, where, write, format_date, mapper_columns
from api import PortfoliosRisk
__COLUMNS__ = {
'rhp_id': 'id',
'rhp_date': 'date',
'rhp_risk': 'risk',
'rhp_div_acc': 'div_acc',
'rhp_rrs_id': 'signal_id',
'rhp_rebalance': 'rebalance',
'rhp_portfolios': 'portfolios',
'rhp_nav': 'nav',
'rhp_cash': 'cash',
'rhp_fund_av': 'fund_av',
'rhp_fund_nav': 'fund_nav',
'rhp_fund_div': 'fund_div',
'rhp_div_forecast': 'div_forecast',
'rhp_asset_nav': 'asset_nav',
'rhp_port_div': 'port_div',
'v_nav_div_acc': 'acc_av',
}
@read
def get_list(risk: PortfoliosRisk = None, min_date=None, max_date=None, rebalance: bool = None):
sqls = []
if min_date:
sqls.append(f"rhp_date >= '{format_date(min_date)}'")
if max_date:
sqls.append(f"rhp_date <= '{format_date(max_date)}'")
return f'''
select {','.join([f'{x[0]} as {x[1]}' for x in __COLUMNS__.items()])} from robo_hold_portfolios
{where(*sqls, rhp_risk=risk, rhp_rebalance=rebalance)} order by rhp_risk, rhp_date
'''
@read(one=True)
def get_one(day, risk: PortfoliosRisk):
return f'''select {','.join([f'{x[0]} as {x[1]}' for x in __COLUMNS__.items()])} from robo_hold_portfolios {where(rhp_date=day, rhp_risk=risk)}'''
@read(one=True)
def get_last_one(risk: PortfoliosRisk = None, max_date=None, rebalance: bool = None, signal_id=None):
sql = f"rhp_date <= '{format_date(max_date)}'" if max_date else None
return f'''
select {','.join([f'{x[0]} as {x[1]}' for x in __COLUMNS__.items()])} from robo_hold_portfolios
{where(sql, rhp_risk=risk, rhp_rrs_id=signal_id, rhp_rebalance=rebalance)}
order by rhp_date desc limit 1
'''
def get_count(risk: PortfoliosRisk = None):
@read(one=True)
def exec():
return f'''select count(*) as `count` from robo_hold_portfolios {where(rhp_risk=risk)}'''
result = exec()
return result['count']
@write
def insert(datas):
datas = mapper_columns(datas=datas, columns=__COLUMNS__)
return f'''
insert into robo_hold_portfolios({','.join([x for x in datas.keys()])})
values ({','.join([f"'{x[1]}'" for x in datas.items()])})
'''
@write
def delete(min_date=None, risk: PortfoliosRisk = None):
if min_date is None and risk is None:
return 'truncate table robo_hold_portfolios'
else:
sql = f"rhp_date >= '{format_date(min_date)}'" if min_date else None
return f"delete from robo_hold_portfolios {where(sql, rhp_risk=risk)}"
from py_jftech import read, write, where, format_date, mapper_columns
from api import PortfoliosRisk, PortfoliosType
__COLUMNS__ = {
'rmp_id': 'id',
'rmp_date': 'date',
'rmp_risk': 'risk',
'rmp_type': 'type',
'rmp_rolve': 'solve',
'rmp_portfolio': 'portfolio',
'rmp_cvar': 'cvar',
'rmp_create_time': 'create_time'
}
@write
def insert(datas):
datas = mapper_columns(datas=datas, columns=__COLUMNS__)
return f'''
insert into robo_mpt_portfolios({','.join([x for x in datas.keys()])})
values ({','.join([f"'{x[1]}'" for x in datas.items()])})
'''
@write
def delete(min_date=None, risk: PortfoliosRisk = None):
if min_date is None and risk is None:
return 'truncate table robo_mpt_portfolios'
else:
sql = f"rmp_date >= '{format_date(min_date)}'" if min_date else None
return f"delete from robo_mpt_portfolios {where(sql, rmp_risk=risk)}"
@read(one=True)
def get_one(day, type: PortfoliosType, risk: PortfoliosRisk):
return f'''
select {','.join([f"{x[0]} as {x[1]}" for x in __COLUMNS__.items()])} from robo_mpt_portfolios
{where(rmp_date=day, rmp_risk=risk, rmp_type=type)}
'''
@read
def get_list(max_date=None, min_date=None, type: PortfoliosType = None, risk: PortfoliosRisk = None):
sqls = []
if max_date:
sqls.append(f"rmp_date <= '{format_date(max_date)}'")
if min_date:
sqls.append(f"rmp_date >= '{format_date(min_date)}'")
return f'''
select {','.join([f"{x[0]} as {x[1]}" for x in __COLUMNS__.items()])} from robo_mpt_portfolios
{where(*sqls, rmp_risk=risk, rmp_type=type)}
order by rmp_date
'''
@read(one=True)
def get_last_one(date=None, type: PortfoliosType = None, risk: PortfoliosRisk = None):
sqls = []
if date:
sqls.append(f"rmp_date <= '{format_date(date)}'")
return f'''
select {','.join([f"{x[0]} as {x[1]}" for x in __COLUMNS__.items()])} from robo_mpt_portfolios
{where(*sqls, rmp_risk=risk, rmp_type=type)}
order by rmp_date desc limit 1
'''
import datetime
import json
import logging
from datetime import datetime as dt, date
from functools import reduce
from typing import List
import pandas as pd
from py_jftech import (
component, autowired, get_config, next_workday, format_date, is_workday, prev_workday, workday_range
)
from api import PortfoliosHolder, PortfoliosRisk, Navs, RoboExecutor, PortfoliosType, PortfoliosBuilder, RoboReportor, \
DatumType, Datum, RebalanceSignal
from portfolios.dao import robo_hold_portfolios as rhp
from portfolios.utils import format_weight
logger = logging.getLogger(__name__)
@component(bean_name='dividend-holder')
class DividendPortfoliosHolder(PortfoliosHolder):
@autowired(names={'executor': RoboExecutor.use_name()})
def __init__(self, navs: Navs = None, executor: RoboExecutor = None, builder: PortfoliosBuilder = None,
datum: Datum = None, mpt: PortfoliosBuilder = None, signal: RebalanceSignal = None):
self._navs = navs
self._executor = executor
self._builder = builder
self._config = get_config(__name__)
self._datum = datum
self._mpt = mpt
self._signal = signal
def get_portfolio_type(self, day, risk: PortfoliosRisk) -> PortfoliosType:
return PortfoliosType.NORMAL
def get_last_rebalance_date(self, risk: PortfoliosRisk, max_date=None):
assert risk, f"get last rebalance date, risk can not be none"
last = rhp.get_last_one(max_date=max_date, risk=risk, rebalance=True)
return last['date'] if last else None
def get_rebalance_date_by_signal(self, signal_id):
last = rhp.get_last_one(signal_id=signal_id, rebalance=True)
return last['date'] if last else None
def get_portfolios_weight(self, day, risk: PortfoliosRisk):
hold = rhp.get_one(day, risk)
if hold:
result = json.loads(hold['portfolios'])['weight']
return {int(x[0]): x[1] for x in result.items()}
return None
def has_hold(self, risk: PortfoliosRisk) -> bool:
return rhp.get_count(risk=risk) > 0
def build_hold_portfolio(self, day, risk: PortfoliosRisk, force_mpt=False):
last_nav = rhp.get_last_one(max_date=day, risk=risk)
start = next_workday(last_nav['date']) if last_nav else self._executor.start_date
try:
while start <= day:
if force_mpt:
logger.info(f'start to get normal portfolio for date[{format_date(start)}]')
self._mpt.get_portfolios(day=prev_workday(start), type=PortfoliosType.NORMAL, risk=risk)
logger.info(f"start to build hold portfolio[{risk.name}] for date[{format_date(start)}]")
signal = self._signal.get_signal(prev_workday(start), risk)
if signal:
last_re_date = self.get_last_rebalance_date(risk=risk, max_date=start)
# 两次实际调仓最小间隔期,单位交易日
if last_re_date and len(workday_range(last_re_date, start)) <= self.interval_days:
self.no_rebalance(start, risk, last_nav)
else:
self.do_rebalance(start, risk, signal, last_nav)
else:
self.no_rebalance(start, risk, last_nav)
start = next_workday(start)
last_nav = rhp.get_last_one(max_date=day, risk=risk)
except Exception as e:
logger.exception(f"build hold portfolio[{risk.name}] for date[{format_date(start)}] failure.", e)
def do_rebalance(self, day, risk: PortfoliosRisk, signal, last_nav):
weight = {int(x[0]): x[1] for x in json.loads(signal['portfolio']).items()}
if last_nav:
share = {int(x): y for x, y in json.loads(last_nav['portfolios'])['share'].items()}
fund_div_tuple = self.get_navs_and_div(fund_ids=tuple(set(weight) | set(share)), day=day)
navs = fund_div_tuple[0]
fund_dividend = fund_div_tuple[1]
fund_dividend = sum(
map(lambda k: share[k] * fund_dividend[k], filter(lambda k: k in fund_dividend, share.keys())))
dividend_acc = last_nav['div_acc'] + fund_dividend
fund_av = round(sum([navs[x] * y for x, y in share.items()]), 4)
fund_nav = fund_av + dividend_acc
cash = last_nav['cash'] + fund_dividend
div_forecast = last_nav['div_forecast']
# 每年的首个季度调整配息
if day.month in self._config.get('dividend-adjust-day'):
asset_nav = last_nav['asset_nav']
# 配息率
div_rate = last_nav['div_forecast'] * 12 / asset_nav
# 年配息率减去配息率差值超过基准配息率上下10%触发配息率重置
if self.month_dividend > 0 and abs(
(self._config['dividend-rate'] - div_rate) / self._config['dividend-rate']) > \
self._config['dividend-drift-rate']:
# 以本月前一天的单位净值进行配息计算
div_forecast = last_nav['asset_nav'] * self.month_dividend
asset_nav = fund_av + cash
nav = last_nav['nav'] * asset_nav / last_nav['asset_nav']
share = {x: fund_av * w / navs[x] for x, w in weight.items()}
share_nav = {x: fund_nav * w / navs[x] for x, w in weight.items()}
else:
fund_av = self.init_nav
fund_div_tuple = self.get_navs_and_div(fund_ids=tuple(weight), day=day)
navs = fund_div_tuple[0]
fund_dividend = 0
cash = 0
div_forecast = fund_av * self.month_dividend
dividend_acc = 0
nav = self.init_nav
asset_nav = fund_av + cash
funds = self._datum.get_datums(type=DatumType.FUND)
funds_subscription_rate = {fund['id']: fund.get('subscriptionRate', 0) for fund in funds}
share = {x: (1 - funds_subscription_rate[x]) * (fund_av * w) / navs[x] for x, w in weight.items()}
share_nav = share
# 初始买入扣手续费
fee = sum(funds_subscription_rate[x] * (fund_av * w) for x, w in weight.items())
fund_av = fund_av - fee
fund_nav = fund_av
rhp.insert({
'date': day,
'risk': risk,
'signal_id': signal['id'],
'div_forecast': div_forecast if div_forecast else last_nav['div_forecast'] if last_nav else None,
'fund_div': fund_dividend,
'div_acc': dividend_acc,
'rebalance': True,
'portfolios': {
'weight': weight,
'weight_nav': weight,
'share': share,
'share_nav': share_nav,
},
'fund_av': fund_av,
'fund_nav': fund_nav,
'nav': nav,
'port_div': 0,
'cash': cash,
'asset_nav': asset_nav,
})
def no_rebalance(self, day, risk: PortfoliosRisk, last_nav):
share = {int(x): y for x, y in json.loads(last_nav['portfolios'])['share'].items()}
share_nav = {int(x): y for x, y in json.loads(last_nav['portfolios'])['share'].items()}
fund_div_tuple = self.get_navs_and_div(fund_ids=tuple(share), day=day)
navs = fund_div_tuple[0]
fund_dividend = fund_div_tuple[1]
# 配息当天配股
for k in share_nav.keys():
if k in fund_dividend:
share_nav[k] = (share_nav[k] * fund_dividend[k]) / (share_nav[k] * navs[k]) + share_nav[k]
fund_av = round(sum([navs[x] * y for x, y in share.items()]), 4)
fund_nav = round(sum([navs[x] * y for x, y in share_nav.items()]), 4)
weight = {x: round(y * navs[x] / fund_av, 2) for x, y in share.items()}
weight_nav = {x: round(y * navs[x] / fund_av, 2) for x, y in share_nav.items()}
weight = format_weight(weight)
weight_nav = format_weight(weight_nav)
port_div = 0
fund_dividend = sum(
map(lambda k: share[k] * fund_dividend[k], filter(lambda k: k in fund_dividend, share.keys())))
dividend_acc = last_nav['div_acc']
cash = last_nav['cash'] + fund_dividend
if self.is_dividend_date(day):
port_div = last_nav['div_forecast']
cash += port_div
asset_nav = fund_av + cash
dividend_acc += port_div
nav = last_nav['nav'] * (asset_nav + port_div) / last_nav['asset_nav']
else:
asset_nav = fund_av + cash
nav = last_nav['nav'] * asset_nav / last_nav['asset_nav']
rhp.insert({
'date': day,
'risk': risk,
'div_forecast': last_nav['div_forecast'],
'fund_div': fund_dividend,
'div_acc': dividend_acc,
'signal_id': last_nav['signal_id'],
'rebalance': False,
'portfolios': {
'weight': weight,
'weight_nav': weight_nav,
'share': share,
'share_nav': share_nav
},
'fund_av': fund_av,
'fund_nav': fund_nav,
'nav': nav,
'cash': cash,
'port_div': port_div,
'asset_nav': asset_nav,
})
def get_navs_and_div(self, day, fund_ids):
navs = pd.DataFrame(
self._navs.get_fund_navs(fund_ids=fund_ids, max_date=day, min_date=day - datetime.timedelta(22)))
dividend = navs.pivot_table(index='nav_date', columns='fund_id', values='dividend')
nav_cal = navs.pivot_table(index='nav_date', columns='fund_id', values='nav_cal')
navs = navs.pivot_table(index='nav_date', columns='fund_id', values='av')
navs.fillna(method='ffill', inplace=True)
nav_cal.fillna(method='ffill', inplace=True)
dividend.fillna(value=0, inplace=True)
dividend = dividend.reindex(pd.date_range(start=dividend.index.min(), end=day, freq='D'), fill_value=0)
return dict(navs.iloc[-1]), dict(dividend.iloc[-1]), dict(nav_cal.iloc[-1])
def clear(self, day=None, risk: PortfoliosRisk = None):
rhp.delete(min_date=day, risk=risk)
def is_dividend_date(self, day):
div_date = self._config['dividend-date']
div_date = date(day.year, day.month, div_date)
if is_workday(div_date):
return div_date.day == day.day
else:
return next_workday(div_date).day == day.day
@property
def month_dividend(self):
return self._config['dividend-rate'] / 12
@property
def interval_days(self):
return self._config['min-interval-days']
@property
def init_nav(self):
return self._config['init-nav']
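A worked example of the initial buy in do_rebalance above (no prior holding): shares are bought net of each fund's subscriptionRate and the total fee is deducted from the portfolio value. Numbers are illustrative.

init_nav = 100.0
weight = {121: 0.6, 122: 0.4}        # target weights
navs = {121: 25.0, 122: 10.0}        # per-unit NAV on the rebalance day
fee_rate = {121: 0.01, 122: 0.0}     # subscriptionRate from the fund datum

share = {x: (1 - fee_rate[x]) * init_nav * w / navs[x] for x, w in weight.items()}
# {121: 2.376, 122: 4.0}
fee = sum(fee_rate[x] * init_nav * w for x, w in weight.items())   # 0.6
fund_av = init_nav - fee                                           # 99.4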
@component(bean_name='dividend-holder')
class InvTrustPortfoliosHolder(DividendPortfoliosHolder):
def do_rebalance(self, day, risk: PortfoliosRisk, signal, last_nav):
weight = {int(x[0]): x[1] for x in json.loads(signal['portfolio']).items()}
dividend_acc = 0
fund_dividend = 0
if last_nav:
# 若非首次配息
share = {int(x): y for x, y in json.loads(last_nav['portfolios'])['share'].items()}
# 参与配息的基金份额
share_nav = {int(x): y for x, y in json.loads(last_nav['portfolios'])['share_nav'].items()}
share_nodiv_nav = {int(x): y for x, y in json.loads(last_nav['portfolios'])['share_nodiv_nav'].items()}
fund_div_tuple = self.get_navs_and_div(fund_ids=tuple(set(weight) | set(share)), day=day)
navs = fund_div_tuple[0]
fund_dividend = fund_div_tuple[1]
nav_cals = fund_div_tuple[2]
fund_dividend_nav = sum(
map(lambda k: share_nav[k] * fund_dividend[k], filter(lambda k: k in fund_dividend, share_nav.keys())))
fund_dividend = sum(
map(lambda k: share[k] * fund_dividend[k], filter(lambda k: k in fund_dividend, share.keys())))
dividend_acc = last_nav['div_acc'] + fund_dividend
fund_av = round(sum([navs[x] * y for x, y in share.items()]), 4)
fund_nav = round(sum([navs[x] * y for x, y in share_nav.items()]), 4)
nav = round(sum([nav_cals[x] * y for x, y in share_nodiv_nav.items()]), 4)
fund_nav += fund_dividend_nav
asset_nav = fund_av
share = {x: fund_av * w / navs[x] for x, w in weight.items()}
# 若调仓当日,有基金产生配息
share_nav = {x: fund_nav * w / navs[x] for x, w in weight.items()}
share_nodiv_nav = {x: nav * w / nav_cals[x] for x, w in weight.items()}
if self.is_transfer_workday(day):
div_forecast = asset_nav * self.month_dividend
else:
fund_av = self.init_nav
asset_nav = self.init_nav
nav = self.init_nav
fund_div_tuple = self.get_navs_and_div(fund_ids=tuple(weight), day=day)
navs = fund_div_tuple[0]
# 首次配息金额,做记录
div_forecast = 0
funds = self._datum.get_datums(type=DatumType.FUND)
funds_subscription_rate = {fund['id']: fund.get('subscriptionRate', 0) for fund in funds}
share = {x: (1 - funds_subscription_rate[x]) * (fund_av * w) / navs[x] for x, w in weight.items()}
nav_cals = fund_div_tuple[2]
share_nav = share
# 不考虑配息
share_nodiv_nav = {x: (1 - funds_subscription_rate[x]) * (fund_av * w) / nav_cals[x] for x, w in weight.items()}
# 初始买入扣手续费
fee = sum(funds_subscription_rate[x] * (fund_av * w) for x, w in weight.items())
fund_av = fund_av - fee
fund_nav = fund_av
rhp.insert({
'date': day,
'risk': risk,
'signal_id': signal['id'],
'fund_div': fund_dividend,
'div_forecast': div_forecast if div_forecast else last_nav['div_forecast'] if last_nav else None,
'div_acc': dividend_acc,
'rebalance': True,
'portfolios': {
'weight': weight,
'weight_nav': weight,
'weight_nodiv_nav': weight,
'share': share,
'share_nav': share_nav,
'share_nodiv_nav': share_nodiv_nav
},
'fund_av': fund_av,
'fund_nav': fund_nav,
'nav': nav,
'port_div': 0,
'asset_nav': asset_nav,
})
def is_transfer_workday(self, day):
transfer_date = self._config['warehouse-transfer-date']
# 获取当月第n天的日期
transfer_date = date(day.year, day.month, transfer_date)
first_work_day = transfer_date if is_workday(transfer_date) else next_workday(transfer_date)
return day.day == first_work_day.day
def no_rebalance(self, day, risk: PortfoliosRisk, last_nav):
port_div = 0
dividend_acc = last_nav['div_acc']
share = {int(x): y for x, y in json.loads(last_nav['portfolios'])['share'].items()}
share_nav = {int(x): y for x, y in json.loads(last_nav['portfolios'])['share_nav'].items()}
share_nodiv_nav = {int(x): y for x, y in json.loads(last_nav['portfolios'])['share_nodiv_nav'].items()}
fund_div_tuple = self.get_navs_and_div(fund_ids=tuple(share), day=day)
navs = fund_div_tuple[0]
fund_dividend = fund_div_tuple[1]
nav_cals = fund_div_tuple[2]
# 配息当天配股
for k in share_nav.keys():
if k in fund_dividend:
share_nav[k] = (share_nav[k] * fund_dividend[k]) / navs[k] + share_nav[k]
# 配息日当天取得调仓日计算的应调仓金额,做实际份额赎回,这里的金额(即月初计算的赎回金额)用于转换成“赎回目标的份额”
need_div = last_nav['div_forecast']
if self.is_dividend_date(day) and need_div > 0:
funds = self._datum.get_datums(type=DatumType.FUND, ticker=self._config['redeem-list'])
self.exec_redeem(funds, navs, need_div, share)
self.exec_redeem(funds, navs, need_div, share_nav)
port_div = last_nav['div_forecast']
fund_dividend = sum(
map(lambda k: share[k] * fund_dividend[k], filter(lambda k: k in fund_dividend, share.keys())))
dividend_acc = dividend_acc + port_div + fund_dividend
fund_av = round(sum([navs[x] * y for x, y in share.items()]), 4)
nav = round(sum([nav_cals[x] * y for x, y in share_nodiv_nav.items()]), 4)
fund_nav = round(sum([navs[x] * y for x, y in share_nav.items()]), 4)
weight = {x: round(y * navs[x] / fund_av, 2) for x, y in share.items()}
weight_nodiv_nav = {x: round(y * nav_cals[x] / nav, 2) for x, y in share_nav.items()}
weight_nav = {x: round(y * navs[x] / fund_av, 2) for x, y in share_nav.items()}
weight = format_weight(weight)
weight_nav = format_weight(weight_nav)
weight_nodiv_nav = format_weight(weight_nodiv_nav)
asset_nav = fund_av
div_forecast = last_nav['div_forecast']
if self.is_transfer_workday(day):
div_forecast = asset_nav * self.month_dividend
rhp.insert({
'date': day,
'risk': risk,
'fund_div': fund_dividend,
'div_forecast': div_forecast,
'div_acc': dividend_acc,
'signal_id': last_nav['signal_id'],
'rebalance': False,
'portfolios': {
'weight': weight,
'weight_nav': weight_nav,
'weight_nodiv_nav': weight_nodiv_nav,
'share': share,
'share_nav': share_nav,
'share_nodiv_nav': share_nodiv_nav
},
'fund_av': fund_av,
'fund_nav': fund_nav,
'nav': nav,
'port_div': port_div,
'asset_nav': asset_nav,
})
def exec_redeem(self, funds, navs, need_div, share):
# 获取需要配息的金额
for fund in funds:
if fund['id'] in share.keys():
# 按配息金额依次扣除对应基金份额
if share[fund['id']] * navs[fund['id']] <= need_div:
need_div = need_div - share[fund['id']] * navs[fund['id']]
share[fund['id']] = 0
else:
share[fund['id']] = (share[fund['id']] * navs[fund['id']] - need_div) / navs[fund['id']]
break
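A worked example of exec_redeem above: the payout is taken from the redeem-list funds in order, zeroing out a position whose value does not cover the remainder and partially redeeming the first fund that does. Numbers are illustrative.

share = {7: 2.0, 9: 5.0}    # shares held in the first two redeem-list funds
navs = {7: 10.0, 9: 10.0}
need_div = 30.0             # div_forecast to pay out

# fund 7: 2.0 * 10.0 = 20.0 <= 30.0 -> fully redeemed, 10.0 still needed
# fund 9: (5.0 * 10.0 - 10.0) / 10.0 = 4.0 shares remain, then the loop breaks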
@component(bean_name='hold-report')
class DivHoldReportor(RoboReportor):
@property
def report_name(self) -> str:
return '投組淨值'
def load_report(self, max_date=dt.today(), min_date=None) -> List[dict]:
holds = pd.DataFrame(rhp.get_list(max_date=max_date, min_date=min_date))
if not holds.empty:
holds['signal_type'] = 'INIT'
holds['real_av'] = holds['asset_nav']
holds = holds[
['date', 'signal_type', 'fund_av', 'fund_nav', 'fund_div', 'cash', 'real_av', 'port_div', 'div_acc',
'acc_av', 'nav']]
return holds.to_dict('records')
return []
@component(bean_name='daily-hold-report')
class DailyHoldReportor(RoboReportor):
@autowired
def __init__(self, datum: Datum = None):
self._datum = datum
@property
def report_name(self) -> str:
return '每日持倉信息'
def load_report(self, max_date=prev_workday(dt.today()), min_date=None) -> List[dict]:
holds = pd.DataFrame(rhp.get_list(max_date=max_date, min_date=min_date))
holds = holds[holds['date'].dt.date == max_date.date()]
if not holds.empty:
portfolio = rhp.get_last_one(max_date=max_date, rebalance=True)
datum_ids = reduce(lambda x, y: x | y,
holds['portfolios'].apply(lambda x: set(json.loads(x)['weight'].keys())))
datums = pd.DataFrame(self._datum.get_datums(type=DatumType.FUND, datum_ids=datum_ids))
datums.set_index('id', inplace=True)
holds['rebalance_type'] = holds.apply(lambda row: PortfoliosType.NORMAL.name, axis=1)
holds['rebalance_date'] = holds.apply(lambda row: prev_workday(portfolio['date']), axis=1)
holds['risk'] = holds.apply(lambda row: PortfoliosRisk(row['risk']).name, axis=1)
holds['portfolios'] = holds.apply(lambda row: [x for x in json.loads(row['portfolios'])['weight'].items()],
axis=1)
holds = holds.explode('portfolios', ignore_index=True)
holds['weight'] = holds.apply(lambda row: format(row['portfolios'][1], '.0%'), axis=1)
holds['asset_ids'] = holds.apply(lambda row: datums.loc[int(row['portfolios'][0])]['ftTicker'], axis=1)
holds['name'] = holds.apply(lambda row: datums.loc[int(row['portfolios'][0])]['chineseName'], axis=1)
holds['lipper_id'] = holds.apply(lambda row: datums.loc[int(row['portfolios'][0])]['lipperKey'], axis=1)
holds = holds[
['lipper_id', 'asset_ids', 'name', 'weight', 'risk', 'date', 'rebalance_type', 'rebalance_date']]
return holds.to_dict('records')
return []
import math
import os
import sys
from logging import DEBUG, getLogger
import numpy as np
import pandas as pd
from dateutil.relativedelta import relativedelta
from numpy import NAN
from py_jftech import component, autowired, get_config, filter_weekend
from pyomo.environ import *
from api import SolverFactory as Factory, PortfoliosRisk, PortfoliosType, AssetPool, Navs, Solver, Datum, DatumType
from portfolios.utils import format_weight
logger = getLogger(__name__)
def create_solver():
if sys.platform.find('win') == 0:
executor = 'bonmin.exe'
elif sys.platform == 'linux':
executor = 'bonmin_linux'
else:
executor = 'bonmin_mac'
return SolverFactory('Bonmin', executable=os.path.join(os.path.dirname(__file__), executor))
@component
class DefaultFactory(Factory):
def __init__(self):
self._config = get_config(__name__)
@property
def solver_model(self):
return self._config['model'].upper() if 'model' in self._config and self._config['model'] is not None else None
def create_solver(self, risk: PortfoliosRisk = None, type: PortfoliosType = PortfoliosType.NORMAL) -> Solver:
if self.solver_model == 'ARC':
return ARCSolver(type=type, risk=risk)
if self.solver_model == 'PRR':
if risk == PortfoliosRisk.FT3:
return PRRSolver(type=type, risk=risk)
return DefaultSolver(type=type, risk=risk)
class DefaultSolver(Solver):
@autowired
def __init__(self, type: PortfoliosType, risk: PortfoliosRisk, assets: AssetPool = None, navs: Navs = None,
datum: Datum = None):
self._category = None
self._transfer_type = None
self.__navs = None
self.risk = risk
self.type = type or PortfoliosType.NORMAL
self._assets = assets
self._navs = navs
self._datum = datum
self._config = get_config(__name__)
self._solver = create_solver()
self._solver.options['tol'] = float(self.get_config('tol') or 1E-10)
@property
def navs(self):
return self.__navs
@property
def rtn_matrix(self):
result = self.navs / self.navs.shift(self.get_config('matrix-rtn-days')) - 1
result.dropna(inplace=True)
return result
@property
def rtn_annualized(self):
return list(self.rtn_matrix.mean() * 12)
@property
def sigma(self):
rtn = (self.navs / self.navs.shift(1) - 1)[1:]
return rtn.cov() * 252
@property
def risk_parity_sigma(self):
return self.navs.cov()
@property
def rtn_history(self):
result = self.rtn_matrix * 12
return result.values
@property
def beta(self):
return self.get_config('mpt.cvar-beta')
@property
def k_beta(self):
return round(len(self.rtn_history) * self.beta + 0.499999)
@property
def quantile(self):
return self.get_config('mpt.quantile')
@property
def category(self):
return self._category
@property
def transfer_type(self):
self._transfer_type = self.get_config("normal-ratio")
return self._transfer_type
def set_navs(self, navs):
self.__navs = navs
def set_category(self, category):
self._category = category
def solve_max_rtn(self):
model = self.create_model()
model.objective = Objective(expr=sum([model.w[i] * self.rtn_annualized[i] for i in model.indices]),
sense=maximize)
self._solver.solve(model)
self.debug_solve_result(model)
max_rtn = self.calc_port_rtn(model)
max_var = self.calc_port_var(model)
minCVaR_whenMaxR = self.calc_port_cvar(model)
logger.debug({
'max_rtn': max_rtn,
'max_var': max_var,
'minCVaR_whenMaxR': minCVaR_whenMaxR,
})
return max_rtn, max_var, minCVaR_whenMaxR
def solve_min_rtn(self):
model = self.create_model()
model.objective = Objective(
expr=sum([model.w[i] * model.w[j] * self.sigma.iloc[i, j] for i in model.indices for j in model.indices]),
sense=minimize)
self._solver.solve(model)
self.debug_solve_result(model)
min_rtn = self.calc_port_rtn(model)
min_var = self.calc_port_var(model)
maxCVaR_whenMinV = self.calc_port_cvar(model)
logger.debug({
'min_rtn': min_rtn,
'min_var': min_var,
'maxCVaR_whenMinV': maxCVaR_whenMinV,
})
return min_rtn, min_var, maxCVaR_whenMinV
def solve_mpt(self, min_rtn, max_rtn):
logger.debug(f'...... ...... ...... ...... ...... ...... ...... ...... '
f'MPT ... sub risk : pct_value = {self.quantile}')
big_y = min_rtn + self.quantile * (max_rtn - min_rtn)
logger.debug(f'big_Y = target_Return = {big_y}')
model = self.create_model()
model.cons_rtn = Constraint(expr=sum([model.w[i] * self.rtn_annualized[i] for i in model.indices]) >= big_y)
model.objective = Objective(
expr=sum([model.w[i] * model.w[j] * self.sigma.iloc[i, j] for i in model.indices for j in model.indices]),
sense=minimize)
result = self._solver.solve(model)
if result.solver.termination_condition == TerminationCondition.infeasible:
logger.debug('...... MPT: Infeasible Optimization Problem.')
return None, None
logger.debug('...... MPT: Has solution.')
self.debug_solve_result(model)
return self.calc_port_weight(model), self.calc_port_cvar(model)
def solve_poem(self, min_rtn, max_rtn, base_cvar, max_cvar):
k_history = len(self.rtn_history)
quantile = self.quantile
logger.debug(f'...... ...... ...... ...... ...... ...... ...... ...... '
f'POEM With CVaR constraints ... sub risk : pct_value = {quantile}')
big_y = min_rtn + quantile * (max_rtn - min_rtn)
small_y = base_cvar + (max_cvar - base_cvar) * self.get_config('poem.cvar-scale-factor') * quantile
logger.debug(f'big_Y = target_Return = {big_y} | small_y = target_cvar = {small_y}')
model = self.create_model()
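# CVaR linearization: x[k] >= max(0, alpha - r_k) captures each scenario's shortfall
# below the free level alpha; the cons_cvar constraint below requires
# alpha - (1/k_beta) * sum(x) >= small_y, a linearized lower bound on the portfolio
# CVaR (cf. the historical CVaR in calc_port_cvar)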
model.alpha = Var(domain=Reals)
model.x = Var(range(k_history), domain=NonNegativeReals)
model.cons_cvar_aux = Constraint(range(k_history), rule=lambda m, k: m.x[k] >= m.alpha - sum(
[m.w[i] * self.rtn_history[k][i] for i in m.indices]))
model.cons_rtn = Constraint(expr=sum([model.w[i] * self.rtn_annualized[i] for i in model.indices]) >= big_y)
model.cons_cvar = Constraint(
expr=model.alpha - (1 / self.k_beta) * sum([model.x[k] for k in range(k_history)]) >= small_y)
result = self._solver.solve(model)
if result.solver.termination_condition == TerminationCondition.infeasible:
logger.debug('...... POEM: Infeasible Optimization Problem.')
return None, None
logger.debug('...... POEM: Has solution.')
self.debug_solve_result(model)
return self.calc_port_weight(model), self.calc_port_cvar(model)
def solve_risk_parity(self):
model = self.create_model()
model.objective = Objective(expr=sum(
[(model.z[i] * model.w[i] * (self.risk_parity_sigma.iloc[i] @ model.w) - model.z[j] * model.w[j] * (
self.risk_parity_sigma.iloc[j] @ model.w)) ** 2
for i in model.indices for j in model.indices]), sense=minimize)
self._solver.solve(model)
return self.calc_port_weight(model)
def calc_port_weight(self, model):
id_list = self.navs.columns
weight_list = []
for i in model.indices:
weight_list.append(model.w[i]._value * model.z[i]._value)
df_w = pd.DataFrame(data=weight_list, index=id_list, columns=['weight'])
df_w.replace(0, NAN, inplace=True)
df_w.dropna(axis=0, inplace=True)
df_w['weight'] = pd.Series(format_weight(dict(df_w['weight']), self.get_weight()))
dict_w = df_w.to_dict()['weight']
return dict_w
def calc_port_rtn(self, model):
return sum([model.w[i]._value * self.rtn_annualized[i] for i in model.indices])
def calc_port_var(self, model):
return sum([model.w[i]._value * model.w[j]._value * self.sigma.iloc[i, j] for i in model.indices for j in
model.indices])
def calc_port_cvar(self, model):
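# historical CVaR: mean of the k_beta worst realized portfolio returns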
port_r_hist = []
for k in range(len(self.rtn_history)):
port_r_hist.append(
sum([model.w[i]._value * model.z[i]._value * self.rtn_history[k][i] for i in model.indices]))
port_r_hist.sort()
return sum(port_r_hist[0: self.k_beta]) / self.k_beta
def get_weight(self):
# look up the weight config by the corresponding key under asset-include
return self.transfer_type[self.category][0]
def create_model(self):
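# mixed-integer model: w[i] is the weight of asset i and z[i] == 1 if asset i is selected;
# the constraints fix the total weight, bound the number of selected assets, and force
# low_weight <= w[i] <= high_weight whenever z[i] == 1 (and w[i] == 0 otherwise)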
count = self.get_config('asset-count')
min_count = count[0] if isinstance(count, list) else count
max_count = count[1] if isinstance(count, list) else count
min_count = min(min_count, len(self.rtn_annualized))
low_weight = self.get_config('mpt.low-weight')
high_weight = self.get_weight()
model = ConcreteModel()
model.indices = range(0, len(self.navs.columns))
model.w = Var(model.indices, domain=NonNegativeReals)
model.z = Var(model.indices, domain=Binary)
model.cons_sum_weight = Constraint(expr=sum([model.w[i] for i in model.indices]) == high_weight)
model.cons_num_asset = Constraint(
expr=inequality(min_count, sum([model.z[i] for i in model.indices]), max_count, strict=False))
model.cons_bounds_low = Constraint(model.indices, rule=lambda m, i: m.z[i] * low_weight <= m.w[i])
model.cons_bounds_up = Constraint(model.indices, rule=lambda m, i: m.z[i] * high_weight >= m.w[i])
return model
def reset_navs(self, day):
asset_ids = self._assets.get_pool(day)
datum = self._datum.get_datums(type=DatumType.FUND, datum_ids=asset_ids)
category = list(get_config('asset-pool')['asset-optimize']['asset-include'].keys())[0]
asset_ids_group = {k: [d['id'] for d in datum if d[category] == k] for k in set(d[category] for d in datum)}
navs_group = {}
for category, asset_ids in asset_ids_group.items():
min_date = day - relativedelta(**self.get_config('navs.range'))
navs = pd.DataFrame(self._navs.get_fund_navs(fund_ids=asset_ids, max_date=day, min_date=min_date))
navs['nav_date'] = pd.to_datetime(navs['nav_date'])
navs = navs[navs['nav_date'].dt.day_of_week < 5]
navs = navs.pivot_table(index='nav_date', columns='fund_id', values='nav_cal')
navs = navs.sort_index()
navs_nan = navs.isna().sum()
navs.drop(columns=[x for x in navs_nan.index if navs_nan.loc[x] >= self.get_config('navs.max-nan.asset')],
inplace=True)
navs_nan = navs.apply(lambda r: r.isna().sum() / len(r), axis=1)
navs.drop(index=[x for x in navs_nan.index if navs_nan.loc[x] >= self.get_config('navs.max-nan.day')],
inplace=True)
navs.fillna(method='ffill', inplace=True)
if navs.iloc[0].isna().sum() > 0:
navs.fillna(method='bfill', inplace=True)
navs_group[category] = navs
self.__navs = navs_group
return navs_group
def get_config(self, name):
def load_config(config):
for key in name.split('.'):
if key in config:
config = config[key]
else:
return None
return config
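# resolution order: the type-specific section first (for non-NORMAL types), then the
# base config; if the resolved value is a dict keyed by 'ft<risk>', the risk-specific
# entry is returned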
value = load_config(self._config[self.type.value] if self.type is not PortfoliosType.NORMAL else self._config)
if value is None:
value = load_config(self._config)
if value and isinstance(value, dict) and f'ft{self.risk.value}' in value:
    return value[f'ft{self.risk.value}']
return value
def debug_solve_result(self, model):
if logger.isEnabledFor(DEBUG):
logger.debug('===============================')
logger.debug('solution: id | w(id)')
w_sum = 0
for i in model.indices:
if model.z[i]._value == 1:
logger.debug(f'{self.navs.columns[i]} | {model.w[i]._value}')
w_sum += model.w[i]._value
logger.debug(f'w_sum = {w_sum}')
logger.debug({
'beta': self.beta,
'kbeta': self.k_beta,
'port_R': self.calc_port_rtn(model),
'port_V': self.calc_port_var(model),
'port_CVaR': self.calc_port_cvar(model)
})
logger.debug('-------------------------------')
class ARCSolver(DefaultSolver):
def __init__(self, type: PortfoliosType, risk: PortfoliosRisk, assets: AssetPool = None, navs: Navs = None,
datum: Datum = None):
super().__init__(type, risk)
self.__date = None
@property
def date(self):
return self.__date
def calc_port_weight(self, model):
id_list = self.navs.columns
weight_list = [model.w[i]._value * model.z[i]._value for i in model.indices]
df_w = pd.DataFrame(data=weight_list, index=id_list, columns=['weight'])
df_w.replace(0, math.nan, inplace=True)
df_w.dropna(axis=0, inplace=True)
df_w['weight'] = pd.Series(format_weight(dict(df_w['weight'])))
dict_w = df_w.to_dict()['weight']
return dict_w
@property
def max_count(self):
count = self.get_config('asset-count')
return count[1] if isinstance(count, list) else count
@property
def min_count(self):
count = self.get_config('asset-count')
return min(count[0] if isinstance(count, list) else count, len(self.rtn_annualized))
def create_model(self):
low_weight = self.get_config('mpt.low-weight')
high_weight = self.get_config('mpt.high-weight')
if isinstance(high_weight, list):
high_weight = high_weight[min(len(self.navs.columns), self.min_count, len(high_weight)) - 1]
model = ConcreteModel()
model.indices = range(0, len(self.navs.columns))
model.w = Var(model.indices, domain=NonNegativeReals)
model.z = Var(model.indices, domain=Binary)
model.cons_sum_weight = Constraint(expr=sum([model.w[i] for i in model.indices]) == 1)
model.cons_num_asset = Constraint(
expr=inequality(self.min_count, sum([model.z[i] for i in model.indices]), self.max_count, strict=False))
model.cons_bounds_low = Constraint(model.indices, rule=lambda m, i: m.z[i] * low_weight <= m.w[i])
model.cons_bounds_up = Constraint(model.indices, rule=lambda m, i: m.z[i] * high_weight >= m.w[i])
if self._config['arc']:
LARC = self._config['LARC']
UARC = self._config['UARC']
numARC = len(LARC) # this is the M in the doc
numAsset = len(self.navs.columns)
# asset risk category (customType) of each fund, loaded from the datum service
datums = self._datum.get_datums(type=DatumType.FUND, datum_ids=list(self.navs.columns))
AssetARC = np.array([x['customType'] for x in datums], dtype=int)
# LARC/UARC come from the config, the category assignments from the datum service
# the following builds the category exposure matrix A and the lower/upper bound constraints
A = np.zeros((numARC, numAsset), dtype=int)
for i in range(numAsset):
A[AssetARC[i] - 1, i] = 1
model.cons_arc_low = Constraint(range(numARC),
rule=lambda m, i: LARC[i] <= sum([A[i, j] * m.w[j] for j in m.indices]))
model.cons_arc_up = Constraint(range(numARC),
rule=lambda m, i: UARC[i] >= sum([A[i, j] * m.w[j] for j in m.indices]))
return model
def reset_navs(self, day):
self.__date = filter_weekend(day)
asset_ids = self._assets.get_pool(self.date)
asset_risk = self.get_config('navs.risk')
datum = self._datum.get_datums(type=DatumType.FUND, datum_ids=asset_ids, risk=asset_risk)
exclude = self.get_config('navs.exclude-asset-type') or []
asset_ids = list(set(asset_ids) & set([x['id'] for x in datum if x['assetType'] not in exclude]))
min_date = self.date - relativedelta(**self.get_config('navs.range'))
navs = pd.DataFrame(self._navs.get_fund_navs(fund_ids=asset_ids, max_date=self.date, min_date=min_date))
navs['nav_date'] = pd.to_datetime(navs['nav_date'])
navs = navs[navs['nav_date'].dt.day_of_week < 5]
navs = navs.pivot_table(index='nav_date', columns='fund_id', values='nav_cal')
navs = navs.sort_index()
navs_nan = navs.isna().sum()
navs.drop(columns=[x for x in navs_nan.index if navs_nan.loc[x] >= self.get_config('navs.max-nan.asset')],
inplace=True)
navs_nan = navs.apply(lambda r: r.isna().sum() / len(r), axis=1)
navs.drop(index=[x for x in navs_nan.index if navs_nan.loc[x] >= self.get_config('navs.max-nan.day')],
inplace=True)
navs.fillna(method='ffill', inplace=True)
if navs.iloc[0].isna().sum() > 0:
navs.fillna(method='bfill', inplace=True)
self.set_navs(navs)
class PRRSolver(ARCSolver):
def __init__(self, type: PortfoliosType, risk: PortfoliosRisk, assets: AssetPool = None, navs: Navs = None,
datum: Datum = None):
super().__init__(type, risk)
self.__risk = None
def create_model(self):
model = super(PRRSolver, self).create_model()
# collect the risk rating (RR) of each fund, in the same order as self.risks
RR = list(self.risks.values())
minRRweightWithinTRR = 0.7 + self._config['brr']
TRR = self._config['trr']
# the following builds the target-risk-rating (TRR) constraints from the fund risk ratings above
model.cons_TRR = Constraint(expr=sum([model.w[i] * RR[i] for i in model.indices]) <= TRR)
RR_LE_TRR = np.zeros(len(self.navs.columns), dtype=int)
RR_in_1_5 = np.zeros(len(self.navs.columns), dtype=int)
RR_EQ_5 = np.zeros(len(self.navs.columns), dtype=int)
for i in range(len(self.navs.columns)):
if RR[i] <= TRR:
RR_LE_TRR[i] = 1
if RR[i] > 1 and RR[i] < 5:
RR_in_1_5[i] = 1
elif RR[i] == 5:
RR_EQ_5[i] = 1
model.cons_RR_LE_TRR = Constraint(
expr=sum([model.w[i] * RR_LE_TRR[i] for i in model.indices]) >= minRRweightWithinTRR)
if TRR < 5:
model.cons_RR_in_1_5 = Constraint(
expr=sum([model.z[i] * (RR_in_1_5[i] * self.max_count - RR_EQ_5[i]) for i in model.indices]) >= 0)
return model
def reset_navs(self, day):
super(PRRSolver, self).reset_navs(day=day)
datums = self._datum.get_datums(type=DatumType.FUND, datum_ids=list(self.navs.columns))
self.__risk = {x['id']: x['risk'] for x in datums}
# self.__risk = {x['risk'] for x in datums}
@property
def risks(self):
return self.__risk
import logging
import unittest
from py_jftech import autowired, parse_date
from api import PortfoliosBuilder, PortfoliosType, PortfoliosRisk, PortfoliosHolder
class PortfoliosTest(unittest.TestCase):
logger = logging.getLogger(__name__)
@autowired(names={'builder': 'poem'})
def test_poem_build_portfolio(self, builder: PortfoliosBuilder = None):
result, detail = builder.build_portfolio(parse_date('2008-01-21'), PortfoliosType.NORMAL)
self.logger.info("portfolios: ")
for risk, portfolio in result.items():
self.logger.info(risk.name)
self.logger.info(portfolio)
self.logger.info(detail[risk])
@autowired(names={'builder': 'poem'})
def test_poem_get_portfolio(self, builder: PortfoliosBuilder = None):
portfolio = builder.get_portfolios(parse_date('2022-11-07'), PortfoliosRisk.FT9)
self.logger.info(portfolio)
@autowired(names={'hold': 'dividend-holder'})
def test_has_hold(self, hold: PortfoliosHolder = None):
self.logger.info(hold.has_hold(PortfoliosRisk.FT3))
@autowired(names={'hold': 'dividend-holder'})
def test_build_hold(self, hold: PortfoliosHolder = None):
hold.build_hold_portfolio(parse_date('2023-02-23'), PortfoliosRisk.FT9)
@autowired(names={'hold': 'dividend-holder'})
def test_clear(self, hold: PortfoliosHolder = None):
hold.clear()
if __name__ == '__main__':
unittest.main()
import pandas as pd
from py_jftech import autowired, get_config
from api import DatumType, Datum
risk_dict = {}
@autowired
def build_risk_dict(datum: Datum = None):
global risk_dict
if not risk_dict:
    funds = datum.get_datums(type=DatumType.FUND)
    risk_dict = {fund['id']: fund['risk'] for fund in funds}
def format_weight(weight: dict, to=1) -> dict:
"""
对权重的小数点进行截取,到指定权重
@param datum:
@param weight:
@param to: 指定权重
@return:
"""
# funds = datum.get_datums(type=DatumType.FUND)
# risk_dict = {fund['id']: fund['risk'] for fund in funds}
# risk = 0
# for k, v in weight.items():
# risk += risk_dict.get(int(k)) * v
# print(risk)
build_risk_dict()
weight_series = pd.Series(weight)
weight_series = weight_series.fillna(0)
weight_series = weight_series.apply(lambda x: round(x, 2))
if weight_series.sum() == to:
return dict(weight_series)
id_sort = sorted(weight_series.to_dict().keys(), key=lambda x: risk_dict.get(int(x)))
low = get_config('portfolios.solver.mpt.low-weight')
high = get_config('portfolios.solver.mpt.high-weight')[0]
# lowest-risk fund that still has room below the high bound
minidx = [i for i in id_sort if weight_series[i] < high][0]
# highest-risk fund whose weight is above the low bound
maxidx = [i for i in id_sort if weight_series[i] > low][-1]
if weight_series.sum() < to:
weight_series[minidx] += to - weight_series.sum()
elif weight_series.sum() > to:
weight_series[maxidx] += to - weight_series.sum()
return dict(weight_series.apply(lambda x: round(float(x), 2)))
if __name__ == '__main__':
format_weight({"5": 0.35, "6": 0.35, "10": 0.1, "11": 0.16, "22": 0.05})
import json
from abc import ABC
from datetime import datetime as dt
from datetime import timedelta
from functools import reduce
from typing import List
import pandas as pd
from py_jftech import component, autowired, get_config, prev_workday, workday_range
from py_jftech import is_workday
from api import PortfoliosBuilder
from api import (
PortfoliosRisk, RebalanceSignal, SignalType, PortfoliosType, PortfoliosHolder,
RoboReportor, Datum, DatumType
)
from rebalance.dao import robo_rebalance_signal as rrs
@component(bean_name='base-signal')
class BaseRebalanceSignal(RebalanceSignal, ABC):
@autowired
def __init__(self, builder: PortfoliosBuilder = None):
self._builder = builder
def get_signal(self, day, risk: PortfoliosRisk):
signal = rrs.get_one(type=self.signal_type, risk=risk, date=day)
if signal:
return signal
trigger = self.need_rebalance(day, risk)
if trigger:
portfolio_type = self.portfolio_type
portfolio = self._builder.get_portfolios(day, risk, portfolio_type)
id = rrs.insert({
'date': day,
'type': self.signal_type,
'risk': risk,
'portfolio_type': portfolio_type,
'portfolio': portfolio,
'effective': 1
})
return rrs.get_by_id(id)
return None
def need_rebalance(self, day, risk: PortfoliosRisk) -> bool:
# if there is no previous signal, treat the given date as the initial date and build
signal = rrs.get_last_one(day, risk, SignalType.NORMAL, effective=None)
if signal:
frequency = get_config('portfolios')['holder']['warehouse-frequency']
transfer_date = get_config('portfolios')['holder']['warehouse-transfer-date']
date = pd.to_datetime(signal['date'].replace(day=transfer_date))
# the signal date is past the transfer date, so the schedule rolls into the next month
if signal['date'].day > transfer_date:
if rrs.get_count(risk=PortfoliosRisk.FT3, effective=True) > 0:
date = date + pd.DateOffset(months=1)
date = date + pd.DateOffset(months=frequency)
date = date - timedelta(days=1)
# snap to the workday at the end of the period
date = date if is_workday(date) else prev_workday(date)
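# example with hypothetical config values: warehouse-frequency = 1 and
# warehouse-transfer-date = 25 turn a signal dated 2023-01-10 into a next check date
# of 2023-02-24 (transfer date of the following month minus one day, snapped to a workday)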
return date == day or signal['date'] == day
else:
return True
@property
def portfolio_type(self):
return self.signal_type.p_type
@property
def signal_type(self) -> SignalType:
return SignalType.NORMAL
def get_last_signal(self, day, risk: PortfoliosRisk):
last_re = rrs.get_last_one(max_date=day, risk=risk, effective=True)
return last_re
def clear(self, min_date=None, risk: PortfoliosRisk = None):
rrs.delete(min_date=min_date, risk=risk)
@component(bean_name='signal-report')
class SignalReportor(RoboReportor):
@autowired
def __init__(self, hold: PortfoliosHolder = None, datum: Datum = None):
self._hold = hold
self._datum = datum
@property
def report_name(self) -> str:
return '調倉信號'
def load_report(self, max_date=dt.today(), min_date=None) -> List[dict]:
result = []
datums = {str(x['id']): x for x in self._datum.get_datums(type=DatumType.FUND, exclude=False)}
for signal in rrs.get_list(max_date=max_date, min_date=prev_workday(min_date), effective=True):
rebalance_date = self._hold.get_rebalance_date_by_signal(signal['id'])
if rebalance_date:
for fund_id, weight in json.loads(signal['portfolio']).items():
result.append({
'risk': PortfoliosRisk(signal['risk']).name,
'type': SignalType(signal['type']).name,
'signal_date': signal['date'],
'rebalance_date': rebalance_date,
'portfolio_type': PortfoliosType(signal['portfolio_type']).name,
'ft_ticker': datums[fund_id]['ftTicker'],
'bloomberg_ticker': datums[fund_id]['bloombergTicker'],
'fund_name': datums[fund_id]['chineseName'],
'weight': weight
})
return result
@component(bean_name='daily-signal-report')
class DailySignalReportor(RoboReportor):
@autowired
def __init__(self, hold: PortfoliosHolder = None, datum: Datum = None):
self._hold = hold
self._datum = datum
@property
def report_name(self) -> str:
return '每月調倉信號'
def load_report(self, max_date=prev_workday(dt.today()), min_date=None) -> List[dict]:
signals = pd.DataFrame(rrs.get_list(max_date=max_date, min_date=min_date))
if not signals.empty:
    signals = signals[(signals['date'].dt.date == max_date.date())]
if not signals.empty:
datum_ids = reduce(lambda x, y: x | y, signals['portfolio'].apply(lambda x: set(json.loads(x).keys())))
datums = pd.DataFrame(self._datum.get_datums(type=DatumType.FUND, datum_ids=datum_ids))
datums.set_index('id', inplace=True)
signals['risk'] = signals.apply(lambda row: PortfoliosRisk(row['risk']).name, axis=1)
signals['rebalance_type'] = signals.apply(lambda row: SignalType(row['type']).name, axis=1)
signals['portfolio_type'] = signals.apply(lambda row: PortfoliosType(row['portfolio_type']).name, axis=1)
signals['portfolio'] = signals.apply(lambda row: [x for x in json.loads(row['portfolio']).items()], axis=1)
signals = signals.explode('portfolio', ignore_index=True)
signals['weight'] = signals.apply(lambda row: format(row['portfolio'][1], '.0%'), axis=1)
signals['asset_ids'] = signals.apply(lambda row: datums.loc[int(row['portfolio'][0])]['ftTicker'], axis=1)
signals['name'] = signals.apply(lambda row: datums.loc[int(row['portfolio'][0])]['chineseName'], axis=1)
signals['lipper_id'] = signals.apply(lambda row: datums.loc[int(row['portfolio'][0])]['lipperKey'], axis=1)
signals = signals[['lipper_id', 'asset_ids', 'name', 'weight', 'risk', 'date', 'rebalance_type']]
return signals.to_dict('records')
return []
CREATE TABLE IF NOT EXISTS robo_rebalance_signal
(
rrs_id BIGINT UNSIGNED NOT NULL AUTO_INCREMENT,
rrs_date DATETIME NOT NULL COMMENT '信号日期',
rrs_type TINYINT NOT NULL COMMENT '信号类型',
rrs_risk TINYINT NOT NULL COMMENT '风险等级',
rrs_p_type VARCHAR(255) DEFAULT NULL COMMENT '投组类型',
rrs_p_weight JSON DEFAULT NULL COMMENT '投组信息',
rrs_effective TINYINT NOT NULL DEFAULT 0 COMMENT '是否生效',
rrs_create_time DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
rrs_update_time DATETIME DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (rrs_id),
INDEX (rrs_date),
INDEX (rrs_type),
INDEX (rrs_risk)
) ENGINE = InnoDB
AUTO_INCREMENT = 0
DEFAULT CHARSET = utf8mb4 COMMENT '再平衡信号表';
from py_jftech import read, write, where, format_date, mapper_columns, to_tuple
from api import SignalType, PortfoliosRisk
__COLUMNS__ = {
'rrs_id': 'id',
'rrs_date': 'date',
'rrs_type': 'type',
'rrs_risk': 'risk',
'rrs_p_type': 'portfolio_type',
'rrs_p_weight': 'portfolio',
'rrs_effective': 'effective',
'rrs_create_time': 'create_time',
}
@read
def get_list(min_date=None, max_date=None, risk: PortfoliosRisk = None, type: SignalType = None, effective: bool = None):
sqls = []
if min_date:
sqls.append(f"rrs_date >= '{format_date(min_date)}'")
if max_date:
sqls.append(f"rrs_date <= '{format_date(max_date)}'")
return f'''
select {','.join([f"{x[0]} as {x[1]}" for x in __COLUMNS__.items()])} from robo_rebalance_signal
{where(*sqls, rrs_risk=risk, rrs_type=type, rrs_effective=effective)} order by rrs_risk, rrs_date
'''
@read
def get_by_ids(ids):
return f'''select {','.join([f"{x[0]} as {x[1]}" for x in __COLUMNS__.items()])} from robo_rebalance_signal {where(rrs_id=to_tuple(ids))}'''
@read(one=True)
def get_by_id(id):
return f'''select {','.join([f"{x[0]} as {x[1]}" for x in __COLUMNS__.items()])} from robo_rebalance_signal {where(rrs_id=id)}'''
@read(one=True)
def get_one(type: SignalType, risk: PortfoliosRisk, date):
return f'''
select {','.join([f"{x[0]} as {x[1]}" for x in __COLUMNS__.items()])} from robo_rebalance_signal
{where(rrs_date=date, rrs_type=type, rrs_risk=risk)}
'''
@read(one=True)
def get_first_after(type: SignalType, risk: PortfoliosRisk, min_date, effective=None):
return f'''
select {','.join([f"{x[0]} as {x[1]}" for x in __COLUMNS__.items()])} from robo_rebalance_signal
{where(f"rrs_date >= '{format_date(min_date)}'", rrs_type=type, rrs_risk=risk, rrs_effective=effective)} order by rrs_date limit 1
'''
@read(one=True)
def get_last_one(max_date, risk: PortfoliosRisk, type: SignalType = None, effective=None):
return f'''
select {','.join([f"{x[0]} as {x[1]}" for x in __COLUMNS__.items()])} from robo_rebalance_signal
{where(f"rrs_date <= '{format_date(max_date)}'", rrs_type=type, rrs_risk=risk, rrs_effective=effective)} order by rrs_date desc limit 1
'''
def get_count(risk: PortfoliosRisk = None, day=None, effective=None):
@read(one=True)
def exec():
return f"select count(*) as `count` from robo_rebalance_signal {where(rrs_risk=risk, rrs_date=day, rrs_effective=effective)}"
result = exec()
return result['count']
@write
def insert(datas):
datas = mapper_columns(datas=datas, columns=__COLUMNS__)
return f'''
insert into robo_rebalance_signal({','.join([x for x in datas.keys()])})
values ({','.join([f"'{x[1]}'" for x in datas.items()])})
'''
@write
def update(id, datas):
datas = mapper_columns(datas=datas, columns=__COLUMNS__)
return f'''
update robo_rebalance_signal
set {','.join([f"{x[0]} = '{x[1]}'" for x in datas.items()])}
where rrs_id = {id}
'''
@write
def delete_by_id(id):
return f"delete from robo_rebalance_signal where rrs_id = {id}"
@write
def delete(min_date=None, risk: PortfoliosRisk = None):
if min_date is None and risk is None:
return 'truncate table robo_rebalance_signal'
else:
sql = f"rrs_date >= '{format_date(min_date)}'" if min_date else None
return f"delete from robo_rebalance_signal {where(sql, rrs_risk=risk)}"
import json
from datetime import datetime as dt
from typing import List
from urllib.parse import urlencode
import pandas as pd
import requests
from py_jftech import component, filter_weekend, next_workday, get_config, format_date
from api import RoboReportor
from reports.dao import robo_benckmark as rb
config = get_config(__name__)
@component(bean_name='benckmark-report')
class BenchmarkAlligamReportor(RoboReportor):
@property
def report_name(self) -> str:
return 'BENCHMARK_ALLIGAM'
@property
def module_name(self) -> str:
return 'divrobo'
@property
def risk(self):
return 'alligam'
@property
def base_params(self):
return {
'subjectKeys': 879,
'size': 200,
'sourceType': 'BLOOMBERG'
}
def sync_benchmark(self, start_date=None):
params = {
**self.base_params,
'page': 0
}
if start_date:
params['startDate'] = format_date(start_date)
while True:
response = requests.get(f'http://jdcprod.thiztech.com/api/datas/asset-value?{urlencode(params)}').json()
if not response['success']:
raise Exception(f'''request jdc alligam failed: {response['status']}''')
rb.batch_insert([{
'date': dt.fromtimestamp(x['date'] / 1000),
'module': self.module_name,
'risk': self.risk,
'nav': x['calibrateValue'],
'remarks': json.dumps({
'av': x['originValue'],
'div': x['dividend'] if 'dividend' in x else 0
}, ensure_ascii=False)
} for x in response['body']['content']])
if response['body']['last']:
break
else:
params = {**params, 'page': params['page'] + 1}
def load_report(self, max_date=dt.today(), min_date=None) -> List[dict]:
max_date = filter_weekend(max_date)
min_date = filter_weekend(min_date) if min_date else None
last = rb.get_last_one(module=self.module_name, risk=self.risk, max_date=max_date)
if not last or last['date'] < max_date:
self.sync_benchmark(start_date=next_workday(last['date']) if last else None)
result = pd.DataFrame(rb.get_list(max_date=max_date, min_date=min_date))
result['av'] = result['remarks'].apply(lambda x: json.loads(x)['av'])
result['div'] = result['remarks'].apply(lambda x: json.loads(x)['div'])
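# accumulated value: the original asset value plus all dividends paid up to each date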
result['acc'] = result.apply(lambda row: result[result['date'] <= row['date']]['div'].sum() + row['av'], axis=1)
result = result[['date', 'av', 'div', 'acc', 'nav']]
result.rename(columns={'nav': f'{self.risk}_nav', 'av': f'{self.risk}_av', 'div': f'{self.risk}_div', 'acc': f'{self.risk}_acc'}, inplace=True)
return result.to_dict('records')
from datetime import datetime as dt
from typing import List
import pandas as pd
from py_jftech import component, autowired
from api import RoboReportor
@component(bean_name='combo-report')
class DivAlligamComboDatasReportor(RoboReportor):
@autowired(names={'hold_reportor': 'hold-report', 'benchmark': 'benckmark-report'})
def __init__(self, hold_reportor: RoboReportor = None, benchmark: RoboReportor = None):
self._hold_reportor = hold_reportor
self._benchmark = benchmark
@property
def report_name(self) -> str:
return '对比报告'
def load_report(self, max_date=dt.today(), min_date=None) -> List[dict]:
holds = pd.DataFrame(self._hold_reportor.load_report(max_date=max_date, min_date=min_date))
if not holds.empty:
holds.set_index('date', inplace=True)
holds = holds[['real_av', 'acc_av', 'nav', 'fund_nav']]
holds.rename(columns={'real_av': 'av', 'acc_av': 'acc'}, inplace=True)
benchmark = pd.DataFrame(self._benchmark.load_report(max_date=max_date, min_date=min_date))
benchmark.set_index('date', inplace=True)
benchmark = benchmark[['alligam_av', 'alligam_acc', 'alligam_nav']]
datas = holds.join(benchmark)
datas.fillna(method='ffill', inplace=True)
datas.dropna(inplace=True)
datas.reset_index(inplace=True)
return datas.to_dict('records')
return []
import math
from datetime import datetime as dt
from typing import List
import pandas as pd
from py_jftech import component, autowired, filter_weekend, prev_workday
from api import RoboReportor, PortfoliosRisk, PortfoliosHolder, Datum, DatumType, Navs, RoboExecutor
@component(bean_name='contribution-report')
class ContributionReportor(RoboReportor):
@autowired
def __init__(self, hold: PortfoliosHolder = None, datum: Datum = None, navs: Navs = None, exec: RoboExecutor = None):
self._hold = hold
self._datum = datum
self._navs = navs
self._exec = exec
@property
def report_name(self) -> str:
return '贡献率'
def load_report(self, max_date=dt.today(), min_date=None) -> List[dict]:
max_date = filter_weekend(max_date)
min_date = filter_weekend(min_date) if min_date is not None else self._exec.start_date
result = pd.DataFrame()
for risk in PortfoliosRisk.values():
buy_date = None
sell_date = max_date
while buy_date is None or sell_date > min_date:
last_date = sell_date if sell_date == max_date else prev_workday(sell_date)
buy_date = self._hold.get_last_rebalance_date(risk=risk, max_date=last_date)
weight = self._hold.get_portfolios_weight(day=last_date, risk=risk)
datums = pd.DataFrame(self._datum.get_datums(type=DatumType.FUND, datum_ids=tuple(weight.keys())))
datums = datums[['id', 'ftTicker', 'bloombergTicker', 'chineseName']]
datums.columns = ['id', 'ft_ticker', 'bloomberg_ticker', 'name']
datums['ratio'] = datums.apply(lambda row: weight[row.id], axis=1)
datums['hold'] = (sell_date - buy_date).days
navs = pd.DataFrame(self._navs.get_fund_navs(fund_ids=tuple(weight.keys()), max_date=sell_date, min_date=buy_date))
navs = navs.pivot_table(columns='fund_id', index='nav_date', values='nav_cal')
rtns = navs.iloc[-1] / navs.iloc[0] - 1
rtns.name = 'rtns'
datums = datums.join(rtns, on='id')
datums['risk'] = risk.name
datums['buy_date'] = buy_date
datums['sell_date'] = sell_date if sell_date != max_date else math.nan
datums.drop('id', axis=1, inplace=True)
result = pd.concat([result, datums], ignore_index=True)
sell_date = buy_date if buy_date < sell_date else prev_workday(buy_date)
return result.to_dict('records') if not result.empty else []
DROP TABLE IF EXISTS robo_benchmark;
CREATE TABLE IF NOT EXISTS robo_benchmark
(
rb_id BIGINT UNSIGNED NOT NULL AUTO_INCREMENT,
rb_module VARCHAR(255) NOT NULL COMMENT '模块',
rb_date DATETIME NOT NULL COMMENT '日期',
rb_risk VARCHAR(255) NOT NULL COMMENT '风险等级',
rb_nav DOUBLE(16, 4) NOT NULL COMMENT '资产值',
rb_remarks JSON DEFAULT NULL COMMENT '其他信息',
rb_create_time DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
rb_update_time DATETIME DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (rb_id),
UNIQUE INDEX (rb_module, rb_date, rb_risk),
INDEX (rb_date, rb_risk),
INDEX (rb_risk)
) ENGINE = InnoDB
AUTO_INCREMENT = 0
DEFAULT CHARSET = utf8mb4 COMMENT 'BENCHMARK数据表';
ALTER TABLE robo_benchmark ADD COLUMN v_rb_re TINYINT GENERATED ALWAYS AS (IF(rb_remarks->>'$.re' = 'true', 1, 0)) COMMENT '是否再分配' AFTER rb_remarks;
ALTER TABLE robo_benchmark ADD INDEX v_rb_re(`v_rb_re`);
ALTER TABLE robo_benchmark DROP INDEX v_rb_re;
ALTER TABLE robo_benchmark DROP COLUMN v_rb_re;
DROP TABLE IF EXISTS robo_data_logger;
CREATE TABLE IF NOT EXISTS robo_data_logger
(
rdl_id BIGINT UNSIGNED NOT NULL AUTO_INCREMENT,
rdl_date DATETIME NOT NULL COMMENT '日期',
rdl_risk VARCHAR(255) NOT NULL COMMENT '风险等级',
rdl_type VARCHAR(255) NOT NULL COMMENT '数据类别',
rdl_datas JSON NOT NULL COMMENT '日志数据',
rdl_create_time DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
rdl_update_time DATETIME DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (rdl_id),
UNIQUE INDEX (rdl_date, rdl_risk, rdl_type),
INDEX (rdl_risk, rdl_type),
INDEX (rdl_type)
) ENGINE = InnoDB
AUTO_INCREMENT = 0
DEFAULT CHARSET = utf8mb4 COMMENT '数据日志表';
from py_jftech import read, write, where, mapper_columns, format_date
__COLUMNS__ = {
'rb_id': 'id',
'rb_module': 'module',
'rb_date': 'date',
'rb_risk': 'risk',
'rb_nav': 'nav',
'rb_remarks': 'remarks',
}
@write
def batch_insert(datas):
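# renders one VALUES tuple per record; missing or None fields become SQL NULL,
# and rb_id is omitted so AUTO_INCREMENT assigns it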
datas = [mapper_columns(x, __COLUMNS__) for x in datas]
values = ','.join([f'''({','.join([(f"'{x[j]}'" if j in x and x[j] is not None else 'null') for j in __COLUMNS__.keys() if j != 'rb_id'])})''' for x in datas])
return f'''insert into robo_benchmark({','.join([x for x in __COLUMNS__.keys() if x != 'rb_id'])}) values {values}'''
@read(one=True)
def get_last_one(module=None, max_date=None, risk=None, re: bool = None):
sqls = []
if max_date:
sqls.append(f"rb_date <= '{format_date(max_date)}'")
return f'''
select {','.join([f"{x[0]} as {x[1]}" for x in __COLUMNS__.items()])} from robo_benchmark
{where(*sqls, rb_module=module, rb_risk=risk, v_rb_re=re)} order by rb_date desc limit 1
'''
@read
def get_list(max_date=None, min_date=None, module=None, risk=None, re: bool = None):
sqls = []
if max_date:
sqls.append(f"rb_date <= '{format_date(max_date)}'")
if min_date:
sqls.append(f"rb_date >= '{format_date(min_date)}'")
return f'''
select {','.join([f"{x[0]} as {x[1]}" for x in __COLUMNS__.items()])} from robo_benchmark
{where(*sqls, rb_module=module, rb_risk=risk, v_rb_re=re)} order by rb_risk, rb_date
'''
import json
from py_jftech import read, write, where, mapper_columns, format_date
from api import PortfoliosRisk, LoggerType
__COLUMNS__ = {
'rdl_id': 'id',
'rdl_date': 'date',
'rdl_risk': 'risk',
'rdl_type': 'type',
'rdl_datas': 'datas',
}
@write
def batch_insert(datas):
datas = [mapper_columns(x, __COLUMNS__) for x in datas]
values = ','.join([f'''({','.join([(f"'{x[j]}'" if j in x and x[j] is not None else 'null') for j in __COLUMNS__.keys() if j != 'rdl_id'])})''' for x in datas])
return f'''insert into robo_data_logger({','.join([x for x in __COLUMNS__.keys() if x != 'rdl_id'])}) values {values}'''
@write
def insert(datas):
datas = mapper_columns(datas=datas, columns=__COLUMNS__)
return f'''
insert into robo_data_logger({','.join([x for x in datas.keys()])})
values ({','.join([f"'{x[1]}'" for x in datas.items()])})
'''
@write
def update(id, datas):
return f'''
update robo_data_logger set rdl_datas = '{json.dumps(datas, ensure_ascii=False)}' where rdl_id = {id}
'''
@read(one=True)
def get_one(date, risk: PortfoliosRisk, type: LoggerType):
return f'''
select {','.join([f"{x[0]} as {x[1]}" for x in __COLUMNS__.items()])} from robo_data_logger
{where(rdl_date=date, rdl_risk=risk, rdl_type=type)}
'''
@read(one=True)
def get_last_one(max_date=None, risk: PortfoliosRisk = None, type: LoggerType = None, like_type=False):
sqls = []
if max_date:
sqls.append(f"rdl_date <= '{format_date(max_date)}'")
if like_type and type:
sqls.append(f"rdl_type like '{type.value}%'")
return f'''
select {','.join([f"{x[0]} as {x[1]}" for x in __COLUMNS__.items()])} from robo_data_logger
{where(*sqls, rdl_risk=risk, rdl_type=type if not like_type else None)} order by rdl_date desc, rdl_id desc limit 1
'''
@read
def get_list(min_date=None, max_date=None, risk: PortfoliosRisk = None, type: LoggerType = None, like_type=False):
sqls = []
if max_date:
sqls.append(f"rdl_date <= '{format_date(max_date)}'")
if min_date:
sqls.append(f"rdl_date >= '{format_date(min_date)}'")
if like_type and type:
sqls.append(f"rdl_type like '{type.value}%'")
return f'''
select {','.join([f"{x[0]} as {x[1]}" for x in __COLUMNS__.items()])} from robo_data_logger
{where(*sqls, rdl_risk=risk, rdl_type=type if not like_type else None)} order by rdl_risk, rdl_date, rdl_id
'''
@write
def delete(min_date=None, risk: PortfoliosRisk = None, type: LoggerType = None, like_type=False):
sqls = []
if min_date:
sqls.append(f"rdl_date >= '{format_date(min_date)}'")
if like_type and type:
sqls.append(f"rdl_type like '{type.value}%'")
delete_where = where(*sqls, rdl_risk=risk, rdl_type=type if not like_type else None)
if delete_where:
return f"delete from robo_data_logger {delete_where}"
else:
return "truncate table robo_data_logger"
import json
from datetime import datetime as dt
from py_jftech import component
from api import DataLogger, LoggerType, PortfoliosRisk, Cleanable, BacktestStep
from reports.dao import robo_data_logger as rdl
@component(bean_name='data-logger')
class DatabaseLogger(DataLogger, Cleanable):
def save_record(self, date: dt, risk: PortfoliosRisk, type: LoggerType, datas: dict, exist_merge=True):
assert date is not None, "save record, date cannot be null"
assert risk is not None, "save record, risk cannot be null"
assert type is not None, "save record, type cannot be null"
assert datas is not None, "save record, datas cannot be null"
exist = rdl.get_one(date=date, risk=risk, type=type)
if exist:
save_datas = datas
if exist_merge:
save_datas = {**json.loads(exist['datas']), **datas}
rdl.update(exist['id'], save_datas)
else:
rdl.insert({
'date': date,
'risk': risk,
'type': type,
'datas': datas
})
def load_records(self, max_date=None, min_date=None, risk: PortfoliosRisk = None, type: LoggerType = None):
result = rdl.get_list(max_date=max_date, min_date=min_date, risk=risk, type=type, like_type=True)
return [{**x, 'datas': json.loads(x['datas'])} for x in result]
def clean_up(self, min_date=None, risk: PortfoliosRisk = None):
rdl.delete(min_date=min_date, risk=risk)
@property
def clean_step(self):
return BacktestStep.HOLD_PORTFOLIO
@property
def clean_name(self):
return 'data logger'
import math
from datetime import datetime as dt
from typing import List
import pandas as pd
from py_jftech import component, autowired
from api import RoboReportor
@component(bean_name='month-div-rate-report')
class MonthDivRateReportor(RoboReportor):
@autowired(names={'hold_reportor': 'hold-report', 'benchmark': 'benckmark-report'})
def __init__(self, hold_reportor: RoboReportor = None, benchmark: RoboReportor = None):
self._hold_reportor = hold_reportor
self._benchmark = benchmark
@property
def report_name(self) -> str:
return '月度配息率比较'
def load_report(self, max_date=dt.today(), min_date=None) -> List[dict]:
holds = pd.DataFrame(self._hold_reportor.load_report(max_date=max_date, min_date=min_date))
benchmark = pd.DataFrame(self._benchmark.load_report(max_date=max_date, min_date=min_date))
if not holds.empty and not benchmark.empty:
holds['divrobo'] = round(holds['port_div'] * 12 / holds['real_av'].shift() * 100, 2)
holds = holds[['date', 'divrobo']]
holds.replace(0, math.nan, inplace=True)
holds.dropna(inplace=True)
holds['date'] = holds['date'].dt.to_period('m')
holds.set_index('date', inplace=True)
benchmark['alligam'] = round(benchmark['alligam_div'] * 12 / benchmark['alligam_av'].shift() * 100, 2)
benchmark = benchmark[['date', 'alligam']]
benchmark.replace(0, math.nan, inplace=True)
benchmark.dropna(inplace=True)
benchmark['date'] = benchmark['date'].dt.to_period('m')
benchmark.set_index('date', inplace=True)
result = holds.join(benchmark)
result.reset_index(inplace=True)
return result.to_dict('records')
return []
@component(bean_name='year-div-rate-report')
class YearDivRateReportor(RoboReportor):
@autowired(names={'month_div_rate': 'month-div-rate-report'})
def __init__(self, month_div_rate: RoboReportor = None):
self._month_div_rate = month_div_rate
@property
def report_name(self) -> str:
return '年度配息率比较'
def load_report(self, max_date=dt.today(), min_date=None) -> List[dict]:
month_datas = pd.DataFrame(self._month_div_rate.load_report(max_date=max_date, min_date=min_date))
if not month_datas.empty:
result = pd.DataFrame(columns=month_datas.columns)
month_datas['year'] = month_datas['date'].dt.year
month_datas.set_index('date', inplace=True)
result = round(month_datas.groupby(by='year').mean(), 2)
result.loc['平均'] = round(month_datas.drop(columns='year').mean(), 2)
result.reset_index(inplace=True)
return result.to_dict('records')
return []
import os
from abc import abstractmethod
from copy import deepcopy
from datetime import datetime as dt
from shutil import copyfile
from tempfile import TemporaryDirectory
from typing import List
import pandas as pd
from dateutil.relativedelta import relativedelta
from py_jftech import component, autowired, get_config, get_instance_name, get_project_path, format_date, sendmail
from api import RoboReportor, RoboExportor, RoboExecutor
def include_report():
return get_config(__name__)['include-report']
class DefaultExportor(RoboExportor):
@autowired
def __init__(self, reportors: List[RoboReportor] = None, exec: RoboExecutor = None):
self._reportors = {get_instance_name(x): x for x in reportors}
self._exec = exec
def export(self, max_date=dt.today(), min_date=None):
if not self.include_report:
return None
with TemporaryDirectory() as tmpdir:
filename = f"{self.file_name}_{format_date(self._exec.curt_date)}"
filepath = os.path.join(tmpdir, f"{filename}.xlsx")
with pd.ExcelWriter(filepath) as writer:
for reportor_name in self.include_report:
mindate = min_date
if isinstance(reportor_name, dict):
reportor = self._reportors[reportor_name['name']]
if reportor_name['min-date'] is None:
mindate = None
elif isinstance(reportor_name['min-date'], dict):
mindate = max_date - relativedelta(**reportor_name['min-date'])
datas = pd.DataFrame(reportor.load_report(max_date=max_date, min_date=mindate))
else:
reportor = self._reportors[reportor_name]
datas = pd.DataFrame(reportor.load_report(max_date=max_date, min_date=mindate))
sheet_name = reportor.report_name
if mindate and mindate > self._exec.start_date:
sheet_name = f'{sheet_name}(近{(max_date-mindate).days}天)'
if not datas.empty:
datas.to_excel(writer, sheet_name=sheet_name, index=False)
email = self.get_email(filepath)
if email and 'receives' in email and email['receives']:
receives = email['receives']
copies = email['copies'] if 'copies' in email and email['copies'] is not None else []
attach_paths = [filepath]
subject = email['subject'].format(today=format_date(dt.today()))
content = email['content'].format(today=format_date(dt.today()))
sendmail(receives=receives, copies=copies, attach_paths=attach_paths, subject=subject, content=content)
if self.save_path is not None:
os.makedirs(self.save_path, exist_ok=True)
save_file = os.path.join(self.save_path, f"{filename}.xlsx")
copyfile(filepath, save_file)
if self.save_config:
profile_active = os.environ.get('PROFILE_ACTIVE')
config_name = f'config-{profile_active}.yml' if profile_active is not None else 'config.yml'
src_path = f'{get_project_path()}{os.path.sep}{config_name}'
save_path = os.path.join(self.save_path, f"{filename}.yml")
copyfile(src_path, save_path)
def get_email(self, file):
return deepcopy(self.config['email']) if 'email' in self.config else None
@property
def save_path(self):
if 'save-path' not in self.config:
return None
save_path: str = self.config['save-path']
if save_path.startswith('.'):
return os.path.abspath(os.path.join(os.path.dirname(__file__), save_path))
elif save_path.startswith('/'):
return os.path.abspath(save_path)
return os.path.abspath(os.path.join(get_project_path(), save_path))
@property
def exist_build(self):
return self.config['exist-build'] if 'exist-build' in self.config else False
@property
def file_name(self):
return self.config['file-name'] if 'file-name' in self.config else 'export'
@property
def include_report(self):
return self.config['include-report'] if 'include-report' in self.config else []
@property
def save_config(self):
return self.config['save-config'] if 'save-config' in self.config else False
@property
@abstractmethod
def config(self):
pass
@component(bean_name='backtest-export')
class BacktestExportor(DefaultExportor):
def __init__(self):
super(BacktestExportor, self).__init__()
self.__config = deepcopy(get_config(__name__))
@property
def config(self):
return self.__config['backtest']
@component(bean_name='daily-real-export')
class DailyRealExportor(DefaultExportor):
@autowired(names={'signal_reportor': 'daily-signal-report'})
def __init__(self, signal_reportor: RoboReportor = None):
super(DailyRealExportor, self).__init__()
self.__config = deepcopy(get_config(__name__))
self._signal_reportor = signal_reportor
def get_email(self, file):
result = super(DailyRealExportor, self).get_email(file)
if result is None:
return None
content = pd.read_excel(file, sheet_name=None)
if self._signal_reportor.report_name in content:
result['subject'] = str(result['subject']['rebalance'])
result['content'] = result['content']['rebalance']
else:
result['subject'] = result['subject']['default']
result['content'] = result['content']['default']
return result
@property
def config(self):
return self.__config['real-daily']
@component(bean_name='daily-monitor-export')
class DailyMonitorExportor(DefaultExportor):
def __init__(self):
super(DailyMonitorExportor, self).__init__()
self.__config = deepcopy(get_config(__name__))
@property
def config(self):
return self.__config['daily-monitor']
from datetime import datetime as dt
from typing import List
import pandas as pd
from py_jftech import component, autowired, get_config, format_date, filter_weekend
from api import RoboReportor
@component(bean_name='fixed-range-report')
class FixedRangeReport(RoboReportor):
@autowired(names={'combo': 'combo-report'})
def __init__(self, combo: RoboReportor = None):
self._combo = combo
self._config = get_config(__name__)
@property
def report_name(self) -> str:
return '固定区间收益率'
@property
def range_dates(self):
return self._config['range-dates']
def load_report(self, max_date=dt.today(), min_date=None) -> List[dict]:
datas = pd.DataFrame(self._combo.load_report(max_date=max_date, min_date=min_date))
if not datas.empty:
datas.set_index('date', inplace=True)
result = pd.DataFrame(columns=datas.columns)
for range in self.range_dates:
start = filter_weekend(range['start'])
end = filter_weekend(range['end'])
if not datas[start:end].empty:
row_name = f"{format_date(start)}~{format_date(end)}"
result.loc[row_name] = datas[start:end].values[-1] / datas[start:end].values[0] - 1
result = round(result, 4) * 100
result.reset_index(inplace=True)
result.rename(columns={'index': 'range-date'}, inplace=True)
return result.to_dict('records')
return []
from datetime import datetime as dt
from typing import List
import pandas as pd
from empyrical import annual_return, annual_volatility, max_drawdown, sharpe_ratio
from py_jftech import component, autowired
from api import RoboReportor
@component(bean_name='indicators-report')
class IndicatorsReportor(RoboReportor):
@autowired(names={'combo': 'combo-report'})
def __init__(self, combo: RoboReportor = None):
self._combo = combo
@property
def report_name(self) -> str:
return '指标'
def load_report(self, max_date=dt.today(), min_date=None) -> List[dict]:
datas = pd.DataFrame(self._combo.load_report(max_date=max_date, min_date=min_date))
if not datas.empty:
datas.set_index('date', inplace=True)
returns = round(datas.pct_change(), 5)
indicators = {
'annual_return': list(annual_return(returns, period='daily', annualization=None) * 100),
'annual_volatility': annual_volatility(returns, period='daily', annualization=None) * 100,
'max_drawdown': max_drawdown(returns, out=None) * 100,
'sharp': sharpe_ratio(returns, risk_free=0, period='daily', annualization=None),
}
indicators['calmar'] = abs(indicators['annual_return'] / indicators['max_drawdown'])
result = pd.DataFrame(indicators.values(), index=indicators.keys(), columns=list(returns.columns)).round(2)
result.reset_index(inplace=True)
result.rename(columns={'index': 'indicators'}, inplace=True)
return result.to_dict('records')
return []
from datetime import datetime as dt
from typing import List
import pandas as pd
from dateutil.relativedelta import relativedelta
from py_jftech import component, autowired, get_config, format_date, filter_weekend
from api import RoboReportor
@component(bean_name='relative-range-report')
class RelativeRangeReport(RoboReportor):
@autowired(names={'combo': 'combo-report'})
def __init__(self, combo: RoboReportor = None):
self._combo = combo
self._config = get_config(__name__)
@property
def report_name(self) -> str:
return '相对区间收益率'
@property
def range_dates(self):
return self._config['range-dates']
def load_report(self, max_date=dt.today(), min_date=None) -> List[dict]:
datas = pd.DataFrame(self._combo.load_report(max_date=max_date, min_date=min_date))
if not datas.empty:
datas.set_index('date', inplace=True)
result = pd.DataFrame(columns=datas.columns)
for range in self.range_dates:
kwargs = range.copy()
del kwargs['name']
start = filter_weekend(max_date - relativedelta(**kwargs)) if kwargs and ('dates' not in kwargs or kwargs['dates'] is not None) else datas.index[0]
end = filter_weekend(max_date)
row_name = f"{range['name']}({format_date(start)}~{format_date(end)})"
result.loc[row_name] = datas[start:end].values[-1] / datas[start:end].values[0] - 1
result = round(result, 4) * 100
result.reset_index(inplace=True)
result.rename(columns={'index': 'range-date'}, inplace=True)
return result.to_dict('records')
return []
import logging
import unittest
from datetime import datetime as dt
from py_jftech import autowired, to_str, parse_date, prev_workday
from api import RoboReportor, RoboExportor
logger = logging.getLogger(__name__)
class ReportTest(unittest.TestCase):
@autowired(names={'reportor': 'benckmark-report'})
def test_benchmark_report(self, reportor: RoboReportor = None):
result = reportor.load_report(max_date=parse_date('2023-03-01'))
logger.info(to_str(result))
@autowired(names={'reportor': 'indicators-report'})
def test_indicator_report(self, reportor: RoboReportor = None):
result = reportor.load_report(max_date=parse_date('2023-03-01'))
logger.info(to_str(result))
@autowired(names={'reportor': 'fixed-range-report'})
def test_fixed_report(self, reportor: RoboReportor = None):
result = reportor.load_report(max_date=parse_date('2023-03-01'))
logger.info(to_str(result))
@autowired(names={'reportor': 'relative-range-report'})
def test_relative_report(self, reportor: RoboReportor = None):
result = reportor.load_report(max_date=parse_date('2023-03-01'))
logger.info(to_str(result))
@autowired(names={'reportor': 'contribution-report'})
def test_contribution_report(self, reportor: RoboReportor = None):
result = reportor.load_report(max_date=parse_date('2023-03-21'))
logger.info(to_str(result))
@autowired(names={'reportor': 'combo-report'})
def test_combo_report(self, reportor: RoboReportor = None):
result = reportor.load_report(max_date=parse_date('2023-03-01'))
logger.info(to_str(result))
@autowired(names={'reportor': 'year-range-report'})
def test_year_report(self, reportor: RoboReportor = None):
result = reportor.load_report(max_date=parse_date('2023-03-01'))
logger.info(to_str(result))
@autowired(names={'reportor': 'month-div-rate-report'})
def test_month_div_rate_report(self, reportor: RoboReportor = None):
result = reportor.load_report(max_date=parse_date('2023-03-01'))
logger.info(to_str(result))
@autowired(names={'reportor': 'year-div-rate-report'})
def test_year_div_rate_report(self, reportor: RoboReportor = None):
result = reportor.load_report(max_date=parse_date('2023-03-01'))
logger.info(to_str(result))
@autowired(names={'exportor': 'backtest-export'})
def test_backtest_export(self, exportor: RoboExportor = None):
exportor.export(max_date=parse_date('2023-03-01'))
@autowired(names={'exportor': 'daily-real-export'})
def test_daily_export(self, exportor: RoboExportor = None):
exportor.export(max_date=prev_workday(dt.today()))
@autowired(names={'exportor': 'daily-monitor-export'})
def test_daily_monitor(self, exportor: RoboExportor = None):
exportor.export(max_date=parse_date('2023-03-27'))
if __name__ == '__main__':
unittest.main()
from datetime import datetime as dt
from typing import List
import pandas as pd
from py_jftech import component, autowired
from api import RoboReportor
@component(bean_name='year-range-report')
class YearRangeReport(RoboReportor):
@autowired(names={'combo': 'combo-report'})
def __init__(self, combo: RoboReportor = None):
self._combo = combo
@property
def report_name(self) -> str:
return '单年区间业绩'
def load_report(self, max_date=dt.today(), min_date=None) -> List[dict]:
datas = pd.DataFrame(self._combo.load_report(max_date=max_date, min_date=min_date))
if not datas.empty:
datas['year'] = datas['date'].dt.year
datas.set_index('date', inplace=True)
result = pd.DataFrame(columns=datas.columns)
last_data = None
for year, group_data in datas.groupby(by='year', group_keys=False):
group_data = group_data.drop(columns='year')
if last_data is None:
last_data = group_data.iloc[0]
result.loc[year] = group_data.iloc[-1] / last_data - 1
last_data = group_data.iloc[-1]
result.drop(columns='year', inplace=True)
result = round(result * 100, 4)
result.reset_index(inplace=True)
result.rename(columns={'index': 'year'}, inplace=True)
return result.to_dict('records')
return []
import logging
import sys
from concurrent.futures import wait
from datetime import datetime as dt
from typing import List
import pandas as pd
from py_jftech import (
component, autowired, get_config, filter_weekend, asynchronized,
parse_date, workday_range, is_workday, prev_workday, format_date
)
from api import (
RoboExecutor, Datum, AssetPool, PortfoliosBuilder,
PortfoliosRisk, PortfoliosHolder, DataSync, RoboExportor, BacktestStep, RebalanceSignal
)
logger = logging.getLogger(__name__)
@component(bean_name='backtest')
class BacktestExecutor(RoboExecutor):
@autowired
def __init__(self, datum: Datum = None, pool: AssetPool = None,
syncs: List[DataSync] = None, export: RoboExportor = None,
builder: PortfoliosBuilder = None, hold: PortfoliosHolder = None,
signal: RebalanceSignal = None):
self._datum = datum
self._pool = pool
self._builder = builder
self._hold = hold
self._syncs = syncs
self._export = export
self._signal = signal
self._config = get_config(__name__)['backtest']
@staticmethod
def get_last_business_day(start_date, end_date):
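# builds the backtest rebalance dates: the start date plus the configured
# warehouse-transfer-date of each following month, each snapped back to the previous
# workday and sampled every warehouse-frequency months; the second date is dropped
# if it falls within the sealing period of the first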
transfer_date = get_config('portfolios')['holder']['warehouse-transfer-date']
# generate the candidate transfer dates and convert them to a DataFrame
dates = pd.date_range(start_date, end_date, freq='MS', closed='right')
dates = [pd.to_datetime(f"{date.year}-{date.month}-{transfer_date}") for date in dates]
dates.insert(0, start_date)
df = pd.DataFrame({'dates': dates})
df['dates'] = df['dates'].apply(lambda x: prev_workday(x))
result = []
for i in range(0, len(df), get_config('portfolios')['holder']['warehouse-frequency']):
result.append(df.iloc[i]['dates'])
if len(result) > 1:
    delta = workday_range(result[0], result[1])
    period = get_config(__name__)['backtest']['sealing-period']
    # drop the second date if it falls within the sealing period of the first
    if len(delta) <= period:
        result.pop(1)
return result
@property
def start_date(self):
return pd.to_datetime(filter_weekend(self._config['start-date']))
@property
def start_step(self) -> BacktestStep:
return BacktestStep(self._config['start-step'])
@property
def end_step(self) -> BacktestStep:
return BacktestStep(self._config['end-step']) if 'end-step' in self._config else BacktestStep.HOLD_PORTFOLIO
@property
def is_sync_data(self):
return get_config(__name__)['sync-data']
@property
def end_date(self):
return pd.to_datetime(self._config['end-date'])
@property
def is_clean_up(self):
return self._config['clean-up'] if 'clean-up' in self._config else True
def clear_datas(self):
if self.start_step.within(BacktestStep.ASSET_POOL) and self.end_step.without(BacktestStep.ASSET_POOL):
logger.info('start to clear asset pool'.center(50, '-'))
self._pool.clear()
if self.start_step.within(BacktestStep.NORMAL_PORTFOLIO) and self.end_step.without(
BacktestStep.NORMAL_PORTFOLIO):
logger.info('start to clear normal portfolios'.center(50, '-'))
self._builder.clear()
self._signal.clear()
if self.start_step.within(BacktestStep.HOLD_PORTFOLIO) and self.end_step.without(BacktestStep.HOLD_PORTFOLIO):
logger.info('start to clear hold portfolios'.center(50, '-'))
self._hold.clear()
def start_exec(self):
if self.is_sync_data:
for sync in self._syncs:
sync.do_sync()
if self.is_clean_up:
self.clear_datas()
if self.start_step.within(BacktestStep.ASSET_POOL) and self.end_step.without(BacktestStep.ASSET_POOL):
logger.info("start to build asset pool".center(50, '-'))
now = dt.now()
workdays = self.get_last_business_day(self.start_date, self.end_date)
for date in workdays:
self._pool.get_pool(date)
logger.info(f"build asset pool success, use[{(dt.now() - now).seconds}s]")
if self.start_step.within(BacktestStep.NORMAL_PORTFOLIO) and self.end_step.without(
BacktestStep.NORMAL_PORTFOLIO):
logger.info("start to build normal portfolios".center(50, '-'))
now = dt.now()
wait([self.async_build_portfolios(day, risk) for risk in PortfoliosRisk for day in
self.get_last_business_day(self.start_date, self.end_date)])
logger.info(f"build normal portfolios success, use[{(dt.now() - now).seconds}s]")
if self.start_step.within(BacktestStep.HOLD_PORTFOLIO) and self.end_step.without(BacktestStep.HOLD_PORTFOLIO):
logger.info("start to build hold portfolios".center(50, '-'))
now = dt.now()
wait([self.async_build_hold(x) for x in PortfoliosRisk])
logger.info(f"build hold portfolios success, use[{(dt.now() - now).seconds}s]")
logger.info("start to export report".center(50, '-'))
now = dt.now()
self._export.export(max_date=self.end_date, min_date=self.start_date)
logger.info(f"report file exported successfully. use[{(dt.now() - now).seconds}s].")
@asynchronized(isolate=True)
def async_build_risk_date(self, asset_id):
self._risk.build_risk_date(asset_id, self.end_date)
@asynchronized(isolate=True)
def async_build_portfolios(self, day, risk: PortfoliosRisk):
self._builder.get_portfolios(day, risk)
@asynchronized(isolate=True)
def async_build_hold(self, risk: PortfoliosRisk):
self._hold.build_hold_portfolio(day=self.end_date, risk=risk)
@component(bean_name='real')
class RealExecutor(RoboExecutor):
@autowired(names={'daily_export': 'daily-real-export', 'monitor_export': 'daily-monitor-export'})
def __init__(self, builder: PortfoliosBuilder = None, hold: PortfoliosHolder = None, syncs: List[DataSync] = None,
daily_export: RoboExportor = None, monitor_export: RoboExportor = None, pool: AssetPool = None,
signal: RebalanceSignal = None):
self._builder = builder
self._pool = pool
self._hold = hold
self._syncs = syncs
self._daily_export = daily_export
self._monitor_export = monitor_export
self._config = get_config(__name__)['real']
self._signal = signal
@property
def start_date(self):
return pd.to_datetime(filter_weekend(self._config['start-date']))
@property
def is_sync_data(self):
return get_config(__name__)['sync-data']
@property
def curt_date(self):
if len(sys.argv) > 1:
try:
return parse_date(sys.argv[1])
except Exception as e:
                logger.warning('failed to parse current date from sys.argv, falling back to today.', exc_info=e)
return dt.combine(dt.today().date(), dt.min.time())
@property
def include_date(self):
        return [dt.combine(x, dt.min.time()) for x in self._config['include-date']] if 'include-date' in self._config else []
@property
def export(self):
return self._config['export'] if 'export' in self._config else False
def start_exec(self):
if self.is_sync_data:
for sync in self._syncs:
sync.do_sync()
date = self.curt_date
if is_workday(date) or date in self.include_date:
date = prev_workday(filter_weekend(date))
for risk in PortfoliosRisk:
logger.info(f"start to build risk[{risk.name}] real for date[{format_date(date)}]".center(50, '-'))
now = dt.now()
                # A NORMAL optimal portfolio must be generated every day, whether or not it ends up being used
self._builder.get_portfolios(date, risk)
self._signal.get_signal(date, risk)
                # Update the hold portfolio
self._hold.build_hold_portfolio(date, risk)
logger.info(
f"build risk[{risk.name}] real for date[{format_date(date)}] success, use[{(dt.now() - now).seconds}s]")
if self.export:
now = dt.now()
                # Daily real-trading report
self._daily_export.export(max_date=date)
logger.info(
f'export email for date[{format_date(date)}] send success, use[{(dt.now() - now).seconds}s]')
else:
            logger.info(f'today[{format_date(date)}] is a rest day, skipping the daily real robo run.')
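# ---------------------------------------------------------------------------
# FastAPI service (run as the "robo_controller" module, see uvicorn.run below)
# ---------------------------------------------------------------------------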
import datetime as dt
import json
import logging
from statistics import pstdev
import pandas as pd
import uvicorn
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from apscheduler.triggers.date import DateTrigger
from empyrical import sharpe_ratio, annual_volatility
from fastapi import FastAPI, Request
from py_jftech import prev_workday, filter_weekend, autowired, next_workday
from starlette.responses import JSONResponse
import main
from api import DatumType, PortfoliosRisk, Datum, RoboReportor
app = FastAPI()
# Create the AsyncIOScheduler instance
scheduler = AsyncIOScheduler()
REC_GID = 'E3886FBA-123B-7890-123E-123456BEEED'
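# Module-level cache shared by the endpoint: fund metadata plus the latest portfolio metrics
# (cp: sharpe ratio, roi: annualized volatility, risk: population standard deviation).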
fund_infos, cp, roi, risk = None, None, None, None
def get_today_rec():
from portfolios.dao import robo_mpt_portfolios as rmp
from api import PortfoliosType, PortfoliosRisk
day = prev_workday(filter_weekend(dt.date.today()))
portfolio = rmp.get_one(day, PortfoliosType.NORMAL, PortfoliosRisk.FT3)
return portfolio
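# get_last_signal returns the most recent effective FT3 rebalance signal on or before the
# previous workday; the /franklin/prediction_data endpoint below builds its payload from it.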
def get_last_signal():
from rebalance.dao import robo_rebalance_signal as rrs
day = prev_workday(filter_weekend(dt.date.today()))
last_re = rrs.get_last_one(max_date=day, risk=PortfoliosRisk.FT3, effective=True)
return last_re
@autowired
def get_fund_infos(datum: Datum = None):
global fund_infos
fund_infos = datum.get_datums(DatumType.FUND)
@autowired(names={'combo': 'hold-report'})
def load_report(max_date=None, min_date=None, combo: RoboReportor = None):
    global cp, roi, risk
    # Resolve the default here so a long-running service does not freeze max_date
    # at its import-time value.
    if max_date is None:
        max_date = prev_workday(dt.date.today())
    datas = pd.DataFrame(combo.load_report(max_date=max_date, min_date=min_date))
    datas.set_index('date', inplace=True)
    datas = datas['acc_av']
    returns = round(datas.pct_change(), 5)
    # cp: sharpe ratio of the daily returns of the accumulated account value
    cp = round(sharpe_ratio(returns, risk_free=0, period='daily', annualization=None), 2)
    # roi / risk: annualized volatility and population std dev of the account-value series
    roi = round(annual_volatility(datas), 1)
    risk = round(pstdev(datas), 1)
return cp, roi, risk
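# /franklin/prediction_data responds with a list of recommendation records shaped roughly as
# follows (illustrative only; actual values come from the latest effective signal):
# [{'recomm_guid': ..., 'data': {'data_date': 'YYYY-MM-DD',
#    'funds': [{'weight': <int percent>, 'fund_id': <ftTicker>}, ...],
#    'creat_date': 'YYYY-MM-DD HH:MM:SS', 'risk': ..., 'rr': ..., 'cp': ..., 'roi': ...,
#    'note': '{"last_rec": "YYYYMMDD"}'}}]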
@app.get("/franklin/prediction_data")
async def recommend():
sig = get_last_signal()
if sig:
if not fund_infos:
get_fund_infos()
id_ticker_map = {str(info['id']): info for info in fund_infos}
funds = json.loads(sig['portfolio'])
rec_list = []
portfolios = {'recomm_guid': REC_GID}
load_report(min_date=dt.date.today() - dt.timedelta(365))
data = {'recomm_guid': REC_GID, 'data_date': sig['date'].strftime('%Y-%m-%d'),
'funds': [{'weight': round(weight * 100), 'fund_id': id_ticker_map[key]['ftTicker']} for key, weight in
funds.items()], 'creat_date': sig['create_time'].strftime('%Y-%m-%d %H:%M:%S'),
'risk': risk,
'rr': round(sum([id_ticker_map[key]['risk'] * weight for key, weight in funds.items()]), 2), 'cp': cp,
'roi': roi}
note = {'last_rec': next_workday(sig['date']).strftime('%Y%m%d')}
data['note'] = json.dumps(note)
portfolios['data'] = data
rec_list.append(portfolios)
return rec_list
else:
return {'msg': '当日投组未产生,待10:00后获取'}
# Catch-all handler for other exceptions
@app.exception_handler(Exception)
async def general_exception_handler(request: Request, exc: Exception):
    # Log the generic error information
logging.error(f"请求 {request.url} 发生未知错误: {str(exc)}")
return JSONResponse(
status_code=500,
content={"errorCode": "500", "errorMsg": str(exc)},
)
# Application startup event
@app.on_event("startup")
async def startup_event():
    # If the service starts after the scheduled time (e.g. after a failure), rerun today's portfolio job immediately
current_time = dt.datetime.now()
target_time = dt.time(10, 0)
if current_time.time() > target_time:
scheduler.add_job(main.start, trigger=DateTrigger(run_date=current_time))
    # Schedule the daily real-trading job on weekdays at 10:00
    scheduler.add_job(main.start, 'cron', day_of_week='0-4', hour=10, minute=0)
scheduler.start()
if __name__ == "__main__":
uvicorn.run("robo_controller:app", host="0.0.0.0", port=8080)