Commit 4bad915b authored by 赵杰

核减修复 (fix for the share write-down / reduction logic)

parent 03fa8113
......@@ -34,7 +34,7 @@ class UserCustomerDataAdaptor:
all_fund_distribution = {}
all_fund_performance = {}
def __init__(self, user_id, customer_id, end_date=str(datetime.datetime.now().date()), index_id="IN0000007M"):
def __init__(self, user_id, customer_id, end_date=str(datetime.datetime.now()), index_id="IN0000007M"):
self.user_id = user_id
self.customer_id = customer_id
self.compare_index_id = index_id
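The default `end_date` now carries a time component, but the constructor still reduces it to a calendar date via `pd.to_datetime(end_date).date()` in the next hunk, so the parsed result is unchanged; note also that, as with the previous default, the expression is evaluated once when the method is defined, not per call. A quick illustrative check (not part of the commit):

```python
import datetime
import pandas as pd

with_time = str(datetime.datetime.now())          # e.g. '2021-02-03 14:25:01.123456'
date_only = str(datetime.datetime.now().date())   # e.g. '2021-02-03'

# both default strings normalise to the same calendar date
assert pd.to_datetime(with_time).date() == pd.to_datetime(date_only).date()
```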
......@@ -42,7 +42,7 @@ class UserCustomerDataAdaptor:
p_end_date = pd.to_datetime(end_date).date()
# p_end_date = datetime.date(year=p_end_date.year, month=p_end_date.month, day=1) - datetime.timedelta(days=1)
self.end_date = pd.to_datetime(str(p_end_date))
# self.end_date = pd.to_datetime("2020-12-25")
# self.end_date = pd.to_datetime("2021-01-29")
p_start_date = datetime.date(year=p_end_date.year, month=p_end_date.month, day=1)
self.month_start_date = p_start_date
# self.month_start_date = pd.to_datetime("2020-12-01")
......@@ -162,7 +162,7 @@ class UserCustomerDataAdaptor:
user_customer_order_df = pd.concat([inter_order_df, outside_order_df])
user_customer_order_df = user_customer_order_df[(user_customer_order_df["order_type"] <= 4)]
user_customer_order_df = user_customer_order_df[((user_customer_order_df["order_type"] == 1)|(user_customer_order_df["order_type"] == 4))&(user_customer_order_df["confirm_share"] > 0)]
user_customer_order_df.index = pd.Series(range(len(user_customer_order_df)))
self.start_date = user_customer_order_df["confirm_share_date"].min()
return user_customer_order_df
......@@ -185,7 +185,7 @@ class UserCustomerDataAdaptor:
break
if _row["order_type"] == 1 and _row["fund_id"] == d_fund_id and _row["nav"] > d_nav:
coefficient = _row["nav"] / d_nav
fund_order.loc[_index, "coefficient"] = coefficient
fund_order.loc[_index, "coefficient"] = coefficient * _row["coefficient"]
fund_order.loc[_index, "divident_share"] = _row["confirm_share"] * coefficient
return fund_order
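The changed line compounds the new nav ratio with the coefficient the earlier buy order already carries, so shares gained from successive dividend events stack multiplicatively instead of each event overwriting the previous ratio. A toy illustration of why the ratios must compound (invented numbers, not the adaptor's exact bookkeeping):

```python
# two dividend events push the nav from 2.0 -> 1.6 -> 1.25
ratio_first = 2.0 / 1.6     # 1.25
ratio_second = 1.6 / 1.25   # 1.28

# overwriting keeps only the last event (1.28); compounding keeps both
compounded = ratio_first * ratio_second
assert abs(compounded - 2.0 / 1.25) < 1e-9   # 1.6, the full 2.0 -> 1.25 move
```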
......@@ -201,13 +201,15 @@ class UserCustomerDataAdaptor:
for index_ori, row_ori in inter_order_df.iterrows():
if index_ori >= index:
break
if row["fund_id"] == row_ori["fund_id"] and row_ori["order_type"] == 1:
ori_share = row_ori["divident_share"]
if need_less_share >= ori_share:
if row["fund_id"] == row_ori["fund_id"] and (row_ori["order_type"] == 1 or row_ori["order_type"] == 4):
ori_share = row_ori["original_confirm_share"]
div_share = row_ori["divident_share"]
if need_less_share >= div_share:
need_less_share -= ori_share
inter_order_df.loc[index_ori, "divident_share"] = 0
else:
inter_order_df.loc[index_ori, "divident_share"] = row_ori["divident_share"] - need_less_share
stay_share = row_ori["divident_share"] - need_less_share
inter_order_df.loc[index_ori, "divident_share"] = stay_share if stay_share > 0.01 else 0
need_less_share = 0
if need_less_share <= 0:
break
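Taken together, the reduction pass now also consumes dividend orders (order_type 4), reads the amount to release from `original_confirm_share`, and zeroes residual positions below a small dust threshold instead of leaving near-zero shares behind. A minimal standalone sketch of the loop (the list-of-dicts input and the helper name are illustrative only; the field names and the 0.01 threshold mirror the hunk above):

```python
def reduce_dividend_shares(orders, need_less_share):
    """Write `need_less_share` down against earlier buy/dividend orders, in place."""
    for order in orders:                              # earlier orders of the same fund
        if order["order_type"] not in (1, 4):
            continue
        ori_share = order["original_confirm_share"]
        div_share = order["divident_share"]
        if need_less_share >= div_share:
            need_less_share -= ori_share              # this order is fully consumed
            order["divident_share"] = 0
        else:
            stay_share = div_share - need_less_share
            order["divident_share"] = stay_share if stay_share > 0.01 else 0
            need_less_share = 0
        if need_less_share <= 0:
            break
    return orders
```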
......@@ -299,7 +301,7 @@ class UserCustomerDataAdaptor:
elif fund_type == 3:
per_sql = """select distinct `price_date`, `ret_1w`, `ret_1m`, `ret_6m`, `ret_1y`, `ret_ytd`, `ret_incep` from `ifa_imported_fund_count` where `fund_id`='{}' order by `price_date` ASC""".format(
cur_fund_id)
cur = tamp_product_session.execute(per_sql)
cur = tamp_fund_session.execute(per_sql)
else:
cur = tamp_product_session.execute(per_sql)
data = cur.fetchall()
......@@ -456,9 +458,9 @@ class UserCustomerDataAdaptor:
# buy
if row['order_type'] == 1:
cur_fund_share = actual_share_dict.get(cur_fund_id, 0)
cur_fund_share += row["reduce_share"] / row["coefficient"]
cur_fund_share += row["original_confirm_share"] - (row["original_confirm_share"] * row["coefficient"] - row["divident_share"])
nav_df.loc[confirm_share_date:, cur_fund_id + "_amount"] += row["reduce_share"] * row["nav"]
nav_df.loc[confirm_share_date:, cur_fund_id + "_share"] = cur_fund_share
# nav_df.loc[confirm_share_date:, cur_fund_id + "_share"] = cur_fund_share
nav_df.loc[confirm_share_date:, cur_fund_id + "_reduce_share"] += row["reduce_share"]
actual_share_dict[cur_fund_id] = cur_fund_share
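One reading of the replaced increment: `original_confirm_share * coefficient` is the holding after dividends, `divident_share` is what survives earlier write-downs, so their difference is the portion already written down, and that portion is charged back against the original subscription. Invented numbers, only to make the arithmetic concrete:

```python
original_confirm_share = 10_000   # shares confirmed at purchase
coefficient = 1.25                # cumulative dividend/split ratio -> 12 500 post-dividend shares

# nothing written down yet: divident_share equals the post-dividend share count
assert original_confirm_share - (original_confirm_share * coefficient - 12_500) == 10_000

# 2 500 post-dividend shares already written down by later redemptions
assert original_confirm_share - (original_confirm_share * coefficient - 10_000) == 7_500
```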
......@@ -466,7 +468,7 @@ class UserCustomerDataAdaptor:
cur_fund_share = actual_share_dict.get(cur_fund_id, 0)
# cur_fund_share += row["reduce_share"]
# nav_df.loc[confirm_share_date:, cur_fund_id + "_amount"] = cur_fund_share * row["nav"]
# nav_df.loc[confirm_share_date:, cur_fund_id + "_share"] = cur_fund_share
nav_df.loc[confirm_share_date:, cur_fund_id + "_share"] = cur_fund_share + row["reduce_share"]
nav_df.loc[confirm_share_date:, cur_fund_id + "_reduce_share"] = cur_fund_share + row["reduce_share"]
# actual_share_dict[cur_fund_id] = cur_fund_share
......@@ -476,6 +478,8 @@ class UserCustomerDataAdaptor:
cnav_df[p_fund_id_ + "_cum_earn"] = cnav_df[p_fund_id_ + "_earn"].cumsum().fillna(0)
cnav_df[p_fund_id_ + "_net_amount"] = cnav_df[p_fund_id_ + "_cum_earn"].apply(lambda x: Decimal(x)) + \
cnav_df[p_fund_id_ + "_amount"]
# 内部订单 (internal orders)
for p_fund_id_ in p_inter_order_df["fund_id"].unique():
nav_df[p_fund_id_ + "_net_amount"] = (nav_df[p_fund_id_] * nav_df[p_fund_id_ + "_reduce_share"]).apply(
lambda x: float(x)).fillna(0)
......@@ -488,6 +492,7 @@ class UserCustomerDataAdaptor:
nav_df[p_fund_id_ + "_net_amount"] = nav_df[p_fund_id_ + "_net_amount"].apply(lambda x: Decimal(x))
nav_df[p_fund_id_ + "_profit_ratio"] = nav_df[p_fund_id_ + "_profit_ratio"].apply(
lambda x: Decimal(0) if math.isnan(x) else x)
nav_df[p_fund_id_ + "_share"] = nav_df[p_fund_id_ + "_reduce_share"]
finall_cnav_df = cnav_df.copy()
all_nav_df = p_nav_df[p_nav_df.index >= start_date].copy().dropna(axis=0, how="all").fillna(method='ffill')
......@@ -617,7 +622,7 @@ class UserCustomerDataAdaptor:
# buy
if row['order_type'] == 1:
cur_fund_share = actual_share_dict.get(cur_fund_id, 0)
cur_fund_share += row["reduce_share"] / row["coefficient"]
cur_fund_share += row["original_confirm_share"] - (row["original_confirm_share"] * row["coefficient"] - row["divident_share"])
nav_df.loc[confirm_share_date:, cur_fund_id + "_amount"] += row["reduce_share"] * row["nav"]
nav_df.loc[confirm_share_date:, cur_fund_id + "_share"] = cur_fund_share
nav_df.loc[confirm_share_date:, cur_fund_id + "_reduce_share"] += row["reduce_share"]
......@@ -627,7 +632,7 @@ class UserCustomerDataAdaptor:
cur_fund_share = actual_share_dict.get(cur_fund_id, 0)
# cur_fund_share += row["reduce_share"]
# nav_df.loc[confirm_share_date:, cur_fund_id + "_amount"] = cur_fund_share * row["nav"]
# nav_df.loc[confirm_share_date:, cur_fund_id + "_share"] = cur_fund_share
nav_df.loc[confirm_share_date:, cur_fund_id + "_share"] = cur_fund_share + row["reduce_share"]
nav_df.loc[confirm_share_date:, cur_fund_id + "_reduce_share"] = cur_fund_share + row["reduce_share"]
# actual_share_dict[cur_fund_id] = cur_fund_share
......
HF00002JJ2,11月下半月,伴随美国大选形势明朗化,以及美元持续走弱,国内经济持续复苏等综合因素影响,商品和股市都持续走强。商品方面受美元因素影响较大的有色走势更强,股指期货方面由于市场风格转向周期和价值股,IH及IF走势更强。远澜商品组合中趋势,均值回复,量价截面和基本面量化等子策略皆不同程度获利,股指组合中量价和基本面量化择时策略也都小幅盈利。
HF00002G4A,本月市场环境对基金期权套利策略影响不大,整个11月表现为稳定增长。
HF00006693,目前仓位75成,主要配置于食品饮料(7.77%),TMT(17.22%)、医药生物(39.26%)、休闲服务(6.55%)、电力设备(4.68%)。
HF00006BGS,11月份月度涨幅0.59%,成立以来跌幅12.63%。市场轮动加速、 强势股快速回调,导致策略超额不理想。针对近期产品的回撤情况,无量投研团队也在日以继夜的加强研究分析,不断总结经验以求更好适应当下市场。无量逐步排查并解决了一些交易及策略方面存在的问题,并更新了一些低频因子,体现在业绩上还需要一些时间。
HF00006AZF,权益类:仍然持有基本面长期看好的先进制造业类股票,期待迎来戴维斯双击的 上涨,同时用股指期货期权对冲指数风险,整体权益类资产保持净多仓。 商品类:由空翻多,不过会加入跨品种之间的对冲保护。 债券类:大幅减仓,等待企稳后逐步加回仓位。本月收益主要由商品类资产贡献较多。
HF0000681Q,市场从年初以消费、医药、科技板块跑出较长时间的一波行情之后,9月10月经历了市场的调整,11月份之后,A股市场处于震荡上行状态,市场出现各个板块的风格不断快速切换,给阿尔法策略短期带来了较大的压力。另一方面,股指基差收敛,期货价格上升,但对于目前已经持有的股指空头来说,会造成额外的亏损。目前跟踪走势表现正常。
HF00004RHP,本月通过到期兑付、出售获得较多的流动性,并把握住了恐慌波动带来的市场投资良机, 获利了结部分乳业可交换债、全部汽车零部件行业转债;旅游行业公司债完成兑付,文化传媒行业公司债完成部分兑付;加仓建筑环保行业转债、电磁线行业转债,新增铝加工添加剂行业转债;新增环保行业、煤炭行业以及电解铝行业债券。目前仓位94成,重点行业主要分布在地产、电解铝、建筑环保、钢铁、城投。
HF00005DBQ,"本月通过到期兑付、出售获得较多的流动性,并把握住了恐慌波动带来的市场投资良机, 获利了结全部汽车零部件行业转债、服纺行业转债;旅游行业公司债完成兑付,文化传媒行业公司债完成部分
兑付;加仓建筑环保行业转债、电磁线行业转债,新增铝加工添加剂行业转债;新增环保行业、煤炭行业以及电解铝行业债券。目前仓位92成,重点行业主要分布在地产、钢铁、电解铝、煤炭。"
HF00005ZUB,本月通过到期兑付、出售获得较多的流动性,并把握住了恐慌波动带来的市场投资良机, 获利了结全部汽车零部件行业转债、服纺行业转债;旅游行业公司债完成兑付,文化传媒行业公司债完成部分兑付;加仓建筑环保行业转债、电磁线行业转债,新增铝加工添加剂行业转债;新增环保行业、煤炭行业以及电解铝行业债券。目前仓位9成,重点行业主要分布在地产、钢铁、电解铝、建筑环保。
HF00006641,业绩归因方面:本月打新收益0.22%,中高频CTA收益-0.315%,中性策略收益-2.1268%。因11月市场风格转变剧烈,量价因子阶段性失效,导致中性策略发生亏损。近期商品期货中亏损较大的板块是油脂油料和贵金属板块,由于思勰做的是短周期CTA策略,交易级别在小时级,因此更加关注商品期货在日内流畅的波动,然而遇到如开盘跳空或是日内的来回拉锯行情亦或是一些突发事件导致的走势反转,就很难捕捉到交易机会。
HF00006GNQ,目前本策略全部资金进行高频CTA运作,运作正常。
HF00005QOM,本月通过到期兑付、出售获得较多的流动性,并把握住了恐慌波动带来的市场投资良机,获利了结全部汽车零部件行业转债、服纺行业转债;旅游行业公司债完成兑付,文化传媒行业公司债完成部分兑付;加仓建筑环保行业转债、电磁线行业转债,新增铝加工添加剂行业转债;新增环保行业、煤炭行业以及电解铝行业债券。目前仓位9成,重点行业主要分布在地产、煤炭、城投、建筑环保。
HF00006DLK,11月下半月,伴随美国大选形势明朗化,以及美元持续走弱,国内经济持续复苏等综合因素影响,商品和股市都持续走强,很多板块间都有较为明显的趋势,商品中是有色板块,而股市中是周期和价值股。市场环境对于中长周期的象限策略来说偏友好。整个11月的涨幅较可观。
HF00006FEU,11月下半月,伴随美国大选形势明朗化,以及美元持续走弱,国内经济持续复苏等综合因素影响,商品和股市都持续走强,很多板块间都有较为明显的趋势,商品中是有色板块,而股市中则是周期和价值股,大多数量化选股策略由于量价因子失效均出现负超额,黑翼的CTA与指增组合效果表现要优于市场上大多纯指数增强策略。
HF00005AFK,疫情以及美国大选等扰动因素下,跨境价差套利策略较难做,为控制波动风险,适当降低策略配比,加大低风险的股指期现套利。
\ No newline at end of file
HF00002JJ2,远澜银杏1号,"1月份月度涨幅-1.62%,元旦后第一周市场人气旺盛,商品和股市全面上涨,但第二周市场对于货币政策是否持续宽松的预期有所调整,商品和股市都出现较剧烈的震荡回调走势。与之相应,商品趋势策略表现大多先扬后抑,在震荡行情中回吐了部分利润。商品基本面量化策略上半月仍在持续回撤。股指基本面量化策略和股指CTA策略本期表现较优,在上半月的趋势波段中获利。1月份下半月商品市场进入持续高位震荡行情,对于CTA策略较具挑战。远澜商品组合中趋势类策略大多回撤,其中长周期趋势表现优于中短周期趋势。商品基本面量化下半月大致持平,均值回归策略同期小亏。股指下半月也偏震荡,月底或因流动性缩紧等因素三大股指皆走低。远澜股指组合的基本面择时策略继续小幅新高,股指量价CTA基本持平。月份月度涨幅5.46%,12 月份商品大致维持强势上涨走势,其中黑色系因限产限电,季节性因素和中澳冲突等多重因素影响表现强劲。但下半月由于国外疫情加剧引发经济需求方面的担忧,商品市场出现较大的调整和转折走势。远澜商品组合中的趋势类策略表现较为理想,继续新高。商品基本面量化策略头寸方向转变相对较慢,策略表现先扬后抑大致持平。其他商品策略类型也大多盈利。"
HF00002G4A,申毅全天候2号,"1月份月度涨幅0.1234%,1 月份第一周期权波动率上涨迅速,而产品策略里包含备兑,做空波动率的部分,这部分产生了的浮亏。第一周下跌了0.974%,随后快速修复下跌部分,1月共上涨0.1234%。"
HF00006693,丹羿步步高德兴1号,"1月份月度涨幅6.26%, 今年以来收益 6.26%,成立以来收益 31.58%。目前仓位85成左右,主要配置于食品饮料、TMT、医药生物、休闲服务、以及电力设备。"
HF00006BGS,无量量化增强1号,"1月份月度微跌0.67%。1月份A股市场震荡,表现较为分化,1月份创业板月涨幅5.48%,上证指数微涨0.29%,中证500微跌0.33%,市场出现各个板块的风格不断快速切换,权重股表现更好,量化产品由于持股数量多,分化行情不太利于产品业绩,节前效应指增产品表现正常。"
HF00006AZF,半夏宏观对冲E,"1月份月度涨幅0.05%,成立以来收益 12.16%。权益类:仍然持有基本面长期看好的先进制造业类股票,期待迎来戴维斯双击的上涨,整体权益类(包括股指期货)资产维持多头仓位。商品类:总体维持加仓多头仓位,不过会加入跨品种之间的对冲保护。 债券类:大幅减仓,等待企稳后逐步加回国债期货多头仓位。"
HF0000681Q,中邮永安国金新智能六号,"1月份月度微跌1.22%。1月份A股市场处于震荡状态,其中创业板月涨幅5.48%,上证指数微涨0.29%,中证500微跌0.33%,市场出现各个板块的风格不断快速切换,权重股表现更好,量化产品由于持股数量多,造成小市值股票的相对超配,给阿尔法策略短期带来了较大的压力。另一方面,股指基差略微扩张,对中性产品业绩影响不大。"
HF00004RHP,高熵稳盛增强固定收益1号,"1月份月度涨幅0.83%,今年以来收益0.83%,成立以来收益82.96%。目前仓位9成左右,重点行业主要分布在地产、电解铝、建筑环保、煤炭。"
HF00005DBQ,高熵稳盛5号固定收益,"1月份月度涨幅2.84%,今年以来收益2.84%,成立以来收益40.34%。目前仓位9成左右,重点行业主要分布在地产、钢铁、电解铝、煤炭。"
HF00005QOM,高熵稳盛6号固定收益,"1月份月度涨幅1.14%,今年以来收益1.14%,成立以来收益29.74%。目前仓位9成左右,重点行业主要分布在地产、煤炭、城投、铝加工添加剂。"
HF00005ZUB,高熵稳盛7号固定收益,"1月份月度涨幅3.25%,今年以来收益3.25%,成立以来收益28.47%。目前仓位9成左右,重点行业主要分布在地产、钢铁、电解铝、煤炭。"
HF0000523H,高熵稳盛增强固定收益2号,"1月份月度涨幅-0.27%,今年以来收益 -0.27%,成立以来收益 -49.1%。目前仓位9成左右,重点行业主要分布在地产、煤炭、建筑环保、电解铝、电磁线。"
HF00006641,思勰投资子路十一号,"1月份月度涨幅-0.3%,今年以来涨幅-0.3%,成立以来收益-0.96%。"
HF00006GNQ,思勰投资京享一号,"1月份月度涨幅0.59%, 今年以来涨幅0.59%,成立以来收益2.66%。目前本策略全部资金进行高频CTA运作,运作正常。"
HF00006DLK,象限2号A期,"象限2号A期1月的业绩表现为-0.3280% 。1月份商品市场整体行情对CTA策略比较不友好,首先CTA策略普遍仓位较大品种价格出现多次反转,容易出现亏损,比如前期涨得较好的焦炭,本月下跌幅度较大。其次,CTA策略获利机会不多,1月商品市场整体振幅大小和2020年12月相比有着明显的下降,特别是1月中下旬。波动率情况和振幅类似。象限2号A期在第一周快速上涨之后缓慢回落,回吐了全部利润。"
HF00006FEU,黑翼策略精选5号,"1月份月度涨幅1.68%,1月份第一周市场人气旺盛,商品和股市全面上涨,但第二周以后市场对于货币政策是否持续宽松的预期有所调整,商品和股市都出现较剧烈的震荡回调走势。黑翼CTA策略在1月份的表现为先扬后抑,整体微跌,而指增策略表现为震荡上涨,收获正超额。"
HF00005AFK,展弘稳进1号8期,"1月份月度涨幅0.90%,疫情以及美国大选等扰动因素下,跨境价差套利策略较难做,以至于收益较往年略低,但仍然保持着收益的稳定性,且回撤控制上仍保持高水准。"
HF000013VX,循远成长一号,"1月月度涨幅-0.18%。今年以来收益-0.18%,成立以来188.45%。目前仓位85成,主要配置在大消费、新能源、TMT、医药医疗,建材家居等行业板块。"
\ No newline at end of file
......@@ -31,7 +31,7 @@ def get_tamp_nav(fund, start_date, rollback=False, invest_type=2):
# # df = pd.read_sql(sql, con).dropna(how='any')
# cur = tamp_product_session.execute(sql)
if invest_type == 0:
sql = """select distinct `id`, `end_date`, `accum_nav` from `public_fund_nav` where `id`='{}' order by `end_date` ASC""".format(
sql = """select distinct `fund_id`, `price_date`, `cumulative_nav` from `tx_fund_nav` where `fund_id`='{}' and `delete_tag`=0 order by `price_date` ASC""".format(
fund)
cur = tamp_fund_session.execute(sql)
elif invest_type == 1:
......@@ -42,6 +42,10 @@ def get_tamp_nav(fund, start_date, rollback=False, invest_type=2):
sql = """select distinct `fund_id`,`price_date`,`cumulative_nav` from `fund_nav` where `fund_id`='{}' order by `price_date` ASC""".format(
fund)
cur = tamp_product_session.execute(sql)
elif invest_type == 3:
sql = """select distinct `fund_id`,`price_date`,`cumulative_nav` from `ifa_imported_fund_nav` where `fund_id`='{}' order by `price_date` ASC""".format(
fund)
cur = tamp_fund_session.execute(sql)
data = cur.fetchall()
df = pd.DataFrame(data, columns=['fund_id', 'price_date', 'cumulative_nav']).dropna(how='any')
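The queries in this helper interpolate `fund` with `format()`; if the ids ever come from user input, a parameterised variant of the new `invest_type == 3` branch would be safer. A sketch only, assuming `tamp_fund_session` is the SQLAlchemy session used above:

```python
from sqlalchemy import text

sql = text(
    "select distinct `fund_id`, `price_date`, `cumulative_nav` "
    "from `ifa_imported_fund_nav` "
    "where `fund_id` = :fund_id order by `price_date` ASC"
)
cur = tamp_fund_session.execute(sql, {"fund_id": fund})
data = cur.fetchall()
```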
......
......@@ -670,6 +670,44 @@ class PortfolioDiagnose(object):
return prod
def proposal_customize(self, suggest_fund_list, suggest_fund_weight_list, suggest_fund_type):
"""建议申购基金
Args:
prod: 剔除建议替换基金的组合净值表
Returns: 增加建议申购基金的组合净值表
"""
trade_date_df = get_trade_cal()
prod = pd.DataFrame(index=trade_date_df["end_date"])
self.new_weights = suggest_fund_weight_list
for j in range(len(suggest_fund_list)):
proposal = suggest_fund_list[j]
# 获取净值 (fetch this fund's NAV series)
proposal_nav = get_tamp_nav(proposal, pd.to_datetime("2010-01-01"), invest_type=suggest_fund_type[proposal])
self.freq_list.append(get_frequency(proposal_nav))
proposal_nav = rename_col(proposal_nav, proposal)
prod[proposal] = proposal_nav[proposal]
self.proposal_fund.append(proposal)
prod.ffill(inplace=True)
prod = prod[prod.index >= self.start_date]
prod = resample(prod, get_trade_cal(), min(self.freq_list))
self.new_correlation = cal_correlation(prod)
prod.dropna(how='all', inplace=True)
prod.fillna(method="bfill", inplace=True)
self.new_correlation = self.new_correlation.fillna(1).round(2)
self.new_correlation.columns = self.new_correlation.columns.map(lambda x: get_fund_name(x, suggest_fund_type[x]).values[0][0])
self.new_correlation.index = self.new_correlation.index.map(lambda x: get_fund_name(x, suggest_fund_type[x]).values[0][0])
prod = pd.DataFrame(prod, dtype=np.float)
self.propose_portfolio = prod
self.prod = prod
return prod
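A hypothetical call of the method added above (the fund ids reuse codes that appear in this commit's data file; the weights, the invest_type mapping and the already-constructed `portfolio_diagnose` instance are assumptions for illustration):

```python
suggest_funds = ["HF00002JJ2", "HF00006693"]
suggest_weights = [0.6, 0.4]
suggest_types = {"HF00002JJ2": 2, "HF00006693": 2}   # invest_type used to fetch NAV

prod = portfolio_diagnose.proposal_customize(suggest_funds, suggest_weights, suggest_types)
# prod: trade-calendar-indexed NAV table of the suggested funds;
# portfolio_diagnose.new_correlation is now labelled with fund names
```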
def optimize(self, ):
import time
start = time.time()
......@@ -778,25 +816,6 @@ class PortfolioDiagnose(object):
end4 = time.time()
print("模型计算一次时间:", end4 - start)
# S = np.asmatrix(S)
# w_origin = np.asarray([i for i in w_origin.values()])
# risk_target = np.asarray([1 / len(w_origin)] * len(w_origin))
# self.proposal_weights = calcu_w(w_origin, S, risk_target)
# elif self.client_type == 2:
# elif self.client_type == 3:
# elif self.client_type == 4:
# elif self.client_type == 5:
# print(len(propose_portfolio.columns))
# # 单支基金占投资额的下界为 100W/投资总额
# # w_low = 1e6 / self.invest_amount
# w_low = 0
# w_origin, S, mu = optim_drawdown(propose_portfolio, 0.5, [w_low, 1], min(self.freq_list))
# print(w_origin)
# S = np.asmatrix(S)
# w_origin = np.asarray([i for i in w_origin.values()])
# risk_target = np.asarray([1 / len(w_origin)] * len(w_origin))
# self.proposal_weights = calcu_w(w_origin, S, risk_target)
def return_compare(self):
index_data = get_index_daily(self.index_id, self.start_date)
......@@ -1291,26 +1310,26 @@ class PortfolioDiagnose(object):
evaluation = choose_bad_evaluation(data)
ret = []
fund_name = get_fund_name(fund_id).values[0][0]
fund_name = get_fund_name(fund_id, fund_id_type).values[0][0]
# 默认评价 (default evaluation text, loaded from evaluation.csv below)
# try:
# default_evaluation = pd.read_csv("./app/service/evaluation.csv", encoding='utf-8', names=['fund_id', 'eval'])
# if default_evaluation[default_evaluation['fund_id'] == fund_id]['eval'].values[0]:
# ret.append('1、' + default_evaluation[default_evaluation['fund_id'] == fund_id]['eval'].values[0])
#
# evaluation_dict = {'name': fund_name, 'data': ret}
#
# if objective:
# if fund_id in self.abandon_fund_score + self.abandon_fund_corr:
# evaluation_dict['status'] = "换仓"
# elif fund_id in self.portfolio:
# evaluation_dict['status'] = "保留"
# else:
# evaluation_dict['status'] = ""
# return evaluation_dict
# except Exception as e:
# pass
try:
default_evaluation = pd.read_csv("./app/service/evaluation.csv", encoding='utf-8', names=['fund_id', 'fund_name', 'eval'])
if default_evaluation[default_evaluation['fund_id'] == fund_id]['eval'].values[0]:
ret.append('1、' + default_evaluation[default_evaluation['fund_id'] == fund_id]['eval'].values[0])
evaluation_dict = {'name': fund_name, 'data': ret}
if objective:
if fund_id in self.abandon_fund_score + self.abandon_fund_corr:
evaluation_dict['status'] = "换仓"
elif fund_id in self.portfolio:
evaluation_dict['status'] = "保留"
else:
evaluation_dict['status'] = ""
return evaluation_dict
except Exception as e:
pass
i = 1
for k, v in evaluation.items():
......
......@@ -174,7 +174,8 @@ class UserCustomerResultAdaptor(UserCustomerDataAdaptor):
# folio_report_data["contribution_decomposition"] = month_earn
# 组合内单个基金净值数据、组合内基金持仓数据 (per-fund NAV data and holdings data within the portfolio)
result_fund_nav_info, result_fund_hoding_info = self.group_fund_basic_info_data(cur_folio_order_data, cur_folio_result_cnav_data, cumulative_profit, total_cost)
result_fund_nav_info, result_fund_hoding_info, weight_res = self.group_fund_basic_info_data(cur_folio_order_data, cur_folio_result_cnav_data, cumulative_profit, total_cost)
folio_report_data["weight_result"] = weight_res
# 拼接组合以及综合结果数据 (assemble the portfolio-level and aggregate result data)
folio_report_data["group_nav_info"] = result_fund_nav_info
......@@ -388,6 +389,7 @@ class UserCustomerResultAdaptor(UserCustomerDataAdaptor):
n_freq = freq_days(int(freq_max))
resample_df = resample(p_result_cnav_data, self.trade_cal_date, freq_max)
# for index, row in p_order_df.iterrows():
fund_weight = {}
for hold_fund_id in p_order_df["fund_id"].unique():
order = p_order_df[(p_order_df["fund_id"] == hold_fund_id)]
row = order[order["order_type"] == 1].iloc[0]
......@@ -457,11 +459,11 @@ class UserCustomerResultAdaptor(UserCustomerDataAdaptor):
# fund_hoding_info["market_values"] = round((float(row["confirm_share"]) * (fund_basic_info["cur_cnav"] - confirm_cnav) + float(row["confirm_amount"]))/10000, 2)
temp_market_values = float(p_result_cnav_data[cur_fund_id + "_net_amount"].values[-1])
fund_hoding_info["market_values"] = round(temp_market_values / 10000.0, 2)
fund_hoding_info["weight"] = "%.2f" % round(
float(fund_hoding_info["market_values"]) / total_market_values * 10000.0 * 100, 2) # 月末占比
temp_weight = float(fund_hoding_info["market_values"]) / total_market_values * 10000.0
fund_hoding_info["weight"] = "%.2f" % round(temp_weight * 100, 2) # 月末占比
temp_cost = float(p_result_cnav_data[cur_fund_id + "_amount"].values[-1])
fund_hoding_info["cost"] = "%.2f" % round( temp_cost / 10000, 2) # 投资本金
fund_hoding_info["cost"] = "%.2f" % round(temp_cost / 10000, 2) # 投资本金
fund_weight[cur_fund_id] = round(temp_weight, 4)
# 当月收益 (current-month profit)
if row['confirm_share_date'] > self.month_start_date:
cal_month_start_date = row['confirm_share_date']
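The 10000 factor in the new `temp_weight` line a few lines above cancels the earlier division: `market_values` is stored in 万元 (units of 10,000 yuan), while `total_market_values` appears to stay in yuan. A worked example with invented numbers:

```python
temp_market_values = 250_000.0                                 # fund position, yuan
total_market_values = 1_000_000.0                              # whole portfolio, yuan

market_values_wan = round(temp_market_values / 10000.0, 2)     # 25.0 万元, as displayed
temp_weight = market_values_wan / total_market_values * 10000.0

assert abs(temp_weight - 0.25) < 1e-9                          # shown as "25.00" after * 100
```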
......@@ -494,7 +496,7 @@ class UserCustomerResultAdaptor(UserCustomerDataAdaptor):
return_ratio_year = annual_return(float(cum_profit_ratio_temp), cur_resample_df, n_freq)
fund_hoding_info["return_ratio_year"] = "%.2f" % round(float(return_ratio_year)*100, 2)
group_fund_hoding_info.append(fund_hoding_info)
return group_fund_basic_info, group_fund_hoding_info
return group_fund_basic_info, group_fund_hoding_info, fund_weight
@staticmethod
def combination_yield(p_combina_df, fund_id_list):
......
......@@ -9,7 +9,7 @@ import datetime
import time
import traceback
import uuid
import pandas as pd
from app.api.engine import logging, pdf_folder, tamp_diagnose_app_engine, config, env
import os
......@@ -65,6 +65,7 @@ def call_month_report(ifauser_id, ifauser_name, customer_id, customer_name):
start = time.time()
try:
dt = DataIntegrate(ifauser_id, customer_id, '{}_{}_.pdf'.format(ifauser_name, customer_name))
data = dt.get_template_data()
dt.render_data()
print('IFA: {}, 客户: {}的基金月报已经生成, 耗时{}秒'.format(ifauser_name, customer_name, time.time()-start))
except OSError:
......@@ -80,8 +81,19 @@ if __name__ == '__main__':
data = []
with TAMP_SQL(tamp_user_engine) as tamp_user, TAMP_SQL(tamp_order_engine) as tamp_order:
tamp_user_session = tamp_user.session
res = tamp_user_session.execute('SELECT DISTINCT user_id, customer_id , realname, customer_name FROM (select f1.user_id, f1.customer_id, f2.realname,f3.customer_name from tamp_order.customer_order_view f1, tamp_user.user_info f2,tamp_user.customer_info f3 where f2.id=f1.user_id and f3.id=f1.customer_id) res;')
res = tamp_user_session.execute('SELECT DISTINCT user_id, customer_id , realname, customer_name FROM (select f1.user_id, f1.customer_id, f2.realname,f3.customer_name from tamp_order.customer_order_view f1, tamp_user.user_info f2,tamp_order.customer f3 where f2.id=f1.user_id and f3.id=f1.customer_id) res;')
data = res.fetchall()
with TAMP_SQL(tamp_user_engine) as tamp_user:
tamp_user_session = tamp_user.session
sql = "SELECT `id` from user_info where team_id=0;"
cur = tamp_user_session.execute(sql)
ifa = cur.fetchall()
inner_ifa_df = pd.DataFrame(list(ifa), columns=["ifa_id"])
inner_ifa_list = list(inner_ifa_df["ifa_id"].values)
for d in data:
if d[0] in inner_ifa_list:
print(d)
continue
call_month_report(d[0], d[2], d[1], d[3])
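The new skip filter scans `inner_ifa_list` once per row; with many internal IFAs a set gives the same behaviour with O(1) membership tests. A sketch over the same `ifa` and `data` results (column order as in the query above):

```python
inner_ifa_set = {row[0] for row in ifa}                  # ids of internal IFAs (team_id = 0)

for user_id, customer_id, realname, customer_name in data:
    if user_id in inner_ifa_set:
        continue                                         # no monthly report for internal IFAs
    call_month_report(user_id, realname, customer_id, customer_name)
```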