# NOTE(review): removed non-Python extraction artifacts ("Spaces:" /
# "Configuration error") that preceded the coding declaration.
# -*- coding: utf-8 -*- | |
""" | |
Created on Tue Apr 26 17:38:54 2022 | |
@author: bullm | |
""" | |
import io
import json

import boto3
import matplotlib.pyplot as plt
import pandas as pd
import pybase64 as base64
import streamlit as st
from streamlit_echarts import st_echarts

from Data.credentials import credentials_s3 as creds3
from modules import tables
def get_asset_field(id_quant, start, field='IQ_CLOSEPRICE_ADJ', expand=True,
                    rename=None):
    """Query one equity field from EquityMaster and return it as a DataFrame.

    Fix: ``rename`` previously used a mutable list as its default value
    (shared across calls); ``None`` now stands in for ``['asset']``.

    NOTE(review): this definition is shadowed by a later
    ``get_asset_field`` in this module with a different signature — the
    later one wins at runtime; confirm which one callers intend.
    """
    if rename is None:
        rename = ['asset']
    asset_obj = tables.EquityMaster(asset=id_quant, field=field)
    asset_df = asset_obj.query(rename=rename, start=start, expand=expand)
    return pd.DataFrame(asset_df)
def get_macro_field(country, start, instrument="INDEX", expand=True,
                    rename=None):
    """Query one macro series from MacroMaster and return it as a DataFrame.

    Fix: ``rename`` previously used a mutable list as its default value
    (shared across calls); ``None`` now stands in for ``['country']``.

    NOTE(review): this definition is shadowed by a later
    ``get_macro_field`` in this module with a different signature — the
    later one wins at runtime; confirm which one callers intend.
    """
    if rename is None:
        rename = ['country']
    asset_obj = tables.MacroMaster(country=country, instrument=instrument)
    asset_df = asset_obj.query(rename=rename, start=start, expand=expand)
    return pd.DataFrame(asset_df)
def plot_returns(id_quant, country, start):
    """Plot cumulative asset return vs. its country benchmark in Streamlit.

    Shades the area green where the asset is above the benchmark and red
    where it is at or below it.

    NOTE(review): both helpers called here are redefined later in this
    module with different signatures, and the later definitions win at
    runtime — these two-argument calls likely misbind ``start`` into the
    ``field`` / ``instrument`` slot. Verify against the intended helper.
    """
    asset_df = get_asset_field(id_quant, start)
    index_df = get_macro_field(country, start)
    # Align asset and benchmark series on their (date) index.
    asset_df = asset_df.merge(index_df, how='left',
                              left_index=True,
                              right_index=True)
    x = asset_df.index
    # Asset: price rebased to the first observation, minus 1.
    y2 = asset_df[id_quant]/asset_df.iloc[0][id_quant] - 1
    # Benchmark: compounded via cumprod of (1 + column values).
    y1= (1 + asset_df[country]).cumprod() - 1
    plt.figure(figsize=(10, 5))
    plt.rcParams['axes.facecolor'] = '#EAEAEA'
    plt.rcParams['figure.facecolor'] = '#EAEAEA'
    # Green where asset outperforms, red where it underperforms.
    plt.fill_between(x, y1, y2, where=y2 >y1, facecolor='green', alpha=0.5)
    plt.fill_between(x, y1, y2, where=y2 <=y1, facecolor='red', alpha=0.5)
    plt.xticks(rotation=60)
    plt.title('Asset vs Benchmark')
    st.pyplot(plt, height='300')
def get_ebitda(id_quant):
    """Render a Streamlit metric with the latest EBITDA and its delta.

    Bug fix: the previous call passed ``'2021-01-01'`` positionally into
    the ``field`` slot of the runtime-active
    ``get_asset_field(id_quant, field, start, ...)`` signature while also
    passing ``field='IQ_EBITDA'`` by keyword, which raises
    ``TypeError: got multiple values for argument 'field'``. Arguments
    are now passed in the order the active signature expects.
    """
    ebitda_df = get_asset_field(id_quant, 'IQ_EBITDA', '2021-01-01',
                                expand=True, rename=['asset'])
    ebitda_actual = round(ebitda_df.iloc[-1][id_quant], 2)
    ebitda_anterior = round(ebitda_df.iloc[-2][id_quant], 2)
    delta = round(ebitda_actual - ebitda_anterior, 2)
    # Metric label carries the date of the latest observation.
    st.metric("Ebitda " + ebitda_df.index[-1].strftime("%Y-%m-%d"),
              ebitda_actual, delta)
def get_asset_field(id_quant, field, start, expand=False, rename=None):
    """Query one or more equity fields from EquityMaster as a DataFrame.

    This definition shadows the earlier two-argument-leading version and
    is the one in effect at runtime.

    Fix: ``rename`` previously used a mutable list default (shared across
    calls); ``None`` now stands in for ``['asset', 'field']``.
    """
    if rename is None:
        rename = ['asset', 'field']
    asset_obj = tables.EquityMaster(asset=id_quant, field=field)
    asset_df = asset_obj.query(rename=rename, start=start, expand=expand)
    return pd.DataFrame(asset_df)
def get_macro_field(country, instrument, start, expand=True, rename=None):
    """Query one macro series from MacroMaster as a DataFrame.

    This definition shadows the earlier version and is the one in effect
    at runtime.

    Fix: ``rename`` previously used a mutable list default (shared across
    calls); ``None`` now stands in for ``['country']``.
    """
    if rename is None:
        rename = ['country']
    asset_obj = tables.MacroMaster(country=country, instrument=instrument)
    asset_df = asset_obj.query(rename=rename, start=start, expand=expand)
    return pd.DataFrame(asset_df)
def get_dict_companies():
    """Map each ticker in the company base workbook to its Quant ID."""
    base = pd.read_excel("Data/Company_Base_Definitivo.xlsx",
                         sheet_name='Compilado')
    return {ticker: id_quant
            for ticker, id_quant in zip(base["Ticker"], base["ID_Quant"])}
# asset = data_daily[field][id_quant] | |
def read_itub(path='C:/Users/bullm/Desktop/ITUB.csv'):
    """Plot the adjusted close from a locally downloaded ITUB CSV.

    Generalization: the CSV location is now a parameter (defaulting to
    the original hard-coded path) so the function works on machines other
    than the author's. The CSV must contain ``Date`` and ``Adj Close``
    columns, per the reads below.
    """
    itub_df = pd.read_csv(path)
    # Index by parsed date so the line chart has a time axis.
    itub_df.index = pd.to_datetime(itub_df["Date"])
    st.line_chart(itub_df["Adj Close"])
def company_info():
    """Streamlit page: chart a selected field for one or more tickers.

    Lets the user pick tickers, a country, and a field
    (close price / market cap), then plots the last 252 rows, optionally
    overlaying rolling means (single ticker) and country/sector index
    series rebased to the first row.
    """
    st.set_page_config(layout="wide", page_title="Portal LVAM",
                       page_icon="img/icono.png")
    st.sidebar.write("Companies")
    company_base_df = pd.read_excel("Data/Company_Base_Definitivo.xlsx",
                                    sheet_name='Compilado')
    col1, col2 = st.columns((1, 1.681))
    companies_id_dict = get_dict_companies()
    tickers = col2.multiselect("Seleccionasr Empresa",
                               company_base_df["Ticker"],
                               ["ITUB4"])
    country = col2.multiselect("Seleccionasr Empresa",
                               company_base_df["Portfolio_Country"].unique(),
                               ["Brazil"])
    # Quant IDs (as strings) for the selected tickers.
    id_quants= [str(companies_id_dict[ticker]) for ticker in tickers]
    fields_ls= ["IQ_CLOSEPRICE_ADJ", "IQ_MARKETCAP"]
    field = col1.selectbox("Selecione un campo", fields_ls)
    start = '2020-01-01'
    df = get_asset_field(id_quants, field, start, rename=['asset'])
    # Forward-fill gaps in the series.
    df = df.ffill(axis=0)
    tickers = list(tickers)
    company_id_dict = dict(zip(company_base_df["Ticker"], company_base_df["ID_Quant"]))
    id_company_dict = dict(zip(company_base_df["ID_Quant"], company_base_df["Ticker"]))
    # Relabel columns from quant IDs back to tickers.
    df.columns = [id_company_dict[int(col)] for col in df.columns]
    st.title('Cierre Ajustado Mongo Quant')
    col1, col2, col3 = st.columns(3)
    mm2 = col2.checkbox("Indice Pais")
    mm3 = col3.checkbox("Indice Sector")
    if len(tickers) == 1:
        mm = col1.checkbox("Medias moviles")
        # 20/60/240-row rolling means of the single selected ticker.
        rollings = [20,60,240]
        dicc_mm = {
            tickers[0] + f' {x}':df[tickers[0]].rolling(x).mean() for x in rollings
        }
        df2 =pd.concat(dicc_mm.values(), keys=dicc_mm.keys(), axis=1)
        df = pd.concat([df, df2], axis=1)
        if mm2:
            # Country index compounded via cumprod of (1 + values).
            mc_df = (1+get_macro_field(country, "INDEX", start)).cumprod()
            df = pd.concat([df, mc_df], axis=1).ffill(axis=0)
            df = df.iloc[len(df) - 252: ]
        else:
            df = df.iloc[len(df) - 252: ]
        if not mm and not mm2:
            st.write(df)
            st.line_chart(df[df.columns[0]])
        elif not mm and mm2:
            # Rebase price (first col) and index (last col) to row 0 so
            # the two series are comparable on one chart.
            df = df[[df.columns[0],df.columns[-1]]]/df.iloc[0][[df.columns[0],df.columns[-1]]]
            st.write(df)
            st.line_chart(df)
        else:
            st.write(df)
            st.line_chart(df)
    if len(tickers) > 1:
        if mm2:
            mc_df = (1+get_macro_field(country, "INDEX", start)).cumprod()
            df = pd.concat([df, mc_df], axis=1).ffill(axis=0)
        if mm3:
            # NOTE(review): "Banks_INDEX" is hard-coded — presumably the
            # bank-sector index; confirm for non-bank selections.
            mc_df = (1+get_macro_field(country, "Banks_INDEX", start)).cumprod()
            df = pd.concat([df, mc_df], axis=1).ffill(axis=0)
        # Keep only the last 252 rows (~ one trading year).
        df = df.iloc[len(df)-252:]
        # st.write(df.iloc[0])
        # st.write(df.iloc[-1])
        st.line_chart(df/df.iloc[0]) #/df.iloc[0]-1)
import json | |
def save_index(list_assets, titulo):
    """Persist *list_assets* under key *titulo* in ``Data/index.json``.

    Reads the existing index mapping, adds/overwrites the entry, then
    writes the whole mapping back.

    Fix: removed the redundant explicit ``close()`` that was called on a
    file already managed by a ``with`` block.
    """
    with open('Data/index.json', 'r') as json_file:
        json_object = json.load(json_file)
    json_object[titulo] = list_assets
    with open('Data/index.json', 'w') as outfile:
        json.dump(json_object, outfile)
def read_scoring():
    """Load the scoring workbook from S3 using the stored credentials."""
    return read_excel_s3(creds3["S3_KEY_ID"],
                         creds3["S3_SECRET_KEY"],
                         creds3["S3_BUCKET"],
                         "scoring.xlsx")
def read_excel_s3(key, secret_key, bucket, path):
    """Download an Excel object from S3 and parse it into a DataFrame."""
    client = boto3.client('s3',
                          aws_access_key_id=key,
                          aws_secret_access_key=secret_key)
    body = client.get_object(Bucket=bucket, Key=path)["Body"].read()
    return pd.read_excel(io.BytesIO(body), engine='openpyxl')
def get_table_excel_link(df, name):
    """Return an HTML download link (base64 data URI) for *df* as .xlsx.

    The frame is written to an in-memory workbook, base64-encoded, and
    wrapped in an ``<a download>`` button labelled *name*.

    Fixes: ``ExcelWriter.save()`` and the ``encoding=`` argument of
    ``DataFrame.to_excel`` were removed in pandas 2.0; the writer is now
    a context manager (which also closes it on error), and the unused
    ``workbook`` local was dropped.
    """
    towrite = io.BytesIO()
    with pd.ExcelWriter(towrite, engine='xlsxwriter') as writer:
        df.to_excel(writer, index=True, header=True)
        worksheet = writer.sheets["Sheet1"]
        # Fixed column width so the downloaded sheet is readable.
        worksheet.set_column('A:BZ', 18)
    towrite.seek(0)  # reset pointer before reading the buffer back
    file_name = name+'.xlsx'
    style = 'style="color:black;text-decoration: none; font-size:18px;" '
    name_mark = name
    b64 = base64.b64encode(towrite.read()).decode()
    linko = f'<center><a href="data:application/vnd.openxmlformats-officedocument.spreadsheetml.sheet;base64,{b64}" '+style+'download="'+file_name+'"><button>'+name_mark+'</button></a></center>'
    return linko
def index_constructor():
    """Streamlit page: build a market-cap-weighted custom index.

    Filters the company universe (country, size, market cap, ADTV), lets
    the user pick constituents, computes cap-weighted returns or a
    weighted field average, and offers the pieces as Excel downloads.

    Bug fix: the "Save Index" button state was stored in a local variable
    named ``save_index``, shadowing the module-level ``save_index``
    function, so clicking the button raised
    ``TypeError: 'bool' object is not callable``. The state now lives in
    ``save_index_clicked``.
    """
    try:
        company_base_df = pd.read_excel("Data/Company_Base_Definitivo.xlsx",
                                        sheet_name='Compilado')
        scoring = read_scoring()[["Ticker", "Large/Small", "Market_Cap", "ADTV"]]
        company_base_df = company_base_df.merge(scoring, how='left', on='Ticker')
        col1, col2, col3, col4 = st.columns(4)
        country = col1.selectbox("Country", ["All", "Chile", "Brazil", "Mexico", "Peru", "Colombia"])
        large_small = col2.selectbox("Large/Small", ["All", "Large", "Small"])
        start = col3.text_input('Date', '2022-01')
        field1 = col4.selectbox("Field", ['IQ_CLOSEPRICE_ADJ', 'IQ_PBV'])
        # Optional universe filters.
        if col1.checkbox("Filtro por Mkt Cap"):
            mkt_cap = col2.number_input("Mkt Cap Min", value=1000)
            company_base_df = company_base_df[company_base_df["Market_Cap"] > mkt_cap]
        if col3.checkbox("Filtro por ADTV"):
            adtv = col4.number_input("ADTV Min", value=1)
            company_base_df = company_base_df[company_base_df["ADTV"] > adtv]
        if country != "All":
            company_base_df = company_base_df[company_base_df["Portfolio_Country"] == country]
        if large_small != "All":
            company_base_df = company_base_df[company_base_df["Large/Small"] == large_small]
        if st.checkbox("Seleccionar todos"):
            tickers = st.multiselect("Seleccionar Empresa",
                                     company_base_df["Ticker"],
                                     company_base_df["Ticker"])
        else:
            tickers = st.multiselect("Seleccionasr Empresa2",
                                     company_base_df["Ticker"],)
        if len(tickers) > 0:
            titulo = col1.text_input("Titulo")
            save_index_clicked = col2.button("Save Index")
            if save_index_clicked:
                save_index(tickers, titulo)
            companies_id_dict = dict(zip(company_base_df["Ticker"],
                                         company_base_df["ID_Quant"]))
            id_company_dict = dict(zip(company_base_df["ID_Quant"],
                                       company_base_df["Ticker"]))
            id_quants = [str(companies_id_dict[ticker]) for ticker in tickers]
            field = get_asset_field(id_quants,
                                    field1,
                                    start,
                                    expand=False,
                                    rename=['asset'])
            ccy = tables.MacroMaster(instrument='FX_USD',
                                     currency='CLP').query(start=start)
            # Prices become period returns; other fields are forward-filled.
            if field1 == 'IQ_CLOSEPRICE_ADJ':
                rets = field.pct_change()  # field.mul(ccy, axis=0).pct_change()
            else:
                rets = field.ffill(0)
            mkt_cap = get_asset_field(id_quants,
                                      'IQ_MARKETCAP',
                                      start,
                                      expand=False,
                                      rename=['asset']).ffill(0)
            # Cap weights, lagged one period to avoid look-ahead.
            weights = mkt_cap.div(mkt_cap.sum(axis=1), axis=0).shift(1)
            if field1 == 'IQ_CLOSEPRICE_ADJ':
                st.line_chart((1 + (rets * weights).sum(axis=1)).cumprod() - 1)
                bm = (1 + (rets * weights).sum(axis=1)).cumprod() - 1
            else:
                st.line_chart((rets * weights).sum(axis=1))
                bm = (rets * weights).sum(axis=1)
            company_id_dict = dict(zip(company_base_df["Ticker"],
                                       company_base_df["ID_Quant"]))
            id_company_dict = dict(zip(company_base_df["ID_Quant"],
                                       company_base_df["Ticker CIQ"]))
            # Relabel columns from quant IDs to CIQ tickers for the exports.
            weights.columns = [id_company_dict[int(col)] for col in weights.columns]
            rets.columns = [id_company_dict[int(col)] for col in rets.columns]
            index = (1 + get_macro_field('Chile', "INDEX", start)).cumprod()
            col1, col2, col3, col4 = st.columns(4)
            col1.markdown(get_table_excel_link(index, "Index"),
                          unsafe_allow_html=True)
            col2.markdown(get_table_excel_link(weights, "Weights"),
                          unsafe_allow_html=True)
            col3.markdown(get_table_excel_link(rets, "Retornos"),
                          unsafe_allow_html=True)
            col4.markdown(get_table_excel_link(bm, "bm"), unsafe_allow_html=True)
    except Exception as exc:
        # Page-level boundary: show the error in the UI rather than
        # crashing the Streamlit script.
        st.write(exc)
def pca(rets):
    """Run a 10-component PCA on the return matrix and display the pieces.

    Shows the (zero-filled) returns, the component loadings, the factor
    return series, the explained-variance ratios, and Excel download
    links for the loadings and factor returns.
    """
    from sklearn.decomposition import PCA
    import numpy as np
    st.header('PCA')
    decomposer = PCA(n_components=10)
    filled = np.array(rets.fillna(0))
    st.subheader('Retornos')
    st.write(pd.DataFrame(filled, columns=rets.columns, index=rets.index))
    factor_returns = decomposer.fit_transform(filled)
    loadings = pd.DataFrame(decomposer.components_, columns=rets.columns)
    st.subheader('Weights')
    st.write(loadings)
    factor_df = pd.DataFrame(factor_returns, index=rets.index)
    st.subheader('Retornos Factores')
    st.write(factor_df)
    col1, col2 = st.columns(2)
    st.write(decomposer.explained_variance_ratio_)
    st.write(decomposer.explained_variance_ratio_.cumsum())
    col1.markdown(get_table_excel_link(loadings, "Weights"),
                  unsafe_allow_html=True)
    col2.markdown(get_table_excel_link(factor_df, "Retornos PCA"),
                  unsafe_allow_html=True)