import streamlit as st
import pandas as pd
import numpy as np
from sklearn.neighbors import KNeighborsRegressor
from geopy.distance import geodesic
import googlemaps
from geopy.exc import GeocoderTimedOut
from streamlit_folium import st_folium
import folium
from branca.colormap import LinearColormap
import base64
from io import BytesIO
import sys
import pydeck as pdk
# Print the Python version
print("Python version")
print(sys.version)
print("Version info.")
print(sys.version_info)
# Function to add heatmap layer to folium map
def add_heatmap_layer(map_obj, data, column_name, colormap_name, radius=15):
    # Note: colormap_name is accepted but not used; the colors below are hardcoded.
    heat_data = data[['latitude', 'longitude', column_name]].dropna()
    heat_layer = folium.FeatureGroup(name=f'Variável - {column_name}')
    cmap = LinearColormap(colors=['blue', 'white', 'red'], vmin=heat_data[column_name].min(), vmax=heat_data[column_name].max())
    for index, row in heat_data.iterrows():
        folium.CircleMarker(
            location=[row['latitude'], row['longitude']],
            radius=radius,
            fill=True,
            fill_color=cmap(row[column_name]),
            fill_opacity=0.7,
            color='black',
            weight=0.5,
            popup=f"{column_name}: {row[column_name]:.2f}"
        ).add_to(heat_layer)
    heat_layer.add_to(map_obj)
# Function to calculate distance in meters between two coordinates
def calculate_distance(lat1, lon1, lat2, lon2):
    coords_1 = (lat1, lon1)
    coords_2 = (lat2, lon2)
    return geodesic(coords_1, coords_2).meters
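# Example (illustrative coordinates, not taken from the dataset):
# calculate_distance(-29.4588, -51.9701, -29.4600, -51.9710) returns the geodesic distance in meters.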
def knn_predict(df, target_column, features_columns, k=5):
    # Separate features and target variable
    X = df[features_columns]
    y = df[target_column]
    # Check if there is enough data for prediction
    if len(X) < k:
        return np.zeros(len(X))  # Return an array of zeros if there isn't enough data
    # Create KNN regressor
    knn = KNeighborsRegressor(n_neighbors=k)
    # Fit the model
    knn.fit(X, y)
    # Use the model to predict target_column for the filtered data
    predictions = knn.predict(df[features_columns])
    return predictions
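# Note: knn_predict fits and predicts on the same rows, so the values it returns are
# in-sample estimates. A hedged alternative (an assumption, not part of the original app)
# that would give out-of-sample smoothing with scikit-learn:
#     from sklearn.model_selection import cross_val_predict
#     preds = cross_val_predict(KNeighborsRegressor(n_neighbors=5), df[features_columns], df[target_column], cv=5)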
# Set wide mode
st.set_page_config(layout="wide")
# Set dark theme
st.markdown(
    """
    <style>
        @font-face {
            font-family: 'Quicksand';
            src: url('font/Quicksand-VariableFont_wght.ttf') format('truetype');
        }
        body {
            color: white;
            background-color: #1e1e1e;
            font-family: 'Quicksand', sans-serif;
        }
        .st-df-header, .st-df-body, .st-df-caption {
            color: #f8f9fa; /* Bootstrap table header text color */
        }
        .st-eb {
            background-color: #343a40; /* Streamlit exception box background color */
        }
    </style>
    """,
    unsafe_allow_html=True
)
# Load the dataset into a DataFrame
data = pd.read_excel('data_nexus.xlsx')
# Initialize variables to avoid NameError
radius_visible = True
custom_address_initial = 'Centro, Lajeado - RS, Brazil' # Initial custom address
#custom_lat = data['latitude'].median()
custom_lat = -29.45880114339262
#custom_lon = data['longitude'].median()
custom_lon = -51.97011580843118
radius_in_meters = 150000
filtered_data = data # Initialize with the entire dataset
# Initial zoom level for the maps
zoom_level = 13
# Set font to 'Quicksand' for title_html
title_html = """
<style>
@font-face {font-family: 'Quicksand';
src: url('font/Quicksand-VariableFont_wght.ttf') format('truetype');
}
body {{
font-family: 'Quicksand', sans-serif;
}}
</style>
<span style='color: gray; font-size: 50px;'>aval</span>
<span style='color: #edb600; font-size: 50px;'>ia</span>
<span style='color: gray; font-size: 50px;'>.NEXUS</span>
"""
# Set font to 'Quicksand' for factor_html
factor_html = """
<style>
@font-face {font-family: 'Quicksand';
src: url('font/Quicksand-VariableFont_wght.ttf') format('truetype');
}
body {{
font-family: 'Quicksand', sans-serif;
}}
</style>
<a href='https://huggingface.co/spaces/DavidSB/avaliaFACTOR' target='_blank' style='text-decoration: none; color: inherit;'>
<span style='color: gray; font-size: 20px;'>aval</span>
<span style='color: #edb600; font-size: 20px;'>ia</span>
<span style='color: gray; font-size: 20px;'>.FACTOR</span>
"""
# Set font to 'Quicksand' for evo_html
evo_html = """
<style>
@font-face {font-family: 'Quicksand';
src: url('font/Quicksand-VariableFont_wght.ttf') format('truetype');
}
body {{
font-family: 'Quicksand', sans-serif;
}}
</style>
<a href='https://huggingface.co/spaces/DavidSB/avalia.EVO' target='_blank' style='text-decoration: none; color: inherit;'>
<span style='color: gray; font-size: 20px;'>aval</span>
<span style='color: #edb600; font-size: 20px;'>ia</span>
<span style='color: gray; font-size: 20px;'>.EVO</span>
"""
# Create a sidebar for controls
with st.sidebar:
    st.markdown(title_html, unsafe_allow_html=True)
    # Add a dropdown for filtering "Fonte"
    selected_fonte = st.selectbox('Finalidade', data['Fonte'].unique(), index=data['Fonte'].unique().tolist().index('Venda'))
    data = data[data['Fonte'] == selected_fonte]
    # Add a dropdown for filtering "Tipo"
    selected_tipo = st.selectbox('Tipo de imóvel', data['Tipo'].unique(), index=data['Tipo'].unique().tolist().index('Apartamento'))
    data_tipo = data[data['Tipo'] == selected_tipo]
    custom_address = st.text_input('Informe o endereço', custom_address_initial)
    radius_visible = True  # Show radius slider for custom coordinates
    # NOTE: avoid shipping a hardcoded API key; Streamlit's st.secrets can hold it instead.
    gmaps = googlemaps.Client(key='AIzaSyDoJ6C7NE2CHqFcaHTnhreOfgJeTk4uSH0')  # Replace with your API key
    try:
        # Ensure custom_address ends with " - RS, Brazil"
        custom_address = custom_address.strip()  # Remove leading/trailing whitespace
        if not custom_address.endswith(" - RS, Brazil"):
            custom_address += " - RS, Brazil"
        location = gmaps.geocode(custom_address)[0]['geometry']['location']
        custom_lat, custom_lon = location['lat'], location['lng']
    except (IndexError, GeocoderTimedOut):
        st.error("Erro: Não foi possível geocodificar o endereço fornecido. Por favor, verifique e tente novamente.")
    # Conditionally render the radius slider
    if radius_visible:
        radius_in_meters = st.number_input('Selecione raio (em metros)', min_value=0, max_value=100000, value=2000)
    # Add inputs to filter data based on area
    #atotal_range = st.slider('Área Total', float(data_tipo['Atotal'].min()), float(data_tipo['Atotal'].max()), (float(data_tipo['Atotal'].min()), float(data_tipo['Atotal'].max())), step=.1 if data_tipo['Atotal'].min() != data_tipo['Atotal'].max() else 0.1)
    #apriv_range = st.slider('Área Privativa', float(data_tipo['Apriv'].min()), float(data_tipo['Apriv'].max()), (float(data_tipo['Apriv'].min()), float(data_tipo['Apriv'].max())), step=.1 if data_tipo['Apriv'].min() != data_tipo['Apriv'].max() else 0.1)
    # Create two columns for Área Total inputs
    col1, col2 = st.columns(2)
    with col1:
        atotal_min = st.number_input('Área Total mínima',
                                     min_value=float(data_tipo['Atotal'].min()),
                                     max_value=float(data_tipo['Atotal'].max()),
                                     value=float(data_tipo['Atotal'].min()),
                                     step=0.1)
    with col2:
        atotal_max = st.number_input('Área Total máxima',
                                     min_value=float(data_tipo['Atotal'].min()),
                                     max_value=float(data_tipo['Atotal'].max()),
                                     value=float(data_tipo['Atotal'].max()),
                                     step=0.1)
    # Create two columns for Área Privativa inputs
    col3, col4 = st.columns(2)
    with col3:
        apriv_min = st.number_input('Área Privativa mínima',
                                    min_value=float(data_tipo['Apriv'].min()),
                                    max_value=float(data_tipo['Apriv'].max()),
                                    value=float(data_tipo['Apriv'].min()),
                                    step=0.1)
    with col4:
        apriv_max = st.number_input('Área Privativa máxima',
                                    min_value=float(data_tipo['Apriv'].min()),
                                    max_value=float(data_tipo['Apriv'].max()),
                                    value=float(data_tipo['Apriv'].max()),
                                    step=0.1)
    #data_tipo = data_tipo[(data_tipo['Atotal'].between(atotal_range[0], atotal_range[1])) &
    #(data_tipo['Apriv'].between(apriv_range[0], apriv_range[1]))]
    data_tipo = data_tipo[(data_tipo['Atotal'].between(atotal_min, atotal_max)) &
                          (data_tipo['Apriv'].between(apriv_min, apriv_max))]

# Links to other apps at the bottom of the sidebar
#st.sidebar.markdown(factor_html, unsafe_allow_html=True)
#st.sidebar.markdown(evo_html, unsafe_allow_html=True)
filtered_data = data_tipo[data_tipo.apply(lambda x: calculate_distance(x['latitude'], x['longitude'], custom_lat, custom_lon), axis=1) <= radius_in_meters]
filtered_data = filtered_data.dropna() # Drop rows with NaN values
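# The per-row geodesic call above is exact but runs in Python for every listing. A hedged
# sketch (an assumption, not part of the original app) of a vectorized haversine filter in
# plain NumPy, should the dataset grow large:
#     lat1 = np.radians(data_tipo['latitude'].to_numpy())
#     lon1 = np.radians(data_tipo['longitude'].to_numpy())
#     lat2, lon2 = np.radians(custom_lat), np.radians(custom_lon)
#     a = np.sin((lat2 - lat1) / 2) ** 2 + np.cos(lat1) * np.cos(lat2) * np.sin((lon2 - lon1) / 2) ** 2
#     dist_m = 2 * 6371000 * np.arcsin(np.sqrt(a))  # Earth radius ~6,371 km
#     filtered_data = data_tipo[dist_m <= radius_in_meters].dropna()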
# Add a custom CSS class to the map container
st.markdown(f"""<style>
.map {{
width: 100%;
height: 100vh;
}}
</style>""", unsafe_allow_html=True)
# Determine which area feature to use for prediction
filtered_data['area_feature'] = np.where(filtered_data['Apriv'] != 0, filtered_data['Apriv'], filtered_data['Atotal'])
# Define the target column based on conditions
filtered_data['target_column'] = np.where(filtered_data['Vunit_priv'] != 0, filtered_data['Vunit_priv'], filtered_data['Vunit_total'])
# Apply KNN and get predicted target values
predicted_target = knn_predict(filtered_data, 'target_column', ['latitude', 'longitude', 'area_feature']) # Update with your features
# Add predicted target values to filtered_data
filtered_data['Predicted_target'] = predicted_target
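# 'Predicted_target' now holds a KNN-smoothed unit value (Reais/m²) for each filtered listing.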
with st.container():
    # Define a PyDeck view state for the initial map view
    view_state = pdk.ViewState(latitude=filtered_data['latitude'].mean(), longitude=filtered_data['longitude'].mean(), zoom=zoom_level)
    # Define a PyDeck layer for plotting
    layer = pdk.Layer(
        "ScatterplotLayer",
        filtered_data,
        get_position=["longitude", "latitude"],
        get_color="[237, 181, 0, 160]",  # RGBA color; adjust opacity with the last number
        get_radius=100,  # Adjust dot size as needed
    )
    # Create a PyDeck map using the defined layer and view state
    deck_map = pdk.Deck(layers=[layer], initial_view_state=view_state, map_style="mapbox://styles/mapbox/light-v9")
    # Display the map in Streamlit
    st.pydeck_chart(deck_map)
    #st.map(filtered_data, zoom=zoom_level, use_container_width=True)
    st.write("Dados:", filtered_data)  # Debug: print filtered_data
    if st.button('Baixar planilha'):
        st.write("Preparando...")
        # Set up the file to be downloaded
        output_df = filtered_data
        # Create a BytesIO buffer to hold the Excel file
        excel_buffer = BytesIO()
        # Convert DataFrame to Excel and save to the buffer
        with pd.ExcelWriter(excel_buffer, engine="xlsxwriter") as writer:
            output_df.to_excel(writer, index=False, sheet_name="Sheet1")
        # Reset the buffer position to the beginning
        excel_buffer.seek(0)
        # Create a download link
        b64 = base64.b64encode(excel_buffer.read()).decode()
        href = f'<a href="data:application/vnd.openxmlformats-officedocument.spreadsheetml.sheet;base64,{b64}" download="sample_data.xlsx">Clique aqui para baixar a planilha</a>'
        #st.markdown(href, unsafe_allow_html=True)
        # Use st.empty() to create a placeholder and update it with the link
        download_placeholder = st.empty()
        download_placeholder.markdown(href, unsafe_allow_html=True)
    folium_layermap = folium.Map(location=[custom_lat, custom_lon], zoom_start=zoom_level, control_scale=True)
    # Add heatmap layers for 'Valor_Urb', 'Valor_Eqp', and 'RENDA'
    add_heatmap_layer(folium_layermap, filtered_data, 'Valor_Urb', 'RdBu_r')
    add_heatmap_layer(folium_layermap, filtered_data, 'Valor_Eqp', 'RdBu_r')
    add_heatmap_layer(folium_layermap, filtered_data, 'RENDA', 'RdBu_r')
    # Add layer control
    folium.LayerControl().add_to(folium_layermap)
    # Display the map using st_folium
    st_folium(folium_layermap, width=900, height=350)
k_threshold = 5
# Function to perform bootstrap on the predicted target values
def bootstrap_stats(bound_data, num_samples=1000):
    # Reshape the predicted target array
    bound_data = np.array(bound_data).reshape(-1, 1)
    # Bootstrap resampling
    bootstrapped_means = []
    for _ in range(num_samples):
        bootstrap_sample = np.random.choice(bound_data.flatten(), len(bound_data), replace=True)
        bootstrapped_means.append(np.mean(bootstrap_sample))
    # Calculate lower and higher bounds
    lower_bound = np.percentile(bootstrapped_means, 16.)
    higher_bound = np.percentile(bootstrapped_means, 84.)
    return lower_bound, higher_bound
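# The 16th and 84th percentiles of the bootstrapped means bound a ~68% interval,
# roughly ±1 standard error around the mean when the sampling distribution is near normal.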
# Apply KNN again over the predicted values (used below to check that predictions exist)
predicted_target = knn_predict(filtered_data, 'Predicted_target', ['latitude', 'longitude', 'area_feature'])
# Check if there are predictions to display
if 'Predicted_target' in filtered_data.columns and not np.all(predicted_target == 0):
    # Apply bootstrap to obtain lower and upper bounds
    lower_bound, higher_bound = bootstrap_stats(filtered_data['target_column'])
    mean_value = np.mean(filtered_data['Predicted_target'])
    # Display the results with custom styling
    st.markdown("## **Resultado da Análise Estatística**")
    st.write(f"Valor médio (Reais/m²) para as características selecionadas: ${mean_value:.2f}$ Reais")
    st.write(f"Os valores podem variar entre ${lower_bound:.2f}$ e ${higher_bound:.2f}$ Reais, dependendo das características dos imóveis.")
else:
    st.warning(f"**Dados insuficientes para inferência do valor. Mínimo necessário:** {k_threshold}")