Here's an enhanced version of the code with advanced features:
```python
import asyncio
import logging
import os
from datetime import datetime
from typing import List, Dict, Any

import gradio as gr
import pandas as pd
import requests
from bs4 import BeautifulSoup
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import train_test_split
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.future import select
from sqlalchemy.orm import sessionmaker
# Global variables for the database session, engine, and background task.
db_session = None
engine = None
monitoring_task = None
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
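
# The queries below reference an Article ORM model that is never defined in
# the original. A minimal sketch (assumption): adjust the table and column
# names to match your actual schema.
from sqlalchemy import Column, DateTime, Integer, String, Text
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Article(Base):
    __tablename__ = 'articles'  # Assumed table name.
    id = Column(Integer, primary_key=True)
    title = Column(String(255))
    url = Column(String(2048))
    content = Column(Text)
    timestamp = Column(DateTime, default=datetime.utcnow)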

# Function for dynamically setting the database connection.
async def set_db_connection(host: str, port: str, user: str, password: str, db_name: str):
    global db_session, engine
    try:
        # The aiomysql driver must be installed for this URL scheme.
        engine = create_async_engine(f"mysql+aiomysql://{user}:{password}@{host}:{port}/{db_name}", echo=False)
        Session = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False)
        db_session = Session()
        return "Database connection established."
    except Exception as e:
        logger.error(f"Failed to establish database connection: {e}")
        return f"Failed to connect to database: {e}"

# Periodic feed updater with error handling and logging improvements.
async def periodic_update_with_error_handling():
    while True:
        try:
            await asyncio.sleep(300)  # Wait five minutes between refreshes.
            await update_feed_content()  # Rebuild the feed content.
        except Exception as e:  # Log any failure and keep the loop alive.
            logger.error(f"Error in periodic update: {e}")

# Function to fetch RSS feed content for the provided URL, with error
# handling and a guard against querying before a connection exists.
async def fetch_feed_content(feed_url: str) -> Dict[str, Any]:
    if db_session is None:
        logger.warning("Feed requested before a database connection was set.")
        return {}
    try:
        result = await db_session.execute(select(Article).order_by(Article.timestamp.desc()).limit(20))
        articles = result.scalars().all()  # Fetch the latest articles.
        return {
            'title': 'Website Changes Feed',
            'link': feed_url,
            'description': 'Feed of changes detected on monitored websites.',
            'items': [
                {
                    'title': article.title,
                    'link': article.url,
                    'description': article.content,
                    'pubDate': str(article.timestamp),
                }
                for article in articles
            ],
        }
    except Exception as e:
        logger.error(f"Error fetching feed content: {e}")
        return {}

# Function to scrape website content. requests is blocking, so run it in a
# worker thread to avoid stalling the event loop.
async def scrape_website(url: str) -> str:
    try:
        response = await asyncio.to_thread(requests.get, url, timeout=10)
        soup = BeautifulSoup(response.text, 'html.parser')
        return soup.get_text()
    except Exception as e:
        logger.error(f"Error scraping website: {e}")
        return ""

# Function to analyze website content.
async def analyze_website_content(content: str) -> Dict[str, Any]:
    try:
        # A deliberately simple stand-in for real NLP sentiment analysis:
        # compare keyword counts, case-insensitively.
        text = content.lower()
        sentiment = "Positive" if text.count("good") > text.count("bad") else "Negative"
        return {'sentiment': sentiment}
    except Exception as e:
        logger.error(f"Error analyzing website content: {e}")
        return {}

# Function to predict website traffic. The original tried to fit a regression
# on a single string feature, which cannot work: train_test_split needs more
# than one sample and LinearRegression needs numeric inputs. Until real
# traffic history is wired in, fit on dummy numeric data (hour vs. visits).
async def predict_website_traffic(url: str) -> Dict[str, Any]:
    try:
        X = pd.DataFrame({'hour': range(24)})
        y = pd.Series([100 + 5 * h for h in range(24)], name='traffic')  # Placeholder data.
        X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
        model = LinearRegression()
        model.fit(X_train, y_train)
        y_pred = model.predict(X_test)
        return {'url': url, 'predicted_traffic': float(y_pred.mean())}
    except Exception as e:
        logger.error(f"Error predicting website traffic: {e}")
        return {}
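
# chatbot_response() is wired to the Send button below but never defined in
# the original; a minimal echo sketch (assumption), using the 'messages'
# format expected by gr.Chatbot(type='messages').
async def chatbot_response(message: str, history: List[Dict[str, str]]):
    history = history or []
    history.append({'role': 'user', 'content': message})
    history.append({'role': 'assistant', 'content': f"You said: {message}"})
    return history, ""  # Clear the input box after sending.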

# Main application that runs the Gradio UI and background tasks.
async def main():
    demo = gr.Blocks()
    # Define the Gradio interface.
    with demo:
        gr.Markdown("# Website Monitor and Chatbot")
        with gr.Row():
            with gr.Column():
                gr.Markdown("## Database Settings")
                db_host = gr.Textbox(label="Database Host", placeholder="localhost", value="localhost")
                db_port = gr.Textbox(label="Database Port", placeholder="3306", value="3306")
                db_user = gr.Textbox(label="Database User", placeholder="username", value="")
                db_pass = gr.Textbox(label="Database Password", placeholder="password", type="password", value="")
                db_name = gr.Textbox(label="Database Name", placeholder="database_name", value="monitoring")
                db_status_textbox = gr.Textbox(label="Database Status", interactive=False)
                status_text = gr.Textbox(label="Status", interactive=False)
                gr.Markdown("## RSS Feed Reader Settings")
                feed_target_url = gr.Textbox(label="RSS Feed Target URL", placeholder="http://yourwebsite.com/feed")
                view_button = gr.Button("View Feed")
                target_urls = gr.Textbox(label="Target URLs (comma-separated)", placeholder="https://example.com, https://another-site.com")
                storage_location = gr.Textbox(label="Storage Location (CSV file path)", placeholder="/path/to/your/file.csv")
                feed_rss_checkbox = gr.Checkbox(label="Enable RSS Feed")
                start_button = gr.Button("Start Monitoring")
                stop_button = gr.Button("Stop Monitoring")
            with gr.Column():
                feed_content = gr.JSON(label="RSS Feed Content")
                chatbot_interface = gr.Chatbot(type='messages')
                message_input = gr.Textbox(placeholder="Type your message here...")
                send_button = gr.Button("Send")
                scrape_button = gr.Button("Scrape Website")
                analyze_button = gr.Button("Analyze Website Content")
                predict_button = gr.Button("Predict Website Traffic")
                scrape_output = gr.Textbox(label="Scraped Website Content", interactive=False)
                analyze_output = gr.JSON(label="Website Content Analysis")
                predict_output = gr.JSON(label="Website Traffic Prediction")

        # Define button actions.
        async def on_start_click(target_urls_str: str, storage_loc: str, feed_enabled: bool, host: str, port: str, user: str, password: str, db_name: str):
            global monitoring_task
            urls = [url.strip() for url in target_urls_str.split(",") if url.strip()]
            await set_db_connection(host, port, user, password, db_name)
            monitoring_task = asyncio.create_task(start_monitoring(urls, storage_loc, feed_enabled))
            return "Monitoring started."

        async def on_stop_click():
            global monitoring_task
            if monitoring_task:
                monitoring_task.cancel()
                monitoring_task = None
            return "Monitoring stopped."

        async def on_view_feed_click(feed_url: str):
            return await fetch_feed_content(feed_url)

        async def on_scrape_click(url: str):
            # Scrape the first URL if several are given.
            return await scrape_website(url.split(",")[0].strip())

        async def on_analyze_click(content: str):
            return await analyze_website_content(content)

        async def on_predict_click(url: str):
            return await predict_website_traffic(url)

        # Wire the buttons to their handlers. The original never connected
        # start_button, so on_start_click could never run.
        start_button.click(
            on_start_click,
            inputs=[target_urls, storage_location, feed_rss_checkbox, db_host, db_port, db_user, db_pass, db_name],
            outputs=[status_text],
        )
        stop_button.click(on_stop_click, outputs=[status_text])
        view_button.click(on_view_feed_click, inputs=[feed_target_url], outputs=[feed_content])
        scrape_button.click(on_scrape_click, inputs=[target_urls], outputs=[scrape_output])
        analyze_button.click(on_analyze_click, inputs=[scrape_output], outputs=[analyze_output])
        predict_button.click(on_predict_click, inputs=[target_urls], outputs=[predict_output])
        send_button.click(chatbot_response, inputs=[message_input, chatbot_interface], outputs=[chatbot_interface, message_input])

        # Set up a timer for periodic feed refreshes (the argument is seconds).
        feed_updater = gr.Timer(300)
        feed_updater.tick(fn=update_feed_content, outputs=feed_content)
        # Load and check database status when the UI is loaded.
        demo.load(update_db_status, outputs=db_status_textbox)

    asyncio.create_task(periodic_update_with_error_handling())
    # Launch without blocking the event loop, then keep the loop alive so the
    # background tasks and the timer can run.
    demo.launch(prevent_thread_lock=True)
    await asyncio.Event().wait()

# Launch the app using asyncio.
if __name__ == "__main__":
    asyncio.run(main())
```
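
A few caveats on this sketch: the `Article` model and the `update_feed_content`, `update_db_status`, `start_monitoring`, and `chatbot_response` helpers are minimal placeholders, so swap in your real schema and logic. You'll need `gradio`, `sqlalchemy`, `aiomysql`, `requests`, `beautifulsoup4`, `pandas`, and `scikit-learn` installed, and the traffic predictor is fitted on dummy numbers until real traffic history is available.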