acecalisto3 committed on
Commit 7fae359
1 Parent(s): 97fc579

Update app2.py

Files changed (1)
  1. app2.py +47 -151
app2.py CHANGED
@@ -2,7 +2,6 @@ import asyncio
 import gradio as gr
 import logging
 import os
-import sys
 import requests
 from bs4 import BeautifulSoup
 from datetime import datetime
@@ -16,77 +15,27 @@ from dotenv import load_dotenv
 from pydantic_settings import BaseSettings

 # --- Configuration ---
-load_dotenv() # Load environment variables from .env file
+load_dotenv()

 class Settings(BaseSettings):
-    # Define the database fields explicitly
+    # Database settings
     DB_HOST: str
     DB_USER: str
     DB_PASSWORD: str
     DB_NAME: str
-    database_type: str
-    database_port: int
-    chatbot_api_key: str
-    rss_feed_url: str
-    storage_location: str
+    DB_TYPE: str = "mysql" # Default value
+    DB_PORT: int = 3306 # Default value
+
+    # API and storage settings
+    CHATBOT_API_KEY: str
+    RSS_FEED_URL: str
+    STORAGE_LOCATION: str = "data" # Default value

     class Config:
-        # Optional: if you want to allow extra fields
-        extra = "allow"
-
-settings = Settings()
-
-# --- Database Connection ---
-def get_db_url(settings: Settings) -> str:
-    if settings.database_type == "mysql":
-        return f"mysql+aiomysql://{settings.db_user}:{settings.db_password}@{settings.db_host}:{settings.database_port}/{settings.db_name}"
-    elif settings.database_type == "postgresql":
-        return f"postgresql+asyncpg://{settings.db_user}:{settings.db_password}@{settings.db_host}:{settings.database_port}/{settings.db_name}"
-    else:
-        return "sqlite+aiosqlite:///default.db"
-
-async def set_db_connection(
-    db_type: str = None,
-    db_host: str = None,
-    db_port: int = None,
-    db_user: str = None,
-    db_password: str = None,
-    db_name: str = None
-):
-    global db_session, engine, settings
-    try:
-        # Update settings if new values provided
-        if db_type:
-            settings.database_type = db_type
-        if db_host:
-            settings.db_host = db_host
-        if db_port:
-            settings.database_port = db_port
-        if db_user:
-            settings.db_user = db_user
-        if db_password:
-            settings.db_password = db_password
-        if db_name:
-            settings.db_name = db_name
-
-        # Close existing connection if any
-        if db_session:
-            await db_session.close()
-        if engine:
-            await engine.dispose()
-
-        # Create new connection
-        db_url = get_db_url(settings)
-        engine = create_async_engine(db_url, echo=False)
-        async_session_maker = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False)
-        db_session = async_session_maker()
-        logger.info("Database connection established.")
-        return "Database connection established."
-    except Exception as e:
-        logger.error(f"Failed to establish database connection: {e}")
-        return f"Failed to connect to database: {e}"
+        env_file = ".env"
+        case_sensitive = False

-# --- Database Model (Example) ---
+# --- Database Model ---
 class Article:
     def __init__(self, title, url, content, timestamp):
         self.title = title
@@ -95,6 +44,7 @@ class Article:
         self.timestamp = timestamp

 # --- Global Variables ---
+settings = Settings()
 db_session = None
 engine = None
 monitoring_task = None
@@ -102,13 +52,19 @@ logger = logging.getLogger(__name__)
 logger.setLevel(logging.DEBUG)

 # --- Database Connection ---
+def get_db_url() -> str:
+    if settings.DB_TYPE == "mysql":
+        return f"mysql+aiomysql://{settings.DB_USER}:{settings.DB_PASSWORD}@{settings.DB_HOST}:{settings.DB_PORT}/{settings.DB_NAME}"
+    elif settings.DB_TYPE == "postgresql":
+        return f"postgresql+asyncpg://{settings.DB_USER}:{settings.DB_PASSWORD}@{settings.DB_HOST}:{settings.DB_PORT}/{settings.DB_NAME}"
+    else:
+        return "sqlite+aiosqlite:///default.db"
+
 async def set_db_connection():
     global db_session, engine
     try:
-        engine = create_async_engine(
-            f"mysql+aiomysql://{settings.db_user}:{settings.db_password}@{settings.db_host}:{settings.database_port}/{settings.db_name}",
-            echo=False,
-        )
+        db_url = get_db_url()
+        engine = create_async_engine(db_url, echo=False)
         async_session_maker = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False)
         db_session = async_session_maker()
         logger.info("Database connection established.")
@@ -147,7 +103,6 @@ async def store_data(url: str, content: str, analysis: Dict[str, Any]):

 async def update_feed_content():
     try:
-        # Fetch RSS feed content from database
         async with db_session as session:
             articles = await session.execute(select(Article))
             feed_content = []
@@ -176,32 +131,20 @@ async def scrape_website(url: str) -> str:

 async def analyze_website_content(content: str) -> Dict[str, Any]:
     try:
-        # Perform sentiment analysis or other analysis
         sentiment = "Positive" if content.count("good") > content.count("bad") else "Negative"
         return {"sentiment": sentiment}
     except Exception as e:
         logger.error(f"Error analyzing website content: {e}")
         return {}

-# --- Website Traffic Prediction ---
-async def predict_website_traffic(url: str) -> Dict[str, Any]:
-    try:
-        # ... (Your machine learning model for traffic prediction) ...
-        return {"traffic": 100} # Placeholder
-    except Exception as e:
-        logger.error(f"Error predicting website traffic: {e}")
-        return {}
-
 # --- Chatbot Integration ---
 async def chatbot_response(message: str, chat_history: List[Dict[str, str]]) -> List[Dict[str, str]]:
     try:
-        if not settings.chatbot_api_key:
+        if not settings.CHATBOT_API_KEY:
             raise ValueError("Chatbot API key is not set.")
-        # ... (Your code to call the chatbot API) ...
-        # Example using a hypothetical API:
         response = requests.post(
             "https://your-chatbot-api.com/chat",
-            json={"message": message, "api_key": settings.chatbot_api_key},
+            json={"message": message, "api_key": settings.CHATBOT_API_KEY},
         )
         response.raise_for_status()
         bot_response = response.json()["response"]
@@ -215,19 +158,16 @@ async def chatbot_response(message: str, chat_history: List[Dict[str, str]]) ->

 # --- Database Status ---
 async def update_db_status():
-    global db_session, engine
     if db_session and engine:
         try:
             await db_session.execute(select(1))
             return "Database connection is active."
         except SQLAlchemyError as e:
             return f"Database error: {e}"
-    else:
-        return "Database connection not established."
+    return "Database connection not established."

 # --- Gradio UI ---
 async def main():
-    global db_session, monitoring_task
     demo = gr.Blocks()

     with demo:
@@ -240,7 +180,8 @@ async def main():

         gr.Markdown("## RSS Feed Reader Settings")
         view_button = gr.Button("View Feed")
-        target_urls = gr.Textbox(label="Target URLs (comma-separated)", placeholder="https://example.com, https://another-site.com")
+        target_urls = gr.Textbox(label="Target URLs (comma-separated)",
+                                 placeholder="https://example.com, https://another-site.com")
         feed_rss_checkbox = gr.Checkbox(label="Enable RSS Feed")
         start_button = gr.Button("Start Monitoring")
         stop_button = gr.Button("Stop Monitoring")
@@ -252,12 +193,26 @@
         send_button = gr.Button("Send")
         scrape_button = gr.Button("Scrape Website")
         analyze_button = gr.Button("Analyze Website Content")
-        predict_button = gr.Button("Predict Website Traffic")
         scrape_output = gr.Textbox(label="Scraped Website Content", interactive=False)
         analyze_output = gr.JSON(label="Website Content Analysis")
-        predict_output = gr.JSON(label="Website Traffic Prediction")

         # --- Button Actions ---
+        async def on_start_click(target_urls_str: str, feed_enabled: bool):
+            urls = [url.strip() for url in target_urls_str.split(",")]
+            await set_db_connection()
+            global monitoring_task
+            monitoring_task = asyncio.create_task(
+                start_monitoring(urls, settings.STORAGE_LOCATION, feed_enabled)
+            )
+            return "Monitoring started."
+
+        async def on_stop_click():
+            global monitoring_task
+            if monitoring_task:
+                monitoring_task.cancel()
+                monitoring_task = None
+            return "Monitoring stopped."
+
         start_button.click(
             fn=on_start_click,
             inputs=[target_urls, feed_rss_checkbox],
@@ -265,31 +220,9 @@
         )

         stop_button.click(fn=on_stop_click, outputs=[status_text])
-
-        view_button.click(
-            fn=on_view_feed_click,
-            inputs=[],
-            outputs=[feed_content],
-        )
-
-        scrape_button.click(
-            fn=on_scrape_click,
-            inputs=[target_urls],
-            outputs=[scrape_output],
-        )
-
-        analyze_button.click(
-            fn=on_analyze_click,
-            inputs=[scrape_output],
-            outputs=[analyze_output],
-        )
-
-        predict_button.click(
-            fn=on_predict_click,
-            inputs=[target_urls],
-            outputs=[predict_output],
-        )
-
+        view_button.click(fn=update_feed_content, outputs=[feed_content])
+        scrape_button.click(fn=scrape_website, inputs=[target_urls], outputs=[scrape_output])
+        analyze_button.click(fn=analyze_website_content, inputs=[scrape_output], outputs=[analyze_output])
         send_button.click(
             fn=chatbot_response,
             inputs=[message_input, chatbot_interface],
@@ -297,47 +230,10 @@
         )

         # --- Periodic Updates ---
-        async def update_feed_periodically(feed_content):
-            while True:
-                await update_feed_content()
-                await asyncio.sleep(300) # Check every 5 minutes
-
-        feed_updater = asyncio.create_task(update_feed_periodically(feed_content))
-
-        # --- Load Database Status ---
         demo.load(fn=update_db_status, outputs=[db_status_textbox])

     # --- Launch Gradio ---
     await demo.launch()

-
-# --- Helper Functions ---
-async def on_start_click(target_urls_str: str, feed_enabled: bool):
-    global monitoring_task
-    urls = [url.strip() for url in target_urls_str.split(",")]
-    await set_db_connection()
-    monitoring_task = asyncio.create_task(start_monitoring(urls, settings.storage_location, feed_enabled))
-    return "Monitoring started."
-
-async def on_stop_click():
-    global monitoring_task
-    if monitoring_task:
-        monitoring_task.cancel()
-        monitoring_task = None
-    return "Monitoring stopped."
-
-async def on_view_feed_click():
-    return await update_feed_content()
-
-async def on_scrape_click(url: str):
-    return await scrape_website(url)
-
-async def on_analyze_click(content: str):
-    return await analyze_website_content(content)
-
-async def on_predict_click(url: str):
-    return await predict_website_traffic(url)
-
-# --- Main Execution ---
 if __name__ == "__main__":
     asyncio.run(main())
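
Not part of the commit: a minimal, self-contained sketch of how the renamed settings fields and the connection-URL selection introduced above fit together, for trying them outside this app. The class name DemoSettings, the helper build_db_url, and the sample credential values are hypothetical; only the field names (DB_HOST, DB_USER, DB_PASSWORD, DB_NAME, DB_TYPE, DB_PORT) and the dialect-to-URL mapping come from the diff.

# Hypothetical demo (not from app2.py); mirrors the field names and URL formats above.
from pydantic_settings import BaseSettings


class DemoSettings(BaseSettings):
    # Same field names as the updated Settings class; the defaults here are sample values.
    DB_HOST: str = "localhost"
    DB_USER: str = "app"
    DB_PASSWORD: str = "secret"
    DB_NAME: str = "app_db"
    DB_TYPE: str = "mysql"
    DB_PORT: int = 3306


def build_db_url(s: DemoSettings) -> str:
    # Same dialect selection as get_db_url() in the new version of app2.py.
    if s.DB_TYPE == "mysql":
        return f"mysql+aiomysql://{s.DB_USER}:{s.DB_PASSWORD}@{s.DB_HOST}:{s.DB_PORT}/{s.DB_NAME}"
    if s.DB_TYPE == "postgresql":
        return f"postgresql+asyncpg://{s.DB_USER}:{s.DB_PASSWORD}@{s.DB_HOST}:{s.DB_PORT}/{s.DB_NAME}"
    return "sqlite+aiosqlite:///default.db"


if __name__ == "__main__":
    # Prints: mysql+aiomysql://app:secret@localhost:3306/app_db
    print(build_db_url(DemoSettings()))

With a .env file next to the app (as the new Config.env_file = ".env" suggests), the same fields would be populated from the environment rather than from in-code defaults.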