# MoneyRadar / app-backup.py
import gradio as gr
import requests
import json
from datetime import datetime, timedelta
API_KEY = "V38CNn4HXpLtynJQyOeoUensTEYoFy8PBUxKpDqAW1pawT1vfJ2BWtPQ98h6"
MAJOR_COUNTRIES = [
    "United States", "United Kingdom", "Canada", "Australia", "Germany",
    "France", "Japan", "South Korea", "China", "India",
    "Brazil", "Mexico", "Russia", "Italy", "Spain",
    "Netherlands", "Sweden", "Switzerland", "Norway", "Denmark",
    "Finland", "Belgium", "Austria", "New Zealand", "Ireland",
    "Singapore", "Hong Kong", "Israel", "United Arab Emirates", "Saudi Arabia",
    "South Africa", "Turkey", "Egypt", "Poland", "Czech Republic",
    "Hungary", "Greece", "Portugal", "Argentina", "Chile",
    "Colombia", "Peru", "Venezuela", "Thailand", "Malaysia",
    "Indonesia", "Philippines", "Vietnam", "Pakistan", "Bangladesh"
]
def search_serphouse(query, country, page, num_result):
    """Query the SERPHouse live endpoint for news results from the last 24 hours."""
    url = "https://api.serphouse.com/serp/live"

    # Restrict the search to the last 24 hours.
    now = datetime.utcnow()
    yesterday = now - timedelta(days=1)
    date_range = f"{yesterday.strftime('%Y-%m-%d')},{now.strftime('%Y-%m-%d')}"

    payload = {
        "data": {
            "q": query,
            "domain": "google.com",
            "loc": country,
            "lang": "en",
            "device": "desktop",
            "serp_type": "news",
            "page": str(page),
            "verbatim": "1",
            "num": str(num_result),
            "date_range": date_range
        }
    }
    headers = {
        "accept": "application/json",
        "content-type": "application/json",
        "authorization": f"Bearer {API_KEY}"
    }

    response = None
    try:
        response = requests.post(url, json=payload, headers=headers)
        response.raise_for_status()
        return response.json()
    except requests.RequestException as e:
        error_msg = f"Error: {str(e)}"
        # Only append the body if a response was actually received (e.g. on a 4xx/5xx),
        # otherwise `response` would still be None after a connection failure.
        if response is not None and response.text:
            error_msg += f"\nResponse content: {response.text}"
        return {"error": error_msg}
def format_results(results):
    """Render the API response as HTML: a list of articles plus a debug panel."""
    all_results = "<h2>All news results (last 24 hours)</h2>"
    debug_info = "<h2>Debug info</h2>"

    if isinstance(results, dict) and "error" in results:
        all_results += f"<p>An error occurred: {results['error']}</p>"
        debug_info += f"<pre>{results['error']}</pre>"
        return all_results, debug_info

    debug_info += f"<pre>{json.dumps(results, indent=2, ensure_ascii=False)}</pre>"

    try:
        if not isinstance(results, dict):
            raise ValueError("The result is not a dictionary.")
        if "results" not in results:
            raise ValueError("The response has no 'results' key.")

        news_results = results["results"].get("news", [])
        debug_info += f"<p>Number of news results: {len(news_results)}</p>"

        if not news_results:
            all_results += "<p>No search results found.</p>"
        else:
            all_results += "<ol>"
            for result in news_results:
                title = result.get("title", "No title")
                url = result.get("url", "#")
                snippet = result.get("snippet", "No content")
                channel = result.get("channel", "Unknown")
                time_str = result.get("time", "Unknown time")
                article_info = f"""
                <li>
                    <h3><a href="{url}" target="_blank">{title}</a></h3>
                    <p>{snippet}</p>
                    <p><strong>Source:</strong> {channel} - {time_str}</p>
                </li>
                """
                all_results += article_info
            all_results += "</ol>"
    except Exception as e:
        error_message = f"An error occurred while processing the results: {str(e)}"
        debug_info += f"<p>{error_message}</p>"
        all_results += f"<p>{error_message}</p>"

    return all_results, debug_info
def serphouse_search(query, country, page, num_result):
    results = search_serphouse(query, country, page, num_result)
    all_results, debug_info = format_results(results)
    return all_results, debug_info
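# Minimal usage sketch outside the UI (hypothetical values): fetch one page of ten
# results and get back the rendered HTML plus the debug panel.
# html_out, debug_out = serphouse_search("interest rates", "United States", 1, 10)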
css = """
footer {
visibility: hidden;
}
ol {
padding-left: 20px;
}
li {
margin-bottom: 20px;
}
"""
iface = gr.Interface(
    fn=serphouse_search,
    inputs=[
        gr.Textbox(label="Search query"),
        gr.Dropdown(MAJOR_COUNTRIES, label="Country"),
        gr.Slider(1, 10, 1, label="Page"),
        gr.Slider(1, 100, 10, label="Number of results")
    ],
    outputs=[
        gr.HTML(label="All results"),
        gr.HTML(label="Debug info")
    ],
    title="News Search Interface (last 24 hours)",
    description="Enter a search query and select a country to fetch news results from the last 24 hours.",
    theme="Nymbo/Nymbo_Theme",
    css=css
)
iface.launch(auth=("gini","pick"))