Spaces:
Running
on
CPU Upgrade
Running
on
CPU Upgrade
seawolf2357
committed on
Update app.py
Browse files
app.py
CHANGED
@@ -1,9 +1,17 @@
|
|
1 |
import gradio as gr
|
2 |
import requests
|
3 |
import json
|
|
|
4 |
from datetime import datetime, timedelta
|
|
|
|
|
5 |
|
6 |
-
|
|
|
|
|
|
|
|
|
|
|
7 |
|
8 |
MAJOR_COUNTRIES = [
|
9 |
"United States", "United Kingdom", "Canada", "Australia", "Germany",
|
@@ -58,11 +66,8 @@ def search_serphouse(query, country, page=1, num_result=100):
|
|
58 |
|
59 |
def format_results_from_raw(results):
|
60 |
try:
|
61 |
-
# ๋๋ฒ๊ทธ ์ ๋ณด ์๋ต
|
62 |
-
debug_info = ""
|
63 |
-
|
64 |
if isinstance(results, dict) and "error" in results:
|
65 |
-
return "Error: " + results["error"],
|
66 |
|
67 |
if not isinstance(results, dict):
|
68 |
raise ValueError("๊ฒฐ๊ณผ๊ฐ ์ฌ์ ํ์์ด ์๋๋๋ค.")
|
@@ -83,10 +88,9 @@ def format_results_from_raw(results):
|
|
83 |
news_results = []
|
84 |
|
85 |
if not news_results:
|
86 |
-
return "๊ฒ์ ๊ฒฐ๊ณผ๊ฐ ์์ต๋๋ค.",
|
87 |
|
88 |
-
|
89 |
-
list_output = ""
|
90 |
|
91 |
for idx, result in enumerate(news_results, 1):
|
92 |
title = result.get("title", "์ ๋ชฉ ์์")
|
@@ -96,85 +100,124 @@ def format_results_from_raw(results):
|
|
96 |
time = result.get("time", result.get("date", "์ ์ ์๋ ์๊ฐ"))
|
97 |
image_url = result.get("img", result.get("thumbnail", ""))
|
98 |
|
99 |
-
|
100 |
-
|
101 |
-
|
102 |
-
|
103 |
-
|
104 |
-
|
105 |
-
|
106 |
-
|
107 |
-
|
108 |
-
<h4>{idx}. <a href="{link}" target="_blank">{title}</a></h4>
|
109 |
-
<p>{thumbnail_html}</p>
|
110 |
-
<p>์์ฝ: {snippet}</p>
|
111 |
-
<p>์ถ์ฒ: {channel} | ์๊ฐ: {time}</p>
|
112 |
-
<hr>
|
113 |
-
</div>
|
114 |
-
"""
|
115 |
-
list_output += list_item
|
116 |
|
117 |
-
return
|
118 |
|
119 |
except Exception as e:
|
120 |
error_message = f"๊ฒฐ๊ณผ ์ฒ๋ฆฌ ์ค ์ค๋ฅ ๋ฐ์: {str(e)}"
|
121 |
-
return "Error: " + error_message,
|
122 |
|
123 |
def serphouse_search(query, country):
|
124 |
# ํ์ด์ง์ ๊ฒฐ๊ณผ ์์ ๊ธฐ๋ณธ๊ฐ์ ์ค์ ํฉ๋๋ค.
|
125 |
page = 1
|
126 |
num_result = 100
|
127 |
results = search_serphouse(query, country, page, num_result)
|
128 |
-
|
129 |
-
return
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
130 |
|
131 |
css = """
|
132 |
footer {
|
133 |
visibility: hidden;
|
134 |
}
|
135 |
-
/* '๋ด์ค ๊ฒฐ๊ณผ'์ '๋๋ฒ๊ทธ ์ ๋ณด' ํญ ์จ๊ธฐ๊ธฐ */
|
136 |
-
#tab-๋ด์ค_๊ฒฐ๊ณผ, #tab-๋๋ฒ๊ทธ_์ ๋ณด {
|
137 |
-
display: none !important;
|
138 |
-
}
|
139 |
-
/* 'ํ์ด์ง'์ '๊ฒฐ๊ณผ ์' ์
๋ ฅ ์์ ์จ๊ธฐ๊ธฐ */
|
140 |
-
.slider-container {
|
141 |
-
display: none !important;
|
142 |
-
}
|
143 |
"""
|
144 |
|
145 |
# Gradio ์ธํฐํ์ด์ค ๊ตฌ์ฑ
|
146 |
-
with gr.Blocks(
|
147 |
gr.Markdown("๊ฒ์์ด๋ฅผ ์
๋ ฅํ๊ณ ์ํ๋ ๊ตญ๊ฐ๋ฅผ ์ ํํ๋ฉด, ๊ฒ์์ด์ ์ผ์นํ๋ 24์๊ฐ ์ด๋ด ๋ด์ค๋ฅผ ์ต๋ 100๊ฐ ์ถ๋ ฅํฉ๋๋ค.")
|
148 |
|
149 |
-
with gr.
|
150 |
with gr.Row():
|
151 |
query = gr.Textbox(label="๊ฒ์์ด")
|
152 |
country = gr.Dropdown(MAJOR_COUNTRIES, label="๊ตญ๊ฐ", value="South Korea")
|
153 |
-
|
154 |
-
# with gr.Row():
|
155 |
-
# page = gr.Slider(1, 10, 1, label="ํ์ด์ง")
|
156 |
-
# num_result = gr.Slider(1, 100, 100, label="๊ฒฐ๊ณผ ์")
|
157 |
|
158 |
-
|
|
|
159 |
|
160 |
-
|
161 |
-
|
162 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
163 |
|
164 |
-
|
165 |
-
list_output = gr.HTML(label="๋ฆฌ์คํธ ๊ฒฐ๊ณผ") # HTML๋ก ๋ณ๊ฒฝ
|
166 |
|
167 |
-
|
168 |
-
# debug_output = gr.Textbox(label="๋๋ฒ๊ทธ ์ ๋ณด", lines=10)
|
169 |
|
170 |
-
def
|
171 |
-
|
172 |
-
return
|
|
|
|
|
|
|
173 |
|
174 |
search_button.click(
|
175 |
search_and_display,
|
176 |
inputs=[query, country],
|
177 |
-
outputs=[
|
|
|
|
|
|
|
|
|
|
|
|
|
178 |
)
|
179 |
|
180 |
iface.launch(auth=("gini", "pick"))
|
|
|
1 |
import gradio as gr
|
2 |
import requests
|
3 |
import json
|
4 |
+
import os
|
5 |
from datetime import datetime, timedelta
|
6 |
+
from bs4 import BeautifulSoup # ์น ํ์ด์ง์์ ํ
์คํธ๋ฅผ ์ถ์ถํ๊ธฐ ์ํด ์ฌ์ฉ
|
7 |
+
from huggingface_hub import InferenceClient # LLM ์ฌ์ฉ์ ์ํด ํ์
|
8 |
|
9 |
+
# ํ์ํ ํจํค์ง ์ค์น (ํ์ํ ๊ฒฝ์ฐ ์ฃผ์์ ์ ๊ฑฐํ๊ณ ์คํ)
|
10 |
+
# !pip install bs4 huggingface_hub
|
11 |
+
|
12 |
+
# ํ๊ฒฝ ๋ณ์์์ API ํค ๊ฐ์ ธ์ค๊ธฐ (API ํค๋ ์์ ํ๊ฒ ๊ด๋ฆฌ๋์ด์ผ ํฉ๋๋ค)
|
13 |
+
API_KEY = os.getenv("SERPHOUSE_API_KEY") # ๋ณธ์ธ์ SerpHouse API ํค๋ฅผ ํ๊ฒฝ ๋ณ์๋ก ์ค์ ํ์ธ์.
|
14 |
+
HF_TOKEN = os.getenv("HF_TOKEN") # Hugging Face API ํ ํฐ์ ํ๊ฒฝ ๋ณ์๋ก ์ค์ ํ์ธ์.
|
15 |
|
16 |
MAJOR_COUNTRIES = [
|
17 |
"United States", "United Kingdom", "Canada", "Australia", "Germany",
|
|
|
66 |
|
67 |
def format_results_from_raw(results):
|
68 |
try:
|
|
|
|
|
|
|
69 |
if isinstance(results, dict) and "error" in results:
|
70 |
+
return "Error: " + results["error"], []
|
71 |
|
72 |
if not isinstance(results, dict):
|
73 |
raise ValueError("๊ฒฐ๊ณผ๊ฐ ์ฌ์ ํ์์ด ์๋๋๋ค.")
|
|
|
88 |
news_results = []
|
89 |
|
90 |
if not news_results:
|
91 |
+
return "๊ฒ์ ๊ฒฐ๊ณผ๊ฐ ์์ต๋๋ค.", []
|
92 |
|
93 |
+
articles = []
|
|
|
94 |
|
95 |
for idx, result in enumerate(news_results, 1):
|
96 |
title = result.get("title", "์ ๋ชฉ ์์")
|
|
|
100 |
time = result.get("time", result.get("date", "์ ์ ์๋ ์๊ฐ"))
|
101 |
image_url = result.get("img", result.get("thumbnail", ""))
|
102 |
|
103 |
+
articles.append({
|
104 |
+
"index": idx,
|
105 |
+
"title": title,
|
106 |
+
"link": link,
|
107 |
+
"snippet": snippet,
|
108 |
+
"channel": channel,
|
109 |
+
"time": time,
|
110 |
+
"image_url": image_url
|
111 |
+
})
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
112 |
|
113 |
+
return "", articles
|
114 |
|
115 |
except Exception as e:
|
116 |
error_message = f"๊ฒฐ๊ณผ ์ฒ๋ฆฌ ์ค ์ค๋ฅ ๋ฐ์: {str(e)}"
|
117 |
+
return "Error: " + error_message, []
|
118 |
|
119 |
def serphouse_search(query, country):
    """Run a SerpHouse news search and return formatted results.

    Always queries the first page with up to 100 results, then hands the
    raw payload to format_results_from_raw.

    Returns:
        (error_message, articles): error_message is "" on success;
        articles is a list of article dicts (empty on error).
    """
    # Fixed defaults: first page, maximum result count per request.
    page, num_result = 1, 100
    raw_payload = search_serphouse(query, country, page, num_result)
    error_message, articles = format_results_from_raw(raw_payload)
    return error_message, articles
|
126 |
+
|
127 |
+
# LLM ์ค์
|
128 |
+
hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=HF_TOKEN)
|
129 |
+
|
130 |
+
def summarize_article(url):
    """Download a news article and summarize it in Korean via the LLM.

    Extracts the text of every <p> element on the page, then asks the
    module-level hf_client model for a 3-sentence Korean summary.

    Args:
        url: Article URL to fetch and summarize.

    Returns:
        The model's summary string, or a human-readable error string on any
        failure — this function never raises, so the Gradio callback that
        uses it cannot crash the UI.
    """
    try:
        # Fix: add a timeout so an unresponsive news site cannot hang the
        # request (and therefore the UI) indefinitely. The original call
        # had no timeout at all.
        response = requests.get(url, timeout=10)
        response.raise_for_status()
        soup = BeautifulSoup(response.text, 'html.parser')

        # Naive extraction: join the text of all paragraph elements.
        text = ' '.join(p.get_text() for p in soup.find_all('p'))
        if not text.strip():
            return "๊ธฐ์ฌ ๋ด์ฉ์ ๊ฐ์ ธ์ฌ ์ ์์ต๋๋ค."

        # Ask the model for a 3-sentence Korean summary of the article text.
        prompt = f"๋ค์ ์์ด ๊ธฐ์ฌ๋ฅผ ํ๊ตญ์ด๋ก 3๋ฌธ์ฅ์ผ๋ก ์์ฝํ์ธ์:\n{text}"
        summary = hf_client.text_generation(prompt, max_new_tokens=500)
        return summary
    except Exception as e:
        return f"์์ฝ ์ค ์ค๋ฅ ๋ฐ์: {str(e)}"
|
147 |
|
148 |
css = """
|
149 |
footer {
|
150 |
visibility: hidden;
|
151 |
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
152 |
"""
|
153 |
|
154 |
# Gradio ์ธํฐํ์ด์ค ๊ตฌ์ฑ
|
155 |
+
with gr.Blocks(css=css, title="NewsAI ์๋น์ค") as iface:
|
156 |
gr.Markdown("๊ฒ์์ด๋ฅผ ์
๋ ฅํ๊ณ ์ํ๋ ๊ตญ๊ฐ๋ฅผ ์ ํํ๋ฉด, ๊ฒ์์ด์ ์ผ์นํ๋ 24์๊ฐ ์ด๋ด ๋ด์ค๋ฅผ ์ต๋ 100๊ฐ ์ถ๋ ฅํฉ๋๋ค.")
|
157 |
|
158 |
+
with gr.Column():
|
159 |
with gr.Row():
|
160 |
query = gr.Textbox(label="๊ฒ์์ด")
|
161 |
country = gr.Dropdown(MAJOR_COUNTRIES, label="๊ตญ๊ฐ", value="South Korea")
|
162 |
+
search_button = gr.Button("๊ฒ์")
|
|
|
|
|
|
|
163 |
|
164 |
+
output_table = gr.HTML()
|
165 |
+
summary_output = gr.Markdown(visible=False)
|
166 |
|
167 |
+
def search_and_display(query, country):
    """Search news and render the results as an HTML table.

    Args:
        query: Search term.
        country: Country name selected in the dropdown.

    Returns:
        (html, summary_update): html is either an error paragraph or the
        article table (plus an inline <script>); summary_update toggles the
        summary pane's visibility and clears its value.
    """
    import html  # local import: escaping for untrusted article fields

    error_message, articles = serphouse_search(query, country)
    if error_message:
        return f"<p>{error_message}</p>", gr.update(visible=False)

    table_html = "<table border='1' style='width:100%; text-align:left;'><tr><th>๋ฒํธ</th><th>์ ๋ชฉ</th><th>์ถ์ฒ</th><th>์๊ฐ</th><th>๋ถ์</th></tr>"
    for article in articles:
        # Fix: escape external article data before interpolating it into
        # markup — a quote or angle bracket in a title/link would otherwise
        # break the table HTML and the inline onclick handler.
        safe_link = html.escape(article['link'], quote=True)
        safe_title = html.escape(article['title'])
        analyze_button = f"""<button onclick="analyzeArticle('{safe_link}')">๋ถ์</button>"""
        row = f"""
        <tr>
            <td>{article['index']}</td>
            <td><a href="{safe_link}" target="_blank">{safe_title}</a></td>
            <td>{html.escape(article['channel'])}</td>
            <td>{html.escape(article['time'])}</td>
            <td>{analyze_button}</td>
        </tr>
        """
        table_html += row
    table_html += "</table>"

    # NOTE(review): current Gradio versions sanitize gr.HTML content, so this
    # <script> block may never execute in the browser — confirm in the UI.
    js_code = """
    <script>
    function analyzeArticle(url) {
        // Gradio์ handle_function์ ์ฌ์ฉํ์ฌ Python ํจ์ ํธ์ถ
        gradioApp().querySelector('#article_url_input textarea').value = url;
        gradioApp().querySelector('#analyze_button').click();
    }
    </script>
    """

    full_html = table_html + js_code
    # Show the (cleared) summary pane alongside the fresh result table.
    return full_html, gr.update(visible=True, value="")
|
|
|
203 |
|
204 |
+
def analyze_article(url):
    """Thin callback wrapper: return the LLM summary for the article at url."""
    return summarize_article(url)
|
207 |
+
|
208 |
+
article_url_input = gr.Textbox(visible=False, elem_id="article_url_input")
|
209 |
+
analyze_button = gr.Button("๋ถ์", visible=False, elem_id="analyze_button")
|
210 |
|
211 |
search_button.click(
|
212 |
search_and_display,
|
213 |
inputs=[query, country],
|
214 |
+
outputs=[output_table, summary_output]
|
215 |
+
)
|
216 |
+
|
217 |
+
analyze_button.click(
|
218 |
+
analyze_article,
|
219 |
+
inputs=[article_url_input],
|
220 |
+
outputs=[summary_output]
|
221 |
)
|
222 |
|
223 |
iface.launch(auth=("gini", "pick"))
|