Update app.py
app.py (changed)
@@ -10,19 +10,23 @@ from urllib.parse import parse_qs
 from cachetools import TTLCache
 
 # Create a TTL cache: at most 1000 items, each valid for one hour
-cache = TTLCache(maxsize=1000, ttl=
+cache = TTLCache(maxsize=1000, ttl=3600)
 
-async def fetch_url(url, session, max_retries=3):
+async def fetch_url(url, session, max_retries=3, timeout=180):
     for attempt in range(max_retries):
         try:
-            async with session.get(url, timeout=ClientTimeout(total=
+            async with session.get(url, timeout=ClientTimeout(total=timeout)) as response:
                 response.raise_for_status()
                 return await response.text()
+        except asyncio.TimeoutError:
+            print(f"Attempt {attempt + 1} timed out after {timeout} seconds", flush=True)
         except aiohttp.ClientError as e:
             print(f"Attempt {attempt + 1} failed: {str(e)}", flush=True)
-
-
-            await asyncio.sleep(
+
+        if attempt < max_retries - 1:
+            await asyncio.sleep(5)  # wait 5 seconds before retrying
+
+    raise Exception(f"Failed to fetch URL after {max_retries} attempts")
 
 async def extract_and_transform_proxies(input_text):
     try:
@@ -130,7 +134,7 @@ async def handle_request(request):
         try:
             print(f"Fetching URL: {url}", flush=True)
             async with aiohttp.ClientSession(connector=TCPConnector(ssl=False)) as session:
-                input_text = await fetch_url(url, session)
+                input_text = await fetch_url(url, session, max_retries=3, timeout=180)
                 print(f"URL content length: {len(input_text)}", flush=True)
                 result = await extract_and_transform_proxies(input_text)
                 print(f"Transformed result length: {len(result)}", flush=True)
@@ -140,9 +144,9 @@ async def handle_request(request):
 
                 return web.Response(text=result, content_type='text/plain')
         except Exception as e:
-
-
-            return web.Response(text=
+            error_message = f"Error processing request: {str(e)}\n{traceback.format_exc()}"
+            print(error_message, flush=True)
+            return web.Response(text=error_message, status=500)
     else:
         usage_guide = """
         <html>
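
The revised fetch_url can be exercised on its own. The following is a minimal sketch, not part of this commit: the import path and the example URL are assumptions, and the max_retries/timeout arguments simply mirror what handle_request now passes.

import asyncio
import aiohttp
from aiohttp import TCPConnector

from app import fetch_url  # assumed import path; app.py is expected to expose fetch_url at module level

async def main():
    async with aiohttp.ClientSession(connector=TCPConnector(ssl=False)) as session:
        # Same arguments handle_request now uses: 3 attempts, 180-second total timeout per attempt
        body = await fetch_url("https://example.com/subscription", session, max_retries=3, timeout=180)
        print(f"Fetched {len(body)} characters", flush=True)

asyncio.run(main())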
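The hunks above create a module-level TTLCache but do not show it being read or written anywhere. One plausible way to use it, sketched here as an assumption rather than a description of the rest of app.py, is a thin wrapper that keys fetched response bodies by URL (fetch_url_cached is a hypothetical helper, not in the diff):

from cachetools import TTLCache

from app import fetch_url  # assumed import path, as above

cache = TTLCache(maxsize=1000, ttl=3600)  # same parameters as the commit: 1000 entries, 1-hour lifetime

async def fetch_url_cached(url, session, max_retries=3, timeout=180):
    # Return a cached body when present; otherwise fetch it and remember it for up to an hour.
    if url in cache:
        return cache[url]
    text = await fetch_url(url, session, max_retries=max_retries, timeout=timeout)
    cache[url] = text
    return text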