sfun committed on
Commit 08f92de
1 Parent(s): 7f1ed69

Update app.py

Files changed (1)
  1. app.py +16 -6
app.py CHANGED
@@ -17,9 +17,15 @@ async def fetch_url(url, session, max_retries=3, timeout=180):
         try:
             async with session.get(url, timeout=ClientTimeout(total=timeout)) as response:
                 response.raise_for_status()
-                return await response.text()
+                content = await response.read()
+                return content.decode('utf-8', errors='ignore')
         except asyncio.TimeoutError:
             print(f"Attempt {attempt + 1} timed out after {timeout} seconds", flush=True)
+        except aiohttp.ClientPayloadError as e:
+            print(f"Payload error on attempt {attempt + 1}: {str(e)}", flush=True)
+            if response.content_length and len(content) >= response.content_length:
+                print(f"Received data length: {len(content)}, expected: {response.content_length}", flush=True)
+                return content.decode('utf-8', errors='ignore')
         except aiohttp.ClientError as e:
             print(f"Attempt {attempt + 1} failed: {str(e)}", flush=True)
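In this hunk, the new `except aiohttp.ClientPayloadError` branch reads `content` and `response` after the `async with` block has unwound. Since `await response.read()` is the call that raises, `content` is unbound on a first-attempt failure (a `NameError` risk) unless it is bound earlier. A minimal sketch of the surrounding retry loop with `content` pre-bound; the `for attempt in range(max_retries)` loop and the pause between attempts are assumptions, since the hunk does not show them:

    import asyncio
    import aiohttp
    from aiohttp import ClientTimeout

    async def fetch_url(url, session, max_retries=3, timeout=180):
        content = b""  # pre-bind so the ClientPayloadError branch below is always safe
        for attempt in range(max_retries):  # assumed; the loop is outside this hunk
            try:
                async with session.get(url, timeout=ClientTimeout(total=timeout)) as response:
                    response.raise_for_status()
                    content = await response.read()
                    return content.decode('utf-8', errors='ignore')
            except asyncio.TimeoutError:
                print(f"Attempt {attempt + 1} timed out after {timeout} seconds", flush=True)
            except aiohttp.ClientPayloadError as e:
                # read() failed mid-transfer; fall back to previously buffered bytes,
                # if any, when they cover the advertised Content-Length
                print(f"Payload error on attempt {attempt + 1}: {str(e)}", flush=True)
                if response.content_length and len(content) >= response.content_length:
                    return content.decode('utf-8', errors='ignore')
            except aiohttp.ClientError as e:
                print(f"Attempt {attempt + 1} failed: {str(e)}", flush=True)
            await asyncio.sleep(1)  # assumed back-off; not shown in the diff

        raise Exception(f"Failed to fetch URL after {max_retries} attempts")

Switching from `response.text()` to `read()` plus `decode('utf-8', errors='ignore')` also means undecodable bytes are silently dropped instead of raising `UnicodeDecodeError`.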
 
@@ -29,8 +35,8 @@ async def fetch_url(url, session, max_retries=3, timeout=180):
     raise Exception(f"Failed to fetch URL after {max_retries} attempts")
 
 async def extract_and_transform_proxies(input_text):
-    print("Original input data:")
-    print(input_text)
+    print("Original input data (first 1000 characters):")
+    print(input_text[:1000])
     print("------------------------")
 
     try:
@@ -47,15 +53,19 @@ async def extract_and_transform_proxies(input_text):
             proxies_text = proxies_match.group(1)
             proxies_list = yaml.safe_load(proxies_text)
         else:
-            return "未找到有效的代理配置"
+            # If nothing matched, try extracting every fragment that looks like a proxy configuration
+            proxy_pattern = r'{[^}]+}'
+            possible_proxies = re.findall(proxy_pattern, input_text)
+            proxies_list = [yaml.safe_load(proxy) for proxy in possible_proxies]
     except yaml.YAMLError as e:
         return f"YAML解析错误: {str(e)}"
 
     if not proxies_list:
         return "未找到有效的代理配置"
 
-    print("Parsed proxies list:")
-    print(proxies_list)
+    print(f"Found {len(proxies_list)} possible proxy configurations")
+    print("Sample of parsed proxies list:")
+    print(proxies_list[:5])  # print only the first 5 proxy configurations
     print("------------------------")
 
     transformed_proxies = []
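A quick sketch of what this new fallback does, on an invented input (the sample string and the printed result are illustrative only, not from the commit):

    import re
    import yaml

    # Invented fragment: the top-level YAML structure is broken, but a flow-style
    # proxy mapping still appears in the text.
    input_text = "xx {name: node1, type: ss, server: 1.2.3.4, port: 8388} yy"
    possible_proxies = re.findall(r'{[^}]+}', input_text)
    proxies_list = [yaml.safe_load(proxy) for proxy in possible_proxies]
    print(proxies_list)
    # [{'name': 'node1', 'type': 'ss', 'server': '1.2.3.4', 'port': 8388}]

Note that the regex cannot handle nested braces, and a match that is not valid YAML will raise yaml.YAMLError inside the list comprehension, which the surrounding except clause already turns into the YAML-error return.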
 
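For context, a hypothetical end-to-end invocation of the two patched functions (the session setup and the URL are invented, not part of this commit):

    import asyncio
    import aiohttp

    async def main():
        async with aiohttp.ClientSession() as session:
            text = await fetch_url("https://example.com/clash.yaml", session)
            print(await extract_and_transform_proxies(text))

    asyncio.run(main())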