本间白猫 2024-08-24 13:28:11 +08:00
parent be39921850
commit 9900ab1c30
2 changed files with 38 additions and 27 deletions


@@ -1,14 +1,16 @@
 from http.server import BaseHTTPRequestHandler, HTTPServer
 from concurrent.futures import ThreadPoolExecutor
+from httpx import AsyncClient
 import multiprocessing
 import logoprint
 import threading
-import requests
 import argparse
+import asyncio
 import logging
 import socket
 import select
 import base64
+import httpx
 import getip
 import socks
 import time
@@ -20,7 +22,7 @@ proxy_fail_count = {}
 def load_proxies(file_path='ip.txt'):
     with open(file_path, 'r') as file:
         proxies = [line.strip().split('://') for line in file]
-    return [(p[0], *p[1].split(':')) for p in proxies]
+    return [(p[0], *p[1].split(':')) for p in proxies if len(p) == 2]
 
 def rotate_proxies(proxies, interval):
     global proxy_index
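
The added len(p) == 2 guard means load_proxies now skips any line that does not contain exactly one '://' separator instead of failing on the unpacking. A rough standalone sketch of the same parsing, using one of the sample entries from ip.txt (illustrative only, not part of the commit):

    # Mirrors the two list comprehensions in load_proxies.
    line = "socks5://114.132.125.88:7777"
    p = line.strip().split('://')                    # ['socks5', '114.132.125.88:7777']
    if len(p) == 2:                                  # malformed lines are filtered out
        protocol, host, port = (p[0], *p[1].split(':'))
        print(protocol, host, port)                  # socks5 114.132.125.88 7777
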
@@ -47,10 +49,7 @@ proxies = []
 
 class ProxyHTTPRequestHandler(BaseHTTPRequestHandler):
     def __init__(self, *args, **kwargs):
-        self.session = requests.Session()
-        adapter = requests.adapters.HTTPAdapter(pool_connections=1000, pool_maxsize=1000)
-        self.session.mount('http://', adapter)
-        self.session.mount('https://', adapter)
+        self.client = AsyncClient(http2=True, timeout=httpx.Timeout(10.0, read=30.0))
         super().__init__(*args, **kwargs)
 
     def _update_proxy(self):
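
The per-handler requests.Session and its large connection pool are replaced by a single httpx.AsyncClient. httpx.Timeout(10.0, read=30.0) sets a 10-second default for connect, write and pool operations with a 30-second read timeout, and http2=True needs the optional h2 package (pip install 'httpx[h2]'). A minimal sketch of the same client setup in isolation, outside the handler class:

    # Sketch only; assumes httpx is installed with HTTP/2 support.
    import httpx
    timeout = httpx.Timeout(10.0, read=30.0)      # 10 s connect/write/pool, 30 s read
    client = httpx.AsyncClient(http2=True, timeout=timeout)
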
@@ -62,7 +61,7 @@ class ProxyHTTPRequestHandler(BaseHTTPRequestHandler):
         proxy_index = (proxy_index + 1) % len(proxies)
         logging.info(f"Switching to proxy: {proxies[proxy_index]}")
         protocol, host, port = proxies[proxy_index]
-        self.proxy_dict = {"http": f"{protocol}://{host}:{port}", "https": f"{protocol}://{host}:{port}"}
+        self.proxy_dict = {"http://": f"{protocol}://{host}:{port}", "https://": f"{protocol}://{host}:{port}"}
 
     def _authenticate(self):
         auth = self.headers.get('Proxy-Authorization')
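
The proxy mapping keys switch from requests-style scheme names ("http", "https") to the URL-prefix keys httpx expects ("http://", "https://"). Note that httpx takes such a mapping when the client is constructed (proxies= on older releases, proxy=/mounts= on newer ones) rather than per request. A small sketch of the two conventions, with a placeholder proxy URL:

    # Placeholder URL; SOCKS proxies in httpx need: pip install 'httpx[socks]'
    proxy_url = "socks5://127.0.0.1:1080"
    requests_style = {"http": proxy_url, "https": proxy_url}      # keys used by requests
    httpx_style = {"http://": proxy_url, "https://": proxy_url}   # keys used by httpx
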
@@ -104,17 +103,23 @@ class ProxyHTTPRequestHandler(BaseHTTPRequestHandler):
         headers = {key: val for key, val in self.headers.items()}
         headers['Connection'] = 'keep-alive'
 
-        try:
-            logging.info(f"Handling request: {self.command} {self.path}")
-            response = self.session.request(self.command, self.path, headers=headers, data=data,
-                                            proxies=self.proxy_dict, stream=True)
-            self.send_response(response.status_code)
-            self.send_headers(response)
-            self.forward_content(response)
-        except (requests.RequestException, socket.timeout) as e:
-            logging.error(f"Request failed: {e}")
-            self._handle_proxy_failure()
-            self.send_error(500, message=str(e))
+        async def handle():
+            for attempt in range(3):
+                try:
+                    logging.info(f"Handling request: {self.command} {self.path}")
+                    response = await self.client.request(self.command, self.path, headers=headers, data=data,
+                                                         proxies=self.proxy_dict, stream=True)
+                    self.send_response(response.status_code)
+                    self.send_headers(response)
+                    await self.forward_content(response)
+                    break
+                except (httpx.RequestError, socket.timeout, OSError) as e:
+                    logging.error(f"Request failed: {e}")
+                    self._handle_proxy_failure()
+                    if attempt == 2:
+                        self.send_error(500, message=str(e))
+
+        asyncio.run(handle())
 
     def _handle_proxy_failure(self):
         global proxy_index, proxy_fail_count
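
The forwarding logic now lives in a local coroutine that retries up to three times, only returning a 500 to the client after the last attempt, and is driven with asyncio.run() from the synchronous handler thread (asyncio.run() creates and closes a fresh event loop on every call). The bare retry pattern, sketched with an illustrative coroutine argument:

    # Illustrative pattern only; do_request stands in for the real httpx call.
    import asyncio

    async def run_with_retries(do_request, attempts=3):
        for attempt in range(attempts):
            try:
                return await do_request()
            except Exception:
                if attempt == attempts - 1:   # last attempt: surface the error
                    raise
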
@@ -125,19 +130,25 @@ class ProxyHTTPRequestHandler(BaseHTTPRequestHandler):
             proxy_fail_count[current_proxy] = 1
 
         if proxy_fail_count[current_proxy] >= 3:
-            logging.info(f"Proxy {current_proxy} failed 3 times, switching to the next proxy")
-            proxy_fail_count[current_proxy] = 0
+            logging.info(f"Proxy {current_proxy} failed 3 times, trying to switch to the next proxy")
+            original_proxy_index = proxy_index
             if args.k:
-                proxies.clear()
-                proxies.extend(get_proxy_from_getip())
-                proxy_index = (proxy_index + 1) % len(proxies)
-                logging.info(f"Switching to proxy: {proxies[proxy_index]}")
+                new_proxies = get_proxy_from_getip()
+                if new_proxies:
+                    proxies.clear()
+                    proxies.extend(new_proxies)
+                    proxy_index = (proxy_index + 1) % len(proxies)
+                    logging.info(f"Switching to new proxy: {proxies[proxy_index]}")
+                else:
+                    logging.warning("Failed to fetch new proxies, keeping the current proxy")
+                    proxy_index = original_proxy_index
             else:
                 if rotate_mode == 'cycle':
                     proxy_index = (proxy_index + 1) % len(proxies)
                 elif rotate_mode == 'once' and proxy_index < len(proxies) - 1:
                     proxy_index += 1
                 logging.info(f"Switching to proxy: {proxies[proxy_index]}")
+            proxy_fail_count[current_proxy] = 0
 
     def send_headers(self, response):
         for key, value in response.headers.items():
@@ -146,8 +157,8 @@ class ProxyHTTPRequestHandler(BaseHTTPRequestHandler):
         self.send_header('Connection', 'keep-alive')
         self.end_headers()
 
-    def forward_content(self, response):
-        for chunk in response.iter_content(chunk_size=4096):
+    async def forward_content(self, response):
+        async for chunk in response.aiter_bytes(chunk_size=4096):
             if chunk:
                 self.wfile.write(chunk)
                 self.wfile.flush()
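
forward_content becomes a coroutine, swapping requests' iter_content for httpx's aiter_bytes when copying the upstream body to the client in 4 KiB chunks. For comparison, the usual explicit streaming form in httpx looks roughly like this (client, url and sink are placeholders):

    # Sketch only; client is an httpx.AsyncClient, sink any object with write().
    async def stream_body(client, url, sink):
        async with client.stream("GET", url) as response:
            async for chunk in response.aiter_bytes(chunk_size=4096):
                if chunk:
                    sink.write(chunk)
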

ip.txt

@@ -1,3 +1,3 @@
 http://127.0.0.1:7890
 socks5://114.132.125.88:7777
 socks5://47.101.196.233:20002