Mirror of https://github.com/honmashironeko/ProxyCat.git (synced 2025-11-05 10:51:06 +00:00)

Commit: ed4f57b092 ("Update")
Parent: fd06887ff9
@@ -1,23 +1,25 @@
 from http.server import BaseHTTPRequestHandler, HTTPServer
 from concurrent.futures import ThreadPoolExecutor
-from httpx import AsyncClient
+from httpx import AsyncClient, Limits
 import multiprocessing
-import logoprint
 import threading
+import logoprint
 import argparse
 import asyncio
 import logging
 import socket
 import select
 import base64
-import httpx
 import getip
 import socks
+import httpx
 import time
+import random

 logging.basicConfig(level=logging.INFO)
 proxy_index, rotate_mode, rotate_interval = 0, 'cycle', 60
 proxy_fail_count = {}
+cache = {}

 def load_proxies(file_path='ip.txt'):
     with open(file_path, 'r') as file:
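The new imports back two of the changes below: `Limits` sizes the httpx connection pool and `random` drives random proxy rotation. The module-level `cache = {}` is introduced here, but its lookup logic sits outside this diff; the following is only a minimal sketch of how a timestamped dictionary cache of that shape can be used (the key format and `CACHE_TTL` value are illustrative assumptions, not from the commit):

    import time

    cache = {}        # maps a request key to (timestamp, cached value)
    CACHE_TTL = 60    # illustrative time-to-live in seconds

    def cache_get(key):
        """Return a cached value if it is still fresh, otherwise None."""
        entry = cache.get(key)
        if entry is None:
            return None
        saved_at, value = entry
        if time.time() - saved_at > CACHE_TTL:
            del cache[key]    # drop stale entries lazily
            return None
        return value

    def cache_put(key, value):
        cache[key] = (time.time(), value)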
@@ -49,7 +51,8 @@ proxies = []

 class ProxyHTTPRequestHandler(BaseHTTPRequestHandler):
     def __init__(self, *args, **kwargs):
-        self.client = AsyncClient(http2=True, timeout=httpx.Timeout(10.0, read=30.0))
+        self.client = AsyncClient(http2=True, timeout=httpx.Timeout(30.0, read=30.0), limits=Limits(max_connections=500))
+        self.tunnel_established = False
         super().__init__(*args, **kwargs)

     def _update_proxy(self):
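The handler now builds its `AsyncClient` with an explicit connection pool (`Limits(max_connections=500)`) and a longer overall timeout, and keeps a flag recording whether a CONNECT tunnel has already been established. A self-contained sketch of that client configuration using httpx's public `Timeout`/`Limits` API (the target URL is illustrative; `http2=True` requires the `httpx[http2]` extra):

    import asyncio
    import httpx
    from httpx import AsyncClient, Limits

    async def fetch_status(url: str) -> int:
        client = AsyncClient(
            http2=True,                               # needs the h2 package (httpx[http2])
            timeout=httpx.Timeout(30.0, read=30.0),   # 30 s default, 30 s read timeout
            limits=Limits(max_connections=500),       # cap on pooled connections
        )
        async with client:                            # connections are reused inside the pool
            response = await client.get(url)
            return response.status_code

    print(asyncio.run(fetch_status("https://example.com")))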
@@ -58,7 +61,7 @@ class ProxyHTTPRequestHandler(BaseHTTPRequestHandler):
         if args.k:
             proxies.clear()
             proxies.extend(get_proxy_from_getip())
-        proxy_index = (proxy_index + 1) % len(proxies)
+        proxy_index = random.randint(0, len(proxies) - 1)
         logging.info(f"切换到代理地址: {proxies[proxy_index]}")
         protocol, host, port = proxies[proxy_index]
         self.proxy_dict = {"http://": f"{protocol}://{host}:{port}", "https://": f"{protocol}://{host}:{port}"}
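`_update_proxy` no longer steps through the list in order; it jumps to a random entry and rebuilds the scheme-to-proxy mapping (the log line reads "switched to proxy address"). A standalone sketch of that selection, with illustrative proxy tuples:

    import random

    # Illustrative entries of the (protocol, host, port) form used above.
    proxies = [("socks5", "127.0.0.1", "1080"), ("http", "192.0.2.10", "8080")]

    def pick_proxy_mapping():
        index = random.randint(0, len(proxies) - 1)   # random choice, as in the commit
        protocol, host, port = proxies[index]
        url = f"{protocol}://{host}:{port}"
        # httpx accepts a prefix-to-proxy mapping of this shape.
        return {"http://": url, "https://": url}

    print(pick_proxy_mapping())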
@@ -137,14 +140,14 @@ class ProxyHTTPRequestHandler(BaseHTTPRequestHandler):
             if new_proxies:
                 proxies.clear()
                 proxies.extend(new_proxies)
-                proxy_index = (proxy_index + 1) % len(proxies)
+                proxy_index = random.randint(0, len(proxies) - 1)
                 logging.info(f"切换到新代理地址: {proxies[proxy_index]}")
             else:
                 logging.warning("无法获取新代理,继续使用原代理")
                 proxy_index = original_proxy_index
         else:
             if rotate_mode == 'cycle':
-                proxy_index = (proxy_index + 1) % len(proxies)
+                proxy_index = random.randint(0, len(proxies) - 1)
             elif rotate_mode == 'once' and proxy_index < len(proxies) - 1:
                 proxy_index += 1
             logging.info(f"切换到代理地址: {proxies[proxy_index]}")
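Both rotation paths now use `random.randint` where the old code advanced `(proxy_index + 1) % len(proxies)`; when no new proxies can be fetched, the handler falls back to the original index (the warning reads "could not obtain a new proxy, keeping the current one"). The branch logic reads naturally as a standalone helper, sketched here with illustrative names:

    import random

    def next_proxy_index(current, total, mode):
        """Next index for one rotation step; mirrors the branch structure above."""
        if mode == 'cycle':
            # 'cycle' now means: pick any proxy at random on every rotation.
            return random.randint(0, total - 1)
        if mode == 'once' and current < total - 1:
            # 'once' walks the list a single time and then stays on the last entry.
            return current + 1
        return current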
@@ -167,11 +170,17 @@ class ProxyHTTPRequestHandler(BaseHTTPRequestHandler):
         self._update_proxy()
         host, port = self.path.split(':')
         try:
-            remote_socket = self._connect_via_proxy(host, int(port))
-            self.send_response(200, 'Connection Established')
-            self.send_header('Connection', 'keep-alive')
-            self.end_headers()
-            self._forward_data(self.connection, remote_socket)
+            if not self.tunnel_established:
+                remote_socket = self._connect_via_proxy(host, int(port))
+                self.tunnel_established = True
+                self.send_response(200, 'Connection Established')
+                self.send_header('Connection', 'keep-alive')
+                self.end_headers()
+                self._forward_data(self.connection, remote_socket)
+            else:
+                self.send_response(200, 'Connection Already Established')
+                self.send_header('Connection', 'keep-alive')
+                self.end_headers()
         except (socket.error, Exception) as e:
             logging.error(f"隧道请求失败: {e}")
             self._handle_proxy_failure()
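The CONNECT handler now opens the upstream tunnel only once per handler instance; repeated CONNECTs on an established tunnel simply get a 200 back, and failures (the log reads "tunnel request failed") trigger proxy rotation. `_forward_data` itself is not part of this hunk; the following is only a plausible `select`-based shape for such a relay, not the project's implementation:

    import select
    import socket

    def forward_data(client_sock: socket.socket, remote_sock: socket.socket) -> None:
        """Relay bytes in both directions until either peer closes or times out."""
        pair = [client_sock, remote_sock]
        while True:
            readable, _, errored = select.select(pair, [], pair, 60)
            if errored or not readable:
                return
            for sock in readable:
                data = sock.recv(65536)
                if not data:          # peer closed its side of the tunnel
                    return
                target = remote_sock if sock is client_sock else client_sock
                target.sendall(data)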
@@ -280,4 +289,4 @@ if __name__ == '__main__':
     args = parser.parse_args()
     username, password = args.up.split(':')
     print_icpscan_banner(port=args.p, mode=args.m, interval=args.t, username=username, password=password)
     run(port=args.p, mode=args.m, interval=args.t, username=username, password=password, use_getip=args.k)
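The entry point wires the parsed flags into the banner and the server. The parser definition is outside this diff, so the option strings below are inferred only from the attribute accesses (`args.p`, `args.m`, `args.t`, `args.up`, `args.k`); defaults and help texts are illustrative:

    import argparse

    parser = argparse.ArgumentParser(description="ProxyCat-style rotating local proxy")
    parser.add_argument("-p", type=int, default=1080, help="local listen port")
    parser.add_argument("-m", default="cycle", help="rotation mode: cycle or once")
    parser.add_argument("-t", type=int, default=60, help="rotation interval in seconds")
    parser.add_argument("--up", default="user:pass", help="credentials as username:password")
    parser.add_argument("-k", action="store_true", help="fetch proxies via getip instead of ip.txt")

    args = parser.parse_args()
    username, password = args.up.split(':')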
README.md (10 changed lines)

@@ -91,12 +91,22 @@ ZZZzz /,`.-'`' -. ;-;;,_

 - [x] 增加静态代理自动获取更新模块,从而永久运行

+- [ ] 增加负载均衡模式,同时使用大量代理地址发送,从而有效提高并发效率
+
 如果您有好的idea,或者使用过程中遇到的bug,都请辛苦您添加作者联系方式进行反馈!

 微信公众号:樱花庄的本间白猫

 ## 七、更新日志

+**2024/08/25**
+
+1. 读取ip.txt时自动跳过空行
+2. httpx更换为并发池,提高性能
+3. 增加缓冲字典,相同站点降低延迟
+4. 每次请求更换IP逻辑修改为,随机选择代理
+5. 采用更加高效的结构和算法,优化请求处理逻辑
+
 **2024/08/24**

 1. 采用异步方案提高并发能力和减少超时
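The new 2024/08/25 changelog block records, in order: skip blank lines when reading ip.txt, switch httpx to a pooled concurrent client, add a cache dictionary to cut latency for repeated sites, change per-request rotation to random proxy selection, and general structural optimization of request handling. The README also adds a TODO for a load-balancing mode that fans requests across many proxies at once. The first changelog item maps onto `load_proxies` from the earlier hunk; a minimal sketch of blank-line skipping, assuming one `protocol://host:port` entry per line (the actual parsing is outside this diff):

    def load_proxies(file_path='ip.txt'):
        """Read proxy entries, ignoring blank lines (changelog item 1)."""
        entries = []
        with open(file_path, 'r') as file:
            for raw in file:
                line = raw.strip()
                if not line:          # blank lines in ip.txt are skipped
                    continue
                protocol, rest = line.split('://', 1)
                host, port = rest.split(':', 1)
                entries.append((protocol, host, port))
        return entries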
getip.py (2 changed lines)

@@ -5,7 +5,7 @@ def newip():
     url = ""
     response = requests.get(url)
     response.raise_for_status()
-    newip = "socks5://"+response.text.split("\n")[0]
+    newip = "socks5://"+response.text.split("\n\r")[0]
     print("新的代理IP为:"+newip)
     return newip
 
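The changed split assumes the proxy provider terminates entries with the literal sequence `\n\r` (the print line reads "the new proxy IP is"). If that response format is not guaranteed, a separator-agnostic way to take the first non-empty line is `str.splitlines()`; this is offered only as an illustrative sketch, not what the commit ships:

    import requests

    def first_proxy(url: str) -> str:
        """Fetch the provider response and return its first non-empty line as a socks5 URL."""
        response = requests.get(url)
        response.raise_for_status()
        # splitlines() copes with \n, \r\n and \r alike.
        first = next(line.strip() for line in response.text.splitlines() if line.strip())
        return "socks5://" + first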