下载地址:www.pan38.com/dow/share.p… 提取密码:2041
这个抢单系统包含主抢单逻辑、配置管理和代理管理三个模块。主模块实现了订单监控、抢单策略和订单处理功能,使用多线程提高效率。代码中包含了详细的日志记录、异常处理和请求签名等安全措施。
# Fixed: all imports were collapsed onto a single physical line (a paste
# artifact), which is a SyntaxError. Regrouped stdlib / third-party.
import hashlib
import json
import logging
import queue
import random
import threading
import time
from datetime import datetime

import requests
from bs4 import BeautifulSoup  # kept: imported by the original, unused in the visible code
from fake_useragent import UserAgent
class OrderGrabber:
    """Polls an order API and tries to claim ("grab") matching orders.

    One daemon thread monitors the API and enqueues new orders; a second
    daemon thread filters them and posts grab requests.
    """

    def __init__(self, config_file='config.json'):
        """Load configuration and prepare the HTTP session.

        Fixed: the pasted source had ``def init`` (markdown stripped the
        underscores), so the constructor never ran.

        Args:
            config_file: Path to a JSON config file. The visible code reads
                these keys: api_secret, app_key, user_id, order_api,
                grab_api, polling_interval, min_price, max_distance.
        """
        self.logger = self._setup_logger()
        self.config = self._load_config(config_file)
        self.session = requests.Session()
        self.order_queue = queue.Queue()   # orders waiting to be processed
        self.running = False               # flag checked by both worker loops
        self.ua = UserAgent()              # random User-Agent source
        self.proxies = self._init_proxies()
        self._setup_session()
def _setup_logger(self):
logger = logging.getLogger('OrderGrabber')
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# 文件日志
file_handler = logging.FileHandler('order_grabber.log')
file_handler.setFormatter(formatter)
logger.addHandler(file_handler)
# 控制台日志
console_handler = logging.StreamHandler()
console_handler.setFormatter(formatter)
logger.addHandler(console_handler)
return logger
def _load_config(self, config_file):
try:
with open(config_file, 'r', encoding='utf-8') as f:
return json.load(f)
except Exception as e:
self.logger.error(f"加载配置文件失败: {e}")
raise
def _init_proxies(self):
# 这里可以扩展为从代理池获取
return {
'http': 'http://proxy.example.com:8080',
'https': 'https://proxy.example.com:8080'
}
def _setup_session(self):
headers = {
'User-Agent': self.ua.random,
'Accept': 'application/json, text/javascript, */*; q=0.01',
'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
'Connection': 'keep-alive'
}
self.session.headers.update(headers)
def _generate_signature(self, params):
# 生成请求签名
param_str = '&'.join([f'{k}={v}' for k, v in sorted(params.items())])
return hashlib.md5((param_str + self.config['api_secret']).encode()).hexdigest()
def start_monitoring(self):
self.running = True
monitor_thread = threading.Thread(target=self._monitor_orders)
monitor_thread.daemon = True
monitor_thread.start()
process_thread = threading.Thread(target=self._process_orders)
process_thread.daemon = True
process_thread.start()
def _monitor_orders(self):
while self.running:
try:
orders = self._fetch_new_orders()
for order in orders:
self.order_queue.put(order)
time.sleep(self.config['polling_interval'])
except Exception as e:
self.logger.error(f"监控订单出错: {e}")
time.sleep(10)
def _fetch_new_orders(self):
# 模拟获取新订单
params = {
'app_key': self.config['app_key'],
'timestamp': int(time.time()),
'page': 1,
'page_size': 20
}
params['sign'] = self._generate_signature(params)
try:
response = self.session.get(
self.config['order_api'],
params=params,
proxies=self.proxies,
timeout=10
)
response.raise_for_status()
return response.json().get('data', [])
except Exception as e:
self.logger.error(f"获取订单失败: {e}")
return []
def _process_orders(self):
while self.running:
try:
order = self.order_queue.get(timeout=5)
if self._should_grab(order):
success = self._grab_order(order)
if success:
self.logger.info(f"成功抢到订单: {order['order_id']}")
else:
self.logger.warning(f"抢单失败: {order['order_id']}")
except queue.Empty:
continue
except Exception as e:
self.logger.error(f"处理订单出错: {e}")
def _should_grab(self, order):
# 这里可以添加各种抢单策略
if order['price'] < self.config['min_price']:
return False
if order['distance'] > self.config['max_distance']:
return False
return True
def _grab_order(self, order):
grab_params = {
'app_key': self.config['app_key'],
'timestamp': int(time.time()),
'order_id': order['order_id'],
'user_id': self.config['user_id']
}
grab_params['sign'] = self._generate_signature(grab_params)
try:
response = self.session.post(
self.config['grab_api'],
data=grab_params,
proxies=self.proxies,
timeout=5
)
response.raise_for_status()
return response.json().get('success', False)
except Exception as e:
self.logger.error(f"抢单请求失败: {e}")
return False
    def stop(self):
        """Stop the service: clear the running flag so both daemon loops
        exit on their next check, then log the shutdown."""
        self.running = False
        self.logger.info("抢单服务已停止")
if __name__ == '__main__':
    # Fixed: the pasted source read `if name == 'main'` (underscores lost),
    # which raises NameError / never runs. Runs the grabber until Ctrl-C.
    grabber = OrderGrabber()
    try:
        grabber.start_monitoring()
        while True:
            time.sleep(1)  # keep the main thread alive for the daemon threads
    except KeyboardInterrupt:
        grabber.stop()
# NOTE(review): the code below originally formed a separate module (the
# proxy manager); its imports are kept here so the pasted file still runs.
# Fixed: the imports were collapsed onto one physical line (SyntaxError).
import random
import time

import requests
from threading import Lock
class ProxyManager:
    """Maintains a pool of proxies fetched from remote proxy-source APIs."""

    def __init__(self, proxy_sources=None):
        """Set up an empty, lazily-populated proxy pool.

        Fixed: ``def init`` restored to ``__init__``; the default source
        URLs lacked a scheme, so ``requests.get('www...')`` raised
        MissingSchema and every refresh silently failed.

        Args:
            proxy_sources: Optional list of URLs whose JSON response
                carries a 'proxies' list; defaults to two placeholder
                endpoints.
        """
        from threading import RLock  # local import: module only imports Lock

        self.proxies = []
        self.last_update = 0  # epoch seconds of the last successful refresh
        # RLock (was Lock): get_random_proxy() may call update_proxies(),
        # which takes the same lock; a plain Lock deadlocked there.
        self.lock = RLock()
        self.proxy_sources = proxy_sources or [
            'http://www.proxysource1.com/api',
            'http://www.proxysource2.com/api',
        ]
def update_proxies(self):
with self.lock:
if time.time() - self.last_update < 3600: # 1小时更新一次
return
new_proxies = []
for source in self.proxy_sources:
try:
response = requests.get(source, timeout=10)
if response.status_code == 200:
new_proxies.extend(response.json().get('proxies', []))
except Exception:
continue
if new_proxies:
self.proxies = list(set(new_proxies)) # 去重
self.last_update = time.time()
def get_random_proxy(self):
with self.lock:
if not self.proxies:
self.update_proxies()
if not self.proxies:
return None
return random.choice(self.proxies)
def report_bad_proxy(self, proxy):
with self.lock:
if proxy in self.proxies:
self.proxies.remove(proxy)