我需要以特定的时间间隔连续发送请求。我有一个使用线程的脚本,但它对服务器负载很大。此外,我更喜欢不要在单个会话中进行此操作,并且希望避免缓存响应
我想使用 asyncio 重写所有内容,但遇到一个问题:我需要预定义任务列表,并且各次 `asyncio.gather` 调用之间存在显著的延迟。
import sys

# Must run before the local `auth`/`utils` imports below.
sys.path.insert(0, "../")

import functools
import itertools
import threading
import time

import orjson
import requests
from loguru import logger

from auth import Auth
from utils import retry_on_exception
# Route records explicitly tagged with logs=True to the log file.
# `is True` instead of `== True` (PEP 8 / E712); identical for bool tags.
logger.add("logs.log", filter=lambda record: record["extra"].get("logs") is True)

# URLS constants
URL = "https:....."
# .....
CONST1 = "..."
CONST2 = "..."

# Shared de-duplication state: data_list holds ids already dispatched,
# locker guards it across the fetcher threads.
# NOTE(review): membership tests on a list are O(n) and the list grows
# without bound; a set would be O(1) — confirm nothing relies on order.
data_list = []
locker = threading.Lock()
@retry_on_exception
def func1(acc, info):
    """POST the payload built from `info` to URL on behalf of `acc`.

    Returns the `requests.Response` so callers can inspect it (the
    original discarded it). Retried by `retry_on_exception` on failure.
    """
    data = {
        # data CONST1
    }
    # timeout added: requests has NO default timeout, so a stalled
    # server would hang this worker thread forever.
    resp = requests.post(url=URL,
                         headers=acc.get_headers(),
                         cookies=acc.get_cookies(),
                         json=data,
                         timeout=10)
    return resp
@retry_on_exception
def func2(acc, info1, info2):
    """POST info1/info2 to URL for `acc`; on success forward the
    extracted field to func1.

    Retried by `retry_on_exception` on failure.
    """
    data = {
        # info1 info2 CONST2
    }
    # timeout added: requests has NO default timeout (hang risk).
    resp = requests.post(URL,
                         headers=acc.get_headers(),
                         cookies=acc.get_cookies(),
                         json=data,
                         timeout=10)
    # Parse the body once — the original called resp.json() three times,
    # re-decoding the payload on every access.
    payload = resp.json()
    if payload["code"] == "..." and payload.get("error"):
        logger.info("...", logs=True)
        return
    logger.debug("{resp}", resp=resp, resp_text=resp.text)
    info = payload["..."]
    func1(info=info, acc=acc)
def exception_execute(func):
    """Decorator: run `func`, logging (and swallowing) any exception so a
    worker thread never dies with an unhandled traceback.

    Returns None when an exception was caught; otherwise the wrapped
    function's return value.
    """
    @functools.wraps(func)  # preserve __name__/__doc__ of the wrapped callable
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as ex:
            logger.exception("{ex}", ex=ex, console=True)
    return wrapper
@exception_execute
def func3(acc, cookies, proxy):
    """Fetch URL through `proxy` with `cookies`, de-duplicate the items
    received, and dispatch each new one to func2 under `acc`.

    Exceptions are logged and swallowed by `exception_execute`.
    """
    # timeout added: requests has NO default timeout; without it a dead
    # proxy would pin this thread forever.
    resp = requests.get(URL, cookies=cookies, proxies=proxy, timeout=10)
    print(resp, time.time())  # use print because logs loads more cpu
    resp_json = orjson.loads(resp.text)
    infos = resp_json["..."]
    goods_infos = resp_json["..."]
    for i in infos:
        info1 = i["..."]
        info2 = i["..."]
        info3 = i["..."]
        if True or False:  # if with received information
            # Claim info1 under the lock so only the first thread that
            # sees it proceeds; others skip to the next item.
            # NOTE(review): `in` on a growing list is O(n) per check.
            with locker:
                if info1 not in data_list:
                    data_list.append(info1)
                else:
                    continue
            func2(info1=info1,
                  info2=info2,
                  acc=acc)
            logger.info("...", console=True, logs=True)
            logger.info("{info1}", info1=info1, console=True, logs=True)
            logger.info("{info2}", info2=info2, console=True, logs=True)
def get_acc():
    """Yield account cookie jars round-robin, indefinitely.

    Uses itertools.cycle instead of the hand-rolled while/for loop; with
    an empty account list the generator now simply ends (next() raises
    StopIteration) instead of busy-looping forever at 100% CPU.
    """
    acc_paths = [
        # acc path list
    ]
    # Authenticate each account once up front; only cookies are cycled.
    acc_list = [Auth(path).get_cookies() for path in acc_paths]
    yield from itertools.cycle(acc_list)
def main():
    """Spawn one func3 fetcher thread per proxy, ~every 40 ms, forever,
    rotating account cookies via get_acc()."""
    proxy_list = [
        {}
    ]
    main_acc = Auth(r"accounts/..")
    main_acc.ensure_auth()
    acc_gen = get_acc()
    while True:
        for proxy in proxy_list:
            time.sleep(0.04)  # ~25 requests/second pacing
            # BUG FIX: the original passed the undefined name `acc`
            # (NameError in every spawned thread); func3's first argument
            # is the authenticated main account.
            t = threading.Thread(target=func3, args=(main_acc, next(acc_gen), proxy))
            t.start()
if __name__ == '__main__':
    main()