From c373f896b4963d097f8dfac7698e2c67b31db0b1 Mon Sep 17 00:00:00 2001
From: xhy
Date: Wed, 9 Apr 2025 23:07:30 +0800
Subject: [PATCH] =?UTF-8?q?=E4=BF=AE=E5=A4=8D=E4=B8=80=E4=BA=9Bbug?=
 =?UTF-8?q?=EF=BC=8Cwap=20reporter=20=E5=BD=93=E8=BF=94=E5=9B=9E=E9=87=8D?=
 =?UTF-8?q?=E5=A4=8D=E6=8F=90=E4=BA=A4=E7=9A=84=E6=97=B6=E5=80=99=E6=9B=B4?=
 =?UTF-8?q?=E6=96=B0=E6=95=B0=E6=8D=AE=E5=BA=93=E7=8A=B6=E6=80=81?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 app/engines/crawl_engine.py           |  2 +-
 app/engines/reporters/wap_reporter.py | 16 +++++++++++-----
 2 files changed, 12 insertions(+), 6 deletions(-)

diff --git a/app/engines/crawl_engine.py b/app/engines/crawl_engine.py
index 32ee0c1..8dc2445 100644
--- a/app/engines/crawl_engine.py
+++ b/app/engines/crawl_engine.py
@@ -407,10 +407,10 @@ class CrawlEngine:
                 headers=headers,
                 proxies=get_proxies()
             )
-            logger.debug(f"{response.content=}")
             response = response.json()
             backstr = response["data"]["backstr"]
             captcha_link = response["data"]["captchalist"][0]["source"]["back"]["path"]
+            logger.debug(f"{backstr=}, {captcha_link=}")
 
             # 下载验证码图片
             image_response = requests.get(captcha_link, headers=headers, proxies=get_proxies())
diff --git a/app/engines/reporters/wap_reporter.py b/app/engines/reporters/wap_reporter.py
index 7015944..f72fb7f 100644
--- a/app/engines/reporters/wap_reporter.py
+++ b/app/engines/reporters/wap_reporter.py
@@ -51,7 +51,8 @@ class WapReporter(BaseReporter):
     def run(self):
         """实现 WAP 端的举报逻辑"""
         with Session(self.database) as session:
-            stmt = select(ReportUrlModel).where(ReportUrlModel.is_report_by_wap == False).where(ReportUrlModel.has_evidence == True)
+            stmt = select(ReportUrlModel).where(ReportUrlModel.is_report_by_wap == False).where(
+                ReportUrlModel.has_evidence == True)
             rows: list[ReportUrlModel] = session.exec(stmt).all()
             logger.debug(f"[{self.engine_name}] 共找到 {len(rows)} 条待举报记录")
 
@@ -61,6 +62,8 @@ class WapReporter(BaseReporter):
                 if not self.status:
                     break
 
+                self.ev.wait(1)
+
                 # 选个 cookie
                 report_cookie = random.choice(get_all_cookies())
                 report_site_cookie = GenCookie.run(report_cookie)
@@ -93,7 +96,7 @@ class WapReporter(BaseReporter):
         # wapUserAgent = random.choice(self.wapUserAgent)
         response = self.request.get(
             "https://ufosdk.baidu.com/api?m=Web&a=getUserInfo&appid=293852",
-            headers=self.headers, proxies=self.proxies, allow_redirects=False, timeout=10, verify=False
+            headers=self.headers, proxies=self.proxies, allow_redirects=False, timeout=10
         )
         json_data = response.json()
         logger.debug(f"{self.engine_name} get_user_info response: {json_data}")
@@ -152,11 +155,14 @@ class WapReporter(BaseReporter):
             proxies=self.proxies,
             allow_redirects=False,
             timeout=10,
-            verify=False
         )
         # logger.debug(req.json())
-        logger.debug(response.json())
-        if response.json()['errno'] == 0:
+        data = response.json()
+        logger.debug(data)
+        if data['errno'] == 0:
             logger.success(f"[{self.engine_name}] {fb_url} 举报成功")
             return True
+        if "请勿重复提交" in data["errmsg"]:
+            logger.success(f"[{self.engine_name}] {fb_url} 重复提交,标记为成功")
+            return True
         return False