import argparse
import asyncio
import sys
import os
import time
import signal

from app.engines.report_engine import Reporter

from .config import load_config, AppConfig
from .config.config import AppCtx
from .engines.crawl_engine import CrawlEngine
from .engines.evidence_engine import EvidenceEngine
from .models.base import connect_db, create_database

from loguru import logger
import sqlalchemy.exc

from .web.web import WebApp


class MainApp:
    """Main application."""

    def __init__(self):
        self.args = None
        self.config: AppConfig = None
        self.db_engine = None

        # All engines
        self.crawl_engine = None
        self.evidence_engine = None
        self.report_engine = None

    def parse_args(self):
        """Parse command-line arguments."""
        parser = argparse.ArgumentParser(description="Baidu Reporter")

        # Config file path
        parser.add_argument(
            "-c",
            "--config",
            default="./config.local.toml",
            help="Path to the config file, defaults to ./config.local.toml",
        )

        parser.add_argument(
            "--crawl",
            help="Crawl mode: collect SURLs in bulk for the given domains. "
            "Separate multiple domains with commas, or pass a file via --crawl-file",
        )
        parser.add_argument(
            "--crawl-file", help="File of target domains to crawl, one domain per line"
        )

        parser.add_argument(
            "--evidence",
            help="Evidence mode: gather evidence for the SURLs already in the database",
            action="store_true",
        )

        # Run-mode argument
        parser.add_argument(
            "--report",
            const="pc,site,wap",
            nargs="?",
            help="Run mode: pc/site/wap. Runs all modes if none is given; "
            "separate multiple modes with commas",
        )

        # Web server arguments
        parser.add_argument(
            "--web", action="store_true", help="Start the web server"
        )
        parser.add_argument(
            "--web-only", action="store_true", help="Start the web server without starting the engines"
        )

        parser.add_argument(
            "-s",
            action="store_true",
            help="Start Chrome in normal (non-headless) mode, handy for debugging and observing runs",
        )

        # Show the help text when no arguments are given
        if len(sys.argv) == 1:
            parser.print_help()
            sys.exit(0)

        args = parser.parse_args()
        logger.debug(f"{args=}")

        # Handle the run-mode argument
        if args.report:
            reports = [m.strip() for m in args.report.split(",")]
            valid_modes = ["pc", "site", "wap"]
            invalid_modes = [m for m in reports if m not in valid_modes]
            if invalid_modes:
                parser.error(f'Invalid run mode(s): {", ".join(invalid_modes)}')
            args.report = reports
        elif args.report is not None:
            args.report = ["pc", "site", "wap"]

        # Check that the input file exists
        if args.crawl_file and not os.path.exists(args.crawl_file):
            parser.error(f"--crawl-file: file {args.crawl_file} does not exist")

        # Check that the config file exists
        if not os.path.exists(args.config):
            parser.error(f"Config file not found: {args.config}")

        self.args = args

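    # Example invocations (a sketch only; "python -m app" assumes the package is
    # named "app" and exposes a __main__ entry point, which is not shown here):
    #   python -m app --crawl example.com,example.org
    #   python -m app --crawl-file domains.txt
    #   python -m app --evidence
    #   python -m app --report pc,wap
    #   python -m app --web-only
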
    def start_cli(self):
        """Start in CLI mode."""
        if self.args.crawl or self.args.crawl_file:
            crawl = CrawlEngine()
            crawl.cli_start(self.args.crawl, self.args.crawl_file)
            crawl.cli_wait()
            crawl.stop()
        elif self.args.evidence:
            evidence = EvidenceEngine()
            evidence.cli_start()
            evidence.stop()
        elif self.args.report:
            reporter = Reporter(self.args.report)
            reporter.cli_start()
            reporter.stop()
        else:
            logger.error("Invalid mode!")
            return

    def start_web(self):
        """Start in web mode."""

        # Register a Ctrl+C handler that shuts down all engines cleanly
        signal.signal(signal.SIGINT, self.exit_handler)

        # Start the requested engines
        if self.args.crawl:
            self.crawl_engine = CrawlEngine()
            self.crawl_engine.start()
            logger.info("crawl started")

        if self.args.evidence:
            self.evidence_engine = EvidenceEngine()
            self.evidence_engine.start()
            logger.info("evidence started")

        if self.args.report:
            self.report_engine = Reporter(self.args.report)
            self.report_engine.start()
            logger.info("report started")

        # Start the web UI
        web_app = WebApp()
        asyncio.run(web_app.start())

        logger.info("web stop.")

    def run(self):
        """Run the application."""

        # Parse command-line arguments
        self.parse_args()

        # Load the config file
        self.config = load_config(self.args.config)
        logger.info(f"Loaded config file {self.args.config}")

        # Set the Chrome mode
        if self.args.s:
            AppCtx.g_app_config.headless_chrome = False
        else:
            AppCtx.g_app_config.headless_chrome = True

        # Connect to the database
        try:
            self.db_engine = connect_db(self.config)
            logger.info(f"Connected to database {self.config.database.database}")
        except sqlalchemy.exc.OperationalError as e:
            # If the error says the database does not exist (MySQL error 1049),
            # initialize it and reconnect
            if "1049" in str(e):
                logger.info("Database does not exist, trying to initialize it")
                create_database(self.config)
                logger.info("Database initialized, reconnecting")
                self.db_engine = connect_db(self.config)
                logger.info(f"Connected to database {self.config.database.database}")
            else:
                logger.error(f"Failed to connect to the database, check the config file and the database service: {e}")
                sys.exit(1)

        # If --web or --web-only is given, start the web server and ignore the other options
        if self.args.web or self.args.web_only:
            logger.info("Starting in web mode")
            return self.start_web()
        else:
            logger.info("Starting in CLI mode")
            return self.start_cli()

    def exit_handler(self, signum, frame):
        # Shut down each engine here
        logger.debug("CTRL+C called.")

        if self.crawl_engine:
            self.crawl_engine.stop()
            self.crawl_engine.cli_wait()
            logger.info("crawl exited")

        if self.evidence_engine:
            self.evidence_engine.stop()
            self.evidence_engine.wait()
            logger.info("evidence exited")

        if self.report_engine:
            self.report_engine.stop()
            self.report_engine.wait()
            logger.info("report exited")
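
# Minimal entry-point sketch. The original module's __main__ block (if any) is
# not shown above, so this is an assumption rather than the project's actual
# entry point.
if __name__ == "__main__":
    app = MainApp()
    app.run()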