from LoggerUtils import Logger, initLogger
from bs4 import BeautifulSoup as bs
from urllib.request import urlopen, Request
from urllib.parse import quote_plus
import json
import Contant
from sqlalchemy import create_engine
from entity.ChannelEntity import Channel
from service.ChannelService import ChannelService
from common.YoutubeUtils import YouTubeUtil
import operator
import argparse

# Example usage:
#   --start="2023-09-10T00:00:01Z" --end="2023-09-11T00:00:01Z"
if __name__ == "__main__":
    # Parse the search window from the command line.
    # Values are passed straight through to YouTubeUtil.getByChannelId;
    # presumably ISO-8601 UTC timestamps — confirm against that API.
    parser = argparse.ArgumentParser(description="")
    parser.add_argument("--start", type=str, default="")
    parser.add_argument("--end", type=str, default="")
    args = parser.parse_args()
    startTime = args.start
    endTime = args.end

    # Load the JSON configuration file.
    with open('search_video_config.json', 'r', encoding='utf-8') as f:
        data = json.load(f)

    # Initialise logging from the config.
    Contant.logDir = data['log']['dir']
    Contant.logFileName = data['log']['fileName']
    initLogger(Contant.logDir, Contant.logFileName)

    # Read MySQL connection settings.
    dbHost = data['mysql']['host']
    dbPort = data['mysql']['port']
    dbUserName = data['mysql']['username']
    dbPassword = data['mysql']['password']
    dbDatabase = data['mysql']['database']
    # SECURITY FIX: never write the real password to the log file — mask it.
    Logger.info("尝试连接mysql host:'{}' port:'{}' username:'{}' password:'{}' database:'{}'",
                dbHost, dbPort, dbUserName, '******', dbDatabase)
    # FIX: URL-encode the credentials so special characters in the username or
    # password (e.g. '@', ':', '/', '%') cannot corrupt the SQLAlchemy URL.
    Contant.engin = create_engine(
        f'mysql+mysqlconnector://{quote_plus(dbUserName)}:{quote_plus(dbPassword)}'
        f'@{dbHost}:{dbPort}/{dbDatabase}')
    Logger.info("连接mysql成功")

    # Fetch every channel and pull its videos for the requested time window.
    channels = ChannelService.queryAllChannel()
    Logger.info("Channels length:{}".format(len(channels)))
    for channel in channels:
        # Annotation-only reassignment: gives IDEs/type-checkers the row type.
        channel: Channel = channel
        Logger.info(
            f"Id:{channel.id} channelId:{channel.channelId} startTime:{startTime} endTime:{endTime}")
        YouTubeUtil.getByChannelId(
            channelId=channel.channelId, startTime=startTime, endTime=endTime)