2023-06-09 12:41:53 +00:00
|
|
|
import argparse
|
2023-07-29 07:35:40 +00:00
|
|
|
import asyncio
|
|
|
|
import sys
|
2023-06-09 12:41:53 +00:00
|
|
|
|
2023-06-16 11:35:43 +00:00
|
|
|
import config
|
2023-07-29 07:35:40 +00:00
|
|
|
import db
|
2023-06-27 15:38:30 +00:00
|
|
|
from base import proxy_account_pool
|
2023-06-09 12:41:53 +00:00
|
|
|
from media_platform.douyin import DouYinCrawler
|
2023-11-23 16:04:33 +00:00
|
|
|
from media_platform.kuaishou import KuaishouCrawler
|
2023-06-09 12:41:53 +00:00
|
|
|
from media_platform.xhs import XiaoHongShuCrawler
|
|
|
|
|
|
|
|
|
|
|
|
class CrawlerFactory:
    """Factory that maps a platform code to its crawler implementation."""

    @staticmethod
    def create_crawler(platform: str):
        """Return a new crawler instance for ``platform``.

        :param platform: platform code — one of ``"xhs"``, ``"dy"`` or ``"ks"``.
        :raises ValueError: if the platform code is not supported.
        """
        if platform == "xhs":
            return XiaoHongShuCrawler()
        elif platform == "dy":
            return DouYinCrawler()
        elif platform == "ks":
            return KuaishouCrawler()
        else:
            # Fix: the old message omitted "ks" even though the branch above
            # supports it — keep the message in sync with the real choices.
            raise ValueError("Invalid Media Platform Currently only supported xhs or dy or ks ...")
|
2023-06-09 12:41:53 +00:00
|
|
|
|
|
|
|
|
|
|
|
async def main():
    """Parse CLI options, prepare shared resources, then run the chosen crawler."""
    # CLI flags override the defaults coming from the config module.
    cli = argparse.ArgumentParser(description='Media crawler program.')
    cli.add_argument(
        '--platform',
        type=str,
        help='Media platform select (xhs | dy | ks)',
        choices=["xhs", "dy", "ks"],
        default=config.PLATFORM,
    )
    cli.add_argument(
        '--lt',
        type=str,
        help='Login type (qrcode | phone | cookie)',
        choices=["qrcode", "phone", "cookie"],
        default=config.LOGIN_TYPE,
    )
    cli.add_argument(
        '--type',
        type=str,
        help='crawler type (search | detail)',
        choices=["search", "detail"],
        default=config.CRAWLER_TYPE,
    )

    # Build the shared account pool before parsing so setup happens even
    # when argument parsing would later abort the program.
    pool = proxy_account_pool.create_account_pool()

    # Create database tables only when persistence is enabled in config.
    if config.IS_SAVED_DATABASED:
        await db.init_db()

    opts = cli.parse_args()
    selected_crawler = CrawlerFactory.create_crawler(platform=opts.platform)
    selected_crawler.init_config(
        platform=opts.platform,
        login_type=opts.lt,
        account_pool=pool,
        crawler_type=opts.type,
    )
    await selected_crawler.start()
|
|
|
|
|
|
|
|
|
|
|
|
if __name__ == '__main__':
    try:
        # asyncio.run(main())
        # NOTE(review): the modern asyncio.run() call is deliberately left
        # commented out in favor of run_until_complete — presumably one of
        # the crawlers needs the default event loop / loop-policy behavior;
        # confirm before modernizing.
        asyncio.get_event_loop().run_until_complete(main())
    except KeyboardInterrupt:
        # Exit quietly on Ctrl-C instead of printing a traceback.
        sys.exit()
|