# Disclaimer: This code is for learning and research purposes only. Users must
# observe the following principles:
# 1. Do not use it for any commercial purpose.
# 2. Comply with the target platforms' terms of service and robots.txt rules.
# 3. Do not perform large-scale crawling or disrupt the platforms' operation.
# 4. Control the request rate reasonably to avoid placing unnecessary load on
#    the target platforms.
# 5. Do not use it for any illegal or improper purpose.
#
# See the LICENSE file in the project root directory for detailed license terms.
# By using this code, you agree to the above principles and all terms in the
# LICENSE.
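
# Usage sketch (illustrative): this script is normally launched from the
# command line. The exact flags are defined in cmd_arg and are not shown in
# this file; a --platform flag selecting one of the CrawlerFactory.CRAWLERS
# keys ("xhs", "dy", "ks", "bili", "wb", "tieba", "zhihu") is assumed here:
#
#     python main.py --platform xhs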

import asyncio
import sys

import cmd_arg
import config
import db
from base.base_crawler import AbstractCrawler
from media_platform.bilibili import BilibiliCrawler
from media_platform.douyin import DouYinCrawler
from media_platform.kuaishou import KuaishouCrawler
from media_platform.tieba import TieBaCrawler
from media_platform.weibo import WeiboCrawler
from media_platform.xhs import XiaoHongShuCrawler
from media_platform.zhihu import ZhihuCrawler


class CrawlerFactory:
    # Map a platform key to its crawler implementation.
    CRAWLERS = {
        "xhs": XiaoHongShuCrawler,
        "dy": DouYinCrawler,
        "ks": KuaishouCrawler,
        "bili": BilibiliCrawler,
        "wb": WeiboCrawler,
        "tieba": TieBaCrawler,
        "zhihu": ZhihuCrawler,
    }

    @staticmethod
    def create_crawler(platform: str) -> AbstractCrawler:
        crawler_class = CrawlerFactory.CRAWLERS.get(platform)
        if not crawler_class:
            raise ValueError(
                f"Invalid media platform '{platform}'. "
                f"Supported platforms: {', '.join(CrawlerFactory.CRAWLERS)}"
            )
        return crawler_class()
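
# Extension sketch (illustrative): supporting an additional platform only
# requires registering its crawler class in CRAWLERS, provided the class
# implements AbstractCrawler. "NewSiteCrawler" and "newsite" are hypothetical:
#
#     CrawlerFactory.CRAWLERS["newsite"] = NewSiteCrawler
#     crawler = CrawlerFactory.create_crawler(platform="newsite")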


async def main():
    # Parse command-line arguments.
    await cmd_arg.parse_cmd()

    # Initialize the database only when results are persisted to a database.
    if config.SAVE_DATA_OPTION == "db":
        await db.init_db()

    # Build the crawler for the configured platform and run it.
    crawler = CrawlerFactory.create_crawler(platform=config.PLATFORM)
    await crawler.start()

    # Release database resources once crawling has finished.
    if config.SAVE_DATA_OPTION == "db":
        await db.close()
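
# Programmatic usage sketch (illustrative): main() can also be driven by
# assigning the config attributes read above before invoking it, assuming
# cmd_arg.parse_cmd() leaves pre-set config values intact when no command-line
# flags are passed (its defaults are not shown in this file):
#
#     config.PLATFORM = "xhs"
#     config.SAVE_DATA_OPTION = "db"
#     asyncio.run(main())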


if __name__ == '__main__':
    try:
        # asyncio.run(main()) would also work here, but it creates and closes
        # its own event loop; the explicit loop is driven directly instead.
        asyncio.get_event_loop().run_until_complete(main())
    except KeyboardInterrupt:
        sys.exit()