import random
import asyncio
import logging
from asyncio import Task
from typing import Optional, List, Dict, Tuple
from argparse import Namespace

from playwright.async_api import Page
from playwright.async_api import Cookie
from playwright.async_api import BrowserContext
from playwright.async_api import async_playwright

import config
from tools import utils
from .exception import *
from .login import XHSLogin
from .client import XHSClient
from models import xhs as xhs_model
from base.base_crawler import AbstractCrawler
from base.proxy_account_pool import AccountPool


class XiaoHongShuCrawler(AbstractCrawler):
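    """Crawler for xiaohongshu.com: drives a Playwright browser context through
    login, then uses XHSClient to search notes by keyword and collect their
    comments."""
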
    def __init__(self):
        self.cookies: Optional[List[Cookie]] = None  # cookies from browser context
        self.browser_context: Optional[BrowserContext] = None
        self.context_page: Optional[Page] = None
        self.user_agent = utils.get_user_agent()
        self.xhs_client: Optional[XHSClient] = None
        self.index_url = "https://www.xiaohongshu.com"
        self.command_args: Optional[Namespace] = None
        self.account_pool: Optional[AccountPool] = None

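    # The entry point injects runtime state (e.g. command_args, account_pool)
    # as attributes on the crawler instance.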
    def init_config(self, **kwargs):
        for key, value in kwargs.items():
            setattr(self, key, value)

    async def update_cookies(self):
        self.cookies = await self.browser_context.cookies()

    def create_proxy_info(self) -> Tuple[str, Dict, str]:
        """Create proxy info for playwright and httpx"""
        # phone: 13012345671
        # ip_proxy: 111.122.xx.xx1:8888
        # Both the phone number and the IP proxy come from the account pool,
        # and each phone number is permanently bound to its proxy.
        phone, ip_proxy = self.account_pool.get_account()
        playwright_proxy = {
            "server": f"{config.IP_PROXY_PROTOCOL}{ip_proxy}",
            "username": config.IP_PROXY_USER,
            "password": config.IP_PROXY_PASSWORD,
        }
        httpx_proxy = f"{config.IP_PROXY_PROTOCOL}{config.IP_PROXY_USER}:{config.IP_PROXY_PASSWORD}@{ip_proxy}"
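        # With hypothetical values IP_PROXY_PROTOCOL="http://", IP_PROXY_USER="user",
        # IP_PROXY_PASSWORD="pass" and ip_proxy="111.122.xx.xx1:8888", the
        # httpx_proxy string above becomes "http://user:pass@111.122.xx.xx1:8888".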
        return phone, playwright_proxy, httpx_proxy

    async def start(self):
        account_phone, playwright_proxy, httpx_proxy = self.create_proxy_info()
        if not config.ENABLE_IP_PROXY:
            playwright_proxy, httpx_proxy = None, None

        async with async_playwright() as playwright:
            # launch browser and create a single browser context
            chromium = playwright.chromium
            browser = await chromium.launch(headless=config.HEADLESS, proxy=playwright_proxy)
            self.browser_context = await browser.new_context(
                viewport={"width": 1920, "height": 1080},
                user_agent=self.user_agent
            )

            # execute JS to bypass anti-automation/crawler detection
            await self.browser_context.add_init_script(path="libs/stealth.min.js")
            self.context_page = await self.browser_context.new_page()
            await self.context_page.goto(self.index_url)

            # begin login
            login_obj = XHSLogin(
                login_type=self.command_args.lt,
                login_phone=account_phone,
                browser_context=self.browser_context,
                context_page=self.context_page,
                cookie_str=config.COOKIES
            )
            await login_obj.begin()

            # update cookies
            await self.update_cookies()

            # init request client
            cookie_str, cookie_dict = utils.convert_cookies(self.cookies)
            self.xhs_client = XHSClient(
                proxies=httpx_proxy,
                headers={
                    "User-Agent": self.user_agent,
                    "Cookie": cookie_str,
                    "Origin": "https://www.xiaohongshu.com",
                    "Referer": "https://www.xiaohongshu.com",
                    "Content-Type": "application/json;charset=UTF-8"
                },
                playwright_page=self.context_page,
                cookie_dict=cookie_dict,
            )

            # search for notes and retrieve their comment information
            await self.search_posts()

            # block the main crawler coroutine so background tasks keep running
            await asyncio.Event().wait()

    async def close(self):
        await self.browser_context.close()
        logging.info("Browser context closed ...")

    async def search_posts(self):
        logging.info("Begin search xiaohongshu keywords")
        for keyword in config.KEYWORDS.split(","):
            logging.info(f"Current keyword: {keyword}")
            note_list: List[str] = []
            max_note_len = 10
            page = 1
            while max_note_len > 0:
                posts_res = await self.xhs_client.get_note_by_keyword(
                    keyword=keyword,
                    page=page,
                )
                page += 1
                post_items = posts_res.get("items", [])
                if not post_items:
                    # no more results for this keyword; stop paging
                    break
                for post_item in post_items:
                    max_note_len -= 1
                    note_id = post_item.get("id")
                    try:
                        note_detail = await self.xhs_client.get_note_by_id(note_id)
                    except DataFetchError as ex:
                        logging.error(f"Get note detail error: {ex}")
                        continue
                    await xhs_model.update_xhs_note(note_detail)
                    await asyncio.sleep(0.05)
                    note_list.append(note_id)
            logging.info(f"keyword:{keyword}, note_list:{note_list}")
            await self.batch_get_note_comments(note_list)

    async def batch_get_note_comments(self, note_list: List[str]):
        task_list: List[Task] = []
        for note_id in note_list:
            # name each task after its note id to make debugging easier
            task = asyncio.create_task(self.get_comments(note_id), name=note_id)
            task_list.append(task)
        if not task_list:
            # asyncio.wait() raises ValueError when given an empty collection
            return
        await asyncio.wait(task_list)

    async def get_comments(self, note_id: str):
        logging.info(f"Begin get note id comments {note_id}")
        all_comments = await self.xhs_client.get_note_all_comments(note_id=note_id, crawl_interval=random.random())
        for comment in all_comments:
            await xhs_model.update_xhs_note_comment(note_id=note_id, comment_item=comment)
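

# A minimal usage sketch, not part of the original module: it shows one way the
# crawler might be wired up and run. start() reads command_args.lt, so the
# sketch exposes a --lt flag; the flag's default value and the no-argument
# AccountPool() construction are assumptions to adapt to the real entry point.
if __name__ == "__main__":
    import argparse

    logging.basicConfig(level=logging.INFO)
    parser = argparse.ArgumentParser()
    parser.add_argument("--lt", default="qrcode", help="login type (assumption: e.g. qrcode)")
    args = parser.parse_args()

    crawler = XiaoHongShuCrawler()
    crawler.init_config(
        command_args=args,
        account_pool=AccountPool(),  # assumption: default constructor
    )
    asyncio.run(crawler.start())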