feat: improve type annotations and add mypy type checking
This commit is contained in:
parent e5bdc63323
commit 745e59c875
@@ -20,10 +20,6 @@ class AbstractLogin(ABC):
     async def begin(self):
         pass
 
-    @abstractmethod
-    async def check_login_state(self):
-        pass
-
     @abstractmethod
     async def login_by_qrcode(self):
         pass
@@ -1,4 +1,4 @@
-from typing import Tuple, Optional
+from typing import Tuple, Optional, List, Set
 
 import config
 
@@ -6,9 +6,9 @@ import config
 class PhonePool:
     """phone pool class"""
 
-    def __init__(self):
-        self.phones = []
-        self.used_phones = set()
+    def __init__(self) -> None:
+        self.phones: List[str] = []
+        self.used_phones: Set[str] = set()
 
     def add_phone(self, phone: str) -> bool:
         """add phone to the pool"""
@@ -40,9 +40,9 @@ class PhonePool:
 
 
 class IPPool:
-    def __init__(self):
-        self.ips = []
-        self.used_ips = set()
+    def __init__(self) -> None:
+        self.ips: List[str] = []
+        self.used_ips: Set[str] = set()
 
     def add_ip(self, ip):
         """添加ip"""
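
Note: annotating empty containers at the point of creation is what makes the rest of the class checkable; on a bare `self.phones = []` mypy has nothing to infer from and reports "Need type annotation". A minimal sketch of the pattern (class and names invented for illustration):

    from typing import List, Set

    class Pool:
        def __init__(self) -> None:
            self.items: List[str] = []   # element type declared up front
            self.used: Set[str] = set()  # mypy now rejects self.used.add(123)

        def take(self) -> str:
            item = self.items.pop()
            self.used.add(item)          # OK: str into Set[str]
            return item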
@@ -2,7 +2,7 @@
 PLATFORM = "xhs"
 KEYWORDS = "健身,旅游"
 LOGIN_TYPE = "qrcode"  # qrcode or phone or cookies
-COOKIES = ""
+COOKIES = "web_session=xxxxcfed1566xxxxxxxxxxxxxxxxxxx;"  # if platform is xhs, please set only the web_session cookie attr
 
 # redis config
 REDIS_DB_HOST = "redis://127.0.0.1"  # your redis host
@@ -18,7 +18,7 @@ RETRY_INTERVAL = 60 * 30  # 30 minutes
 HEADLESS = True
 
 # save login state
-SAVE_LOGIN_STATE = True
+SAVE_LOGIN_STATE = False
 
 # save user data dir
 USER_DATA_DIR = "%s_user_data_dir"  # %s will be replaced by platform name
@@ -18,9 +18,10 @@ class DOUYINClient:
         self,
         timeout=30,
         proxies=None,
-        headers: Optional[Dict] = None,
-        playwright_page: Page = None,
-        cookie_dict: Dict = None
+        *,
+        headers: Dict,
+        playwright_page: Optional[Page],
+        cookie_dict: Dict
     ):
         self.proxies = proxies
         self.timeout = timeout
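
Note: the bare `*` added here makes every following parameter keyword-only, and dropping the `= None` defaults gives `headers` and `cookie_dict` honest required types (the old `playwright_page: Page = None` was itself a type error under mypy). A usage sketch, with argument values invented for illustration:

    client = DOUYINClient(
        timeout=30,
        proxies=None,
        headers={"User-Agent": "Mozilla/5.0 ..."},  # now required, must be named
        playwright_page=None,                       # Optional[Page] still admits None
        cookie_dict={},
    )
    # Passing headers positionally, e.g. DOUYINClient(30, None, {...}),
    # now raises TypeError because it sits after the bare '*'.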
@@ -33,7 +34,7 @@ class DOUYINClient:
         if not params:
             return
         headers = headers or self.headers
-        local_storage: Dict = await self.playwright_page.evaluate("() => window.localStorage")
+        local_storage: Dict = await self.playwright_page.evaluate("() => window.localStorage")  # type: ignore
         douyin_js_obj = execjs.compile(open('libs/douyin.js').read())
         common_params = {
             "device_platform": "webapp",
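
Note: `# type: ignore` suppresses mypy for that single line; it is needed here because `self.playwright_page` is typed `Optional[Page]`, so mypy warns that `.evaluate()` may be called on None. The stricter alternative is to narrow the Optional explicitly, roughly (helper name invented):

    from typing import Optional
    from playwright.async_api import Page

    async def eval_local_storage(page: Optional[Page]) -> dict:
        if page is None:  # narrowing instead of '# type: ignore'
            raise RuntimeError("playwright page not initialised")
        storage: dict = await page.evaluate("() => window.localStorage")
        return storage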
@@ -141,7 +142,7 @@ class DOUYINClient:
         del headers["Origin"]
         return await self.get("/aweme/v1/web/aweme/detail/", params, headers)
 
-    async def get_aweme_comments(self, aweme_id: str, cursor: str = ""):
+    async def get_aweme_comments(self, aweme_id: str, cursor: int = 0):
         """get note comments
 
         """
@@ -20,20 +20,21 @@ from models import douyin
 
 
 class DouYinCrawler(AbstractCrawler):
-    def __init__(self):
-        self.browser_context: Optional[BrowserContext] = None
-        self.context_page: Optional[Page] = None
+    dy_client: DOUYINClient
+
+    def __init__(self) -> None:
+        self.browser_context: Optional[BrowserContext] = None  # type: ignore
+        self.context_page: Optional[Page] = None  # type: ignore
         self.user_agent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36"  # fixed
-        self.dy_client: Optional[DOUYINClient] = None
         self.index_url = "https://www.douyin.com"
-        self.command_args: Optional[Namespace] = None
-        self.account_pool: Optional[AccountPool] = None
+        self.command_args: Optional[Namespace] = None  # type: ignore
+        self.account_pool: Optional[AccountPool] = None  # type: ignore
 
     def init_config(self, **kwargs):
         for key, value in kwargs.items():
             setattr(self, key, value)
 
-    async def start(self):
+    async def start(self) -> None:
         account_phone, playwright_proxy, httpx_proxy = self.create_proxy_info()
         async with async_playwright() as playwright:
             # Launch a browser context.
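
Note: `dy_client: DOUYINClient` is a class-level annotation: it declares the attribute's type without assigning a value, so later uses need no None handling; the trade-off against the `Optional[...] = None` style is that mypy then trusts the attribute is assigned before first use. Both styles, sketched with a placeholder class:

    from typing import Optional

    class Client: ...

    class CrawlerA:
        client: Client                # declared only; assigned later, e.g. in start()

    class CrawlerB:
        def __init__(self) -> None:
            self.client: Optional[Client] = None  # every use now needs a None check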
@@ -52,7 +53,7 @@ class DouYinCrawler(AbstractCrawler):
             self.dy_client = await self.create_douyin_client(httpx_proxy)
             if not await self.dy_client.ping(browser_context=self.browser_context):
                 login_obj = DouYinLogin(
-                    login_type=self.command_args.lt,
+                    login_type=self.command_args.lt,  # type: ignore
                     login_phone=account_phone,
                     browser_context=self.browser_context,
                     context_page=self.context_page,
@@ -66,7 +67,7 @@ class DouYinCrawler(AbstractCrawler):
 
         utils.logger.info("Douyin Crawler finished ...")
 
-    async def search_posts(self):
+    async def search_posts(self) -> None:
         utils.logger.info("Begin search douyin keywords")
         for keyword in config.KEYWORDS.split(","):
             utils.logger.info(f"Current keyword: {keyword}")
@@ -87,7 +88,7 @@ class DouYinCrawler(AbstractCrawler):
                     post_item.get("aweme_mix_info", {}).get("mix_items")[0]
                 except TypeError:
                     continue
-                aweme_list.append(aweme_info.get("aweme_id"))
+                aweme_list.append(aweme_info.get("aweme_id", ""))
                 await douyin.update_douyin_aweme(aweme_item=aweme_info)
             utils.logger.info(f"keyword:{keyword}, aweme_list:{aweme_list}")
             # await self.batch_get_note_comments(aweme_list)
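
Note: `dict.get(key)` with no default is typed `Optional[...]`, which cannot be appended to a `List[str]`; passing a default (`.get("aweme_id", "")`) narrows the result to `str`. Compact illustration:

    from typing import Dict, List

    aweme_info: Dict[str, str] = {"aweme_id": "7001"}
    aweme_list: List[str] = []

    aweme_list.append(aweme_info.get("aweme_id", ""))  # str: accepted
    # aweme_list.append(aweme_info.get("aweme_id"))    # Optional[str]: mypy error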
@@ -115,7 +116,7 @@ class DouYinCrawler(AbstractCrawler):
             return None, None, None
 
         # phone: 13012345671 ip_proxy: 111.122.xx.xx1:8888
-        phone, ip_proxy = self.account_pool.get_account()
+        phone, ip_proxy = self.account_pool.get_account()  # type: ignore
         playwright_proxy = {
             "server": f"{config.IP_PROXY_PROTOCOL}{ip_proxy}",
             "username": config.IP_PROXY_USER,
@@ -124,9 +125,9 @@ class DouYinCrawler(AbstractCrawler):
         httpx_proxy = f"{config.IP_PROXY_PROTOCOL}{config.IP_PROXY_USER}:{config.IP_PROXY_PASSWORD}@{ip_proxy}"
         return phone, playwright_proxy, httpx_proxy
 
-    async def create_douyin_client(self, httpx_proxy: str) -> DOUYINClient:
+    async def create_douyin_client(self, httpx_proxy: Optional[str]) -> DOUYINClient:
         """Create douyin client"""
-        cookie_str, cookie_dict = utils.convert_cookies(await self.browser_context.cookies())
+        cookie_str, cookie_dict = utils.convert_cookies(await self.browser_context.cookies())  # type: ignore
         douyin_client = DOUYINClient(
             proxies=httpx_proxy,
             headers={
@@ -151,18 +152,18 @@ class DouYinCrawler(AbstractCrawler):
     ) -> BrowserContext:
         """Launch browser and create browser context"""
         if config.SAVE_LOGIN_STATE:
-            user_data_dir = os.path.join(os.getcwd(), "browser_data", config.USER_DATA_DIR % self.command_args.platform)
+            user_data_dir = os.path.join(os.getcwd(), "browser_data", config.USER_DATA_DIR % self.command_args.platform)  # type: ignore
             browser_context = await chromium.launch_persistent_context(
                 user_data_dir=user_data_dir,
                 accept_downloads=True,
                 headless=headless,
-                proxy=playwright_proxy,
+                proxy=playwright_proxy,  # type: ignore
                 viewport={"width": 1920, "height": 1080},
                 user_agent=user_agent
-            )
+            )  # type: ignore
             return browser_context
         else:
-            browser = await chromium.launch(headless=headless, proxy=playwright_proxy)
+            browser = await chromium.launch(headless=headless, proxy=playwright_proxy)  # type: ignore
             browser_context = await browser.new_context(
                 viewport={"width": 1920, "height": 1080},
                 user_agent=user_agent
@@ -1,6 +1,7 @@
 import sys
 import asyncio
 import functools
+from typing import Optional
 
 import aioredis
 from tenacity import (
@@ -22,10 +23,10 @@ class DouYinLogin(AbstractLogin):
 
     def __init__(self,
                  login_type: str,
-                 browser_context: BrowserContext,
-                 context_page: Page,
-                 login_phone: str = None,
-                 cookie_str: str = None
+                 browser_context: BrowserContext,  # type: ignore
+                 context_page: Page,  # type: ignore
+                 login_phone: Optional[str] = "",
+                 cookie_str: Optional[str] = ""
                  ):
         self.login_type = login_type
         self.browser_context = browser_context
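
Note: `login_phone: str = None` relies on mypy's old implicit-Optional behavior, which recent mypy releases reject by default (no_implicit_optional); the commit fixes it by making the Optional explicit while switching the default to an empty string. The three variants side by side:

    from typing import Optional

    def bad(phone: str = None): ...            # error: None not compatible with str
    def ok_explicit(phone: Optional[str] = None): ...
    def ok_sentinel(phone: str = ""): ...      # non-None default, simpler type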
@@ -202,14 +203,14 @@ class DouYinLogin(AbstractLogin):
             selector=back_selector,
             timeout=1000 * 10,  # wait 10 seconds
         )
-        slide_back = str(await slider_back_elements.get_property("src"))
+        slide_back = str(await slider_back_elements.get_property("src"))  # type: ignore
 
         # get slider gap image
         gap_elements = await self.context_page.wait_for_selector(
             selector=gap_selector,
             timeout=1000 * 10,  # wait 10 seconds
         )
-        gap_src = str(await gap_elements.get_property("src"))
+        gap_src = str(await gap_elements.get_property("src"))  # type: ignore
 
         # 识别滑块位置
         slide_app = utils.Slide(gap=gap_src, bg=slide_back)
@@ -223,14 +224,14 @@ class DouYinLogin(AbstractLogin):
 
         # 根据轨迹拖拽滑块到指定位置
         element = await self.context_page.query_selector(gap_selector)
-        bounding_box = await element.bounding_box()
+        bounding_box = await element.bounding_box()  # type: ignore
 
-        await self.context_page.mouse.move(bounding_box["x"] + bounding_box["width"] / 2,
-                                           bounding_box["y"] + bounding_box["height"] / 2)
+        await self.context_page.mouse.move(bounding_box["x"] + bounding_box["width"] / 2,  # type: ignore
+                                           bounding_box["y"] + bounding_box["height"] / 2)  # type: ignore
         # 这里获取到x坐标中心点位置
-        x = bounding_box["x"] + bounding_box["width"] / 2
+        x = bounding_box["x"] + bounding_box["width"] / 2  # type: ignore
         # 模拟滑动操作
-        await element.hover()
+        await element.hover()  # type: ignore
         await self.context_page.mouse.down()
 
         for track in tracks:
@@ -17,9 +17,10 @@ class XHSClient:
         self,
         timeout=10,
         proxies=None,
-        headers: Optional[Dict] = None,
-        playwright_page: Page = None,
-        cookie_dict: Dict = None
+        *,
+        headers: Dict[str, str],
+        playwright_page: Page,
+        cookie_dict: Dict[str, str],
     ):
         self.proxies = proxies
         self.timeout = timeout
@@ -51,21 +52,21 @@ class XHSClient:
         self.headers.update(headers)
         return self.headers
 
-    async def request(self, method, url, **kwargs):
+    async def request(self, method, url, **kwargs) -> Dict:
         async with httpx.AsyncClient(proxies=self.proxies) as client:
             response = await client.request(
                 method, url, timeout=self.timeout,
                 **kwargs
             )
-        data = response.json()
+        data: Dict = response.json()
         if data["success"]:
-            return data.get("data", data.get("success"))
+            return data.get("data", data.get("success", {}))
         elif data["code"] == self.IP_ERROR_CODE:
             raise IPBlockError(self.IP_ERROR_STR)
         else:
             raise DataFetchError(data.get("msg", None))
 
-    async def get(self, uri: str, params=None):
+    async def get(self, uri: str, params=None) -> Dict:
         final_uri = uri
         if isinstance(params, dict):
             final_uri = (f"{uri}?"
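
Note: `response.json()` is typed as returning `Any`, and with `warn_return_any = True` (see the mypy.ini added in this commit) returning that straight from a function declared `-> Dict` triggers a warning; binding it to an annotated local first, as `data: Dict = response.json()` does above, is the usual fix. Self-contained sketch:

    import json
    from typing import Dict

    def parse_bad(raw: str) -> Dict:
        return json.loads(raw)        # warning: returning Any from function declared to return Dict

    def parse_good(raw: str) -> Dict:
        data: Dict = json.loads(raw)  # Any may flow into an annotated variable
        return data                   # returns a Dict-typed name: no warning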
@@ -73,7 +74,7 @@ class XHSClient:
         headers = await self._pre_headers(final_uri)
         return await self.request(method="GET", url=f"{self._host}{final_uri}", headers=headers)
 
-    async def post(self, uri: str, data: dict):
+    async def post(self, uri: str, data: dict) -> Dict:
         headers = await self._pre_headers(uri, data)
         json_str = json.dumps(data, separators=(',', ':'), ensure_ascii=False)
         return await self.request(method="POST", url=f"{self._host}{uri}",
@@ -86,7 +87,7 @@ class XHSClient:
         try:
             note_card: Dict = await self.get_note_by_id(note_id)
             return note_card.get("note_id") == note_id
-        except DataFetchError:
+        except Exception:
             return False
 
     async def update_cookies(self, browser_context: BrowserContext):
@@ -128,7 +129,8 @@ class XHSClient:
         data = {"source_note_id": note_id}
         uri = "/api/sns/web/v1/feed"
         res = await self.post(uri, data)
-        return res["items"][0]["note_card"]
+        res_dict: Dict = res["items"][0]["note_card"]
+        return res_dict
 
     async def get_note_comments(self, note_id: str, cursor: str = "") -> Dict:
         """get note comments
@@ -21,15 +21,15 @@ from base.proxy_account_pool import AccountPool
 
 
 class XiaoHongShuCrawler(AbstractCrawler):
+    context_page: Page
+    browser_context: BrowserContext
+    xhs_client: XHSClient
+    account_pool: AccountPool
+
     def __init__(self):
-        self.browser_context: Optional[BrowserContext] = None
-        self.context_page: Optional[Page] = None
-        self.user_agent = utils.get_user_agent()
-        self.xhs_client: Optional[XHSClient] = None
         self.index_url = "https://www.xiaohongshu.com"
-        self.command_args: Optional[Namespace] = None
-        self.account_pool: Optional[AccountPool] = None
+        self.command_args: Optional[Namespace] = None  # type: ignore
+        self.user_agent = utils.get_user_agent()
 
     def init_config(self, **kwargs):
         for key, value in kwargs.items():
@@ -69,7 +69,7 @@ class XiaoHongShuCrawler(AbstractCrawler):
 
         utils.logger.info("Xhs Crawler finished ...")
 
-    async def search_posts(self):
+    async def search_posts(self) -> None:
         """Search for notes and retrieve their comment information."""
         utils.logger.info("Begin search xiaohongshu keywords")
 
@@ -86,7 +86,7 @@ class XiaoHongShuCrawler(AbstractCrawler):
         _semaphore = asyncio.Semaphore(config.MAX_CONCURRENCY_NUM)
         task_list = [
             self.get_note_detail(post_item.get("id"), _semaphore)
-            for post_item in posts_res.get("items")
+            for post_item in posts_res.get("items", {})
         ]
         note_details = await asyncio.gather(*task_list)
         for note_detail in note_details:
@@ -170,18 +170,18 @@ class XiaoHongShuCrawler(AbstractCrawler):
         if config.SAVE_LOGIN_STATE:
             # feat issue #14
             # we will save login state to avoid login every time
-            user_data_dir = os.path.join(os.getcwd(), "browser_data", config.USER_DATA_DIR % self.command_args.platform)
+            user_data_dir = os.path.join(os.getcwd(), "browser_data", config.USER_DATA_DIR % self.command_args.platform)  # type: ignore
             browser_context = await chromium.launch_persistent_context(
                 user_data_dir=user_data_dir,
                 accept_downloads=True,
                 headless=headless,
-                proxy=playwright_proxy,
+                proxy=playwright_proxy,  # type: ignore
                 viewport={"width": 1920, "height": 1080},
                 user_agent=user_agent
             )
             return browser_context
         else:
-            browser = await chromium.launch(headless=headless, proxy=playwright_proxy)
+            browser = await chromium.launch(headless=headless, proxy=playwright_proxy)  # type: ignore
             browser_context = await browser.new_context(
                 viewport={"width": 1920, "height": 1080},
                 user_agent=user_agent
@@ -92,7 +92,7 @@ def mrc(e):
     ]
     o = -1
 
-def right_without_sign(num, bit=0) -> int:
+def right_without_sign(num: int, bit: int = 0) -> int:
     val = ctypes.c_uint32(num).value >> bit
     MAX32INT = 4294967295
     return (val + (MAX32INT + 1)) % (2 * (MAX32INT + 1)) - MAX32INT - 1
@@ -24,8 +24,8 @@ class XHSLogin(AbstractLogin):
                  login_type: str,
                  browser_context: BrowserContext,
                  context_page: Page,
-                 login_phone: str = None,
-                 cookie_str: str = None
+                 login_phone: str = "",
+                 cookie_str: str = ""
                  ):
         self.login_type = login_type
         self.browser_context = browser_context
@@ -39,10 +39,10 @@ async def update_dy_aweme_comment(aweme_id: str, comment_item: Dict):
     if aweme_id != comment_aweme_id:
         print(f"comment_aweme_id: {comment_aweme_id} != aweme_id: {aweme_id}")
         return
-    user_info = comment_item.get("user")
+    user_info = comment_item.get("user", {})
     comment_id = comment_item.get("cid")
-    avatar_info = user_info.get("avatar_medium") or user_info.get("avatar_300x300") or user_info.get(
-        "avatar_168x168") or user_info.get("avatar_thumb") or {}
+    avatar_info = user_info.get("avatar_medium", {}) or user_info.get("avatar_300x300", {}) or user_info.get(
+        "avatar_168x168", {}) or user_info.get("avatar_thumb", {}) or {}
     local_db_item = {
         "comment_id": comment_id,
         "create_time": comment_item.get("create_time"),
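
Note: without a default, `comment_item.get("user")` is `Optional[Dict]`, so chaining another `.get` on it is a mypy error ("None has no attribute get"); the `{}` defaults keep every link in the chain typed as `Dict`. Minimal illustration:

    from typing import Dict

    comment_item: Dict[str, Dict] = {}

    user_info = comment_item.get("user", {})      # Dict, never None
    avatar = user_info.get("avatar_medium", {})   # safe to chain further
    # comment_item.get("user").get("avatar_medium")  # error: result may be None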
@@ -1,4 +1,4 @@
-from typing import Dict
+from typing import Dict, List
 
 from tools import utils
 
@@ -7,7 +7,7 @@ async def update_xhs_note(note_item: Dict):
     note_id = note_item.get("note_id")
     user_info = note_item.get("user", {})
     interact_info = note_item.get("interact_info")
-    image_list = note_item.get("image_list")
+    image_list: List[Dict] = note_item.get("image_list", [])
 
     local_db_item = {
         "note_id": note_item.get("note_id"),
@@ -20,7 +20,7 @@ async def update_xhs_note(note_item: Dict):
         "nickname": user_info.get("nickname"),
         "avatar": user_info.get("avatar"),
         "ip_location": note_item.get("ip_location", ""),
-        "image_list": ','.join([img.get('url') for img in image_list]),
+        "image_list": ','.join([img.get('url', '') for img in image_list]),
         "last_modify_ts": utils.get_current_timestamp(),
     }
     # do something ...
@@ -28,7 +28,7 @@ async def update_xhs_note(note_item: Dict):
 
 
 async def update_xhs_note_comment(note_id: str, comment_item: Dict):
-    user_info = comment_item.get("user_info")
+    user_info = comment_item.get("user_info", {})
     comment_id = comment_item.get("id")
     local_db_item = {
         "comment_id": comment_id,
@@ -0,0 +1,9 @@
+[mypy]
+warn_return_any = True
+warn_unused_configs = True
+
+[mypy-cv2]
+ignore_missing_imports = True
+
+[mypy-execjs]
+ignore_missing_imports = True
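
Note: this mypy.ini appears to sit at the project root, where a plain `mypy .` picks it up automatically. `warn_return_any` flags returning an `Any`-typed value from a function with a declared return type (the motivation for many of the annotated locals above), `warn_unused_configs` reports per-module sections that match no files, and the `[mypy-cv2]` / `[mypy-execjs]` sections silence missing-stub errors for those untyped third-party packages.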
@@ -2,6 +2,7 @@
 import re
 import json
 import asyncio
+from typing import List
 
 import aioredis
 import tornado.web
@@ -14,7 +15,7 @@ def extract_verification_code(message) -> str:
     Extract verification code of 6 digits from the SMS.
     """
     pattern = re.compile(r'\b[0-9]{6}\b')
-    codes = pattern.findall(message)
+    codes: List[str] = pattern.findall(message)
     return codes[0] if codes and len(codes) > 0 else ""
 
 
@@ -0,0 +1 @@
+# -*- coding: utf-8 -*-
@@ -0,0 +1,13 @@
+# -*- coding: utf-8 -*-
+from tools import utils
+
+
+def test_convert_cookies():
+    xhs_cookies = "a1=x000101360; webId=1190c4d3cxxxx125xxx; "
+    cookie_dict = utils.convert_str_cookie_to_dict(xhs_cookies)
+    assert cookie_dict.get("webId") == "1190c4d3cxxxx125xxx"
+    assert cookie_dict.get("a1") == "x000101360"
+
+
+if __name__ == '__main__':
+    test_convert_cookies()
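
Note: the new test follows pytest naming conventions (a module-level `test_*` function with bare asserts), so besides the `python` entry point shown it should also be collectable by `pytest` without changes, assuming pytest is installed in the project.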
@@ -4,6 +4,7 @@
 # thanks to aneasystone for his great work
 import numpy as np
 import math
+from typing import List, Tuple
 
 
 # https://github.com/gdsmith/jquery.easing/blob/master/jquery.easing.js
@@ -52,7 +53,7 @@ def ease_out_elastic(x):
     return pow(2, -10 * x) * math.sin((x * 10 - 0.75) * c4) + 1
 
 
-def get_tracks(distance, seconds, ease_func):
+def get_tracks(distance, seconds, ease_func) -> Tuple[List[int], List[int]]:
     tracks = [0]
     offsets = [0]
     for t in np.arange(0.0, seconds, 0.1):
@@ -22,7 +22,7 @@ async def find_login_qrcode(page: Page, selector: str) -> str:
         elements = await page.wait_for_selector(
             selector=selector,
         )
-        login_qrcode_img = await elements.get_property("src")
+        login_qrcode_img = await elements.get_property("src")  # type: ignore
         return str(login_qrcode_img)
 
     except Exception as e:
@@ -30,7 +30,7 @@ async def find_login_qrcode(page: Page, selector: str) -> str:
         return ""
 
 
-def show_qrcode(qr_code: str):
+def show_qrcode(qr_code) -> None:  # type: ignore
     """parse base64 encode qrcode image and show it"""
     qr_code = qr_code.split(",")[1]
     qr_code = base64.b64decode(qr_code)
@@ -68,20 +68,20 @@ def convert_cookies(cookies: Optional[List[Cookie]]) -> Tuple[str, Dict]:
 
 
 def convert_str_cookie_to_dict(cookie_str: str) -> Dict:
-    cookie_dict = dict()
+    cookie_dict: Dict[str, str] = dict()
     if not cookie_str:
         return cookie_dict
     for cookie in cookie_str.split(";"):
         cookie = cookie.strip()
         if not cookie:
             continue
-        cookie = cookie.split("=")
-        if len(cookie) != 2:
+        cookie_list = cookie.split("=")
+        if len(cookie_list) != 2:
             continue
-        cookie_value = cookie[1]
+        cookie_value = cookie_list[1]
         if isinstance(cookie_value, list):
             cookie_value = "".join(cookie_value)
-        cookie_dict[cookie[0]] = cookie_value
+        cookie_dict[cookie_list[0]] = cookie_value
     return cookie_dict
 
 
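
Note: the `cookie` to `cookie_list` rename is required by mypy rather than cosmetic: a variable inferred as `str` cannot be re-assigned the `List[str]` result of `.split("=")` in the same scope. Minimal illustration:

    def split_pair(raw: str):
        raw = raw.strip()             # still str: fine
        # raw = raw.split("=")        # error: List[str] assigned to str variable
        parts = raw.split("=")        # fresh name, cleanly typed List[str]
        return parts[0], parts[1]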
@@ -228,11 +228,11 @@ class Slide:
         return x
 
 
-def get_track_simple(distance):
+def get_track_simple(distance) -> List[int]:
     # 有的检测移动速度的 如果匀速移动会被识别出来,来个简单点的 渐进
    # distance为传入的总距离
     # 移动轨迹
-    track = []
+    track: List[int] = []
     # 当前位移
     current = 0
     # 减速阈值
@@ -251,11 +251,11 @@ def get_track_simple(distance):
         a = -3
         v0 = v
         # 当前速度
-        v = v0 + a * t
+        v = v0 + a * t  # type: ignore
         # 移动距离
         move = v0 * t + 1 / 2 * a * t * t
         # 当前位移
-        current += move
+        current += move  # type: ignore
         # 加入轨迹
         track.append(round(move))
     return track