fix: keep search_id constant across pages
commit 78c09c4ae1
parent fa2bcc4181
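Before this change, get_note_by_keyword called get_search_id() internally on every request, so each page of a keyword search went out with a different search_id. The commit generates the ID once per keyword, before the pagination loop, and passes the same value with every page request.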
@@ -190,6 +190,7 @@ class XiaoHongShuClient(AbstractApiClient):
 
     async def get_note_by_keyword(
             self, keyword: str,
+            search_id: str = get_search_id(),
             page: int = 1, page_size: int = 20,
             sort: SearchSortType = SearchSortType.GENERAL,
             note_type: SearchNoteType = SearchNoteType.ALL
@@ -211,7 +212,7 @@ class XiaoHongShuClient(AbstractApiClient):
             "keyword": keyword,
             "page": page,
             "page_size": page_size,
-            "search_id": get_search_id(),
+            "search_id": search_id,
             "sort": sort.value,
             "note_type": note_type.value
         }
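One caveat with the new signature (general Python semantics, not something this diff changes): a default like search_id: str = get_search_id() is evaluated once, when the method is defined, so any caller that omits the argument reuses a single process-wide ID. The crawler change below sidesteps this by always passing search_id explicitly. A minimal sketch of that behavior, with a hypothetical make_id standing in for get_search_id:

import itertools

_counter = itertools.count(1)

def make_id() -> str:
    # Hypothetical stand-in for get_search_id(): returns a fresh ID per call.
    return f"id-{next(_counter)}"

def search(keyword: str, search_id: str = make_id()) -> str:
    # make_id() above ran once, at definition time, not once per call.
    return search_id

print(search("a"))                       # id-1
print(search("b"))                       # id-1 -- default is not re-evaluated
print(search("c", search_id=make_id()))  # id-2 -- explicit callers get a fresh ID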
@@ -31,7 +31,7 @@ from var import crawler_type_var, source_keyword_var
 from .client import XiaoHongShuClient
 from .exception import DataFetchError
 from .field import SearchSortType
-from .help import parse_note_info_from_note_url
+from .help import parse_note_info_from_note_url, get_search_id
 from .login import XiaoHongShuLogin
@@ -112,6 +112,7 @@ class XiaoHongShuCrawler(AbstractCrawler):
             source_keyword_var.set(keyword)
             utils.logger.info(f"[XiaoHongShuCrawler.search] Current search keyword: {keyword}")
             page = 1
+            search_id = get_search_id()
             while (page - start_page + 1) * xhs_limit_count <= config.CRAWLER_MAX_NOTES_COUNT:
                 if page < start_page:
                     utils.logger.info(f"[XiaoHongShuCrawler.search] Skip page {page}")
@@ -123,6 +124,7 @@ class XiaoHongShuCrawler(AbstractCrawler):
                     note_id_list: List[str] = []
                     notes_res = await self.xhs_client.get_note_by_keyword(
                         keyword=keyword,
+                        search_id=search_id,
                         page=page,
                         sort=SearchSortType(config.SORT_TYPE) if config.SORT_TYPE != '' else SearchSortType.GENERAL,
                     )
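Taken together, the flow after this commit is: one search_id per keyword, shared by all of that keyword's page requests. A self-contained sketch of the pattern (the client class and ID helper below are fakes; only the shape mirrors the real crawler):

import asyncio
import uuid

def get_search_id() -> str:
    # Stand-in for the real helper: one opaque ID per search session.
    return uuid.uuid4().hex

class FakeXhsClient:
    # Minimal stand-in for XiaoHongShuClient.get_note_by_keyword.
    async def get_note_by_keyword(self, keyword: str, search_id: str, page: int) -> dict:
        return {"keyword": keyword, "search_id": search_id, "page": page}

async def search(keyword: str, max_pages: int = 3) -> None:
    client = FakeXhsClient()
    search_id = get_search_id()  # generated once, before paging starts
    for page in range(1, max_pages + 1):
        res = await client.get_note_by_keyword(keyword=keyword, search_id=search_id, page=page)
        print(res)  # same search_id on every page; only "page" changes

asyncio.run(search("camping"))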