Sort imports with isort; change the default Weibo image-fetching config to off
commit 9cd6efb916
parent 52c720591f
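The commit title describes two changes: a pass of isort over the Weibo modules, and one config default flipped from True to False. As a minimal illustration of what isort does to an unordered import block, the sketch below uses isort's Python API; the repository itself was presumably formatted with the `isort` command line, whose exact options are not recorded in this commit.

```python
# Minimal sketch of isort's effect, using its Python API (isort >= 5).
# The input block is invented for illustration.
import isort

messy = (
    "from tools import utils\n"
    "import httpx\n"
    "import asyncio\n"
    "from .exception import DataFetchError\n"
)

# isort returns the block with imports grouped by origin and alphabetized.
print(isort.code(messy))
```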
@@ -34,7 +34,7 @@ CRAWLER_MAX_NOTES_COUNT = 20
 MAX_CONCURRENCY_NUM = 4
 
 # 是否开启爬图片模式, 默认不开启爬图片
-ENABLE_GET_IMAGES = True
+ENABLE_GET_IMAGES = False
 
 # 是否开启爬评论模式, 默认不开启爬评论
 ENABLE_GET_COMMENTS = False
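This hunk carries the only behavioral change in the commit: `ENABLE_GET_IMAGES` now defaults to `False`, so image downloading becomes opt-in. Below is a hedged sketch of the kind of call site such a flag usually guards; the helper name and loop body are illustrative, not code from this commit.

```python
from typing import List

import config  # project-level module that defines ENABLE_GET_IMAGES


async def save_weibo_images(note_id: str, pic_urls: List[str]) -> int:
    """Hypothetical helper: skip all image work unless the flag is on."""
    if not config.ENABLE_GET_IMAGES:  # new default: False, so this returns early
        return 0
    saved = 0
    for url in pic_urls:
        ...  # download and persist each picture for the given note_id
        saved += 1
    return saved
```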
@@ -12,7 +12,6 @@ from urllib.parse import urlencode
 
 import httpx
 from playwright.async_api import BrowserContext, Page
-
 from tools import utils
 
 from .exception import DataFetchError
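Judging by the relative import of DataFetchError, this hunk is the Weibo API client; isort only drops a blank line here. The sketch below shows the kind of request helper these imports support; the helper name and the success check on the response body are assumptions, and DataFetchError is stubbed so the snippet stands alone.

```python
from typing import Dict, Optional

import httpx


class DataFetchError(Exception):
    """Stand-in for the client's .exception.DataFetchError."""


async def fetch_json(url: str, headers: Optional[Dict] = None) -> Dict:
    async with httpx.AsyncClient() as client:
        response = await client.get(url, headers=headers, timeout=10)
    data = response.json()
    if data.get("ok") != 1:  # assumed success marker of the mobile-web API
        raise DataFetchError(f"unexpected response: {data}")
    return data.get("data", {})
```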
@@ -10,11 +10,10 @@ import random
 from asyncio import Task
 from typing import Dict, List, Optional, Tuple
 
-from playwright.async_api import (BrowserContext, BrowserType, Page,
-                                  async_playwright)
-
 import config
 from base.base_crawler import AbstractCrawler
+from playwright.async_api import (BrowserContext, BrowserType, Page,
+                                  async_playwright)
 from proxy.proxy_ip_pool import IpInfoModel, create_ip_pool
 from store import weibo as weibo_store
 from tools import utils
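The moved import brings in Playwright's async API, which the crawler core uses to drive a real browser. Below is a self-contained sketch of the usual launch sequence for the imported names; the target URL and launch options are placeholders, not taken from this commit.

```python
import asyncio

from playwright.async_api import async_playwright


async def main() -> None:
    async with async_playwright() as playwright:
        browser = await playwright.chromium.launch(headless=True)
        browser_context = await browser.new_context()  # a BrowserContext
        page = await browser_context.new_page()        # a Page
        await page.goto("https://m.weibo.cn")          # placeholder target
        print(await page.title())
        await browser.close()


asyncio.run(main())
```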
@@ -8,11 +8,10 @@ import functools
 import sys
 from typing import Optional
 
+from base.base_crawler import AbstractLogin
 from playwright.async_api import BrowserContext, Page
 from tenacity import (RetryError, retry, retry_if_result, stop_after_attempt,
                       wait_fixed)
-
-from base.base_crawler import AbstractLogin
 from tools import utils
 
 
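The login module pulls in tenacity's retry helpers, which are typically used to poll until a login attempt succeeds. The decorator below is a hedged sketch of that pattern; the cookie name, attempt count, and wait interval are assumptions, not values read from this diff.

```python
from playwright.async_api import BrowserContext
from tenacity import retry, retry_if_result, stop_after_attempt, wait_fixed


@retry(stop=stop_after_attempt(20), wait=wait_fixed(1),
       retry=retry_if_result(lambda logged_in: logged_in is False))
async def check_login_state(browser_context: BrowserContext) -> bool:
    # Assumed check: a signed-in Weibo session carries a "SUB" cookie.
    cookies = await browser_context.cookies()
    return any(cookie["name"] == "SUB" for cookie in cookies)
```

Callers usually wrap the call in `try/except RetryError`, which is why RetryError is imported alongside the other helpers.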
@@ -7,8 +7,8 @@ from typing import List
 
 import config
 
-from .weibo_store_impl import *
 from .weibo_store_image import *
+from .weibo_store_impl import *
 
 
 class WeibostoreFactory:
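Here isort only swaps the two wildcard re-exports that feed WeibostoreFactory. The factory body is not shown in the hunk, so the sketch below is a generic version of the pattern, with placeholder store classes standing in for whatever weibo_store_impl actually exports.

```python
from typing import Dict, Type


class AbstractStore:
    """Stand-in for base.base_crawler.AbstractStore."""
    async def store_content(self, content_item: Dict) -> None: ...


class WeiboCsvStore(AbstractStore): ...   # placeholder implementations; the
class WeiboJsonStore(AbstractStore): ...  # real ones come from weibo_store_impl


class WeibostoreFactory:
    STORES: Dict[str, Type[AbstractStore]] = {
        "csv": WeiboCsvStore,
        "json": WeiboJsonStore,
    }

    @staticmethod
    def create_store(save_option: str) -> AbstractStore:
        store_class = WeibostoreFactory.STORES.get(save_option)
        if store_class is None:
            raise ValueError(f"unsupported save option: {save_option}")
        return store_class()
```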
@@ -3,12 +3,13 @@
 # @Time    : 2024/4/9 17:35
 # @Desc    : 微博保存图片类
 import pathlib
-
-from tools import utils
-from base.base_crawler import AbstractStoreImage
-import aiofiles
 from typing import Dict
 
+import aiofiles
+
+from base.base_crawler import AbstractStoreImage
+from tools import utils
 
 
 class WeiboStoreImage(AbstractStoreImage):
     image_store_path: str = "data/weibo/images"
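WeiboStoreImage is the image-saving class whose own header dates it to 2024/4/9; this commit only re-sorts its imports, and the method bodies are not part of the hunk. Below is a hedged sketch of how the imported pathlib/aiofiles pair is typically used to persist downloaded pictures; the payload keys and the .jpg suffix are assumptions.

```python
import pathlib
from typing import Dict

import aiofiles


class WeiboStoreImage:
    image_store_path: str = "data/weibo/images"

    async def store_image(self, image_content_item: Dict) -> None:
        # Assumed payload shape: a picture id plus the raw picture bytes.
        await self.save_image(image_content_item["pic_id"],
                              image_content_item["pic_content"])

    async def save_image(self, picture_id: str, picture_content: bytes) -> None:
        pathlib.Path(self.image_store_path).mkdir(parents=True, exist_ok=True)
        file_path = f"{self.image_store_path}/{picture_id}.jpg"
        async with aiofiles.open(file_path, "wb") as file:
            await file.write(picture_content)
```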
@@ -10,7 +10,6 @@ import pathlib
 from typing import Dict
 
 import aiofiles
-
 from base.base_crawler import AbstractStore
 from tools import utils
 from var import crawler_type_var
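The final hunk keeps `from var import crawler_type_var`, which the store layer reads, presumably to tag its output. In the project this is most likely a ContextVar; the sketch below shows the idea with an assumed name and default value.

```python
from contextvars import ContextVar

# Assumed definition: the real one lives in the project's var module.
crawler_type_var: ContextVar[str] = ContextVar("crawler_type", default="")

crawler_type_var.set("search")
print(crawler_type_var.get())  # the store layer can read this to label output files
```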