使用 isort 对引用进行格式化排序；修改微博获取图片默认配置为关闭

This commit is contained in:
Er_Meng 2024-04-10 09:54:28 +08:00
parent 52c720591f
commit 9cd6efb916
7 changed files with 10 additions and 13 deletions

View File

@ -34,7 +34,7 @@ CRAWLER_MAX_NOTES_COUNT = 20
MAX_CONCURRENCY_NUM = 4
# 是否开启爬图片模式, 默认不开启爬图片
ENABLE_GET_IMAGES = True
ENABLE_GET_IMAGES = False
# 是否开启爬评论模式, 默认不开启爬评论
ENABLE_GET_COMMENTS = False

View File

@ -12,7 +12,6 @@ from urllib.parse import urlencode
import httpx
from playwright.async_api import BrowserContext, Page
from tools import utils
from .exception import DataFetchError

View File

@ -10,11 +10,10 @@ import random
from asyncio import Task
from typing import Dict, List, Optional, Tuple
from playwright.async_api import (BrowserContext, BrowserType, Page,
async_playwright)
import config
from base.base_crawler import AbstractCrawler
from playwright.async_api import (BrowserContext, BrowserType, Page,
async_playwright)
from proxy.proxy_ip_pool import IpInfoModel, create_ip_pool
from store import weibo as weibo_store
from tools import utils

View File

@ -8,11 +8,10 @@ import functools
import sys
from typing import Optional
from base.base_crawler import AbstractLogin
from playwright.async_api import BrowserContext, Page
from tenacity import (RetryError, retry, retry_if_result, stop_after_attempt,
wait_fixed)
from base.base_crawler import AbstractLogin
from tools import utils

View File

@ -7,8 +7,8 @@ from typing import List
import config
from .weibo_store_impl import *
from .weibo_store_image import *
from .weibo_store_impl import *
class WeibostoreFactory:

View File

@ -3,12 +3,13 @@
# @Time : 2024/4/9 17:35
# @Desc : 微博保存图片类
import pathlib
from tools import utils
from base.base_crawler import AbstractStoreImage
import aiofiles
from typing import Dict
import aiofiles
from base.base_crawler import AbstractStoreImage
from tools import utils
class WeiboStoreImage(AbstractStoreImage):
image_store_path: str = "data/weibo/images"

View File

@ -10,7 +10,6 @@ import pathlib
from typing import Dict
import aiofiles
from base.base_crawler import AbstractStore
from tools import utils
from var import crawler_type_var