Complete PT tasks

rm 2023-12-26 17:35:00 +08:00
parent cbbdc44769
commit 87782db389
3 changed files with 159 additions and 22 deletions


@@ -1,11 +1,27 @@
# Notes:
# level indicates the site's tier/scale; the higher the level, the larger the site
# type indicates the site category (recorded in the label field below); 9kg = adult (18+) sites
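For reference, a minimal sketch of how a script might consume these entries, mirroring the opt() loop in the sign-in script further down in this commit; the config filename pt_sites.toml is an assumption, since the diff does not show the file's actual path:

```python
import toml

# Assumed filename: the diff does not show where this config file actually lives.
CONFIG_PATH = "pt_sites.toml"

config = toml.load(CONFIG_PATH)
for section_name, site in config.items():
    url = site.get("url", "")
    cookie = site.get("cookie", "")
    level = site.get("level", 0)
    label = site.get("label", "")
    # A filled-in cookie means the account exists and the site gets signed in;
    # an empty cookie means the script only checks whether signup is open.
    action = "sign-in" if cookie.strip() else "signup check"
    print(f"{section_name} (level {level}, {label or 'no label'}): {action} -> {url}")
```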
# ===========================【start】 nastools partner sites 【start】===========================
['鲨鱼PT']
url = "https://sharkpt.net"
cookie = ""
level = 3
['Audiences 观众/奥迪']
url = "https://audiences.me"
cookie = ""
level = 3
['猪猪网']
url = "https://piggo.me"
cookie = ""
level = 3
['ZmPT 织梦']
url = "https://zmpt.cc"
cookie = ""
level = 3
['杜比PT']
url = "https://www.hddolby.com"
cookie = ""
@@ -16,11 +32,101 @@ url = "https://pt.0ff.cc"
cookie = ""
level = 3
['HDFans 红豆饭']
url = "http://hdfans.org"
cookie = ""
level = 3
['HHanClub 憨憨']
url = "https://hhanclub.top"
cookie = ""
level = 3
['WinterSakura 冬樱']
url = "https://wintersakura.net"
cookie = ""
level = 3
['Red Leaves 红叶']
url = "https://leaves.red"
cookie = ""
level = 3
['1PTBar/壹PT']
url = "https://1ptba.com"
cookie = "c_secure_uid=MTA0ODQx; c_secure_pass=c32037fc1a670e75d797df403e9e5a33; c_secure_ssl=eWVhaA%3D%3D; c_secure_tracker_ssl=eWVhaA%3D%3D; c_secure_login=bm9wZQ%3D%3D"
level = 3
label = "游戏、综艺、电子书、windows软件"
# ===========================【end】 nastools partner sites 【end】===========================
# ===========================【start】 9kg sites 【start】===========================
[pttime]
url = "https://www.pttime.com"
cookie = "c_lang_folder=chs; c_secure_uid=OTExNDU%3D; c_secure_pass=b4567a9950c3906657ed8baeb5a39b2d; c_secure_ssl=eWVhaA%3D%3D; c_secure_tracker_ssl=eWVhaA%3D%3D; c_secure_login=bm9wZQ%3D%3D; cf_clearance=RtffM86J.2AMKNqMKcF5WTGWOK11V7gklzY2rDRNu24-1701826018-0-1-d71c894c.1d67b4d8.801dd6ac-0.2.1701826018"
level = 2
label = "9kg"
[2xfree]
url = "https://pt.2xfree.org"
cookie = ""
level = 2
label = "9kg"
['FSM 飞天拉面神教']
url = "https://fsm.name"
cookie = ""
level = 2
label = "9kg"
['ilolicon 萝莉控']
url = "https://share.ilolicon.com"
cookie = ""
level = 2
label = "9kg"
['KamePT']
url = "https://kamept.com"
cookie = ""
level = 2
label = "9kg"
['Kelu 可鲁/可撸']
url = "https://our.kelu.one"
cookie = ""
level = 2
label = "9kg"
['M-Team 馒头']
url = "https://kp.m-team.cc"
cookie = ""
level = 1
label = "9kg"
['NicePT 老师']
url = "https://www.nicept.net"
cookie = ""
level = 2
label = "9kg"
['Rousi【肉丝】']
url = "https://rousi.zip"
cookie = ""
level = 4
label = "9kg"
['TU88']
url = "http://pt.tu88.men"
cookie = ""
level = 4
label = "9kg"
['YDYPT【伊甸园】']
url = "https://pt.hdbd.us"
cookie = ""
level = 4
label = "9kg"
# ===========================【end】 9kg sites 【end】===========================
[btschool]
url = "https://pt.btschool.club"
cookie = ""
@@ -30,6 +136,7 @@ level = 3
url = "https://www.icc2022.com"
cookie = "c_secure_uid=MTk0OTI%3D; c_secure_pass=d7f655d5b8e90739f23620b9d24241e1; c_secure_ssl=eWVhaA%3D%3D; c_secure_tracker_ssl=eWVhaA%3D%3D; c_secure_login=bm9wZQ%3D%3D; PHPSESSID=em8dtq7r0t77tt1e0aq4m0of1o"
level = 3
label = "动漫、综艺、电视剧"
[hdmayi]
url = "https://www.hdmayi.com"


@@ -1,3 +1,4 @@
import json
import sys
from loguru import logger
@@ -5,7 +6,7 @@ import time
import requests
import toml
import cloudscraper
from bs4 import BeautifulSoup
'''
new Env('PT自动签到和注册检测');
@@ -48,7 +49,7 @@ class PtOperation:
for _ in range(5):
try:
response_result = requests.get(request_url, headers=self.headers)
if '签到成功' in response_result.text:
if '签到成功' in response_result.text or '已签到' in response_result.text:
res_txt = res_txt + '签到成功!'
break
except Exception as e:
@@ -65,17 +66,40 @@ class PtOperation:
"""
request_url = url + "/signup.php"
text = f"网站名:{section_name}, 网址:{request_url}"
scraper = cloudscraper.create_scraper(browser='chrome', debug=True, delay=10)
logger.info(f"开始 -->> {text}")
html = ""
for _ in range(5):
try:
response_result = scraper.get(url=request_url).text
logger.info(text)
logger.info(response_result)
break
flaresolverr_url = "http://152.136.50.100:7024/v1"
payload = json.dumps({
"cmd": "request.get",
"url": request_url,
"maxTimeout": 60000
})
headers = {
'Content-Type': 'application/json'
}
response = requests.post(flaresolverr_url, headers=headers, data=payload)
res = json.loads(response.text)
if res['status'] == 'ok' and res['solution']['status'] == 200:
html = res['solution']['response']
break
except Exception as e:
time.sleep(2)
else:
logger.error(f"{text} , 5次出现错误无法访问")
if len(html) == 0:
return
try:
# print the result
if "再次输入密码" in html or "submit" in html:
logger.info(f"现在 {text} 正在开放注册,请前往~")
else:
soup = BeautifulSoup(html, 'html.parser')
text_content = soup.find(class_='text').get_text()
print(text_content)
except Exception as e:
logger.error(f"{text} , 页面无法解析,请知晓!!!")
logger.info("=" * 100)
def opt(self):
@@ -87,9 +111,8 @@ class PtOperation:
print(f"Processing section: {section_name} --- {section_data.get('url')}")
url, cookie = section_data.get('url'), section_data.get('cookie')
if len(cookie.strip()) > 0:
pass
# sign in
# self.attendances(section_name, url, cookie)
# sign in
self.attendances(section_name, url, cookie)
else:
# check whether registration is open
self.signup(section_name, url)
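The rewritten signup() above and the standalone test script below build the same FlareSolverr request by hand. A minimal sketch of how that call could be factored into one shared helper; the function name, timeouts, and error handling here are illustrative assumptions, not code from the repo:

```python
import json
import requests

FLARESOLVERR_URL = "http://152.136.50.100:7024/v1"  # same endpoint the diff uses


def fetch_via_flaresolverr(page_url: str, max_timeout_ms: int = 60000) -> str:
    """Ask FlareSolverr to fetch page_url; return the solved HTML, or '' on failure."""
    payload = json.dumps({
        "cmd": "request.get",
        "url": page_url,
        "maxTimeout": max_timeout_ms,
    })
    headers = {"Content-Type": "application/json"}
    try:
        response = requests.post(FLARESOLVERR_URL, headers=headers, data=payload, timeout=90)
        res = response.json()
    except Exception:
        return ""
    if res.get("status") == "ok" and res.get("solution", {}).get("status") == 200:
        return res["solution"].get("response", "")
    return ""
```

signup() could keep its retry loop and open-registration check while calling this helper instead of building the payload inline.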


@@ -1,26 +1,33 @@
import json
import requests
from bs4 import BeautifulSoup
url = "https://1ptba.com"
flaresolverr_url = "http://152.136.50.100:7024/v1"
payload = json.dumps({
"cmd": "request.get",
"url": "https://piggo.me/login.php",
"url": url + "/signup.php",
"maxTimeout": 60000
})
headers = {
'Content-Type': 'application/json'
}
# response = requests.post(flaresolverr_url, headers=headers, data=payload)
# The API exposed by this Docker image returns JSON; the page's HTML source is in .solution.response
response = '{"status": "ok", "message": "Challenge solved!", "solution": {"url": "https://piggo.me/login.php", "status": 200, "cookies": [{"domain": "piggo.me", "expiry": 1703640561, "httpOnly": true, "name": "sl-session", "path": "/", "sameSite": "None", "secure": true, "value": "7BrzKfF9i2XQp+tXKV6Arw=="}, {"domain": ".piggo.me", "expiry": 1735090161, "httpOnly": true, "name": "cf_clearance", "path": "/", "sameSite": "None", "secure": true, "value": "gvbnoRfMbANgSnxTUAW_lLVeAX.Nzr.8rJNR0yFgDv0-1703554148-0-2-76d08d10.6cf0fe2.27f806e8-250.0.0"}], "userAgent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36", "headers": {}, "response": ""}, "startTimestamp": 1703554142434, "endTimestamp": 1703554164365, "version": "3.3.12"}'
res = json.loads(response)
response = requests.post(flaresolverr_url, headers=headers, data=payload)
# response = '{"status": "ok", "message": "Challenge solved!", "solution": {"url": "https://piggo.me/login.php", "status": 200, "cookies": [{"domain": "piggo.me", "expiry": 1703640561, "httpOnly": true, "name": "sl-session", "path": "/", "sameSite": "None", "secure": true, "value": "7BrzKfF9i2XQp+tXKV6Arw=="}, {"domain": ".piggo.me", "expiry": 1735090161, "httpOnly": true, "name": "cf_clearance", "path": "/", "sameSite": "None", "secure": true, "value": "gvbnoRfMbANgSnxTUAW_lLVeAX.Nzr.8rJNR0yFgDv0-1703554148-0-2-76d08d10.6cf0fe2.27f806e8-250.0.0"}], "userAgent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36", "headers": {}, "response": ""}, "startTimestamp": 1703554142434, "endTimestamp": 1703554164365, "version": "3.3.12"}'
print(response.text)
res = json.loads(response.text)
if res['status'] == 'ok' and res['solution']['status'] == 200:
user_agent = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36"
user_agent = res['solution']['userAgent']
cookies = res['solution']['cookies']
for cookie in cookies:
name = cookie["name"]
value = cookie["value"]
print(f"Cookie Name: {name}\nCookie Value: {value}\n")
html = res['solution']['response']
soup = BeautifulSoup(html, 'html.parser')
# get the text content of the element with class 'text'
text_content = soup.find(class_='text').get_text()
# print the result
if "再次输入密码" in text_content or "submit" in text_content:
print(f"现在 {url} 正在开放注册,请前往~")
else:
print(text_content)
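The test script prints the solved cookies but never reuses them. As a hedged follow-up sketch (not part of this commit), the cf_clearance cookie and matching userAgent returned by FlareSolverr could be loaded into a plain requests session for later calls; this assumes the res and url variables from the script above and that the site accepts the solved cookie outside the FlareSolverr browser:

```python
import requests

# Assumption: `res` is the parsed FlareSolverr response and `url` the site base URL
# from the script above; the site must accept cf_clearance with a matching User-Agent.
session = requests.Session()
session.headers["User-Agent"] = res["solution"]["userAgent"]
for cookie in res["solution"]["cookies"]:
    session.cookies.set(cookie["name"], cookie["value"], domain=cookie["domain"])

follow_up = session.get(url + "/signup.php", timeout=30)
print(follow_up.status_code)
```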