Evil crawler XD

Remik1r3n 2025-02-07 14:46:15 +08:00
parent c5d408fff0
commit 394a15b812


@@ -3,6 +3,8 @@
import json
from API_TitleServer import apiSDGB
from Config import *
import time
import random

def apiGetUserPreview(userId) -> str:
    data = json.dumps({
@@ -16,3 +18,34 @@ if __name__ == "__main__":
    #userId = input("Enter user ID")
    userId = testUid
    print(apiGetUserPreview(userId))
def crawlAllUserPreview():
    # Set the first and last UserId of the crawl range here
    BeginUserId = 11000000
    EndUserId = 12599999
    # Open the output file for writing
    with open('Remi_UserID_DB_Output.txt', 'w', encoding="utf-8") as f:
        # Walk through every UserId in the range
        for userId in range(BeginUserId, EndUserId + 1):
            # Call the API
            try:
                userPreview = apiGetUserPreview(userId)
                currentUser = json.loads(userPreview)
                if currentUser["userId"] is not None:
                    # Save each hit to the file, one record per line
                    f.write(userPreview + "\n")
                else:
                    # No user behind this ID: write an empty line
                    f.write("\n")
            except Exception:
                # Request or parsing failed: mark the line and back off
                f.write("ERROR\n")
                time.sleep(4)
            f.flush()
            # Wait a random 0.2-0.5 s between requests
            time.sleep(random.uniform(0.2, 0.5))
    print('Finished!')

if __name__ == "__main__":
    crawlAllUserPreview()
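
For downstream use, a minimal sketch of reading the crawl output back into Python, assuming the format produced above (one JSON user preview per line, a blank line for a vacant ID, "ERROR" for a failed request); the helper name loadCrawledPreviews is hypothetical and not part of this commit:

import json

def loadCrawledPreviews(path='Remi_UserID_DB_Output.txt'):
    # Parse the crawler output written above: one JSON object per line.
    # Blank lines (no user) and "ERROR" markers are skipped.
    previews = []
    with open(path, encoding="utf-8") as f:
        for line in f:
            line = line.strip()
            if not line or line == "ERROR":
                continue
            previews.append(json.loads(line))
    return previews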