# API implementation for fetching a user's brief preview data; this API can be called without any login

import rapidjson as json
from API_TitleServer import apiSDGB
from Config import *
import time
import random
from loguru import logger

def apiGetUserPreview(userId, noLog:bool=False) -> str:
    """Fetch a user's brief preview data from the title server.

    No login/session is required for this endpoint.

    Args:
        userId: numeric user ID to query (coerced to int for the payload).
        noLog: when True, suppress request logging in the transport layer.

    Returns:
        The raw JSON string returned by GetUserPreviewApi.
    """
    payload = json.dumps({"userId": int(userId)})
    return apiSDGB(payload, "GetUserPreviewApi", userId, noLog)

# CLI example: fetch and print the preview for the configured test UID.
if __name__ == "__main__":
    # userId = input("Enter user ID: ")
    print(apiGetUserPreview(testUid8))

###
### The code below is kept for archival purposes only
###

def crawlAllUserPreview(beginUserId: int = 10200000,
                        endUserId: int = 12599999,
                        outputPath: str = 'Remi_UserID_DB_Output.txt'):
    """Sequentially crawl user previews over an ID range (it's an evil crawler).

    For each user ID in [beginUserId, endUserId], fetches the preview JSON
    and appends it as one line to the output file. Users that come back with
    a null userId produce an empty line; failed requests produce "ERROR".

    Args:
        beginUserId: first user ID to crawl (inclusive).
        endUserId: last user ID to crawl (inclusive).
        outputPath: path of the line-per-user output file (overwritten).
    """
    with open(outputPath, 'w', encoding="utf-8") as f:
        for userId in range(beginUserId, endUserId + 1):
            try:
                # noLog=True keeps the transport layer quiet during the crawl
                userPreview = apiGetUserPreview(userId, True)
                currentUser = json.loads(userPreview)
                if currentUser["userId"] is not None:
                    # Persist each hit as a single JSON line
                    f.write(userPreview + "\n")
                    logger.info(f"{userId}: {currentUser['userName']}, RATING: {currentUser['playerRating']}")
                else:
                    f.write("\n")
            except Exception:
                # Was a bare `except:`, which also swallowed KeyboardInterrupt
                # and hid the failure cause; now log it and back off briefly.
                logger.exception(f"{userId}: request failed")
                f.write("ERROR\n")
                time.sleep(4)
            f.flush()
            # Random 0.2-0.5 s delay to avoid hammering the server
            time.sleep(random.uniform(0.2, 0.5))

    print('Finished!')

#if __name__ == "__main__":
#    crawlAllUserPreview()