Compare commits

...

26 Commits

Author SHA1 Message Date
15c6623ed8 chore: add more headers 2026-01-08 15:19:34 +08:00
da15da800c chore: 1.53 is still 404 2026-01-08 15:05:02 +08:00
9f82b88219 chore: upgrade ping to maimai DX 1.53 2026-01-08 14:54:25 +08:00
de0ec8ebb9 chore: output content length 2026-01-08 14:50:56 +08:00
0ccc425a19 chore: get token after qr login 2026-01-08 00:56:31 +08:00
b15088c332 fix: don't ignore http status code 2026-01-08 00:25:11 +08:00
322b85ed65 todo: prepare for 1.53 API support 2026-01-08 00:18:01 +08:00
c02ac2daad fix: userId starts from 10000000 2026-01-06 15:29:11 +08:00
b4ecc648a7 perf: remove .retain() immediate process 2026-01-06 11:30:26 +08:00
bd6df7b93a build: bump nyquest to v0.4.0 2026-01-05 23:50:56 +08:00
78adffd34d build: bump dependencies 2026-01-05 23:48:18 +08:00
beb8fd3e5b fix: 2025-12-29 update 2025-12-29 15:36:03 +08:00
meowkatee
8d2c3ab82c test: decode pong response 2025-11-30 21:16:06 +08:00
77cdf7801d chore: update dependencies 2025-10-09 15:43:38 +08:00
6818bdf789 build: bump dependencies 2025-09-25 00:13:22 +08:00
9e628dca63 chore: update to Maimai DX Circle 2025-09-25 00:11:28 +08:00
mokurin000
672f82bd85 enhance: cast difficulty manually 2025-09-19 22:03:44 +08:00
mokurin000
209b76b714 feat: export music db in flatten format 2025-09-19 22:00:08 +08:00
mokurin000
6ea483e267 chore: ignore b50.parquet 2025-09-19 03:40:13 +08:00
mokurin000
ad2903db9a perf: drop json output support 2025-09-19 03:37:24 +08:00
mokurin000
29e354204b perf: make json output easier to parse 2025-09-19 03:18:20 +08:00
mokurin000
c35240cc94 chore: remove pre-process code for b50 2025-09-19 03:16:49 +08:00
mokurin000
ee23914e29 perf: reduce useless data read 2025-09-17 15:39:04 +08:00
mokurin000
a7777d127a chore: update redb for performance 2025-09-16 19:04:59 +08:00
mokurin000
c45e12d1bb enhance: add newest A100 data 2025-08-23 00:11:49 +08:00
mokurin000
f1886a9302 feat: calculate dx rating func 2025-08-20 21:05:16 +08:00
25 changed files with 4484 additions and 1170 deletions

3
.gitignore vendored
View File

@@ -4,10 +4,11 @@
/players.redb* /players.redb*
/*.json* /b50*.parquet
/players*.parquet /players*.parquet
/region*.parquet /region*.parquet
/records*.parquet /records*.parquet
/musics.parquet
/.python-version /.python-version
/uv.lock /uv.lock

1505
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -7,21 +7,25 @@ default-members = ["sdgb-cli"]
music-db = { path = "./music_db", default-features = false } music-db = { path = "./music_db", default-features = false }
sdgb-api = { path = "./sdgb-api", default-features = false } sdgb-api = { path = "./sdgb-api", default-features = false }
spdlog-rs = { version = "0.4.3", default-features = false, features = [ spdlog-rs = { version = "0.5.0", default-features = false, features = [
"level-debug", "level-debug",
"release-level-info", "release-level-info",
] } ] }
snafu = { version = "0.8.6", features = ["backtrace", "rust_1_81"] } nyquest = { version = "0.4.0" }
serde = { version = "1.0.219", features = ["derive"] } nyquest-preset = { version = "0.4.0" }
serde_json = "1.0.141"
snafu = { version = "0.8.9", features = ["backtrace", "rust_1_81"] }
serde = { version = "1.0.226", features = ["derive"] }
serde_json = "1.0.145"
strum = { version = "0.27.2", features = ["derive"] } strum = { version = "0.27.2", features = ["derive"] }
tokio = { version = "1.47.1", features = ["rt-multi-thread"] } tokio = { version = "1.47.1", features = ["rt-multi-thread"] }
compio = { version = "0.15.0", features = ["runtime"] } compio = { version = "0.16.0", features = ["runtime"] }
redb = "3.0.0" redb = "3.1.0"
crabtime = { git = "https://github.com/wdanilo/crabtime.git", rev = "2ed856f5" } crabtime = { git = "https://github.com/wdanilo/crabtime.git", rev = "2ed856f5" }
parquet = "56.0.0" parquet = "57.1"
parquet_derive = "57.1"
[profile.release] [profile.release]
lto = "thin" lto = "thin"

View File

@@ -3,3 +3,7 @@
- SBGA 舞萌DX API 文档参考 - SBGA 舞萌DX API 文档参考
- “裸” cli 工具,没多少人性化功能 - “裸” cli 工具,没多少人性化功能
- 暂时不完整开放,留在私仓 - 暂时不完整开放,留在私仓
## 2025-12-29 维护
从此次维护开始,需要先 `qr-login` 登录再进行login。

View File

@@ -1,4 +1,4 @@
use std::sync::LazyLock; use std::{ops::RangeInclusive, sync::LazyLock};
use rust_decimal::{Decimal, dec, serde::DecimalFromString}; use rust_decimal::{Decimal, dec, serde::DecimalFromString};
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
@@ -58,13 +58,19 @@ impl Level {
/// ///
/// On invalid input, it returns 0. /// On invalid input, it returns 0.
pub fn dx_rating(&self, achievement: i32) -> (&'static str, u32) { pub fn dx_rating(&self, achievement: i32) -> (&'static str, u32) {
let difficulty_rank: Decimal = self.difficulty.value;
let achievement = achievement.min(1005000); // SSS+ case let achievement = achievement.min(1005000); // SSS+ case
dx_rating(difficulty_rank, achievement)
}
}
pub fn dx_rating(difficulty_rank: Decimal, achievement: i32) -> (&'static str, u32) {
let (rank, _, factor) = RANKS let (rank, _, factor) = RANKS
.into_iter() .into_iter()
.rev() .rev()
.find(|&(_, threshold, _)| threshold <= achievement) .find(|(_, threshold, _)| threshold.contains(&achievement))
.unwrap(); // save here, due to zero threshold .unwrap(); // save here, due to zero threshold
let difficulty_rank: Decimal = self.difficulty.value;
let achievement = Decimal::new(achievement as _, 4); let achievement = Decimal::new(achievement as _, 4);
#[cfg(feature = "log")] #[cfg(feature = "log")]
@@ -75,34 +81,34 @@ impl Level {
.floor() .floor()
.try_into() .try_into()
.unwrap_or_default(); .unwrap_or_default();
(rank, rating) (rank, rating)
}
} }
const RANKS: [(&'static str, i32, Decimal); 23] = [ const RANKS: [(&'static str, RangeInclusive<i32>, Decimal); 23] = [
("D", 0, dec!(0.0)), ("D", 0..=99999, dec!(0.0)),
("D", 100000, dec!(0.016)), ("D", 100000..=199999, dec!(0.016)),
("D", 200000, dec!(0.032)), ("D", 200000..=299999, dec!(0.032)),
("D", 300000, dec!(0.048)), ("D", 300000..=399999, dec!(0.048)),
("D", 400000, dec!(0.064)), ("D", 400000..=499999, dec!(0.064)),
("C", 500000, dec!(0.080)), ("C", 500000..=599999, dec!(0.080)),
("B", 600000, dec!(0.096)), ("B", 600000..=699999, dec!(0.096)),
("BB", 700000, dec!(0.112)), ("BB", 700000..=749999, dec!(0.112)),
("BBB", 750000, dec!(0.120)), ("BBB", 750000..=799998, dec!(0.120)),
("BBB", 799999, dec!(0.128)), ("BBB", 799999..=799999, dec!(0.128)),
("A", 800000, dec!(0.136)), ("A", 800000..=899999, dec!(0.136)),
("AA", 900000, dec!(0.152)), ("AA", 900000..=939999, dec!(0.152)),
("AAA", 940000, dec!(0.168)), ("AAA", 940000..=969998, dec!(0.168)),
("AAA", 969999, dec!(0.176)), ("AAA", 969999..=969999, dec!(0.176)),
("S", 970000, dec!(0.200)), ("S", 970000..=979999, dec!(0.200)),
("S+", 980000, dec!(0.203)), ("S+", 980000..=989998, dec!(0.203)),
("S+", 989999, dec!(0.206)), ("S+", 989999..=989999, dec!(0.206)),
("SS", 990000, dec!(0.208)), ("SS", 990000..=994999, dec!(0.208)),
("SS+", 995000, dec!(0.211)), ("SS+", 995000..=999998, dec!(0.211)),
("SS+", 999999, dec!(0.214)), ("SS+", 999999..=999999, dec!(0.214)),
("SSS", 1000000, dec!(0.216)), ("SSS", 1000000..=1004998, dec!(0.216)),
("SSS", 1004999, dec!(0.222)), ("SSS", 1004999..=1004999, dec!(0.222)),
("SSS+", 1005000, dec!(0.224)), ("SSS+", 1005000..=1005000, dec!(0.224)),
]; ];
#[cfg(test)] #[cfg(test)]

File diff suppressed because it is too large Load Diff

View File

@@ -4,11 +4,4 @@ version = "0.1.0"
description = "Add your description here" description = "Add your description here"
readme = "README.md" readme = "README.md"
requires-python = ">=3.12" requires-python = ">=3.12"
dependencies = [ dependencies = ["orjson>=3.11.1", "polars>=1.32.0", "polars-hash>=0.5.4"]
"diskcache>=5.6.3",
"loguru>=0.7.3",
"orjson>=3.11.1",
"polars>=1.32.0",
"polars-hash>=0.5.4",
"pyecharts>=2.0.8",
]

View File

@@ -37,7 +37,7 @@ md5 = "0.8.0"
chrono = "0.4.41" chrono = "0.4.41"
# network request # network request
nyquest = { version = "0.3.0", features = ["async", "json"] } nyquest = { workspace = true, features = ["async", "json"] }
# compression / encryption # compression / encryption
@@ -47,5 +47,5 @@ aes = "0.8.4"
cipher = { version = "0.4.4", features = ["block-padding"] } cipher = { version = "0.4.4", features = ["block-padding"] }
bincode = { version = "2.0.1", optional = true } bincode = { version = "2.0.1", optional = true }
parquet = { version = "56.0.0", optional = true } parquet = { workspace = true, optional = true }
parquet_derive = { version = "56.0.0", optional = true } parquet_derive = { workspace = true, optional = true }

View File

@@ -1,10 +1,16 @@
use std::backtrace::Backtrace; use std::backtrace::Backtrace;
use nyquest::{AsyncClient, Body, Request, header::USER_AGENT}; use nyquest::{
AsyncClient, Body, Request,
header::{SET_COOKIE, USER_AGENT},
};
mod model; mod model;
use model::{GetResponse, GetUserId}; use model::GetUserId;
use serde::Serialize; use serde::Serialize;
use spdlog::debug;
pub use model::GetResponse;
pub struct QRCode<'a> { pub struct QRCode<'a> {
pub qrcode_content: &'a str, pub qrcode_content: &'a str,
@@ -40,7 +46,7 @@ pub enum QRLoginError {
} }
impl QRCode<'_> { impl QRCode<'_> {
pub async fn login(self, client: &AsyncClient) -> Result<i64, QRLoginError> { pub async fn login(self, client: &AsyncClient) -> Result<GetResponse, QRLoginError> {
let qr_code = &self.qrcode_content.as_bytes()[self.qrcode_content.len() - 64..]; let qr_code = &self.qrcode_content.as_bytes()[self.qrcode_content.len() - 64..];
let qr_code = String::from_utf8_lossy(qr_code); let qr_code = String::from_utf8_lossy(qr_code);
@@ -49,12 +55,14 @@ impl QRCode<'_> {
.with_header(USER_AGENT, "WC_AIME_LIB"); .with_header(USER_AGENT, "WC_AIME_LIB");
let resp = client.request(req).await?; let resp = client.request(req).await?;
let cookie = resp.get_header(SET_COOKIE)?;
let resp: GetResponse = resp.json().await?; let resp: GetResponse = resp.json().await?;
let user_id = resp.user_id; debug!("Set-Cookie: {cookie:?}");
match resp.error_id { match resp.error_id {
0 => return Ok(user_id), 0 => return Ok(resp),
2 => Err(QRLoginError::QRCodeExpired10), 2 => Err(QRLoginError::QRCodeExpired10),
1 => Err(QRLoginError::QRCodeExpired30), 1 => Err(QRLoginError::QRCodeExpired30),
50 => Err(QRLoginError::BadSingature), 50 => Err(QRLoginError::BadSingature),

View File

@@ -15,7 +15,7 @@ pub struct GetUserId {
pub timestamp: String, pub timestamp: String,
} }
#[derive(Default, Debug, Clone, PartialEq, Deserialize)] #[derive(Default, Debug, Clone, PartialEq, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct GetResponse { pub struct GetResponse {
pub key: String, pub key: String,
@@ -24,6 +24,7 @@ pub struct GetResponse {
pub error_id: i64, pub error_id: i64,
#[serde(rename = "userID")] #[serde(rename = "userID")]
pub user_id: i64, pub user_id: i64,
pub token: String,
} }
impl GetUserId { impl GetUserId {

View File

@@ -9,7 +9,7 @@ use flate2::write::{ZlibDecoder, ZlibEncoder};
use spdlog::debug; use spdlog::debug;
use crate::error::ApiError; use crate::error::ApiError;
use crate::title::{MaiVersion, MaiVersionExt, Sdgb1_50}; use crate::title::{MaiVersion, MaiVersionExt, Sdgb1_50, Sdgb1_53};
impl MaiVersionExt for Sdgb1_50 { impl MaiVersionExt for Sdgb1_50 {
fn decode(mut data: impl AsMut<[u8]>) -> Result<Vec<u8>, ApiError> { fn decode(mut data: impl AsMut<[u8]>) -> Result<Vec<u8>, ApiError> {
@@ -29,6 +29,24 @@ impl MaiVersionExt for Sdgb1_50 {
} }
} }
impl MaiVersionExt for Sdgb1_53 {
fn decode(mut data: impl AsMut<[u8]>) -> Result<Vec<u8>, ApiError> {
if data.as_mut().is_empty() {
return Err(ApiError::EmptyResponse);
}
debug!("data size: {}", data.as_mut().len());
let decrypted = decrypt(&mut data, Self::AES_KEY, Self::AES_IV)?;
Ok(decompress(decrypted))
}
fn encode(data: impl AsRef<[u8]>) -> Result<Vec<u8>, ApiError> {
let compressed = compress(data)?;
let enc = encrypt(compressed, Self::AES_KEY, Self::AES_IV)?;
Ok(enc)
}
}
type Aes256CbcEnc = cbc::Encryptor<aes::Aes256>; type Aes256CbcEnc = cbc::Encryptor<aes::Aes256>;
type Aes256CbcDec = cbc::Decryptor<aes::Aes256>; type Aes256CbcDec = cbc::Decryptor<aes::Aes256>;
@@ -81,6 +99,16 @@ mod _tests {
use crate::title::{Sdgb1_50, encryption::*}; use crate::title::{Sdgb1_50, encryption::*};
#[test]
fn test_ping_dec() -> Result<(), ApiError> {
let mut data = b"\x72\x5c\xa5\x55\x27\x14\x85\xd1\x64\xc8\x64\x5b\x6e\x5f\xd8\xe3\
\x3f\x36\x4c\x9a\x3b\xa5\xb0\x9e\x75\xae\x83\xee\xb3\xb9\x2a\x75"
.to_vec();
let decoded = Sdgb1_50::decode(&mut data)?;
assert_eq!(decoded, b"{\"result\":\"Pong\"}");
Ok(())
}
#[test] #[test]
fn test_sdgb_150_dec_enc() -> Result<(), ApiError> { fn test_sdgb_150_dec_enc() -> Result<(), ApiError> {
let data = [ let data = [
@@ -106,7 +134,6 @@ mod _tests {
Ok(()) Ok(())
} }
// FIXME: user data decryption
#[test] #[test]
fn test_user_data_dec() -> Result<(), ApiError> { fn test_user_data_dec() -> Result<(), ApiError> {
let data = [ let data = [

View File

@@ -13,7 +13,7 @@ use super::ApiError;
use nyquest::{ use nyquest::{
AsyncClient, Body, AsyncClient, Body,
r#async::Request, r#async::Request,
header::{ACCEPT_ENCODING, CONTENT_ENCODING, EXPECT, USER_AGENT}, header::{ACCEPT, ACCEPT_ENCODING, CONTENT_ENCODING, CONTENT_TYPE, COOKIE, EXPECT, USER_AGENT},
}; };
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use spdlog::debug; use spdlog::debug;
@@ -48,17 +48,26 @@ pub trait MaiVersionExt: MaiVersion {
let payload = Self::encode(json)?; let payload = Self::encode(json)?;
let api_hash = Self::api_hash(api); let api_hash = Self::api_hash(api);
let req = Request::post(format!( let mut req = Request::post(format!(
"https://maimai-gm.wahlap.com:42081/Maimai2Servlet/{api_hash}" "https://maimai-gm.wahlap.com:42081/Maimai2Servlet/{api_hash}"
)) ))
.with_body(Body::json_bytes(payload)) .with_body(Body::json_bytes(payload))
.with_header(USER_AGENT, format!("{api_hash}#{agent_extra}")) .with_header(USER_AGENT, format!("{api_hash}#{agent_extra}"))
.with_header("Mai-Encoding", Self::VERSION) .with_header("Mai-Encoding", Self::VERSION)
.with_header(ACCEPT, "*/*")
.with_header(ACCEPT_ENCODING, "") .with_header(ACCEPT_ENCODING, "")
.with_header("Charset", "UTF-8") .with_header("Charset", "UTF-8")
.with_header(CONTENT_ENCODING, "deflate") .with_header(CONTENT_ENCODING, "deflate")
.with_header(CONTENT_TYPE, "application/json")
.with_header(EXPECT, "100-continue"); .with_header(EXPECT, "100-continue");
// TODO: userid, token
if Self::VERSION >= "1.53" && false {
req = req.with_header(COOKIE, format!(""))
}
debug!("request: {req:?}");
Ok(req) Ok(req)
} }
@@ -83,9 +92,16 @@ pub trait MaiVersionExt: MaiVersion {
let req = spawn_blocking(move || Self::api_call(api, agent_extra, data)) let req = spawn_blocking(move || Self::api_call(api, agent_extra, data))
.await .await
.map_err(|_| ApiError::JoinError)??; .map_err(|_| ApiError::JoinError)??;
let data = client.request(req).await?.bytes().await?; let resp = client.request(req).await?.with_successful_status()?;
debug!("received: {data:?}"); debug!(
"server response: {}, {:?} bytes",
resp.status(),
resp.content_length()
);
let data = resp.bytes().await?;
debug!("server response payload: {data:?}");
let decoded = spawn_blocking(move || Self::decode(data)) let decoded = spawn_blocking(move || Self::decode(data))
.await .await
@@ -120,6 +136,7 @@ pub trait MaiVersionExt: MaiVersion {
} }
pub struct Sdgb1_50; pub struct Sdgb1_50;
pub struct Sdgb1_53;
impl MaiVersion for Sdgb1_50 { impl MaiVersion for Sdgb1_50 {
const AES_KEY: &[u8; 32] = b"a>32bVP7v<63BVLkY[xM>daZ1s9MBP<R"; const AES_KEY: &[u8; 32] = b"a>32bVP7v<63BVLkY[xM>daZ1s9MBP<R";
@@ -128,3 +145,11 @@ impl MaiVersion for Sdgb1_50 {
const VERSION: &str = "1.50"; const VERSION: &str = "1.50";
} }
impl MaiVersion for Sdgb1_53 {
const AES_KEY: &[u8; 32] = b"o2U8F6<adcYl25f_qwx_n]5_qxRcbLN>";
const AES_IV: &[u8; 16] = b"AL<G:k:X6Vu7@_U]";
const OBFUSECATE_SUFFIX: &str = "MaimaiChnLatuAa81";
const VERSION: &str = "1.53";
}

View File

@@ -66,6 +66,17 @@ pub struct MusicRating {
pub achievement: i32, pub achievement: i32,
} }
#[cfg_attr(feature = "parquet", derive(parquet_derive::ParquetRecordWriter))]
#[derive(Default, Debug, Clone, PartialEq)]
pub struct MusicRatingFlatten {
pub user_id: u32,
pub music_id: u32,
pub level: u32,
pub rom_version: i64,
pub achievement: i32,
pub dx_rating: u32,
}
#[cfg_attr(feature = "bincode", derive(bincode::Encode, bincode::Decode))] #[cfg_attr(feature = "bincode", derive(bincode::Encode, bincode::Decode))]
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]

View File

@@ -18,6 +18,7 @@ pub use get_user_rating_api::{
GetUserRatingApi, GetUserRatingApi,
GetUserRatingApiResp, // api GetUserRatingApiResp, // api
MusicRating, MusicRating,
MusicRatingFlatten,
Udemae, Udemae,
UserRating, UserRating,
}; };

View File

@@ -65,6 +65,7 @@ impl UserLoginApiResp {
100 => Some(LoginError::AlreadyLogged), 100 => Some(LoginError::AlreadyLogged),
102 => Some(LoginError::QRCodeExpired), 102 => Some(LoginError::QRCodeExpired),
103 => Some(LoginError::AccountUnregistered), 103 => Some(LoginError::AccountUnregistered),
106 => Some(LoginError::KeychipMismatch),
error => Some(LoginError::Unknown { error }), error => Some(LoginError::Unknown { error }),
} }
} }
@@ -78,6 +79,8 @@ pub enum LoginError {
AlreadyLogged, AlreadyLogged,
#[snafu(display("userId does not exist"))] #[snafu(display("userId does not exist"))]
AccountUnregistered, AccountUnregistered,
#[snafu(display("KeyChip-ID mismatch"))]
KeychipMismatch,
#[snafu(display("Unknown error: {error}"))] #[snafu(display("Unknown error: {error}"))]
Unknown { error: i32 }, Unknown { error: i32 },

View File

@@ -11,10 +11,17 @@ default = ["compio", "fetchall"]
compio = ["dep:compio", "sdgb-api/compio"] compio = ["dep:compio", "sdgb-api/compio"]
tokio = ["dep:tokio", "sdgb-api/tokio"] tokio = ["dep:tokio", "sdgb-api/tokio"]
fetchall = ["dep:redb", "dep:futures-util", "dep:parquet", "sdgb-api/parquet"] fetchall = [
"dep:redb",
"dep:futures-util",
"dep:parquet",
"dep:music-db",
"sdgb-api/parquet",
]
[dependencies] [dependencies]
sdgb-api = { workspace = true, features = ["bincode"] } sdgb-api = { workspace = true, features = ["bincode"] }
music-db = { workspace = true, optional = true }
# (de)serialization # (de)serialization
serde = { workspace = true } serde = { workspace = true }
@@ -33,9 +40,9 @@ redb = { workspace = true, optional = true }
tokio = { workspace = true, features = ["macros"], optional = true } tokio = { workspace = true, features = ["macros"], optional = true }
compio = { workspace = true, features = ["macros"], optional = true } compio = { workspace = true, features = ["macros"], optional = true }
nyquest-preset = { version = "0.3.0", features = ["async"] } nyquest-preset = { workspace = true, features = ["async"] }
palc = { version = "0.0.1", features = ["derive"] } palc = { version = "0.0.2" }
futures-util = { version = "0.3.31", optional = true } futures-util = { version = "0.3.31", optional = true }
ctrlc = { version = "3.4.7", features = ["termination"] } ctrlc = { version = "3.4.7", features = ["termination"] }

View File

@@ -3,7 +3,7 @@ use palc::Subcommand;
use strum::EnumString; use strum::EnumString;
#[derive(Parser)] #[derive(Parser)]
#[command(about = "SDGB api tool", long_about = env!("CARGO_PKG_DESCRIPTION"))] #[command(long_about = env!("CARGO_PKG_DESCRIPTION"))]
pub struct Cli { pub struct Cli {
/// Try to generate machine readable format. /// Try to generate machine readable format.
/// ///
@@ -81,12 +81,14 @@ pub enum Commands {
skip_login: bool, skip_login: bool,
}, },
/// Scrape all user, read possible id from stdin
#[cfg(feature = "fetchall")] #[cfg(feature = "fetchall")]
ListAllUser { ListAllUser {
#[arg(short, long, default_value_t = 5)] #[arg(short, long, default_value_t = 5)]
concurrency: usize, concurrency: usize,
}, },
#[cfg(feature = "fetchall")] #[cfg(feature = "fetchall")]
/// Scrape B50 data
ScrapeAllB50 { ScrapeAllB50 {
#[arg(short, long, default_value_t = 5)] #[arg(short, long, default_value_t = 5)]
concurrency: usize, concurrency: usize,
@@ -96,6 +98,7 @@ pub enum Commands {
#[arg(long, default_value_t = 16500)] #[arg(long, default_value_t = 16500)]
max_rating: i64, max_rating: i64,
}, },
/// Scrape Region data
#[cfg(feature = "fetchall")] #[cfg(feature = "fetchall")]
ScrapeAllRegion { ScrapeAllRegion {
#[arg(short, long, default_value_t = 5)] #[arg(short, long, default_value_t = 5)]
@@ -106,6 +109,7 @@ pub enum Commands {
#[arg(long, default_value_t = 16500)] #[arg(long, default_value_t = 16500)]
max_rating: i64, max_rating: i64,
}, },
/// Scrape all player record
#[cfg(feature = "fetchall")] #[cfg(feature = "fetchall")]
ScrapeAllRecord { ScrapeAllRecord {
#[arg(short, long, default_value_t = 5)] #[arg(short, long, default_value_t = 5)]
@@ -139,7 +143,7 @@ pub enum Commands {
}, },
} }
#[derive(Debug, Default, EnumString)] #[derive(Debug, Default, EnumString, strum::Display)]
#[strum(serialize_all = "snake_case")] #[strum(serialize_all = "snake_case")]
pub enum RatingFormat { pub enum RatingFormat {
#[default] #[default]

View File

@@ -11,10 +11,10 @@ use palc::Parser;
use spdlog::{Level, LevelFilter::MoreSevereEqual, sink::StdStreamSink, terminal_style::StyleMode}; use spdlog::{Level, LevelFilter::MoreSevereEqual, sink::StdStreamSink, terminal_style::StyleMode};
use sdgb_api::{ use sdgb_api::{
all_net::QRCode, all_net::{GetResponse, QRCode},
auth_lite::{SDGB, SDHJ, delivery_raw}, auth_lite::{SDGB, SDHJ, delivery_raw},
title::{ title::{
MaiVersionExt, Sdgb1_50, MaiVersionExt, Sdgb1_50, Sdgb1_53,
helper::get_user_all_music, helper::get_user_all_music,
methods::APIMethod, methods::APIMethod,
model::{ model::{
@@ -57,7 +57,7 @@ async fn main() -> Result<(), Box<dyn snafu::Error>> {
*log.sinks_mut() = vec![Arc::new(sink)]; *log.sinks_mut() = vec![Arc::new(sink)];
Ok(()) Ok(())
})?; })?;
spdlog::swap_default_logger(logger); _ = spdlog::swap_default_logger(logger);
ctrlc::set_handler(|| { ctrlc::set_handler(|| {
if EARLY_QUIT.load(Ordering::Relaxed) { if EARLY_QUIT.load(Ordering::Relaxed) {
@@ -184,13 +184,23 @@ async fn main() -> Result<(), Box<dyn snafu::Error>> {
"sdgb 1.50 resp: {decoded}, {}ms", "sdgb 1.50 resp: {decoded}, {}ms",
time.elapsed().unwrap_or_default().as_millis() time.elapsed().unwrap_or_default().as_millis()
); );
let time = SystemTime::now();
let decoded: PingResp =
Sdgb1_53::request(&client, APIMethod::Ping, "", Ping {}).await?;
info!(
"sdgb 1.53 resp: {decoded}, {}ms",
time.elapsed().unwrap_or_default().as_millis()
);
} }
Commands::QRLogin { ref qrcode_content } => { Commands::QRLogin { ref qrcode_content } => {
let qrcode = QRCode { qrcode_content }; let qrcode = QRCode { qrcode_content };
let resp = qrcode.login(&client).await; let resp = qrcode.login(&client).await;
match &resp { match &resp {
Ok(user_id) => info!("login succeed: {user_id}"), Ok(GetResponse { user_id, token, .. }) => {
info!("login succeed: {user_id}, {token:?}")
}
Err(e) => error!("login failed: {e}"), Err(e) => error!("login failed: {e}"),
} }
@@ -249,11 +259,13 @@ async fn main() -> Result<(), Box<dyn snafu::Error>> {
utils::helpers::{cached_concurrent_fetch_userfn, read_cache}, utils::helpers::{cached_concurrent_fetch_userfn, read_cache},
}; };
let mut players: Vec<GetUserPreviewApiResp> = read_cache(PLAYERS)?; let players: Vec<GetUserPreviewApiResp> = read_cache(PLAYERS)?;
players.retain(|p| p.player_rating >= min_rating && p.player_rating <= max_rating);
cached_concurrent_fetch_userfn( cached_concurrent_fetch_userfn(
players.iter().map(|p| p.user_id).collect::<Vec<u32>>(), players
.iter()
.filter(|p| p.player_rating >= min_rating && p.player_rating <= max_rating)
.map(|p| p.user_id)
.collect::<Vec<u32>>(),
&client, &client,
concurrency, concurrency,
RECORDS, RECORDS,
@@ -274,11 +286,13 @@ async fn main() -> Result<(), Box<dyn snafu::Error>> {
utils::helpers::{cached_concurrent_fetch, read_cache}, utils::helpers::{cached_concurrent_fetch, read_cache},
}; };
let mut players: Vec<GetUserPreviewApiResp> = read_cache(PLAYERS)?; let players: Vec<GetUserPreviewApiResp> = read_cache(PLAYERS)?;
players.retain(|p| p.player_rating >= min_rating && p.player_rating <= max_rating);
cached_concurrent_fetch::<GetUserRatingApiExt>( cached_concurrent_fetch::<GetUserRatingApiExt>(
players.iter().map(|p| p.user_id).collect::<Vec<u32>>(), players
.iter()
.filter(|p| p.player_rating >= min_rating && p.player_rating <= max_rating)
.map(|p| p.user_id)
.collect::<Vec<u32>>(),
&client, &client,
concurrency, concurrency,
B50, B50,
@@ -298,11 +312,13 @@ async fn main() -> Result<(), Box<dyn snafu::Error>> {
utils::helpers::{cached_concurrent_fetch, read_cache}, utils::helpers::{cached_concurrent_fetch, read_cache},
}; };
let mut players: Vec<GetUserPreviewApiResp> = read_cache(PLAYERS)?; let players: Vec<GetUserPreviewApiResp> = read_cache(PLAYERS)?;
players.retain(|p| p.player_rating >= min_rating && p.player_rating <= max_rating);
cached_concurrent_fetch::<GetUserRegionApiExt>( cached_concurrent_fetch::<GetUserRegionApiExt>(
players.iter().map(|p| p.user_id).collect::<Vec<u32>>(), players
.iter()
.filter(|p| p.player_rating >= min_rating && p.player_rating <= max_rating)
.map(|p| p.user_id)
.collect::<Vec<u32>>(),
&client, &client,
concurrency, concurrency,
REGIONS, REGIONS,
@@ -363,9 +379,52 @@ async fn main() -> Result<(), Box<dyn snafu::Error>> {
} }
#[cfg(feature = "fetchall")] #[cfg(feature = "fetchall")]
Commands::ScrapeAllB50Dump {} => { Commands::ScrapeAllB50Dump {} => {
use crate::{cache::B50, utils::helpers::dump_json}; use sdgb_api::title::model::{MusicRating, MusicRatingFlatten};
dump_json::<GetUserRatingApiResp>("b50.json", B50)?; use crate::{
cache::B50,
utils::helpers::{dump_parquet, read_cache},
};
let records: Vec<GetUserRatingApiResp> = read_cache(B50)?;
dump_parquet::<MusicRatingFlatten>(
records
.into_iter()
.map(
|GetUserRatingApiResp {
user_id,
user_rating,
}| {
user_rating
.rating_list
.into_iter()
.chain(user_rating.next_rating_list)
.filter_map(
move |MusicRating {
music_id,
level,
rom_version,
achievement,
}| {
let (_rank, dx_rating) =
music_db::query_music_level(music_id, level)?
.dx_rating(achievement);
Some(MusicRatingFlatten {
user_id,
music_id,
level,
rom_version,
achievement,
dx_rating,
})
},
)
},
)
.flatten()
.collect::<Vec<_>>(),
"b50.parquet",
)?;
} }
Commands::Userdata { Commands::Userdata {

View File

@@ -11,7 +11,6 @@ use parquet::file::writer::SerializedFileWriter;
use parquet::record::RecordWriter; use parquet::record::RecordWriter;
use redb::ReadableTable; use redb::ReadableTable;
use redb::TableDefinition; use redb::TableDefinition;
use serde::Serialize;
use spdlog::{error, info}; use spdlog::{error, info};
use sdgb_api::title::MaiVersionExt; use sdgb_api::title::MaiVersionExt;
@@ -96,30 +95,6 @@ where
Ok(()) Ok(())
} }
pub fn dump_json<D>(
output_path: impl AsRef<Path>,
definition: TableDefinition<'_, u32, Vec<u8>>,
) -> Result<(), Box<dyn snafu::Error>>
where
D: for<'d> BorrowDecode<'d, ()> + Serialize,
{
let file = OpenOptions::new()
.create(true)
.truncate(true)
.write(true)
.open(output_path)?;
#[cfg(file_lock_ready)]
file.try_lock()?;
let data = read_cache::<D>(definition)?;
let writer = BufWriter::new(file);
serde_json::to_writer(writer, &data)?;
info!("dumped {} records", data.len());
Ok(())
}
pub async fn cached_concurrent_fetch<A: APIExt>( pub async fn cached_concurrent_fetch<A: APIExt>(
user_ids: impl Into<Vec<u32>>, user_ids: impl Into<Vec<u32>>,
client: &AsyncClient, client: &AsyncClient,
@@ -169,10 +144,8 @@ where
{ {
let cache_table = cache::open_table_ro(&read, definition)?; let cache_table = cache::open_table_ro(&read, definition)?;
let data = cache_table.get(user_id)?; let data = cache_table.get(user_id)?;
if let Some(data) = data { if data.is_some() {
let decoded: (R, _) = borrow_decode_from_slice(&data.value(), config)?; return Ok(());
return Ok(decoded.0);
} }
} }
@@ -181,16 +154,14 @@ where
} }
let resp = scrape(&client, user_id).await; let resp = scrape(&client, user_id).await;
match &resp { match &resp {
Ok(resp) => { Ok(resp) => {
use sdgb_api::bincode::encode_to_vec; use sdgb_api::bincode::encode_to_vec;
info!("fetched: {user_id}");
if let Ok(mut table) = cache::open_table(&write, definition) if let Ok(mut table) = cache::open_table(&write, definition)
&& let Ok(encoded) = encode_to_vec(resp, config) && let Ok(encoded) = encode_to_vec(resp, config)
{ {
info!("encode length for {user_id}: {}", encoded.len());
_ = table.insert(user_id, encoded); _ = table.insert(user_id, encoded);
} }
} }
@@ -200,10 +171,9 @@ where
} }
} }
Result::<_, Box<dyn snafu::Error>>::Ok(resp?) Result::<_, Box<dyn snafu::Error>>::Ok(())
}) })
.buffer_unordered(concurrency) // slower to avoid being banned .buffer_unordered(concurrency) // slower to avoid being banned
.filter_map(async |r| r.ok())
.collect::<Vec<_>>() .collect::<Vec<_>>()
.await; .await;
drop(collect); drop(collect);

View File

@@ -1,106 +0,0 @@
import orjson as json
from typing import Callable
from datetime import datetime
from decimal import Decimal
from helpers import dx_rating, find_level, query_music_db, salted_hash_userid
def clean_b50(b50: dict[str, str | dict]):
urating: dict[str, list[dict[str, int]]] = b50["userRating"]
def add_rating(entry: dict[str, int]):
"""
```
{
"musicId": 11638,
"level": 2,
"romVersion": 24005,
"achievement": 988145
}
```
- level: EXPERT
- ver: DX, 1.40.05
- ach: 98.8145%
"""
entry["musicTitle"] = None
entry["difficulty"] = None
entry["dxRating"] = 0
music_info = query_music_db(entry["musicId"])
if music_info is None:
return
entry["musicTitle"] = music_info["name"]
levels = find_level(music_info, entry["level"])
if not levels:
return
level: dict[str, str | int] = levels.pop()
difficulty = level["difficulty"]
entry["difficulty"] = difficulty
entry["dxRating"] = dx_rating(
difficulty=Decimal(difficulty),
achievement=entry["achievement"],
)
for b35 in urating["ratingList"]:
add_rating(b35)
for b15 in urating["newRatingList"]:
add_rating(b15)
urating["rating"] = sum(
map(
lambda lst: sum(map(lambda entry: entry["dxRating"], urating[lst])),
["ratingList", "newRatingList"],
)
)
def record_time(*, _: list[datetime] = []):
last_time = _
if not last_time:
last_time.append(datetime.now())
else:
new = datetime.now()
diff = (new - last_time.pop()).total_seconds()
last_time.append(new)
return diff
def process(
clean_fields: Callable[[dict], None],
input_file: str,
output_file: str,
):
record_time()
with open(input_file, "rb") as f:
data = json.loads(f.read())
print(f"loaded, cost {record_time():.2f}s")
for entry in data:
entry["userId"] = salted_hash_userid(entry["userId"])
clean_fields(entry)
print(f"processed, cost {record_time():.2f}s")
with open(output_file, "wb") as f:
f.write(json.dumps(data))
print(f"written out, cost {record_time():.2f}s")
return data
def main():
process(
clean_b50,
"b50.json",
"b50_pub.json",
)
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,5 @@
import polars as pl
pl.read_json("music_db/src/musicDB.json").explode(pl.col("levels")).unnest(
pl.col("levels")
).with_columns(pl.col("difficulty").cast(pl.Decimal)).write_parquet("musics.parquet")

View File

@@ -1,11 +0,0 @@
import orjson as json
def main():
with open("players.json", "r", encoding="utf-8") as f:
d: list[dict[str, int | str]] = json.loads(f.read())
print(d[-1]["userId"])
if __name__ == "__main__":
main()

View File

@@ -1,105 +0,0 @@
from decimal import Decimal, getcontext
import hashlib
import orjson as json
from diskcache import Cache
getcontext().prec = 28
CACHE = Cache("target")
def salted_hash_userid(user_id: int):
hex = CACHE.get(user_id)
if hex is not None:
return hex
SALT = b"Lt2N5xgjJOqRsT5qVt7wWYw6SqOPZDI7"
hash_uid = hashlib.sha256(f"{user_id}".encode("utf-8") + SALT)
result = hash_uid.hexdigest()[:16]
CACHE.add(user_id, result)
return result
def dx_rating(difficulty: Decimal, achievement: int) -> int:
    """Compute the maimai DX rating contribution of a single play.

    difficulty: the chart's internal constant (e.g. Decimal("13.7")).
    achievement: score in units of 0.0001% (1005000 == 100.5000%).

    Rating is floor(factor * difficulty * achievement%), where the factor
    depends on the rank band. Achievements above SSS+ are clamped to 100.5%;
    anything outside [80.0%, 101.0%] yields 0.
    """
    # (band lower bound, factor) pairs, best rank first
    bands = [
        (Decimal("100.5"), Decimal("0.224")),     # SSS+
        (Decimal("100.4999"), Decimal("0.222")),  # SSS pro
        (Decimal("100.0"), Decimal("0.216")),     # SSS
        (Decimal("99.9999"), Decimal("0.214")),   # SS+ pro
        (Decimal("99.5"), Decimal("0.211")),      # SS+
        (Decimal("99.0"), Decimal("0.208")),      # SS
        (Decimal("98.9999"), Decimal("0.206")),   # S+ pro
        (Decimal("98.0"), Decimal("0.203")),      # S+
        (Decimal("97.0"), Decimal("0.2")),        # S
        (Decimal("96.9999"), Decimal("0.176")),   # AAA pro
        (Decimal("94.0"), Decimal("0.168")),      # AAA
        (Decimal("90.0"), Decimal("0.152")),      # AA
        (Decimal("80.0"), Decimal("0.136")),      # A
    ]

    ach = Decimal(achievement) / Decimal("10000")
    if ach > Decimal("101.0") or ach < Decimal("80.0"):
        return 0
    # everything past SSS+ counts as exactly 100.5%
    ach = min(ach, Decimal("100.5"))

    for threshold, factor in bands:
        if ach >= threshold:
            break
    else:
        return 0  # unreachable: ach >= 80.0 is guaranteed above

    floored = (factor * difficulty * ach).quantize(Decimal("1."), rounding="ROUND_FLOOR")
    return int(floored)
# loaded once at import time; musicDB.json must exist in the working directory
with open("musicDB.json", "r", encoding="utf-8") as f:
    MUSIC_DB = json.loads(f.read())
# re-key the flat song list by its "id" field for O(1) lookups
MUSIC_DB = {entry["id"]: entry for entry in MUSIC_DB}
def query_music_db(music_id: int):
    """Return the MUSIC_DB entry for ``music_id``, or None when unknown."""
    # dict.get already returns None for a missing key; the previous
    # is-None check followed by returning the same value was redundant.
    return MUSIC_DB.get(music_id)
def find_level(music_info: dict, level_id: int):
    """Return every chart in ``music_info["levels"]`` whose level index equals ``level_id``."""
    matches = []
    for chart in music_info["levels"]:
        if chart["level"] == level_id:
            matches.append(chart)
    return matches

View File

@@ -4,6 +4,7 @@ import xml.dom.minidom as minidom
from pathlib import Path from pathlib import Path
ONLY_REMOVED = True ONLY_REMOVED = True
EXTEND_LIST = ["C:/MaimaiDX/SDEZ-1.60/Package/Sinmai_Data/StreamingAssets/A100"]
def makeMusicDBJson(): def makeMusicDBJson():
@@ -13,24 +14,31 @@ def makeMusicDBJson():
免得国服每次更新还要重新生成太麻烦 免得国服每次更新还要重新生成太麻烦
""" """
# 记得改 # 记得改
A000_DIR = Path( A000_DIR = Path("C:/MaimaiDX/SDEZ-1.60/Package/Sinmai_Data/StreamingAssets/A000")
"C:/MaimaiDX/SDEZ-1.56-B/Standard/Package/Sinmai_Data/StreamingAssets/A000" OPTION_DIR = Path("C:/MaimaiDX/SDEZ-1.60/Package/option")
)
OPTION_DIR = Path("C:/MaimaiDX/SDGA-1.50-G/NoMovieData/StreamingAssets")
music_db: list[dict[str, str | int | list[dict[str, str | int]]]] = [] music_db: list[dict[str, str | int | list[dict[str, str | int]]]] = []
DEST_PATH = Path("./musicDB.json") DEST_PATH = Path("./music_db/src/musicDB.json")
dup_count = 0 dup_count = 0
music_ids = set() music_ids = set()
music_folders = [f for f in (A000_DIR / "music").iterdir() if f.is_dir()] music_folders = [f for f in (A000_DIR / "music").iterdir() if f.is_dir()]
for extend_dir in EXTEND_LIST:
extend_dir = Path(extend_dir)
if (extend_dir / "music").exists():
print(f"adding {extend_dir.name} patch...")
music_folders.extend(
[f for f in (extend_dir / "music").iterdir() if f.is_dir()]
)
for option_dir in OPTION_DIR.iterdir(): for option_dir in OPTION_DIR.iterdir():
# only removed ones # only removed songs
if ONLY_REMOVED and option_dir.name != "A100": if ONLY_REMOVED and not option_dir.name.endswith("100"):
continue continue
if (option_dir / "music").exists(): if (option_dir / "music").exists():
print("adding mega omnimix patch...")
music_folders.extend( music_folders.extend(
[f for f in (option_dir / "music").iterdir() if f.is_dir()] [f for f in (option_dir / "music").iterdir() if f.is_dir()]
) )

View File

@@ -0,0 +1,20 @@
from sys import argv

import polars as pl
import polars_hash as pl_hash

# parquet file whose "user_id" column holds salted SHA-256 hash prefixes
file = argv[1]

# Rebuild the salted-hash table for the plausible userId range (ids start
# at 10000000) and inner-join it against the parquet to recover the
# numeric id behind every published hash.
candidates = pl.DataFrame({"user_id_num": range(10000000, 14000001)}).with_columns(
    pl.col("user_id_num")
    .cast(pl.String)
    .add("Lt2N5xgjJOqRsT5qVt7wWYw6SqOPZDI7")
    .alias("user_id"),
)
hashed = candidates.with_columns(pl_hash.col("user_id").chash.sha2_256().str.head(16))
user_ids = hashed.join(pl.read_parquet(file), on="user_id", how="inner")["user_id_num"]

with open("id.txt", "w", encoding="utf-8") as f:
    f.write("\n".join(map(str, user_ids)))