Compare commits

...

90 Commits

Author SHA1 Message Date
mokurin000
68b6a36fc8 chore: remove pcrt function 2025-08-17 10:52:12 +08:00
mokurin000
c9b31cbf50 fix: dx rating calculate 2025-08-15 14:00:32 +08:00
mokurin000
7ba0cd666d test: add dx rating calculate check 2025-08-14 22:54:25 +08:00
mokurin000
f78c631570 feat: support dx rating of below A rank 2025-08-14 22:22:29 +08:00
mokurin000
2b6bb12dce enhance: enable higher compression level 2025-08-11 13:32:54 +08:00
mokurin000
32cf576b88 refactor: dedup clean uid 2025-08-11 13:12:11 +08:00
mokurin000
b0942e2af4 feat: high-concurrency userid hashing 2025-08-11 12:12:14 +08:00
mokurin000
1d2e3fc7cc chore: optimize parquet fields 2025-08-10 22:17:46 +08:00
mokurin000
0ce47537fb build: bump redb to 3.0 2025-08-10 21:59:47 +08:00
mokurin000
c8c101f72a feat: skip login support 2025-08-09 13:39:07 +08:00
mokurin000
adba34cde6 perf: get more musics at once 2025-08-06 03:21:50 +08:00
mokurin000
ad85d05470 fix: totally fix bincode feature gate 2025-08-04 22:35:30 +08:00
mokurin000
d23abb02fd fix: feature gate, bincode 2025-08-04 22:32:08 +08:00
mokurin000
18eaf01c58 build: bump dependencies 2025-08-04 15:54:27 +08:00
mokurin000
fb03009f0d build: optional parquet support 2025-08-04 15:53:18 +08:00
mokurin000
468f7c9873 feat: generate better looking curves 2025-08-04 13:59:43 +08:00
mokurin000
e8749a8cd2 fix: colorize curve 2025-08-04 13:49:18 +08:00
mokurin000
2a48f2a2ce fix: handle missing music 2025-08-04 13:44:51 +08:00
mokurin000
b9e4b36053 feat: implement per user pc-rating curve 2025-08-04 13:03:21 +08:00
mokurin000
9d3ca2fe76 feat: export regions 2025-08-04 02:01:39 +08:00
mokurin000
a3ba321e5e refactor: drop players.json support 2025-08-04 01:55:44 +08:00
mokurin000
c1767e592e log: fix misleading info! log 2025-08-04 01:50:44 +08:00
mokurin000
73e1046be9 perf: parquet based data export 2025-08-04 01:49:43 +08:00
mokurin000
89d8177180 chore: load players.json via orjson 2025-08-03 20:48:55 +08:00
mokurin000
c8867a68e6 chore: use better layout 2025-08-03 20:10:59 +08:00
mokurin000
a698434526 fix: dump user region resp 2025-08-03 20:02:57 +08:00
mokurin000
90a3fc09df feat: dump user regions 2025-08-03 20:00:50 +08:00
mokurin000
ca81c6495a fix: make sure tables are initialized 2025-08-03 19:30:03 +08:00
mokurin000
a1b3a8ef0e feat: scrape user regions 2025-08-03 19:04:16 +08:00
mokurin000
56a36933e1 fix: create records table 2025-08-03 18:11:27 +08:00
mokurin000
497c67ff89 chore: collect structured GetUserMusicApiResp 2025-08-03 18:08:50 +08:00
mokurin000
81c8f21729 feat: scrape all player record 2025-08-03 17:54:31 +08:00
mokurin000
9a6e414793 refactor: add Ext postfix for APIExt types 2025-08-03 12:15:37 +08:00
mokurin000
0d379bf248 feat: add helper for userallmusicdetail 2025-08-03 12:12:32 +08:00
mokurin000
7e4dc9b978 refactor: more flexible cached scrape 2025-08-03 12:06:07 +08:00
mokurin000
a60e65e110 perf: migrate to orjson for performance 2025-08-03 10:32:29 +08:00
mokurin000
0e40282e87 docs: add some comments 2025-08-03 10:06:10 +08:00
mokurin000
83caae4a31 feat: export b50 with music title, difficulty 2025-08-03 00:45:17 +08:00
mokurin000
503f5f3f33 refactor: music-db crate 2025-08-02 23:52:46 +08:00
mokurin000
f7b3161847 fix: missing version dot 2025-08-02 23:35:52 +08:00
mokurin000
d3c1ed73ee feat: print map version 2025-08-02 23:24:31 +08:00
mokurin000
2c4b7ed447 chore: remove more useless fields 2025-08-02 23:03:01 +08:00
mokurin000
af8cee2528 fix: don't output trophy, which is also always zero 2025-08-02 22:54:50 +08:00
mokurin000
bd40ebba8a feat: public export without userId 2025-08-02 22:47:50 +08:00
mokurin000
f25349ca26 fix: don't block other command when database is opened 2025-08-02 22:32:30 +08:00
mokurin000
03dc2eea94 feat: dump players.json without userId 2025-08-02 22:31:04 +08:00
mokurin000
0d9c8c79b4 enhance: compact & upgrade database 2025-08-02 22:13:04 +08:00
mokurin000
0b2bf20e50 fix: dumb cache update 2025-08-02 22:08:44 +08:00
mokurin000
3e1a0185fa feat: dump fetched b50 2025-08-02 22:02:52 +08:00
mokurin000
c85c2101b7 feat: filter abnormal users 2025-08-02 20:03:50 +08:00
mokurin000
2415a7e029 log: fix logging message 2025-08-02 19:58:25 +08:00
mokurin000
957166d8f4 feat: B50 fetchall impl 2025-08-02 19:56:30 +08:00
mokurin000
84edce688d refactor: split userid read & fetch 2025-08-02 19:49:41 +08:00
mokurin000
929e4641ea refactor: split read_cache 2025-08-02 19:41:49 +08:00
mokurin000
9b53cb633c refactor: implement fetchall with generic type 2025-08-02 19:39:39 +08:00
mokurin000
de330005b3 enhance: special case dx rating calculate 2025-08-02 17:29:37 +08:00
mokurin000
c877f8efeb fix: SSS+ rating calculating 2025-08-02 17:12:06 +08:00
mokurin000
677cdbfd9b fix: 101.0000% rank 2025-08-02 16:22:18 +08:00
mokurin000
45cba29b23 log: make sure to log login timestamp 2025-08-02 16:07:58 +08:00
mokurin000
fe008cca67 docs: fix incorrect comment on machine_readable 2025-08-02 11:01:34 +08:00
mokurin000
c3010f2f10 chore: better display 2025-08-02 10:58:11 +08:00
mokurin000
9e17df0624 feat: convert music detail to dxrating 2025-08-02 10:03:02 +08:00
mokurin000
d337c48ff1 fix: dxratingnet conversion 2025-08-02 09:42:49 +08:00
mokurin000
c86626bc75 style: reformat some code 2025-08-02 09:14:34 +08:00
mokurin000
abea7fce11 docs: add more documentation 2025-08-02 02:05:05 +08:00
mokurin000
3721b2f8fd chore: also print playCount 2025-08-02 00:54:15 +08:00
mokurin000
2cb3c77d92 feat: display music details 2025-08-02 00:52:09 +08:00
mokurin000
23d8345b0e log: always colorful output to stderr 2025-08-02 00:13:31 +08:00
mokurin000
dbe0890a4a log: ping-pong delay log 2025-08-02 00:11:12 +08:00
mokurin000
000251df65 chore: sdgb 1.40 stopped service 2025-08-02 00:08:21 +08:00
mokurin000
780785b7ea build: disable fetchall feature by default 2025-08-02 00:06:58 +08:00
mokurin000
f000b8636c feat: implement full user music fetch 2025-08-02 00:02:57 +08:00
mokurin000
7b4dfbe5b2 feat: dxrating payload dump 2025-08-01 18:42:01 +08:00
mokurin000
4cf7fd1ee9 fix: output flatten list of records 2025-08-01 18:34:29 +08:00
mokurin000
4e07eaf2e0 feat: support dxratingnet format 2025-08-01 18:16:16 +08:00
mokurin000
953feee4c4 refactor: extract json_display 2025-08-01 16:25:27 +08:00
mokurin000
8d7ac62f80 log: do not send logs to stdout 2025-08-01 16:18:44 +08:00
mokurin000
7fe64ac4cd perf: speed-up musicDB load 2025-08-01 03:50:36 +08:00
mokurin000
68e8a6e005 feat: DX RATING calculation 2025-08-01 03:31:16 +08:00
mokurin000
ef2df9052b fix: achievement formatr 2025-08-01 02:55:47 +08:00
mokurin000
1c2a6b6161 feat: dump music level 2025-08-01 02:27:30 +08:00
mokurin000
6fd7361ca1 feat: simple musicDB for title 2025-08-01 02:03:47 +08:00
mokurin000
9b046036c9 feat: initial support for GetUserRating 2025-08-01 01:15:13 +08:00
mokurin000
0b8de2b4bc fix: logout without custom timestamp is useless 2025-07-31 22:30:59 +08:00
mokurin000
183955e655 feat: implement GetUserRatingApi 2025-07-31 22:24:05 +08:00
mokurin000
8dfc834d15 refactor: rename associated type to Response 2025-07-31 21:50:13 +08:00
mokurin000
b0b8cea00e enhance: typed request, thanks to crabtime 2025-07-31 21:42:38 +08:00
mokurin000
b408d1ba51 chore: add util scripts 2025-07-31 20:28:44 +08:00
mokurin000
789d46991d fix: totally fix rustc version check
C-style shit syntax
2025-07-31 20:02:46 +08:00
mokurin000
6cb1dcefe8 fix: compile on older compilers 2025-07-31 18:47:40 +08:00
40 changed files with 39407 additions and 252 deletions

17
.gitignore vendored
View File

@@ -2,5 +2,18 @@
/*.txt
/players.redb
/players.json*
/players.redb*
/*.json*
/players*.parquet
/region*.parquet
/records*.parquet
/.python-version
/uv.lock
/.venv
*.pyc
/*html

700
Cargo.lock generated

File diff suppressed because it is too large

View File

@@ -1,9 +1,10 @@
[workspace]
members = ["sdgb-api", "sdgb-cli"]
members = ["music_db", "sdgb-api", "sdgb-cli"]
resolver = "3"
default-members = ["sdgb-cli"]
[workspace.dependencies]
music-db = { path = "./music_db", default-features = false }
sdgb-api = { path = "./sdgb-api", default-features = false }
spdlog-rs = { version = "0.4.3", default-features = false, features = [
@@ -12,14 +13,18 @@ spdlog-rs = { version = "0.4.3", default-features = false, features = [
] }
snafu = { version = "0.8.6", features = ["backtrace", "rust_1_81"] }
serde = { version = "1.0.219", features = ["derive"] }
serde_json = "1.0.141"
strum = { version = "0.27.2", features = ["derive"] }
tokio = { version = "1", features = ["rt-multi-thread"] }
tokio = { version = "1.47.1", features = ["rt-multi-thread"] }
compio = { version = "0.15.0", features = ["runtime"] }
redb = "2.6.1"
redb = "3.0.0"
crabtime = { git = "https://github.com/wdanilo/crabtime.git", rev = "2ed856f5" }
parquet = "56.0.0"
[profile.release]
lto = true
lto = "thin"
strip = true
codegen-units = 1
codegen-units = 4
panic = "abort"

5
README.md Normal file
View File

@@ -0,0 +1,5 @@
# sdgb-utils-rs
- SBGA 舞萌DX API documentation reference
- A "bare" CLI tool, without many user-friendly conveniences
- Not fully open for now; kept in the private repo

19
music_db/Cargo.toml Normal file
View File

@@ -0,0 +1,19 @@
[package]
name = "music-db"
version = "0.1.0"
edition = "2024"
[dependencies]
rustc-hash = "2.1.1"
rust_decimal = { version = "1.37.2", default-features = false, features = [
"serde-with-arbitrary-precision",
"macros",
] }
serde = { workspace = true }
serde_json = { workspace = true }
spdlog-rs = { workspace = true, optional = true }
[features]
default = ["log"]
log = ["dep:spdlog-rs"]

117
music_db/src/lib.rs Normal file
View File

@@ -0,0 +1,117 @@
use std::sync::LazyLock;
use rust_decimal::{Decimal, dec, serde::DecimalFromString};
use rustc_hash::FxHashMap;
use serde::Deserialize;
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct MusicInfo {
pub id: u32,
pub name: String,
pub version: i64,
pub levels: Vec<Level>,
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Level {
/// 0, 1, 2, 3, 4, 5
pub level: u32,
/// for example: "13.7"
pub difficulty: DecimalFromString,
}
type MusicDB = FxHashMap<u32, MusicInfo>;
pub fn preload_db() {
_ = &*MUSIC_DB;
}
pub fn query_music(music_id: u32) -> Option<&'static MusicInfo> {
MUSIC_DB.as_ref()?.get(&music_id)
}
pub fn query_music_level(music_id: u32, level: u32) -> Option<&'static Level> {
MUSIC_DB
.as_ref()?
.get(&music_id)?
.levels
.iter()
.find(|d| d.level == level)
}
pub static MUSIC_DB: LazyLock<Option<MusicDB>> = LazyLock::new(|| {
let db: Vec<MusicInfo> = serde_json::from_slice(include_bytes!("musicDB.json"))
.inspect_err(|_e| {
#[cfg(feature = "log")]
spdlog::warn!("failed to load musicDB: {_e}")
})
.ok()?;
Some(db.into_iter().map(|entry| (entry.id, entry)).collect())
});
impl Level {
/// achievement: xxx.xxxx% * 10000
///
/// This will **NOT** ignore the utage level, so you can calculate an in-theory DX Rating.
///
/// On invalid input, it returns 0.
pub fn dx_rating(&self, achievement: i32) -> (&'static str, u32) {
let achievement = achievement.min(1005000); // SSS+ case
let (rank, _, factor) = RANKS
.into_iter()
.rev()
.find(|&(_, threshold, _)| threshold <= achievement)
.unwrap(); // safe here, due to the zero threshold
let difficulty_rank: Decimal = self.difficulty.value;
let achievement = Decimal::new(achievement as _, 4);
#[cfg(feature = "log")]
spdlog::info!("factor: {factor}, achievement: {achievement}");
// when ach > 100.5%, calculate as 100.5%
let rating: u32 = (factor * difficulty_rank * achievement)
.floor()
.try_into()
.unwrap_or_default();
(rank, rating)
}
}
const RANKS: [(&'static str, i32, Decimal); 23] = [
("D", 0, dec!(0.0)),
("D", 100000, dec!(0.016)),
("D", 200000, dec!(0.032)),
("D", 300000, dec!(0.048)),
("D", 400000, dec!(0.064)),
("C", 500000, dec!(0.080)),
("B", 600000, dec!(0.096)),
("BB", 700000, dec!(0.112)),
("BBB", 750000, dec!(0.120)),
("BBB", 799999, dec!(0.128)),
("A", 800000, dec!(0.136)),
("AA", 900000, dec!(0.152)),
("AAA", 940000, dec!(0.168)),
("AAA", 969999, dec!(0.176)),
("S", 970000, dec!(0.200)),
("S+", 980000, dec!(0.203)),
("S+", 989999, dec!(0.206)),
("SS", 990000, dec!(0.208)),
("SS+", 995000, dec!(0.211)),
("SS+", 999999, dec!(0.214)),
("SSS", 1000000, dec!(0.216)),
("SSS", 1004999, dec!(0.222)),
("SSS+", 1005000, dec!(0.224)),
];
#[cfg(test)]
mod tests {
use crate::query_music_level;
#[test]
fn test_rating_calculate() {
let level = query_music_level(11696, 3).expect("not found");
assert_eq!(level.dx_rating(953184), ("AAA", 184));
}
}
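For reference, the numbers in the test above work out as follows: an achievement of 953184 means 95.3184%, which lands in the AAA band (factor 0.168), and the expected rating of 184 implies a MASTER chart constant of about 11.5 for music 11696 (inferred from the test itself, not verified against musicDB.json). A minimal f64 sketch of the same computation, purely illustrative since the real code uses rust_decimal to avoid float rounding:

fn main() {
    // achievement is stored as percent * 10_000, so 953_184 means 95.3184%
    let achievement = 953_184_i32;
    // 95.3184% sits in the AAA band starting at 94.0000%, whose factor is 0.168
    let factor = 0.168_f64;
    // assumed chart constant for music 11696, level 3 (MASTER)
    let difficulty = 11.5_f64;
    let rating = (factor * difficulty * (achievement as f64 / 10_000.0)).floor() as u32;
    assert_eq!(rating, 184); // 0.168 * 11.5 * 95.3184 ≈ 184.16, floored to 184
}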

36666
music_db/src/musicDB.json Normal file

File diff suppressed because it is too large

14
pyproject.toml Normal file
View File

@@ -0,0 +1,14 @@
[project]
name = "sdgb-utils-rs"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.12"
dependencies = [
"diskcache>=5.6.3",
"loguru>=0.7.3",
"orjson>=3.11.1",
"polars>=1.32.0",
"polars-hash>=0.5.4",
"pyecharts>=2.0.8",
]

View File

@@ -11,6 +11,8 @@ compio = ["dep:compio"]
tokio = ["dep:tokio"]
bincode = ["dep:bincode"]
parquet = ['dep:parquet', 'dep:parquet_derive']
[dependencies]
snafu = { workspace = true }
serde_json = { workspace = true }
@@ -18,6 +20,13 @@ strum = { workspace = true }
tokio = { workspace = true, optional = true }
compio = { workspace = true, optional = true }
spdlog-rs = { workspace = true }
music-db = { workspace = true }
# (de)serialization
serde = { workspace = true }
# magic macro
crabtime = { workspace = true }
# hashing
digest = "0.10.7"
@@ -28,10 +37,8 @@ md5 = "0.8.0"
chrono = "0.4.41"
# network request
nyquest = { version = "0.2.0", features = ["async", "json"] }
nyquest = { version = "0.3.0", features = ["async", "json"] }
# (de)serialization
serde = { version = "1.0.219", features = ["derive"] }
# compression / encryption
flate2 = "1.1.2"
@@ -39,3 +46,6 @@ cbc = { version = "0.1.2", features = ["alloc"] }
aes = "0.8.4"
cipher = { version = "0.4.4", features = ["block-padding"] }
bincode = { version = "2.0.1", optional = true }
parquet = { version = "56.0.0", optional = true }
parquet_derive = { version = "56.0.0", optional = true }

View File

@@ -4,12 +4,13 @@ use nyquest::{AsyncClient, Body, Request, header::USER_AGENT};
mod model;
use model::{GetResponse, GetUserId};
use serde::Serialize;
pub struct QRCode<'a> {
pub qrcode_content: &'a str,
}
#[derive(Debug, snafu::Snafu)]
#[derive(Debug, snafu::Snafu, Serialize)]
pub enum QRLoginError {
#[snafu(display("QRCode expired [10mins]"))]
QRCodeExpired10,
@@ -23,13 +24,17 @@ pub enum QRLoginError {
#[snafu(context(false))]
#[snafu(display("request error: {source}"))]
NyquestError {
#[serde(skip)]
source: nyquest::Error,
#[serde(skip)]
backtrace: Backtrace,
},
#[snafu(context(false))]
JSONError {
#[serde(skip)]
source: serde_json::error::Error,
#[serde(skip)]
backtrace: Backtrace,
},
}
@@ -53,7 +58,7 @@ impl QRCode<'_> {
2 => Err(QRLoginError::QRCodeExpired10),
1 => Err(QRLoginError::QRCodeExpired30),
50 => Err(QRLoginError::BadSingature),
error_kind @ _ => Err(QRLoginError::Unknown { error_kind }),
error_kind => Err(QRLoginError::Unknown { error_kind }),
}
}
}

View File

@@ -0,0 +1,11 @@
pub fn level_name(level: u32) -> &'static str {
match level {
0 => "BASIC",
1 => "ADVANCED",
2 => "EXPERT",
3 => "MASTER",
4 => "RE: MASTER",
5 => "UTAGE",
_ => "Unknown",
}
}

View File

@@ -2,9 +2,12 @@ pub mod all_net;
pub mod auth_lite;
pub mod title;
pub mod helper;
mod error;
pub use error::ApiError;
#[cfg(feature = "bincode")]
pub use bincode;
#[cfg(all(feature = "compio", feature = "tokio"))]

View File

@@ -9,36 +9,7 @@ use flate2::write::{ZlibDecoder, ZlibEncoder};
use spdlog::debug;
use crate::error::ApiError;
use crate::title::{MaiVersion, MaiVersionExt, Sdgb1_40, Sdgb1_50};
impl MaiVersionExt for Sdgb1_40 {
fn decode(mut data: impl AsMut<[u8]>) -> Result<Vec<u8>, ApiError> {
let mut decompressed = decompress(data.as_mut());
if decompressed.is_empty() {
return Err(ApiError::EmptyResponse);
}
let orig_len = decompressed.len();
let remain = 16 - decompressed.len() % 16;
if
// weird but necessary for Rust Pkcs7
remain != 16 {
decompressed.resize(remain + orig_len, remain as _);
}
let unpad_size = decrypt(&mut decompressed, Self::AES_KEY, Self::AES_IV)?.len();
decompressed.truncate(unpad_size);
Ok(decompressed)
}
fn encode(data: impl AsRef<[u8]>) -> Result<Vec<u8>, ApiError> {
let enc = encrypt(data, Self::AES_KEY, Self::AES_IV)?;
let compressed = compress(enc)?;
Ok(compressed)
}
}
use crate::title::{MaiVersion, MaiVersionExt, Sdgb1_50};
impl MaiVersionExt for Sdgb1_50 {
fn decode(mut data: impl AsMut<[u8]>) -> Result<Vec<u8>, ApiError> {
@@ -110,21 +81,6 @@ mod _tests {
use crate::title::{Sdgb1_50, encryption::*};
#[test]
fn test_sdgb_140_dec_enc() -> Result<(), ApiError> {
let data = [
120_u8, 156, 171, 77, 91, 233, 184, 108, 2, 71, 125, 142, 118, 135, 112, 181, 85, 217,
239, 243, 159, 153, 248, 98, 159, 185, 63, 43, 173, 106, 221, 115, 104, 105, 221, 107,
0, 241, 176, 16, 37,
];
let dec = Sdgb1_40::decode(data)?;
assert_eq!(dec, br#"{"result":"Pong"}"#);
let enc = Sdgb1_40::encode(dec)?;
assert_eq!(enc, data);
Ok(())
}
#[test]
fn test_sdgb_150_dec_enc() -> Result<(), ApiError> {
let data = [

View File

@@ -0,0 +1,55 @@
use nyquest::AsyncClient;
use crate::{
ApiError,
title::{
MaiVersionExt as _, Sdgb1_50,
methods::APIMethod,
model::{GetUserMusicApi, GetUserMusicApiResp},
},
};
pub async fn get_user_all_music(
client: &AsyncClient,
user_id: u32,
) -> Result<GetUserMusicApiResp, ApiError> {
let mut user_music_list = Vec::new();
let mut index = None;
loop {
let GetUserMusicApiResp {
next_index,
user_music_list: mut new_list,
..
} = Sdgb1_50::request::<_, GetUserMusicApiResp>(
&client,
APIMethod::GetUserMusicApi,
user_id,
GetUserMusicApi {
user_id,
next_index: index.unwrap_or_default(),
max_count: 2000,
},
)
.await?;
if new_list.is_empty() {
break;
}
user_music_list.append(&mut new_list);
if next_index == 0 {
break;
}
index = Some(next_index);
}
Ok(GetUserMusicApiResp {
user_id,
next_index: 0,
length: user_music_list.len() as _,
user_music_list,
})
}
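A minimal usage sketch for the paging helper above, mirroring how sdgb-cli builds its client (nyquest_preset::register plus ClientBuilder); the user id is a placeholder, and the #[compio::main] attribute assumes the compio "macros" feature as enabled in sdgb-cli:

use nyquest_preset::nyquest::ClientBuilder;
use sdgb_api::title::helper::get_user_all_music;

#[compio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // register the platform backend before building any client
    nyquest_preset::register();
    let client = ClientBuilder::default().build_async().await?;

    // 12345678 is a placeholder user id
    let resp = get_user_all_music(&client, 12345678).await?;
    println!("fetched {} userMusic entries", resp.user_music_list.len());
    Ok(())
}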

View File

@@ -0,0 +1,27 @@
use crate::title::methods::{APIExt, APIMethod};
#[crabtime::function]
fn api_implement(api_names: Vec<String>) {
for api_name in api_names {
crabtime::output!(
pub struct {{api_name}}Ext;
impl APIExt for {{api_name}}Ext {
const METHOD: APIMethod = APIMethod::{{api_name}};
type Payload = crate::title::model::{{api_name}};
type Response = crate::title::model::{{api_name}}Resp;
}
);
}
}
api_implement!([
"Ping",
"UserLoginApi",
"UserLogoutApi",
"GetUserDataApi",
"GetUserPreviewApi",
"GetUserRatingApi",
"GetUserMusicApi",
"GetUserRegionApi",
]);
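For one entry in the list above, the code generated by the macro is roughly equivalent to writing the following by hand (shown for GetUserPreviewApi; the other names expand the same way):

pub struct GetUserPreviewApiExt;

impl APIExt for GetUserPreviewApiExt {
    const METHOD: APIMethod = APIMethod::GetUserPreviewApi;
    type Payload = crate::title::model::GetUserPreviewApi;
    type Response = crate::title::model::GetUserPreviewApiResp;
}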

View File

@@ -0,0 +1,21 @@
use crate::title::methods::HasUid;
#[crabtime::function]
fn uid_get_impl(api_names: Vec<String>) {
for api_name in api_names {
crabtime::output!(
impl HasUid for crate::title::model::{{api_name}}Resp {
fn get_uid(&self) -> u32 {
self.user_id
}
}
);
}
}
uid_get_impl!([
"GetUserDataApi",
"GetUserMusicApi",
"GetUserPreviewApi",
"GetUserRatingApi"
]);
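Likewise, the companion macro above expands each listed response type into a one-line uid accessor, e.g. for GetUserPreviewApiResp:

impl HasUid for crate::title::model::GetUserPreviewApiResp {
    fn get_uid(&self) -> u32 {
        self.user_id
    }
}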

View File

@@ -1,3 +1,5 @@
use serde::{Deserialize, Serialize};
#[derive(strum::IntoStaticStr)]
pub enum APIMethod {
GetGameChargeApi,
@@ -45,6 +47,20 @@ pub enum APIMethod {
UserLogoutApi,
}
pub trait APIExt {
const METHOD: APIMethod;
type Payload: Serialize + Send + 'static;
type Response: for<'de> Deserialize<'de>;
}
pub trait HasUid {
fn get_uid(&self) -> u32;
}
mod api_ext;
mod has_uid;
pub use api_ext::*;
#[cfg(test)]
mod _test {
use crate::title::{MaiVersionExt, Sdgb1_50, methods::APIMethod};

View File

@@ -1,11 +1,13 @@
use std::fmt::Display;
use crate::title::methods::APIMethod;
use crate::title::methods::{APIExt, APIMethod};
pub mod encryption;
pub mod methods;
pub mod model;
pub mod helper;
use super::ApiError;
use nyquest::{
@@ -107,18 +109,18 @@ pub trait MaiVersionExt: MaiVersion {
Ok(serde_json::from_slice(&raw_data)?)
}
}
fn request_ext<M: APIExt>(
client: &AsyncClient,
data: M::Payload,
agent_extra: impl Display + Send + 'static,
) -> impl Future<Output = Result<M::Response, ApiError>> {
Self::request(client, M::METHOD, agent_extra, data)
}
}
pub struct Sdgb1_40;
pub struct Sdgb1_50;
impl MaiVersion for Sdgb1_40 {
const AES_KEY: &[u8; 32] = b"n7bx6:@Fg_:2;5E89Phy7AyIcpxEQ:R@";
const AES_IV: &[u8; 16] = b";;KjR1C3hgB1ovXa";
const OBFUSECATE_SUFFIX: &str = "MaimaiChnBEs2D5vW";
const VERSION: &str = "1.40";
}
impl MaiVersion for Sdgb1_50 {
const AES_KEY: &[u8; 32] = b"a>32bVP7v<63BVLkY[xM>daZ1s9MBP<R";
const AES_IV: &[u8; 16] = b"d6xHIKq]1J]Dt^ue";

View File

@@ -0,0 +1,98 @@
use music_db::query_music;
use crate::title::model::{
dxrating::{DxCalculatedEntries, DxLevelName, DxMusicRecord, DxSheetId},
get_user_music_api::UserMusicDetail,
get_user_rating_api::{MusicRating, UserRating},
};
impl DxCalculatedEntries {
pub fn from_user_rating_lossy(rating: &UserRating) -> DxCalculatedEntries {
let b35 = rating
.rating_list
.iter()
.map(DxMusicRecord::try_from)
.flatten()
.collect();
let b15 = rating
.new_rating_list
.iter()
.map(DxMusicRecord::try_from)
.flatten()
.collect();
DxCalculatedEntries { b35, b15 }
}
}
impl TryFrom<u32> for DxLevelName {
type Error = ConversionError;
fn try_from(level: u32) -> Result<Self, Self::Error> {
Self::from_repr(level).ok_or(ConversionError::UnknownDifficulty { level })
}
}
impl TryFrom<&UserMusicDetail> for DxMusicRecord {
type Error = ConversionError;
fn try_from(
&UserMusicDetail {
music_id,
level,
achievement,
..
}: &UserMusicDetail,
) -> Result<Self, Self::Error> {
let music_title = query_music(music_id)
.map(|info| info.name.clone())
.ok_or(ConversionError::MusicNotInDB)?;
Ok(Self {
sheet_id: DxSheetId {
music_title,
level: DxLevelName::try_from(level)?,
dx_version: music_id >= 10000,
},
achievement_rate: (achievement as f64) / 10000.0,
})
}
}
impl TryFrom<&MusicRating> for DxMusicRecord {
type Error = ConversionError;
fn try_from(
&MusicRating {
music_id,
level,
achievement,
..
}: &MusicRating,
) -> Result<Self, Self::Error> {
let music_title = query_music(music_id)
.map(|info| info.name.clone())
.ok_or(ConversionError::MusicNotInDB)?;
Ok(Self {
sheet_id: DxSheetId {
music_title,
level: DxLevelName::try_from(level)?,
dx_version: music_id >= 10000,
},
achievement_rate: (achievement as f64) / 10000.0,
})
}
}
#[derive(Debug, snafu::Snafu)]
pub enum ConversionError {
#[snafu(display("Music was not found in database"))]
MusicNotInDB,
#[snafu(display("Utage difficulty was disallowed"))]
UtageDifficulty,
#[snafu(display("Unknown difficulty: {level}"))]
UnknownDifficulty { level: u32 },
}

View File

@@ -0,0 +1,95 @@
use serde::Serialize;
/// Full payload for image generate api
#[derive(Debug, Clone, PartialEq, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct DxRatingNet {
pub calculated_entries: DxCalculatedEntries,
pub version: DataVersion,
/// use `_generic`
pub region: &'static str,
}
/// Export/Import format
#[derive(Debug, Clone, PartialEq, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct DxCalculatedEntries {
pub b35: Vec<DxMusicRecord>,
pub b15: Vec<DxMusicRecord>,
}
/// full music record
#[derive(Debug, Clone, PartialEq, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct DxMusicRecord {
pub sheet_id: DxSheetId,
pub achievement_rate: f64,
}
#[derive(Debug, Clone, PartialEq)]
pub struct DxSheetId {
pub music_title: String,
pub dx_version: bool,
pub level: DxLevelName,
}
#[derive(Debug, Clone, Copy, PartialEq, strum::IntoStaticStr, strum::FromRepr)]
#[strum(serialize_all = "lowercase")]
#[repr(u32)]
pub enum DxLevelName {
Basic,
Advanced,
Expert,
Master,
ReMaster,
}
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum DataVersion {
Buddies,
BuddiesPlus,
Prism,
PrismPlus,
}
impl Serialize for DataVersion {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.serialize_str(match self {
DataVersion::Buddies => "BUDDiES",
DataVersion::BuddiesPlus => "BUDDiES PLUS",
DataVersion::Prism => "PRiSM",
DataVersion::PrismPlus => "PRiSM PLUS",
})
}
}
impl Serialize for DxSheetId {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.serialize_str(&self.to_string())
}
}
impl ToString for DxSheetId {
fn to_string(&self) -> String {
let mut output = self.music_title.clone();
if self.dx_version {
output += "__dxrt__dx__dxrt__"
} else {
output += "__dxrt__std__dxrt__"
}
output += self.level.into();
output
}
}
mod conversion;
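As a quick check of the sheet-id wire format built above, here is a test-style sketch with a placeholder song title (not taken from musicDB.json); strum's lowercase rename means Master serializes as "master" and ReMaster as "remaster":

#[cfg(test)]
mod sheet_id_format {
    use super::{DxLevelName, DxSheetId};

    #[test]
    fn dx_master_sheet_id() {
        let id = DxSheetId {
            music_title: "Example Song".to_string(), // placeholder title
            dx_version: true,
            level: DxLevelName::Master,
        };
        assert_eq!(id.to_string(), "Example Song__dxrt__dx__dxrt__master");
    }
}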

View File

@@ -40,7 +40,7 @@ pub struct UserData {
pub grade_rank: i64,
/// 段位认定
pub course_rank: i64,
/// 友人对战段位
/// 友人对战阶级
pub class_rank: i64,
pub nameplate_id: i64,
pub frame_id: i64,

View File

@@ -0,0 +1,183 @@
use std::fmt::Display;
use music_db::query_music;
use music_db::query_music_level;
use serde::Deserialize;
use serde::Serialize;
use crate::helper::level_name;
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct GetUserMusicApi {
pub user_id: u32,
pub next_index: u32,
pub max_count: u32,
}
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[cfg_attr(feature = "bincode", derive(bincode::Encode, bincode::Decode))]
#[serde(rename_all = "camelCase")]
pub struct GetUserMusicApiResp {
pub user_id: u32,
pub length: u32,
pub next_index: u32,
pub user_music_list: Vec<UserMusic>,
}
#[cfg_attr(feature = "bincode", derive(bincode::Encode, bincode::Decode))]
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct UserMusic {
pub user_music_detail_list: Vec<UserMusicDetail>,
pub length: u32,
}
#[cfg_attr(feature = "bincode", derive(bincode::Encode, bincode::Decode))]
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct UserMusicDetail {
pub music_id: u32,
pub level: u32,
pub play_count: i64,
/// 达成率
pub achievement: i64,
/// Full Combo
///
/// - 0: None
/// - 1: Full Combo
/// - 2: Full Combo+
/// - 3: All Perfect
/// - 4: All Perfect+
pub combo_status: i64,
/// Full Sync
///
/// - 0: None
/// - 1: FullSync
/// - 2: FullSync+
/// - 3: FullSync DX
/// - 4: Full Sync DX+
/// - 5: SYNC
pub sync_status: i64,
/// DX 分数
pub deluxscore_max: i64,
/// - D = 0,
/// - C = 1,
/// - B = 2,
/// - BB = 3,
/// - BBB = 4,
/// - A = 5,
/// - AA = 6,
/// - AAA = 7,
/// - S = 8,
/// - S_PLUS = 9,
/// - SS = 10,
/// - SS_PLUS = 11,
/// - SSS = 12,
/// - SSS_PLUS = 13
pub score_rank: i64,
/// 理论次数
pub ext_num1: i64,
pub ext_num2: i64,
}
#[cfg_attr(feature = "parquet", derive(parquet_derive::ParquetRecordWriter))]
pub struct UserMusicDetailFlatten {
pub user_id: u32,
pub music_id: u32,
pub level: u8,
pub play_count: u32,
pub achievement: u32,
pub combo_status: u8,
pub sync_status: u8,
pub deluxscore_max: u16,
pub score_rank: u8,
pub ext_num1: u32,
pub ext_num2: u32,
}
impl Display for UserMusicDetail {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
if let Some(music_title) = query_music(self.music_id).map(|i| &i.name) {
f.write_fmt(format_args!("曲目名称: \t{music_title}\n"))?;
}
f.write_fmt(format_args!("难度名称: \t{}\n", level_name(self.level)))?;
f.write_fmt(format_args!("游玩次数: \t{}\n", self.play_count))?;
f.write_fmt(format_args!(
"达成率: \t{}.{:04}%\n",
self.achievement / 10000,
self.achievement % 10000
))?;
f.write_fmt(format_args!(
"达成状态: \t{}\n",
match self.combo_status {
0 => "",
1 => "Full Combo",
2 => "Full Combo+",
3 => "All Perfect",
4 => "All Perfect+",
_ => "未知",
}
))?;
f.write_fmt(format_args!(
"同步状态: \t{}\n",
match self.sync_status {
0 => "",
1 => "Full Sync",
2 => "Full Sync+",
3 => "Full Sync DX",
4 => "Full Sync DX+",
5 => "SYNC", // 一起玩过
_ => "未知",
}
))?;
f.write_fmt(format_args!("DX 分数: \t{}\n", self.deluxscore_max))?;
if let Some(level) = query_music_level(self.music_id, self.level) {
let (rank, rating) = level.dx_rating(self.achievement as _);
f.write_fmt(format_args!("DX RATING: \t{rating}\n"))?;
f.write_fmt(format_args!("RANK: \t{rank}"))?;
}
Ok(())
}
}
impl UserMusicDetailFlatten {
pub fn new(
user_id: u32,
UserMusicDetail {
music_id,
level,
play_count,
achievement,
combo_status,
sync_status,
deluxscore_max,
score_rank,
ext_num1,
ext_num2,
}: UserMusicDetail,
) -> Self {
Self {
user_id,
music_id,
level: level as _,
sync_status: sync_status as _,
deluxscore_max: deluxscore_max as _,
score_rank: score_rank as _,
play_count: play_count as _,
achievement: achievement as _,
combo_status: combo_status as _,
ext_num1: ext_num1 as _,
ext_num2: ext_num2 as _,
}
}
}
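Worked example of the fixed-point achievement formatting used in the Display impl above: achievement is stored as percent * 10_000, so the SSS+ cap of 1_005_000 prints as 100.5000%.

fn main() {
    let achievement: i64 = 1_005_000; // i.e. 100.5000%, the SSS+ cap
    let text = format!("{}.{:04}%", achievement / 10_000, achievement % 10_000);
    assert_eq!(text, "100.5000%");
}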

View File

@@ -1,6 +1,5 @@
use std::fmt::Display;
use bincode::{Decode, Encode};
use serde::{Deserialize, Serialize};
#[derive(Serialize)]
@@ -9,7 +8,15 @@ pub struct GetUserPreviewApi {
pub user_id: u32,
}
#[derive(Debug, Clone, Serialize, Deserialize, Encode, Decode)]
impl From<u32> for GetUserPreviewApi {
fn from(user_id: u32) -> Self {
Self { user_id }
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[cfg_attr(feature = "bincode", derive(bincode::Encode, bincode::Decode))]
#[cfg_attr(feature = "parquet", derive(parquet_derive::ParquetRecordWriter))]
#[serde(rename_all = "camelCase")]
pub struct GetUserPreviewApiResp {
pub user_id: u32,
@@ -45,7 +52,6 @@ impl Display for GetUserPreviewApiResp {
f.write_fmt(format_args!("DX Rating: {}\n", self.player_rating))?;
f.write_fmt(format_args!("牌子: {}\n", self.nameplate_id))?;
f.write_fmt(format_args!("图标: {}\n", self.icon_id))?;
f.write_fmt(format_args!("trophy: {}\n", self.trophy_id))?;
f.write_fmt(format_args!("Net成员: {}\n", self.is_net_member))?;
f.write_fmt(format_args!("继承账号: {}\n", self.is_inherit))?;
f.write_fmt(format_args!("总觉醒: {}\n", self.total_awake))?;

View File

@@ -0,0 +1,191 @@
use std::fmt::Display;
use music_db::query_music;
use music_db::query_music_level;
use serde::Deserialize;
use serde::Serialize;
use crate::helper::level_name;
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct GetUserRatingApi {
pub user_id: u32,
}
impl From<u32> for GetUserRatingApi {
fn from(user_id: u32) -> Self {
Self { user_id }
}
}
#[cfg_attr(feature = "bincode", derive(bincode::Encode, bincode::Decode))]
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct GetUserRatingApiResp {
pub user_id: u32,
pub user_rating: UserRating,
}
#[cfg_attr(feature = "bincode", derive(bincode::Encode, bincode::Decode))]
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct UserRating {
/// total rating, now it's 0
pub rating: i64,
/// b35
pub rating_list: Vec<MusicRating>,
/// b15
pub new_rating_list: Vec<MusicRating>,
/// 候补 b35
pub next_rating_list: Vec<MusicRating>,
/// 候补 b15
pub next_new_rating_list: Vec<MusicRating>,
pub udemae: Udemae,
}
#[cfg_attr(feature = "bincode", derive(bincode::Encode, bincode::Decode))]
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct MusicRating {
/// Maimai music id
pub music_id: u32,
/// - 0: BASIC
/// - 1: ADVANCED
/// - 2: EXPERT
/// - 3: MASTER
/// - 4: RE: MASTER
/// - 5: Utage 宴会场
pub level: u32,
/// 歌曲 ROM 版本
///
/// - `1mmpp` -> `1.mm.pp`
/// - `2mmpp` -> `1.mm.pp` DX
pub rom_version: i64,
/// 达成率 * 10000 的整数
pub achievement: i32,
}
#[cfg_attr(feature = "bincode", derive(bincode::Encode, bincode::Decode))]
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Udemae {
pub max_lose_num: i64,
pub npc_total_win_num: i64,
pub npc_total_lose_num: i64,
pub npc_max_win_num: i64,
pub npc_max_lose_num: i64,
pub npc_win_num: i64,
pub npc_lose_num: i64,
pub rate: i64,
pub class_value: i64,
pub max_rate: i64,
pub max_class_value: i64,
pub total_win_num: i64,
pub total_lose_num: i64,
pub max_win_num: i64,
pub win_num: i64,
pub lose_num: i64,
#[serde(rename = "MaxLoseNum")]
pub max_lose_num2: i64,
#[serde(rename = "NpcTotalWinNum")]
pub npc_total_win_num2: i64,
#[serde(rename = "NpcTotalLoseNum")]
pub npc_total_lose_num2: i64,
#[serde(rename = "NpcMaxWinNum")]
pub npc_max_win_num2: i64,
#[serde(rename = "NpcMaxLoseNum")]
pub npc_max_lose_num2: i64,
#[serde(rename = "NpcWinNum")]
pub npc_win_num2: i64,
#[serde(rename = "NpcLoseNum")]
pub npc_lose_num2: i64,
}
impl Display for GetUserRatingApiResp {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let b35 = &self.user_rating.rating_list;
let b15 = &self.user_rating.new_rating_list;
f.write_fmt(format_args!("用户ID: {}\n", self.user_id))?;
f.write_str("\n--------- B35 ---------\n")?;
for music in b35 {
f.write_fmt(format_args!("{music}\n---\n"))?;
}
f.write_str("\n--------- B15 ---------\n")?;
for music in b15 {
f.write_fmt(format_args!("{music}\n---\n"))?;
}
let b35_rating: u32 = b35.iter().filter_map(|m| m.dx_rating()).sum();
let b15_rating: u32 = b15.iter().filter_map(|m| m.dx_rating()).sum();
f.write_str("\n--------- Total ---------\n")?;
f.write_fmt(format_args!("B35 Rating: {b35_rating}\n"))?;
f.write_fmt(format_args!("B15 Rating: {b15_rating}\n"))?;
f.write_fmt(format_args!("总 Rating: {}\n", b35_rating + b15_rating))?;
Ok(())
}
}
impl Display for MusicRating {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_fmt(format_args!("歌曲ID: \t{}\n", self.music_id))?;
if let Some(title) = self.music_title() {
f.write_fmt(format_args!("曲目标题: \t{title}\n"))?;
}
f.write_fmt(format_args!(
"谱面版本: \t{}\n",
match (self.music_id / 10000) % 10 {
0 => "SD",
1 => "DX",
_ => "",
}
))?;
f.write_fmt(format_args!("游玩难度: \t{}\n", level_name(self.level)))?;
f.write_fmt(format_args!(
"达成率: \t{}.{:04}%\n",
self.achievement / 10000,
self.achievement % 10000
))?;
if self.rom_version >= 20000 {
f.write_fmt(format_args!(
"谱面版本: \tDX 1.{:02}.{:02}\n",
(self.rom_version / 100) % 100,
self.rom_version % 100,
))?;
} else {
f.write_fmt(format_args!(
"谱面版本: \tSD 1.{:02}.{:02}\n",
(self.rom_version / 100) % 100,
self.rom_version % 100,
))?;
}
if let Some(dx_rating) = self.dx_rating() {
f.write_fmt(format_args!("DX RATING: \t{dx_rating}"))?;
}
Ok(())
}
}
impl MusicRating {
pub fn music_title(&self) -> Option<String> {
Some(query_music(self.music_id).as_ref()?.name.clone())
}
pub fn dx_rating(&self) -> Option<u32> {
Some(
query_music_level(self.music_id, self.level)?
.dx_rating(self.achievement)
.1,
)
}
}
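Worked example of the rom_version decoding in the Display impl above, using a placeholder value: 25000 is >= 20000 so it is a DX chart, (25000 / 100) % 100 = 50 and 25000 % 100 = 0, giving "DX 1.50.00".

fn main() {
    let rom_version: i64 = 25_000; // placeholder value for a DX 1.50 chart
    let kind = if rom_version >= 20_000 { "DX" } else { "SD" };
    let text = format!(
        "{kind} 1.{:02}.{:02}",
        (rom_version / 100) % 100,
        rom_version % 100,
    );
    assert_eq!(text, "DX 1.50.00");
}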

View File

@@ -0,0 +1,68 @@
use serde::{Deserialize, Serialize};
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct GetUserRegionApi {
pub user_id: u32,
}
impl From<u32> for GetUserRegionApi {
fn from(user_id: u32) -> Self {
Self { user_id }
}
}
impl From<GetUserRegionApiResp> for Vec<UserRegionFlatten> {
fn from(
GetUserRegionApiResp {
user_id,
user_region_list,
..
}: GetUserRegionApiResp,
) -> Self {
user_region_list
.into_iter()
.map(
|UserRegion {
region_id,
play_count,
created,
}| {
UserRegionFlatten {
user_id,
region_id,
play_count,
created,
}
},
)
.collect()
}
}
#[derive(Default, Debug, Clone, PartialEq)]
#[cfg_attr(feature = "parquet", derive(parquet_derive::ParquetRecordWriter))]
pub struct UserRegionFlatten {
pub user_id: u32,
pub region_id: u32,
pub play_count: i64,
pub created: String,
}
#[cfg_attr(feature = "bincode", derive(bincode::Encode, bincode::Decode))]
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct GetUserRegionApiResp {
pub user_id: u32,
pub length: i64,
pub user_region_list: Vec<UserRegion>,
}
#[cfg_attr(feature = "bincode", derive(bincode::Encode, bincode::Decode))]
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct UserRegion {
pub region_id: u32,
pub play_count: i64,
pub created: String,
}

View File

@@ -11,4 +11,33 @@ mod user_login_api;
pub use user_login_api::{LoginError, UserLoginApi, UserLoginApiResp};
mod get_user_data_api;
pub use get_user_data_api::{GetUserDataApi, GetUserDataApiResp};
pub use get_user_data_api::{GetUserDataApi, GetUserDataApiResp, UserData};
mod get_user_rating_api;
pub use get_user_rating_api::{
GetUserRatingApi,
GetUserRatingApiResp, // api
MusicRating,
Udemae,
UserRating,
};
mod get_user_music_api;
pub use get_user_music_api::{
GetUserMusicApi, GetUserMusicApiResp, UserMusic, UserMusicDetail, UserMusicDetailFlatten,
};
mod get_user_region_api;
pub use get_user_region_api::{
GetUserRegionApi, GetUserRegionApiResp, UserRegion, UserRegionFlatten,
};
mod dxrating;
pub use dxrating::{
DataVersion,
DxCalculatedEntries, // entries
DxLevelName, // level name
DxMusicRecord,
DxRatingNet,
DxSheetId,
};

View File

@@ -1,6 +1,7 @@
use std::time::{SystemTime, UNIX_EPOCH};
use serde::{Deserialize, Serialize};
use spdlog::info;
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
@@ -11,7 +12,9 @@ pub struct UserLoginApi {
pub acsess_code: String,
pub place_id: String,
pub client_id: String,
/// set to `false` is fine
/// false 的情况,二维码扫描后半小时可登录。
///
/// true 的情况,可延长至二维码扫描后的两小时可登录。
pub is_continue: bool,
/// fixed to 0
pub generic_flag: u8,
@@ -33,12 +36,14 @@ pub struct UserLoginApiResp {
}
impl UserLoginApi {
pub fn new(user_id: u32) -> Self {
pub fn new(user_id: u32, is_continue: bool) -> Self {
let date_time = SystemTime::now()
.duration_since(UNIX_EPOCH)
.map(|t| t.as_secs())
.unwrap_or_default();
info!("login unix timestamp: {date_time}");
// 爱玩星球焦作解放店
UserLoginApi {
user_id,
@@ -46,7 +51,7 @@ impl UserLoginApi {
region_id: 13,
acsess_code: "".to_owned(),
place_id: 3223.to_string(),
is_continue: false,
is_continue,
generic_flag: 0,
client_id: "A63E01E6170".into(),
}
@@ -60,7 +65,7 @@ impl UserLoginApiResp {
100 => Some(LoginError::AlreadyLogged),
102 => Some(LoginError::QRCodeExpired),
103 => Some(LoginError::AccountUnregistered),
error @ _ => Some(LoginError::Unknown { error }),
error => Some(LoginError::Unknown { error }),
}
}
}

View File

@@ -18,7 +18,7 @@ pub struct UserLogoutApi {
pub type_: i64,
}
#[derive(Default, Debug, Clone, PartialEq, Deserialize)]
#[derive(Default, Debug, Clone, PartialEq, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct UserLogoutApiResp {
pub return_code: i64,

View File

@@ -11,21 +11,38 @@ default = ["compio", "fetchall"]
compio = ["dep:compio", "sdgb-api/compio"]
tokio = ["dep:tokio", "sdgb-api/tokio"]
fetchall = ["dep:redb", "dep:futures-util"]
fetchall = ["dep:redb", "dep:futures-util", "dep:parquet", "sdgb-api/parquet"]
[dependencies]
sdgb-api = { workspace = true, features = ["bincode"] }
spdlog-rs = { workspace = true }
snafu = { workspace = true }
# (de)serialization
serde = { workspace = true }
serde_json = { workspace = true }
strum = { workspace = true }
# logging / errors
spdlog-rs = { workspace = true }
snafu = { workspace = true }
# kv database
redb = { workspace = true, optional = true }
# async runtime
tokio = { workspace = true, features = ["macros"], optional = true }
compio = { workspace = true, features = ["macros"], optional = true }
nyquest-preset = { version = "0.2.0", features = ["async"] }
nyquest-preset = { version = "0.3.0", features = ["async"] }
palc = { version = "0.0.1", features = ["derive"] }
futures-util = { version = "0.3.31", optional = true }
ctrlc = { version = "3.4.7", features = ["termination"] }
# magic macro
crabtime = { workspace = true }
parquet = { workspace = true, optional = true }
[build-dependencies]
version_check = "0.9.5"

7
sdgb-cli/build.rs Normal file
View File

@@ -0,0 +1,7 @@
fn main() {
println!("cargo::rustc-check-cfg=cfg(file_lock_ready)");
if version_check::is_min_version("1.89") == Some(true) {
println!("cargo:rustc-cfg=file_lock_ready");
}
}

View File

@@ -1,35 +1,64 @@
use std::sync::LazyLock;
use redb::{ReadTransaction, Table, TableDefinition, WriteTransaction};
use redb::{ReadTransaction, ReadableDatabase as _, Table, TableDefinition, WriteTransaction};
static DATABASE: LazyLock<redb::Database> = LazyLock::new(|| {
redb::Database::builder()
let mut db = redb::Database::builder()
.create("players.redb")
.expect("failed to open database")
.expect("failed to open database");
_ = db.compact();
db
});
const DIFINITION: TableDefinition<'_, u32, Vec<u8>> = redb::TableDefinition::new("players");
pub fn write_txn() -> Result<WriteTransaction, redb::Error> {
Ok(DATABASE.begin_write()?)
}
pub fn open_table(write: &WriteTransaction) -> Result<Table<'_, u32, Vec<u8>>, redb::Error> {
Ok(write.open_table(DIFINITION)?)
}
pub fn read_txn() -> Result<ReadTransaction, redb::Error> {
Ok(DATABASE.begin_read()?)
}
pub fn open_table_read(
read: &ReadTransaction,
) -> Result<redb::ReadOnlyTable<u32, Vec<u8>>, redb::Error> {
Ok(read.open_table(DIFINITION)?)
pub fn open_table<'a>(
write: &'a WriteTransaction,
definition: TableDefinition<'_, u32, Vec<u8>>,
) -> Result<Table<'a, u32, Vec<u8>>, redb::Error> {
Ok(write.open_table(definition)?)
}
pub fn init_db() -> Result<(), redb::Error> {
let write_txn = DATABASE.begin_write()?;
write_txn.open_table(DIFINITION)?;
write_txn.commit()?;
Ok(())
pub fn open_table_ro(
read: &ReadTransaction,
definition: TableDefinition<'_, u32, Vec<u8>>,
) -> Result<redb::ReadOnlyTable<u32, Vec<u8>>, redb::Error> {
Ok(read.open_table(definition)?)
}
#[crabtime::function]
fn table_definitions_impl(tables: Vec<String>) {
let mut defs: Vec<String> = Vec::new();
for table in tables {
let definition = table.to_uppercase();
let table_name = format!("\"{table}\"");
crabtime::output!(
pub const {{definition}}: TableDefinition<'_, u32, Vec<u8>> = redb::TableDefinition::new({{table_name}});
);
defs.push(format!("write_txn.open_table({definition})?;"));
}
let init_statements = defs.join("\n");
crabtime::output!(
pub fn init_db() -> Result<(), redb::Error> {
let write_txn = DATABASE.begin_write()?;
{
{ init_statements }
}
write_txn.commit()?;
Ok(())
}
);
}
table_definitions_impl!(["players", "b50", "records", "regions"]);
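For reference, the macro call above expands to roughly the following hand-written equivalent: one table constant per name, plus an init_db that opens every table once up front (cf. the "make sure tables are initialized" fix in the commit list):

pub const PLAYERS: TableDefinition<'_, u32, Vec<u8>> = redb::TableDefinition::new("players");
pub const B50: TableDefinition<'_, u32, Vec<u8>> = redb::TableDefinition::new("b50");
pub const RECORDS: TableDefinition<'_, u32, Vec<u8>> = redb::TableDefinition::new("records");
pub const REGIONS: TableDefinition<'_, u32, Vec<u8>> = redb::TableDefinition::new("regions");

pub fn init_db() -> Result<(), redb::Error> {
    let write_txn = DATABASE.begin_write()?;
    write_txn.open_table(PLAYERS)?;
    write_txn.open_table(B50)?;
    write_txn.open_table(RECORDS)?;
    write_txn.open_table(REGIONS)?;
    write_txn.commit()?;
    Ok(())
}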

View File

@@ -5,6 +5,11 @@ use strum::EnumString;
#[derive(Parser)]
#[command(about = "SDGB api tool", long_about = env!("CARGO_PKG_DESCRIPTION"))]
pub struct Cli {
/// Try to generate machine readable format.
///
/// You must specify this for `-f, --format` to take effect.
#[arg(short = 'M', long)]
pub machine_readable: bool,
#[command(subcommand)]
pub command: Commands,
}
@@ -24,6 +29,7 @@ pub enum Commands {
qrcode_content: String,
},
/// Retrieve update package of SDGB
AuthLite {
#[arg(short, long, default_value = "1.50")]
title_ver: String,
@@ -31,14 +37,48 @@ pub enum Commands {
variant: AuthLiteVariant,
},
/// Test delay to SDGB server
Ping,
/// Get basic info
Preview {
#[arg(short, long)]
user_id: u32,
},
/// Get B35 + B15 play records
Rating {
#[arg(short, long)]
user_id: u32,
/// JSON format.
///
/// `origin`: official json response
///
/// `dx_rating_net`: DxRatingNet Format
#[arg(short, long, default_value_t = RatingFormat::default())]
format: RatingFormat,
},
/// Get all play records
MusicDetail {
#[arg(short, long)]
user_id: u32,
/// JSON format.
///
/// `origin`: official json response
///
/// `dx_rating_net`: DxRatingNet Format
#[arg(short, long, default_value_t = RatingFormat::default())]
format: RatingFormat,
},
/// Retrieve full userdata
///
/// WARNING: This requires logging in & out of your account
Userdata {
#[arg(short, long)]
user_id: u32,
#[arg(long)]
skip_login: bool,
},
#[cfg(feature = "fetchall")]
@@ -46,11 +86,68 @@ pub enum Commands {
#[arg(short, long, default_value_t = 5)]
concurrency: usize,
},
#[cfg(feature = "fetchall")]
ScrapeAllB50 {
#[arg(short, long, default_value_t = 5)]
concurrency: usize,
#[arg(long, default_value_t = 1000)]
min_rating: i64,
#[arg(long, default_value_t = 16500)]
max_rating: i64,
},
#[cfg(feature = "fetchall")]
ScrapeAllRegion {
#[arg(short, long, default_value_t = 5)]
concurrency: usize,
#[arg(long, default_value_t = 1000)]
min_rating: i64,
#[arg(long, default_value_t = 16500)]
max_rating: i64,
},
#[cfg(feature = "fetchall")]
ScrapeAllRecord {
#[arg(short, long, default_value_t = 5)]
concurrency: usize,
#[arg(long, default_value_t = 10000)]
min_rating: i64,
#[arg(long, default_value_t = 16400)]
max_rating: i64,
},
#[cfg(feature = "fetchall")]
ListAllUserDump {},
#[cfg(feature = "fetchall")]
ScrapeAllB50Dump {},
#[cfg(feature = "fetchall")]
ScrapeAllRegionDump {},
#[cfg(feature = "fetchall")]
ScrapeAllRecordDump {},
Logout {
#[arg(short, long)]
user_id: u32,
/// Second-precision login unix timestamp, must be the same as on `login`
///
/// For official arcades, it's commonly the time `amdaemon.exe` starts
///
/// For unofficial clients, it depends.
#[arg(short, long)]
timestamp: u64,
},
}
#[derive(Debug, Default, EnumString)]
#[strum(serialize_all = "snake_case")]
pub enum RatingFormat {
#[default]
/// Official API response
Origin,
/// dxrating.net format
DxRatingNet,
/// dxrating.net image gen payload
DxRatingPayload,
}

View File

@@ -1,24 +1,36 @@
use std::sync::atomic::{AtomicBool, Ordering};
use std::{
sync::{
Arc,
atomic::{AtomicBool, Ordering},
},
time::SystemTime,
};
use nyquest_preset::nyquest::ClientBuilder;
use palc::Parser;
use spdlog::{Level, LevelFilter::MoreSevereEqual};
use spdlog::{Level, LevelFilter::MoreSevereEqual, sink::StdStreamSink, terminal_style::StyleMode};
use sdgb_api::{
all_net::QRCode,
auth_lite::{SDGB, SDHJ, delivery_raw},
title::{
MaiVersionExt, Sdgb1_40, Sdgb1_50,
MaiVersionExt, Sdgb1_50,
helper::get_user_all_music,
methods::APIMethod,
model::{
GetUserDataApi, GetUserDataApiResp, GetUserPreviewApi, GetUserPreviewApiResp, Ping,
PingResp, UserLogoutApi, UserLogoutApiResp,
DataVersion, DxCalculatedEntries, DxMusicRecord, DxRatingNet, GetUserDataApi,
GetUserDataApiResp, GetUserPreviewApi, GetUserPreviewApiResp, GetUserRatingApi,
GetUserRatingApiResp, Ping, PingResp, UserLoginApiResp, UserLogoutApi,
UserLogoutApiResp,
},
},
};
use spdlog::{error, info, warn};
use crate::{commands::Cli, utils::login_action};
use crate::{
commands::{Cli, Commands, RatingFormat},
utils::{human_readable_display, json_display, login_action},
};
#[cfg(feature = "fetchall")]
mod cache;
@@ -32,11 +44,20 @@ static EARLY_QUIT: AtomicBool = AtomicBool::new(false);
async fn main() -> Result<(), Box<dyn snafu::Error>> {
nyquest_preset::register();
if cfg!(debug_assertions) {
spdlog::default_logger().set_level_filter(MoreSevereEqual(Level::Debug));
} else {
spdlog::default_logger().set_level_filter(MoreSevereEqual(Level::Info));
}
let logger = spdlog::default_logger().fork_with(|log| {
log.set_level_filter(MoreSevereEqual(if cfg!(debug_assertions) {
Level::Debug
} else {
Level::Info
}));
let sink = StdStreamSink::builder()
.stderr()
.style_mode(StyleMode::Always)
.build()?;
*log.sinks_mut() = vec![Arc::new(sink)];
Ok(())
})?;
spdlog::swap_default_logger(logger);
ctrlc::set_handler(|| {
if EARLY_QUIT.load(Ordering::Relaxed) {
@@ -48,26 +69,103 @@ async fn main() -> Result<(), Box<dyn snafu::Error>> {
}
})?;
let Cli { command } = <Cli as Parser>::parse();
let Cli {
command,
machine_readable,
} = <Cli as Parser>::parse();
let human_readable = !machine_readable;
let client = ClientBuilder::default().build_async().await?;
// TODO: refactor via enum_dispatch
match command {
commands::Commands::Logout { user_id } => {
Commands::MusicDetail { user_id, format } => {
let music_detail = get_user_all_music(&client, user_id).await?;
let details = music_detail
.user_music_list
.iter()
.map(|m| &m.user_music_detail_list)
.flatten();
match (human_readable, format) {
(true, _) => {
let mut count = 0;
for detail in details {
println!("{detail}");
println!("----------");
count += 1;
}
println!("共查询到 {count} 条记录!");
}
(false, RatingFormat::Origin) => json_display(music_detail)?,
(false, RatingFormat::DxRatingNet) => {
let dx_export = Vec::from_iter(
details
.map(|music| {
DxMusicRecord::try_from(music).inspect_err(|e| {
warn!("failed to process {}: {e}", music.music_id)
})
})
.flatten(),
);
json_display(dx_export)?;
}
(_, format) => {
error!("{format:?} was not supported yet");
json_display(())?;
}
}
}
Commands::Rating { user_id, format } => {
let rating: GetUserRatingApiResp = Sdgb1_50::request(
&client,
APIMethod::GetUserRatingApi,
user_id,
GetUserRatingApi { user_id },
)
.await?;
match (human_readable, format) {
(true, _) => println!("{rating}"),
(false, RatingFormat::Origin) => json_display(rating)?,
(false, RatingFormat::DxRatingNet) => {
let mut data = DxCalculatedEntries::from_user_rating_lossy(&rating.user_rating);
let mut records = data.b35;
records.append(&mut data.b15);
json_display(records)?;
}
(false, RatingFormat::DxRatingPayload) => {
let data = DxCalculatedEntries::from_user_rating_lossy(&rating.user_rating);
let payload = DxRatingNet {
calculated_entries: data,
version: DataVersion::Prism,
region: "_generic",
};
json_display(payload)?;
}
}
}
Commands::Logout { user_id, timestamp } => {
let logout: UserLogoutApiResp = Sdgb1_50::request(
&client,
APIMethod::UserLogoutApi,
user_id,
UserLogoutApi {
user_id,
date_time: timestamp,
..Default::default()
},
)
.await?;
println!("{logout:?}");
if human_readable {
println!("啥都木有");
} else {
json_display(logout)?;
}
}
commands::Commands::Preview { user_id } => {
Commands::Preview { user_id } => {
let preview: GetUserPreviewApiResp = Sdgb1_50::request(
&client,
APIMethod::GetUserPreviewApi,
@@ -76,30 +174,32 @@ async fn main() -> Result<(), Box<dyn snafu::Error>> {
)
.await?;
println!("{preview}");
human_readable_display(preview, human_readable)?;
}
commands::Commands::Ping => {
let decoded: PingResp = Sdgb1_40::request(
&client,
APIMethod::Ping,
"",
Ping {}, // note: must not be `Ping`, or serde_json serializes to nothing
)
.await?;
info!("sdgb 1.40 resp: {decoded}");
Commands::Ping => {
let time = SystemTime::now();
let decoded: PingResp =
Sdgb1_50::request(&client, APIMethod::Ping, "", Ping {}).await?;
info!("sdgb 1.50 resp: {decoded}");
info!(
"sdgb 1.50 resp: {decoded}, {}ms",
time.elapsed().unwrap_or_default().as_millis()
);
}
commands::Commands::QRLogin { ref qrcode_content } => {
Commands::QRLogin { ref qrcode_content } => {
let qrcode = QRCode { qrcode_content };
match qrcode.login(&client).await {
let resp = qrcode.login(&client).await;
match &resp {
Ok(user_id) => info!("login succeed: {user_id}"),
Err(e) => error!("login failed: {e}"),
}
if !human_readable {
json_display(resp)?;
}
}
commands::Commands::AuthLite { title_ver, variant } => {
Commands::AuthLite { title_ver, variant } => {
let resp = match variant {
commands::AuthLiteVariant::SDGB => delivery_raw::<SDGB>(&client, title_ver).await?,
commands::AuthLiteVariant::SDHJ => delivery_raw::<SDHJ>(&client, title_ver).await?,
@@ -108,14 +208,14 @@ async fn main() -> Result<(), Box<dyn snafu::Error>> {
}
#[cfg(feature = "fetchall")]
commands::Commands::ListAllUser { concurrency } => {
use futures_util::StreamExt;
use sdgb_api::bincode::borrow_decode_from_slice;
use std::io::{self, BufRead};
Commands::ListAllUser { concurrency } => {
use crate::{cache::PLAYERS, utils::helpers::cached_concurrent_fetch};
use sdgb_api::title::methods::GetUserPreviewApiExt;
use std::io::BufRead as _;
let mut user_ids = Vec::new();
{
let mut stdin = io::stdin().lock();
let mut stdin = std::io::stdin().lock();
let mut buf = String::new();
while stdin.read_line(&mut buf).is_ok_and(|size| size != 0) {
@@ -129,104 +229,149 @@ async fn main() -> Result<(), Box<dyn snafu::Error>> {
}
}
let _ = cache::init_db();
let read = cache::read_txn()?;
let write = cache::write_txn()?;
let config = sdgb_api::bincode::config::Configuration::<
sdgb_api::bincode::config::LittleEndian,
>::default()
.with_no_limit();
cached_concurrent_fetch::<GetUserPreviewApiExt>(
user_ids,
&client,
concurrency,
PLAYERS,
)
.await?;
}
info!("number of user_id: {}", user_ids.len());
#[cfg(feature = "fetchall")]
Commands::ScrapeAllRecord {
concurrency,
min_rating,
max_rating,
} => {
use crate::{
cache::{PLAYERS, RECORDS},
utils::helpers::{cached_concurrent_fetch_userfn, read_cache},
};
let collect = futures_util::stream::iter(user_ids)
.map(async |user_id| {
{
let cache_table = cache::open_table_read(&read)?;
let data = cache_table.get(user_id)?;
if let Some(data) = data {
let decoded: (GetUserPreviewApiResp, _) =
borrow_decode_from_slice(&data.value(), config)?;
let mut players: Vec<GetUserPreviewApiResp> = read_cache(PLAYERS)?;
players.retain(|p| p.player_rating >= min_rating && p.player_rating <= max_rating);
return Ok(decoded.0);
}
}
if EARLY_QUIT.load(Ordering::Relaxed) {
return Err("early skip due to ctrl-c")?;
}
let resp = Sdgb1_50::request::<_, GetUserPreviewApiResp>(
&client,
APIMethod::GetUserPreviewApi,
user_id,
GetUserPreviewApi { user_id },
)
.await;
match &resp {
Ok(resp) => {
use sdgb_api::bincode::encode_to_vec;
info!("found: {user_id}");
if let Ok(mut table) = cache::open_table(&write)
&& let Ok(encoded) = encode_to_vec(resp, config)
{
_ = table.insert(resp.user_id, encoded);
}
}
Err(sdgb_api::ApiError::JSON { .. }) => {}
Err(e) => {
error!("preview failed: {e}");
}
}
Result::<_, Box<dyn snafu::Error>>::Ok(resp?)
})
.buffer_unordered(concurrency) // slower to avoid being banned
.filter_map(async |r| r.ok())
.collect::<Vec<_>>()
.await;
drop(collect);
let _ = write.commit();
cached_concurrent_fetch_userfn(
players.iter().map(|p| p.user_id).collect::<Vec<u32>>(),
&client,
concurrency,
RECORDS,
get_user_all_music,
)
.await?;
}
#[cfg(feature = "fetchall")]
commands::Commands::ListAllUserDump { .. } => {
use std::{fs::OpenOptions, io::BufWriter};
Commands::ScrapeAllB50 {
concurrency,
min_rating,
max_rating,
} => {
use sdgb_api::title::methods::GetUserRatingApiExt;
use redb::ReadableTable;
use sdgb_api::bincode::{self, borrow_decode_from_slice};
use crate::{
cache::{B50, PLAYERS},
utils::helpers::{cached_concurrent_fetch, read_cache},
};
use crate::cache::{open_table_read, read_txn};
let mut players: Vec<GetUserPreviewApiResp> = read_cache(PLAYERS)?;
players.retain(|p| p.player_rating >= min_rating && p.player_rating <= max_rating);
let txn = read_txn()?;
let table = open_table_read(&txn)?;
cached_concurrent_fetch::<GetUserRatingApiExt>(
players.iter().map(|p| p.user_id).collect::<Vec<u32>>(),
&client,
concurrency,
B50,
)
.await?;
}
#[cfg(feature = "fetchall")]
Commands::ScrapeAllRegion {
concurrency,
min_rating,
max_rating,
} => {
use sdgb_api::title::methods::GetUserRegionApiExt;
let config = bincode::config::Configuration::<bincode::config::LittleEndian>::default()
.with_no_limit();
use crate::{
cache::{PLAYERS, REGIONS},
utils::helpers::{cached_concurrent_fetch, read_cache},
};
let user_ids = table
.iter()?
.flatten()
.map(|d| borrow_decode_from_slice(&d.1.value(), config))
.flatten()
.map(|(value, _)| value)
.collect::<Vec<GetUserPreviewApiResp>>();
let mut players: Vec<GetUserPreviewApiResp> = read_cache(PLAYERS)?;
players.retain(|p| p.player_rating >= min_rating && p.player_rating <= max_rating);
let file = OpenOptions::new()
.create(true)
.truncate(true)
.write(true)
.open("players.json")?;
file.lock()?;
let writer = BufWriter::new(file);
serde_json::to_writer(writer, &user_ids)?;
info!("dumped {} user id", user_ids.len());
cached_concurrent_fetch::<GetUserRegionApiExt>(
players.iter().map(|p| p.user_id).collect::<Vec<u32>>(),
&client,
concurrency,
REGIONS,
)
.await?;
}
commands::Commands::Userdata { user_id } => {
#[cfg(feature = "fetchall")]
Commands::ListAllUserDump {} => {
use crate::{
cache::PLAYERS,
utils::helpers::{dump_parquet, read_cache},
};
let players: Vec<GetUserPreviewApiResp> = read_cache(PLAYERS)?;
dump_parquet(players, "players.parquet")?;
}
#[cfg(feature = "fetchall")]
Commands::ScrapeAllRegionDump {} => {
use crate::{
cache::REGIONS,
utils::helpers::{dump_parquet, read_cache},
};
use sdgb_api::title::model::{GetUserRegionApiResp, UserRegionFlatten};
let regions: Vec<GetUserRegionApiResp> = read_cache(REGIONS)?;
let regions_flat = regions
.into_iter()
.map(Vec::<UserRegionFlatten>::from)
.flatten()
.collect::<Vec<_>>();
dump_parquet(regions_flat, "regions.parquet")?;
}
#[cfg(feature = "fetchall")]
Commands::ScrapeAllRecordDump {} => {
use crate::{
cache::RECORDS,
utils::helpers::{dump_parquet, read_cache},
};
use sdgb_api::title::model::GetUserMusicApiResp;
use sdgb_api::title::model::UserMusicDetailFlatten;
let records: Vec<GetUserMusicApiResp> = read_cache(RECORDS)?;
dump_parquet(
records
.into_iter()
.map(|resp| {
resp.user_music_list
.into_iter()
.map(|music| music.user_music_detail_list)
.flatten()
.map(move |detail| UserMusicDetailFlatten::new(resp.user_id, detail))
})
.flatten()
.collect::<Vec<UserMusicDetailFlatten>>(),
"records.parquet",
)?;
}
#[cfg(feature = "fetchall")]
Commands::ScrapeAllB50Dump {} => {
use crate::{cache::B50, utils::helpers::dump_json};
dump_json::<GetUserRatingApiResp>("b50.json", B50)?;
}
Commands::Userdata {
user_id,
skip_login,
} => {
let action = async |_| match Sdgb1_50::request::<_, GetUserDataApiResp>(
&client,
APIMethod::GetUserDataApi,
@@ -242,7 +387,12 @@ async fn main() -> Result<(), Box<dyn snafu::Error>> {
error!("failed to get userdata: {e}");
}
};
login_action(&client, user_id, action).await?;
// userdata does not require loginResult
if skip_login {
action(UserLoginApiResp::default()).await;
} else {
login_action(&client, user_id, action).await?;
}
}
}

View File

@@ -0,0 +1,213 @@
use std::sync::Arc;
use std::{fs::OpenOptions, io::BufWriter};
use std::{path::Path, sync::atomic::Ordering};
use futures_util::StreamExt;
use nyquest_preset::nyquest::AsyncClient;
use parquet::basic::BrotliLevel;
use parquet::file::properties::WriterProperties;
use parquet::file::writer::SerializedFileWriter;
use parquet::record::RecordWriter;
use redb::ReadableTable;
use redb::TableDefinition;
use serde::Serialize;
use spdlog::{error, info};
use sdgb_api::title::MaiVersionExt;
use sdgb_api::title::{Sdgb1_50, methods::APIExt};
use sdgb_api::{ApiError, bincode};
use bincode::{BorrowDecode, Encode, borrow_decode_from_slice};
use crate::{EARLY_QUIT, cache};
#[allow(unused)]
pub fn read_cache_keys(
definition: TableDefinition<'_, u32, Vec<u8>>,
) -> Result<Vec<u32>, Box<dyn snafu::Error>> {
let txn = cache::read_txn()?;
let table = cache::open_table_ro(&txn, definition)?;
Ok(table
.iter()?
.flatten()
.map(|(value, _)| value.value())
.collect::<Vec<u32>>())
}
pub fn read_cache<D>(
definition: TableDefinition<'_, u32, Vec<u8>>,
) -> Result<Vec<D>, Box<dyn snafu::Error>>
where
D: for<'d> BorrowDecode<'d, ()>,
{
let txn = cache::read_txn()?;
let table = cache::open_table_ro(&txn, definition)?;
let config =
bincode::config::Configuration::<bincode::config::LittleEndian>::default().with_no_limit();
Ok(table
.iter()?
.flatten()
.map(|d| borrow_decode_from_slice(&d.1.value(), config))
.flatten()
.map(|(value, _)| value)
.collect::<Vec<D>>())
}
pub fn dump_parquet<D>(
data: impl Into<Vec<D>>,
output_path: impl AsRef<Path>,
) -> Result<(), Box<dyn snafu::Error>>
where
for<'a> &'a [D]: RecordWriter<D>,
{
let data = data.into();
let file = OpenOptions::new()
.create(true)
.truncate(true)
.write(true)
.open(output_path)?;
#[cfg(file_lock_ready)]
file.try_lock()?;
let writer = BufWriter::new(file);
let schema = data.as_slice().schema()?;
let props = Arc::new(
WriterProperties::builder()
.set_compression(parquet::basic::Compression::BROTLI(BrotliLevel::try_new(
6,
)?))
.build(),
);
let mut writer = SerializedFileWriter::new(writer, schema, props).unwrap();
let mut row_group = writer.next_row_group().unwrap();
data.as_slice().write_to_row_group(&mut row_group)?;
row_group.close()?;
writer.close().unwrap();
info!("dumped {} records", data.len());
Ok(())
}
pub fn dump_json<D>(
output_path: impl AsRef<Path>,
definition: TableDefinition<'_, u32, Vec<u8>>,
) -> Result<(), Box<dyn snafu::Error>>
where
D: for<'d> BorrowDecode<'d, ()> + Serialize,
{
let file = OpenOptions::new()
.create(true)
.truncate(true)
.write(true)
.open(output_path)?;
#[cfg(file_lock_ready)]
file.try_lock()?;
let data = read_cache::<D>(definition)?;
let writer = BufWriter::new(file);
serde_json::to_writer(writer, &data)?;
info!("dumped {} records", data.len());
Ok(())
}
pub async fn cached_concurrent_fetch<A: APIExt>(
user_ids: impl Into<Vec<u32>>,
client: &AsyncClient,
concurrency: usize,
definition: TableDefinition<'_, u32, Vec<u8>>,
) -> Result<(), Box<dyn snafu::Error>>
where
A::Payload: From<u32>,
A::Response: Encode + for<'a> BorrowDecode<'a, ()>,
{
cached_concurrent_fetch_userfn(
user_ids,
client,
concurrency,
definition,
async |client, user_id| {
Sdgb1_50::request_ext::<A>(client, A::Payload::from(user_id), user_id).await
},
)
.await
}
pub async fn cached_concurrent_fetch_userfn<R>(
user_ids: impl Into<Vec<u32>>,
client: &AsyncClient,
concurrency: usize,
definition: TableDefinition<'_, u32, Vec<u8>>,
scrape: impl AsyncFn(&AsyncClient, u32) -> Result<R, ApiError>,
) -> Result<(), Box<dyn snafu::Error>>
where
R: Encode + for<'a> BorrowDecode<'a, ()>,
{
let _ = cache::init_db();
let user_ids = user_ids.into();
let read = cache::read_txn()?;
let write = cache::write_txn()?;
let config = sdgb_api::bincode::config::Configuration::<
sdgb_api::bincode::config::LittleEndian,
>::default()
.with_no_limit();
info!("number of user_id: {}", user_ids.len());
let collect = futures_util::stream::iter(user_ids)
.map(async |user_id| {
{
let cache_table = cache::open_table_ro(&read, definition)?;
let data = cache_table.get(user_id)?;
if let Some(data) = data {
let decoded: (R, _) = borrow_decode_from_slice(&data.value(), config)?;
return Ok(decoded.0);
}
}
if EARLY_QUIT.load(Ordering::Relaxed) {
return Err("early skip due to ctrl-c")?;
}
let resp = scrape(&client, user_id).await;
match &resp {
Ok(resp) => {
use sdgb_api::bincode::encode_to_vec;
info!("fetched: {user_id}");
if let Ok(mut table) = cache::open_table(&write, definition)
&& let Ok(encoded) = encode_to_vec(resp, config)
{
_ = table.insert(user_id, encoded);
}
}
Err(sdgb_api::ApiError::JSON { .. }) => {}
Err(e) => {
error!("fetch failed: {e}");
}
}
Result::<_, Box<dyn snafu::Error>>::Ok(resp?)
})
.buffer_unordered(concurrency) // slower to avoid being banned
.filter_map(async |r| r.ok())
.collect::<Vec<_>>()
.await;
drop(collect);
let _ = write.commit();
Ok(())
}


@@ -1,3 +1,5 @@
use std::{fmt::Display, io::stdout};
use nyquest_preset::nyquest::AsyncClient;
use sdgb_api::{
ApiError,
@@ -7,6 +9,7 @@ use sdgb_api::{
model::{UserLoginApi, UserLoginApiResp, UserLogoutApi, UserLogoutApiResp},
},
};
use serde::Serialize;
use spdlog::info;
pub async fn login_action<R>(
@@ -14,11 +17,9 @@ pub async fn login_action<R>(
user_id: u32,
action: impl AsyncFnOnce(UserLoginApiResp) -> R,
) -> Result<R, ApiError> {
let login = UserLoginApi::new(user_id);
let login = UserLoginApi::new(user_id, true);
let date_time = login.date_time;
info!("login unix timestamp: {date_time}");
let login_resp: UserLoginApiResp =
Sdgb1_50::request(&client, APIMethod::UserLoginApi, user_id, login).await?;
@@ -44,3 +45,25 @@ pub async fn login_action<R>(
info!("logout: {logout_resp:?}");
Ok(return_data)
}
pub fn json_display(value: impl Serialize) -> Result<(), Box<dyn snafu::Error>> {
let lock = stdout().lock();
serde_json::to_writer_pretty(lock, &value)?;
Ok(())
}
pub fn human_readable_display(
value: impl Display + Serialize,
human_readable: bool,
) -> Result<(), Box<dyn snafu::Error>> {
if human_readable {
println!("{value}");
} else {
json_display(value)?;
}
Ok(())
}
#[cfg(feature = "fetchall")]
pub mod helpers;

utils/export_b50.py Normal file

@@ -0,0 +1,106 @@
import orjson as json
from typing import Callable
from datetime import datetime
from decimal import Decimal
from helpers import dx_rating, find_level, query_music_db, salted_hash_userid
def clean_b50(b50: dict[str, str | dict]):
urating: dict[str, list[dict[str, int]]] = b50["userRating"]
def add_rating(entry: dict[str, int]):
"""
```
{
"musicId": 11638,
"level": 2,
"romVersion": 24005,
"achievement": 988145
}
```
- level: EXPERT
- ver: DX, 1.40.05
- ach: 98.8145%
"""
entry["musicTitle"] = None
entry["difficulty"] = None
entry["dxRating"] = 0
music_info = query_music_db(entry["musicId"])
if music_info is None:
return
entry["musicTitle"] = music_info["name"]
levels = find_level(music_info, entry["level"])
if not levels:
return
level: dict[str, str | int] = levels.pop()
difficulty = level["difficulty"]
entry["difficulty"] = difficulty
entry["dxRating"] = dx_rating(
difficulty=Decimal(difficulty),
achievement=entry["achievement"],
)
for b35 in urating["ratingList"]:
add_rating(b35)
for b15 in urating["newRatingList"]:
add_rating(b15)
urating["rating"] = sum(
map(
lambda lst: sum(map(lambda entry: entry["dxRating"], urating[lst])),
["ratingList", "newRatingList"],
)
)
def record_time(*, _: list[datetime] = []):
last_time = _
if not last_time:
last_time.append(datetime.now())
else:
new = datetime.now()
diff = (new - last_time.pop()).total_seconds()
last_time.append(new)
return diff
def process(
clean_fields: Callable[[dict], None],
input_file: str,
output_file: str,
):
record_time()
with open(input_file, "rb") as f:
data = json.loads(f.read())
print(f"loaded, cost {record_time():.2f}s")
for entry in data:
entry["userId"] = salted_hash_userid(entry["userId"])
clean_fields(entry)
print(f"processed, cost {record_time():.2f}s")
with open(output_file, "wb") as f:
f.write(json.dumps(data))
print(f"written out, cost {record_time():.2f}s")
return data
def main():
process(
clean_b50,
"b50.json",
"b50_pub.json",
)
if __name__ == "__main__":
main()
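A worked decoding of the example entry in add_rating's docstring (illustrative, not part of the repo; the 0-4 difficulty-name order is an assumption consistent with "level: 2 -> EXPERT" above, and achievement is stored in units of 0.0001%):

from decimal import Decimal

DIFFICULTY_NAMES = ["BASIC", "ADVANCED", "EXPERT", "MASTER", "Re:MASTER"]  # assumed index order

entry = {"musicId": 11638, "level": 2, "romVersion": 24005, "achievement": 988145}
print(DIFFICULTY_NAMES[entry["level"]])       # EXPERT
print(Decimal(entry["achievement"]) / 10000)  # 98.8145 (%)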

utils/get_last_uid.py Normal file

@@ -0,0 +1,11 @@
import orjson as json
def main():
with open("players.json", "r", encoding="utf-8") as f:
d: list[dict[str, int | str]] = json.loads(f.read())
print(d[-1]["userId"])
if __name__ == "__main__":
main()

utils/hash_userid.py Normal file

@@ -0,0 +1,13 @@
from sys import argv
import polars as pl
import polars_hash as pl_hash
file = argv[1]
pl.scan_parquet(file).with_columns(
pl.col("user_id").cast(pl.String).add("Lt2N5xgjJOqRsT5qVt7wWYw6SqOPZDI7")
).with_columns(
pl_hash.col("user_id").chash.sha2_256().str.head(16)
).collect().write_parquet(
file.replace(".parquet", "_pub.parquet"), compression="zstd", compression_level=15
)
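A quick consistency check (illustrative sketch): this pipeline and salted_hash_userid in utils/helpers.py should agree, since both append the same salt to the decimal user id string and keep the first 16 hex characters of the SHA-256 digest (assuming polars-hash's sha2_256 returns the lowercase hex digest):

import hashlib

SALT = "Lt2N5xgjJOqRsT5qVt7wWYw6SqOPZDI7"
user_id = 12345678  # arbitrary example id
expected = hashlib.sha256(f"{user_id}{SALT}".encode("utf-8")).hexdigest()[:16]
print(expected)  # the value the expression chain above writes for this user_id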

utils/helpers.py Normal file

@@ -0,0 +1,105 @@
from decimal import Decimal, getcontext
import hashlib
import orjson as json
from diskcache import Cache
getcontext().prec = 28
CACHE = Cache("target")
def salted_hash_userid(user_id: int):
hex = CACHE.get(user_id)
if hex is not None:
return hex
SALT = b"Lt2N5xgjJOqRsT5qVt7wWYw6SqOPZDI7"
hash_uid = hashlib.sha256(f"{user_id}".encode("utf-8") + SALT)
result = hash_uid.hexdigest()[:16]
CACHE.add(user_id, result)
return result
def dx_rating(difficulty: Decimal, achievement: int) -> int:
# Constants
SSS_PLUS_THRESHOLD = Decimal("100.5")
SSS_PLUS_FACTOR = Decimal("0.224")
SSS_PRO_THRESHOLD = Decimal("100.4999")
SSS_PRO_FACTOR = Decimal("0.222")
SSS_THRESHOLD = Decimal("100.0")
SSS_FACTOR = Decimal("0.216")
SS_PLUS_PRO_THRESHOLD = Decimal("99.9999")
SS_PLUS_PRO_FACTOR = Decimal("0.214")
SS_PLUS_THRESHOLD = Decimal("99.5")
SS_PLUS_FACTOR = Decimal("0.211")
SS_THRESHOLD = Decimal("99.0")
SS_FACTOR = Decimal("0.208")
S_PLUS_PRO_THRESHOLD = Decimal("98.9999")
S_PLUS_PRO_FACTOR = Decimal("0.206")
S_PLUS_THRESHOLD = Decimal("98.0")
S_PLUS_FACTOR = Decimal("0.203")
S_THRESHOLD = Decimal("97.0")
S_FACTOR = Decimal("0.2")
AAA_PRO_THRESHOLD = Decimal("96.9999")
AAA_PRO_FACTOR = Decimal("0.176")
AAA_THRESHOLD = Decimal("94.0")
AAA_FACTOR = Decimal("0.168")
AA_THRESHOLD = Decimal("90.0")
AA_FACTOR = Decimal("0.152")
A_THRESHOLD = Decimal("80.0")
A_FACTOR = Decimal("0.136")
ach = Decimal(achievement) / Decimal("10000")
if ach > Decimal("101.0") or ach < A_THRESHOLD:
return 0
if ach >= SSS_PLUS_THRESHOLD:
factor = SSS_PLUS_FACTOR
ach = Decimal("100.5")
elif ach >= SSS_PRO_THRESHOLD:
factor = SSS_PRO_FACTOR
elif ach >= SSS_THRESHOLD:
factor = SSS_FACTOR
elif ach >= SS_PLUS_PRO_THRESHOLD:
factor = SS_PLUS_PRO_FACTOR
elif ach >= SS_PLUS_THRESHOLD:
factor = SS_PLUS_FACTOR
elif ach >= SS_THRESHOLD:
factor = SS_FACTOR
elif ach >= S_PLUS_PRO_THRESHOLD:
factor = S_PLUS_PRO_FACTOR
elif ach >= S_PLUS_THRESHOLD:
factor = S_PLUS_FACTOR
elif ach >= S_THRESHOLD:
factor = S_FACTOR
elif ach >= AAA_PRO_THRESHOLD:
factor = AAA_PRO_FACTOR
elif ach >= AAA_THRESHOLD:
factor = AAA_FACTOR
elif ach >= AA_THRESHOLD:
factor = AA_FACTOR
elif ach >= A_THRESHOLD:
factor = A_FACTOR
else:
return 0
result = (factor * difficulty * ach).quantize(Decimal("1."), rounding="ROUND_FLOOR")
return int(result)
with open("musicDB.json", "r", encoding="utf-8") as f:
MUSIC_DB = json.loads(f.read())
MUSIC_DB = {entry["id"]: entry for entry in MUSIC_DB}
def query_music_db(music_id: int):
music_info = MUSIC_DB.get(music_id)
if music_info is None:
return
return music_info
def find_level(music_info: dict, level_id: int):
return [level for level in music_info["levels"] if level["level"] == level_id]
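Two worked checks of dx_rating above (illustrative; running them requires musicDB.json to be present, since importing helpers loads it at module level):

from decimal import Decimal
from helpers import dx_rating

# SSS+: achievement is capped at 100.5% and uses factor 0.224
# floor(0.224 * 13.2 * 100.5) = floor(297.1584) = 297
assert dx_rating(Decimal("13.2"), 1005000) == 297

# 98.8145% falls in the S+ band (factor 0.203)
# floor(0.203 * 13.2 * 98.8145) = floor(264.783...) = 264
assert dx_rating(Decimal("13.2"), 988145) == 264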

utils/music_db_dump.py Normal file

@@ -0,0 +1,128 @@
# forked from maimaiDX-Api
import json
import xml.dom.minidom as minidom
from pathlib import Path
ONLY_REMOVED = True
def makeMusicDBJson():
"""
从 HDD 的文件来生成 music_db.json
推荐的是如果要国服用 那就用国际服的文件来生成
免得国服每次更新还要重新生成太麻烦
"""
# 记得改
A000_DIR = Path(
"C:/MaimaiDX/SDEZ-1.56-B/Standard/Package/Sinmai_Data/StreamingAssets/A000"
)
OPTION_DIR = Path("C:/MaimaiDX/SDGA-1.50-G/NoMovieData/StreamingAssets")
music_db: list[dict[str, str | int | list[dict[str, str | int]]]] = []
DEST_PATH = Path("./musicDB.json")
dup_count = 0
music_ids = set()
music_folders = [f for f in (A000_DIR / "music").iterdir() if f.is_dir()]
for option_dir in OPTION_DIR.iterdir():
# only removed ones
if ONLY_REMOVED and option_dir.name != "A100":
continue
if (option_dir / "music").exists():
music_folders.extend(
[f for f in (option_dir / "music").iterdir() if f.is_dir()]
)
for folder in music_folders:
xml_path = folder / "Music.xml"
if xml_path.exists():
xml = minidom.parse(xml_path.as_posix())
data = xml.getElementsByTagName("MusicData")[0]
music_id = int(
data.getElementsByTagName("name")[0]
.getElementsByTagName("id")[0]
.firstChild.data
)
music_name = (
data.getElementsByTagName("name")[0]
.getElementsByTagName("str")[0]
.firstChild.data
)
music_version = (
data.getElementsByTagName("AddVersion")[0]
.getElementsByTagName("id")[0]
.firstChild.data
)
def handle_note(note: minidom.Element):
if (
"false"
== note.getElementsByTagName("isEnable")
.pop()
.firstChild.data.lower()
):
return
if music_id >= 100000:
level = 5
else:
level = int(
note.getElementsByTagName("file")
.pop()
.getElementsByTagName("path")
.pop()
.firstChild.data[-5]
)
difficulty_int = (
note.getElementsByTagName("level").pop().firstChild.data
)
difficulty_dec = (
note.getElementsByTagName("levelDecimal").pop().firstChild.data
)
difficulty = f"{difficulty_int}.{difficulty_dec}"
return level, difficulty
music_levels = [
{"level": level, "difficulty": difficulty}
for level, difficulty in filter(
lambda d: d is not None,
(
handle_note(note)
for note in data.getElementsByTagName("notesData")[
0
].getElementsByTagName("Notes")
),
)
]
if music_id not in music_ids:
music_ids.add(music_id)
music_db.append(
{
"id": music_id,
"name": music_name,
"version": int(music_version),
"levels": music_levels,
}
)
else:
# e.g. SDEZ-only song
dup_count += 1
print(f"Found {len(music_db)} music data")
print(f"Found {dup_count} duplications")
music_db = sorted(
music_db,
key=lambda m: m["id"],
)
with open(DEST_PATH, "w", encoding="utf-8") as f:
json.dump(music_db, f, ensure_ascii=False, indent=4)
if __name__ == "__main__":
makeMusicDBJson()
print("Done.")