perf: reduce useless data reads

This commit is contained in:
mokurin000
2025-09-17 15:39:04 +08:00
parent a7777d127a
commit ee23914e29

View File

@@ -169,10 +169,8 @@ where
{
let cache_table = cache::open_table_ro(&read, definition)?;
let data = cache_table.get(user_id)?;
if let Some(data) = data {
let decoded: (R, _) = borrow_decode_from_slice(&data.value(), config)?;
return Ok(decoded.0);
if data.is_some() {
return Ok(());
}
}
@@ -181,16 +179,14 @@ where
}
let resp = scrape(&client, user_id).await;
match &resp {
Ok(resp) => {
use sdgb_api::bincode::encode_to_vec;
info!("fetched: {user_id}");
if let Ok(mut table) = cache::open_table(&write, definition)
&& let Ok(encoded) = encode_to_vec(resp, config)
{
info!("encode length for {user_id}: {}", encoded.len());
_ = table.insert(user_id, encoded);
}
}
@@ -200,10 +196,9 @@ where
}
}
Result::<_, Box<dyn snafu::Error>>::Ok(resp?)
Result::<_, Box<dyn snafu::Error>>::Ok(())
})
.buffer_unordered(concurrency) // slower to avoid being banned
.filter_map(async |r| r.ok())
.collect::<Vec<_>>()
.await;
drop(collect);