apps/labrinth/src/routes/v2/version_file.rs (20 additions, 19 deletions)
@@ -291,27 +291,28 @@ pub async fn update_files(
hashes: update_data.hashes,
};

let response =
v3::version_file::update_files(pool, redis, web::Json(update_data))
.await
.or_else(v2_reroute::flatten_404_error)?;
let returned_versions = match v3::version_file::update_files(
pool,
redis,
web::Json(update_data),
)
.await
{
Ok(resp) => resp,
Err(ApiError::NotFound) => return Ok(HttpResponse::NotFound().body("")),
Err(err) => return Err(err),
};

// Convert response to V2 format
match v2_reroute::extract_ok_json::<HashMap<String, Version>>(response)
.await
{
Ok(returned_versions) => {
let v3_versions = returned_versions
.into_iter()
.map(|(hash, version)| {
let v2_version = LegacyVersion::from(version);
(hash, v2_version)
})
.collect::<HashMap<_, _>>();
Ok(HttpResponse::Ok().json(v3_versions))
}
Err(response) => Ok(response),
}
let v3_versions = returned_versions
.0
.into_iter()
.map(|(hash, version)| {
let v2_version = LegacyVersion::from(version);
(hash, v2_version)
})
.collect::<HashMap<_, _>>();
Ok(HttpResponse::Ok().json(v3_versions))
}

#[derive(Serialize, Deserialize)]
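For reference, the reworked v2 handler above now consumes the v3 handler's `web::Json` result directly, turns `ApiError::NotFound` into an empty-body 404 (the behaviour `flatten_404_error` used to provide), and then converts each returned version to its legacy shape. Below is a minimal sketch of that control flow, using tiny stand-in types rather than the real actix-web and labrinth ones:

```rust
use std::collections::HashMap;

// Stand-ins for the labrinth types (Version, LegacyVersion, ApiError), kept
// deliberately tiny so the control flow is the only thing on display.
#[derive(Clone)]
struct Version {
    id: String,
}

struct LegacyVersion {
    id: String,
}

impl From<Version> for LegacyVersion {
    fn from(v: Version) -> Self {
        LegacyVersion { id: v.id }
    }
}

enum ApiError {
    NotFound,
    Other,
}

// What the v2 handler answers with: an empty-body 404, or hash -> legacy version.
enum V2Response {
    NotFound,
    Ok(HashMap<String, LegacyVersion>),
}

fn v2_update_files(
    v3_result: Result<HashMap<String, Version>, ApiError>,
) -> Result<V2Response, ApiError> {
    // A missing hash is not a hard failure for v2 clients: answer 404 with an
    // empty body, which is what `flatten_404_error` used to arrange.
    let returned_versions = match v3_result {
        Ok(map) => map,
        Err(ApiError::NotFound) => return Ok(V2Response::NotFound),
        Err(err) => return Err(err),
    };

    // Convert each v3 version to its legacy (v2) representation, keyed by hash.
    let legacy = returned_versions
        .into_iter()
        .map(|(hash, version)| (hash, LegacyVersion::from(version)))
        .collect::<HashMap<_, _>>();
    Ok(V2Response::Ok(legacy))
}

fn main() {
    let found = v2_update_files(Ok(HashMap::from([(
        "H".to_string(),
        Version { id: "V1".to_string() },
    )])));
    assert!(matches!(found, Ok(V2Response::Ok(map)) if map["H"].id == "V1"));

    let missing = v2_update_files(Err(ApiError::NotFound));
    assert!(matches!(missing, Ok(V2Response::NotFound)));
}
```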
apps/labrinth/src/routes/v3/version_file.rs (63 additions, 7 deletions)
@@ -28,7 +28,10 @@ pub fn config(cfg: &mut web::ServiceConfig) {
);
cfg.service(
web::scope("version_files")
// DEPRECATED - use `update_many` instead
// see `fn update_files` comment
.route("update", web::post().to(update_files))
.route("update_many", web::post().to(update_files_many))
.route("update_individual", web::post().to(update_individual_files))
.route("", web::post().to(get_versions_from_hashes)),
);
@@ -331,11 +334,60 @@ pub struct ManyUpdateData {
pub version_types: Option<Vec<VersionType>>,
}

pub async fn update_files_many(
pool: web::Data<ReadOnlyPgPool>,
redis: web::Data<RedisPool>,
update_data: web::Json<ManyUpdateData>,
) -> Result<web::Json<HashMap<String, Vec<models::projects::Version>>>, ApiError>
{
update_files_internal(pool, redis, update_data)
.await
.map(web::Json)
}

// DEPRECATED - use `update_files_many` instead
//
// This returns a `HashMap<String, Version>` where the key is the file hash.
// But one file hash can have multiple versions associated with it.
// So you can end up in a situation where:
// - file with hash H is linked to versions V1, V2
// - user downloads mod with file hash H
// - every time the app checks for updates:
// - it asks the backend, what is the version of H?
// - backend says V1
// - the app asks, is there a compatible version newer than V1?
// - backend says, yes, V2
// - user updates to V2, but it's the same file, so it's the same hash H,
// and the update button stays
//
// By using `update_files_many`, we can have the app know that both V1 and V2
// are linked to H
//
// This endpoint is kept for backwards compat, since it still works in 99% of
// cases where H only maps to a single version, and for older clients. This
// endpoint will only take the first version for each file hash.
pub async fn update_files(
pool: web::Data<ReadOnlyPgPool>,
redis: web::Data<RedisPool>,
update_data: web::Json<ManyUpdateData>,
) -> Result<HttpResponse, ApiError> {
) -> Result<web::Json<HashMap<String, models::projects::Version>>, ApiError> {
let file_hashes_to_versions =
update_files_internal(pool, redis, update_data).await?;
let resp = file_hashes_to_versions
.into_iter()
.filter_map(|(hash, versions)| {
let first_version = versions.into_iter().next()?;
Some((hash, first_version))
})
.collect();
Ok(web::Json(resp))
}

async fn update_files_internal(
pool: web::Data<ReadOnlyPgPool>,
redis: web::Data<RedisPool>,
update_data: web::Json<ManyUpdateData>,
) -> Result<HashMap<String, Vec<models::projects::Version>>, ApiError> {
let algorithm = update_data
.algorithm
.clone()
@@ -385,21 +437,25 @@ pub async fn update_files(
)
.await?;

let mut response = HashMap::new();
let mut response = HashMap::<String, Vec<models::projects::Version>>::new();
for file in files {
if let Some(version) = versions
.iter()
.find(|x| x.inner.project_id == file.project_id)
&& let Some(hash) = file.hashes.get(&algorithm)
{
response.insert(
hash.clone(),
models::projects::Version::from(version.clone()),
);
// add the version info for this file hash
// note: one file hash can have multiple versions associated with it
// just having a `HashMap<String, Version>` would mean that some version info is lost
// so we return a vec of them instead
response
.entry(hash.clone())
.or_default()
.push(models::projects::Version::from(version.clone()));
}
}

Ok(HttpResponse::Ok().json(response))
Ok(response)
}

#[derive(Serialize, Deserialize)]
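To make the deprecation note in `update_files` concrete: the old `version_files/update` response keeps only one `Version` per file hash, so a client that already has the newest file can be told to "update" to the very same file over and over, whereas `version_files/update_many` keeps every version linked to the hash. A minimal, self-contained sketch of the two response shapes (the `Version` struct here is a stand-in for `models::projects::Version`, not the real model):

```rust
use std::collections::HashMap;

// Stand-in for models::projects::Version; the real struct has many more fields.
#[derive(Clone, PartialEq)]
struct Version {
    id: &'static str,
}

fn main() {
    // One file hash H linked to two versions, V1 and V2, that ship the same file.
    let hash = "H".to_string();
    let linked = vec![Version { id: "V1" }, Version { id: "V2" }];

    // Old `version_files/update` shape: one version per hash, the first one wins.
    let old_shape: HashMap<String, Version> =
        HashMap::from([(hash.clone(), linked[0].clone())]);

    // New `version_files/update_many` shape: every version linked to the hash.
    let new_shape: HashMap<String, Vec<Version>> =
        HashMap::from([(hash.clone(), linked.clone())]);

    // The user has already installed V2, whose file is the same file, hash H.
    let installed = Version { id: "V2" };

    // Old shape: the backend reports V1 for H, which does not match the installed
    // version, so the app keeps offering an "update" that ends in the same file.
    assert!(old_shape[&hash] != installed);

    // New shape: the app can see that the installed version is one of the
    // versions linked to H, so no update needs to be offered.
    assert!(new_shape[&hash].contains(&installed));
}
```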
packages/app-lib/src/state/cache.rs (26 additions, 24 deletions)
@@ -1307,9 +1307,9 @@ impl CachedEntry {
let variations =
futures::future::try_join_all(filtered_keys.iter().map(
|((loaders_key, game_version), hashes)| {
fetch_json::<HashMap<String, Version>>(
fetch_json::<HashMap<String, Vec<Version>>>(
Method::POST,
concat!(env!("MODRINTH_API_URL"), "version_files/update"),
concat!(env!("MODRINTH_API_URL"), "version_files/update_many"),
None,
Some(serde_json::json!({
"algorithm": "sha1",
@@ -1330,28 +1330,30 @@
&filtered_keys[index];

for hash in hashes {
let version = variation.remove(hash);

if let Some(version) = version {
let version_id = version.id.clone();
vals.push((
CacheValue::Version(version).get_entry(),
false,
));

vals.push((
CacheValue::FileUpdate(CachedFileUpdate {
hash: hash.clone(),
game_version: game_version.clone(),
loaders: loaders_key
.split('+')
.map(|x| x.to_string())
.collect(),
update_version_id: version_id,
})
.get_entry(),
true,
));
let versions = variation.remove(hash);

if let Some(versions) = versions {
for version in versions {
let version_id = version.id.clone();
vals.push((
CacheValue::Version(version).get_entry(),
false,
));

vals.push((
CacheValue::FileUpdate(CachedFileUpdate {
hash: hash.clone(),
game_version: game_version.clone(),
loaders: loaders_key
.split('+')
.map(|x| x.to_string())
.collect(),
update_version_id: version_id,
})
.get_entry(),
true,
));
}
} else {
vals.push((
CacheValueType::FileUpdate.get_empty_entry(
packages/app-lib/src/state/profiles.rs (9 additions, 11 deletions)
@@ -1009,17 +1009,15 @@ impl Profile {
initial_file.file_name
);

let update_version_id = if let Some(update) = file_updates
.iter()
.find(|x| x.hash == hash.hash)
.map(|x| x.update_version_id.clone())
{
if let Some(metadata) = &file {
if metadata.version_id != update {
Some(update)
} else {
None
}
let update_version_id = if let Some(metadata) = &file {
let update_ids: Vec<String> = file_updates
.iter()
.filter(|x| x.hash == hash.hash)
.map(|x| x.update_version_id.clone())
.collect();

if !update_ids.contains(&metadata.version_id) {
update_ids.into_iter().next()
} else {
None
}
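The profiles.rs hunk above changes when an update is surfaced on the client side: instead of comparing the installed version against a single cached id, it collects every `update_version_id` recorded for the file's hash and only proposes an update when the installed version is not among them. A condensed sketch of that decision, with a hypothetical `pick_update` helper and a simplified `FileUpdate` stand-in for `CachedFileUpdate`:

```rust
/// Simplified stand-in for a CachedFileUpdate row: "this file hash can update
/// to this version id".
struct FileUpdate {
    hash: String,
    update_version_id: String,
}

/// Hypothetical helper mirroring the selection logic from the hunk above:
/// return a version to update to, or None when the installed version is
/// already one of the versions linked to this file hash.
fn pick_update(
    file_hash: &str,
    installed_version_id: &str,
    file_updates: &[FileUpdate],
) -> Option<String> {
    let update_ids: Vec<String> = file_updates
        .iter()
        .filter(|u| u.hash == file_hash)
        .map(|u| u.update_version_id.clone())
        .collect();

    if update_ids.contains(&installed_version_id.to_string()) {
        // Same file, same hash: an "update" would be a no-op, so offer nothing.
        None
    } else {
        // Otherwise suggest the first candidate, as the profile code does.
        update_ids.into_iter().next()
    }
}

fn main() {
    let updates = vec![
        FileUpdate { hash: "H".into(), update_version_id: "V1".into() },
        FileUpdate { hash: "H".into(), update_version_id: "V2".into() },
    ];

    // The installed file has hash H and its metadata points at V2, which is one
    // of the linked versions: no update is surfaced any more.
    assert_eq!(pick_update("H", "V2", &updates), None);

    // The metadata points at some other version: the first candidate is offered.
    assert_eq!(pick_update("H", "V0", &updates), Some("V1".to_string()));
}
```

In other words, updating to a version that ships the same file no longer leaves the update prompt stuck.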