diff --git a/apps/labrinth/src/routes/v2/version_file.rs b/apps/labrinth/src/routes/v2/version_file.rs
index 1d843b5b06..5f2df286dc 100644
--- a/apps/labrinth/src/routes/v2/version_file.rs
+++ b/apps/labrinth/src/routes/v2/version_file.rs
@@ -291,27 +291,28 @@ pub async fn update_files(
         hashes: update_data.hashes,
     };

-    let response =
-        v3::version_file::update_files(pool, redis, web::Json(update_data))
-            .await
-            .or_else(v2_reroute::flatten_404_error)?;
+    let returned_versions = match v3::version_file::update_files(
+        pool,
+        redis,
+        web::Json(update_data),
+    )
+    .await
+    {
+        Ok(resp) => resp,
+        Err(ApiError::NotFound) => return Ok(HttpResponse::NotFound().body("")),
+        Err(err) => return Err(err),
+    };

     // Convert response to V2 format
-    match v2_reroute::extract_ok_json::<HashMap<String, Version>>(response)
-        .await
-    {
-        Ok(returned_versions) => {
-            let v3_versions = returned_versions
-                .into_iter()
-                .map(|(hash, version)| {
-                    let v2_version = LegacyVersion::from(version);
-                    (hash, v2_version)
-                })
-                .collect::<HashMap<_, _>>();
-            Ok(HttpResponse::Ok().json(v3_versions))
-        }
-        Err(response) => Ok(response),
-    }
+    let v3_versions = returned_versions
+        .0
+        .into_iter()
+        .map(|(hash, version)| {
+            let v2_version = LegacyVersion::from(version);
+            (hash, v2_version)
+        })
+        .collect::<HashMap<_, _>>();
+    Ok(HttpResponse::Ok().json(v3_versions))
 }

 #[derive(Serialize, Deserialize)]
diff --git a/apps/labrinth/src/routes/v3/version_file.rs b/apps/labrinth/src/routes/v3/version_file.rs
index 29a7fedda8..bdf060ef11 100644
--- a/apps/labrinth/src/routes/v3/version_file.rs
+++ b/apps/labrinth/src/routes/v3/version_file.rs
@@ -28,7 +28,10 @@ pub fn config(cfg: &mut web::ServiceConfig) {
     );
     cfg.service(
         web::scope("version_files")
+            // DEPRECATED - use `update_many` instead
+            // see `fn update_files` comment
             .route("update", web::post().to(update_files))
+            .route("update_many", web::post().to(update_files_many))
             .route("update_individual", web::post().to(update_individual_files))
             .route("", web::post().to(get_versions_from_hashes)),
     );
@@ -331,11 +334,60 @@ pub struct ManyUpdateData {
     pub version_types: Option<Vec<VersionType>>,
 }

+pub async fn update_files_many(
+    pool: web::Data<PgPool>,
+    redis: web::Data<RedisPool>,
+    update_data: web::Json<ManyUpdateData>,
+) -> Result<web::Json<HashMap<String, Vec<models::projects::Version>>>, ApiError>
+{
+    update_files_internal(pool, redis, update_data)
+        .await
+        .map(web::Json)
+}
+
+// DEPRECATED - use `update_files_many` instead
+//
+// This returns a `HashMap` where the key is the file hash.
+// But one file hash can have multiple versions associated with it.
+// So you can end up in a situation where:
+// - file with hash H is linked to versions V1, V2
+// - user downloads mod with file hash H
+// - every time the app checks for updates:
+//   - it asks the backend, what is the version of H?
+//   - backend says V1
+//   - the app asks, is there a compatible version newer than V1?
+//   - backend says, yes, V2
+// - user updates to V2, but it's the same file, so it's the same hash H,
+//   and the update button stays
+//
+// By using `update_files_many`, we can have the app know that both V1 and V2
+// are linked to H
+//
+// This endpoint is kept for backwards compat, since it still works in 99% of
+// cases where H only maps to a single version, and for older clients. This
+// endpoint will only take the first version for each file hash.
 pub async fn update_files(
     pool: web::Data<PgPool>,
     redis: web::Data<RedisPool>,
     update_data: web::Json<ManyUpdateData>,
-) -> Result<HttpResponse, ApiError> {
+) -> Result<web::Json<HashMap<String, models::projects::Version>>, ApiError> {
+    let file_hashes_to_versions =
+        update_files_internal(pool, redis, update_data).await?;
+    let resp = file_hashes_to_versions
+        .into_iter()
+        .filter_map(|(hash, versions)| {
+            let first_version = versions.into_iter().next()?;
+            Some((hash, first_version))
+        })
+        .collect();
+    Ok(web::Json(resp))
+}
+
+async fn update_files_internal(
+    pool: web::Data<PgPool>,
+    redis: web::Data<RedisPool>,
+    update_data: web::Json<ManyUpdateData>,
+) -> Result<HashMap<String, Vec<models::projects::Version>>, ApiError> {
     let algorithm = update_data
         .algorithm
         .clone()
@@ -385,21 +437,25 @@
     )
     .await?;

-    let mut response = HashMap::new();
+    let mut response = HashMap::<String, Vec<models::projects::Version>>::new();

     for file in files {
         if let Some(version) = versions
             .iter()
             .find(|x| x.inner.project_id == file.project_id)
             && let Some(hash) = file.hashes.get(&algorithm)
         {
-            response.insert(
-                hash.clone(),
-                models::projects::Version::from(version.clone()),
-            );
+            // add the version info for this file hash
+            // note: one file hash can have multiple versions associated with it
+            // just having a `HashMap` would mean that some version info is lost
+            // so we return a vec of them instead
+            response
+                .entry(hash.clone())
+                .or_default()
+                .push(models::projects::Version::from(version.clone()));
         }
     }
-    Ok(HttpResponse::Ok().json(response))
+    Ok(response)
 }

 #[derive(Serialize, Deserialize)]
diff --git a/packages/app-lib/src/state/cache.rs b/packages/app-lib/src/state/cache.rs
index 151dd51457..eee2fb108c 100644
--- a/packages/app-lib/src/state/cache.rs
+++ b/packages/app-lib/src/state/cache.rs
@@ -1307,9 +1307,9 @@ impl CachedEntry {

         let variations = futures::future::try_join_all(filtered_keys.iter().map(
             |((loaders_key, game_version), hashes)| {
-                fetch_json::<HashMap<String, Version>>(
+                fetch_json::<HashMap<String, Vec<Version>>>(
                     Method::POST,
-                    concat!(env!("MODRINTH_API_URL"), "version_files/update"),
+                    concat!(env!("MODRINTH_API_URL"), "version_files/update_many"),
                     None,
                     Some(serde_json::json!({
                         "algorithm": "sha1",
@@ -1330,28 +1330,30 @@
                 &filtered_keys[index];

             for hash in hashes {
-                let version = variation.remove(hash);
-
-                if let Some(version) = version {
-                    let version_id = version.id.clone();
-                    vals.push((
-                        CacheValue::Version(version).get_entry(),
-                        false,
-                    ));
-
-                    vals.push((
-                        CacheValue::FileUpdate(CachedFileUpdate {
-                            hash: hash.clone(),
-                            game_version: game_version.clone(),
-                            loaders: loaders_key
-                                .split('+')
-                                .map(|x| x.to_string())
-                                .collect(),
-                            update_version_id: version_id,
-                        })
-                        .get_entry(),
-                        true,
-                    ));
+                let versions = variation.remove(hash);
+
+                if let Some(versions) = versions {
+                    for version in versions {
+                        let version_id = version.id.clone();
+                        vals.push((
+                            CacheValue::Version(version).get_entry(),
+                            false,
+                        ));
+
+                        vals.push((
+                            CacheValue::FileUpdate(CachedFileUpdate {
+                                hash: hash.clone(),
+                                game_version: game_version.clone(),
+                                loaders: loaders_key
+                                    .split('+')
+                                    .map(|x| x.to_string())
+                                    .collect(),
+                                update_version_id: version_id,
+                            })
+                            .get_entry(),
+                            true,
+                        ));
+                    }
                 } else {
                     vals.push((
                         CacheValueType::FileUpdate.get_empty_entry(
diff --git a/packages/app-lib/src/state/profiles.rs b/packages/app-lib/src/state/profiles.rs
index 84d4e77262..6669ee129a 100644
--- a/packages/app-lib/src/state/profiles.rs
+++ b/packages/app-lib/src/state/profiles.rs
@@ -1009,17 +1009,15 @@ impl Profile {
                     initial_file.file_name
                 );

-                let update_version_id = if let Some(update) = file_updates
-                    .iter()
-                    .find(|x| x.hash == hash.hash)
-                    .map(|x| x.update_version_id.clone())
-                {
-                    if let Some(metadata) = &file {
-                        if metadata.version_id != update {
-                            Some(update)
-                        } else {
-                            None
-                        }
+                let update_version_id = if let Some(metadata) = &file {
+                    let update_ids: Vec<_> = file_updates
+                        .iter()
+                        .filter(|x| x.hash == hash.hash)
+                        .map(|x| x.update_version_id.clone())
+                        .collect();
+
+                    if !update_ids.contains(&metadata.version_id) {
+                        update_ids.into_iter().next()
+                    } else {
+                        None
                     }
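
Note on the response shapes (a minimal sketch, not part of the patch above): the deprecated `POST version_files/update` endpoint keeps returning at most one version per file hash, while the new `POST version_files/update_many` returns every version linked to a hash. The stub type and the sample hash below are illustrative assumptions; the real payload is the full version model (`models::projects::Version` on the server, `Version` in app-lib).

use std::collections::HashMap;

use serde::Deserialize;

// Hypothetical, trimmed-down stand-in for the API's version object;
// the real model carries many more fields.
#[derive(Debug, Deserialize)]
struct VersionStub {
    id: String,
}

// Shape of the deprecated `version_files/update` response:
// one version per file hash, so extra versions sharing a hash are dropped.
type UpdateResponse = HashMap<String, VersionStub>;

// Shape of the new `version_files/update_many` response:
// every version linked to a file hash is preserved.
type UpdateManyResponse = HashMap<String, Vec<VersionStub>>;

fn main() -> Result<(), serde_json::Error> {
    // "abc123" is a made-up hash used only for illustration.
    let many: UpdateManyResponse = serde_json::from_str(
        r#"{ "abc123": [ { "id": "V1" }, { "id": "V2" } ] }"#,
    )?;
    assert_eq!(many["abc123"].len(), 2);

    let single: UpdateResponse =
        serde_json::from_str(r#"{ "abc123": { "id": "V1" } }"#)?;
    assert_eq!(single["abc123"].id, "V1");

    Ok(())
}

Getting all versions per hash is what lets the app-lib cache store a FileUpdate entry for each of them (the cache.rs hunk) and lets the profile update check ask whether the installed version is already among the known updates for that hash (the profiles.rs hunk), instead of re-offering an "update" that resolves to the same file.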