diff --git a/crates/rspack_binding_api/src/raw_options/raw_split_chunks/raw_split_chunk_chunks.rs b/crates/rspack_binding_api/src/raw_options/raw_split_chunks/raw_split_chunk_chunks.rs
index 0533c83bf297..fb1770b84649 100644
--- a/crates/rspack_binding_api/src/raw_options/raw_split_chunks/raw_split_chunk_chunks.rs
+++ b/crates/rspack_binding_api/src/raw_options/raw_split_chunks/raw_split_chunk_chunks.rs
@@ -2,23 +2,26 @@ use std::sync::Arc;
 
 use napi::{JsString, bindgen_prelude::Either3};
 use rspack_napi::{string::JsStringExt, threadsafe_function::ThreadsafeFunction};
+use rspack_plugin_split_chunks::{
+  ChunkFilter, create_chunk_filter_from_str, create_regex_chunk_filter_from_str,
+};
 use rspack_regex::RspackRegex;
 
 use crate::chunk::ChunkWrapper;
 
 pub type Chunks<'a> = Either3<RspackRegex, JsString<'a>, ThreadsafeFunction<ChunkWrapper, bool>>;
 
-pub fn create_chunks_filter(raw: Chunks) -> rspack_plugin_split_chunks::ChunkFilter {
+pub fn create_chunks_filter(raw: Chunks) -> ChunkFilter {
   match raw {
-    Either3::A(regex) => rspack_plugin_split_chunks::create_regex_chunk_filter_from_str(regex),
+    Either3::A(regex) => create_regex_chunk_filter_from_str(regex),
    Either3::B(js_str) => {
       let js_str = js_str.into_string();
-      rspack_plugin_split_chunks::create_chunk_filter_from_str(&js_str)
+      create_chunk_filter_from_str(&js_str)
     }
-    Either3::C(f) => Arc::new(move |chunk_ukey, compilation| {
+    Either3::C(f) => ChunkFilter::Func(Arc::new(move |chunk_ukey, compilation| {
       let f = f.clone();
       let chunk_wrapper = ChunkWrapper::new(*chunk_ukey, compilation);
       Box::pin(async move { f.call_with_sync(chunk_wrapper).await })
-    }),
+    })),
   }
 }
diff --git a/crates/rspack_core/src/chunk_graph/chunk_graph_chunk.rs b/crates/rspack_core/src/chunk_graph/chunk_graph_chunk.rs
index b306ba373351..a201246c1fd1 100644
--- a/crates/rspack_core/src/chunk_graph/chunk_graph_chunk.rs
+++ b/crates/rspack_core/src/chunk_graph/chunk_graph_chunk.rs
@@ -353,6 +353,39 @@ impl ChunkGraph {
     }
   }
 
+  pub fn disconnect_chunks_and_modules(
+    &mut self,
+    chunks: &[ChunkUkey],
+    modules: &[ModuleIdentifier],
+  ) {
+    for chunk in chunks.iter() {
+      let cgc = self.expect_chunk_graph_chunk_mut(*chunk);
+      for module in modules.iter() {
+        cgc.modules.remove(module);
+        if let Some(source_types_by_module) = &mut cgc.source_types_by_module {
+          source_types_by_module.remove(module);
+        }
+      }
+    }
+    for module in modules.iter() {
+      let cgm = self.expect_chunk_graph_module_mut(*module);
+      for chunk in chunks.iter() {
+        cgm.chunks.remove(chunk);
+      }
+    }
+  }
+
+  pub fn connect_chunk_and_modules(&mut self, chunk: ChunkUkey, modules: &[ModuleIdentifier]) {
+    for module in modules.iter() {
+      let cgm = self.expect_chunk_graph_module_mut(*module);
+      cgm.chunks.insert(chunk);
+    }
+    let cgc = self.expect_chunk_graph_chunk_mut(chunk);
+    for module in modules.iter() {
+      cgc.modules.insert(*module);
+    }
+  }
+
   pub fn get_chunk_modules<'module>(
     &self,
     chunk: &ChunkUkey,
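// ---------------------------------------------------------------------------
// Editor's note: illustrative sketch, not part of the diff. It shows how a
// caller migrates from the old per-module `disconnect_chunk_and_module` /
// `connect_chunk_and_module` loop to the batch methods added above, which walk
// each chunk/module record once instead of re-resolving it for every pair.
// The function name and bindings here are hypothetical.
fn migrate_modules_sketch(
  chunk_graph: &mut rspack_core::ChunkGraph,
  old_chunks: &[rspack_core::ChunkUkey],
  new_chunk: rspack_core::ChunkUkey,
  modules: &[rspack_core::ModuleIdentifier],
) {
  // Before: nested loops issuing one call (and two hash-map lookups) per pair.
  // After: two batch calls over the same slices.
  chunk_graph.disconnect_chunks_and_modules(old_chunks, modules);
  chunk_graph.connect_chunk_and_modules(new_chunk, modules);
}
// ---------------------------------------------------------------------------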
diff --git a/crates/rspack_plugin_split_chunks/src/common.rs b/crates/rspack_plugin_split_chunks/src/common.rs
index aa4714f7df9f..36e706b9eaa8 100644
--- a/crates/rspack_plugin_split_chunks/src/common.rs
+++ b/crates/rspack_plugin_split_chunks/src/common.rs
@@ -5,14 +5,57 @@ use std::{
 
 use derive_more::Debug;
 use futures::future::BoxFuture;
-use rspack_collections::IdentifierMap;
+use rspack_collections::{IdentifierMap, UkeySet};
 use rspack_core::{ChunkUkey, Compilation, Module, SourceType};
 use rspack_error::Result;
 use rspack_regex::RspackRegex;
 use rustc_hash::{FxHashMap, FxHashSet};
 
-pub type ChunkFilter =
+pub type ChunkFilterFunc =
   Arc<dyn Fn(&ChunkUkey, &Compilation) -> BoxFuture<'static, Result<bool>> + Sync + Send>;
+
+#[derive(Clone)]
+pub enum ChunkFilter {
+  Func(ChunkFilterFunc),
+  All,
+  Regex(RspackRegex),
+  Async,
+  Initial,
+}
+
+impl ChunkFilter {
+  pub fn is_func(&self) -> bool {
+    matches!(self, ChunkFilter::Func(_))
+  }
+
+  pub async fn test_func(&self, chunk_ukey: &ChunkUkey, compilation: &Compilation) -> Result<bool> {
+    if let ChunkFilter::Func(func) = self {
+      func(chunk_ukey, compilation).await
+    } else {
+      panic!("ChunkFilter is not a function");
+    }
+  }
+
+  pub fn test_internal(&self, chunk_ukey: &ChunkUkey, compilation: &Compilation) -> bool {
+    match self {
+      ChunkFilter::Func(_) => panic!("ChunkFilter is a function"),
+      ChunkFilter::All => true,
+      ChunkFilter::Regex(re) => {
+        let chunk = compilation.chunk_by_ukey.expect_get(chunk_ukey);
+        chunk.name().is_some_and(|name| re.test(name))
+      }
+      ChunkFilter::Async => {
+        let chunk = compilation.chunk_by_ukey.expect_get(chunk_ukey);
+        !chunk.can_be_initial(&compilation.chunk_group_by_ukey)
+      }
+      ChunkFilter::Initial => {
+        let chunk = compilation.chunk_by_ukey.expect_get(chunk_ukey);
+        chunk.can_be_initial(&compilation.chunk_group_by_ukey)
+      }
+    }
+  }
+}
+
 pub type ModuleTypeFilter = Arc<dyn Fn(&dyn Module) -> bool + Send + Sync>;
 pub type ModuleLayerFilter =
   Arc<dyn Fn(Option<String>) -> BoxFuture<'static, Result<bool>> + Send + Sync>;
@@ -26,23 +69,15 @@ pub fn create_default_module_layer_filter() -> ModuleLayerFilter {
 }
 
 pub fn create_async_chunk_filter() -> ChunkFilter {
-  Arc::new(|chunk_ukey, compilation| {
-    let chunk = compilation.chunk_by_ukey.expect_get(chunk_ukey);
-    let can_be_initial = chunk.can_be_initial(&compilation.chunk_group_by_ukey);
-    Box::pin(async move { Ok(!can_be_initial) })
-  })
+  ChunkFilter::Async
 }
 
 pub fn create_initial_chunk_filter() -> ChunkFilter {
-  Arc::new(|chunk_ukey, compilation| {
-    let chunk = compilation.chunk_by_ukey.expect_get(chunk_ukey);
-    let can_be_initial = chunk.can_be_initial(&compilation.chunk_group_by_ukey);
-    Box::pin(async move { Ok(can_be_initial) })
-  })
+  ChunkFilter::Initial
 }
 
 pub fn create_all_chunk_filter() -> ChunkFilter {
-  Arc::new(|_chunk, _compilation| Box::pin(async move { Ok(true) }))
+  ChunkFilter::All
 }
 
 pub fn create_chunk_filter_from_str(chunks: &str) -> ChunkFilter {
@@ -55,11 +90,7 @@ pub fn create_chunk_filter_from_str(chunks: &str) -> ChunkFilter {
 }
 
 pub fn create_regex_chunk_filter_from_str(re: RspackRegex) -> ChunkFilter {
-  Arc::new(move |chunk_ukey, compilation| {
-    let chunk = compilation.chunk_by_ukey.expect_get(chunk_ukey);
-    let res = chunk.name().is_some_and(|name| re.test(name));
-    Box::pin(async move { Ok(res) })
-  })
+  ChunkFilter::Regex(re)
 }
 
 #[derive(Debug, Default, Clone)]
@@ -171,3 +202,4 @@ pub struct FallbackCacheGroup {
 }
 
 pub(crate) type ModuleSizes = IdentifierMap<FxHashMap<SourceType, f64>>;
+pub(crate) type ModuleChunks = IdentifierMap<UkeySet<ChunkUkey>>;
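// ---------------------------------------------------------------------------
// Editor's note: illustrative sketch, not part of the diff. The constructors
// above now return plain enum variants instead of allocating an Arc'd async
// closure per cache group; only the JS-backed `Func` variant still carries a
// closure. The wrapper function below is hypothetical.
fn chunk_filter_construction_sketch() {
  let by_name = create_chunk_filter_from_str("async"); // ChunkFilter::Async
  let all = create_all_chunk_filter(); // ChunkFilter::All
  // Neither is a function, so call sites can take the synchronous
  // `test_internal` fast path and skip boxing a future entirely.
  assert!(!by_name.is_func());
  assert!(!all.is_func());
}
// ---------------------------------------------------------------------------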
diff --git a/crates/rspack_plugin_split_chunks/src/plugin/chunk.rs b/crates/rspack_plugin_split_chunks/src/plugin/chunk.rs
index f2e35f4a43ff..f96e67ceca3b 100644
--- a/crates/rspack_plugin_split_chunks/src/plugin/chunk.rs
+++ b/crates/rspack_plugin_split_chunks/src/plugin/chunk.rs
@@ -1,7 +1,8 @@
-use rspack_collections::{DatabaseItem, UkeySet};
-use rspack_core::{Chunk, ChunkUkey, Compilation, incremental::Mutation};
+use rayon::prelude::*;
+use rspack_collections::{DatabaseItem, IdentifierMap, UkeySet};
+use rspack_core::{Chunk, ChunkUkey, Compilation, ModuleIdentifier, incremental::Mutation};
 
-use crate::{SplitChunksPlugin, module_group::ModuleGroup};
+use crate::{SplitChunksPlugin, common::ModuleChunks, module_group::ModuleGroup};
 
 fn put_split_chunk_reason(
   chunk_reason: &mut Option<String>,
@@ -21,6 +22,16 @@ fn put_split_chunk_reason(
 }
 
 impl SplitChunksPlugin {
+  pub(crate) fn get_module_chunks(
+    all_modules: &[ModuleIdentifier],
+    compilation: &Compilation,
+  ) -> ModuleChunks {
+    let chunk_graph = &compilation.chunk_graph;
+    all_modules
+      .par_iter()
+      .map(|module| (*module, chunk_graph.get_module_chunks(*module).clone()))
+      .collect::<ModuleChunks>()
+  }
   /// Affected by `splitChunks.cacheGroups.{cacheGroup}.reuseExistingChunk`
   ///
   /// If there is a code splitting chunk that the contains the same modules as the current `ModuleGroup`,
@@ -164,29 +175,31 @@ impl SplitChunksPlugin {
     original_chunks: &UkeySet<ChunkUkey>,
     compilation: &mut Compilation,
   ) {
-    for module_identifier in &item.modules {
-      if let Some(module) = compilation.module_by_identifier(module_identifier)
-        && module
-          .chunk_condition(&new_chunk, compilation)
-          .is_some_and(|condition| !condition)
-      {
-        continue;
-      }
+    let modules = item
+      .modules
+      .iter()
+      .filter(|mid| {
+        if let Some(module) = compilation.module_by_identifier(mid)
+          && module
+            .chunk_condition(&new_chunk, compilation)
+            .is_some_and(|condition| !condition)
+        {
+          return false;
+        }
+        true
+      })
+      .copied()
+      .collect::<Vec<_>>();
 
-      // First, we remove modules from old chunks
+    let chunks = original_chunks.iter().copied().collect::<Vec<_>>();
 
-      // Remove module from old chunks
-      for used_chunk in original_chunks {
-        compilation
-          .chunk_graph
-          .disconnect_chunk_and_module(used_chunk, *module_identifier);
-      }
+    compilation
+      .chunk_graph
+      .disconnect_chunks_and_modules(&chunks, &modules);
 
-      // Add module to new chunk
-      compilation
-        .chunk_graph
-        .connect_chunk_and_module(new_chunk, *module_identifier);
-    }
+    compilation
+      .chunk_graph
+      .connect_chunk_and_modules(new_chunk, &modules);
   }
 
   /// Since the modules are moved into the `new_chunk`, we should
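// ---------------------------------------------------------------------------
// Editor's note: illustrative sketch, not part of the diff. `get_module_chunks`
// above snapshots the chunk graph into a `ModuleChunks` map
// (IdentifierMap<UkeySet<ChunkUkey>>) once, in parallel; later stages then do
// plain map lookups instead of borrowing `compilation.chunk_graph`, which is
// what lets them run inside spawned tasks. A hypothetical consumer:
fn chunk_count_sketch(
  module_chunks: &crate::common::ModuleChunks,
  module: &rspack_core::ModuleIdentifier,
) -> usize {
  module_chunks
    .get(module)
    .expect("should have module chunks") // every candidate module was snapshotted
    .len()
}
// ---------------------------------------------------------------------------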
diff --git a/crates/rspack_plugin_split_chunks/src/plugin/max_size.rs b/crates/rspack_plugin_split_chunks/src/plugin/max_size.rs
index 8b2a9c694cf9..92c452bcc7f2 100644
--- a/crates/rspack_plugin_split_chunks/src/plugin/max_size.rs
+++ b/crates/rspack_plugin_split_chunks/src/plugin/max_size.rs
@@ -515,7 +515,11 @@
     );
 
     if max_size_setting.is_none()
-      && !(fallback_cache_group.chunks_filter)(&chunk.ukey(), compilation).await?
+      && !(if fallback_cache_group.chunks_filter.is_func() {
+        fallback_cache_group.chunks_filter.test_func(&chunk.ukey(), compilation).await?
+      } else {
+        fallback_cache_group.chunks_filter.test_internal(&chunk.ukey(), compilation)
+      })
     {
       tracing::debug!("Chunk({:?}) skips `maxSize` checking. Reason: max_size_setting.is_none() and chunks_filter is false", chunk.chunk_reason());
       return Ok(None);
diff --git a/crates/rspack_plugin_split_chunks/src/plugin/mod.rs b/crates/rspack_plugin_split_chunks/src/plugin/mod.rs
index 296abdeff113..e552ed1cb829 100644
--- a/crates/rspack_plugin_split_chunks/src/plugin/mod.rs
+++ b/crates/rspack_plugin_split_chunks/src/plugin/mod.rs
@@ -54,9 +54,10 @@ impl SplitChunksPlugin {
       .collect::<Vec<_>>();
 
     let module_sizes = Self::get_module_sizes(&all_modules, compilation);
+    let module_chunks = Self::get_module_chunks(&all_modules, compilation);
 
     let mut module_group_map = self
-      .prepare_module_group_map(&all_modules, compilation, &module_sizes)
+      .prepare_module_group_map(&all_modules, compilation, &module_sizes, &module_chunks)
       .await?;
     tracing::trace!("prepared module_group_map {:#?}", module_group_map);
     logger.time_end(start);
diff --git a/crates/rspack_plugin_split_chunks/src/plugin/module_group.rs b/crates/rspack_plugin_split_chunks/src/plugin/module_group.rs
index 4f29e850b25b..f7fd5027966e 100644
--- a/crates/rspack_plugin_split_chunks/src/plugin/module_group.rs
+++ b/crates/rspack_plugin_split_chunks/src/plugin/module_group.rs
@@ -1,4 +1,5 @@
 use std::{
+  cmp::Ordering,
   collections::{HashMap, hash_map},
   hash::{BuildHasherDefault, Hash, Hasher},
 };
@@ -8,18 +9,18 @@ use futures::future::join_all;
 use rayon::prelude::*;
 use rspack_collections::{IdentifierMap, UkeyIndexMap, UkeySet};
 use rspack_core::{
-  ChunkByUkey, ChunkGraph, ChunkUkey, Compilation, Module, ModuleGraph, ModuleIdentifier,
+  ChunkByUkey, ChunkUkey, Compilation, Module, ModuleGraph, ModuleIdentifier,
   PrefetchExportsInfoMode, UsageKey,
 };
 use rspack_error::{Result, ToStringResultToRspackResultExt};
 use rspack_util::{fx_hash::FxDashMap, tracing_preset::TRACING_BENCH_TARGET};
-use rustc_hash::{FxHashMap, FxHasher};
+use rustc_hash::{FxHashMap, FxHashSet, FxHasher};
 use tracing::instrument;
 
 use super::ModuleGroupMap;
 use crate::{
   SplitChunksPlugin,
-  common::ModuleSizes,
+  common::{ModuleChunks, ModuleSizes},
   module_group::{CacheGroupIdx, ModuleGroup, compare_entries},
   options::{
     cache_group::CacheGroup,
@@ -154,8 +155,8 @@ impl Combinator {
   fn get_combs(
     &self,
     module: ModuleIdentifier,
-    chunk_graph: &ChunkGraph,
     used_exports: bool,
+    module_chunks: &ModuleChunks,
   ) -> Vec<UkeySet<ChunkUkey>> {
     if used_exports {
       let (chunk_sets_in_graph, chunk_sets_by_count) = self.group_by_used_exports();
@@ -180,7 +181,9 @@ impl Combinator {
       result
     } else {
       let (chunk_sets_in_graph, chunk_sets_by_count) = self.group_by_chunks();
-      let chunks = chunk_graph.get_module_chunks(module);
+      let chunks = module_chunks
+        .get(&module)
+        .expect("should have module chunks");
       self.get_combination(
         get_key(chunks.iter().copied()),
         &self.combinations_cache,
@@ -193,12 +196,14 @@ impl Combinator {
   fn prepare_group_by_chunks(
     &mut self,
     all_modules: &[ModuleIdentifier],
-    chunk_graph: &ChunkGraph,
+    module_chunks: &ModuleChunks,
   ) {
     self.chunk_sets_in_graph = all_modules
       .par_iter()
       .filter_map(|module| {
-        let chunks = chunk_graph.get_module_chunks(*module);
+        let chunks = module_chunks
+          .get(module)
+          .expect("should have module chunks");
         if chunks.is_empty() {
           return None;
         }
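// ---------------------------------------------------------------------------
// Editor's note: illustrative sketch, not part of the diff. The `maxSize`
// check above shows the dispatch idiom used at every ChunkFilter call site:
// probe `is_func()` first so only JS-backed filters pay for an await, while
// built-in filters answer synchronously. A hypothetical helper wrapping it:
async fn passes_filter_sketch(
  filter: &ChunkFilter,
  chunk: &ChunkUkey,
  compilation: &Compilation,
) -> Result<bool> {
  if filter.is_func() {
    filter.test_func(chunk, compilation).await // crosses into JS, needs await
  } else {
    Ok(filter.test_internal(chunk, compilation)) // pure Rust, no boxed future
  }
}
// ---------------------------------------------------------------------------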
@@ -228,15 +233,19 @@
     &mut self,
     all_modules: &[ModuleIdentifier],
     module_graph: &ModuleGraph,
-    chunk_graph: &ChunkGraph,
     chunk_by_ukey: &ChunkByUkey,
+    module_chunks: &ModuleChunks,
   ) {
     let (module_grouped_chunks, used_exports_chunks): (Vec<_>, Vec<_>) = all_modules
       .par_iter()
       .filter_map(|module| {
         let grouped_chunks = Self::group_chunks_by_exports(
           module,
-          chunk_graph.get_module_chunks(*module).iter().cloned(),
+          module_chunks
+            .get(module)
+            .expect("should have module chunks")
+            .iter()
+            .cloned(),
           module_graph,
           chunk_by_ukey,
         );
@@ -289,20 +298,20 @@ impl SplitChunksPlugin {
     &self,
     module_group_map: &mut ModuleGroupMap,
   ) -> (String, ModuleGroup) {
-    // perf(hyf): I wonder if we could use BinaryHeap to avoid sorting for find_best_module_group call
     debug_assert!(!module_group_map.is_empty());
 
-    let mut iter: std::collections::hash_map::Iter<String, ModuleGroup> = module_group_map.iter();
-    let (key, mut best_module_group) = iter.next().expect("at least have one item");
-
-    let mut best_entry_key = key;
-    for (key, each_module_group) in iter {
-      if compare_entries(best_module_group, each_module_group) < 0f64 {
-        best_entry_key = key;
-        best_module_group = each_module_group;
-      }
-    }
-    let best_entry_key = best_entry_key.clone();
+    let best_entry_key = module_group_map
+      .iter()
+      .min_by(|a, b| {
+        if compare_entries(a.1, b.1) < 0f64 {
+          Ordering::Greater
+        } else {
+          Ordering::Less
+        }
+      })
+      .map(|(key, _)| key.clone())
+      .expect("at least have one item");
+
     let best_module_group = module_group_map
       .remove(&best_entry_key)
       .expect("This should never happen, please file an issue");
@@ -315,6 +324,7 @@ impl SplitChunksPlugin {
     all_modules: &[ModuleIdentifier],
     compilation: &Compilation,
     module_sizes: &ModuleSizes,
+    module_chunks: &ModuleChunks,
   ) -> Result<ModuleGroupMap> {
     let module_graph = compilation.get_module_graph();
 
@@ -322,7 +332,7 @@
 
     let mut combinator = Combinator::default();
 
-    combinator.prepare_group_by_chunks(all_modules, &compilation.chunk_graph);
+    combinator.prepare_group_by_chunks(all_modules, module_chunks);
 
     if self
      .cache_groups
@@ -332,24 +342,21 @@
       combinator.prepare_group_by_used_exports(
         all_modules,
         &module_graph,
-        &compilation.chunk_graph,
         &compilation.chunk_by_ukey,
+        module_chunks,
       );
     }
 
     let module_group_results = rspack_futures::scope::<_, Result<_>>(|token| {
       all_modules.iter().for_each(|module| {
-        let s = unsafe { token.used((&self, module, &module_graph, compilation, &module_group_map, &combinator, &module_sizes)) };
-        s.spawn(|(plugin, module, module_graph, compilation, module_group_map, combinator, module_sizes)| async move {
-          let module = module_graph.module_by_identifier(module).expect("should have module").as_ref();
-          let belong_to_chunks = compilation
-            .chunk_graph
-            .get_module_chunks(module.identifier());
-
+        let s = unsafe { token.used((&self, module, &module_graph, compilation, &module_group_map, &combinator, &module_sizes, module_chunks)) };
+        s.spawn(|(plugin, module, module_graph, compilation, module_group_map, combinator, module_sizes, module_chunks)| async move {
+          let belong_to_chunks = module_chunks.get(module).expect("should have module chunks");
          if belong_to_chunks.is_empty() {
             return Ok(());
           }
+          let module = module_graph.module_by_identifier(module).expect("should have module").as_ref();
 
           let mut temp = Vec::with_capacity(plugin.cache_groups.len());
 
           for idx in 0..plugin.cache_groups.len() {
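// ---------------------------------------------------------------------------
// Editor's note: illustrative sketch, not part of the diff. The rewritten
// `find_best_module_group` above replaces a hand-rolled scan with `min_by`.
// `compare_entries(a, b) < 0` means `b` outranks `a`, so mapping that case to
// `Ordering::Greater` makes `min_by` return the highest-ranked entry in one
// O(n) pass. A toy score stands in for `compare_entries` here:
use std::cmp::Ordering;

fn best_key_sketch(ranked: &std::collections::HashMap<String, f64>) -> Option<String> {
  ranked
    .iter()
    .min_by(|a, b| {
      // Stand-in ranking: the higher score wins. Note the comparator never
      // returns Equal, which is acceptable for picking a single winner.
      if a.1 - b.1 < 0f64 { Ordering::Greater } else { Ordering::Less }
    })
    .map(|(key, _)| key.clone())
}
// ---------------------------------------------------------------------------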
@@ -384,12 +391,30 @@
             .enumerate()
             .filter(|(index, _)| temp[*index]);
 
+          let mut used_exports_combs = None;
+          let mut non_used_exports_combs = None;
+          let mut added_keys = FxHashSet::default();
+
           for (cache_group_index, (idx, cache_group)) in filtered.enumerate() {
-            let combs = combinator.get_combs(
-              module.identifier(),
-              &compilation.chunk_graph,
-              cache_group.used_exports
-            );
+            let combs = if cache_group.used_exports {
+              if used_exports_combs.is_none() {
+                used_exports_combs = Some(combinator.get_combs(
+                  module.identifier(),
+                  true,
+                  module_chunks,
+                ));
+              }
+              used_exports_combs.as_ref().expect("should have used_exports_combs")
+            } else {
+              if non_used_exports_combs.is_none() {
+                non_used_exports_combs = Some(combinator.get_combs(
+                  module.identifier(),
+                  false,
+                  module_chunks,
+                ));
+              }
+              non_used_exports_combs.as_ref().expect("should have non_used_exports_combs")
+            };
 
             for chunk_combination in combs {
               if chunk_combination.is_empty() {
@@ -408,20 +433,30 @@
                 continue;
               }
 
-              let selected_chunks = join_all(chunk_combination.iter().map(|c|
-                async move {
-                  // Filter by `splitChunks.cacheGroups.{cacheGroup}.chunks`
-                  (cache_group.chunk_filter)(c, compilation).await.map(|filtered| (c, filtered))
-                }
-              )).await.into_iter().collect::<Result<Vec<_>>>()?.into_iter().filter_map(
-                |(chunk, filtered)| {
-                  if filtered {
-                    Some(chunk)
-                  } else {
-                    None
-                  }
-                }
-              ).copied().collect::<Vec<_>>();
+              let selected_chunks = if cache_group.chunk_filter.is_func() {
+                join_all(chunk_combination.iter().map(|c| async move {
+                  // Filter by `splitChunks.cacheGroups.{cacheGroup}.chunks`
+                  cache_group.chunk_filter.test_func(c, compilation).await.map(|filtered| (c, filtered))
+                }))
+                .await
+                .into_iter()
+                .collect::<Result<Vec<_>>>()?
+                .into_iter()
+                .filter_map(
+                  |(chunk, filtered)| {
+                    if filtered {
+                      Some(chunk)
+                    } else {
+                      None
+                    }
+                  }
+                ).copied().collect::<Vec<_>>()
+              } else {
+                chunk_combination.iter().filter(|c| {
+                  cache_group.chunk_filter.test_internal(c, compilation)
+                }).copied().collect::<Vec<_>>()
+              };
 
               // Filter by `splitChunks.cacheGroups.{cacheGroup}.minChunks`
               if selected_chunks.len() < cache_group.min_chunks as usize {
@@ -451,6 +486,7 @@
                 &mut chunk_key_to_string,
                 compilation,
                 module_sizes,
+                &mut added_keys,
               ).await?;
             }
           }
@@ -480,18 +516,29 @@ impl SplitChunksPlugin {
   ) {
     // remove all modules from other entries and update size
     let keys_of_invalid_group = module_group_map
-      .iter_mut()
-      .par_bridge()
+      .par_iter_mut()
      .filter_map(|(key, other_module_group)| {
         other_module_group
           .chunks
           .intersection(used_chunks)
           .next()?;
 
-        current_module_group.modules.iter().for_each(|module| {
-          tracing::trace!("remove module({module}) from {key}");
-          other_module_group.remove_module(*module, module_sizes);
-        });
+        let module_count = other_module_group.modules.len();
+
+        let duplicated_modules = if other_module_group.modules.len() > current_module_group.modules.len() {
+          current_module_group.modules.intersection(&other_module_group.modules).copied().collect::<Vec<_>>()
+        } else {
+          other_module_group.modules.intersection(&current_module_group.modules).copied().collect::<Vec<_>>()
+        };
+
+        for module in duplicated_modules {
+          other_module_group.remove_module(module, module_sizes);
+        }
+
+        if module_count == other_module_group.modules.len() {
+          // nothing is removed
+          return None;
+        }
 
         if other_module_group.modules.is_empty() {
           tracing::trace!(
@@ -543,66 +590,6 @@ impl SplitChunksPlugin {
       module_group_map.remove(&key);
     });
   }
-
-  // #[allow(clippy::type_complexity)]
-  // fn prepare_combination_maps(
-  //   module_graph: &ModuleGraph,
-  //   chunk_graph: &ChunkGraph,
-  //   used_exports: bool,
-  //   chunk_by_ukey: &ChunkByUkey,
-  // ) -> (
-  //   HashMap<ChunksKey, UkeySet<ChunkUkey>, ChunksKeyHashBuilder>,
-  //   FxHashMap<usize, Vec<UkeySet<ChunkUkey>>>,
-  //   Option<IdentifierMap<Vec<UkeySet<ChunkUkey>>>>,
-  // ) {
-  //   let mut chunk_sets_in_graph =
-  //     HashMap::<ChunksKey, UkeySet<ChunkUkey>, ChunksKeyHashBuilder>::default();
-  //   let mut chunk_sets_by_count = FxHashMap::<usize, Vec<UkeySet<ChunkUkey>>>::default();
-
-  //   let mut grouped_by_exports_map: Option<IdentifierMap<Vec<UkeySet<ChunkUkey>>>> =
-  //     None;
-
-  //   if used_exports {
-  //     let mut grouped_by_exports: FxHashMap<ModuleIdentifier, Vec<UkeySet<ChunkUkey>>> =
-  //       Default::default();
-  //     for module in module_graph.modules().keys() {
-  //       let grouped_chunks = Self::group_chunks_by_exports(
-  //         module,
-  //         chunk_graph.get_module_chunks(*module).iter().cloned(),
-  //         module_graph,
-  //         chunk_by_ukey,
-  //       );
-  //       for chunks in &grouped_chunks {
-  //         let chunk_key = get_key(chunks.iter());
-  //         chunk_sets_in_graph.insert(chunk_key, chunks.clone());
-  //       }
-
-  //       grouped_by_exports.insert(*module, grouped_chunks);
-  //     }
-
-  //     grouped_by_exports_map = Some(grouped_by_exports);
-  //   } else {
-  //     for module in module_graph.modules().keys() {
-  //       let chunks = chunk_graph.get_module_chunks(*module);
-  //       let chunk_key = get_key(chunks.iter());
-  //       chunk_sets_in_graph.insert(chunk_key, chunks.clone());
-  //     }
-  //   }
-
-  //   for chunks in chunk_sets_in_graph.values() {
-  //     let count = chunks.len();
-  //     chunk_sets_by_count
-  //       .entry(count)
-  //       .and_modify(|set| set.push(chunks.clone()))
-  //       .or_insert(vec![chunks.clone()]);
-  //   }
-
-  //   (
-  //     chunk_sets_in_graph,
-  //     chunk_sets_by_count,
-  //     grouped_by_exports_map,
-  //   )
-  // }
 }
 
 async fn merge_matched_item_into_module_group_map(
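// ---------------------------------------------------------------------------
// Editor's note: illustrative sketch, not part of the diff. The rewritten
// invalidation pass above removes only the actual overlap between the winning
// group and each stale group, iterating the smaller set to build the
// intersection, instead of attempting a removal for every module of the
// winner. A standalone version of that idea:
use rustc_hash::FxHashSet;

fn overlap_sketch<T: Eq + std::hash::Hash + Copy>(
  winner: &FxHashSet<T>,
  other: &FxHashSet<T>,
) -> Vec<T> {
  // `intersection` walks its receiver, so start from the smaller set.
  if other.len() > winner.len() {
    winner.intersection(other).copied().collect()
  } else {
    other.intersection(winner).copied().collect()
  }
}
// ---------------------------------------------------------------------------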
@@ -611,6 +598,7 @@ async fn merge_matched_item_into_module_group_map(
   chunk_key_to_string: &mut HashMap<ChunksKey, String, ChunksKeyHashBuilder>,
   compilation: &Compilation,
   module_sizes: &ModuleSizes,
+  added_keys: &mut FxHashSet<String>,
 ) -> Result<()> {
   let MatchedItem {
     idx,
@@ -658,11 +646,13 @@ async fn merge_matched_item_into_module_group_map(
 
   let mut module_group = {
     module_group_map
-      .entry(key)
-      .or_insert_with(|| ModuleGroup::new(idx, chunk_name, cache_group_index, cache_group))
+      .entry(key.clone())
+      .or_insert_with(|| ModuleGroup::new(idx, chunk_name.clone(), cache_group_index, cache_group))
   };
-
-  module_group.add_module(module.identifier(), module_sizes);
+  if chunk_name.is_none() || added_keys.insert(key) {
+    module_group.add_module(module.identifier(), module_sizes);
+  }
   module_group.chunks.extend(selected_chunks.iter().copied());
+
   Ok(())
 }
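// ---------------------------------------------------------------------------
// Editor's note: illustrative sketch, not part of the diff. The guard added
// around `add_module` above makes size accounting idempotent for named cache
// groups: every chunk combination of a named group maps to the same group key,
// so only the first hit may add the module, while unnamed groups
// (chunk_name == None) keep the old always-add behaviour.
use rustc_hash::FxHashSet;

fn should_add_module_sketch(
  chunk_name: Option<&str>,
  key: String,
  added_keys: &mut FxHashSet<String>,
) -> bool {
  // `insert` returns false when `key` was already recorded for this module.
  chunk_name.is_none() || added_keys.insert(key)
}
// ---------------------------------------------------------------------------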