Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -2,23 +2,26 @@ use std::sync::Arc;

use napi::{JsString, bindgen_prelude::Either3};
use rspack_napi::{string::JsStringExt, threadsafe_function::ThreadsafeFunction};
use rspack_plugin_split_chunks::{
ChunkFilter, create_chunk_filter_from_str, create_regex_chunk_filter_from_str,
};
use rspack_regex::RspackRegex;

use crate::chunk::ChunkWrapper;

pub type Chunks<'a> = Either3<RspackRegex, JsString<'a>, ThreadsafeFunction<ChunkWrapper, bool>>;

pub fn create_chunks_filter(raw: Chunks) -> rspack_plugin_split_chunks::ChunkFilter {
pub fn create_chunks_filter(raw: Chunks) -> ChunkFilter {
match raw {
Either3::A(regex) => rspack_plugin_split_chunks::create_regex_chunk_filter_from_str(regex),
Either3::A(regex) => create_regex_chunk_filter_from_str(regex),
Either3::B(js_str) => {
let js_str = js_str.into_string();
rspack_plugin_split_chunks::create_chunk_filter_from_str(&js_str)
create_chunk_filter_from_str(&js_str)
}
Either3::C(f) => Arc::new(move |chunk_ukey, compilation| {
Either3::C(f) => ChunkFilter::Func(Arc::new(move |chunk_ukey, compilation| {
let f = f.clone();
let chunk_wrapper = ChunkWrapper::new(*chunk_ukey, compilation);
Box::pin(async move { f.call_with_sync(chunk_wrapper).await })
}),
})),
}
}
33 changes: 33 additions & 0 deletions crates/rspack_core/src/chunk_graph/chunk_graph_chunk.rs
Original file line number Diff line number Diff line change
Expand Up @@ -353,6 +353,39 @@ impl ChunkGraph {
}
}

/// Removes every chunk↔module edge between the given `chunks` and `modules`.
///
/// Both sides of the bidirectional mapping are updated: each chunk's module
/// set (and its per-module source-type cache, when present) and each
/// module's chunk set. Panics (via `expect_*`) if a ukey/identifier has no
/// chunk-graph entry.
pub fn disconnect_chunks_and_modules(
  &mut self,
  chunks: &[ChunkUkey],
  modules: &[ModuleIdentifier],
) {
  // First pass: strip the modules out of every chunk-side record.
  for chunk_ukey in chunks {
    let cgc = self.expect_chunk_graph_chunk_mut(*chunk_ukey);
    for module_id in modules {
      cgc.modules.remove(module_id);
      // Keep the cached source types in sync with the module set.
      if let Some(by_module) = cgc.source_types_by_module.as_mut() {
        by_module.remove(module_id);
      }
    }
  }
  // Second pass: mirror the removal on every module-side record.
  for module_id in modules {
    let cgm = self.expect_chunk_graph_module_mut(*module_id);
    for chunk_ukey in chunks {
      cgm.chunks.remove(chunk_ukey);
    }
  }
}

/// Adds every module in `modules` to `chunk`, recording the edge on both
/// sides of the chunk graph (module → chunk and chunk → module).
///
/// Panics (via `expect_*`) if `chunk` or any module identifier has no
/// chunk-graph entry.
pub fn connect_chunk_and_modules(&mut self, chunk: ChunkUkey, modules: &[ModuleIdentifier]) {
  // Module side: each module learns it now lives in `chunk`.
  for module_id in modules {
    self
      .expect_chunk_graph_module_mut(*module_id)
      .chunks
      .insert(chunk);
  }
  // Chunk side: register all modules on the single chunk record.
  let cgc = self.expect_chunk_graph_chunk_mut(chunk);
  for module_id in modules {
    cgc.modules.insert(*module_id);
  }
}

pub fn get_chunk_modules<'module>(
&self,
chunk: &ChunkUkey,
Expand Down
68 changes: 50 additions & 18 deletions crates/rspack_plugin_split_chunks/src/common.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,14 +5,57 @@ use std::{

use derive_more::Debug;
use futures::future::BoxFuture;
use rspack_collections::IdentifierMap;
use rspack_collections::{IdentifierMap, UkeySet};
use rspack_core::{ChunkUkey, Compilation, Module, SourceType};
use rspack_error::Result;
use rspack_regex::RspackRegex;
use rustc_hash::{FxHashMap, FxHashSet};

pub type ChunkFilter =
pub type ChunkFilterFunc =
Arc<dyn Fn(&ChunkUkey, &Compilation) -> BoxFuture<'static, Result<bool>> + Sync + Send>;

/// Predicate deciding which chunks a cache group applies to.
///
/// `Func` wraps a user-supplied async callback and must be evaluated through
/// the async `test_func`; every other variant is a built-in rule evaluated
/// synchronously via `test_internal`.
#[derive(Clone)]
pub enum ChunkFilter {
// User callback; the only variant that requires an async call.
Func(ChunkFilterFunc),
// Accepts every chunk unconditionally.
All,
// Accepts chunks whose name matches the regex (unnamed chunks never match).
Regex(RspackRegex),
// Accepts chunks that cannot be initial (i.e. async-loaded chunks).
Async,
// Accepts chunks that can be initial.
Initial,
}

impl ChunkFilter {
  /// Whether this filter is a user callback that must be awaited via
  /// [`ChunkFilter::test_func`].
  pub fn is_func(&self) -> bool {
    matches!(self, Self::Func(_))
  }

  /// Evaluates a `Func` filter against the chunk.
  ///
  /// # Panics
  /// Panics on any non-`Func` variant — callers are expected to check
  /// `is_func()` first and use `test_internal` otherwise.
  pub async fn test_func(&self, chunk_ukey: &ChunkUkey, compilation: &Compilation) -> Result<bool> {
    match self {
      Self::Func(func) => func(chunk_ukey, compilation).await,
      _ => panic!("ChunkFilter is not a function"),
    }
  }

  /// Evaluates any built-in (non-`Func`) variant synchronously.
  ///
  /// # Panics
  /// Panics on the `Func` variant, or (via `expect_get`) if the chunk ukey
  /// is absent from the compilation.
  pub fn test_internal(&self, chunk_ukey: &ChunkUkey, compilation: &Compilation) -> bool {
    // Lazily resolve the chunk: `Func`/`All` never need the lookup.
    let chunk = || compilation.chunk_by_ukey.expect_get(chunk_ukey);
    match self {
      Self::Func(_) => panic!("ChunkFilter is a function"),
      Self::All => true,
      Self::Regex(re) => chunk().name().is_some_and(|name| re.test(name)),
      Self::Async => !chunk().can_be_initial(&compilation.chunk_group_by_ukey),
      Self::Initial => chunk().can_be_initial(&compilation.chunk_group_by_ukey),
    }
  }
}

pub type ModuleTypeFilter = Arc<dyn Fn(&dyn Module) -> bool + Send + Sync>;
pub type ModuleLayerFilter =
Arc<dyn Fn(Option<String>) -> BoxFuture<'static, Result<bool>> + Send + Sync>;
Expand All @@ -26,23 +69,15 @@ pub fn create_default_module_layer_filter() -> ModuleLayerFilter {
}

pub fn create_async_chunk_filter() -> ChunkFilter {
Arc::new(|chunk_ukey, compilation| {
let chunk = compilation.chunk_by_ukey.expect_get(chunk_ukey);
let can_be_initial = chunk.can_be_initial(&compilation.chunk_group_by_ukey);
Box::pin(async move { Ok(!can_be_initial) })
})
ChunkFilter::Async
}

pub fn create_initial_chunk_filter() -> ChunkFilter {
Arc::new(|chunk_ukey, compilation| {
let chunk = compilation.chunk_by_ukey.expect_get(chunk_ukey);
let can_be_initial = chunk.can_be_initial(&compilation.chunk_group_by_ukey);
Box::pin(async move { Ok(can_be_initial) })
})
ChunkFilter::Initial
}

pub fn create_all_chunk_filter() -> ChunkFilter {
Arc::new(|_chunk, _compilation| Box::pin(async move { Ok(true) }))
ChunkFilter::All
}

pub fn create_chunk_filter_from_str(chunks: &str) -> ChunkFilter {
Expand All @@ -55,11 +90,7 @@ pub fn create_chunk_filter_from_str(chunks: &str) -> ChunkFilter {
}

pub fn create_regex_chunk_filter_from_str(re: RspackRegex) -> ChunkFilter {
Arc::new(move |chunk_ukey, compilation| {
let chunk = compilation.chunk_by_ukey.expect_get(chunk_ukey);
let res = chunk.name().is_some_and(|name| re.test(name));
Box::pin(async move { Ok(res) })
})
ChunkFilter::Regex(re)
}

#[derive(Debug, Default, Clone)]
Expand Down Expand Up @@ -171,3 +202,4 @@ pub struct FallbackCacheGroup {
}

pub(crate) type ModuleSizes = IdentifierMap<FxHashMap<SourceType, f64>>;
pub(crate) type ModuleChunks = IdentifierMap<UkeySet<ChunkUkey>>;
59 changes: 36 additions & 23 deletions crates/rspack_plugin_split_chunks/src/plugin/chunk.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
use rspack_collections::{DatabaseItem, UkeySet};
use rspack_core::{Chunk, ChunkUkey, Compilation, incremental::Mutation};
use rayon::prelude::*;
use rspack_collections::{DatabaseItem, IdentifierMap, UkeySet};
use rspack_core::{Chunk, ChunkUkey, Compilation, ModuleIdentifier, incremental::Mutation};

use crate::{SplitChunksPlugin, module_group::ModuleGroup};
use crate::{SplitChunksPlugin, common::ModuleChunks, module_group::ModuleGroup};

fn put_split_chunk_reason(
chunk_reason: &mut Option<String>,
Expand All @@ -21,6 +22,16 @@ fn put_split_chunk_reason(
}

impl SplitChunksPlugin {
/// Snapshots, in parallel, the set of chunks each module currently belongs
/// to, so later phases can consult the mapping without re-querying (or
/// re-borrowing) the chunk graph.
///
/// Cloning each module's chunk set is required because the returned
/// `ModuleChunks` owns its values independently of `compilation`.
pub(crate) fn get_module_chunks(
  all_modules: &[ModuleIdentifier],
  compilation: &Compilation,
) -> ModuleChunks {
  let chunk_graph = &compilation.chunk_graph;
  all_modules
    .par_iter()
    .map(|module_id| (*module_id, chunk_graph.get_module_chunks(*module_id).clone()))
    .collect()
}
/// Affected by `splitChunks.cacheGroups.{cacheGroup}.reuseExistingChunk`
///
/// If there is a code splitting chunk that contains the same modules as the current `ModuleGroup`,
Expand Down Expand Up @@ -164,29 +175,31 @@ impl SplitChunksPlugin {
original_chunks: &UkeySet<ChunkUkey>,
compilation: &mut Compilation,
) {
for module_identifier in &item.modules {
if let Some(module) = compilation.module_by_identifier(module_identifier)
&& module
.chunk_condition(&new_chunk, compilation)
.is_some_and(|condition| !condition)
{
continue;
}
let modules = item
.modules
.iter()
.filter(|mid| {
if let Some(module) = compilation.module_by_identifier(mid)
&& module
.chunk_condition(&new_chunk, compilation)
.is_some_and(|condition| !condition)
{
return false;
}
true
})
.copied()
.collect::<Vec<_>>();

// First, we remove modules from old chunks
let chunks = original_chunks.iter().copied().collect::<Vec<_>>();

// Remove module from old chunks
for used_chunk in original_chunks {
compilation
.chunk_graph
.disconnect_chunk_and_module(used_chunk, *module_identifier);
}
compilation
.chunk_graph
.disconnect_chunks_and_modules(&chunks, &modules);

// Add module to new chunk
compilation
.chunk_graph
.connect_chunk_and_module(new_chunk, *module_identifier);
}
compilation
.chunk_graph
.connect_chunk_and_modules(new_chunk, &modules);
}

/// Since the modules are moved into the `new_chunk`, we should
Expand Down
6 changes: 5 additions & 1 deletion crates/rspack_plugin_split_chunks/src/plugin/max_size.rs
Original file line number Diff line number Diff line change
Expand Up @@ -515,7 +515,11 @@ impl SplitChunksPlugin {
);

if max_size_setting.is_none()
&& !(fallback_cache_group.chunks_filter)(&chunk.ukey(), compilation).await?
&& !(if fallback_cache_group.chunks_filter.is_func() {
fallback_cache_group.chunks_filter.test_func(&chunk.ukey(), compilation).await?
} else {
fallback_cache_group.chunks_filter.test_internal(&chunk.ukey(), compilation)
})
{
tracing::debug!("Chunk({:?}) skips `maxSize` checking. Reason: max_size_setting.is_none() and chunks_filter is false", chunk.chunk_reason());
return Ok(None);
Expand Down
3 changes: 2 additions & 1 deletion crates/rspack_plugin_split_chunks/src/plugin/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -54,9 +54,10 @@ impl SplitChunksPlugin {
.collect::<Vec<_>>();

let module_sizes = Self::get_module_sizes(&all_modules, compilation);
let module_chunks = Self::get_module_chunks(&all_modules, compilation);

let mut module_group_map = self
.prepare_module_group_map(&all_modules, compilation, &module_sizes)
.prepare_module_group_map(&all_modules, compilation, &module_sizes, &module_chunks)
.await?;
tracing::trace!("prepared module_group_map {:#?}", module_group_map);
logger.time_end(start);
Expand Down
Loading
Loading