|
| 1 | +use std::{cmp::min, sync::LazyLock}; |
| 2 | + |
1 | 3 | use anyhow::{Context, Result, bail}; |
| 4 | +use qstring::QString; |
| 5 | +use regex::Regex; |
2 | 6 | use serde::{Deserialize, Serialize}; |
3 | 7 | use tracing::Instrument; |
4 | 8 | use turbo_rcstr::{RcStr, rcstr}; |
@@ -165,6 +169,16 @@ impl BrowserChunkingContextBuilder { |
165 | 169 | self |
166 | 170 | } |
167 | 171 |
|
| 172 | + pub fn filename(mut self, filename: RcStr) -> Self { |
| 173 | + self.chunking_context.filename = Some(filename); |
| 174 | + self |
| 175 | + } |
| 176 | + |
| 177 | + pub fn chunk_filename(mut self, chunk_filename: RcStr) -> Self { |
| 178 | + self.chunking_context.chunk_filename = Some(chunk_filename); |
| 179 | + self |
| 180 | + } |
| 181 | + |
168 | 182 | pub fn build(self) -> Vc<BrowserChunkingContext> { |
169 | 183 | BrowserChunkingContext::cell(self.chunking_context) |
170 | 184 | } |
@@ -227,6 +241,10 @@ pub struct BrowserChunkingContext { |
227 | 241 | module_id_strategy: ResolvedVc<Box<dyn ModuleIdStrategy>>, |
228 | 242 | /// The chunking configs |
229 | 243 | chunking_configs: Vec<(ResolvedVc<Box<dyn ChunkType>>, ChunkingConfig)>, |
| 244 | + /// Evaluate chunk filename template |
| 245 | + filename: Option<RcStr>, |
| 246 | + /// Non evaluate chunk filename template |
| 247 | + chunk_filename: Option<RcStr>, |
230 | 248 | } |
231 | 249 |
|
232 | 250 | impl BrowserChunkingContext { |
@@ -265,6 +283,8 @@ impl BrowserChunkingContext { |
265 | 283 | manifest_chunks: false, |
266 | 284 | module_id_strategy: ResolvedVc::upcast(DevModuleIdStrategy::new_resolved()), |
267 | 285 | chunking_configs: Default::default(), |
| 286 | + filename: Default::default(), |
| 287 | + chunk_filename: Default::default(), |
268 | 288 | }, |
269 | 289 | } |
270 | 290 | } |
@@ -410,32 +430,76 @@ impl ChunkingContext for BrowserChunkingContext { |
410 | 430 | extension.starts_with("."), |
411 | 431 | "`extension` should include the leading '.', got '{extension}'" |
412 | 432 | ); |
413 | | - let root_path = self.chunk_root_path.clone(); |
414 | | - let name = match self.content_hashing { |
415 | | - None => { |
416 | | - ident |
417 | | - .output_name(self.root_path.clone(), extension) |
418 | | - .owned() |
419 | | - .await? |
420 | | - } |
421 | | - Some(ContentHashing::Direct { length }) => { |
422 | | - let Some(asset) = asset else { |
423 | | - bail!("chunk_path requires an asset when content hashing is enabled"); |
| 433 | + |
| 434 | + let output_name = ident |
| 435 | + .output_name(self.root_path.clone(), extension.clone()) |
| 436 | + .owned() |
| 437 | + .await?; |
| 438 | + |
| 439 | + let mut filename = match asset { |
| 440 | + Some(asset) => { |
| 441 | + let ident = ident.await?; |
| 442 | + |
| 443 | + let mut evaluate = false; |
| 444 | + let mut dev_chunk_list = false; |
| 445 | + ident.modifiers.iter().for_each(|m| { |
| 446 | + if m.contains("evaluate") { |
| 447 | + evaluate = true; |
| 448 | + } |
| 449 | + if m.contains("dev chunk list") { |
| 450 | + dev_chunk_list = true; |
| 451 | + } |
| 452 | + }); |
| 453 | + let query = QString::from(ident.query.as_str()); |
| 454 | + let name = if dev_chunk_list { |
| 455 | + output_name.as_str() |
| 456 | + } else { |
| 457 | + query.get("name").unwrap_or(output_name.as_str()) |
424 | 458 | }; |
425 | | - let content = asset.content().await?; |
426 | | - if let AssetContent::File(file) = &*content { |
427 | | - let hash = hash_xxh3_hash64(&file.await?); |
428 | | - let length = length as usize; |
429 | | - format!("{hash:0length$x}{extension}").into() |
| 459 | + |
| 460 | + let filename_template = if evaluate { |
| 461 | + &self.filename |
430 | 462 | } else { |
431 | | - bail!( |
432 | | - "chunk_path requires an asset with file content when content hashing is \ |
433 | | - enabled" |
434 | | - ); |
| 463 | + &self.chunk_filename |
| 464 | + }; |
| 465 | + |
| 466 | + match filename_template { |
| 467 | + Some(filename) => { |
| 468 | + let mut filename = filename.to_string(); |
| 469 | + |
| 470 | + if match_name_placeholder(&filename) { |
| 471 | + filename = replace_name_placeholder(&filename, name); |
| 472 | + } |
| 473 | + |
| 474 | + if match_content_hash_placeholder(&filename) { |
| 475 | + let content = asset.content().await?; |
| 476 | + if let AssetContent::File(file) = &*content { |
| 477 | + let content_hash = hash_xxh3_hash64(&file.await?); |
| 478 | + filename = replace_content_hash_placeholder( |
| 479 | + &filename, |
| 480 | + &format!("{content_hash:016x}"), |
| 481 | + ); |
| 482 | + } else { |
| 483 | + bail!( |
| 484 | + "chunk_path requires an asset with file content when content \ |
| 485 | + hashing is enabled" |
| 486 | + ); |
| 487 | + } |
| 488 | + }; |
| 489 | + |
| 490 | + filename |
| 491 | + } |
| 492 | + None => name.to_string(), |
435 | 493 | } |
436 | 494 | } |
| 495 | + None => output_name.to_string(), |
437 | 496 | }; |
438 | | - Ok(root_path.join(&name)?.cell()) |
| 497 | + |
| 498 | + if !filename.ends_with(extension.as_str()) { |
| 499 | + filename.push_str(&extension); |
| 500 | + } |
| 501 | + |
| 502 | + self.chunk_root_path.join(&filename).map(|p| p.cell()) |
439 | 503 | } |
440 | 504 |
|
441 | 505 | #[turbo_tasks::function] |
@@ -715,3 +779,40 @@ impl ChunkingContext for BrowserChunkingContext { |
715 | 779 | }) |
716 | 780 | } |
717 | 781 | } |
| 782 | + |
| 783 | +pub fn clean_separators(s: &str) -> String { |
| 784 | + static SEPARATOR_REGEX: LazyLock<Regex> = LazyLock::new(|| Regex::new(r".*[/#?]").unwrap()); |
| 785 | + SEPARATOR_REGEX.replace_all(s, "").to_string() |
| 786 | +} |
| 787 | + |
/// Returns `true` if `s` contains the `[name]` filename-template placeholder.
pub fn match_name_placeholder(s: &str) -> bool {
    // The placeholder is a fixed literal, so a plain substring search suffices;
    // no regex (or lazily-compiled static) is needed.
    s.contains("[name]")
}

/// Replaces every `[name]` placeholder in `s` with `name`.
///
/// Uses `str::replace` rather than `Regex::replace_all` so that `$` characters
/// in `name` are inserted literally instead of being interpreted as
/// capture-group references by the regex replacement syntax.
pub fn replace_name_placeholder(s: &str, name: &str) -> String {
    s.replace("[name]", name)
}
| 797 | + |
| 798 | +static CONTENT_HASH_PLACEHOLDER_REGEX: LazyLock<Regex> = |
| 799 | + LazyLock::new(|| Regex::new(r"\[contenthash(?::(?P<len>\d+))?\]").unwrap()); |
| 800 | + |
| 801 | +pub fn match_content_hash_placeholder(s: &str) -> bool { |
| 802 | + CONTENT_HASH_PLACEHOLDER_REGEX.is_match(s) |
| 803 | +} |
| 804 | + |
| 805 | +pub fn replace_content_hash_placeholder(s: &str, hash: &str) -> String { |
| 806 | + CONTENT_HASH_PLACEHOLDER_REGEX |
| 807 | + .replace_all(s, |caps: ®ex::Captures| { |
| 808 | + let len = caps.name("len").map(|m| m.as_str()).unwrap_or(""); |
| 809 | + let len = if len.is_empty() { |
| 810 | + hash.len() |
| 811 | + } else { |
| 812 | + len.parse().unwrap_or(hash.len()) |
| 813 | + }; |
| 814 | + let len = min(len, hash.len()); |
| 815 | + hash[..len].to_string() |
| 816 | + }) |
| 817 | + .to_string() |
| 818 | +} |
0 commit comments