|
| 1 | +use std::{cmp::min, sync::LazyLock}; |
| 2 | + |
1 | 3 | use anyhow::{Context, Result, bail}; |
| 4 | +use qstring::QString; |
| 5 | +use regex::Regex; |
2 | 6 | use serde::{Deserialize, Serialize}; |
3 | 7 | use tracing::Instrument; |
4 | 8 | use turbo_rcstr::{RcStr, rcstr}; |
@@ -174,6 +178,16 @@ impl BrowserChunkingContextBuilder { |
174 | 178 | self |
175 | 179 | } |
176 | 180 |
|
    /// Sets the filename template applied to evaluate chunks
    /// (stored in `BrowserChunkingContext::filename`).
    pub fn filename(mut self, filename: RcStr) -> Self {
        self.chunking_context.filename = Some(filename);
        self
    }
| 185 | + |
    /// Sets the filename template applied to non-evaluate chunks
    /// (stored in `BrowserChunkingContext::chunk_filename`).
    pub fn chunk_filename(mut self, chunk_filename: RcStr) -> Self {
        self.chunking_context.chunk_filename = Some(chunk_filename);
        self
    }
| 190 | + |
177 | 191 | pub fn build(self) -> Vc<BrowserChunkingContext> { |
178 | 192 | BrowserChunkingContext::cell(self.chunking_context) |
179 | 193 | } |
@@ -238,6 +252,10 @@ pub struct BrowserChunkingContext { |
238 | 252 | export_usage: Option<ResolvedVc<ExportUsageInfo>>, |
239 | 253 | /// The chunking configs |
240 | 254 | chunking_configs: Vec<(ResolvedVc<Box<dyn ChunkType>>, ChunkingConfig)>, |
| 255 | + /// Evaluate chunk filename template |
| 256 | + filename: Option<RcStr>, |
| 257 | + /// Non evaluate chunk filename template |
| 258 | + chunk_filename: Option<RcStr>, |
241 | 259 | } |
242 | 260 |
|
243 | 261 | impl BrowserChunkingContext { |
@@ -277,6 +295,8 @@ impl BrowserChunkingContext { |
277 | 295 | module_id_strategy: ResolvedVc::upcast(DevModuleIdStrategy::new_resolved()), |
278 | 296 | export_usage: None, |
279 | 297 | chunking_configs: Default::default(), |
| 298 | + filename: Default::default(), |
| 299 | + chunk_filename: Default::default(), |
280 | 300 | }, |
281 | 301 | } |
282 | 302 | } |
@@ -422,32 +442,76 @@ impl ChunkingContext for BrowserChunkingContext { |
422 | 442 | extension.starts_with("."), |
423 | 443 | "`extension` should include the leading '.', got '{extension}'" |
424 | 444 | ); |
425 | | - let root_path = self.chunk_root_path.clone(); |
426 | | - let name = match self.content_hashing { |
427 | | - None => { |
428 | | - ident |
429 | | - .output_name(self.root_path.clone(), extension) |
430 | | - .owned() |
431 | | - .await? |
432 | | - } |
433 | | - Some(ContentHashing::Direct { length }) => { |
434 | | - let Some(asset) = asset else { |
435 | | - bail!("chunk_path requires an asset when content hashing is enabled"); |
| 445 | + |
| 446 | + let output_name = ident |
| 447 | + .output_name(self.root_path.clone(), extension.clone()) |
| 448 | + .owned() |
| 449 | + .await?; |
| 450 | + |
| 451 | + let mut filename = match asset { |
| 452 | + Some(asset) => { |
| 453 | + let ident = ident.await?; |
| 454 | + |
| 455 | + let mut evaluate = false; |
| 456 | + let mut dev_chunk_list = false; |
| 457 | + ident.modifiers.iter().for_each(|m| { |
| 458 | + if m.contains("evaluate") { |
| 459 | + evaluate = true; |
| 460 | + } |
| 461 | + if m.contains("dev chunk list") { |
| 462 | + dev_chunk_list = true; |
| 463 | + } |
| 464 | + }); |
| 465 | + let query = QString::from(ident.query.as_str()); |
| 466 | + let name = if dev_chunk_list { |
| 467 | + output_name.as_str() |
| 468 | + } else { |
| 469 | + query.get("name").unwrap_or(output_name.as_str()) |
436 | 470 | }; |
437 | | - let content = asset.content().await?; |
438 | | - if let AssetContent::File(file) = &*content { |
439 | | - let hash = hash_xxh3_hash64(&file.await?); |
440 | | - let length = length as usize; |
441 | | - format!("{hash:0length$x}{extension}").into() |
| 471 | + |
| 472 | + let filename_template = if evaluate { |
| 473 | + &self.filename |
442 | 474 | } else { |
443 | | - bail!( |
444 | | - "chunk_path requires an asset with file content when content hashing is \ |
445 | | - enabled" |
446 | | - ); |
| 475 | + &self.chunk_filename |
| 476 | + }; |
| 477 | + |
| 478 | + match filename_template { |
| 479 | + Some(filename) => { |
| 480 | + let mut filename = filename.to_string(); |
| 481 | + |
| 482 | + if match_name_placeholder(&filename) { |
| 483 | + filename = replace_name_placeholder(&filename, name); |
| 484 | + } |
| 485 | + |
| 486 | + if match_content_hash_placeholder(&filename) { |
| 487 | + let content = asset.content().await?; |
| 488 | + if let AssetContent::File(file) = &*content { |
| 489 | + let content_hash = hash_xxh3_hash64(&file.await?); |
| 490 | + filename = replace_content_hash_placeholder( |
| 491 | + &filename, |
| 492 | + &format!("{content_hash:016x}"), |
| 493 | + ); |
| 494 | + } else { |
| 495 | + bail!( |
| 496 | + "chunk_path requires an asset with file content when content \ |
| 497 | + hashing is enabled" |
| 498 | + ); |
| 499 | + } |
| 500 | + }; |
| 501 | + |
| 502 | + filename |
| 503 | + } |
| 504 | + None => name.to_string(), |
447 | 505 | } |
448 | 506 | } |
| 507 | + None => output_name.to_string(), |
449 | 508 | }; |
450 | | - Ok(root_path.join(&name)?.cell()) |
| 509 | + |
| 510 | + if !filename.ends_with(extension.as_str()) { |
| 511 | + filename.push_str(&extension); |
| 512 | + } |
| 513 | + |
| 514 | + self.chunk_root_path.join(&filename).map(|p| p.cell()) |
451 | 515 | } |
452 | 516 |
|
453 | 517 | #[turbo_tasks::function] |
@@ -739,3 +803,40 @@ impl ChunkingContext for BrowserChunkingContext { |
739 | 803 | } |
740 | 804 | } |
741 | 805 | } |
| 806 | + |
/// Strips everything up to and including the last separator character
/// (`/`, `#`, or `?`) on each line of `s`, keeping only the trailing
/// segment. Lines without a separator are returned unchanged; newlines
/// are preserved.
///
/// Equivalent to replacing the regex `.*[/#?]` with `""`: because `.`
/// does not match `\n` and the match is greedy, each line is consumed
/// through its *last* separator. Plain string scanning avoids compiling
/// and running a regex for this purely literal task.
pub fn clean_separators(s: &str) -> String {
    s.split('\n')
        .map(|line| match line.rfind(|c| c == '/' || c == '#' || c == '?') {
            Some(idx) => &line[idx + 1..],
            None => line,
        })
        .collect::<Vec<_>>()
        .join("\n")
}
| 811 | + |
/// The literal `[name]` placeholder recognized in filename templates.
const NAME_PLACEHOLDER: &str = "[name]";

/// Returns true if `s` contains the `[name]` placeholder.
pub fn match_name_placeholder(s: &str) -> bool {
    s.contains(NAME_PLACEHOLDER)
}

/// Replaces every `[name]` placeholder in `s` with `name`.
///
/// Uses `str::replace` instead of `Regex::replace_all`: the pattern is a
/// fixed literal, and a regex `&str` replacer would additionally expand
/// `$`-sequences in `name` as capture-group references, corrupting the
/// output for names that contain `$`.
pub fn replace_name_placeholder(s: &str, name: &str) -> String {
    s.replace(NAME_PLACEHOLDER, name)
}
| 821 | + |
/// Matches the `[contenthash]` placeholder, optionally with a length
/// suffix (`[contenthash:8]`); the digits are captured in the `len` group.
static CONTENT_HASH_PLACEHOLDER_REGEX: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"\[contenthash(?::(?P<len>\d+))?\]").unwrap());
| 824 | + |
| 825 | +pub fn match_content_hash_placeholder(s: &str) -> bool { |
| 826 | + CONTENT_HASH_PLACEHOLDER_REGEX.is_match(s) |
| 827 | +} |
| 828 | + |
| 829 | +pub fn replace_content_hash_placeholder(s: &str, hash: &str) -> String { |
| 830 | + CONTENT_HASH_PLACEHOLDER_REGEX |
| 831 | + .replace_all(s, |caps: ®ex::Captures| { |
| 832 | + let len = caps.name("len").map(|m| m.as_str()).unwrap_or(""); |
| 833 | + let len = if len.is_empty() { |
| 834 | + hash.len() |
| 835 | + } else { |
| 836 | + len.parse().unwrap_or(hash.len()) |
| 837 | + }; |
| 838 | + let len = min(len, hash.len()); |
| 839 | + hash[..len].to_string() |
| 840 | + }) |
| 841 | + .to_string() |
| 842 | +} |
0 commit comments