diff --git a/packages/nx-extensions/src/executors/package-json/executor.ts b/packages/nx-extensions/src/executors/package-json/executor.ts
index 4ddd632577..b18a31fa9d 100644
--- a/packages/nx-extensions/src/executors/package-json/executor.ts
+++ b/packages/nx-extensions/src/executors/package-json/executor.ts
@@ -44,6 +44,17 @@ export default async function* packageJsonExecutor(
 	}
 	const monorepoDependencies = getMonorepoDependencies(context);
+
+	// Read optional dependencies from the original package.json
+	let originalOptionalDependencies: Record<string, string> | undefined;
+	const originalPackageJsonPath = `${context.root}/package.json`;
+	if (fs.existsSync(originalPackageJsonPath)) {
+		const originalPackageJson = JSON.parse(
+			fs.readFileSync(originalPackageJsonPath).toString()
+		);
+		originalOptionalDependencies = originalPackageJson.optionalDependencies;
+	}
+
 	for await (const event of startBuild(options, context)) {
 		if (!event.success) {
 			throw 'There was an error with the build. See above.';
 		}
@@ -54,7 +65,8 @@ export default async function* packageJsonExecutor(
 			options,
 			context,
 			helperDependencies,
-			monorepoDependencies
+			monorepoDependencies,
+			originalOptionalDependencies
 		);
 		if (built === false) {
 			return {
@@ -87,7 +99,8 @@ async function buildPackageJson(
 	options: PackageJsonExecutorSchema,
 	context: ExecutorContext,
 	helperDependencies: ProjectGraphDependency[],
-	monorepoDependencies: MonorepoDependency[]
+	monorepoDependencies: MonorepoDependency[],
+	originalOptionalDependencies?: Record<string, string>
 ) {
 	const packageJson = createPackageJson(
 		context.projectName,
@@ -118,6 +131,21 @@ async function buildPackageJson(
 		packageJson.dependencies[dep.name] = dep.version;
 	}
+
+	// Preserve optionalDependencies from the original package.json
+	if (originalOptionalDependencies) {
+		packageJson.optionalDependencies = originalOptionalDependencies;
+
+		// Remove optional dependencies from regular dependencies to avoid duplication
+		for (const optionalDep of Object.keys(originalOptionalDependencies)) {
+			if (
+				packageJson.dependencies &&
+				packageJson.dependencies[optionalDep]
+			) {
+				delete packageJson.dependencies[optionalDep];
+			}
+		}
+	}
 
 	// make main relative to context root
 	if (main.startsWith(context.root)) {
 		main = main.substring(context.root.length).replace(/^\//, '');
diff --git a/packages/playground/cli/src/blueprints-v1/download.ts b/packages/playground/cli/src/blueprints-v1/download.ts
index 9ae32b9297..5a601b3c0e 100644
--- a/packages/playground/cli/src/blueprints-v1/download.ts
+++ b/packages/playground/cli/src/blueprints-v1/download.ts
@@ -5,6 +5,12 @@ import path, { basename } from 'path';
 
 export const CACHE_FOLDER = path.join(os.homedir(), '.wordpress-playground');
 
+interface CacheMetadata {
+	etag?: string;
+	lastModified?: string;
+	downloadedAt: number;
+}
+
 export async function fetchSqliteIntegration(
 	monitor: EmscriptenDownloadMonitor
 ) {
@@ -16,30 +22,95 @@ export async function fetchSqliteIntegration(
 	return sqliteZip;
 }
 
-// @TODO: Support HTTP cache, invalidate the local file if the remote file has
-// changed
 export async function cachedDownload(
 	remoteUrl: string,
 	cacheKey: string,
 	monitor: EmscriptenDownloadMonitor
 ) {
 	const artifactPath = path.join(CACHE_FOLDER, cacheKey);
-	if (!fs.existsSync(artifactPath)) {
-		fs.ensureDirSync(CACHE_FOLDER);
-		await downloadTo(remoteUrl, artifactPath, monitor);
+	const metadataPath = path.join(CACHE_FOLDER, `${cacheKey}.metadata.json`);
+
+	// Check if file exists and if it needs to be re-downloaded
+	if (fs.existsSync(artifactPath)) {
+		try {
+			// Load existing metadata for conditional request headers
+			let metadata: CacheMetadata | null = null;
+			if (fs.existsSync(metadataPath)) {
+				try {
+					metadata = JSON.parse(
+						fs.readFileSync(metadataPath, 'utf-8')
+					);
+				} catch {
+					// If metadata is corrupted, re-download
+					metadata = null;
+				}
+			}
+
+			// Use fetch with conditional headers - it will automatically handle
+			// If-None-Match and If-Modified-Since based on provided headers
+			const headers: Record<string, string> = {};
+			if (metadata?.etag) {
+				headers['If-None-Match'] = metadata.etag;
+			}
+			if (metadata?.lastModified) {
+				headers['If-Modified-Since'] = metadata.lastModified;
+			}
+
+			// Make a conditional request
+			const response = await fetch(remoteUrl, {
+				method: 'HEAD',
+				headers,
+				cache: 'no-cache',
+			});
+
+			// If we get 304 Not Modified, use the cached version
+			if (response.status === 304) {
+				return readAsFile(artifactPath);
+			}
+
+			// If we have headers but no 304, the file has changed
+			if (metadata && (metadata.etag || metadata.lastModified)) {
+				// File has changed, will re-download below
+			} else {
+				// No cache headers available, fall back to time-based check
+				// Re-download if file is older than 24 hours
+				const oneDayAgo = Date.now() - 24 * 60 * 60 * 1000;
+				if (metadata && metadata.downloadedAt >= oneDayAgo) {
+					return readAsFile(artifactPath);
+				}
+			}
+		} catch (error) {
+			// If we can't check the remote file (network issue, etc.),
+			// use the cached version if it exists
+			console.warn(
+				'Unable to check remote file, using cached version:',
+				error
+			);
+			return readAsFile(artifactPath);
+		}
 	}
+
+	fs.ensureDirSync(CACHE_FOLDER);
+	await downloadTo(remoteUrl, artifactPath, metadataPath, monitor);
 	return readAsFile(artifactPath);
 }
 
 async function downloadTo(
 	remoteUrl: string,
 	localPath: string,
+	metadataPath: string,
 	monitor: EmscriptenDownloadMonitor
 ) {
 	const response = await monitor.monitorFetch(fetch(remoteUrl));
+
+	// Extract cache headers for metadata
+	const etag = response.headers.get('etag');
+	const lastModified = response.headers.get('last-modified');
+
 	const reader = response.body!.getReader();
 	const tmpPath = `${localPath}.partial`;
 	const writer = fs.createWriteStream(tmpPath);
+
 	while (true) {
 		const { done, value } = await reader.read();
 		if (value) {
@@ -49,11 +120,24 @@ async function downloadTo(
 			break;
 		}
 	}
+
 	writer.close();
 	if (!writer.closed) {
 		await new Promise((resolve, reject) => {
 			writer.on('finish', () => {
 				fs.renameSync(tmpPath, localPath);
+
+				// Save metadata
+				const metadata: CacheMetadata = {
+					downloadedAt: Date.now(),
+				};
+				if (etag) metadata.etag = etag;
+				if (lastModified) metadata.lastModified = lastModified;
+
+				fs.writeFileSync(
+					metadataPath,
+					JSON.stringify(metadata, null, 2)
+				);
 				resolve(null);
 			});
 			writer.on('error', (err: any) => {
@@ -65,5 +149,8 @@ async function downloadTo(
 }
 
 export function readAsFile(path: string, fileName?: string): File {
-	return new File([fs.readFileSync(path)], fileName ?? basename(path));
+	const buffer = fs.readFileSync(path);
+	// Convert Buffer to Uint8Array to fix the linter error
+	const uint8Array = new Uint8Array(buffer);
+	return new File([uint8Array], fileName ?? basename(path));
 }