diff --git a/build/gulpfile.vscode.js b/build/gulpfile.vscode.js index 3cc7ff023900..4f95a33f95a9 100644 --- a/build/gulpfile.vscode.js +++ b/build/gulpfile.vscode.js @@ -355,7 +355,6 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op api, // --- Start Positron --- positronApi, - getQuartoBinaries(), moduleSources, // --- End Positron --- telemetry, @@ -363,6 +362,14 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op deps ); + /// --- Start Positron --- + // The Quarto binaries are not available for Windows ARM builds, but are + // for all other platforms/architectures + if (!(platform === 'win32' && arch === 'arm64')) { + all = es.merge(all, getQuartoBinaries()); + } + // --- End Positron --- + if (platform === 'win32') { all = es.merge(all, gulp.src([ 'resources/win32/bower.ico', diff --git a/build/lib/extensions.js b/build/lib/extensions.js index 1a4956d9343a..e345f7ece224 100644 --- a/build/lib/extensions.js +++ b/build/lib/extensions.js @@ -172,14 +172,6 @@ function fromLocalWebpack(extensionPath, webpackConfigFileName, disableMangle) { // --- Start PWB: from Positron --- // Replace vsce.listFiles with listExtensionFiles to queue the work listExtensionFiles({ cwd: extensionPath, packageManager: packageManger, packagedDependencies }).then(fileNames => { - const files = fileNames - .map(fileName => path_1.default.join(extensionPath, fileName)) - .map(filePath => new vinyl_1.default({ - path: filePath, - stat: fs_1.default.statSync(filePath), - base: extensionPath, - contents: fs_1.default.createReadStream(filePath) - })); // check for a webpack configuration files, then invoke webpack // and merge its output with the files stream. 
const webpackConfigLocations = glob_1.default.sync(path_1.default.join(extensionPath, '**', webpackConfigFileName), { ignore: ['**/node_modules'] }); @@ -237,7 +229,8 @@ function fromLocalWebpack(extensionPath, webpackConfigFileName, disableMangle) { })); }); }); - event_stream_1.default.merge(...webpackStreams, event_stream_1.default.readArray(files)) + const localFilesStream = createSequentialFileStream(extensionPath, fileNames); + event_stream_1.default.merge(...webpackStreams, localFilesStream) // .pipe(es.through(function (data) { // // debug // console.log('out', data.path, data.contents.length); @@ -259,15 +252,7 @@ function fromLocalNormal(extensionPath) { // Replace vsce.listFiles with listExtensionFiles to queue the work listExtensionFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Npm }) .then(fileNames => { - const files = fileNames - .map(fileName => path_1.default.join(extensionPath, fileName)) - .map(filePath => new vinyl_1.default({ - path: filePath, - stat: fs_1.default.statSync(filePath), - base: extensionPath, - contents: fs_1.default.createReadStream(filePath) - })); - event_stream_1.default.readArray(files).pipe(result); + createSequentialFileStream(extensionPath, fileNames).pipe(result); }) .catch(err => result.emit('error', err)); // --- End PWB: from Positron --- @@ -787,6 +772,102 @@ async function buildExtensionMedia(isWatch, outputRoot) { }))); } // --- Start PWB: from Positron --- +/** + * Create a stream that emits files in the order of `fileNames`, one at a time, + * reading each file from disk before emitting it. + * + * This is used to serialize file reads when packaging extensions, to avoid + * running out of file descriptors (EMFILE) when building. 
+ * + * @param extensionPath The root path of the extension + * @param fileNames The list of file names to emit, relative to `extensionPath` + * @returns A stream that emits the files in order + */ +function createSequentialFileStream(extensionPath, fileNames) { + const stream = event_stream_1.default.through(); + const queue = [...fileNames]; + let ended = false; + const finish = () => { + if (!ended) { + ended = true; + stream.emit('end'); + } + }; + stream.on('close', () => { + ended = true; + queue.length = 0; + }); + stream.on('error', () => { + ended = true; + queue.length = 0; + }); + const pump = () => { + if (ended) { + return; + } + if (queue.length === 0) { + finish(); + return; + } + const relativePath = queue.shift(); + const absolutePath = path_1.default.join(extensionPath, relativePath); + let stats; + try { + stats = fs_1.default.statSync(absolutePath); + } + catch (error) { + ended = true; + queue.length = 0; + stream.emit('error', error); + return; + } + let fileStream; + try { + fileStream = fs_1.default.createReadStream(absolutePath); + } + catch (error) { + ended = true; + queue.length = 0; + stream.emit('error', error); + return; + } + let settled = false; + const cleanup = () => { + if (settled) { + return; + } + settled = true; + fileStream.removeListener('end', cleanup); + fileStream.removeListener('close', cleanup); + fileStream.removeListener('error', onError); + setImmediate(pump); + }; + const onError = (err) => { + if (settled) { + return; + } + settled = true; + fileStream.removeListener('end', cleanup); + fileStream.removeListener('close', cleanup); + fileStream.removeListener('error', onError); + ended = true; + queue.length = 0; + stream.emit('error', err); + }; + fileStream.on('end', cleanup); + fileStream.on('close', cleanup); + fileStream.on('error', onError); + const file = new vinyl_1.default({ + path: absolutePath, + stat: stats, + base: extensionPath, + contents: fileStream + }); + stream.emit('data', file); + }; + 
setImmediate(pump); + return stream; +} // Node 20 consistently crashes when there are too many `vsce.listFiles` // operations in flight at once; these operations are expensive as they recurse // back into `yarn`. The code below serializes these operations when building diff --git a/build/lib/extensions.ts b/build/lib/extensions.ts index 760ca91a1a88..74bccd2046fa 100644 --- a/build/lib/extensions.ts +++ b/build/lib/extensions.ts @@ -93,7 +93,6 @@ function fromLocal(extensionPath: string, forWeb: boolean, disableMangle: boolea return input; } - function fromLocalWebpack(extensionPath: string, webpackConfigFileName: string, disableMangle: boolean): Stream { const vsce = require('@vscode/vsce') as typeof import('@vscode/vsce'); const webpack = require('webpack'); @@ -140,15 +139,6 @@ function fromLocalWebpack(extensionPath: string, webpackConfigFileName: string, // --- Start PWB: from Positron --- // Replace vsce.listFiles with listExtensionFiles to queue the work listExtensionFiles({ cwd: extensionPath, packageManager: packageManger, packagedDependencies }).then(fileNames => { - const files = fileNames - .map(fileName => path.join(extensionPath, fileName)) - .map(filePath => new File({ - path: filePath, - stat: fs.statSync(filePath), - base: extensionPath, - contents: fs.createReadStream(filePath) as any - })); - // check for a webpack configuration files, then invoke webpack // and merge its output with the files stream. 
const webpackConfigLocations = (glob.sync( @@ -215,7 +205,9 @@ function fromLocalWebpack(extensionPath: string, webpackConfigFileName: string, }); }); - es.merge(...webpackStreams, es.readArray(files)) + const localFilesStream = createSequentialFileStream(extensionPath, fileNames); + + es.merge(...webpackStreams, localFilesStream) // .pipe(es.through(function (data) { // // debug // console.log('out', data.path, data.contents.length); @@ -241,16 +233,7 @@ function fromLocalNormal(extensionPath: string): Stream { // Replace vsce.listFiles with listExtensionFiles to queue the work listExtensionFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Npm }) .then(fileNames => { - const files = fileNames - .map(fileName => path.join(extensionPath, fileName)) - .map(filePath => new File({ - path: filePath, - stat: fs.statSync(filePath), - base: extensionPath, - contents: fs.createReadStream(filePath) as any - })); - - es.readArray(files).pipe(result); + createSequentialFileStream(extensionPath, fileNames).pipe(result); }) .catch(err => result.emit('error', err)); // --- End PWB: from Positron --- @@ -266,6 +249,7 @@ const baseHeaders = { }; // --- Start Positron --- + function getPlatformDownloads(bootstrap: boolean): string[] { // return both architectures for mac universal installer if (bootstrap && process.platform === 'darwin' && !process.env['VSCODE_DEV']) { @@ -858,6 +842,115 @@ export async function buildExtensionMedia(isWatch: boolean, outputRoot?: string) // --- Start PWB: from Positron --- +/** + * Create a stream that emits files in the order of `fileNames`, one at a time, + * reading each file from disk before emitting it. + * + * This is used to serialize file reads when packaging extensions, to avoid + * running out of file descriptors (EMFILE) when building. 
+ * + * @param extensionPath The root path of the extension + * @param fileNames The list of file names to emit, relative to `extensionPath` + * @returns A stream that emits the files in order + */ +function createSequentialFileStream(extensionPath: string, fileNames: string[]): Stream { + const stream = es.through(); + const queue = [...fileNames]; + let ended = false; + + const finish = () => { + if (!ended) { + ended = true; + stream.emit('end'); + } + }; + + stream.on('close', () => { + ended = true; + queue.length = 0; + }); + + stream.on('error', () => { + ended = true; + queue.length = 0; + }); + + const pump = () => { + if (ended) { + return; + } + if (queue.length === 0) { + finish(); + return; + } + + const relativePath = queue.shift()!; + const absolutePath = path.join(extensionPath, relativePath); + let stats: fs.Stats; + try { + stats = fs.statSync(absolutePath); + } catch (error) { + ended = true; + queue.length = 0; + stream.emit('error', error); + return; + } + + let fileStream: fs.ReadStream; + try { + fileStream = fs.createReadStream(absolutePath); + } catch (error) { + ended = true; + queue.length = 0; + stream.emit('error', error); + return; + } + + let settled = false; + const cleanup = () => { + if (settled) { + return; + } + settled = true; + fileStream.removeListener('end', cleanup); + fileStream.removeListener('close', cleanup); + fileStream.removeListener('error', onError); + setImmediate(pump); + }; + + const onError = (err: Error) => { + if (settled) { + return; + } + settled = true; + fileStream.removeListener('end', cleanup); + fileStream.removeListener('close', cleanup); + fileStream.removeListener('error', onError); + ended = true; + queue.length = 0; + stream.emit('error', err); + }; + + fileStream.on('end', cleanup); + fileStream.on('close', cleanup); + fileStream.on('error', onError); + + const file = new File({ + path: absolutePath, + stat: stats, + base: extensionPath, + contents: fileStream as any + }); + + stream.emit('data', 
file); + }; + + setImmediate(pump); + + return stream; +} + + // Node 20 consistently crashes when there are too many `vsce.listFiles` // operations in flight at once; these operations are expensive as they recurse // back into `yarn`. The code below serializes these operations when building diff --git a/extensions/positron-assistant/scripts/install-copilot-language-server.ts b/extensions/positron-assistant/scripts/install-copilot-language-server.ts index 9e6a13ae395f..3357b64fe9b7 100644 --- a/extensions/positron-assistant/scripts/install-copilot-language-server.ts +++ b/extensions/positron-assistant/scripts/install-copilot-language-server.ts @@ -64,7 +64,10 @@ async function main() { // On macOS, we need both the x64 and arm64 versions of the language // server. By default npm just installs the one for the current CPU // architecture. - if (platform() === 'darwin') { + // + // We also need to force the platform on windows since there is no arm64 + // build of the copilot language server yet. 
+ if (platform() === 'darwin' || platform() === 'win32') { console.log(`Installing ${packageName} (${targetArch})...`); // Use --force to prevent npm from blocking the installation due to diff --git a/extensions/positron-copilot-chat b/extensions/positron-copilot-chat index 7d7d253c8e09..24cbd9beab35 160000 --- a/extensions/positron-copilot-chat +++ b/extensions/positron-copilot-chat @@ -1 +1 @@ -Subproject commit 7d7d253c8e09cc107c11ab607561385b9aa63ff9 +Subproject commit 24cbd9beab355f15da2222c16c1808b606ef2f57 diff --git a/extensions/positron-r/package.json b/extensions/positron-r/package.json index 1ce168f3a3b3..7fe85070cfae 100644 --- a/extensions/positron-r/package.json +++ b/extensions/positron-r/package.json @@ -974,7 +974,7 @@ }, "positron": { "binaryDependencies": { - "ark": "0.1.213" + "ark": "0.1.214" }, "minimumRVersion": "4.2.0", "minimumRenvVersion": "1.0.9" diff --git a/extensions/positron-r/scripts/install-kernel.ts b/extensions/positron-r/scripts/install-kernel.ts index 3f6574bd8de9..b902a6bc9f3d 100644 --- a/extensions/positron-r/scripts/install-kernel.ts +++ b/extensions/positron-r/scripts/install-kernel.ts @@ -21,13 +21,115 @@ const mkdtempAsync = promisify(fs.mkdtemp); // Create a promisified version of https.get. We can't use the built-in promisify // because the callback doesn't follow the promise convention of (error, result). -const httpsGetAsync = (opts: https.RequestOptions) => { +const httpsGetAsync = (opts: https.RequestOptions | string | URL) => { return new Promise((resolve, reject) => { const req = https.get(opts, resolve); req.once('error', reject); }); }; +const readResponseBody = async (response: IncomingMessage): Promise => { + return await new Promise((resolve, reject) => { + const chunks: Buffer[] = []; + response.on('data', chunk => { + chunks.push(typeof chunk === 'string' ? 
Buffer.from(chunk) : chunk); + }); + response.once('end', () => resolve(Buffer.concat(chunks))); + response.once('error', reject); + }); +}; + +/** + * Information about a specific Ark asset to download. + */ +interface ArkAssetTarget { + /// The suffix of the asset file to download. + readonly assetSuffix: string; + + /// An optional subdirectory into which the asset should be extracted. + readonly subdirectory?: string; + + /// A human-readable label for the asset. + readonly label: string; +} + +const redirectStatusCodes = new Set([301, 302, 307, 308]); + +type NodeArch = ReturnType; + +/** + * Get the download targets for a specific platform and architecture. + * + * @param currentPlatform The current platform (e.g., 'win32', 'darwin', 'linux'). + * @param currentArch The desired architecture (e.g., 'x64', 'arm64'). + * + * @returns An array of ArkAssetTarget objects representing the download targets. + */ +function getDownloadTargets(currentPlatform: NodeJS.Platform, currentArch: NodeArch): ArkAssetTarget[] { + switch (currentPlatform) { + case 'win32': + // On Windows, we always download both the x64 and arm64 builds, since + // Windows on ARM can run x64 binaries via emulation. + return [ + { assetSuffix: 'windows-arm64', subdirectory: 'windows-arm64', label: 'Windows ARM64' }, + { assetSuffix: 'windows-x64', subdirectory: 'windows-x64', label: 'Windows x64' } + ]; + case 'darwin': + // Use the universal binary on macOS. + return [{ assetSuffix: 'darwin-universal', label: 'macOS Universal' }]; + case 'linux': + // On Linux, we only download the build for the current architecture. + return [{ + assetSuffix: currentArch === 'arm64' ? 'linux-arm64' : 'linux-x64', + label: currentArch === 'arm64' ? 'Linux ARM64' : 'Linux x64' + }]; + default: + throw new Error(`Unsupported platform ${currentPlatform}.`); + } +} + +/** + * Build the request options for downloading an Ark asset. + * + * @param url The URL of the asset to download. 
+ * @param headers The headers to include in the request. + * @returns The request options for the HTTPS request. + */ +function buildRequestOptions(url: URL, headers: Record): https.RequestOptions { + return { + headers, + method: 'GET', + protocol: url.protocol, + hostname: url.hostname, + path: `${url.pathname}${url.search}` + }; +} + +/** + * Downloads a release asset from GitHub. + * + * @param assetUrl The URL of the asset to download. + * @param headers The headers to include in the request. + * @returns The downloaded asset as a Buffer. + */ +async function downloadReleaseAsset(assetUrl: string, headers: Record): Promise { + let requestUrl = new URL(assetUrl); + let response = await httpsGetAsync(buildRequestOptions(requestUrl, headers)); + while (response.statusCode && redirectStatusCodes.has(response.statusCode)) { + const location = response.headers.location; + if (!location) { + throw new Error('Redirect response missing Location header while downloading Ark asset.'); + } + requestUrl = new URL(location); + response = await httpsGetAsync(buildRequestOptions(requestUrl, headers)); + } + if (response.statusCode !== 200) { + const body = await readResponseBody(response); + throw new Error(`Failed to download Ark: HTTP ${response.statusCode}\n\n${body.toString('utf-8')}`); + } + return await readResponseBody(response); +} + /** * Gets the version of Ark specified in package.json. * @@ -107,120 +209,81 @@ async function downloadAndReplaceArk(version: string, githubPat: string | undefined): Promise { try { - const headers: Record = { + const baseHeaders: Record = { 'Accept': 'application/vnd.github.v3.raw', // eslint-disable-line 'User-Agent': 'positron-ark-downloader' // eslint-disable-line }; - // If we have a githubPat, set it for better rate limiting. 
if (githubPat) { - headers.Authorization = `token ${githubPat}`; + baseHeaders.Authorization = `token ${githubPat}`; } - const requestOptions: https.RequestOptions = { - headers, + + const releasesResponse = await httpsGetAsync({ + headers: baseHeaders, method: 'GET', protocol: 'https:', hostname: 'api.github.com', - path: `/repos/posit-dev/ark/releases` - }; - - const response = await httpsGetAsync(requestOptions as any) as any; - - let responseBody = ''; + path: '/repos/posit-dev/ark/releases' + } as https.RequestOptions); + const releasesBuffer = await readResponseBody(releasesResponse); + if (releasesResponse.statusCode !== 200) { + throw new Error(`Failed to download Ark: HTTP ${releasesResponse.statusCode}\n\n${releasesBuffer.toString('utf-8')}`); + } - response.on('data', (chunk: any) => { - responseBody += chunk; - }); + const releases = JSON.parse(releasesBuffer.toString('utf-8')); + if (!Array.isArray(releases)) { + throw new Error(`Unexpected response from Github:\n\n${releasesBuffer.toString('utf-8')}`); + } + const release = releases.find((asset: any) => asset.tag_name === version); + if (!release) { + throw new Error(`Could not find Ark ${version} in the releases.`); + } - response.on('end', async () => { - if (response.statusCode !== 200) { - throw new Error(`Failed to download Ark: HTTP ${response.statusCode}\n\n` + - `${responseBody}`); - } - const releases = JSON.parse(responseBody); - if (!Array.isArray(releases)) { - throw new Error(`Unexpected response from Github:\n\n` + - `${responseBody}`); - } - const release = releases.find((asset: any) => asset.tag_name === version); - if (!release) { - throw new Error(`Could not find Ark ${version} in the releases.`); - } + const currentPlatform = platform() as NodeJS.Platform; + const currentArch = arch(); + const targets = getDownloadTargets(currentPlatform, currentArch); + const arkDir = path.join('resources', 'ark'); + await fs.promises.mkdir(arkDir, { recursive: true }); - let os: string; - switch 
(platform()) { - case 'win32': os = 'windows-x64'; break; - case 'darwin': os = 'darwin-universal'; break; - case 'linux': os = (arch() === 'arm64' ? 'linux-arm64' : 'linux-x64'); break; - default: { - throw new Error(`Unsupported platform ${platform()}.`); - } + if (currentPlatform === 'win32') { + const legacyKernelPath = path.join(arkDir, 'ark.exe'); + if (await existsAsync(legacyKernelPath)) { + await fs.promises.unlink(legacyKernelPath); } + } - const assetName = `ark-${version}-${os}.zip`; - const asset = release.assets.find((asset: any) => asset.name === assetName); + for (const target of targets) { + const assetName = `ark-${version}-${target.assetSuffix}.zip`; + const asset = release.assets?.find((item: any) => item.name === assetName); if (!asset) { throw new Error(`Could not find Ark with asset name ${assetName} in the release.`); } - console.log(`Downloading Ark ${version} from ${asset.url}...`); - const url = new URL(asset.url); - // Reset the Accept header to download the asset. - headers.Accept = 'application/octet-stream'; - const requestOptions: https.RequestOptions = { - headers, - method: 'GET', - protocol: url.protocol, - hostname: url.hostname, - path: url.pathname + + console.log(`Downloading Ark ${version} (${target.label}) from ${asset.url}...`); + const assetHeaders = { + ...baseHeaders, + Accept: 'application/octet-stream' }; + const binaryData = await downloadReleaseAsset(asset.url, assetHeaders); - let dlResponse = await httpsGetAsync(requestOptions) as any; - while (dlResponse.statusCode === 302) { - // Follow redirects. - dlResponse = await httpsGetAsync(dlResponse.headers.location) as any; + if (binaryData.length < 1024) { + console.error(binaryData.toString('utf-8')); + throw new Error(`Binary data is too small (${binaryData.length} bytes); download probably failed.`); } - let binaryData = Buffer.alloc(0); - // Ensure we got a 200 response on the final request. 
- if (dlResponse.statusCode !== 200) { - throw new Error(`Failed to download Ark: HTTP ${dlResponse.statusCode}`); + const targetDir = target.subdirectory ? path.join(arkDir, target.subdirectory) : arkDir; + if (target.subdirectory) { + await fs.promises.rm(targetDir, { recursive: true, force: true }); } + await fs.promises.mkdir(targetDir, { recursive: true }); - dlResponse.on('data', (chunk: any) => { - binaryData = Buffer.concat([binaryData, chunk]); - }); - dlResponse.on('end', async () => { - const arkDir = path.join('resources', 'ark'); - - // Ensure we got some bytes. Less than 1024 bytes is probably - // an error; none of our assets are under 1mb - if (binaryData.length < 1024) { - // Log the data we did get - console.error(binaryData.toString('utf-8')); - throw new Error( - `Binary data is too small (${binaryData.length} bytes); download probably failed.`); - } - - // Create the resources/ark directory if it doesn't exist. - if (!await existsAsync(arkDir)) { - await fs.promises.mkdir(arkDir); - } - - console.log(`Successfully downloaded Ark ${version} (${binaryData.length} bytes).`); - const zipFileDest = path.join(arkDir, 'ark.zip'); - await writeFileAsync(zipFileDest, binaryData); - - await decompress(zipFileDest, arkDir).then(files => { - console.log(`Successfully unzipped Ark ${version}.`); - }); - - // Clean up the zipfile. - await fs.promises.unlink(zipFileDest); - - // Write a VERSION file with the version number. 
- await writeFileAsync(path.join('resources', 'ark', 'VERSION'), version); - - }); - }); + const zipFileDest = path.join(targetDir, 'ark.zip'); + await writeFileAsync(zipFileDest, binaryData); + await decompress(zipFileDest, targetDir); + await fs.promises.unlink(zipFileDest); + console.log(`Successfully installed Ark ${version} (${target.label}).`); + } + + await writeFileAsync(path.join('resources', 'ark', 'VERSION'), version); } catch (error) { throw new Error(`Error downloading Ark: ${error}`); } @@ -297,12 +360,18 @@ async function downloadFromGitHubRepository( // Create the resources/ark directory if it doesn't exist const arkDir = path.join('resources', 'ark'); - if (!await existsAsync(arkDir)) { - await fs.promises.mkdir(arkDir, { recursive: true }); - } + await fs.promises.mkdir(arkDir, { recursive: true }); - // Copy the binary to the resources directory + // Copy the binary to the resources directory (root) so packaging picks it up await fs.promises.copyFile(binaryPath, path.join(arkDir, kernelName)); + + // On Windows, also place the binary inside the architecture-specific subdirectory + if (platform() === 'win32') { + const windowsSubdir = process.arch === 'arm64' ? 
'windows-arm64' : 'windows-x64'; + const targetDir = path.join(arkDir, windowsSubdir); + await fs.promises.mkdir(targetDir, { recursive: true }); + await fs.promises.copyFile(binaryPath, path.join(targetDir, kernelName)); + } console.log(`Successfully built and installed Ark from ${org}/${repo}@${revision}`); // Write the version information to VERSION file diff --git a/extensions/positron-r/src/kernel-spec.ts b/extensions/positron-r/src/kernel-spec.ts index 8907175df17e..a636c7eb27e3 100644 --- a/extensions/positron-r/src/kernel-spec.ts +++ b/extensions/positron-r/src/kernel-spec.ts @@ -18,6 +18,7 @@ import { EXTENSION_ROOT_DIR } from './constants'; * @param rHomePath The R_HOME path for the R version * @param runtimeName The (display) name of the runtime * @param sessionMode The mode in which to create the session + * @param options Additional options: specifically, the R binary path and architecture * * @returns A JupyterKernelSpec definining the kernel's path, arguments, and * metadata. 
@@ -25,10 +26,15 @@ import { EXTENSION_ROOT_DIR } from './constants'; export function createJupyterKernelSpec( rHomePath: string, runtimeName: string, - sessionMode: positron.LanguageRuntimeSessionMode): JupyterKernelSpec { + sessionMode: positron.LanguageRuntimeSessionMode, + options?: { rBinaryPath?: string; rArchitecture?: string }): JupyterKernelSpec { // Path to the kernel executable - const kernelPath = getArkKernelPath(); + const kernelPath = getArkKernelPath({ + rBinaryPath: options?.rBinaryPath, + rHomePath, + rArch: options?.rArchitecture + }); if (!kernelPath) { throw new Error('Unable to find R kernel'); } diff --git a/extensions/positron-r/src/kernel.ts b/extensions/positron-r/src/kernel.ts index 469fd67d4f79..960357897767 100644 --- a/extensions/positron-r/src/kernel.ts +++ b/extensions/positron-r/src/kernel.ts @@ -1,13 +1,31 @@ /*--------------------------------------------------------------------------------------------- - * Copyright (C) 2023-2024 Posit Software, PBC. All rights reserved. + * Copyright (C) 2023-2025 Posit Software, PBC. All rights reserved. * Licensed under the Elastic License 2.0. See LICENSE.txt for license information. *--------------------------------------------------------------------------------------------*/ +import * as fs from 'fs'; import * as os from 'os'; +import * as path from 'path'; import * as vscode from 'vscode'; import { LOGGER } from './extension'; import { EXTENSION_ROOT_DIR } from './constants'; +/** + * Options that help locate the Ark kernel binary. + */ +interface ArkKernelLookupOptions { + /// The path to the R binary, if known. + readonly rBinaryPath?: string; + + /// The R_HOME path, if known. + readonly rHomePath?: string; + + /// The architecture of the R binary, if known. + readonly rArch?: string; +} + +type WindowsKernelArch = 'arm64' | 'x64'; + /** * Attempts to locate a copy of the Ark kernel. 
The kernel is searched for in the following * locations, in order: @@ -17,10 +35,10 @@ import { EXTENSION_ROOT_DIR } from './constants'; * 3. A locally built kernel (development builds for kernel developers). * 4. A local, downloaded copy of the kernel (development builds for everyone else). * - * @param context The extension context. + * @param options Additional hints that help resolve the correct kernel path. * @returns A path to the Ark kernel, or undefined if the kernel could not be found. */ -export function getArkKernelPath(): string | undefined { +export function getArkKernelPath(options?: ArkKernelLookupOptions): string | undefined { // First, check to see whether there is an override for the kernel path. const arkConfig = vscode.workspace.getConfiguration('positron.r'); @@ -30,18 +48,16 @@ export function getArkKernelPath(): string | undefined { } const kernelName = os.platform() === 'win32' ? 'ark.exe' : 'ark'; - const path = require('path'); - const fs = require('fs'); // Look for locally built Debug or Release kernels. If both exist, we'll use // whichever is newest. This is the location where the kernel is typically built // by developers, who have `positron` and `ark` directories side-by-side. - let devKernel = undefined; + let devKernel: string | undefined; const positronParent = path.dirname(path.dirname(path.dirname(EXTENSION_ROOT_DIR))); const devDebugKernel = path.join(positronParent, 'ark', 'target', 'debug', kernelName); const devReleaseKernel = path.join(positronParent, 'ark', 'target', 'release', kernelName); - const debugModified = fs.statSync(devDebugKernel, { throwIfNoEntry: false })?.mtime; - const releaseModified = fs.statSync(devReleaseKernel, { throwIfNoEntry: false })?.mtime; + const debugModified = safeStatSync(devDebugKernel)?.mtime; + const releaseModified = safeStatSync(devReleaseKernel)?.mtime; if (debugModified) { devKernel = (releaseModified && releaseModified > debugModified) ? 
devReleaseKernel : devDebugKernel; @@ -53,10 +69,211 @@ export function getArkKernelPath(): string | undefined { return devKernel; } - // Now try the default (embedded) kernel. This is where the kernel is placed in - // development and release builds. - const embeddedKernel = path.join(EXTENSION_ROOT_DIR, 'resources', 'ark', kernelName); - if (fs.existsSync(embeddedKernel)) { - return embeddedKernel; + const arkRoot = path.join(EXTENSION_ROOT_DIR, 'resources', 'ark'); + + if (os.platform() === 'win32') { + // On Windows, we need additional logic to locate the correct kernel + // binary since it may be in a subdirectory based on architecture. + const embeddedKernel = resolveWindowsEmbeddedKernel(arkRoot, kernelName, options); + if (embeddedKernel) { + return embeddedKernel; + } + } else { + const embeddedKernel = path.join(arkRoot, kernelName); + if (fs.existsSync(embeddedKernel)) { + return embeddedKernel; + } + } + + return undefined; +} + +/** + * Resolves the path to the embedded Ark kernel on Windows. + * + * @param arkRoot The root directory of the Ark installation. + * @param kernelName The name of the kernel executable. + * @param options Additional options for kernel resolution. + * + * @returns The path to the embedded kernel, or undefined if not found. + */ +function resolveWindowsEmbeddedKernel( + arkRoot: string, + kernelName: string, + options?: ArkKernelLookupOptions +): string | undefined { + const preferredArch = determineWindowsKernelArch(options); + const searchOrder = getWindowsSearchOrder(preferredArch); + for (const subdir of searchOrder) { + const candidate = path.join(arkRoot, subdir, kernelName); + const stats = safeStatSync(candidate); + if (stats?.isFile()) { + return candidate; + } + } + + const fallback = path.join(arkRoot, kernelName); + if (fs.existsSync(fallback)) { + return fallback; + } + + return undefined; +} + +/** + * Determines the architecture of the Ark kernel on Windows. + * + * @param options Kernel lookup options. 
+ * @returns The architecture of the kernel, or undefined if not found. + */ +function determineWindowsKernelArch(options?: ArkKernelLookupOptions): WindowsKernelArch | undefined { + if (!options) { + return undefined; + } + + // First, see if the architecture was explicitly specified. + const normalized = normalizeWindowsArch(options.rArch); + if (normalized) { + LOGGER.debug(`Using previously detected Windows architecture: ${normalized}`); + return normalized; + } + + // If unknown, peek at the R binary, if we have one. + const sniffed = sniffWindowsBinaryArchitecture(options.rBinaryPath); + if (sniffed) { + LOGGER.debug(`Sniffed Windows architecture from R binary: ${sniffed}`); + return sniffed; + } + + // In the absence of any other information, try to derive the architecture + // from the R binary and R_HOME paths, if we have them. + const arch = deriveArchFromPaths([options.rBinaryPath, options.rHomePath]); + if (arch) { + LOGGER.debug(`Derived Windows architecture from ${options.rBinaryPath} and ${options.rHomePath}: ${arch}`); + return arch; + } +} + +/** + * Normalizes a Windows architecture string. + * + * @param value The architecture string to normalize. + * @returns The normalized architecture, or undefined if not recognized. + */ +export function normalizeWindowsArch(value: string | undefined): WindowsKernelArch | undefined { + if (!value) { + return undefined; + } + const normalized = value.toLowerCase(); + if (normalized === 'arm64' || normalized === 'aarch64') { + return 'arm64'; + } + if (normalized === 'x64' || normalized === 'x86_64' || normalized === 'amd64') { + return 'x64'; + } + return undefined; +} + +/** + * Derives the architecture of the Ark kernel from a list of paths. + * + * @param paths + * @returns The derived architecture, or undefined if not found. 
+ */
+function deriveArchFromPaths(paths: Array<string | undefined>): WindowsKernelArch | undefined {
+	for (const candidate of paths) {
+		if (!candidate) {
+			continue;
+		}
+		const normalized = candidate.toLowerCase().replace(/\\/g, '/');
+		if (/(^|\/)arm64(\/|$)/.test(normalized) || normalized.includes('-arm64')) {
+			return 'arm64';
+		}
+		if (/(^|\/)aarch64(\/|$)/.test(normalized) || normalized.includes('-aarch64')) {
+			return 'arm64';
+		}
+		if (/(^|\/)(x64|amd64)(\/|$)/.test(normalized)) {
+			return 'x64';
+		}
+	}
+	return undefined;
+}
+
+/**
+ * Get the search order for Windows kernel architectures, based on a preferred
+ * architecture.
+ */
+function getWindowsSearchOrder(preferred?: WindowsKernelArch): string[] {
+	if (preferred === 'arm64') {
+		return ['windows-arm64', 'windows-x64'];
+	}
+	if (preferred === 'x64') {
+		return ['windows-x64', 'windows-arm64'];
+	}
+	if (process.arch === 'arm64') {
+		return ['windows-arm64', 'windows-x64'];
+	}
+	return ['windows-x64', 'windows-arm64'];
+}
+
+/**
+ * Wrapper around `fs.statSync` that returns `undefined` if the path does not exist
+ * or is otherwise inaccessible.
+ *
+ * @param targetPath The path to check.
+ * @returns The file stats, or undefined if the path is inaccessible.
+ */
+function safeStatSync(targetPath: string): fs.Stats | undefined {
+	try {
+		return fs.statSync(targetPath);
+	} catch {
+		return undefined;
+	}
+}
+
+/**
+ * Sniffs the architecture of a Windows binary by examining its PE header.
+ *
+ * @param binaryPath The path to the binary file.
+ * @returns The detected architecture, or undefined if not recognized.
+ */
+export function sniffWindowsBinaryArchitecture(binaryPath?: string): WindowsKernelArch | undefined {
+	if (!binaryPath) {
+		return undefined;
+	}
+	try {
+		const fd = fs.openSync(binaryPath, 'r');
+		try {
+			// Read the PE header to determine the architecture.
+ const header = Buffer.alloc(64); + fs.readSync(fd, header, 0, header.length, 0); + const peOffset = header.readUInt32LE(0x3C); + const peHeader = Buffer.alloc(6); + fs.readSync(fd, peHeader, 0, peHeader.length, peOffset); + if (peHeader.toString('utf8', 0, 2) !== 'PE') { + // Not a PE file. + return undefined; + } + // Read the machine type from the PE header. + const machine = peHeader.readUInt16LE(4); + switch (machine) { + case 0xAA64: + LOGGER.debug(`Detected ARM64 architecture for Windows binary at ${binaryPath}`); + return 'arm64'; + case 0x8664: + LOGGER.debug(`Detected x64 architecture for Windows binary at ${binaryPath}`); + return 'x64'; + case 0x14c: + LOGGER.debug(`Detected x86 architecture for Windows binary at ${binaryPath} (unsupported)`); + return undefined; // 32 bit x86, which we don't support + default: + return undefined; + } + } finally { + fs.closeSync(fd); + } + } catch (error) { + LOGGER.debug(`Unable to determine Windows R architecture from ${binaryPath}: ${error}`); + return undefined; } } diff --git a/extensions/positron-r/src/provider.ts b/extensions/positron-r/src/provider.ts index 513018db4adb..e8ecfd48da12 100644 --- a/extensions/positron-r/src/provider.ts +++ b/extensions/positron-r/src/provider.ts @@ -268,6 +268,7 @@ export async function makeMetadata( homepath: rInst.homepath, binpath: rInst.binpath, scriptpath: scriptPath, + arch: rInst.arch || undefined, current: rInst.current, default: rInst.default, reasonDiscovered: rInst.reasonDiscovered, @@ -676,16 +677,47 @@ function rHeadquarters(): string[] { case 'linux': return [path.join('/opt', 'R')]; case 'win32': { - const paths = [ - path.join(process.env['ProgramW6432'] || 'C:\\Program Files', 'R') - ]; + // If the environment variable PROGRAMFILES is set, use that. 
+			const programFilesDirs = new Set<string>();
+			const programFilesEnv = process.env['PROGRAMFILES'] || process.env['ProgramFiles'];
+			if (programFilesEnv) {
+				programFilesDirs.add(programFilesEnv);
+			}
+
+			// Respect the PROGRAMW6432 environment variable if it is set, too
+			if (process.env['ProgramW6432']) {
+				programFilesDirs.add(process.env['ProgramW6432']);
+			}
+
+			// If no environment variables provided a location to look, fall
+			// back to C:\Program Files
+			if (programFilesDirs.size === 0) {
+				programFilesDirs.add('C:\\Program Files');
+			}
+
+			// In each of the Program Files directories, look for R installations
+			// in both R\ and R-aarch64\ (on ARM64 Windows)
+			// Also look in %LOCALAPPDATA%\Programs\R and R-aarch64
+			// (on ARM64 Windows)
+			const paths: string[] = [];
+			for (const baseDir of programFilesDirs) {
+				paths.push(path.join(baseDir, 'R'));
+				if (process.arch === 'arm64') {
+					// also look in R-aarch64 on ARM64 Windows
+					paths.push(path.join(baseDir, 'R-aarch64'));
+				}
+			}
 			if (process.env['LOCALAPPDATA']) {
 				paths.push(path.join(process.env['LOCALAPPDATA'], 'Programs', 'R'));
+				if (process.arch === 'arm64') {
+					// also look in R-aarch64 on ARM64 Windows
+					paths.push(path.join(process.env['LOCALAPPDATA'], 'Programs', 'R-aarch64'));
+				}
 			}
 			return [...new Set(paths)];
 		}
 		default:
-			throw new Error('Unsupported platform');
+			throw new Error(`Unsupported platform: ${process.platform}`);
 	}
 }
diff --git a/extensions/positron-r/src/r-installation.ts b/extensions/positron-r/src/r-installation.ts
index 7b76193a4514..08b75b0c2e2a 100644
--- a/extensions/positron-r/src/r-installation.ts
+++ b/extensions/positron-r/src/r-installation.ts
@@ -11,6 +11,7 @@ import { LOGGER } from './extension';
 import { MINIMUM_R_VERSION } from './constants';
 import { arePathsSame } from './path-utils';
 import { getDefaultInterpreterPath, isExcludedInstallation } from './interpreter-settings.js';
+import { normalizeWindowsArch, sniffWindowsBinaryArchitecture } from './kernel.js';
 
 /**
  * Extra
metadata included in the LanguageRuntimeMetadata for R installations. @@ -25,6 +26,9 @@ export interface RMetadataExtra { /** R's Rscript path */ readonly scriptpath: string; + /** Architecture reported by this installation (normalized, e.g. arm64, x86_64) */ + readonly arch?: string; + /** * Is this known to be the current version of R? * https://github.com/posit-dev/positron/issues/2659 @@ -229,6 +233,7 @@ export class RInstallation { const platformPart = builtParts[1]; const architecture = platformPart.match('^(aarch64|x86_64)'); + let derivedArch = ''; if (architecture) { const arch = architecture[1]; @@ -236,19 +241,38 @@ export class RInstallation { // Remap known architectures to equivalent values used by Rig, // just for overall consistency and familiarity if (arch === 'aarch64') { - this.arch = 'arm64'; + derivedArch = 'arm64'; } else if (arch === 'x86_64') { - this.arch = 'x86_64'; + derivedArch = 'x86_64'; } else { // Should never happen because of how our `match()` works console.warn(`Matched an unknown architecture '${arch}' for R '${this.version}'.`); - this.arch = arch; + derivedArch = arch; } - } else { - // Unknown architecture - this.arch = ''; } + if (process.platform === 'win32') { + // Windows arm builds currently misreport in the Built field; prefer the path signature (e.g. ...-aarch64). + const normalizedBin = this.binpath.toLowerCase(); + const pathSegments = normalizedBin.split(path.sep).filter(segment => segment.length > 0); + if (pathSegments.some(segment => segment === 'arm64' || segment === 'aarch64' || segment.endsWith('-arm64') || segment.endsWith('-aarch64'))) { + derivedArch = 'arm64'; + } else if (!derivedArch && pathSegments.some(segment => segment === 'x64' || segment.endsWith('-x64'))) { + derivedArch = 'x86_64'; + } + + // Double check against the binary itself and log a warning if there's a mismatch. 
+ const detectedArch = sniffWindowsBinaryArchitecture(this.binpath); + if (detectedArch) { + const normalizedArch = normalizeWindowsArch(derivedArch); + if (normalizedArch && detectedArch !== normalizedArch) { + LOGGER.warn(`Discrepancy between derived Windows architecture ${derivedArch} and sniffed architecture ${detectedArch} for R ${this.version} at ${this.binpath}`); + } + } + } + + this.arch = derivedArch; + LOGGER.info(`R installation discovered: ${JSON.stringify(this, null, 2)}`); } diff --git a/extensions/positron-r/src/runtime-manager.ts b/extensions/positron-r/src/runtime-manager.ts index 44bf1d0e48a0..fc6d286a5b5c 100644 --- a/extensions/positron-r/src/runtime-manager.ts +++ b/extensions/positron-r/src/runtime-manager.ts @@ -65,7 +65,11 @@ export class RRuntimeManager implements positron.LanguageRuntimeManager { const kernelSpec = createJupyterKernelSpec( metadataExtra.homepath, runtimeMetadata.runtimeName, - sessionMetadata.sessionMode); + sessionMetadata.sessionMode, + { + rBinaryPath: metadataExtra.binpath, + rArchitecture: metadataExtra.arch + }); const session = new RSession(runtimeMetadata, sessionMetadata, kernelSpec, diff --git a/extensions/positron-supervisor/package.json b/extensions/positron-supervisor/package.json index 640044b21945..fd1e3cb189a0 100644 --- a/extensions/positron-supervisor/package.json +++ b/extensions/positron-supervisor/package.json @@ -168,7 +168,7 @@ }, "positron": { "binaryDependencies": { - "kallichore": "0.1.54" + "kallichore": "0.1.56" } }, "extensionDependencies": [ diff --git a/extensions/positron-supervisor/scripts/install-kallichore-server.ts b/extensions/positron-supervisor/scripts/install-kallichore-server.ts index 1c2e94506a42..78ba5cecfa34 100644 --- a/extensions/positron-supervisor/scripts/install-kallichore-server.ts +++ b/extensions/positron-supervisor/scripts/install-kallichore-server.ts @@ -145,13 +145,15 @@ async function downloadAndReplaceKallichore(version: string, throw new Error(`Could not find 
Kallichore ${version} in the releases.`); } + const currentPlatform = platform(); + const currentArch = arch(); let os: string; - switch (platform()) { - case 'win32': os = 'windows-x64'; break; + switch (currentPlatform) { + case 'win32': os = (currentArch === 'arm64' ? 'windows-arm64' : 'windows-x64'); break; case 'darwin': os = 'darwin-universal'; break; - case 'linux': os = (arch() === 'arm64' ? 'linux-arm64' : 'linux-x64'); break; + case 'linux': os = (currentArch === 'arm64' ? 'linux-arm64' : 'linux-x64'); break; default: { - throw new Error(`Unsupported platform ${platform()}.`); + throw new Error(`Unsupported platform ${currentPlatform}.`); } }