
Commit f91fa60

fix: correctly scope npm
1 parent b356062 commit f91fa60

7 files changed: +41 -37 lines changed


.github/workflows/build.yml

Lines changed: 3 additions & 1 deletion
@@ -506,7 +506,9 @@ jobs:
           mv artifacts/llama.cpp/gitRelease.bundle ./llama/gitRelease.bundle

           mv artifacts/build-templates templates/packed/
-
+          rm -f ./templates/package.json
+          rm -f ./templates/package-lock.json
+
           echo "Built binaries:"
           ls bins
       - name: Move binaries to standalone prebuilt binary modules
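The added cleanup removes any stray npm manifests from ./templates before the built binaries are listed. For illustration only, the same operation in the TypeScript used by the repo's scripts could look like the sketch below (the fs-extra usage and the rationale comment are assumptions; the commit itself only adds the two rm -f lines):

import fs from "fs-extra";

// Sketch: mirrors the two `rm -f` commands added to the workflow step above.
// fs.remove() resolves even when the target does not exist, matching `rm -f`.
await fs.remove("./templates/package.json");
await fs.remove("./templates/package-lock.json");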

package.json

Lines changed: 11 additions & 11 deletions
@@ -222,16 +222,16 @@
         }
     },
     "optionalDependencies": {
-        "@node-llama-cpp/linux-arm64": "0.1.0",
-        "@node-llama-cpp/linux-armv7l": "0.1.0",
-        "@node-llama-cpp/linux-x64": "0.1.0",
-        "@node-llama-cpp/linux-x64-cuda": "0.1.0",
-        "@node-llama-cpp/linux-x64-vulkan": "0.1.0",
-        "@node-llama-cpp/mac-arm64-metal": "0.1.0",
-        "@node-llama-cpp/mac-x64": "0.1.0",
-        "@node-llama-cpp/win-arm64": "0.1.0",
-        "@node-llama-cpp/win-x64": "0.1.0",
-        "@node-llama-cpp/win-x64-cuda": "0.1.0",
-        "@node-llama-cpp/win-x64-vulkan": "0.1.0"
+        "@aibrow/node-llama-cpp-linux-arm64": "0.1.0",
+        "@aibrow/node-llama-cpp-linux-armv7l": "0.1.0",
+        "@aibrow/node-llama-cpp-linux-x64": "0.1.0",
+        "@aibrow/node-llama-cpp-linux-x64-cuda": "0.1.0",
+        "@aibrow/node-llama-cpp-linux-x64-vulkan": "0.1.0",
+        "@aibrow/node-llama-cpp-mac-arm64-metal": "0.1.0",
+        "@aibrow/node-llama-cpp-mac-x64": "0.1.0",
+        "@aibrow/node-llama-cpp-win-arm64": "0.1.0",
+        "@aibrow/node-llama-cpp-win-x64": "0.1.0",
+        "@aibrow/node-llama-cpp-win-x64-cuda": "0.1.0",
+        "@aibrow/node-llama-cpp-win-x64-vulkan": "0.1.0"
     }
 }
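All eleven prebuilt-binary packages move from the @node-llama-cpp scope to the @aibrow scope, with the platform suffix folded into the package name itself rather than kept as a scoped sub-name. A minimal TypeScript sketch of that naming rule (the helper is hypothetical, written only to make the pattern explicit):

// Hypothetical helper, not part of the repo: derives the scoped package names
// introduced by this commit from a platform/GPU folder name.
function prebuiltBinaryPackageName(platformFolder: string): string {
    return `@aibrow/node-llama-cpp-${platformFolder}`;
}

// prebuiltBinaryPackageName("mac-arm64-metal") === "@aibrow/node-llama-cpp-mac-arm64-metal"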

scripts/movePrebuiltBinariesToStandaloneModules.ts

Lines changed: 11 additions & 11 deletions
@@ -27,18 +27,18 @@ async function moveBinariesFolderToStandaloneModule(folderNameFilter: (folderNam
     }
 }

-await moveBinariesFolderToStandaloneModule((folderName) => folderName.startsWith("mac-arm64-metal"), "@aibrow/node-llama-cpp/mac-arm64-metal");
-await moveBinariesFolderToStandaloneModule((folderName) => folderName.startsWith("mac-x64"), "@aibrow/node-llama-cpp/mac-x64");
+await moveBinariesFolderToStandaloneModule((folderName) => folderName.startsWith("mac-arm64-metal"), "@aibrow/node-llama-cpp-mac-arm64-metal");
+await moveBinariesFolderToStandaloneModule((folderName) => folderName.startsWith("mac-x64"), "@aibrow/node-llama-cpp-mac-x64");

-await moveBinariesFolderToStandaloneModule((folderName) => folderName.startsWith("linux-x64-cuda"), "@aibrow/node-llama-cpp/linux-x64-cuda");
-await moveBinariesFolderToStandaloneModule((folderName) => folderName.startsWith("linux-x64-vulkan"), "@aibrow/node-llama-cpp/linux-x64-vulkan");
-await moveBinariesFolderToStandaloneModule((folderName) => folderName.startsWith("linux-x64"), "@aibrow/node-llama-cpp/linux-x64");
+await moveBinariesFolderToStandaloneModule((folderName) => folderName.startsWith("linux-x64-cuda"), "@aibrow/node-llama-cpp-linux-x64-cuda");
+await moveBinariesFolderToStandaloneModule((folderName) => folderName.startsWith("linux-x64-vulkan"), "@aibrow/node-llama-cpp-linux-x64-vulkan");
+await moveBinariesFolderToStandaloneModule((folderName) => folderName.startsWith("linux-x64"), "@aibrow/node-llama-cpp-linux-x64");

-await moveBinariesFolderToStandaloneModule((folderName) => folderName.startsWith("linux-arm64"), "@aibrow/node-llama-cpp/linux-arm64");
-await moveBinariesFolderToStandaloneModule((folderName) => folderName.startsWith("linux-armv7l"), "@aibrow/node-llama-cpp/linux-armv7l");
+await moveBinariesFolderToStandaloneModule((folderName) => folderName.startsWith("linux-arm64"), "@aibrow/node-llama-cpp-linux-arm64");
+await moveBinariesFolderToStandaloneModule((folderName) => folderName.startsWith("linux-armv7l"), "@aibrow/node-llama-cpp-linux-armv7l");

-await moveBinariesFolderToStandaloneModule((folderName) => folderName.startsWith("win-x64-cuda"), "@aibrow/node-llama-cpp/win-x64-cuda");
-await moveBinariesFolderToStandaloneModule((folderName) => folderName.startsWith("win-x64-vulkan"), "@aibrow/node-llama-cpp/win-x64-vulkan");
-await moveBinariesFolderToStandaloneModule((folderName) => folderName.startsWith("win-x64"), "@aibrow/node-llama-cpp/win-x64");
+await moveBinariesFolderToStandaloneModule((folderName) => folderName.startsWith("win-x64-cuda"), "@aibrow/node-llama-cpp-win-x64-cuda");
+await moveBinariesFolderToStandaloneModule((folderName) => folderName.startsWith("win-x64-vulkan"), "@aibrow/node-llama-cpp-win-x64-vulkan");
+await moveBinariesFolderToStandaloneModule((folderName) => folderName.startsWith("win-x64"), "@aibrow/node-llama-cpp-win-x64");

-await moveBinariesFolderToStandaloneModule((folderName) => folderName.startsWith("win-arm64"), "@aibrow/node-llama-cpp/win-arm64");
+await moveBinariesFolderToStandaloneModule((folderName) => folderName.startsWith("win-arm64"), "@aibrow/node-llama-cpp-win-arm64");
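The moveBinariesFolderToStandaloneModule helper itself sits above this hunk; only its call sites change, so each platform's binaries folder now lands in a package named with the node-llama-cpp- prefix under the @aibrow scope. A rough sketch of what such a helper plausibly does, under assumed paths (the bins/ and packages/ locations and the fs-extra calls are assumptions, not taken from the diff):

import path from "path";
import {fileURLToPath} from "url";
import fs from "fs-extra";

const __dirname = path.dirname(fileURLToPath(import.meta.url));

// Sketch only: moves every matching binaries folder into the standalone package's bins directory.
async function moveBinariesFolderToStandaloneModule(
    folderNameFilter: (folderName: string) => boolean,
    packageName: string
) {
    const binsDirectory = path.join(__dirname, "..", "bins"); // assumed source location
    const packageBinsDirectory = path.join(__dirname, "..", "packages", packageName, "bins"); // assumed target

    for (const folderName of await fs.readdir(binsDirectory)) {
        if (!folderNameFilter(folderName))
            continue;

        await fs.move(
            path.join(binsDirectory, folderName),
            path.join(packageBinsDirectory, folderName),
            {overwrite: true}
        );
    }
}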

scripts/postVersion.ts

Lines changed: 1 addition & 1 deletion
@@ -10,7 +10,7 @@ const currentVersion = packageJson.version;

 if (packageJson.optionalDependencies != null) {
     for (const packageName of Object.keys(packageJson.optionalDependencies)) {
-        if (!packageName.startsWith("@aibrow/node-llama-cpp/"))
+        if (!packageName.startsWith("@aibrow/node-llama-cpp-"))
             continue;

         console.info(`Updating optional dependency "${packageName}" to version "${currentVersion}"`);
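The hunk ends before the code that actually applies the new version; a sketch of how the surrounding script presumably fits together, with only the startsWith("@aibrow/node-llama-cpp-") check taken from the diff (the package.json path and the write-back are assumptions):

import path from "path";
import {fileURLToPath} from "url";
import fs from "fs-extra";

const __dirname = path.dirname(fileURLToPath(import.meta.url));
const packageJsonPath = path.join(__dirname, "..", "package.json"); // assumed location

const packageJson = await fs.readJson(packageJsonPath);
const currentVersion = packageJson.version;

if (packageJson.optionalDependencies != null) {
    for (const packageName of Object.keys(packageJson.optionalDependencies)) {
        // Only the prebuilt-binary packages track this repo's version
        if (!packageName.startsWith("@aibrow/node-llama-cpp-"))
            continue;

        console.info(`Updating optional dependency "${packageName}" to version "${currentVersion}"`);
        packageJson.optionalDependencies[packageName] = currentVersion; // assumed write-back
    }

    await fs.writeJson(packageJsonPath, packageJson, {spaces: 4}); // assumed persistence step
}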

scripts/prepareStandalonePrebuiltBinaryModules.ts

Lines changed: 2 additions & 1 deletion
@@ -5,10 +5,11 @@ import {$, cd} from "zx";

 const __dirname = path.dirname(fileURLToPath(import.meta.url));
 const packageDirectory = path.join(__dirname, "..", "packages");
-const packageScope = "@aibrow/node-llama-cpp";
+const packageScope = "@aibrow";
 const subPackagesDirectory = path.join(packageDirectory, packageScope);

 for (const packageName of await fs.readdir(subPackagesDirectory)) {
+    if (!packageName.startsWith("node-llama-cpp-")) { continue }
     const packagePath = path.join(subPackagesDirectory, packageName);
     const packagePackageJsonPath = path.join(packagePath, "package.json");
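With packageScope narrowed from "@aibrow/node-llama-cpp" to "@aibrow", the packages/@aibrow/ directory can hold packages other than the prebuilt binaries, so both this script and the publish script below skip any entry that does not start with node-llama-cpp-. A tiny sketch of the layout the filter guards against (the some-other-package entry is hypothetical, shown only to illustrate what gets skipped):

// Directory layout assumed by the filter (entries other than the binary packages are illustrative):
//
//   packages/
//     @aibrow/
//       node-llama-cpp-mac-arm64-metal/package.json   <- processed
//       node-llama-cpp-linux-x64-cuda/package.json    <- processed
//       some-other-package/                           <- skipped by startsWith("node-llama-cpp-")
//
const isPrebuiltBinaryPackage = (packageName: string) =>
    packageName.startsWith("node-llama-cpp-");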

scripts/publishStandalonePrebuiltBinaryModules.ts

Lines changed: 2 additions & 1 deletion
@@ -13,7 +13,7 @@ const GH_RELEASE_REF = env.get("GH_RELEASE_REF")

 const __dirname = path.dirname(fileURLToPath(import.meta.url));
 const packageDirectory = path.join(__dirname, "..", "packages");
-const packageScope = "@aibrow/node-llama-cpp";
+const packageScope = "@aibrow";
 const subPackagesDirectory = path.join(packageDirectory, packageScope);

 const argv = await yargs(hideBin(process.argv))

@@ -28,6 +28,7 @@ if (packageVersion === "")
     throw new Error("packageVersion is empty");

 for (const packageName of await fs.readdir(subPackagesDirectory)) {
+    if (!packageName.startsWith("node-llama-cpp-")) { continue }
     const packagePath = path.join(subPackagesDirectory, packageName);
     const packagePackageJsonPath = path.join(packagePath, "package.json");

src/bindings/utils/compileLLamaCpp.ts

Lines changed: 11 additions & 11 deletions
@@ -506,41 +506,41 @@ function getPrebuiltBinariesPackageDirectoryForBuildOptions(buildOptions: BuildO
     if (buildOptions.platform === "mac") {
         if (buildOptions.arch === "arm64" && buildOptions.gpu === "metal")
             // @ts-ignore
-            return getBinariesPathFromModules(() => import("@node-llama-cpp/mac-arm64-metal"));
+            return getBinariesPathFromModules(() => import("@aibrow/node-llama-cpp-mac-arm64-metal"));
         else if (buildOptions.arch === "x64" && buildOptions.gpu === false)
             // @ts-ignore
-            return getBinariesPathFromModules(() => import("@node-llama-cpp/mac-x64"));
+            return getBinariesPathFromModules(() => import("@aibrow/node-llama-cpp-mac-x64"));
     } else if (buildOptions.platform === "linux") {
         if (buildOptions.arch === "x64") {
             if (buildOptions.gpu === "cuda")
                 // @ts-ignore
-                return getBinariesPathFromModules(() => import("@node-llama-cpp/linux-x64-cuda"));
+                return getBinariesPathFromModules(() => import("@aibrow/node-llama-cpp-linux-x64-cuda"));
             else if (buildOptions.gpu === "vulkan")
                 // @ts-ignore
-                return getBinariesPathFromModules(() => import("@node-llama-cpp/linux-x64-vulkan"));
+                return getBinariesPathFromModules(() => import("@aibrow/node-llama-cpp-linux-x64-vulkan"));
             else if (buildOptions.gpu === false)
                 // @ts-ignore
-                return getBinariesPathFromModules(() => import("@node-llama-cpp/linux-x64"));
+                return getBinariesPathFromModules(() => import("@aibrow/node-llama-cpp-linux-x64"));
         } else if (buildOptions.arch === "arm64")
             // @ts-ignore
-            return getBinariesPathFromModules(() => import("@node-llama-cpp/linux-arm64"));
+            return getBinariesPathFromModules(() => import("@aibrow/node-llama-cpp-linux-arm64"));
         else if (buildOptions.arch === "arm")
             // @ts-ignore
-            return getBinariesPathFromModules(() => import("@node-llama-cpp/linux-armv7l"));
+            return getBinariesPathFromModules(() => import("@aibrow/node-llama-cpp-linux-armv7l"));
     } else if (buildOptions.platform === "win") {
         if (buildOptions.arch === "x64") {
             if (buildOptions.gpu === "cuda")
                 // @ts-ignore
-                return getBinariesPathFromModules(() => import("@node-llama-cpp/win-x64-cuda"));
+                return getBinariesPathFromModules(() => import("@aibrow/node-llama-cpp-win-x64-cuda"));
             else if (buildOptions.gpu === "vulkan")
                 // @ts-ignore
-                return getBinariesPathFromModules(() => import("@node-llama-cpp/win-x64-vulkan"));
+                return getBinariesPathFromModules(() => import("@aibrow/node-llama-cpp-win-x64-vulkan"));
             else if (buildOptions.gpu === false)
                 // @ts-ignore
-                return getBinariesPathFromModules(() => import("@node-llama-cpp/win-x64"));
+                return getBinariesPathFromModules(() => import("@aibrow/node-llama-cpp-win-x64"));
         } else if (buildOptions.arch === "arm64")
             // @ts-ignore
-            return getBinariesPathFromModules(() => import("@node-llama-cpp/win-arm64"));
+            return getBinariesPathFromModules(() => import("@aibrow/node-llama-cpp-win-arm64"));
     }
     /* eslint-enable import/no-unresolved */

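getBinariesPathFromModules is defined outside this hunk; the call sites above only change which optional package each dynamic import targets. A rough sketch of what such a resolver might do (the export shape, the null fallback, and the error handling are assumptions, not taken from the diff):

import path from "path";

// Sketch only: resolves the binaries directory shipped inside an optional prebuilt-binary package.
// The dynamic import keeps the dependency optional: if the platform package is not installed,
// the import rejects and the caller can fall back to building from source.
async function getBinariesPathFromModules(
    loadModule: () => Promise<{binsDir?: string}> // export shape is an assumption
): Promise<string | null> {
    try {
        const binariesModule = await loadModule();

        if (binariesModule.binsDir != null)
            return path.resolve(binariesModule.binsDir);
    } catch (err) {
        // Optional dependency not installed for this platform/GPU combination
    }

    return null;
}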