@@ -506,41 +506,41 @@ function getPrebuiltBinariesPackageDirectoryForBuildOptions(buildOptions: BuildO
     if (buildOptions.platform === "mac") {
         if (buildOptions.arch === "arm64" && buildOptions.gpu === "metal")
             // @ts-ignore
-            return getBinariesPathFromModules(() => import("@node-llama-cpp/mac-arm64-metal"));
+            return getBinariesPathFromModules(() => import("@aibrow/node-llama-cpp-mac-arm64-metal"));
         else if (buildOptions.arch === "x64" && buildOptions.gpu === false)
             // @ts-ignore
-            return getBinariesPathFromModules(() => import("@node-llama-cpp/mac-x64"));
+            return getBinariesPathFromModules(() => import("@aibrow/node-llama-cpp-mac-x64"));
     } else if (buildOptions.platform === "linux") {
         if (buildOptions.arch === "x64") {
             if (buildOptions.gpu === "cuda")
                 // @ts-ignore
-                return getBinariesPathFromModules(() => import("@node-llama-cpp/linux-x64-cuda"));
+                return getBinariesPathFromModules(() => import("@aibrow/node-llama-cpp-linux-x64-cuda"));
             else if (buildOptions.gpu === "vulkan")
                 // @ts-ignore
-                return getBinariesPathFromModules(() => import("@node-llama-cpp/linux-x64-vulkan"));
+                return getBinariesPathFromModules(() => import("@aibrow/node-llama-cpp-linux-x64-vulkan"));
             else if (buildOptions.gpu === false)
                 // @ts-ignore
-                return getBinariesPathFromModules(() => import("@node-llama-cpp/linux-x64"));
+                return getBinariesPathFromModules(() => import("@aibrow/node-llama-cpp-linux-x64"));
         } else if (buildOptions.arch === "arm64")
             // @ts-ignore
-            return getBinariesPathFromModules(() => import("@node-llama-cpp/linux-arm64"));
+            return getBinariesPathFromModules(() => import("@aibrow/node-llama-cpp-linux-arm64"));
         else if (buildOptions.arch === "arm")
             // @ts-ignore
-            return getBinariesPathFromModules(() => import("@node-llama-cpp/linux-armv7l"));
+            return getBinariesPathFromModules(() => import("@aibrow/node-llama-cpp-linux-armv7l"));
     } else if (buildOptions.platform === "win") {
         if (buildOptions.arch === "x64") {
             if (buildOptions.gpu === "cuda")
                 // @ts-ignore
-                return getBinariesPathFromModules(() => import("@node-llama-cpp/win-x64-cuda"));
+                return getBinariesPathFromModules(() => import("@aibrow/node-llama-cpp-win-x64-cuda"));
             else if (buildOptions.gpu === "vulkan")
                 // @ts-ignore
-                return getBinariesPathFromModules(() => import("@node-llama-cpp/win-x64-vulkan"));
+                return getBinariesPathFromModules(() => import("@aibrow/node-llama-cpp-win-x64-vulkan"));
             else if (buildOptions.gpu === false)
                 // @ts-ignore
-                return getBinariesPathFromModules(() => import("@node-llama-cpp/win-x64"));
+                return getBinariesPathFromModules(() => import("@aibrow/node-llama-cpp-win-x64"));
         } else if (buildOptions.arch === "arm64")
             // @ts-ignore
-            return getBinariesPathFromModules(() => import("@node-llama-cpp/win-arm64"));
+            return getBinariesPathFromModules(() => import("@aibrow/node-llama-cpp-win-arm64"));
     }
     /* eslint-enable import/no-unresolved */

0 commit comments