Skip to content

Commit 938dfac

Browse files
committed
Two fixes for aibrow. Firstly, skip the binary test when running inside a SEA (single executable application) binary. Secondly, export getLlamaForOptions from the entry point
1 parent ee12b83 commit 938dfac

File tree

2 files changed

+4
-1
lines changed

2 files changed

+4
-1
lines changed

src/bindings/getLlama.ts

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
import process from "process";
22
import path from "path";
3+
import sea from 'node:sea';
34
import console from "console";
45
import {createRequire} from "module";
56
import {
@@ -914,6 +915,7 @@ function getShouldTestBinaryBeforeLoading({
914915
platformInfo: BinaryPlatformInfo,
915916
buildMetadata: BuildMetadataFile
916917
}) {
918+
if (sea.isSea()) { return false }
917919
if (platform === "linux") {
918920
if (isPrebuiltBinary)
919921
return true;

src/index.ts

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import {DisposedError} from "lifecycle-utils";
22
import {Llama} from "./bindings/Llama.js";
3-
import {getLlama, type LlamaOptions, type LastBuildOptions} from "./bindings/getLlama.js";
3+
import {getLlama, getLlamaForOptions, type LlamaOptions, type LastBuildOptions} from "./bindings/getLlama.js";
44
import {getLlamaGpuTypes} from "./bindings/utils/getLlamaGpuTypes.js";
55
import {NoBinaryFoundError} from "./bindings/utils/NoBinaryFoundError.js";
66
import {
@@ -120,6 +120,7 @@ import type {TemplateChatWrapperSegmentsOptions} from "./chatWrappers/generic/ut
120120
export {
121121
Llama,
122122
getLlama,
123+
getLlamaForOptions,
123124
getLlamaGpuTypes,
124125
type LlamaOptions,
125126
type LastBuildOptions,

0 commit comments

Comments
 (0)