 import com.facebook.jni.HybridData;
 import com.facebook.jni.annotations.DoNotStrip;
 import java.io.File;
+import java.util.List;
 import org.pytorch.executorch.ExecuTorchRuntime;
 import org.pytorch.executorch.annotations.Experimental;
@@ -32,14 +33,14 @@ public class LlmModule {
 
   @DoNotStrip
   private static native HybridData initHybrid(
-      int modelType, String modulePath, String tokenizerPath, float temperature, String dataPath);
+      int modelType, String modulePath, String tokenizerPath, float temperature, List<String> dataFiles);
 
   /**
    * Constructs a LLM Module for a model with given type, model path, tokenizer, temperature, and
-   * data path.
+   * dataFiles.
    */
   public LlmModule(
-      int modelType, String modulePath, String tokenizerPath, float temperature, String dataPath) {
+      int modelType, String modulePath, String tokenizerPath, float temperature, List<String> dataFiles) {
     ExecuTorchRuntime runtime = ExecuTorchRuntime.getRuntime();
 
     File modelFile = new File(modulePath);
@@ -50,25 +51,35 @@ public LlmModule(
     if (!tokenizerFile.canRead() || !tokenizerFile.isFile()) {
       throw new RuntimeException("Cannot load tokenizer path " + tokenizerPath);
     }
-    mHybridData = initHybrid(modelType, modulePath, tokenizerPath, temperature, dataPath);
+
+    mHybridData = initHybrid(modelType, modulePath, tokenizerPath, temperature, dataFiles);
+  }
+
+  /**
+   * Constructs a LLM Module for a model with given type, model path, tokenizer, temperature, and
+   * data path.
+   */
+  public LlmModule(
+      int modelType, String modulePath, String tokenizerPath, float temperature, String dataPath) {
+    this(modelType, modulePath, tokenizerPath, temperature, List.of(dataPath));
   }
 
   /** Constructs a LLM Module for a model with given model path, tokenizer, temperature. */
   public LlmModule(String modulePath, String tokenizerPath, float temperature) {
-    this(MODEL_TYPE_TEXT, modulePath, tokenizerPath, temperature, null);
+    this(MODEL_TYPE_TEXT, modulePath, tokenizerPath, temperature, List.of());
   }
 
   /**
    * Constructs a LLM Module for a model with given model path, tokenizer, temperature and data
    * path.
    */
   public LlmModule(String modulePath, String tokenizerPath, float temperature, String dataPath) {
-    this(MODEL_TYPE_TEXT, modulePath, tokenizerPath, temperature, dataPath);
+    this(MODEL_TYPE_TEXT, modulePath, tokenizerPath, temperature, List.of(dataPath));
   }
 
   /** Constructs a LLM Module for a model with given path, tokenizer, and temperature. */
   public LlmModule(int modelType, String modulePath, String tokenizerPath, float temperature) {
-    this(modelType, modulePath, tokenizerPath, temperature, null);
+    this(modelType, modulePath, tokenizerPath, temperature, List.of());
   }
 
   /** Constructs a LLM Module for a model with the given LlmModuleConfig */
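For anyone trying the new surface, here is a minimal caller sketch. It is not part of the change: the package for LlmModule, the accessibility of MODEL_TYPE_TEXT, and the file paths are assumptions; only the constructor shapes come from the hunks above.

```java
// Hypothetical caller exercising the updated constructors (not part of this diff).
import java.util.List;
import org.pytorch.executorch.extension.llm.LlmModule; // assumed package

public final class LlmModuleUsage {
  public static void main(String[] args) {
    // New overload: program file plus any number of .ptd data files.
    LlmModule withDataFiles =
        new LlmModule(
            LlmModule.MODEL_TYPE_TEXT, // assumed to be publicly accessible
            "/data/local/tmp/llama/model.pte",
            "/data/local/tmp/llama/tokenizer.model",
            0.8f,
            List.of("/data/local/tmp/llama/model.ptd"));

    // Pre-existing single-path overload still compiles; per the diff it now
    // forwards List.of(dataPath) to the list-based constructor.
    LlmModule withSinglePath =
        new LlmModule(
            "/data/local/tmp/llama/model.pte",
            "/data/local/tmp/llama/tokenizer.model",
            0.8f,
            "/data/local/tmp/llama/model.ptd");
  }
}
```

One behavioral note on the delegation: the no-data constructors now pass List.of() instead of null, and List.of(dataPath) throws a NullPointerException, so any caller that previously handed the String overloads a null data path will fail earlier than before.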