11
11
import com .facebook .jni .HybridData ;
12
12
import com .facebook .jni .annotations .DoNotStrip ;
13
13
import java .io .File ;
14
+ import java .util .List ;
14
15
import org .pytorch .executorch .ExecuTorchRuntime ;
15
16
import org .pytorch .executorch .annotations .Experimental ;
16
17
@@ -32,14 +33,22 @@ public class LlmModule {
32
33
33
34
/**
 * Creates the native (C++) peer backing this module.
 *
 * @param modelType integer model-type selector (a MODEL_TYPE_* constant declared on this class)
 * @param modulePath filesystem path to the model file
 * @param tokenizerPath filesystem path to the tokenizer file
 * @param temperature sampling temperature value forwarded to the native side
 * @param dataFiles paths of additional data files; may be empty, meaning none
 * @return the {@link HybridData} handle that owns the native instance
 */
@DoNotStrip
private static native HybridData initHybrid(
    int modelType,
    String modulePath,
    String tokenizerPath,
    float temperature,
    List<String> dataFiles);
36
41
37
42
/**
38
43
* Constructs a LLM Module for a model with given type, model path, tokenizer, temperature, and
39
- * data path .
44
+ * dataFiles .
40
45
*/
41
46
public LlmModule (
42
- int modelType , String modulePath , String tokenizerPath , float temperature , String dataPath ) {
47
+ int modelType ,
48
+ String modulePath ,
49
+ String tokenizerPath ,
50
+ float temperature ,
51
+ List <String > dataFiles ) {
43
52
ExecuTorchRuntime runtime = ExecuTorchRuntime .getRuntime ();
44
53
45
54
File modelFile = new File (modulePath );
@@ -50,25 +59,35 @@ public LlmModule(
50
59
if (!tokenizerFile .canRead () || !tokenizerFile .isFile ()) {
51
60
throw new RuntimeException ("Cannot load tokenizer path " + tokenizerPath );
52
61
}
53
- mHybridData = initHybrid (modelType , modulePath , tokenizerPath , temperature , dataPath );
62
+
63
+ mHybridData = initHybrid (modelType , modulePath , tokenizerPath , temperature , dataFiles );
64
+ }
65
+
66
/**
 * Constructs a LLM Module for a model with given type, model path, tokenizer, temperature, and
 * data path.
 *
 * @param modelType integer model-type selector (a MODEL_TYPE_* constant declared on this class)
 * @param modulePath filesystem path to the model file
 * @param tokenizerPath filesystem path to the tokenizer file
 * @param temperature sampling temperature value forwarded to the native side
 * @param dataPath path of an additional data file, or {@code null} for none
 */
public LlmModule(
    int modelType, String modulePath, String tokenizerPath, float temperature, String dataPath) {
  // List.of(dataPath) throws NullPointerException for a null element, but this legacy overload
  // historically accepted null to mean "no data file" (other convenience constructors passed
  // null here). Map null to an empty list to stay backward-compatible.
  this(
      modelType,
      modulePath,
      tokenizerPath,
      temperature,
      dataPath == null ? List.of() : List.of(dataPath));
}
55
74
56
75
/**
 * Constructs a LLM Module for a model with given model path, tokenizer, and temperature, using
 * model type MODEL_TYPE_TEXT and no additional data files.
 */
public LlmModule(String modulePath, String tokenizerPath, float temperature) {
  this(MODEL_TYPE_TEXT, modulePath, tokenizerPath, temperature, List.of());
}
60
79
61
80
/**
 * Constructs a LLM Module for a model with given model path, tokenizer, temperature and data
 * path, using model type MODEL_TYPE_TEXT.
 *
 * @param modulePath filesystem path to the model file
 * @param tokenizerPath filesystem path to the tokenizer file
 * @param temperature sampling temperature value forwarded to the native side
 * @param dataPath path of an additional data file, or {@code null} for none
 */
public LlmModule(String modulePath, String tokenizerPath, float temperature, String dataPath) {
  // List.of(dataPath) throws NullPointerException for a null element; before the List-based
  // signature was introduced, a null dataPath was accepted here to mean "no data file".
  // Preserve that contract for existing callers.
  this(
      MODEL_TYPE_TEXT,
      modulePath,
      tokenizerPath,
      temperature,
      dataPath == null ? List.of() : List.of(dataPath));
}
68
87
69
88
/**
 * Constructs a LLM Module for a model with given type, path, tokenizer, and temperature, with no
 * additional data files.
 */
public LlmModule(int modelType, String modulePath, String tokenizerPath, float temperature) {
  this(modelType, modulePath, tokenizerPath, temperature, List.of());
}
73
92
74
93
/** Constructs a LLM Module for a model with the given LlmModuleConfig */
0 commit comments