 import com.facebook.jni.HybridData;
 import com.facebook.jni.annotations.DoNotStrip;
 import java.io.File;
+import java.util.List;
 import org.pytorch.executorch.ExecuTorchRuntime;
 import org.pytorch.executorch.annotations.Experimental;

@@ -32,14 +33,22 @@ public class LlmModule {

   @DoNotStrip
   private static native HybridData initHybrid(
-      int modelType, String modulePath, String tokenizerPath, float temperature, String dataPath);
+      int modelType,
+      String modulePath,
+      String tokenizerPath,
+      float temperature,
+      List<String> dataFiles);

   /**
    * Constructs a LLM Module for a model with given type, model path, tokenizer, temperature, and
-   * data path.
+   * dataFiles.
    */
   public LlmModule(
-      int modelType, String modulePath, String tokenizerPath, float temperature, String dataPath) {
+      int modelType,
+      String modulePath,
+      String tokenizerPath,
+      float temperature,
+      List<String> dataFiles) {
     ExecuTorchRuntime runtime = ExecuTorchRuntime.getRuntime();

     File modelFile = new File(modulePath);
@@ -50,25 +59,35 @@ public LlmModule(
     if (!tokenizerFile.canRead() || !tokenizerFile.isFile()) {
       throw new RuntimeException("Cannot load tokenizer path " + tokenizerPath);
     }
-    mHybridData = initHybrid(modelType, modulePath, tokenizerPath, temperature, dataPath);
+
+    mHybridData = initHybrid(modelType, modulePath, tokenizerPath, temperature, dataFiles);
+  }
+
+  /**
+   * Constructs a LLM Module for a model with given type, model path, tokenizer, temperature, and
+   * data path.
+   */
+  public LlmModule(
+      int modelType, String modulePath, String tokenizerPath, float temperature, String dataPath) {
+    this(modelType, modulePath, tokenizerPath, temperature, List.of(dataPath));
   }

   /** Constructs a LLM Module for a model with given model path, tokenizer, temperature. */
   public LlmModule(String modulePath, String tokenizerPath, float temperature) {
-    this(MODEL_TYPE_TEXT, modulePath, tokenizerPath, temperature, null);
+    this(MODEL_TYPE_TEXT, modulePath, tokenizerPath, temperature, List.of());
   }

   /**
    * Constructs a LLM Module for a model with given model path, tokenizer, temperature and data
    * path.
    */
   public LlmModule(String modulePath, String tokenizerPath, float temperature, String dataPath) {
-    this(MODEL_TYPE_TEXT, modulePath, tokenizerPath, temperature, dataPath);
+    this(MODEL_TYPE_TEXT, modulePath, tokenizerPath, temperature, List.of(dataPath));
   }

   /** Constructs a LLM Module for a model with given path, tokenizer, and temperature. */
   public LlmModule(int modelType, String modulePath, String tokenizerPath, float temperature) {
-    this(modelType, modulePath, tokenizerPath, temperature, null);
+    this(modelType, modulePath, tokenizerPath, temperature, List.of());
   }

   /** Constructs a LLM Module for a model with the given LlmModuleConfig */
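Example usage of the new list-based constructor (a minimal sketch, not part of this diff: the package import, the visibility of MODEL_TYPE_TEXT, and all file paths below are assumptions for illustration):

// Hypothetical caller showing the List<String> data-files overload and the
// single-path overload that now delegates to it.
import java.util.List;
import org.pytorch.executorch.extension.llm.LlmModule; // assumed package

public class LlmModuleExample {
  public static void main(String[] args) {
    // Pass several .ptd data files as one list (paths are placeholders).
    LlmModule multiData =
        new LlmModule(
            LlmModule.MODEL_TYPE_TEXT,              // assumed public constant on LlmModule
            "/data/local/tmp/llm/model.pte",        // placeholder model path
            "/data/local/tmp/llm/tokenizer.bin",    // placeholder tokenizer path
            0.8f,                                   // temperature
            List.of(
                "/data/local/tmp/llm/weights_0.ptd",
                "/data/local/tmp/llm/weights_1.ptd"));

    // The existing single-path constructor keeps working; it wraps the path in List.of(...).
    LlmModule singleData =
        new LlmModule(
            "/data/local/tmp/llm/model.pte",
            "/data/local/tmp/llm/tokenizer.bin",
            0.8f,
            "/data/local/tmp/llm/weights_0.ptd");
  }
}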