Skip to content

Commit e08df47

Browse files
committed
Remove generateFromPos API
1 parent 16b6d1c commit e08df47

File tree

2 files changed

+1
-20
lines changed
  • examples/demo-apps/android/LlamaDemo/app/src/main/java/com/example/executorchllamademo
  • extension/android/executorch_android/src/main/java/org/pytorch/executorch/extension/llm

2 files changed

+1
-20
lines changed

examples/demo-apps/android/LlamaDemo/app/src/main/java/com/example/executorchllamademo/MainActivity.java

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -778,10 +778,9 @@ public void run() {
                       mCurrentSettingsFields.getModelType(),
                       mCurrentSettingsFields.getBackendType())
                   == ModelUtils.VISION_MODEL) {
-            mModule.generateFromPos(
+            mModule.generate(
                 finalPrompt,
                 ModelUtils.VISION_MODEL_SEQ_LEN,
-                startPos,
                 MainActivity.this,
                 false);
           } else if (mCurrentSettingsFields.getModelType() == ModelType.LLAMA_GUARD_3) {

extension/android/executorch_android/src/main/java/org/pytorch/executorch/extension/llm/LlmModule.java

Lines changed: 0 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -218,24 +218,6 @@ public long prefillPrompt(String prompt, long startPos, int bos, int eos) {
   // returns a tuple of (status, updated startPos)
   private native int appendTextInput(String prompt, int bos, int eos);

-  /**
-   * Generate tokens from the given prompt, starting from the given position.
-   *
-   * <p>This is a deprecated API. Please use {@link #generate(String, int, LlmCallback, boolean)}
-   *
-   * @param prompt The text prompt to LLaVA.
-   * @param seqLen The total sequence length, including the prompt tokens and new tokens.
-   * @param startPos The starting position in KV cache of the input in the LLM.
-   * @param callback callback object to receive results.
-   * @param echo indicate whether to echo the input prompt or not.
-   * @return The error code.
-   */
-  @Deprecated
-  public int generateFromPos(
-      String prompt, int seqLen, long startPos, LlmCallback callback, boolean echo) {
-    return generate(prompt, seqLen, callback, echo);
-  }
-
   /**
    * Reset the context of the LLM. This will clear the KV cache and reset the state of the LLM.
    *

0 commit comments

Comments (0)