Skip to content

Commit 697ae5d

Browse files
author
David Motsonashvili
committed
Fix overloaded methods in Java
1 parent a268daf commit 697ae5d

File tree

3 files changed

+24
-17
lines changed

3 files changed

+24
-17
lines changed

firebase-ai/api.txt

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -154,9 +154,9 @@ package com.google.firebase.ai.java {
154154
method @RequiresPermission(android.Manifest.permission.RECORD_AUDIO) public abstract com.google.common.util.concurrent.ListenableFuture<kotlin.Unit> startAudioConversation(boolean enableInterruptions);
155155
method @RequiresPermission(android.Manifest.permission.RECORD_AUDIO) public abstract com.google.common.util.concurrent.ListenableFuture<kotlin.Unit> startAudioConversation(kotlin.jvm.functions.Function1<? super com.google.firebase.ai.type.FunctionCallPart,com.google.firebase.ai.type.FunctionResponsePart>? functionCallHandler);
156156
method @RequiresPermission(android.Manifest.permission.RECORD_AUDIO) public abstract com.google.common.util.concurrent.ListenableFuture<kotlin.Unit> startAudioConversation(kotlin.jvm.functions.Function1<? super com.google.firebase.ai.type.FunctionCallPart,com.google.firebase.ai.type.FunctionResponsePart>? functionCallHandler, boolean enableInterruptions);
157-
method @RequiresPermission(android.Manifest.permission.RECORD_AUDIO) public abstract com.google.common.util.concurrent.ListenableFuture<kotlin.Unit> startAudioConversation(kotlin.jvm.functions.Function1<? super com.google.firebase.ai.type.FunctionCallPart,com.google.firebase.ai.type.FunctionResponsePart>? functionCallHandler, kotlin.jvm.functions.Function1<? super com.google.firebase.ai.type.LiveServerMessage,kotlin.Unit>? transcriptHandler = null, boolean enableInterruptions);
158-
method @RequiresPermission(android.Manifest.permission.RECORD_AUDIO) public abstract com.google.common.util.concurrent.ListenableFuture<kotlin.Unit> startAudioConversation(kotlin.jvm.functions.Function1<? super com.google.firebase.ai.type.LiveServerMessage,kotlin.Unit>? transcriptHandler = null);
159-
method @RequiresPermission(android.Manifest.permission.RECORD_AUDIO) public abstract com.google.common.util.concurrent.ListenableFuture<kotlin.Unit> startAudioConversation(kotlin.jvm.functions.Function1<? super com.google.firebase.ai.type.LiveServerMessage,kotlin.Unit>? transcriptHandler = null, boolean enableInterruptions);
157+
method @RequiresPermission(android.Manifest.permission.RECORD_AUDIO) public abstract com.google.common.util.concurrent.ListenableFuture<kotlin.Unit> startAudioConversation(kotlin.jvm.functions.Function1<? super com.google.firebase.ai.type.FunctionCallPart,com.google.firebase.ai.type.FunctionResponsePart>? functionCallHandler, kotlin.jvm.functions.Function2<? super com.google.firebase.ai.type.Transcription?,? super com.google.firebase.ai.type.Transcription?,kotlin.Unit>? transcriptHandler = null, boolean enableInterruptions);
158+
method @RequiresPermission(android.Manifest.permission.RECORD_AUDIO) public abstract com.google.common.util.concurrent.ListenableFuture<kotlin.Unit> startAudioConversation(kotlin.jvm.functions.Function2<? super com.google.firebase.ai.type.Transcription?,? super com.google.firebase.ai.type.Transcription?,kotlin.Unit>? transcriptHandler = null);
159+
method @RequiresPermission(android.Manifest.permission.RECORD_AUDIO) public abstract com.google.common.util.concurrent.ListenableFuture<kotlin.Unit> startAudioConversation(kotlin.jvm.functions.Function2<? super com.google.firebase.ai.type.Transcription?,? super com.google.firebase.ai.type.Transcription?,kotlin.Unit>? transcriptHandler = null, boolean enableInterruptions);
160160
method @RequiresPermission(android.Manifest.permission.RECORD_AUDIO) public abstract com.google.common.util.concurrent.ListenableFuture<kotlin.Unit> stopAudioConversation();
161161
method public abstract void stopReceiving();
162162
field public static final com.google.firebase.ai.java.LiveSessionFutures.Companion Companion;
@@ -924,7 +924,7 @@ package com.google.firebase.ai.type {
924924
method public suspend Object? sendVideoRealtime(com.google.firebase.ai.type.InlineData video, kotlin.coroutines.Continuation<? super kotlin.Unit>);
925925
method @RequiresPermission(android.Manifest.permission.RECORD_AUDIO) public suspend Object? startAudioConversation(kotlin.jvm.functions.Function1<? super com.google.firebase.ai.type.FunctionCallPart,com.google.firebase.ai.type.FunctionResponsePart>? functionCallHandler = null, boolean enableInterruptions = false, kotlin.coroutines.Continuation<? super kotlin.Unit>);
926926
method @RequiresPermission(android.Manifest.permission.RECORD_AUDIO) public suspend Object? startAudioConversation(kotlin.jvm.functions.Function1<? super com.google.firebase.ai.type.FunctionCallPart,com.google.firebase.ai.type.FunctionResponsePart>? functionCallHandler = null, kotlin.coroutines.Continuation<? super kotlin.Unit>);
927-
method @RequiresPermission(android.Manifest.permission.RECORD_AUDIO) public suspend Object? startAudioConversation(kotlin.jvm.functions.Function1<? super com.google.firebase.ai.type.FunctionCallPart,com.google.firebase.ai.type.FunctionResponsePart>? functionCallHandler = null, kotlin.jvm.functions.Function1<? super com.google.firebase.ai.type.LiveServerMessage,kotlin.Unit>? transcriptHandler = null, boolean enableInterruptions = false, kotlin.coroutines.Continuation<? super kotlin.Unit>);
927+
method @RequiresPermission(android.Manifest.permission.RECORD_AUDIO) public suspend Object? startAudioConversation(kotlin.jvm.functions.Function1<? super com.google.firebase.ai.type.FunctionCallPart,com.google.firebase.ai.type.FunctionResponsePart>? functionCallHandler = null, kotlin.jvm.functions.Function2<? super com.google.firebase.ai.type.Transcription?,? super com.google.firebase.ai.type.Transcription?,kotlin.Unit>? transcriptHandler = null, boolean enableInterruptions = false, kotlin.coroutines.Continuation<? super kotlin.Unit>);
928928
method public void stopAudioConversation();
929929
method public void stopReceiving();
930930
}

firebase-ai/src/main/kotlin/com/google/firebase/ai/java/LiveSessionFutures.kt

Lines changed: 15 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,7 @@ import com.google.firebase.ai.type.LiveSession
2929
import com.google.firebase.ai.type.MediaData
3030
import com.google.firebase.ai.type.PublicPreviewAPI
3131
import com.google.firebase.ai.type.SessionAlreadyReceivingException
32+
import com.google.firebase.ai.type.Transcription
3233
import io.ktor.websocket.close
3334
import kotlinx.coroutines.reactive.asPublisher
3435
import org.reactivestreams.Publisher
@@ -57,11 +58,12 @@ public abstract class LiveSessionFutures internal constructor() {
5758
* Starts an audio conversation with the model, which can only be stopped using
5859
* [stopAudioConversation].
5960
* @param transcriptHandler A callback function that is invoked whenever the model receives a
60-
* transcript.
61+
* transcript. The first [Transcription] object is the input transcription, and the second is the
62+
output transcription.
6163
*/
6264
@RequiresPermission(RECORD_AUDIO)
6365
public abstract fun startAudioConversation(
64-
transcriptHandler: ((LiveServerMessage) -> Unit)? = null,
66+
transcriptHandler: ((Transcription?, Transcription?) -> Unit)? = null,
6567
): ListenableFuture<Unit>
6668

6769
/**
@@ -92,14 +94,15 @@ public abstract class LiveSessionFutures internal constructor() {
9294
* ongoing reply.
9395
*
9496
* @param transcriptHandler A callback function that is invoked whenever the model receives a
95-
* transcript.
97+
* transcript. The first [Transcription] object is the input transcription, and the second is the
98+
output transcription.
9699
*
97100
* **WARNING**: The user interruption feature relies on device-specific support, and may not be
98101
* consistently available.
99102
*/
100103
@RequiresPermission(RECORD_AUDIO)
101104
public abstract fun startAudioConversation(
102-
transcriptHandler: ((LiveServerMessage) -> Unit)? = null,
105+
transcriptHandler: ((Transcription?, Transcription?) -> Unit)? = null,
103106
enableInterruptions: Boolean
104107
): ListenableFuture<Unit>
105108

@@ -111,7 +114,8 @@ public abstract class LiveSessionFutures internal constructor() {
111114
* function call.
112115
*
113116
* @param transcriptHandler A callback function that is invoked whenever the model receives a
114-
* transcript.
117+
* transcript. The first [Transcription] object is the input transcription, and the second is the
118+
output transcription.
115119
*
116120
* @param enableInterruptions If enabled, allows the user to speak over or interrupt the model's
117121
* ongoing reply.
@@ -122,7 +126,7 @@ public abstract class LiveSessionFutures internal constructor() {
122126
@RequiresPermission(RECORD_AUDIO)
123127
public abstract fun startAudioConversation(
124128
functionCallHandler: ((FunctionCallPart) -> FunctionResponsePart)?,
125-
transcriptHandler: ((LiveServerMessage) -> Unit)? = null,
129+
transcriptHandler: ((Transcription?, Transcription?) -> Unit)? = null,
126130
enableInterruptions: Boolean
127131
): ListenableFuture<Unit>
128132

@@ -287,7 +291,9 @@ public abstract class LiveSessionFutures internal constructor() {
287291
) = SuspendToFutureAdapter.launchFuture { session.startAudioConversation(functionCallHandler) }
288292

289293
@RequiresPermission(RECORD_AUDIO)
290-
override fun startAudioConversation(transcriptHandler: ((LiveServerMessage) -> Unit)?) =
294+
override fun startAudioConversation(
295+
transcriptHandler: ((Transcription?, Transcription?) -> Unit)?
296+
) =
291297
SuspendToFutureAdapter.launchFuture {
292298
session.startAudioConversation(transcriptHandler = transcriptHandler)
293299
}
@@ -304,7 +310,7 @@ public abstract class LiveSessionFutures internal constructor() {
304310

305311
@RequiresPermission(RECORD_AUDIO)
306312
override fun startAudioConversation(
307-
transcriptHandler: ((LiveServerMessage) -> Unit)?,
313+
transcriptHandler: ((Transcription?, Transcription?) -> Unit)?,
308314
enableInterruptions: Boolean
309315
) =
310316
SuspendToFutureAdapter.launchFuture {
@@ -317,7 +323,7 @@ public abstract class LiveSessionFutures internal constructor() {
317323
@RequiresPermission(RECORD_AUDIO)
318324
override fun startAudioConversation(
319325
functionCallHandler: ((FunctionCallPart) -> FunctionResponsePart)?,
320-
transcriptHandler: ((LiveServerMessage) -> Unit)?,
326+
transcriptHandler: ((Transcription?, Transcription?) -> Unit)?,
321327
enableInterruptions: Boolean
322328
) =
323329
SuspendToFutureAdapter.launchFuture {

firebase-ai/src/main/kotlin/com/google/firebase/ai/type/LiveSession.kt

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -134,7 +134,8 @@ internal constructor(
134134
* automatically sent to the model.
135135
*
136136
* @param transcriptHandler A callback function that is invoked whenever the model receives a
137-
* transcript.
137+
* transcript. The first [Transcription] object is the input transcription, and the second is the
138+
output transcription.
138139
*
139140
* @param enableInterruptions If enabled, allows the user to speak over or interrupt the model's
140141
* ongoing reply.
@@ -145,7 +146,7 @@ internal constructor(
145146
@RequiresPermission(RECORD_AUDIO)
146147
public suspend fun startAudioConversation(
147148
functionCallHandler: ((FunctionCallPart) -> FunctionResponsePart)? = null,
148-
transcriptHandler: ((LiveServerMessage) -> Unit)? = null,
149+
transcriptHandler: ((Transcription?, Transcription?) -> Unit)? = null,
149150
enableInterruptions: Boolean = false,
150151
) {
151152

@@ -419,7 +420,7 @@ internal constructor(
419420
*/
420421
private fun processModelResponses(
421422
functionCallHandler: ((FunctionCallPart) -> FunctionResponsePart)?,
422-
transcriptHandler: ((LiveServerMessage) -> Unit)?
423+
transcriptHandler: ((Transcription?, Transcription?) -> Unit)?
423424
) {
424425
receive()
425426
.onEach {
@@ -449,7 +450,7 @@ internal constructor(
449450
}
450451
is LiveServerContent -> {
451452
if (it.outputTranscription != null || it.inputTranscription != null) {
452-
transcriptHandler?.invoke(it)
453+
transcriptHandler?.invoke(it.inputTranscription, it.outputTranscription)
453454
}
454455
if (it.interrupted) {
455456
playBackQueue.clear()

0 commit comments

Comments
 (0)