Skip to content

Commit 39617fc

Browse files
committed
v1.1.12 bump gradio from 5.0 to 6.0, several fixes to ensure compatibility with new gradio version
1 parent 5b82f97 commit 39617fc

File tree

2 files changed

+9
-4
lines changed

2 files changed

+9
-4
lines changed

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
44

55
[project]
66
name = "f5-tts"
7-
version = "1.1.11"
7+
version = "1.1.12"
88
description = "F5-TTS: A Fairytaler that Fakes Fluent and Faithful Speech with Flow Matching"
99
readme = "README.md"
1010
license = {text = "MIT License"}

src/f5_tts/infer/infer_gradio.py

Lines changed: 8 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -816,7 +816,9 @@ def load_chat_model(chat_model_name):
816816
lines=2,
817817
)
818818

819-
chatbot_interface = gr.Chatbot(label="Conversation", type="messages")
819+
chatbot_interface = gr.Chatbot(
820+
label="Conversation"
821+
) # type="messages" is hard-coded and no longer needs to be passed since Gradio 6.0
820822

821823
with gr.Row():
822824
with gr.Column():
@@ -853,6 +855,10 @@ def process_audio_input(conv_state, audio_path, text):
853855
@gpu_decorator
854856
def generate_text_response(conv_state, system_prompt):
855857
"""Generate text response from AI"""
858+
for single_state in conv_state:
859+
if isinstance(single_state["content"], list):
860+
assert len(single_state["content"]) == 1 and single_state["content"][0]["type"] == "text"
861+
single_state["content"] = single_state["content"][0]["text"]
856862

857863
system_prompt_state = [{"role": "system", "content": system_prompt}]
858864
response = chat_model_inference(system_prompt_state + conv_state, chat_model_state, chat_tokenizer_state)
@@ -866,7 +872,7 @@ def generate_audio_response(conv_state, ref_audio, ref_text, remove_silence, ran
866872
if not conv_state or not ref_audio:
867873
return None, ref_text, seed_input
868874

869-
last_ai_response = conv_state[-1]["content"]
875+
last_ai_response = conv_state[-1]["content"][0]["text"]
870876
if not last_ai_response or conv_state[-1]["role"] != "assistant":
871877
return None, ref_text, seed_input
872878

@@ -1108,7 +1114,6 @@ def main(port, host, share, api, root_path, inbrowser):
11081114
server_name=host,
11091115
server_port=port,
11101116
share=share,
1111-
show_api=api,
11121117
root_path=root_path,
11131118
inbrowser=inbrowser,
11141119
)

0 commit comments

Comments (0)