Commit 4d2f0f6

Suppressing PortAudio debug messages.

1 parent 5d8757b · commit 4d2f0f6

File tree: 3 files changed, +43 -21 lines

software/poetry.lock

Lines changed: 22 additions & 2 deletions
Generated lockfile; the diff is not rendered by default.

software/pyproject.toml

Lines changed: 1 addition & 0 deletions
@@ -34,6 +34,7 @@ pytimeparse = "^1.1.8"
 python-crontab = "^3.0.0"
 inquirer = "^3.2.4"
 pyqrcode = "^1.2.1"
+sounddevice = "^0.4.6"

 [build-system]
 requires = ["poetry-core"]

software/start.py

Lines changed: 20 additions & 19 deletions
@@ -8,6 +8,7 @@
 from source.server.tunnel import create_tunnel
 from source.server.server import main
 from source.server.utils.local_mode import select_local_model
+import sounddevice

 import signal
 app = typer.Typer()
@@ -17,32 +18,32 @@ def run(
     server: bool = typer.Option(False, "--server", help="Run server"),
     server_host: str = typer.Option("0.0.0.0", "--server-host", help="Specify the server host where the server will deploy"),
     server_port: int = typer.Option(10001, "--server-port", help="Specify the server port where the server will deploy"),
-
+
     tunnel_service: str = typer.Option("ngrok", "--tunnel-service", help="Specify the tunnel service"),
     expose: bool = typer.Option(False, "--expose", help="Expose server to internet"),
-
+
     client: bool = typer.Option(False, "--client", help="Run client"),
     server_url: str = typer.Option(None, "--server-url", help="Specify the server URL that the client should expect. Defaults to server-host and server-port"),
     client_type: str = typer.Option("auto", "--client-type", help="Specify the client type"),
-
+
     llm_service: str = typer.Option("litellm", "--llm-service", help="Specify the LLM service"),
-
+
     model: str = typer.Option("gpt-4", "--model", help="Specify the model"),
     llm_supports_vision: bool = typer.Option(False, "--llm-supports-vision", help="Specify if the LLM service supports vision"),
     llm_supports_functions: bool = typer.Option(False, "--llm-supports-functions", help="Specify if the LLM service supports functions"),
     context_window: int = typer.Option(2048, "--context-window", help="Specify the context window size"),
     max_tokens: int = typer.Option(4096, "--max-tokens", help="Specify the maximum number of tokens"),
     temperature: float = typer.Option(0.8, "--temperature", help="Specify the temperature for generation"),
-
+
     tts_service: str = typer.Option("openai", "--tts-service", help="Specify the TTS service"),
-
+
     stt_service: str = typer.Option("openai", "--stt-service", help="Specify the STT service"),

     local: bool = typer.Option(False, "--local", help="Use recommended local services for LLM, STT, and TTS"),
-
+
     qr: bool = typer.Option(False, "--qr", help="Print the QR code for the server URL")
 ):
-
+
     _run(
         server=server,
         server_host=server_host,
@@ -69,41 +70,41 @@ def _run(
     server: bool = False,
     server_host: str = "0.0.0.0",
     server_port: int = 10001,
-
+
     tunnel_service: str = "bore",
     expose: bool = False,
-
+
     client: bool = False,
     server_url: str = None,
     client_type: str = "auto",
-
+
     llm_service: str = "litellm",
-
+
     model: str = "gpt-4",
     llm_supports_vision: bool = False,
     llm_supports_functions: bool = False,
     context_window: int = 2048,
     max_tokens: int = 4096,
     temperature: float = 0.8,
-
+
     tts_service: str = "openai",
-
+
     stt_service: str = "openai",

     local: bool = False,
-
+
     qr: bool = False
 ):
-
+
     if local:
         tts_service = "piper"
         # llm_service = "llamafile"
         stt_service = "local-whisper"
         select_local_model()
-
+
     if not server_url:
         server_url = f"{server_host}:{server_port}"
-
+
     if not server and not client:
         server = True
         client = True
@@ -152,4 +153,4 @@ def handle_exit(signum, frame):
         if client:
             client_thread.join()
     except KeyboardInterrupt:
-        os.kill(os.getpid(), signal.SIGINT)
+        os.kill(os.getpid(), signal.SIGINT)
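Apart from the new import, the start.py hunks above are whitespace-only changes to otherwise blank lines; the only functional change is the single import of sounddevice at the top of the entry point. Going by the commit message, the apparent intent is that sounddevice initializes PortAudio once when it is imported, so PortAudio/ALSA start-up chatter is dealt with up front rather than surfacing later when an audio device is first opened. A minimal sketch of that pattern (an illustration of the idea, not the project's actual start.py) looks like this:

# Sketch of the early-import pattern (assumption about intent; the commit
# message only says "Suppressing PortAudio debug messages").
# Importing sounddevice initializes PortAudio as a side effect, so any
# PortAudio/ALSA start-up output is emitted here, once, at program start.
import sounddevice  # noqa: F401  (imported for its side effect)

import typer

app = typer.Typer()

@app.command()
def run(server: bool = typer.Option(False, "--server", help="Run server")):
    # ... the real start.py launches the 01 server and/or client here ...
    print(f"server={server}")

if __name__ == "__main__":
    app()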
