Skip to content

Commit 6374279

Browse files
author
litongjava
committed
add vcpkg.json
1 parent 4d5b53f commit 6374279

File tree

9 files changed

+34647
-27
lines changed

9 files changed

+34647
-27
lines changed

CMakeLists.txt

Lines changed: 5 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -3,27 +3,14 @@ project(whisper_cpp_server)
33

44
set(CMAKE_CXX_STANDARD 14)
55
# find SDL2 library
6-
find_package(SDL2 REQUIRED)
6+
find_package(SDL2 CONFIG REQUIRED)
77
include_directories(${SDL2_INCLUDE_DIRS})
88

9-
10-
# Detect the operating system
11-
if (${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
12-
# macOS
13-
include_directories(/Users/ping/code/cpp/project-litongjava/whisper.cpp)
14-
link_directories(/Users/ping/code/cpp/project-litongjava/whisper.cpp/cmake-build-release)
15-
elseif (${CMAKE_SYSTEM_NAME} MATCHES "Linux")
16-
# Linux
17-
include_directories(/mnt/e/code/cpp/project-ping/whisper.cpp)
18-
link_directories(/mnt/e/code/cpp/project-ping/whisper.cpp/cmake-build-release)
19-
endif ()
20-
21-
add_executable(web_socket_server main.cpp web_socket_server.cpp)
22-
# Link the whisper.cpp library
23-
target_link_libraries(web_socket_server whisper ${SDL2_LIBRARIES})
24-
9+
add_executable(sdl_version sdl_version.cpp)
10+
target_link_libraries(sdl_version ${SDL2_LIBRARIES})
2511
add_executable(stream_components stream_components.cpp)
2612
target_link_libraries(stream_components whisper ${SDL2_LIBRARIES})
2713

28-
14+
#add_executable(server server.cpp httplib.h json.hpp)
15+
#target_link_libraries(server common.cpp whisper)
2916

README.md

Lines changed: 59 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,59 @@
1+
# whisper-cpp server
2+
3+
Simple http server. WAV Files are passed to the inference model via http requests.
4+
5+
```
6+
./server -h
7+
8+
usage: ./bin/server [options]
9+
10+
options:
11+
-h, --help [default] show this help message and exit
12+
-t N, --threads N [4 ] number of threads to use during computation
13+
-p N, --processors N [1 ] number of processors to use during computation
14+
-ot N, --offset-t N [0 ] time offset in milliseconds
15+
-on N, --offset-n N [0 ] segment index offset
16+
-d N, --duration N [0 ] duration of audio to process in milliseconds
17+
-mc N, --max-context N [-1 ] maximum number of text context tokens to store
18+
-ml N, --max-len N [0 ] maximum segment length in characters
19+
-sow, --split-on-word [false ] split on word rather than on token
20+
-bo N, --best-of N [2 ] number of best candidates to keep
21+
-bs N, --beam-size N [-1 ] beam size for beam search
22+
-wt N, --word-thold N [0.01 ] word timestamp probability threshold
23+
-et N, --entropy-thold N [2.40 ] entropy threshold for decoder fail
24+
-lpt N, --logprob-thold N [-1.00 ] log probability threshold for decoder fail
25+
-debug, --debug-mode [false ] enable debug mode (eg. dump log_mel)
26+
-tr, --translate [false ] translate from source language to english
27+
-di, --diarize [false ] stereo audio diarization
28+
-tdrz, --tinydiarize [false ] enable tinydiarize (requires a tdrz model)
29+
-nf, --no-fallback [false ] do not use temperature fallback while decoding
30+
-ps, --print-special [false ] print special tokens
31+
-pc, --print-colors [false ] print colors
32+
-pp, --print-progress [false ] print progress
33+
-nt, --no-timestamps [false ] do not print timestamps
34+
-l LANG, --language LANG [en ] spoken language ('auto' for auto-detect)
35+
-dl, --detect-language [false ] exit after automatically detecting language
36+
--prompt PROMPT [ ] initial prompt
37+
-m FNAME, --model FNAME [models/ggml-base.en.bin] model path
38+
-oved D, --ov-e-device DNAME [CPU ] the OpenVINO device used for encode inference
39+
--host HOST,                [127.0.0.1] Hostname/IP address for the server
40+
--port PORT, [8080 ] Port number for the server
41+
```
42+
43+
## request examples
44+
45+
**/inference**
46+
```
47+
curl 127.0.0.1:8080/inference \
48+
-H "Content-Type: multipart/form-data" \
49+
-F file="<file-path>" \
50+
-F temperature="0.2" \
51+
-F response-format="json"
52+
```
53+
54+
**/load**
55+
```
56+
curl 127.0.0.1:8080/load \
57+
-H "Content-Type: multipart/form-data" \
58+
-F model="<path-to-model-file>"
59+
```

0 commit comments

Comments (0)