This repository was archived by the owner on Feb 3, 2025. It is now read-only.

Commit 9d452bc

Add building with CMake and update README
1 parent a937c24 commit 9d452bc

2 files changed (+40, -19 lines)

tftrt/benchmarking-cpp/CMakeLists.txt

Lines changed: 11 additions & 11 deletions
@@ -1,5 +1,5 @@
 cmake_minimum_required(VERSION 3.13)
-project(TF_TRT_Benchmark_Runner)
+project(TFTRT_Benchmark_Runner)

 #-------------------------------------------------------------
 # Configuration
@@ -29,19 +29,19 @@ add_custom_target(tf_symlinks DEPENDS ${tf_framework_shared_lib} ${tf_shared_lib
 #-----------------------------------------------------------
 # Benchmark Runner Targets
 #-----------------------------------------------------------
-add_executable(tf_trt_benchmark_runner main.cc)
+add_executable(tftrt_benchmark_runner main.cc)

-target_link_libraries(tf_trt_benchmark_runner tensorflow_cc)
-target_link_libraries(tf_trt_benchmark_runner tensorflow_framework)
+target_link_libraries(tftrt_benchmark_runner tensorflow_cc)
+target_link_libraries(tftrt_benchmark_runner tensorflow_framework)

-target_compile_options(tf_trt_benchmark_runner PRIVATE -D_GLIBCXX_USE_CXX11_ABI=1 -DGOOGLE_CUDA -DGOOGLE_TENSORRT)
+target_compile_options(tftrt_benchmark_runner PRIVATE -D_GLIBCXX_USE_CXX11_ABI=1 -DGOOGLE_CUDA -DGOOGLE_TENSORRT)

-target_link_directories(tf_trt_benchmark_runner PRIVATE ${tf_python_dir})
-target_link_directories(tf_trt_benchmark_runner PRIVATE ${tf_dir})
+target_link_directories(tftrt_benchmark_runner PRIVATE ${tf_python_dir})
+target_link_directories(tftrt_benchmark_runner PRIVATE ${tf_dir})

-target_compile_options(tf_trt_benchmark_runner PRIVATE -O2 -Wl,-rpath=${tf_python_dir})
+target_compile_options(tftrt_benchmark_runner PRIVATE -O2 -Wl,-rpath=${tf_python_dir})

-target_include_directories(tf_trt_benchmark_runner PRIVATE ${tf_python_dir}/include)
-target_include_directories(tf_trt_benchmark_runner PRIVATE ${trt_include_path})
+target_include_directories(tftrt_benchmark_runner PRIVATE ${tf_python_dir}/include)
+target_include_directories(tftrt_benchmark_runner PRIVATE ${trt_include_path})

-add_dependencies(tf_trt_benchmark_runner tf_symlinks)
+add_dependencies(tftrt_benchmark_runner tf_symlinks)
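
After this rename, the CMake target and the binary it produces are both called `tftrt_benchmark_runner`. As a minimal sanity check (not part of the commit), assuming you are in the directory that contains the built binary, you can confirm that the TensorFlow libraries linked above resolve at runtime:

```
# Check that the renamed executable was produced.
ls -l tftrt_benchmark_runner

# Verify that libtensorflow_cc and libtensorflow_framework resolve,
# i.e. the link directories and -Wl,-rpath set in CMakeLists.txt took effect.
ldd tftrt_benchmark_runner | grep -i tensorflow
```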

tftrt/benchmarking-cpp/README.md

Lines changed: 29 additions & 8 deletions
@@ -4,12 +4,6 @@ This straightforward example uses TF's C++ API to serve a saved model and measur

 ## Docker Environment

-Pull the image:
-
-```
-docker pull nvcr.io/nvidia/tensorflow:22.06-tf2-py3
-```
-
 Start the container:

 ```
@@ -32,18 +26,45 @@ python3 convert_model.py --model-dir /path/to/model/dir --output-dir /path/to/de

 ## Building

+The binary relies on a modified Tensorflow, which will need to be rebuilt. Internal users can use a container with Tensorflow already modified and built, instead of building with Bazel, which will take much longer.
+
+### Bazel
+
+The `setup.sh` script applies the Tensorflow patch and prepares the container for the Bazel build.
+
 ```
 /workspace/tensorrt/tftrt/benchmarking-cpp/build-scripts/setup.sh
 cd /opt/tensorflow
 ./tftrt-build.sh
 ```

+The binary will be located at `/opt/tensorflow/tensorflow-source/bazel-bin/tensorflow/examples/benchmarking-cpp/tftrt_benchmark_runner`.
+
+### Prebuilt
+
+For internal NVIDIA users, a container with a prebuilt modified Tensorflow is available:
+
+```
+docker run --rm --gpus all --ipc=host --ulimit memlock=-1 --ulimit stack=67108864 -it --name TFTRT_CPP gitlab-master.nvidia.com:5005/dl/dgx/tensorflow:cpp-tensorboard-patch-py3-base
+```
+
+Use CMake to build the binary without needing to rebuild Tensorflow:
+
+```
+cd /workspace/tensorrt/tftrt/benchmarking-cpp
+mkdir build && cd build
+cmake ..
+make
+```
+
+The binary will be located at `/workspace/tensorrt/tftrt/benchmarking-cpp/tftrt_benchmark_runner`.
+
 ## Running

 ```
-/opt/tensorflow/tensorflow-source/bazel-bin/tensorflow/examples/benchmarking-cpp/tftrt_benchmark_runner --model_path="/path/to/dest/dir"
+./tftrt_benchmark_runner --model_path="/path/to/dest/dir"
 ```

-## Profiling
+### Profiling

 To profile, set the `--out_dir` flag. Run `tensorboard --logdir [out_dir]` to view results.
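
Putting the new run and profiling steps together, a minimal example; the model path and the `/tmp/tftrt_profile` output directory are placeholders, not paths defined by this commit:

```
# Benchmark the converted saved model and write profiling data for TensorBoard.
./tftrt_benchmark_runner --model_path="/path/to/dest/dir" --out_dir="/tmp/tftrt_profile"

# Inspect the profile in TensorBoard.
tensorboard --logdir /tmp/tftrt_profile
```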
