1 parent 7264ca1 commit 5d5b4ad
doc/fluid/advanced_usage/deploy/inference/native_infer.md
@@ -221,9 +221,8 @@ const int num_threads = 10; // assume there are 10 service threads
 std::vector<std::thread> threads;
 std::vector<decltype(main_predictor)> predictors;

-// create all the predictors outside the threads
-predictors.emplace_back(std::move(main_predictor));
-for (int i = 1; i < num_threads; i++) {
+// put the cloned predictors into the vector for the threads to use
+for (int i = 0; i < num_threads; i++) {
   predictors.emplace_back(main_predictor->Clone());
 }
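For context, below is a minimal sketch of how the cloned predictors might be driven from worker threads. It assumes the fluid-era native inference API referenced by this doc (`paddle::NativeConfig`, `CreatePaddlePredictor`, `PaddleTensor`, `Run`), an assumed header name, and a hypothetical `./model` directory; it is an illustration of the pattern the diff documents, not part of the commit.

```c++
#include <thread>
#include <vector>

#include "paddle_inference_api.h"  // assumed header for the native inference API

void RunService() {
  paddle::NativeConfig config;
  config.model_dir = "./model";  // hypothetical model location

  // Create the main predictor once, outside the service threads.
  auto main_predictor =
      paddle::CreatePaddlePredictor<paddle::NativeConfig>(config);

  const int num_threads = 10;  // assume there are 10 service threads
  std::vector<std::thread> threads;
  std::vector<decltype(main_predictor)> predictors;

  // Put the cloned predictors into the vector for the threads to use.
  for (int i = 0; i < num_threads; i++) {
    predictors.emplace_back(main_predictor->Clone());
  }

  // Each thread runs inference on its own clone, so no predictor is shared.
  for (int i = 0; i < num_threads; i++) {
    threads.emplace_back([&predictors, i] {
      std::vector<paddle::PaddleTensor> inputs;   // fill with real input tensors
      std::vector<paddle::PaddleTensor> outputs;
      predictors[i]->Run(inputs, &outputs);
    });
  }
  for (auto& t : threads) {
    t.join();
  }
}
```

The per-thread `Clone()` is the point of the doc change: a predictor instance is not meant to be shared across threads, so each service thread works against its own clone of the main predictor.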