
Commit 7b9e327

Check map
1 parent a9dd1ca commit 7b9e327

5 files changed: +16 -28 lines changed

src/custom_nodes/common/buffersqueue.cpp

Lines changed: 1 addition & 1 deletion
@@ -26,7 +26,7 @@ BuffersQueue::BuffersQueue(size_t singleBufferSize, int streamsLength) :
     size(singleBufferSize * streamsLength),
     memoryPool(std::make_unique<char[]>(size)) {
     for (int i = 0; i < streamsLength; ++i) {
-        inferRequests.push_back(memoryPool.get() + i * singleBufferSize);
+        inferRequests.insert({i, memoryPool.get() + i * singleBufferSize});
     }
 }
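Note: the hunk above swaps positional std::vector::push_back storage for std::unordered_map::insert keyed by the loop index, so lookups by stream id stay equivalent. A minimal, self-contained sketch of that pattern (illustrative only, not code from this commit):

#include <cstddef>
#include <iostream>
#include <memory>
#include <unordered_map>
#include <vector>

int main() {
    const size_t singleBufferSize = 16;
    const int streamsLength = 4;
    auto memoryPool = std::make_unique<char[]>(singleBufferSize * streamsLength);

    // Before: positional storage, the index i is implied by insertion order.
    std::vector<char*> asVector;
    for (int i = 0; i < streamsLength; ++i)
        asVector.push_back(memoryPool.get() + i * singleBufferSize);

    // After: the index becomes an explicit key.
    std::unordered_map<int, char*> asMap;
    for (int i = 0; i < streamsLength; ++i)
        asMap.insert({i, memoryPool.get() + i * singleBufferSize});

    // Lookup by index is equivalent in both representations.
    std::cout << (asVector[2] == asMap.at(2)) << std::endl;  // prints 1
    return 0;
}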

src/ovinferrequestsqueue.cpp

Lines changed: 2 additions & 3 deletions
@@ -21,12 +21,11 @@

 namespace ovms {
 OVInferRequestsQueue::OVInferRequestsQueue(ov::CompiledModel& compiledModel, int streamsLength) :
-    Queue(streamsLength),
-    compiledModel(compiledModel) {
+    Queue(streamsLength) {
     for (int i = 0; i < streamsLength; ++i) {
         streams[i] = i;
         OV_LOGGER("ov::CompiledModel: {} compiledModel.create_infer_request()", reinterpret_cast<void*>(&compiledModel));
-        inferRequests.push_back(compiledModel.create_infer_request());
+        inferRequests.insert({i, compiledModel.create_infer_request()});
     }
 }
 } // namespace ovms

src/ovinferrequestsqueue.hpp

Lines changed: 0 additions & 1 deletion
@@ -24,7 +24,6 @@ namespace ovms {
 class OVInferRequestsQueue : public Queue<ov::InferRequest> {
 public:
     OVInferRequestsQueue(ov::CompiledModel& compiledModel, int streamsLength);
-    ov::CompiledModel& compiledModel;
 };

 } // namespace ovms

src/queue.hpp

Lines changed: 6 additions & 17 deletions
@@ -25,6 +25,7 @@
 #include <thread>
 #include <utility>
 #include <vector>
+#include <unordered_map>

 // #include "profiler.hpp"

@@ -55,16 +56,6 @@ class Queue {
         return idleStreamFuture;
     }

-    void extendQueue() {
-        if (!constructFunc.has_value()) {
-            return;
-        }
-        size_t streamSize = streams.size();
-        streams.push_back(streamSize - 1);
-        inferRequests.reserve(streams.size());
-        inferRequests.push_back(constructFunc.value()());
-    }
-
     std::optional<int> tryToGetIdleStream() {
         // OVMS_PROFILE_FUNCTION();
         int value;
@@ -79,6 +70,7 @@ class Queue {
             return value;
         }
     }
+
     /**
      * @brief Release stream after execution
      */
@@ -104,16 +96,13 @@ class Queue {
     /**
      * @brief Constructor with initialization
      */
-    // change constructor so that it can also accept lambda which returns T. This lambda
-    // is optional but if exists it will be used to construct T objects
-    Queue(int streamsLength, std::optional<std::function<T()>> constructFunc = std::nullopt) : streams(streamsLength),
-        constructFunc(constructFunc),
+    Queue(int streamsLength) :
+        streams(streamsLength),
         front_idx{0},
         back_idx{0} {
         for (int i = 0; i < streamsLength; ++i) {
             streams[i] = i;
         }
-        streams.reserve(50);
     }

     /**
@@ -128,7 +117,7 @@
      * @brief Vector representing circular buffer for infer queue
      */
     std::vector<int> streams;
-    std::optional<std::function<T()>> constructFunc = std::nullopt;
+
     /**
      * @brief Index of the front of the idle streams list
      */
@@ -147,7 +136,7 @@
     /**
      *
      */
-    std::vector<T> inferRequests;
+    std::unordered_map<int, T> inferRequests;
     std::queue<std::promise<int>> promises;
 };
 } // namespace ovms
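For context, a minimal sketch of the storage layout after this change: stream ids stay in a circular std::vector<int>, while the payloads move into a std::unordered_map<int, T> keyed by stream id. Inserting into an unordered_map never invalidates references or pointers to existing elements, which std::vector::push_back does not guarantee once reallocation occurs. Method names and the promise/future blocking of the real Queue are simplified or assumed here; this is not the class from src/queue.hpp:

#include <iostream>
#include <string>
#include <unordered_map>
#include <vector>

template <typename T>
class MiniQueue {
public:
    MiniQueue(int streamsLength) : streams(streamsLength), front_idx{0}, back_idx{0} {
        for (int i = 0; i < streamsLength; ++i)
            streams[i] = i;
    }
    // Hand out the next idle stream id; the real Queue blocks on a
    // promise/future when no stream is idle, which is omitted here.
    int getIdleStream() {
        int id = streams[front_idx];
        front_idx = (front_idx + 1) % streams.size();
        return id;
    }
    void returnStream(int id) {
        streams[back_idx] = id;
        back_idx = (back_idx + 1) % streams.size();
    }
    // Payload lookup by stream id, now backed by the map.
    T& getInferRequest(int id) { return inferRequests[id]; }

private:
    std::vector<int> streams;
    size_t front_idx;
    size_t back_idx;
    std::unordered_map<int, T> inferRequests;
};

int main() {
    MiniQueue<std::string> q(2);
    int id = q.getIdleStream();
    q.getInferRequest(id) = "request for stream " + std::to_string(id);
    std::cout << q.getInferRequest(id) << std::endl;
    q.returnStream(id);
    return 0;
}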

src/test/openvino_tests.cpp

Lines changed: 7 additions & 6 deletions
@@ -92,6 +92,7 @@ TEST_F(OpenVINO, CallbacksTest) {
     EXPECT_TRUE(outAutoTensor.is<ov::Tensor>());
 }
 TEST_F(OpenVINO, StressInferTest) {
+    GTEST_SKIP();
     Core core;
     auto model = core.read_model("/ovms/src/test/dummy/1/dummy.xml");
     const std::string inputName{"b"};
@@ -110,7 +111,7 @@ TEST_F(OpenVINO, StressInferTest) {
     SPDLOG_INFO("Starting vector size:{}, vector capacity:{}", inferRequests.size(), inferRequests.capacity());
     inferRequests.reserve(2);
     SPDLOG_INFO("Starting vector size:{}, vector capacity:{}", inferRequests.size(), inferRequests.capacity());
-    //inferRequests.shrink_to_fit();
+    // inferRequests.shrink_to_fit();
     // we want to test workload when we increase number of infer requests vector during workload
     // so we start with vector of 1, start workload on it
     // then after 1s we start another thread which will add another infer request to the vector
@@ -132,8 +133,8 @@ TEST_F(OpenVINO, StressInferTest) {
         for (size_t j = 0; j < 100000; j++) {
             reinterpret_cast<float*>(inputOvTensor.data())[j] = i;
             reinterpret_cast<float*>(outputOvTensor.data())[j] = (i + 1);
-            if (j<10 || j > 99990) {
-                SPDLOG_ERROR("input data: {}, expected: {}, i:{}, j:{}", reinterpret_cast<float*>(inputOvTensor.data())[j], reinterpret_cast<float*>(outputOvTensor.data())[j], i, j);
+            if (j < 10 || j > 99990) {
+                SPDLOG_ERROR("input data: {}, expected: {}, i:{}, j:{}", reinterpret_cast<float*>(inputOvTensor.data())[j], reinterpret_cast<float*>(outputOvTensor.data())[j], i, j);
             }
         }

@@ -146,9 +147,9 @@ TEST_F(OpenVINO, StressInferTest) {
         inferRequest.wait();
         auto outOvTensor = inferRequest.get_tensor("a");
         for (size_t j = 0; j < 100000; j++) {
-            if (j<10 || j > 99990) {
-                SPDLOG_ERROR("infReqRef:{} infReq[i]:{} outTensor data: {}, expected: {} i:{} j:{} k:{}", (void*)(&inferRequest), (void*)(&inferRequests[i]),reinterpret_cast<float*>(outOvTensor.data())[j], reinterpret_cast<float*>(outputOvTensor.data())[j], i, j , k);
-            }
+            if (j < 10 || j > 99990) {
+                SPDLOG_ERROR("infReqRef:{} infReq[i]:{} outTensor data: {}, expected: {} i:{} j:{} k:{}", (void*)(&inferRequest), (void*)(&inferRequests[i]), reinterpret_cast<float*>(outOvTensor.data())[j], reinterpret_cast<float*>(outputOvTensor.data())[j], i, j, k);
+            }
         }
         ASSERT_EQ(0, std::memcmp(outOvTensor.data(), outputOvTensor.data(), outOvTensor.get_byte_size())) << "i: " << i;
         ASSERT_EQ(0, std::memcmp(outOvTensor.data(), outputOvTensor.data(), outOvTensor.get_byte_size())) << "i: " << i;
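StressInferTest is disabled at runtime with GTEST_SKIP(), a standard GoogleTest macro that marks the test as skipped so the body after it never runs. A tiny, generic illustration (hypothetical test name, assumes linking with gtest_main; not code from this commit):

#include <gtest/gtest.h>

// GTEST_SKIP() reports the test as skipped; the statements below it do not execute.
TEST(ExampleSuite, SkippedAtRuntime) {
    GTEST_SKIP() << "temporarily disabled";
    FAIL() << "never reached";
}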
