10 changes: 2 additions & 8 deletions bindings/python/src/nn_archive/NNArchiveBindings.cpp
@@ -65,23 +65,21 @@ void NNArchiveBindings::bind(pybind11::module& m, void* pCallstack) {
///////////////////////////////////////////////////////////////////////

// Bind NNArchive
nnArchive.def(py::init([](const std::filesystem::path& archivePath, NNArchiveEntry::Compression compression, const std::string& extractFolder) {
nnArchive.def(py::init([](const std::filesystem::path& archivePath, NNArchiveEntry::Compression compression) {
NNArchiveOptions options;
options.compression(compression);
options.extractFolder(extractFolder);
return NNArchive(archivePath, options);
}),
py::arg("archivePath"),
py::arg("compression") = NNArchiveEntry::Compression::AUTO,
py::arg("extractFolder") = "/tmp/",
DOC(dai, NNArchive, NNArchive));
nnArchive.def(py::init<const std::filesystem::path&, NNArchiveOptions>(),
py::arg("archivePath"),
py::arg("options") = NNArchiveOptions(),
DOC(dai, NNArchive, NNArchive));
nnArchive.def("getBlob", &NNArchive::getBlob, DOC(dai, NNArchive, getBlob));
nnArchive.def("getSuperBlob", &NNArchive::getSuperBlob, DOC(dai, NNArchive, getBlob));
nnArchive.def("getModelPath", &NNArchive::getModelPath, DOC(dai, NNArchive, getModelPath));
nnArchive.def("getOtherModelFormat", &NNArchive::getOtherModelFormat, DOC(dai, NNArchive, getOtherModelFormat));
nnArchive.def("getConfig", &NNArchive::getConfig<NNArchiveConfig>, DOC(dai, NNArchive, getConfig));
nnArchive.def("getConfigV1", &NNArchive::getConfig<v1::Config>, DOC(dai, NNArchive, getConfig));
nnArchive.def("getModelType", &NNArchive::getModelType, DOC(dai, NNArchive, getModelType));
@@ -96,10 +94,6 @@ void NNArchiveBindings::bind(pybind11::module& m, void* pCallstack) {
"compression",
[](const NNArchiveOptions& opt) { return opt.compression(); },
[](NNArchiveOptions& opt, NNArchiveEntry::Compression compression) { opt.compression(compression); });
nnArchiveOptions.def_property(
"extractFolder",
[](const NNArchiveOptions& opt) { return opt.extractFolder(); },
[](NNArchiveOptions& opt, const std::string& extractFolder) { opt.extractFolder(extractFolder); });

// Bind NNArchiveVersionedConfig
nnArchiveVersionedConfig.def(py::init<const std::filesystem::path&, NNArchiveEntry::Compression>(),
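Below is a minimal C++ sketch of what the trimmed-down binding now wraps: an NNArchive is constructed from just a path, optionally with NNArchiveOptions, and no extractFolder is involved. The archive filename and the include path are assumptions for illustration, not part of this PR.

#include <depthai/nn_archive/NNArchive.hpp>

int main() {
    // Compression defaults to AUTO, so a path alone is enough.
    dai::NNArchive archive("mobilenet.rvc2.tar.xz");  // hypothetical archive path

    // Options can still be passed explicitly; note there is no extractFolder field anymore.
    dai::NNArchiveOptions options;
    options.compression(dai::NNArchiveEntry::Compression::AUTO);
    dai::NNArchive archiveWithOptions("mobilenet.rvc2.tar.xz", options);
    return 0;
}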
2 changes: 0 additions & 2 deletions examples/cpp/RVC2/NNArchive/nn_archive_superblob.cpp
@@ -40,8 +40,6 @@ int main() {
throw std::runtime_error("SuperBlob should not be null for superblob type");
}

std::cout << "Superblob path: " << archive.getModelPath().value() << std::endl;

if(archive.getBlob()) {
throw std::runtime_error("Blob should be null for superblob type");
}
6 changes: 2 additions & 4 deletions examples/python/RVC2/NNArchive/nn_archive_superblob.py
@@ -19,11 +19,9 @@
# Therefore, getSuperBlob() is available
assert archive.getSuperBlob() is not None

# The archive is unpacked and thus a path to the superblob model is also available
assert archive.getModelPath() is not None

# There is no blob available
# There is no blob or other model format available
assert archive.getBlob() is None
assert archive.getOtherModelFormat() is None

# You can access any config version
v1config: dai.nn_archive.v1.Config = archive.getConfig()
17 changes: 6 additions & 11 deletions include/depthai/nn_archive/NNArchive.hpp
@@ -14,8 +14,6 @@
namespace dai {

struct NNArchiveOptions {
NNArchiveOptions();

// General parameters
DEPTAHI_ARG_DEFAULT(NNArchiveEntry::Compression, compression, NNArchiveEntry::Compression::AUTO);

@@ -26,7 +24,7 @@ struct NNArchiveOptions {
// ...

// Parameters for other formats, ONNX, PT, etc..
DEPTAHI_ARG_DEFAULT(std::filesystem::path, extractFolder, std::filesystem::path());
// ...
};

class NNArchive {
@@ -54,11 +52,11 @@ class NNArchive {
std::optional<OpenVINO::SuperBlob> getSuperBlob() const;

/**
* @brief Return a path to the model inside the archive if getModelType() returns OTHER or DLC, nothing otherwise
* @brief Return a model from the archive if getModelType() returns OTHER or DLC, nothing otherwise
*
* @return std::optional<Path>: Model path
* @return std::optional<std::vector<uint8_t>>: Model
*/
std::optional<std::filesystem::path> getModelPath() const;
std::optional<std::vector<uint8_t>> getOtherModelFormat() const;

/**
* @brief Get NNArchive config wrapper
@@ -117,9 +115,6 @@ class NNArchive {
// Read model from archive
std::vector<uint8_t> readModelFromArchive(const std::filesystem::path& archivePath, const std::string& modelPathInArchive) const;

// Unpack archive to tmp directory
void unpackArchiveInDirectory(const std::filesystem::path& archivePath, const std::filesystem::path& directory) const;

model::ModelType modelType;
NNArchiveOptions archiveOptions;

@@ -132,8 +127,8 @@ class NNArchive {
// Superblob related stuff
std::shared_ptr<OpenVINO::SuperBlob> superblobPtr;

// Other formats - return path to the unpacked archive
std::filesystem::path unpackedModelPath;
// Other formats
std::shared_ptr<std::vector<uint8_t>> otherModelFormatPtr;
};

} // namespace dai
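A minimal sketch of the new getOtherModelFormat() accessor, assuming a hypothetical ONNX archive path and this repository's include layout; it is not code from this PR. For OTHER and DLC archives the model bytes now live in memory instead of being unpacked to a temporary folder, so a caller that needs a file has to write them out itself.

#include <cstdint>
#include <fstream>
#include <vector>

#include <depthai/nn_archive/NNArchive.hpp>

int main() {
    dai::NNArchive archive("yolov6n.onnx.tar.xz");  // hypothetical ONNX archive

    // OTHER/DLC archives return the raw model bytes; BLOB and SUPERBLOB archives return std::nullopt.
    if(auto model = archive.getOtherModelFormat()) {
        std::ofstream out("model.onnx", std::ios::binary);
        out.write(reinterpret_cast<const char*>(model->data()), static_cast<std::streamsize>(model->size()));
    }
    return 0;
}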
15 changes: 15 additions & 0 deletions include/depthai/pipeline/node/NeuralNetwork.hpp
@@ -125,6 +125,21 @@ class NeuralNetwork : public DeviceNodeCRTP<DeviceNode, NeuralNetwork, NeuralNet
*/
void setBlob(const std::filesystem::path& path);

/**
* Load network model into assets and use once pipeline is started.
*
* @param model Network model
*/
void setOtherModelFormat(std::vector<uint8_t> model);

/**
* Load network model into assets and use once pipeline is started.
*
* @throws Error if file doesn't exist or isn't a valid network model.
* @param path Path to the network model
*/
void setOtherModelFormat(const std::filesystem::path& path);

/**
* Load network xml and bin files into assets.
* @param xmlModelPath Path to the neural network model file.
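A short sketch of how the two new setOtherModelFormat() overloads might be used, assuming the usual <depthai/depthai.hpp> umbrella header and Pipeline/node creation API; the archive and file paths are illustrative only.

#include <depthai/depthai.hpp>

int main() {
    dai::Pipeline pipeline;
    auto nn = pipeline.create<dai::node::NeuralNetwork>();

    // Hand over the in-memory model bytes taken from an archive...
    dai::NNArchive archive("model.dlc.tar.xz");  // hypothetical DLC archive
    if(auto model = archive.getOtherModelFormat()) {
        nn->setOtherModelFormat(*model);
    }

    // ...or load the model straight from a file on disk.
    // nn->setOtherModelFormat("model.dlc");
    return 0;
}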
67 changes: 24 additions & 43 deletions src/nn_archive/NNArchive.cpp
@@ -15,12 +15,7 @@

namespace dai {

NNArchiveOptions::NNArchiveOptions() {
// Default options
extractFolder(platform::getTempPath());
}

NNArchive::NNArchive(const std::filesystem::path& archivePath, NNArchiveOptions options) : archiveOptions(options) {
NNArchive::NNArchive(const std::filesystem::path& archivePath, NNArchiveOptions options) : archiveOptions(std::move(options)) {
// Make sure archive exists
if(!std::filesystem::exists(archivePath)) DAI_CHECK_V(false, "Archive file does not exist: {}", archivePath);

@@ -34,11 +29,6 @@ NNArchive::NNArchive(const std::filesystem::path& archivePath, NNArchiveOptions
// Read archive type
modelType = model::readModelType(modelPathInArchive);

// Unpack model
std::filesystem::path unpackedArchivePath = std::filesystem::path(archiveOptions.extractFolder()) / std::filesystem::path(archivePath).filename();
unpackArchiveInDirectory(archivePath, unpackedArchivePath);
unpackedModelPath = (unpackedArchivePath / modelPathInArchive);

switch(modelType) {
case model::ModelType::BLOB:
blobPtr.reset(new OpenVINO::Blob(readModelFromArchive(archivePath, modelPathInArchive)));
@@ -48,7 +38,8 @@ NNArchive::NNArchive(const std::filesystem::path& archivePath, NNArchiveOptions
break;
case model::ModelType::DLC:
case model::ModelType::OTHER:
break; // Just do nothing, model is already unpacked
otherModelFormatPtr = std::make_shared<std::vector<uint8_t>>(readModelFromArchive(archivePath, modelPathInArchive));
break;
case model::ModelType::NNARCHIVE:
DAI_CHECK_V(false, "NNArchive inside NNArchive is not supported. Please unpack the inner archive first.");
break;
@@ -96,13 +87,14 @@ std::optional<OpenVINO::SuperBlob> NNArchive::getSuperBlob() const {
}
}

std::optional<std::filesystem::path> NNArchive::getModelPath() const {
std::optional<std::vector<uint8_t>> NNArchive::getOtherModelFormat() const {
switch(modelType) {
case model::ModelType::OTHER:
case model::ModelType::DLC:
return *otherModelFormatPtr;
case model::ModelType::BLOB:
case model::ModelType::SUPERBLOB:
return unpackedModelPath;
return std::nullopt;
break;
case model::ModelType::NNARCHIVE:
DAI_CHECK_V(false, "NNArchive inside NNArchive is not supported. Please unpack the inner archive first.");
@@ -129,11 +121,6 @@ std::vector<uint8_t> NNArchive::readModelFromArchive(const std::filesystem::path
return modelBytes;
}

void NNArchive::unpackArchiveInDirectory(const std::filesystem::path& archivePath, const std::filesystem::path& directory) const {
utility::ArchiveUtil archive(archivePath, archiveOptions.compression());
archive.unpackArchiveInDirectory(directory);
}

std::optional<std::pair<uint32_t, uint32_t>> NNArchive::getInputSize(uint32_t index) const {
auto inputs = archiveVersionedConfigPtr->getConfig<nn_archive::v1::Config>().model.inputs;
if(inputs.size() <= index) {
@@ -186,34 +173,28 @@ std::optional<uint32_t> NNArchive::getInputHeight(uint32_t index) const {
}

std::vector<dai::Platform> NNArchive::getSupportedPlatforms() const {
auto pathToModel = getModelPath();
if(!pathToModel) {
return {};
}
auto pathToModelChecked = *pathToModel;

auto endsWith = [](const std::filesystem::path& path, const std::string& suffix) { return path.extension() == suffix; };
switch(modelType) {
case model::ModelType::DLC:
return {Platform::RVC4};

if(endsWith(pathToModelChecked, ".dlc")) {
return {Platform::RVC4};
}
if(endsWith(pathToModelChecked, ".superblob")) {
return {Platform::RVC2};
}
if(endsWith(pathToModelChecked, ".blob")) {
auto model = OpenVINO::Blob(pathToModelChecked);
if(model.device == OpenVINO::Device::VPUX) {
return {Platform::RVC3};
}
if(model.device == OpenVINO::Device::VPU) {
case model::ModelType::SUPERBLOB:
return {Platform::RVC2};
}

// Should never get here
return {};
}
case model::ModelType::BLOB:
if(blobPtr->device == OpenVINO::Device::VPUX) {
return {Platform::RVC3};
}
if(blobPtr->device == OpenVINO::Device::VPU) {
return {Platform::RVC2};
}
// Should never get here
return {};

return {};
case model::ModelType::NNARCHIVE:
case model::ModelType::OTHER:
default:
return {};
}
}

} // namespace dai
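The rewritten getSupportedPlatforms() switches on the model type instead of inspecting a file extension. A small sketch of how a caller might use it, with a hypothetical archive path; not code from this PR.

#include <algorithm>
#include <stdexcept>

#include <depthai/nn_archive/NNArchive.hpp>

int main() {
    dai::NNArchive archive("mobilenet.rvc2.tar.xz");  // hypothetical archive path

    // DLC archives report RVC4, superblobs RVC2, and blobs RVC2 or RVC3
    // depending on the OpenVINO device baked into the blob.
    auto platforms = archive.getSupportedPlatforms();
    bool supportsRVC2 = std::find(platforms.begin(), platforms.end(), dai::Platform::RVC2) != platforms.end();
    if(!supportsRVC2) {
        throw std::runtime_error("Archive does not support RVC2");
    }
    return 0;
}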
26 changes: 20 additions & 6 deletions src/pipeline/node/NeuralNetwork.cpp
@@ -187,7 +187,11 @@ void NeuralNetwork::setNNArchiveSuperblob(const NNArchive& nnArchive, int numSha
}

void NeuralNetwork::setNNArchiveOther(const NNArchive& nnArchive) {
setModelPath(nnArchive.getModelPath().value());
DAI_CHECK_V(nnArchive.getModelType() == model::ModelType::DLC || nnArchive.getModelType() == model::ModelType::OTHER, "NNArchive type is not DLC or OTHER");
if(nnArchive.getOtherModelFormat().has_value()) {
auto otherModelFormat = nnArchive.getOtherModelFormat().value();
setOtherModelFormat(otherModelFormat);
}
}

// Specify local filesystem path to load the blob (which gets loaded at loadAssets)
Expand All @@ -214,6 +218,18 @@ void NeuralNetwork::setBlob(OpenVINO::Blob blob) {
properties.modelSource = Properties::ModelSource::BLOB;
}

void NeuralNetwork::setOtherModelFormat(std::vector<uint8_t> otherModel) {
auto asset = assetManager.set("__model", std::move(otherModel));
properties.modelUri = asset->getRelativeUri();
properties.modelSource = Properties::ModelSource::CUSTOM_MODEL;
}

void NeuralNetwork::setOtherModelFormat(const std::filesystem::path& path) {
auto modelAsset = assetManager.set("__model", path);
properties.modelUri = modelAsset->getRelativeUri();
properties.modelSource = Properties::ModelSource::CUSTOM_MODEL;
}

void NeuralNetwork::setModelPath(const std::filesystem::path& modelPath) {
switch(model::readModelType(modelPath)) {
case model::ModelType::BLOB:
@@ -226,11 +242,9 @@ void NeuralNetwork::setModelPath(const std::filesystem::path& modelPath) {
setNNArchive(NNArchive(modelPath));
break;
case model::ModelType::DLC:
case model::ModelType::OTHER: {
auto modelAsset = assetManager.set("__model", modelPath);
properties.modelUri = modelAsset->getRelativeUri();
properties.modelSource = Properties::ModelSource::CUSTOM_MODEL;
} break;
case model::ModelType::OTHER:
setOtherModelFormat(modelPath);
break;
}
}

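With this change setModelPath() routes DLC and other raw formats through the new setOtherModelFormat() path. A minimal usage sketch, with a hypothetical model filename and the umbrella header assumed:

#include <depthai/depthai.hpp>

int main() {
    dai::Pipeline pipeline;
    auto nn = pipeline.create<dai::node::NeuralNetwork>();

    // Dispatches on the detected model type: blobs via setBlob(), NNArchives via
    // setNNArchive(), and DLC/ONNX-style files via setOtherModelFormat().
    nn->setModelPath("yolov6n.onnx");  // hypothetical model file
    return 0;
}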
59 changes: 6 additions & 53 deletions tests/src/onhost_tests/nn_archive/nn_archive_test.cpp
@@ -5,89 +5,42 @@
#include <memory>
#include <string>

namespace {
class TestHelper {
public:
TestHelper() {
srand(time(nullptr));
extractFolder = std::filesystem::path("/tmp/depthai_test_" + std::to_string(rand())).string();
}

~TestHelper() {
std::filesystem::remove_all(extractFolder);
}

std::string extractFolder;
};
} // namespace

TEST_CASE("NNArchive loads a BLOB properly") {
dai::NNArchive nnArchive(BLOB_ARCHIVE_PATH);

auto helper = std::make_unique<TestHelper>();
std::string extractFolder = helper->extractFolder;

// Loaded archive is BLOB
REQUIRE(nnArchive.getModelType() == dai::model::ModelType::BLOB);

// Returns blob
REQUIRE(nnArchive.getBlob().has_value());

// Returns path to blob
REQUIRE(nnArchive.getModelPath().has_value());
std::filesystem::path path = nnArchive.getModelPath().value();
REQUIRE(std::filesystem::exists(path));
REQUIRE(path.extension() == ".blob");

// Returns nothing for other types
REQUIRE(!nnArchive.getSuperBlob().has_value());
REQUIRE(!nnArchive.getOtherModelFormat().has_value());
}

TEST_CASE("NNArchive loads a SUPERBLOB properly") {
dai::NNArchive nnArchive(SUPERBLOB_ARCHIVE_PATH);

auto helper = std::make_unique<TestHelper>();
std::string extractFolder = helper->extractFolder;

// Loaded archive is SUPERBLOB
REQUIRE(nnArchive.getModelType() == dai::model::ModelType::SUPERBLOB);

// Returns superblob
REQUIRE(nnArchive.getSuperBlob().has_value());

// Returns path to superblob
REQUIRE(nnArchive.getModelPath().has_value());
std::filesystem::path path = nnArchive.getModelPath().value();
REQUIRE(std::filesystem::exists(path));
REQUIRE(path.extension() == ".superblob");
REQUIRE(!nnArchive.getBlob().has_value());
REQUIRE(!nnArchive.getOtherModelFormat().has_value());

// Returns nothing for other types
REQUIRE(!nnArchive.getBlob().has_value());
REQUIRE(nnArchive.getSupportedPlatforms().size() == 1);
REQUIRE(nnArchive.getSupportedPlatforms()[0] == dai::Platform::RVC2);
}

TEST_CASE("NNArchive loads other formats properly") {
// Used for initialization and cleanup (even if the test fails)
auto helper = std::make_unique<TestHelper>();
std::string extractFolder = helper->extractFolder;

dai::NNArchive nnArchive(ONNX_ARCHIVE_PATH, dai::NNArchiveOptions().extractFolder(extractFolder));
dai::NNArchive nnArchive(ONNX_ARCHIVE_PATH);

// Loaded archive is ONNX
REQUIRE(nnArchive.getModelType() == dai::model::ModelType::OTHER);

// Returns model path
REQUIRE(nnArchive.getModelPath().has_value());

// Check that the returned path exists
std::filesystem::path modelPath = nnArchive.getModelPath().value();
REQUIRE(std::filesystem::exists(modelPath));

// Check that it has the .onnx extension
REQUIRE(modelPath.extension() == ".onnx");

// Returns nothing for other types
// Returns model
REQUIRE(nnArchive.getOtherModelFormat().has_value());
REQUIRE(!nnArchive.getBlob().has_value());
REQUIRE(!nnArchive.getSuperBlob().has_value());
