Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 11 additions & 0 deletions projects/hipdnn/backend/include/HipdnnOperationType.h
Original file line number Diff line number Diff line change
Expand Up @@ -28,4 +28,15 @@ typedef enum
HIPDNN_OPERATION_TYPE_BATCHNORM_BACKWARD = 5, ///< Batch normalization backward pass
HIPDNN_OPERATION_TYPE_BATCHNORM_INFERENCE_VARIANCE
= 6, ///< Batch normalization inference with variance
HIPDNN_OPERATION_TYPE_BATCHNORM = 7, ///< Batch normalization training forward
HIPDNN_OPERATION_TYPE_POINTWISE = 8, ///< Pointwise operation
HIPDNN_OPERATION_TYPE_MATMUL = 9, ///< Matrix multiplication
HIPDNN_OPERATION_TYPE_RMSNORM = 10, ///< RMS normalization
HIPDNN_OPERATION_TYPE_LAYERNORM = 11, ///< Layer normalization
HIPDNN_OPERATION_TYPE_SDPA_FORWARD = 12, ///< Scaled dot-product attention forward
HIPDNN_OPERATION_TYPE_BLOCK_SCALE_QUANTIZE = 13, ///< Block scale quantization
HIPDNN_OPERATION_TYPE_SDPA_BACKWARD = 14, ///< Scaled dot-product attention backward
HIPDNN_OPERATION_TYPE_BLOCK_SCALE_DEQUANTIZE = 15, ///< Block scale dequantization
HIPDNN_OPERATION_TYPE_CUSTOM_OP = 16, ///< Custom operation
HIPDNN_OPERATION_TYPE_REDUCTION = 17, ///< Reduction operation
} hipdnnOperationType_t;
52 changes: 52 additions & 0 deletions projects/hipdnn/backend/src/BackendEnumStringUtils.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
#include "HipdnnDataType.h"
#include "HipdnnDiagonalAlignment.h"
#include "HipdnnNormFwdPhase.h"
#include "HipdnnOperationType.h"
#include "HipdnnPointwiseMode.h"
#include "HipdnnStatus.h"

Expand Down Expand Up @@ -676,6 +677,12 @@ inline const char* hipdnnGetAttributeNameString(hipdnnBackendAttributeName_t att
case HIPDNN_ATTR_BLOCK_SCALE_DEQUANTIZE_MATH_PREC_EXT:
return "HIPDNN_ATTR_BLOCK_SCALE_DEQUANTIZE_MATH_PREC_EXT";

// Operation extension attributes
case HIPDNN_ATTR_OPERATION_NAME_EXT:
return "HIPDNN_ATTR_OPERATION_NAME_EXT";
case HIPDNN_ATTR_OPERATION_TYPE_EXT:
return "HIPDNN_ATTR_OPERATION_TYPE_EXT";

// Extension API
case HIPDNN_ATTR_KNOB_INFO_SERIALIZED_VALUE_EXT:
return "HIPDNN_ATTR_KNOB_INFO_SERIALIZED_VALUE_EXT";
Expand All @@ -687,6 +694,51 @@ inline const char* hipdnnGetAttributeNameString(hipdnnBackendAttributeName_t att
}
}

/// Returns the canonical enumerator name for a hipDNN operation type.
///
/// @param type Operation type enumerator to translate.
/// @return A static, null-terminated string holding the enumerator's name, or
///         "HIPDNN_OPERATION_TYPE_UNKNOWN" for values outside the known set.
inline const char* hipdnnGetOperationTypeString(hipdnnOperationType_t type)
{
    // Cases are grouped by operation family rather than alphabetically; every
    // enumerator maps to its exact spelling so logs can be grepped verbatim.
    switch(type)
    {
    case HIPDNN_OPERATION_TYPE_NOT_SET:
        return "HIPDNN_OPERATION_TYPE_NOT_SET";

    // Convolution
    case HIPDNN_OPERATION_TYPE_CONVOLUTION_FORWARD:
        return "HIPDNN_OPERATION_TYPE_CONVOLUTION_FORWARD";
    case HIPDNN_OPERATION_TYPE_CONVOLUTION_BACKWARD_DATA:
        return "HIPDNN_OPERATION_TYPE_CONVOLUTION_BACKWARD_DATA";
    case HIPDNN_OPERATION_TYPE_CONVOLUTION_BACKWARD_WEIGHTS:
        return "HIPDNN_OPERATION_TYPE_CONVOLUTION_BACKWARD_WEIGHTS";

    // Batch normalization
    case HIPDNN_OPERATION_TYPE_BATCHNORM:
        return "HIPDNN_OPERATION_TYPE_BATCHNORM";
    case HIPDNN_OPERATION_TYPE_BATCHNORM_BACKWARD:
        return "HIPDNN_OPERATION_TYPE_BATCHNORM_BACKWARD";
    case HIPDNN_OPERATION_TYPE_BATCHNORM_INFERENCE:
        return "HIPDNN_OPERATION_TYPE_BATCHNORM_INFERENCE";
    case HIPDNN_OPERATION_TYPE_BATCHNORM_INFERENCE_VARIANCE:
        return "HIPDNN_OPERATION_TYPE_BATCHNORM_INFERENCE_VARIANCE";

    // Other normalizations
    case HIPDNN_OPERATION_TYPE_LAYERNORM:
        return "HIPDNN_OPERATION_TYPE_LAYERNORM";
    case HIPDNN_OPERATION_TYPE_RMSNORM:
        return "HIPDNN_OPERATION_TYPE_RMSNORM";

    // Attention
    case HIPDNN_OPERATION_TYPE_SDPA_FORWARD:
        return "HIPDNN_OPERATION_TYPE_SDPA_FORWARD";
    case HIPDNN_OPERATION_TYPE_SDPA_BACKWARD:
        return "HIPDNN_OPERATION_TYPE_SDPA_BACKWARD";

    // Quantization
    case HIPDNN_OPERATION_TYPE_BLOCK_SCALE_QUANTIZE:
        return "HIPDNN_OPERATION_TYPE_BLOCK_SCALE_QUANTIZE";
    case HIPDNN_OPERATION_TYPE_BLOCK_SCALE_DEQUANTIZE:
        return "HIPDNN_OPERATION_TYPE_BLOCK_SCALE_DEQUANTIZE";

    // Miscellaneous
    case HIPDNN_OPERATION_TYPE_MATMUL:
        return "HIPDNN_OPERATION_TYPE_MATMUL";
    case HIPDNN_OPERATION_TYPE_POINTWISE:
        return "HIPDNN_OPERATION_TYPE_POINTWISE";
    case HIPDNN_OPERATION_TYPE_REDUCTION:
        return "HIPDNN_OPERATION_TYPE_REDUCTION";
    case HIPDNN_OPERATION_TYPE_CUSTOM_OP:
        return "HIPDNN_OPERATION_TYPE_CUSTOM_OP";

    default:
        return "HIPDNN_OPERATION_TYPE_UNKNOWN";
    }
}

inline const char* hipdnnGetPluginLoadingModeString(hipdnnPluginLoadingMode_ext_t mode)
{
switch(mode)
Expand Down
31 changes: 31 additions & 0 deletions projects/hipdnn/backend/src/descriptors/NodeFactory.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,39 @@ std::shared_ptr<IBackendDescriptor> NodeFactory::createOperationFromNode(

switch(nodeT.attributes.type)
{
// Uncomment when fromNode() is implemented in the lifting PR:
// case NodeAttributes::BatchnormAttributes:
// return BatchnormOperationDescriptor::fromNode(nodeT, tensorMap);
// case NodeAttributes::BatchnormBackwardAttributes:
// return BatchnormBackwardOperationDescriptor::fromNode(nodeT, tensorMap);
// case NodeAttributes::BatchnormInferenceAttributes:
// return BatchnormInferenceOperationDescriptor::fromNode(nodeT, tensorMap);
// case NodeAttributes::BatchnormInferenceAttributesVarianceExt:
// return BatchnormInferenceVarianceExtOperationDescriptor::fromNode(nodeT, tensorMap);
// case NodeAttributes::BlockScaleDequantizeAttributes:
// return BlockScaleDequantizeOperationDescriptor::fromNode(nodeT, tensorMap);
// case NodeAttributes::BlockScaleQuantizeAttributes:
// return BlockScaleQuantizeOperationDescriptor::fromNode(nodeT, tensorMap);
// case NodeAttributes::ConvolutionBwdAttributes:
// return ConvolutionBwdOperationDescriptor::fromNode(nodeT, tensorMap);
case NodeAttributes::ConvolutionFwdAttributes:
return ConvolutionFwdOperationDescriptor::fromNode(nodeT, tensorMap);
// case NodeAttributes::ConvolutionWrwAttributes:
// return ConvolutionWrwOperationDescriptor::fromNode(nodeT, tensorMap);
// case NodeAttributes::CustomOpAttributes:
// return CustomOpOperationDescriptor::fromNode(nodeT, tensorMap);
// case NodeAttributes::LayernormAttributes:
// return LayernormOperationDescriptor::fromNode(nodeT, tensorMap);
// case NodeAttributes::MatmulAttributes:
// return MatmulOperationDescriptor::fromNode(nodeT, tensorMap);
// case NodeAttributes::PointwiseAttributes:
// return PointwiseOperationDescriptor::fromNode(nodeT, tensorMap);
// case NodeAttributes::RMSNormAttributes:
// return RMSNormOperationDescriptor::fromNode(nodeT, tensorMap);
// case NodeAttributes::SdpaAttributes:
// return SdpaFpropOperationDescriptor::fromNode(nodeT, tensorMap);
// case NodeAttributes::SdpaBackwardAttributes:
// return SdpaBackwardOperationDescriptor::fromNode(nodeT, tensorMap);
default:
throw HipdnnException(
HIPDNN_STATUS_NOT_SUPPORTED,
Expand Down
16 changes: 16 additions & 0 deletions projects/hipdnn/backend/src/descriptors/NodeFactory.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,23 @@

#pragma once

// Uncomment when fromNode() is implemented in the lifting PR:
// #include "BatchnormBackwardOperationDescriptor.hpp"
// #include "BatchnormInferenceOperationDescriptor.hpp"
// #include "BatchnormInferenceVarianceExtOperationDescriptor.hpp"
// #include "BatchnormOperationDescriptor.hpp"
// #include "BlockScaleDequantizeOperationDescriptor.hpp"
// #include "BlockScaleQuantizeOperationDescriptor.hpp"
// #include "ConvolutionBwdOperationDescriptor.hpp"
#include "ConvolutionFwdOperationDescriptor.hpp"
// #include "ConvolutionWrwOperationDescriptor.hpp"
// #include "CustomOpOperationDescriptor.hpp"
// #include "LayernormOperationDescriptor.hpp"
// #include "MatmulOperationDescriptor.hpp"
// #include "PointwiseOperationDescriptor.hpp"
// #include "RMSNormOperationDescriptor.hpp"
// #include "SdpaBackwardOperationDescriptor.hpp"
// #include "SdpaFpropOperationDescriptor.hpp"
#include "IGraphOperation.hpp"
#include "TensorDescriptor.hpp"
#include <hipdnn_data_sdk/data_objects/graph_generated.h>
Expand Down
55 changes: 54 additions & 1 deletion projects/hipdnn/backend/tests/TestBackendEnumStringUtils.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -538,7 +538,6 @@ TEST(TestBackendEnumStringUtils, GetBackendAttributeName)
"HIPDNN_ATTR_BATCHNORM_MATH_PREC_EXT");
EXPECT_STREQ(hipdnnGetAttributeNameString(HIPDNN_ATTR_OPERATION_BATCHNORM_PEER_STATS_EXT),
"HIPDNN_ATTR_OPERATION_BATCHNORM_PEER_STATS_EXT");

// Block scale dequantize operation attributes
EXPECT_STREQ(hipdnnGetAttributeNameString(HIPDNN_ATTR_OPERATION_BLOCK_SCALE_DEQUANTIZE_X_EXT),
"HIPDNN_ATTR_OPERATION_BLOCK_SCALE_DEQUANTIZE_X_EXT");
Expand All @@ -555,6 +554,60 @@ TEST(TestBackendEnumStringUtils, GetBackendAttributeName)
"HIPDNN_ATTR_OPERATION_BLOCK_SCALE_DEQUANTIZE_IS_NEGATIVE_SCALE_EXT");
EXPECT_STREQ(hipdnnGetAttributeNameString(HIPDNN_ATTR_BLOCK_SCALE_DEQUANTIZE_MATH_PREC_EXT),
"HIPDNN_ATTR_BLOCK_SCALE_DEQUANTIZE_MATH_PREC_EXT");

// Operation extension attributes
EXPECT_STREQ(hipdnnGetAttributeNameString(HIPDNN_ATTR_OPERATION_NAME_EXT),
"HIPDNN_ATTR_OPERATION_NAME_EXT");
EXPECT_STREQ(hipdnnGetAttributeNameString(HIPDNN_ATTR_OPERATION_TYPE_EXT),
"HIPDNN_ATTR_OPERATION_TYPE_EXT");

// Unknown attribute
EXPECT_STREQ(hipdnnGetAttributeNameString(static_cast<hipdnnBackendAttributeName_t>(-1)),
"HIPDNN_ATTR_UNKNOWN");
}

TEST(TestBackendEnumStringUtils, GetOperationTypeString)
{
    // Table-driven check: every known enumerator must round-trip to its
    // exact spelling. EXPECT_STREQ reports both strings on failure, so a
    // mismatching entry is still easy to identify.
    struct NameCase
    {
        hipdnnOperationType_t type;
        const char* expected;
    };
    const NameCase cases[] = {
        {HIPDNN_OPERATION_TYPE_BATCHNORM, "HIPDNN_OPERATION_TYPE_BATCHNORM"},
        {HIPDNN_OPERATION_TYPE_BATCHNORM_BACKWARD, "HIPDNN_OPERATION_TYPE_BATCHNORM_BACKWARD"},
        {HIPDNN_OPERATION_TYPE_BATCHNORM_INFERENCE, "HIPDNN_OPERATION_TYPE_BATCHNORM_INFERENCE"},
        {HIPDNN_OPERATION_TYPE_BATCHNORM_INFERENCE_VARIANCE,
         "HIPDNN_OPERATION_TYPE_BATCHNORM_INFERENCE_VARIANCE"},
        {HIPDNN_OPERATION_TYPE_BLOCK_SCALE_DEQUANTIZE,
         "HIPDNN_OPERATION_TYPE_BLOCK_SCALE_DEQUANTIZE"},
        {HIPDNN_OPERATION_TYPE_BLOCK_SCALE_QUANTIZE,
         "HIPDNN_OPERATION_TYPE_BLOCK_SCALE_QUANTIZE"},
        {HIPDNN_OPERATION_TYPE_CONVOLUTION_BACKWARD_DATA,
         "HIPDNN_OPERATION_TYPE_CONVOLUTION_BACKWARD_DATA"},
        {HIPDNN_OPERATION_TYPE_CONVOLUTION_BACKWARD_WEIGHTS,
         "HIPDNN_OPERATION_TYPE_CONVOLUTION_BACKWARD_WEIGHTS"},
        {HIPDNN_OPERATION_TYPE_CONVOLUTION_FORWARD, "HIPDNN_OPERATION_TYPE_CONVOLUTION_FORWARD"},
        {HIPDNN_OPERATION_TYPE_CUSTOM_OP, "HIPDNN_OPERATION_TYPE_CUSTOM_OP"},
        {HIPDNN_OPERATION_TYPE_LAYERNORM, "HIPDNN_OPERATION_TYPE_LAYERNORM"},
        {HIPDNN_OPERATION_TYPE_MATMUL, "HIPDNN_OPERATION_TYPE_MATMUL"},
        {HIPDNN_OPERATION_TYPE_NOT_SET, "HIPDNN_OPERATION_TYPE_NOT_SET"},
        {HIPDNN_OPERATION_TYPE_POINTWISE, "HIPDNN_OPERATION_TYPE_POINTWISE"},
        {HIPDNN_OPERATION_TYPE_REDUCTION, "HIPDNN_OPERATION_TYPE_REDUCTION"},
        {HIPDNN_OPERATION_TYPE_RMSNORM, "HIPDNN_OPERATION_TYPE_RMSNORM"},
        {HIPDNN_OPERATION_TYPE_SDPA_BACKWARD, "HIPDNN_OPERATION_TYPE_SDPA_BACKWARD"},
        {HIPDNN_OPERATION_TYPE_SDPA_FORWARD, "HIPDNN_OPERATION_TYPE_SDPA_FORWARD"},
    };
    for(const auto& c : cases)
    {
        EXPECT_STREQ(hipdnnGetOperationTypeString(c.type), c.expected);
    }

    // Out-of-range values must map to the sentinel string.
    EXPECT_STREQ(hipdnnGetOperationTypeString(static_cast<hipdnnOperationType_t>(-1)),
                 "HIPDNN_OPERATION_TYPE_UNKNOWN");
}

TEST(TestBackendEnumStringUtils, GetStatusString)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,24 @@
#include <hipdnn_frontend/attributes/GraphAttributes.hpp>
#include <hipdnn_frontend/attributes/TensorAttributes.hpp>
#include <hipdnn_frontend/detail/BackendWrapper.hpp>
// Uncomment when unpack_from_descriptor() is implemented in the lifting PR:
// #include <hipdnn_frontend/node/BatchnormBackwardNode.hpp>
// #include <hipdnn_frontend/node/BatchnormInferenceNode.hpp>
// #include <hipdnn_frontend/node/BatchnormInferenceNodeVarianceExt.hpp>
// #include <hipdnn_frontend/node/BatchnormNode.hpp>
// #include <hipdnn_frontend/node/BlockScaleDequantizeNode.hpp>
// #include <hipdnn_frontend/node/BlockScaleQuantizeNode.hpp>
// #include <hipdnn_frontend/node/ConvolutionDgradNode.hpp>
#include <hipdnn_frontend/node/ConvolutionFpropNode.hpp>
// #include <hipdnn_frontend/node/ConvolutionWgradNode.hpp>
// #include <hipdnn_frontend/node/CustomOpNode.hpp>
// #include <hipdnn_frontend/node/LayerNormNode.hpp>
// #include <hipdnn_frontend/node/MatmulNode.hpp>
#include <hipdnn_frontend/node/Node.hpp>
// #include <hipdnn_frontend/node/PointwiseNode.hpp>
// #include <hipdnn_frontend/node/RMSNormNode.hpp>
// #include <hipdnn_frontend/node/SdpaBpropNode.hpp>
// #include <hipdnn_frontend/node/SdpaFpropNode.hpp>
#include <memory>
#include <string>
#include <unordered_map>
Expand Down Expand Up @@ -53,10 +69,62 @@ namespace hipdnn_frontend::detail
{
switch(opType)
{
// Uncomment when unpack_from_descriptor() is implemented in the lifting PR:
// case HIPDNN_OPERATION_TYPE_BATCHNORM:
// return {std::make_shared<graph::BatchnormNode>(graph::BatchnormAttributes{}, graphAttrs),
// {}};
// case HIPDNN_OPERATION_TYPE_BATCHNORM_BACKWARD:
// return {std::make_shared<graph::BatchnormBackwardNode>(
// graph::BatchnormBackwardAttributes{}, graphAttrs),
// {}};
// case HIPDNN_OPERATION_TYPE_BATCHNORM_INFERENCE:
// return {std::make_shared<graph::BatchnormInferenceNode>(
// graph::BatchnormInferenceAttributes{}, graphAttrs),
// {}};
// case HIPDNN_OPERATION_TYPE_BATCHNORM_INFERENCE_VARIANCE:
// return {std::make_shared<graph::BatchnormInferenceNodeVarianceExt>(
// graph::BatchnormInferenceAttributesVarianceExt{}, graphAttrs),
// {}};
// case HIPDNN_OPERATION_TYPE_BLOCK_SCALE_DEQUANTIZE:
// return {std::make_shared<graph::BlockScaleDequantizeNode>(
// graph::BlockScaleDequantizeAttributes{}, graphAttrs),
// {}};
// case HIPDNN_OPERATION_TYPE_BLOCK_SCALE_QUANTIZE:
// return {std::make_shared<graph::BlockScaleQuantizeNode>(
// graph::BlockScaleQuantizeAttributes{}, graphAttrs),
// {}};
// case HIPDNN_OPERATION_TYPE_CONVOLUTION_BACKWARD_DATA:
// return {std::make_shared<graph::ConvolutionDgradNode>(graph::ConvDgradAttributes{},
// graphAttrs),
// {}};
// case HIPDNN_OPERATION_TYPE_CONVOLUTION_BACKWARD_WEIGHTS:
// return {std::make_shared<graph::ConvolutionWgradNode>(graph::ConvWgradAttributes{},
// graphAttrs),
// {}};
case HIPDNN_OPERATION_TYPE_CONVOLUTION_FORWARD:
return {
std::make_shared<graph::ConvolutionFpropNode>(graph::ConvFpropAttributes{}, graphAttrs),
{}};
// case HIPDNN_OPERATION_TYPE_CUSTOM_OP:
// return {std::make_shared<graph::CustomOpNode>(graph::CustomOpAttributes{}, graphAttrs),
// {}};
// case HIPDNN_OPERATION_TYPE_LAYERNORM:
// return {std::make_shared<graph::LayerNormNode>(graph::LayernormAttributes{}, graphAttrs),
// {}};
// case HIPDNN_OPERATION_TYPE_MATMUL:
// return {std::make_shared<graph::MatmulNode>(graph::MatmulAttributes{}, graphAttrs), {}};
// case HIPDNN_OPERATION_TYPE_POINTWISE:
// return {std::make_shared<graph::PointwiseNode>(graph::PointwiseAttributes{}, graphAttrs),
// {}};
// case HIPDNN_OPERATION_TYPE_RMSNORM:
// return {std::make_shared<graph::RMSNormNode>(graph::RMSNormAttributes{}, graphAttrs),
// {}};
// case HIPDNN_OPERATION_TYPE_SDPA_BACKWARD:
// return {std::make_shared<graph::SdpaBpropNode>(graph::SdpaBackwardAttributes{},
// graphAttrs),
// {}};
// case HIPDNN_OPERATION_TYPE_SDPA_FORWARD:
// return {std::make_shared<graph::SdpaFpropNode>(graph::SdpaAttributes{}, graphAttrs), {}};
default:
return {nullptr,
{ErrorCode::HIPDNN_BACKEND_ERROR,
Expand Down
1 change: 1 addition & 0 deletions test/therock/hipdnn_install_tests/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ set(CMAKE_CXX_STANDARD 17)

enable_testing()

# Add a CMake install test for a given hipDNN package and header.
function(add_hipdnn_install_test)
cmake_parse_arguments(ARG "" "PACKAGE;HEADER" "" ${ARGN})

Expand Down
Loading