@@ -566,26 +566,9 @@ static void buildTransConvOpWithQuantInfo(
   result.addTypes(finalOutputType);
 }
 
-/// The tosa.fully_connected op has its own builder as it does not have
-/// strides/dilation/padding.
-static void buildFCOpWithQuantInfo(OpBuilder &builder, OperationState &result,
-                                   Type outputType, Value input, Value weight,
-                                   Value bias) {
-
-  result.addOperands({input, weight, bias});
-  auto quantAttr = ::buildConvOpQuantizationAttr(builder, input, weight);
-  if (quantAttr) {
-    result.addAttribute("quantization_info", quantAttr);
-    result.addTypes(
-        buildConvOpResultTypeInfo(builder, outputType, input, weight));
-  } else {
-    result.addTypes(outputType);
-  }
-}
-
-/// The tosa.matmul op is also intended to be generated where a
-/// fully_connected op must be constructed where the weight is not a constant.
-/// In this case, the fully_connected op must be expressed using matmul.
+/// The tosa.matmul op is also intended to be generated where a fully_connected
+/// op must be constructed where the weight is not a constant. In this case,
+/// the fully_connected op must be expressed using matmul.
 /// TODO: Add link to the leglization document explaining this.
 static void buildMatMulOpWithQuantInfo(OpBuilder &builder,
                                        OperationState &result, Type outputType,
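
The rewrapped comment above states the design point of this hunk: with the dedicated
fully_connected builder removed, a fully_connected computation whose weight is not a
compile-time constant has to be phrased as tosa.matmul. As a minimal sketch of why
that rewrite is sound, here is the arithmetic in plain C++ rather than MLIR; the
[N x IC] input, [OC x IC] weight, and the explicit transpose step are assumptions
based on the usual legalization pattern, not code taken from this commit.

#include <cassert>
#include <cstddef>
#include <vector>

using Matrix = std::vector<std::vector<float>>;

// Direct fully_connected semantics: in is [N][IC], w is [OC][IC], bias is [OC],
// and out[n][oc] = bias[oc] + sum over ic of in[n][ic] * w[oc][ic].
Matrix fullyConnected(const Matrix &in, const Matrix &w,
                      const std::vector<float> &bias) {
  Matrix out(in.size(), std::vector<float>(bias.size()));
  for (std::size_t n = 0; n < in.size(); ++n)
    for (std::size_t oc = 0; oc < bias.size(); ++oc) {
      float acc = bias[oc];
      for (std::size_t ic = 0; ic < in[n].size(); ++ic)
        acc += in[n][ic] * w[oc][ic];
      out[n][oc] = acc;
    }
  return out;
}

// The same value computed as matmul(in, transpose(w)) + bias, which is the role
// tosa.matmul plays once the weight has been transposed (the rank-3 reshapes the
// real op requires are elided here).
Matrix matmulForm(const Matrix &in, const Matrix &w,
                  const std::vector<float> &bias) {
  Matrix wT(w[0].size(), std::vector<float>(w.size())); // wT is [IC][OC]
  for (std::size_t oc = 0; oc < w.size(); ++oc)
    for (std::size_t ic = 0; ic < w[oc].size(); ++ic)
      wT[ic][oc] = w[oc][ic];
  Matrix out(in.size(), std::vector<float>(bias.size()));
  for (std::size_t n = 0; n < in.size(); ++n)
    for (std::size_t oc = 0; oc < bias.size(); ++oc) {
      float acc = bias[oc];
      for (std::size_t ic = 0; ic < in[n].size(); ++ic)
        acc += in[n][ic] * wT[ic][oc];
      out[n][oc] = acc;
    }
  return out;
}

int main() {
  Matrix in = {{1, 2, 3}, {4, 5, 6}};      // [N=2][IC=3]
  Matrix w = {{1, 0, -1}, {2, 1, 0}};      // [OC=2][IC=3]
  std::vector<float> bias = {0.5f, -0.5f}; // [OC=2]
  // Both forms accumulate the same products in the same order, so the results
  // match exactly.
  assert(fullyConnected(in, w, bias) == matmulForm(in, w, bias));
  return 0;
}
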
@@ -889,76 +872,6 @@ bool tosa::EqualOp::isCompatibleReturnTypes(TypeRange l, TypeRange r) {
   return succeeded(verifyCompatibleShape(l[0], r[0]));
 }
 
-LogicalResult tosa::FullyConnectedOp::inferReturnTypeComponents(
-    MLIRContext *context, ::std::optional<Location> location,
-    FullyConnectedOp::Adaptor adaptor,
-    SmallVectorImpl<ShapedTypeComponents> &inferredReturnShapes) {
-  ShapeAdaptor inputShape(adaptor.getInput().getType());
-  ShapeAdaptor weightShape(adaptor.getWeight().getType());
-  ShapeAdaptor biasShape(adaptor.getBias().getType());
-
-  // All shapes are dynamic.
-  SmallVector<int64_t> outShape;
-  outShape.resize(2, ShapedType::kDynamic);
-
-  if (inputShape.hasRank()) {
-    outShape[0] = inputShape.getDimSize(0);
-  }
-
-  if (weightShape.hasRank()) {
-    outShape[1] = weightShape.getDimSize(0);
-  }
-
-  if (biasShape.hasRank()) {
-    outShape[1] = outShape[1] == ShapedType::kDynamic ? biasShape.getDimSize(0)
-                                                      : outShape[1];
-  }
-
-  inferredReturnShapes.push_back(ShapedTypeComponents(outShape));
-  return success();
-}
-
-LogicalResult FullyConnectedOp::verify() {
-  // All TOSA conv ops have an input() and weight().
-  auto inputType = llvm::dyn_cast<RankedTensorType>(getInput().getType());
-
-  RankedTensorType weightType =
-      llvm::dyn_cast<RankedTensorType>(getWeight().getType());
-
-  // Must be ranked tensor types
-  if (!inputType) {
-    emitOpError("expect a ranked tensor for input, got ") << getInput();
-    return failure();
-  }
-  if (!weightType) {
-    emitOpError("expect a ranked tensor for weight, got ") << getWeight();
-    return failure();
-  }
-
-  auto inputEType = inputType.getElementType();
-  auto weightEType = weightType.getElementType();
-
-  bool inputIsQuant = !llvm::isa<FloatType>(inputEType);
-  bool weightIsQuant = !llvm::isa<FloatType>(weightEType);
-
-  // Either both must be quantized or both unquantized.
-  if (inputIsQuant != weightIsQuant) {
-    emitOpError(
-        "expect both input and weight to be float or not together, got ")
-        << inputEType << " and " << weightEType;
-    return failure();
-  }
-
-  // Quantized type must have constructed the quantizationattr, and unquantized
-  // types should not have a quantizationattr.
-  if ((inputIsQuant && !getInputZp()) || (!inputIsQuant && getInputZp())) {
-    emitOpError("input zero point is required for quantized type, and not "
-                "allowed for float type");
-    return failure();
-  }
-  return success();
-}
-
 LogicalResult tosa::MatMulOp::inferReturnTypeComponents(
     MLIRContext *context, ::std::optional<Location> location,
     MatMulOp::Adaptor adaptor,
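
For reference, the removed FullyConnectedOp::inferReturnTypeComponents encodes a
small shape rule that is easy to restate on its own: the result is
[batch, out_channels], where batch comes from dim 0 of the input and out_channels
from dim 0 of the weight, with dim 0 of the bias used as a fallback while the
channel count is still unknown. A standalone sketch of that rule in plain C++
follows; the kDynamic sentinel value and the pointer-means-unranked convention are
local simplifications, not MLIR's actual ShapedType API.

#include <cstdint>
#include <vector>

// Local stand-in for ShapedType::kDynamic; the real sentinel value differs.
constexpr int64_t kDynamic = -1;

// Shapes are passed as vectors of dim sizes; a null pointer models an
// unranked operand.
std::vector<int64_t>
inferFullyConnectedShape(const std::vector<int64_t> *input,
                         const std::vector<int64_t> *weight,
                         const std::vector<int64_t> *bias) {
  std::vector<int64_t> out(2, kDynamic); // start fully dynamic: [?, ?]
  if (input)
    out[0] = (*input)[0]; // batch dimension from the input
  if (weight)
    out[1] = (*weight)[0]; // output channels from the weight
  if (bias && out[1] == kDynamic)
    out[1] = (*bias)[0]; // fall back to the bias while still unknown
  return out;
}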