diff --git a/scaleway-async/scaleway_async/inference/v1/marshalling.py b/scaleway-async/scaleway_async/inference/v1/marshalling.py
index 0c3640168..98d21b2de 100644
--- a/scaleway-async/scaleway_async/inference/v1/marshalling.py
+++ b/scaleway-async/scaleway_async/inference/v1/marshalling.py
@@ -16,11 +16,11 @@
     EndpointPrivateNetworkDetails,
     EndpointPublicNetworkDetails,
     Endpoint,
+    DeploymentQuantization,
+    Deployment,
     ModelSupportedQuantization,
     ModelSupportedNode,
     ModelSupportInfo,
-    DeploymentQuantization,
-    Deployment,
     Model,
     ListDeploymentsResponse,
     ListModelsResponse,
@@ -105,83 +105,6 @@ def unmarshal_Endpoint(data: Any) -> Endpoint:
     return Endpoint(**args)
 
 
-def unmarshal_ModelSupportedQuantization(data: Any) -> ModelSupportedQuantization:
-    if not isinstance(data, dict):
-        raise TypeError(
-            "Unmarshalling the type 'ModelSupportedQuantization' failed as data isn't a dictionary."
-        )
-
-    args: Dict[str, Any] = {}
-
-    field = data.get("quantization_bits", None)
-    if field is not None:
-        args["quantization_bits"] = field
-    else:
-        args["quantization_bits"] = 0
-
-    field = data.get("allowed", None)
-    if field is not None:
-        args["allowed"] = field
-    else:
-        args["allowed"] = False
-
-    field = data.get("max_context_size", None)
-    if field is not None:
-        args["max_context_size"] = field
-    else:
-        args["max_context_size"] = 0
-
-    return ModelSupportedQuantization(**args)
-
-
-def unmarshal_ModelSupportedNode(data: Any) -> ModelSupportedNode:
-    if not isinstance(data, dict):
-        raise TypeError(
-            "Unmarshalling the type 'ModelSupportedNode' failed as data isn't a dictionary."
-        )
-
-    args: Dict[str, Any] = {}
-
-    field = data.get("node_type_name", None)
-    if field is not None:
-        args["node_type_name"] = field
-    else:
-        args["node_type_name"] = None
-
-    field = data.get("quantizations", None)
-    if field is not None:
-        args["quantizations"] = (
-            [unmarshal_ModelSupportedQuantization(v) for v in field]
-            if field is not None
-            else None
-        )
-    else:
-        args["quantizations"] = []
-
-    return ModelSupportedNode(**args)
-
-
-def unmarshal_ModelSupportInfo(data: Any) -> ModelSupportInfo:
-    if not isinstance(data, dict):
-        raise TypeError(
-            "Unmarshalling the type 'ModelSupportInfo' failed as data isn't a dictionary."
-        )
-
-    args: Dict[str, Any] = {}
-
-    field = data.get("nodes", None)
-    if field is not None:
-        args["nodes"] = (
-            [unmarshal_ModelSupportedNode(v) for v in field]
-            if field is not None
-            else None
-        )
-    else:
-        args["nodes"] = []
-
-    return ModelSupportInfo(**args)
-
-
 def unmarshal_DeploymentQuantization(data: Any) -> DeploymentQuantization:
     if not isinstance(data, dict):
         raise TypeError(
@@ -314,6 +237,83 @@ def unmarshal_Deployment(data: Any) -> Deployment:
     return Deployment(**args)
 
 
+def unmarshal_ModelSupportedQuantization(data: Any) -> ModelSupportedQuantization:
+    if not isinstance(data, dict):
+        raise TypeError(
+            "Unmarshalling the type 'ModelSupportedQuantization' failed as data isn't a dictionary."
+        )
+
+    args: Dict[str, Any] = {}
+
+    field = data.get("quantization_bits", None)
+    if field is not None:
+        args["quantization_bits"] = field
+    else:
+        args["quantization_bits"] = 0
+
+    field = data.get("allowed", None)
+    if field is not None:
+        args["allowed"] = field
+    else:
+        args["allowed"] = False
+
+    field = data.get("max_context_size", None)
+    if field is not None:
+        args["max_context_size"] = field
+    else:
+        args["max_context_size"] = 0
+
+    return ModelSupportedQuantization(**args)
+
+
+def unmarshal_ModelSupportedNode(data: Any) -> ModelSupportedNode:
+    if not isinstance(data, dict):
+        raise TypeError(
+            "Unmarshalling the type 'ModelSupportedNode' failed as data isn't a dictionary."
+        )
+
+    args: Dict[str, Any] = {}
+
+    field = data.get("node_type_name", None)
+    if field is not None:
+        args["node_type_name"] = field
+    else:
+        args["node_type_name"] = None
+
+    field = data.get("quantizations", None)
+    if field is not None:
+        args["quantizations"] = (
+            [unmarshal_ModelSupportedQuantization(v) for v in field]
+            if field is not None
+            else None
+        )
+    else:
+        args["quantizations"] = []
+
+    return ModelSupportedNode(**args)
+
+
+def unmarshal_ModelSupportInfo(data: Any) -> ModelSupportInfo:
+    if not isinstance(data, dict):
+        raise TypeError(
+            "Unmarshalling the type 'ModelSupportInfo' failed as data isn't a dictionary."
+        )
+
+    args: Dict[str, Any] = {}
+
+    field = data.get("nodes", None)
+    if field is not None:
+        args["nodes"] = (
+            [unmarshal_ModelSupportedNode(v) for v in field]
+            if field is not None
+            else None
+        )
+    else:
+        args["nodes"] = []
+
+    return ModelSupportInfo(**args)
+
+
 def unmarshal_Model(data: Any) -> Model:
     if not isinstance(data, dict):
         raise TypeError(
diff --git a/scaleway/scaleway/inference/v1/marshalling.py b/scaleway/scaleway/inference/v1/marshalling.py
index 0c3640168..98d21b2de 100644
--- a/scaleway/scaleway/inference/v1/marshalling.py
+++ b/scaleway/scaleway/inference/v1/marshalling.py
@@ -16,11 +16,11 @@
     EndpointPrivateNetworkDetails,
     EndpointPublicNetworkDetails,
     Endpoint,
+    DeploymentQuantization,
+    Deployment,
     ModelSupportedQuantization,
     ModelSupportedNode,
     ModelSupportInfo,
-    DeploymentQuantization,
-    Deployment,
     Model,
     ListDeploymentsResponse,
     ListModelsResponse,
@@ -105,83 +105,6 @@ def unmarshal_Endpoint(data: Any) -> Endpoint:
     return Endpoint(**args)
 
 
-def unmarshal_ModelSupportedQuantization(data: Any) -> ModelSupportedQuantization:
-    if not isinstance(data, dict):
-        raise TypeError(
-            "Unmarshalling the type 'ModelSupportedQuantization' failed as data isn't a dictionary."
-        )
-
-    args: Dict[str, Any] = {}
-
-    field = data.get("quantization_bits", None)
-    if field is not None:
-        args["quantization_bits"] = field
-    else:
-        args["quantization_bits"] = 0
-
-    field = data.get("allowed", None)
-    if field is not None:
-        args["allowed"] = field
-    else:
-        args["allowed"] = False
-
-    field = data.get("max_context_size", None)
-    if field is not None:
-        args["max_context_size"] = field
-    else:
-        args["max_context_size"] = 0
-
-    return ModelSupportedQuantization(**args)
-
-
-def unmarshal_ModelSupportedNode(data: Any) -> ModelSupportedNode:
-    if not isinstance(data, dict):
-        raise TypeError(
-            "Unmarshalling the type 'ModelSupportedNode' failed as data isn't a dictionary."
-        )
-
-    args: Dict[str, Any] = {}
-
-    field = data.get("node_type_name", None)
-    if field is not None:
-        args["node_type_name"] = field
-    else:
-        args["node_type_name"] = None
-
-    field = data.get("quantizations", None)
-    if field is not None:
-        args["quantizations"] = (
-            [unmarshal_ModelSupportedQuantization(v) for v in field]
-            if field is not None
-            else None
-        )
-    else:
-        args["quantizations"] = []
-
-    return ModelSupportedNode(**args)
-
-
-def unmarshal_ModelSupportInfo(data: Any) -> ModelSupportInfo:
-    if not isinstance(data, dict):
-        raise TypeError(
-            "Unmarshalling the type 'ModelSupportInfo' failed as data isn't a dictionary."
-        )
-
-    args: Dict[str, Any] = {}
-
-    field = data.get("nodes", None)
-    if field is not None:
-        args["nodes"] = (
-            [unmarshal_ModelSupportedNode(v) for v in field]
-            if field is not None
-            else None
-        )
-    else:
-        args["nodes"] = []
-
-    return ModelSupportInfo(**args)
-
-
 def unmarshal_DeploymentQuantization(data: Any) -> DeploymentQuantization:
     if not isinstance(data, dict):
         raise TypeError(
@@ -314,6 +237,83 @@ def unmarshal_Deployment(data: Any) -> Deployment:
     return Deployment(**args)
 
 
+def unmarshal_ModelSupportedQuantization(data: Any) -> ModelSupportedQuantization:
+    if not isinstance(data, dict):
+        raise TypeError(
+            "Unmarshalling the type 'ModelSupportedQuantization' failed as data isn't a dictionary."
+        )
+
+    args: Dict[str, Any] = {}
+
+    field = data.get("quantization_bits", None)
+    if field is not None:
+        args["quantization_bits"] = field
+    else:
+        args["quantization_bits"] = 0
+
+    field = data.get("allowed", None)
+    if field is not None:
+        args["allowed"] = field
+    else:
+        args["allowed"] = False
+
+    field = data.get("max_context_size", None)
+    if field is not None:
+        args["max_context_size"] = field
+    else:
+        args["max_context_size"] = 0
+
+    return ModelSupportedQuantization(**args)
+
+
+def unmarshal_ModelSupportedNode(data: Any) -> ModelSupportedNode:
+    if not isinstance(data, dict):
+        raise TypeError(
+            "Unmarshalling the type 'ModelSupportedNode' failed as data isn't a dictionary."
+        )
+
+    args: Dict[str, Any] = {}
+
+    field = data.get("node_type_name", None)
+    if field is not None:
+        args["node_type_name"] = field
+    else:
+        args["node_type_name"] = None
+
+    field = data.get("quantizations", None)
+    if field is not None:
+        args["quantizations"] = (
+            [unmarshal_ModelSupportedQuantization(v) for v in field]
+            if field is not None
+            else None
+        )
+    else:
+        args["quantizations"] = []
+
+    return ModelSupportedNode(**args)
+
+
+def unmarshal_ModelSupportInfo(data: Any) -> ModelSupportInfo:
+    if not isinstance(data, dict):
+        raise TypeError(
+            "Unmarshalling the type 'ModelSupportInfo' failed as data isn't a dictionary."
+        )
+
+    args: Dict[str, Any] = {}
+
+    field = data.get("nodes", None)
+    if field is not None:
+        args["nodes"] = (
+            [unmarshal_ModelSupportedNode(v) for v in field]
+            if field is not None
+            else None
+        )
+    else:
+        args["nodes"] = []
+
+    return ModelSupportInfo(**args)
+
+
 def unmarshal_Model(data: Any) -> Model:
     if not isinstance(data, dict):
         raise TypeError(