Skip to content

Commit 5e8380a

Browse files
Adding docstring
1 parent 3fe79fe commit 5e8380a

File tree

5 files changed

+68
-17
lines changed

5 files changed

+68
-17
lines changed
Original file line numberDiff line numberDiff line change
@@ -1,24 +1,24 @@
11
from __future__ import annotations
22

3+
import base64
34
from typing import Any
45

56
from aws_lambda_powertools.utilities.kafka_consumer.deserializer.base import DeserializerBase
67

78

8-
class DefaultDeserializer(DeserializerBase):
    """
    Default deserializer: base64-decode the input, then decode the bytes as UTF-8.

    Used when no customized deserialization (Avro, JSON, ...) is configured and the
    raw record value should simply be made readable. Kafka event payloads arrive
    base64-encoded, so this is the minimal useful transformation.
    """

    def deserialize(self, data: bytes | str) -> str:
        """
        Return the input data base64-decoded and UTF-8 decoded.

        Parameters
        ----------
        data : bytes or str
            Base64-encoded payload to deserialize. ``base64.b64decode`` accepts
            both ``str`` and ``bytes`` input.

        Returns
        -------
        str
            The decoded payload. Note: the previous annotation of
            ``dict[str, Any]`` was incorrect — this method returns a string.

        Example
        --------
        >>> deserializer = DefaultDeserializer()
        >>> encoded = base64.b64encode(b"hello").decode()
        >>> deserializer.deserialize(encoded)
        'hello'
        """
        return base64.b64decode(data).decode("utf-8")

aws_lambda_powertools/utilities/kafka_consumer/deserializer/deserializer.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,8 +2,8 @@
22

33
from typing import TYPE_CHECKING, Any
44

5+
from aws_lambda_powertools.utilities.kafka_consumer.deserializer.default import DefaultDeserializer
56
from aws_lambda_powertools.utilities.kafka_consumer.deserializer.json import JsonDeserializer
6-
from aws_lambda_powertools.utilities.kafka_consumer.deserializer.no_op import NoOpDeserializer
77

88
if TYPE_CHECKING:
99
from aws_lambda_powertools.utilities.kafka_consumer.deserializer.base import DeserializerBase
@@ -68,4 +68,5 @@ def get_deserializer(schema_type: str | object, schema_value: Any) -> Deserializ
6868
elif schema_type == "JSON":
6969
return JsonDeserializer()
7070

71-
return NoOpDeserializer()
71+
# Fall back to DefaultDeserializer, which base64-decodes and then UTF-8 decodes the payload
72+
return DefaultDeserializer()

aws_lambda_powertools/utilities/kafka_consumer/deserializer/json.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
from __future__ import annotations
22

3+
import base64
34
import json
45

56
from aws_lambda_powertools.utilities.kafka_consumer.deserializer.base import DeserializerBase
@@ -46,7 +47,7 @@ def deserialize(self, data: bytes | str) -> dict:
4647
"""
4748
try:
4849
value = self._decode_input(data)
49-
return json.loads(value.decode("utf-8"))
50+
return json.loads(base64.b64decode(value).decode("utf-8"))
5051
except Exception as e:
5152
raise KafkaConsumerDeserializationError(
5253
f"JSON deserialization error: {type(e).__name__}: {str(e)}",

aws_lambda_powertools/utilities/kafka_consumer/serialization/base.py

Lines changed: 40 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,46 @@
88

99

1010
class OutputSerializerBase(ABC):
    """
    Interface for output serializers.

    An output serializer takes already-deserialized dictionary data and turns it
    into the caller's desired representation — typically an instance of a
    user-supplied class (dataclass, Pydantic model, ...), or the dictionary
    itself when no target class is given.

    Methods
    -------
    serialize(data, output_class)
        Abstract method that must be implemented by subclasses to serialize data.

    Examples
    --------
    >>> class MyOutputSerializer(OutputSerializerBase):
    ...     def serialize(self, data: dict[str, Any], output_class=None):
    ...         if output_class:
    ...             # Convert dictionary to class instance
    ...             return output_class(**data)
    ...         return data  # Return as is if no output class provided
    """

    @abstractmethod
    def serialize(self, data: dict[str, Any], output_class: type[T] | None = None) -> T | dict[str, Any]:
        """
        Serialize dictionary data into a specific output format or class instance.

        Subclasses supply the concrete serialization logic.

        Parameters
        ----------
        data : dict[str, Any]
            The dictionary data to serialize.
        output_class : type[T] or None, optional
            Target class to convert the dictionary into. When provided, the
            implementation should return an instance of this class.

        Returns
        -------
        T or dict[str, Any]
            An instance of ``output_class`` when one is given, otherwise a
            processed dictionary. The generic ``T`` is the output_class type.
        """
        ...

tests/functional/kafka_consumer/_avro/test_kafka_consumer_with_avro.py

Lines changed: 14 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -87,7 +87,10 @@ def kafka_event_with_avro_data(avro_encoded_value, avro_encoded_key):
8787

8888

8989
def test_kafka_consumer_with_avro_and_dataclass(
90-
kafka_event_with_avro_data, avro_value_schema, lambda_context, user_value_dataclass,
90+
kafka_event_with_avro_data,
91+
avro_value_schema,
92+
lambda_context,
93+
user_value_dataclass,
9194
):
9295
"""Test Kafka consumer with Avro deserialization and dataclass output serialization."""
9396

@@ -120,7 +123,10 @@ def handler(event: ConsumerRecords, context):
120123

121124

122125
def test_kafka_consumer_with_avro_and_custom_object(
123-
kafka_event_with_avro_data, avro_value_schema, lambda_context, user_value_dict,
126+
kafka_event_with_avro_data,
127+
avro_value_schema,
128+
lambda_context,
129+
user_value_dict,
124130
):
125131
"""Test Kafka consumer with Avro deserialization and custom object serialization."""
126132

@@ -280,7 +286,12 @@ def lambda_handler(event: ConsumerRecords, context):
280286

281287

282288
def test_kafka_consumer_with_different_serializers_for_key_and_value(
283-
kafka_event_with_avro_data, lambda_context, avro_value_schema, avro_key_schema, user_key_dataclass, user_value_dict,
289+
kafka_event_with_avro_data,
290+
lambda_context,
291+
avro_value_schema,
292+
avro_key_schema,
293+
user_key_dataclass,
294+
user_value_dict,
284295
):
285296
"""Test using different serializer types for key and value."""
286297

0 commit comments

Comments
 (0)