Skip to content

Commit 8da90c5

Browse files
Merge branch 'develop' into dependabot/docker/docs/squidfunk/mkdocs-material-405aeb6dbf1fcddd1082993eacf288a46648ea56b846323f001aee619015a23b
2 parents 2c674bc + 4d9a3be commit 8da90c5

File tree

13 files changed

+736
-32
lines changed

13 files changed

+736
-32
lines changed
Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,3 @@
11
"""Exposes version constant to avoid circular dependencies."""
22

3-
VERSION = "3.19.1a1"
3+
VERSION = "3.19.1a2"

aws_lambda_powertools/utilities/parser/models/dynamodb.py

Lines changed: 88 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -2,53 +2,111 @@
22
from datetime import datetime
33
from typing import Any, Dict, List, Literal, Optional, Type, Union
44

5-
from pydantic import BaseModel, field_validator
5+
from pydantic import BaseModel, Field, field_validator
66

77
from aws_lambda_powertools.shared.dynamodb_deserializer import TypeDeserializer
88

99
_DESERIALIZER = TypeDeserializer()
1010

1111

1212
class DynamoDBStreamChangedRecordModel(BaseModel):
    """The ``dynamodb`` payload of a stream record: keys, old/new images, and stream metadata.

    ``Keys``, ``NewImage`` and ``OldImage`` arrive in DynamoDB attribute-value format
    (e.g. ``{"Id": {"N": "101"}}``) and are deserialized to plain Python values by
    ``deserialize_field`` before model validation.
    """

    ApproximateCreationDateTime: Optional[datetime] = Field(  # AWS sends this as Unix epoch float
        default=None,
        description="The approximate date and time when the stream record was created (Unix epoch time).",
        examples=[1693997155.0],
    )
    Keys: Dict[str, Any] = Field(description="Primary key attributes for the item.", examples=[{"Id": {"N": "101"}}])
    NewImage: Optional[Union[Dict[str, Any], Type[BaseModel], BaseModel]] = Field(
        default=None,
        description="The item after modifications, in DynamoDB attribute-value format.",
        examples=[{"Message": {"S": "New item!"}, "Id": {"N": "101"}}],
    )
    OldImage: Optional[Union[Dict[str, Any], Type[BaseModel], BaseModel]] = Field(
        default=None,
        description="The item before modifications, in DynamoDB attribute-value format.",
        examples=[{"Message": {"S": "Old item!"}, "Id": {"N": "100"}}],
    )
    SequenceNumber: str = Field(description="A unique identifier for the stream record.", examples=["222"])
    SizeBytes: int = Field(description="The size of the stream record, in bytes.", examples=[26])
    StreamViewType: Literal["NEW_AND_OLD_IMAGES", "KEYS_ONLY", "NEW_IMAGE", "OLD_IMAGE"] = Field(
        description="The type of data included in the stream record.",
        examples=["NEW_AND_OLD_IMAGES"],
    )

    @field_validator("Keys", "NewImage", "OldImage", mode="before")
    def deserialize_field(cls, value):
        # Convert each DynamoDB attribute-value map (e.g. {"N": "101"}) into a native Python value.
        # NOTE(review): assumes the incoming value is a dict; an explicitly-null image would fail
        # on .items() — presumably absent images are omitted from the event, TODO confirm.
        return {k: _DESERIALIZER.deserialize(v) for k, v in value.items()}
3539

3640

3741
class UserIdentity(BaseModel):
    """Identity of the AWS service principal recorded on a DynamoDB stream record."""

    type: Literal["Service"] = Field(  # noqa: VNE003, A003 - field name mirrors the event payload key
        description="The type of identity that made the request, which is always 'Service' for DynamoDB streams.",
        examples=["Service"],
    )
    principalId: Literal["dynamodb.amazonaws.com"] = Field(
        description="The unique identifier for the principal that made the request.",
        examples=["dynamodb.amazonaws.com"],
    )
4050

4151

4252
class DynamoDBStreamRecordModel(BaseModel):
    """A single DynamoDB Streams event record, including its change payload.

    Field names mirror the raw event keys (camelCase) so the model validates
    the event without aliasing.
    """

    eventID: str = Field(description="A unique identifier for the event.", examples=["1"])
    eventName: Literal["INSERT", "MODIFY", "REMOVE"] = Field(
        description="The type of operation that was performed on the item.",
        examples=["INSERT"],
    )
    # Fix: the field is typed ``float``, so the schema example must be a float,
    # not the string "1.0" (example type should match the annotated type).
    eventVersion: float = Field(description="The version of the stream record format.", examples=[1.0])
    eventSource: Literal["aws:dynamodb"] = Field(
        description="The source of the event, which is always 'aws:dynamodb' for DynamoDB streams.",
        examples=["aws:dynamodb"],
    )
    awsRegion: str = Field(description="The AWS region where the stream record was generated.", examples=["us-west-2"])
    eventSourceARN: str = Field(
        description="The Amazon Resource Name (ARN) of the DynamoDB stream.",
        examples=["arn:aws:dynamodb:us-west-2:123456789012:table/ExampleTable/stream/2021-01-01T00:00:00.000"],
    )
    dynamodb: DynamoDBStreamChangedRecordModel = Field(
        description="Contains the details of the DynamoDB stream record.",
        examples=[
            {
                "ApproximateCreationDateTime": 1693997155.0,
                "Keys": {"Id": {"N": "101"}},
                "NewImage": {"Message": {"S": "New item!"}, "Id": {"N": "101"}},
                "OldImage": {"Message": {"S": "Old item!"}, "Id": {"N": "100"}},
                "SequenceNumber": "222",
                "SizeBytes": 26,
                "StreamViewType": "NEW_AND_OLD_IMAGES",
            },
        ],
    )
    userIdentity: Optional[UserIdentity] = Field(
        default=None,
        description="Information about the identity that made the request.",
        examples=[{"type": "Service", "principalId": "dynamodb.amazonaws.com"}],
    )
5187

5288

5389
class DynamoDBStreamModel(BaseModel):
    """Envelope for a DynamoDB Streams event: a batch of stream records."""

    Records: List[DynamoDBStreamRecordModel] = Field(
        description="A list of records that contain the details of the DynamoDB stream events.",
        # Fix: the field is a list of records, so each schema example must itself
        # be a list (previously the example was a single record dict).
        examples=[
            [
                {
                    "eventID": "1",
                    "eventName": "INSERT",
                    "eventVersion": "1.0",
                    "eventSource": "aws:dynamodb",
                    "awsRegion": "us-west-2",
                    "eventSourceARN": "arn:aws:dynamodb:us-west-2:123456789012:table/ExampleTable/stream/2021-01-01T00:00:00.000",  # noqa E501
                    "dynamodb": {
                        "ApproximateCreationDateTime": 1693997155.0,
                        "Keys": {"Id": {"N": "101"}},
                        "NewImage": {"Message": {"S": "New item!"}, "Id": {"N": "101"}},
                        "OldImage": {"Message": {"S": "Old item!"}, "Id": {"N": "100"}},
                        "SequenceNumber": "222",
                        "SizeBytes": 26,
                        "StreamViewType": "NEW_AND_OLD_IMAGES",
                    },
                    "userIdentity": {"type": "Service", "principalId": "dynamodb.amazonaws.com"},
                },
            ],
        ],
    )
Lines changed: 68 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,68 @@
1+
---
2+
title: CI/CD Integration
3+
description: Automate Lambda function builds and deployments
4+
---
5+
6+
<!-- markdownlint-disable MD043 -->
7+
8+
Automate your Lambda function builds and deployments using popular CI/CD platforms. These examples show how to build Lambda functions with Powertools for AWS with proper cross-platform compatibility and deploy them reliably.
9+
10+
## GitHub Actions
11+
12+
**GitHub Actions** provides a powerful, integrated CI/CD platform that runs directly in your GitHub repository. It offers excellent integration with AWS services, supports matrix builds for testing multiple configurations, and provides a rich ecosystem of pre-built actions.
13+
14+
=== "Modern AWS Lambda deploy action"
15+
16+
```yaml
17+
--8<-- "examples/build_recipes/cicd/github-actions/deploy-modern.yml"
18+
```
19+
20+
=== "Multi-environment deployment"
21+
22+
```yaml
23+
--8<-- "examples/build_recipes/cicd/github-actions/deploy-multi-env.yml"
24+
```
25+
26+
=== "Simple source code deployment"
27+
28+
```yaml
29+
--8<-- "examples/build_recipes/cicd/github-actions/deploy-simple.yml"
30+
```
31+
32+
=== "S3 deployment method"
33+
34+
```yaml
35+
--8<-- "examples/build_recipes/cicd/github-actions/deploy-s3.yml"
36+
```
37+
38+
=== "Build tool integration"
39+
40+
```yaml
41+
--8<-- "examples/build_recipes/cicd/github-actions/deploy-build-tools.yml"
42+
```
43+
44+
## AWS CodeBuild
45+
46+
**AWS CodeBuild** is a fully managed build service that compiles source code, runs tests, and produces deployment packages. It integrates seamlessly with other AWS services and provides consistent build environments with automatic scaling.
47+
48+
=== "Basic CodeBuild Configuration"
49+
50+
```yaml
51+
--8<-- "examples/build_recipes/cicd/codebuild/buildspec.yml"
52+
```
53+
54+
## Best Practices for CI/CD
55+
56+
1. **Use Linux runners** (ubuntu-latest) to ensure Lambda compatibility
57+
2. **Cache dependencies** to speed up builds (uv, poetry cache, pip cache)
58+
3. **Run tests first** before building deployment packages
59+
4. **Use matrix builds** to test multiple Python versions or configurations
60+
5. **Implement proper secrets management** with GitHub Secrets or AWS Parameter Store
61+
6. **Add deployment gates** for production environments
62+
7. **Monitor deployment success** with CloudWatch metrics and alarms
63+
64+
???+ tip "Performance Optimization"
65+
- Use **uv** for fastest dependency installation in CI/CD
66+
- **Cache virtual environments** between builds when possible
67+
- **Parallelize builds** for multiple environments
68+
- **Use container images** for complex dependencies or large packages
Lines changed: 131 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,131 @@
1+
# AWS CodeBuild buildspec: batch build that runs tests once, then builds and
# deploys environment-specific Lambda packages for dev and prod.
version: 0.2

env:
  variables:
    PYTHON_VERSION: "3.13"
    BUILD_STAGE: "build"  # overridden per batch entry below
  parameter-store:
    # Powertools version pinned centrally in SSM Parameter Store.
    POWERTOOLS_VERSION: "/build/powertools-version"

batch:
  fast-fail: false
  build-list:
    - identifier: test
      env:
        variables:
          BUILD_STAGE: "test"
    - identifier: build_dev
      env:
        variables:
          BUILD_STAGE: "build"
          ENVIRONMENT: "dev"
      depend-on:
        - test  # dev build only runs after tests pass
    - identifier: build_prod
      env:
        variables:
          BUILD_STAGE: "build"
          ENVIRONMENT: "prod"
      depend-on:
        - test  # prod build only runs after tests pass

phases:
  install:
    runtime-versions:
      python: $PYTHON_VERSION
    commands:
      - echo "Build stage: $BUILD_STAGE, Environment: $ENVIRONMENT"
      - pip install --upgrade pip uv

  pre_build:
    commands:
      # Create a stage-specific virtualenv: test deps for the test stage,
      # runtime deps only for the build stage.
      - |
        if [ "$BUILD_STAGE" = "test" ]; then
          echo "Installing test dependencies..."
          uv venv test-env
          source test-env/bin/activate
          uv pip install aws-lambda-powertools[all]==$POWERTOOLS_VERSION pytest pytest-cov
          # NOTE(review): assumes tests/ ends up under test-src/ after this copy —
          # verify against the repository layout.
          cp -r src/ test-src/
        else
          echo "Installing build dependencies..."
          uv venv build-env
          source build-env/bin/activate
          uv pip install aws-lambda-powertools[all]==$POWERTOOLS_VERSION
          uv pip install pydantic requests
        fi

  build:
    commands:
      - |
        if [ "$BUILD_STAGE" = "test" ]; then
          echo "Running tests..."
          source test-env/bin/activate
          cd test-src
          # NOTE(review): coverage.xml is written inside test-src/, but the
          # artifacts section lists it at the build root — confirm it is picked up.
          pytest tests/ --cov=. --cov-report=xml --cov-report=term
          echo "Tests completed successfully"
        else
          echo "Building deployment package for $ENVIRONMENT..."
          source build-env/bin/activate

          # Create environment-specific package
          mkdir -p package-$ENVIRONMENT/
          cp -r build-env/lib/python*/site-packages/* package-$ENVIRONMENT/
          cp -r src/* package-$ENVIRONMENT/

          # Environment-specific optimizations
          if [ "$ENVIRONMENT" = "prod" ]; then
            echo "Applying production optimizations..."
            find package-$ENVIRONMENT/ -name "*.pyc" -delete
            find package-$ENVIRONMENT/ -name "__pycache__" -type d -exec rm -rf {} + 2>/dev/null || true
            find package-$ENVIRONMENT/ -name "tests" -type d -exec rm -rf {} + 2>/dev/null || true
            find package-$ENVIRONMENT/ -name "*.dist-info" -type d -exec rm -rf {} + 2>/dev/null || true
          fi

          # Create deployment ZIP
          cd package-$ENVIRONMENT && zip -r ../lambda-$ENVIRONMENT.zip . && cd ..

          echo "Package size for $ENVIRONMENT: $(du -sh lambda-$ENVIRONMENT.zip)"
        fi

  post_build:
    commands:
      # Deploy the freshly built ZIP and refresh the function's environment
      # variables for the target environment (build stage only).
      - |
        if [ "$BUILD_STAGE" = "build" ]; then
          echo "Deploying to $ENVIRONMENT environment..."

          # Deploy to environment-specific function
          aws lambda update-function-code \
            --function-name powertools-app-$ENVIRONMENT \
            --zip-file fileb://lambda-$ENVIRONMENT.zip \
            --region $AWS_DEFAULT_REGION

          # Update environment-specific configuration
          LOG_LEVEL="INFO"
          if [ "$ENVIRONMENT" = "dev" ]; then
            LOG_LEVEL="DEBUG"
          fi

          aws lambda update-function-configuration \
            --function-name powertools-app-$ENVIRONMENT \
            --environment Variables="{
              ENVIRONMENT=$ENVIRONMENT,
              POWERTOOLS_SERVICE_NAME=powertools-app-$ENVIRONMENT,
              POWERTOOLS_METRICS_NAMESPACE=MyApp/$ENVIRONMENT,
              POWERTOOLS_LOG_LEVEL=$LOG_LEVEL
            }" \
            --region $AWS_DEFAULT_REGION

          echo "Deployment to $ENVIRONMENT completed successfully!"
        fi

artifacts:
  files:
    - lambda-*.zip
    - coverage.xml
  name: lambda-artifacts-$(date +%Y-%m-%d-%H-%M-%S)

# Cache the virtualenvs between builds to speed up dependency installation.
cache:
  paths:
    - 'build-env/**/*'
    - 'test-env/**/*'
131+

0 commit comments

Comments
 (0)