Skip to content

Commit b82ea7a

Browse files
authored
Merge pull request #225 from risenberg-cyberark/s3
feat: Add S3 lambda event support to Parser utility #224
2 parents 88bd2e0 + 0d21704 commit b82ea7a

File tree

5 files changed

+209
-0
lines changed

5 files changed

+209
-0
lines changed

aws_lambda_powertools/utilities/parser/models/__init__.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22
from .cloudwatch import CloudWatchLogsData, CloudWatchLogsDecode, CloudWatchLogsLogEvent, CloudWatchLogsModel
33
from .dynamodb import DynamoDBStreamChangedRecordModel, DynamoDBStreamModel, DynamoDBStreamRecordModel
44
from .event_bridge import EventBridgeModel
5+
from .s3 import S3Model, S3RecordModel
56
from .ses import SesModel, SesRecordModel
67
from .sns import SnsModel, SnsNotificationModel, SnsRecordModel
78
from .sqs import SqsModel, SqsRecordModel
@@ -18,6 +19,8 @@
1819
"EventBridgeModel",
1920
"DynamoDBStreamChangedRecordModel",
2021
"DynamoDBStreamRecordModel",
22+
"S3Model",
23+
"S3RecordModel",
2124
"SesModel",
2225
"SesRecordModel",
2326
"SnsModel",
Lines changed: 72 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,72 @@
1+
from datetime import datetime
2+
from typing import List, Optional
3+
4+
from pydantic import BaseModel
5+
from pydantic.fields import Field
6+
from pydantic.networks import IPvAnyNetwork
7+
from pydantic.types import PositiveInt
8+
from typing_extensions import Literal
9+
10+
11+
class S3EventRecordGlacierRestoreEventData(BaseModel):
    """Details of a completed S3 Glacier restore operation.

    Only present on object-restore event notifications (see ``glacierEventData``
    on :class:`S3RecordModel`).
    """

    # When the temporary restored copy expires and is removed from S3.
    lifecycleRestorationExpiryTime: datetime
    # Storage class of the restored object, passed through as a raw string
    # (the fixture uses "standard").
    lifecycleRestoreStorageClass: str
14+
15+
16+
class S3EventRecordGlacierEventData(BaseModel):
    """Glacier-specific payload attached to an S3 event record."""

    # Restore details; required whenever glacierEventData itself is present.
    restoreEventData: S3EventRecordGlacierRestoreEventData
18+
19+
20+
class S3Identity(BaseModel):
    """Identity of the principal that triggered the event."""

    # e.g. "AWS:AIDAINPONIXQXHT3IKHL2" in the test fixture.
    principalId: str
22+
23+
24+
class S3RequestParameters(BaseModel):
    """Request parameters captured alongside the S3 event."""

    # IPvAnyNetwork parses a bare IPv4/IPv6 address into a /32 (or /128)
    # network, so "205.255.255.255" round-trips as "205.255.255.255/32".
    # NOTE(review): assumes this field is always a valid IP literal — confirm
    # against real payloads before relying on strict validation here.
    sourceIPAddress: IPvAnyNetwork
26+
27+
28+
class S3ResponseElements(BaseModel):
    """Amazon S3 response headers (useful when tracing a request with AWS)."""

    # Aliases map the hyphenated header names onto valid Python identifiers.
    # NOTE(review): the annotation is `str` but the default is None, so an
    # omitted header yields None despite the non-optional type — confirm
    # whether these should be Optional[str] or required.
    x_amz_request_id: str = Field(None, alias="x-amz-request-id")
    x_amz_id_2: str = Field(None, alias="x-amz-id-2")
31+
32+
33+
class S3OwnerIdentify(BaseModel):
    """Identity of the bucket owner.

    NOTE(review): the class name looks like a typo of "Identity"; kept as-is
    because it is part of the public interface.
    """

    # Canonical ID of the bucket owner (e.g. "A3I5XTEXAMAI3E" in the fixture).
    principalId: str
35+
36+
37+
class S3Bucket(BaseModel):
    """The bucket the event originated from."""

    # Bucket name, e.g. "lambda-artifacts-deafc19498e3f2df".
    name: str
    # Owner of the bucket.
    ownerIdentity: S3OwnerIdentify
    # Full ARN, e.g. "arn:aws:s3:::lambda-artifacts-deafc19498e3f2df".
    arn: str
41+
42+
43+
class S3Object(BaseModel):
    """The S3 object the event refers to."""

    # Object key as it appears in the event payload.
    key: str
    # Size in bytes; PositiveInt rejects 0.
    # NOTE(review): zero-byte objects/folder markers and delete events may
    # carry size 0 or omit size entirely — confirm against real payloads.
    size: PositiveInt
    # Entity tag of the object.
    eTag: str
    # Opaque value S3 uses to order events for the same object key.
    sequencer: str
    # Only populated when bucket versioning is enabled; the fixtures omit it.
    versionId: Optional[str]
49+
50+
51+
class S3Message(BaseModel):
    """The ``s3`` section of an event record: bucket + object details."""

    # Schema version of this section, e.g. "1.0".
    s3SchemaVersion: str
    # ID of the bucket notification configuration that produced the event.
    configurationId: str
    bucket: S3Bucket
    # Field shadows the `object` builtin on purpose to mirror the event's
    # JSON key; lint suppressions acknowledge that.
    object: S3Object  # noqa: A003,VNE003
56+
57+
58+
class S3RecordModel(BaseModel):
    """A single record of an S3 event notification delivered to Lambda."""

    # Event structure version, e.g. "2.1".
    eventVersion: str
    # Pinned so payloads from other services fail validation early.
    eventSource: Literal["aws:s3"]
    awsRegion: str
    # Parsed from the ISO-8601 timestamp in the payload.
    eventTime: datetime
    # e.g. "ObjectCreated:Put"; left as a free-form string to cover the
    # full family of S3 event names.
    eventName: str
    userIdentity: S3Identity
    requestParameters: S3RequestParameters
    responseElements: S3ResponseElements
    # Bucket/object details.
    s3: S3Message
    # Only present on Glacier restore notifications.
    glacierEventData: Optional[S3EventRecordGlacierEventData]
69+
70+
71+
class S3Model(BaseModel):
    """Top-level S3 event notification: a batch of one or more records."""

    # Capitalized to mirror the event payload's "Records" key.
    Records: List[S3RecordModel]

docs/content/utilities/parser.mdx

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -158,6 +158,7 @@ Model name | Description
158158
**SqsModel** | Lambda Event Source payload for Amazon SQS
159159
**AlbModel** | Lambda Event Source payload for Amazon Application Load Balancer
160160
**CloudwatchLogsModel** | Lambda Event Source payload for Amazon CloudWatch Logs
161+
**S3Model** | Lambda Event Source payload for Amazon S3
161162

162163
You can extend them to include your own models, and yet have all other known fields parsed along the way.
163164

tests/events/s3EventGlacier.json

Lines changed: 44 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,44 @@
1+
{
2+
"Records": [
3+
{
4+
"eventVersion": "2.1",
5+
"eventSource": "aws:s3",
6+
"awsRegion": "us-east-2",
7+
"eventTime": "2019-09-03T19:37:27.192Z",
8+
"eventName": "ObjectCreated:Put",
9+
"userIdentity": {
10+
"principalId": "AWS:AIDAINPONIXQXHT3IKHL2"
11+
},
12+
"requestParameters": {
13+
"sourceIPAddress": "205.255.255.255"
14+
},
15+
"responseElements": {
16+
"x-amz-request-id": "D82B88E5F771F645",
17+
"x-amz-id-2": "vlR7PnpV2Ce81l0PRw6jlUpck7Jo5ZsQjryTjKlc5aLWGVHPZLj5NeC6qMa0emYBDXOo6QBU0Wo="
18+
},
19+
"s3": {
20+
"s3SchemaVersion": "1.0",
21+
"configurationId": "828aa6fc-f7b5-4305-8584-487c791949c1",
22+
"bucket": {
23+
"name": "lambda-artifacts-deafc19498e3f2df",
24+
"ownerIdentity": {
25+
"principalId": "A3I5XTEXAMAI3E"
26+
},
27+
"arn": "arn:aws:s3:::lambda-artifacts-deafc19498e3f2df"
28+
},
29+
"object": {
30+
"key": "b21b84d653bb07b05b1e6b33684dc11b",
31+
"size": 1305107,
32+
"eTag": "b21b84d653bb07b05b1e6b33684dc11b",
33+
"sequencer": "0C0F6F405D6ED209E1"
34+
}
35+
},
36+
"glacierEventData": {
37+
"restoreEventData": {
38+
"lifecycleRestorationExpiryTime": "1970-01-01T00:01:00.000Z",
39+
"lifecycleRestoreStorageClass": "standard"
40+
}
41+
}
42+
}
43+
]
44+
}

tests/functional/parser/test_s3.py

Lines changed: 89 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,89 @@
1+
from aws_lambda_powertools.utilities.parser import event_parser
2+
from aws_lambda_powertools.utilities.parser.models import S3Model, S3RecordModel
3+
from aws_lambda_powertools.utilities.typing import LambdaContext
4+
from tests.functional.parser.utils import load_event
5+
6+
7+
@event_parser(model=S3Model)
def handle_s3(event: S3Model, _: LambdaContext):
    """Assert the parsed model matches the s3Event.json fixture (no glacier data)."""
    assert len(event.Records) == 1
    record: S3RecordModel = event.Records[0]

    assert record.eventVersion == "2.1"
    assert record.eventSource == "aws:s3"
    assert record.awsRegion == "us-east-2"
    # 2019-09-03T19:37:27.192Z expressed as epoch milliseconds
    assert int(round(record.eventTime.timestamp() * 1000)) == 1567539447192
    assert record.eventName == "ObjectCreated:Put"
    assert record.userIdentity.principalId == "AWS:AIDAINPONIXQXHT3IKHL2"
    # IPvAnyNetwork renders a bare IPv4 address as a /32 network
    assert str(record.requestParameters.sourceIPAddress) == "205.255.255.255/32"

    response = record.responseElements
    assert response.x_amz_request_id == "D82B88E5F771F645"
    expected_id_2 = "vlR7PnpV2Ce81l0PRw6jlUpck7Jo5ZsQjryTjKlc5aLWGVHPZLj5NeC6qMa0emYBDXOo6QBU0Wo="
    assert response.x_amz_id_2 == expected_id_2

    message = record.s3
    assert message.s3SchemaVersion == "1.0"
    assert message.configurationId == "828aa6fc-f7b5-4305-8584-487c791949c1"
    assert message.bucket.name == "lambda-artifacts-deafc19498e3f2df"
    assert message.bucket.ownerIdentity.principalId == "A3I5XTEXAMAI3E"
    assert message.bucket.arn == "arn:aws:s3:::lambda-artifacts-deafc19498e3f2df"

    obj = message.object
    assert obj.key == "b21b84d653bb07b05b1e6b33684dc11b"
    assert obj.size == 1305107
    assert obj.eTag == "b21b84d653bb07b05b1e6b33684dc11b"
    assert obj.versionId is None
    assert obj.sequencer == "0C0F6F405D6ED209E1"

    # plain s3Event.json has no Glacier section
    assert record.glacierEventData is None
40+
41+
42+
@event_parser(model=S3Model)
def handle_s3_glacier(event: S3Model, _: LambdaContext):
    """Assert the parsed model matches the s3EventGlacier.json fixture."""
    assert len(event.Records) == 1
    record: S3RecordModel = event.Records[0]

    assert record.eventVersion == "2.1"
    assert record.eventSource == "aws:s3"
    assert record.awsRegion == "us-east-2"
    # 2019-09-03T19:37:27.192Z expressed as epoch milliseconds
    assert int(round(record.eventTime.timestamp() * 1000)) == 1567539447192
    assert record.eventName == "ObjectCreated:Put"
    assert record.userIdentity.principalId == "AWS:AIDAINPONIXQXHT3IKHL2"
    # IPvAnyNetwork renders a bare IPv4 address as a /32 network
    assert str(record.requestParameters.sourceIPAddress) == "205.255.255.255/32"

    response = record.responseElements
    assert response.x_amz_request_id == "D82B88E5F771F645"
    expected_id_2 = "vlR7PnpV2Ce81l0PRw6jlUpck7Jo5ZsQjryTjKlc5aLWGVHPZLj5NeC6qMa0emYBDXOo6QBU0Wo="
    assert response.x_amz_id_2 == expected_id_2

    message = record.s3
    assert message.s3SchemaVersion == "1.0"
    assert message.configurationId == "828aa6fc-f7b5-4305-8584-487c791949c1"
    assert message.bucket.name == "lambda-artifacts-deafc19498e3f2df"
    assert message.bucket.ownerIdentity.principalId == "A3I5XTEXAMAI3E"
    assert message.bucket.arn == "arn:aws:s3:::lambda-artifacts-deafc19498e3f2df"

    obj = message.object
    assert obj.key == "b21b84d653bb07b05b1e6b33684dc11b"
    assert obj.size == 1305107
    assert obj.eTag == "b21b84d653bb07b05b1e6b33684dc11b"
    assert obj.versionId is None
    assert obj.sequencer == "0C0F6F405D6ED209E1"

    assert record.glacierEventData is not None
    restore = record.glacierEventData.restoreEventData
    # 1970-01-01T00:01:00Z is exactly 60000 ms after the epoch
    assert int(round(restore.lifecycleRestorationExpiryTime.timestamp() * 1000)) == 60000
    assert restore.lifecycleRestoreStorageClass == "standard"
80+
81+
82+
def test_s3_trigger_event():
    """Parse the plain S3 put-event fixture through the decorated handler."""
    handle_s3(load_event("s3Event.json"), LambdaContext())
85+
86+
87+
def test_s3_glacier_trigger_event():
    """Parse the Glacier-restore S3 event fixture through the decorated handler."""
    handle_s3_glacier(load_event("s3EventGlacier.json"), LambdaContext())

0 commit comments

Comments
 (0)