
Commit 4c6741a

Merging from develop
2 parents: fc9875b + 26f44a8

25 files changed: +856 −29 lines

.clusterfuzzlite/Dockerfile

Lines changed: 12 additions & 0 deletions
@@ -0,0 +1,12 @@
+FROM gcr.io/oss-fuzz-base/base-builder-python
+
+# Copy project source
+COPY . $SRC/powertools
+
+WORKDIR $SRC/powertools
+
+# Install project dependencies
+RUN pip3 install -e ".[all]"
+
+# Copy build script
+COPY .clusterfuzzlite/build.sh $SRC/

.clusterfuzzlite/build.sh

Lines changed: 6 additions & 0 deletions
@@ -0,0 +1,6 @@
+#!/bin/bash -eu
+
+# Build fuzz targets from tests/fuzz/
+for fuzzer in $(find $SRC/powertools/tests/fuzz -name 'fuzz_*.py'); do
+    compile_python_fuzzer "$fuzzer"
+done
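
The build script hands every fuzz_*.py file under tests/fuzz/ to OSS-Fuzz's compile_python_fuzzer helper, which wraps an Atheris-style entry point. A rough sketch of such a target follows; the module name fuzz_api_gateway_event.py and the choice of APIGatewayProxyEvent as the parse surface are illustrative assumptions, not part of this commit:

# tests/fuzz/fuzz_api_gateway_event.py -- hypothetical target, not part of this commit
import sys

import atheris

with atheris.instrument_imports():
    from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEvent


def TestOneInput(data: bytes) -> None:
    fdp = atheris.FuzzedDataProvider(data)
    # Exercise the event wrapper with fuzzer-generated header and body values.
    event = APIGatewayProxyEvent(
        {
            "headers": {"content-type": fdp.ConsumeUnicodeNoSurrogates(64)},
            "body": fdp.ConsumeUnicodeNoSurrogates(1024),
        },
    )
    _ = (event.headers, event.body)  # property access walks the untrusted payload


if __name__ == "__main__":
    atheris.Setup(sys.argv, TestOneInput)
    atheris.Fuzz()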

.clusterfuzzlite/project.yaml

Lines changed: 4 additions & 0 deletions
@@ -0,0 +1,4 @@
+language: python
+main_repo: https://github.com/aws-powertools/powertools-lambda-python
+sanitizers:
+  - address

Lines changed: 34 additions & 0 deletions

@@ -0,0 +1,34 @@
+name: ClusterFuzzLite fuzzing
+
+on:
+  schedule:
+    # Run daily at 8 AM UTC
+    - cron: "0 8 * * *"
+  workflow_dispatch:
+
+permissions:
+  contents: read
+
+jobs:
+  PR:
+    runs-on: ubuntu-latest
+    concurrency:
+      group: ${{ github.workflow }}-${{ github.ref }}
+      cancel-in-progress: true
+    steps:
+      - name: Build Fuzzers
+        id: build
+        uses: google/clusterfuzzlite/actions/build_fuzzers@884713a6c30a92e5e8544c39945cd7cb630abcd1 # v1
+        with:
+          language: python
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          sanitizer: address
+
+      - name: Run Fuzzers
+        id: run
+        uses: google/clusterfuzzlite/actions/run_fuzzers@884713a6c30a92e5e8544c39945cd7cb630abcd1 # v1
+        with:
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          fuzz-seconds: 30
+          mode: code-change
+          sanitizer: address

aws_lambda_powertools/event_handler/api_gateway.py

Lines changed: 5 additions & 3 deletions
@@ -826,6 +826,8 @@ def _openapi_operation_request_body(
 
     # Generate the request body media type
     request_media_content: dict[str, Any] = {"schema": body_schema}
+    if field_info.openapi_examples:
+        request_media_content["examples"] = field_info.openapi_examples
     request_body_oai["content"] = {request_media_type: request_media_content}
     return request_body_oai
@@ -2459,7 +2461,7 @@ def register_resolver(func: AnyCallableT) -> AnyCallableT:
 
         return register_resolver
 
-    def resolve(self, event: dict[str, Any], context: LambdaContext) -> dict[str, Any]:
+    def resolve(self, event: Mapping[str, Any], context: LambdaContext) -> dict[str, Any]:
         """Resolves the response based on the provide event and decorator routes
 
         ## Internals
@@ -2512,10 +2514,10 @@ def resolve(self, event: dict[str, Any], context: LambdaContext) -> dict[str, Any]:
             event = event.raw_event
 
         if self._debug:
-            print(self._serializer(event))
+            print(self._serializer(cast(dict, event)))
 
         # Populate router(s) dependencies without keeping a reference to each registered router
-        BaseRouter.current_event = self._to_proxy_event(event)
+        BaseRouter.current_event = self._to_proxy_event(cast(dict, event))
         BaseRouter.lambda_context = context
 
         response = self._resolve().build(self.current_event, self._cors)
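
The widened resolve() signature accepts any read-only Mapping, while the two cast(dict, event) calls keep the debug serializer and the proxy-event conversion working on a plain dict. A minimal sketch of what the relaxed annotation permits; the resolver type, route, and handler are illustrative, not from this commit:

from types import MappingProxyType

from aws_lambda_powertools.event_handler import APIGatewayRestResolver
from aws_lambda_powertools.utilities.typing import LambdaContext

app = APIGatewayRestResolver()


@app.get("/ping")
def ping() -> dict:
    return {"ok": True}


def lambda_handler(event: dict, context: LambdaContext) -> dict:
    # A read-only mapping now satisfies the Mapping[str, Any] annotation on resolve().
    return app.resolve(MappingProxyType(event), context)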

aws_lambda_powertools/event_handler/openapi/params.py

Lines changed: 17 additions & 2 deletions
@@ -724,6 +724,8 @@ def __init__(
             )
         if examples is not None:
             kwargs["examples"] = examples
+        if openapi_examples is not None:
+            kwargs["openapi_examples"] = openapi_examples
         current_json_schema_extra = json_schema_extra or extra
 
         # Pydantic 2.12+ no longer copies alias to validation_alias automatically
@@ -733,6 +735,7 @@ def __init__(
         elif alias is None and validation_alias is not _Unset and validation_alias is not None:
             alias = validation_alias
         kwargs["alias"] = alias
+        self.openapi_examples = openapi_examples
 
         kwargs.update(
             {
@@ -1124,11 +1127,23 @@ def get_field_info_annotated_type(annotation, value, is_path_param: bool) -> tup
     """
     annotated_args = get_args(annotation)
     type_annotation = annotated_args[0]
-    powertools_annotations = [arg for arg in annotated_args[1:] if isinstance(arg, FieldInfo)]
+
+    # Handle both FieldInfo instances and FieldInfo subclasses (e.g., Body vs Body())
+    powertools_annotations: list[FieldInfo] = []
+    for arg in annotated_args[1:]:
+        if isinstance(arg, FieldInfo):
+            powertools_annotations.append(arg)
+        elif isinstance(arg, type) and issubclass(arg, FieldInfo):
+            # If it's a class (e.g., Body instead of Body()), instantiate it
+            powertools_annotations.append(arg())
 
     # Preserve non-FieldInfo metadata (like annotated_types constraints)
     # This is important for constraints like Interval, Gt, Lt, etc.
-    other_metadata = [arg for arg in annotated_args[1:] if not isinstance(arg, FieldInfo)]
+    other_metadata = [
+        arg
+        for arg in annotated_args[1:]
+        if not isinstance(arg, FieldInfo) and not (isinstance(arg, type) and issubclass(arg, FieldInfo))
+    ]
 
     # Determine which annotation to use
     powertools_annotation: FieldInfo | None = None
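
Combined with the api_gateway.py hunk above, openapi_examples passed to Body (or any other FieldInfo subclass) is emitted verbatim under the request body's "examples" key in the generated OpenAPI document, and a bare Body class inside Annotated is now instantiated instead of being dropped. A hedged usage sketch; the route, model, and example payload are illustrative only:

from typing import Annotated

from pydantic import BaseModel

from aws_lambda_powertools.event_handler import APIGatewayRestResolver
from aws_lambda_powertools.event_handler.openapi.params import Body

app = APIGatewayRestResolver(enable_validation=True)


class Order(BaseModel):
    item: str
    quantity: int


@app.post("/orders")
def create_order(
    order: Annotated[
        Order,
        # The "basic" entry is copied as-is into the request body's "examples" map.
        Body(openapi_examples={"basic": {"summary": "Small order", "value": {"item": "widget", "quantity": 2}}}),
    ],
) -> dict:
    return {"received": order.item}


# Annotated[Order, Body] (the bare class) is now also accepted and behaves like Body().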

aws_lambda_powertools/logging/formatter.py

Lines changed: 6 additions & 0 deletions
@@ -281,6 +281,12 @@ def append_context_keys(self, **additional_keys: Any) -> Generator[None, None, None]:
         **additional_keys: Any
             Key-value pairs to include in the log context during the lifespan of the context manager.
 
+        Warning
+        -------
+        All keys added within this context are removed when exiting, even if they existed before.
+        If a key with the same name already exists, the original value will be lost after the context exits.
+        To persist keys across multiple log messages, use `append_keys()` instead.
+
         Example
         --------
         logger = Logger(service="example_service")

aws_lambda_powertools/logging/logger.py

Lines changed: 6 additions & 0 deletions
@@ -820,6 +820,12 @@ def append_context_keys(self, **additional_keys: Any) -> Generator[None, None, None]:
         **additional_keys: Any
             Key-value pairs to include in the log context during the lifespan of the context manager.
 
+        Warning
+        -------
+        All keys added within this context are removed when exiting, even if they existed before.
+        If a key with the same name already exists, the original value will be lost after the context exits.
+        To persist keys across multiple log messages, use `append_keys()` instead.
+
         Example
         --------
         **Logging with contextual keys**
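
A short sketch of the behaviour the new warning documents; the service name and keys are illustrative:

from aws_lambda_powertools import Logger

logger = Logger(service="example_service")
logger.append_keys(request_id="abc-123")  # persists across log calls

with logger.append_context_keys(request_id="override", order_id="42"):
    logger.info("inside context")  # logs request_id="override" and order_id="42"

# Both keys touched inside the context are removed on exit, so the original
# request_id="abc-123" is gone as well; use append_keys() for keys that must persist.
logger.info("outside context")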

aws_lambda_powertools/metrics/metrics.py

Lines changed: 7 additions & 0 deletions
@@ -123,6 +123,13 @@ def add_metric(
     def add_dimension(self, name: str, value: str) -> None:
         self.provider.add_dimension(name=name, value=value)
 
+    def add_dimensions(self, **dimensions: str) -> None:
+        """Add a new set of dimensions creating an additional dimension array.
+
+        Creates a new dimension set in the CloudWatch EMF Dimensions array.
+        """
+        self.provider.add_dimensions(**dimensions)
+
     def serialize_metric_set(
         self,
         metrics: dict | None = None,

aws_lambda_powertools/metrics/provider/cloudwatch_emf/cloudwatch.py

Lines changed: 79 additions & 4 deletions
@@ -94,6 +94,7 @@ def __init__(
 
         self.metadata_set = metadata_set if metadata_set is not None else {}
         self.timestamp: int | None = None
+        self.dimension_sets: list[dict[str, str]] = []  # Store multiple dimension sets
 
         self._metric_units = [unit.value for unit in MetricUnit]
         self._metric_unit_valid_options = list(MetricUnit.__members__)
@@ -256,21 +257,30 @@ def serialize_metric_set(
 
         metric_names_and_values.update({metric_name: metric_value})
 
+        # Build Dimensions array: primary set + additional dimension sets
+        dimension_arrays: list[list[str]] = [list(dimensions.keys())]
+        all_dimensions: dict[str, str] = dict(dimensions)
+
+        # Add each additional dimension set
+        for dim_set in self.dimension_sets:
+            all_dimensions.update(dim_set)
+            dimension_arrays.append(list(dim_set.keys()))
+
         return {
             "_aws": {
                 "Timestamp": self.timestamp or int(datetime.datetime.now().timestamp() * 1000),  # epoch
                 "CloudWatchMetrics": [
                     {
                         "Namespace": self.namespace,  # "test_namespace"
-                        "Dimensions": [list(dimensions.keys())],  # [ "service" ]
+                        "Dimensions": dimension_arrays,  # [["service"], ["env", "region"]]
                         "Metrics": metric_definition,
                     },
                 ],
            },
            # NOTE: Mypy doesn't recognize splats '** syntax' in TypedDict
-            **dimensions,  # "service": "test_service"
-            **metadata,  # type: ignore[typeddict-item] # "username": "test"
-            **metric_names_and_values,  # "single_metric": 1.0
+            **all_dimensions,  # type: ignore[typeddict-item] # All dimension key-value pairs
+            **metadata,  # type: ignore[typeddict-item]
+            **metric_names_and_values,
         }
 
     def add_dimension(self, name: str, value: str) -> None:
@@ -316,6 +326,70 @@ def add_dimension(self, name: str, value: str) -> None:
 
         self.dimension_set[name] = value
 
+    def add_dimensions(self, **dimensions: str) -> None:
+        """Add a new set of dimensions creating an additional dimension array.
+
+        Creates a new dimension set in the CloudWatch EMF Dimensions array.
+
+        Example
+        -------
+        **Add multiple dimension sets**
+
+            metrics.add_dimensions(environment="prod", region="us-east-1")
+
+        Parameters
+        ----------
+        dimensions : str
+            Dimension key-value pairs as keyword arguments
+        """
+        logger.debug(f"Adding dimension set: {dimensions}")
+
+        if not dimensions:
+            warnings.warn(
+                "Empty dimensions dictionary provided",
+                category=PowertoolsUserWarning,
+                stacklevel=2,
+            )
+            return
+
+        sanitized = self._sanitize_dimensions(dimensions)
+        if not sanitized:
+            return
+
+        self._validate_dimension_limit(sanitized)
+
+        self.dimension_sets.append({**self.default_dimensions, **sanitized})
+
+    def _sanitize_dimensions(self, dimensions: dict[str, str]) -> dict[str, str]:
+        """Convert dimension values to strings and filter out empty ones."""
+        sanitized: dict[str, str] = {}
+
+        for name, value in dimensions.items():
+            str_name = str(name)
+            str_value = str(value)
+
+            if not str_name.strip() or not str_value.strip():
+                warnings.warn(
+                    f"Dimension {str_name} has empty name or value",
+                    category=PowertoolsUserWarning,
+                    stacklevel=2,
+                )
+                continue
+
+            sanitized[str_name] = str_value
+
+        return sanitized
+
+    def _validate_dimension_limit(self, new_dimensions: dict[str, str]) -> None:
+        """Validate that adding new dimensions won't exceed CloudWatch limits."""
+        all_keys = set(self.dimension_set.keys())
+        for ds in self.dimension_sets:
+            all_keys.update(ds.keys())
+        all_keys.update(new_dimensions.keys())
+
+        if len(all_keys) > MAX_DIMENSIONS:
+            raise SchemaValidationError(f"Maximum dimensions ({MAX_DIMENSIONS}) exceeded")
+
     def add_metadata(self, key: str, value: Any) -> None:
         """Adds high cardinal metadata for metrics object
 
@@ -377,6 +451,7 @@ def clear_metrics(self) -> None:
         logger.debug("Clearing out existing metric set from memory")
         self.metric_set.clear()
         self.dimension_set.clear()
+        self.dimension_sets.clear()
         self.metadata_set.clear()
         self.set_default_dimensions(**self.default_dimensions)
 
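
A minimal sketch of how the new dimension sets surface in the serialized EMF blob, assuming no default dimensions are configured; the namespace and values are illustrative:

from aws_lambda_powertools import Metrics
from aws_lambda_powertools.metrics import MetricUnit

metrics = Metrics(namespace="ExampleApp")

metrics.add_dimension(name="service", value="orders")            # primary dimension set
metrics.add_dimensions(environment="prod", region="us-east-1")   # additional dimension set
metrics.add_metric(name="orders_created", unit=MetricUnit.Count, value=1)

blob = metrics.serialize_metric_set()
# Per the serializer change above, blob["_aws"]["CloudWatchMetrics"][0]["Dimensions"]
# is now [["service"], ["environment", "region"]], and all four key-value pairs
# appear at the top level of the blob.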
