From f1db319511ae2a325f2338f3e4af0192e6822201 Mon Sep 17 00:00:00 2001 From: Ramprasad G Date: Mon, 27 Feb 2023 21:33:06 -0800 Subject: [PATCH 01/11] Update README.md (#100) adding missing import of StorageResolution --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 561bb7d..3e3e5ee 100644 --- a/README.md +++ b/README.md @@ -35,6 +35,7 @@ To get a metric logger, you can decorate your function with a `metric_scope`: ```py from aws_embedded_metrics import metric_scope +from aws_embedded_metrics.storage_resolution import StorageResolution @metric_scope def my_handler(metrics): From f44aaf2eec60902115f64fa3b2dd33fa98b91c92 Mon Sep 17 00:00:00 2001 From: liquidpele Date: Thu, 18 May 2023 01:16:53 -0400 Subject: [PATCH 02/11] Fix defect where duplicate data is saved if one metric requires splitting up into multiple log lines (#102) Co-authored-by: reecepeg --- .../serializers/log_serializer.py | 8 +++++- tests/serializer/test_log_serializer.py | 27 +++++++++++++++++++ 2 files changed, 34 insertions(+), 1 deletion(-) diff --git a/aws_embedded_metrics/serializers/log_serializer.py b/aws_embedded_metrics/serializers/log_serializer.py index cf8e05a..36bbb12 100644 --- a/aws_embedded_metrics/serializers/log_serializer.py +++ b/aws_embedded_metrics/serializers/log_serializer.py @@ -67,15 +67,19 @@ def create_body() -> Dict[str, Any]: # Track batch number to know where to slice metric data i = 0 - + complete_metrics = set() while remaining_data: remaining_data = False current_body = create_body() for metric_name, metric in context.metrics.items(): + # ensure we don't add duplicates of metrics we already completed + if metric_name in complete_metrics: + continue if len(metric.values) == 1: current_body[metric_name] = metric.values[0] + complete_metrics.add(metric_name) else: # Slice metric data as each batch cannot contain more than # MAX_DATAPOINTS_PER_METRIC entries for a given metric @@ -87,6 +91,8 @@ def create_body() -> Dict[str, Any]: # of the metric value list if len(metric.values) > end_index: remaining_data = True + else: + complete_metrics.add(metric_name) metric_body = {"Name": metric_name, "Unit": metric.unit} if metric.storage_resolution == StorageResolution.HIGH: diff --git a/tests/serializer/test_log_serializer.py b/tests/serializer/test_log_serializer.py index 60d1e63..ff5e77a 100644 --- a/tests/serializer/test_log_serializer.py +++ b/tests/serializer/test_log_serializer.py @@ -248,6 +248,33 @@ def test_serialize_with_more_than_100_metrics_and_datapoints(): assert metric_results == expected_results +def test_serialize_no_duplication_bug(): + """ + A bug existed where metrics with lots of values have to be broken up + but single value metrics got duplicated across each section. + This test verifies the fix to ensure no duplication. 
+ """ + context = get_context() + single_expected_result = 1 + single_found_result = 0 + + # create a metric with a single value + single_key = "Metric-single" + context.put_metric(single_key, single_expected_result) + # add a lot of another metric so the log batches must be broken up + for i in range(1000): + context.put_metric("Metric-many", 0) + + results = serializer.serialize(context) + + # count up all values for the single metric to ensure no duplicates + for batch in results: + for metric_key, value in json.loads(batch).items(): + if metric_key == single_key: + single_found_result += value + assert single_expected_result == single_found_result + + def test_serialize_with_multiple_metrics(): # arrange metrics = 2 From 079941fce5bef7d9ed1faa1be704b5815af750f0 Mon Sep 17 00:00:00 2001 From: Gordon Pham-Nguyen <4203037+gordonpn@users.noreply.github.com> Date: Wed, 31 May 2023 10:35:59 -0400 Subject: [PATCH 03/11] Update examples (#103) --- README.md | 1 + examples/README.md | 49 ++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 50 insertions(+) create mode 100644 examples/README.md diff --git a/README.md b/README.md index 3e3e5ee..199161f 100644 --- a/README.md +++ b/README.md @@ -18,6 +18,7 @@ Generate CloudWatch Metrics embedded within structured log events. The embedded - Easily generate custom metrics from Lambda functions without requiring custom batching code, making blocking network requests or relying on 3rd party software. - Other compute environments (EC2, On-prem, ECS, EKS, and other container environments) are supported by installing the [CloudWatch Agent](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch_Embedded_Metric_Format_Generation_CloudWatch_Agent.html). + - Examples can be found in [examples/README.md](examples/README.md) - **Linking metrics to high cardinality context** diff --git a/examples/README.md b/examples/README.md new file mode 100644 index 0000000..493bf86 --- /dev/null +++ b/examples/README.md @@ -0,0 +1,49 @@ +# Examples + +## Docker + +With Docker images, using the `awslogs` log driver will send your container logs to CloudWatch Logs. All you have to do is write to STDOUT and your EMF logs will be processed. + +[Official Docker documentation for `awslogs` driver](https://docs.docker.com/config/containers/logging/awslogs/) + +## ECS and Fargate + +With ECS and Fargate, you can use the `awslogs` log driver to have your logs sent to CloudWatch Logs on your behalf. After configuring your task to use the `awslogs` log driver, you may write your EMF logs to STDOUT and they will be processed. + +[ECS documentation on `awslogs` log driver](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/using_awslogs.html) + +## Fluent Bit and Fluentd + +Fluent Bit can be used to collect logs and push them to CloudWatch Logs. After configuring the Amazon CloudWatch Logs output plugin, you may write your EMF logs to STDOUT and they will be processed. 
+ +[Getting Started with Fluent Bit](https://docs.fluentbit.io/manual/installation/getting-started-with-fluent-bit) + +[Amazon CloudWatch output plugin for Fluent Bit](https://docs.fluentbit.io/manual/pipeline/outputs/cloudwatch) + +### Example Metrics + +```json +{ + "_aws": { + "Timestamp": 1583902595342, + "CloudWatchMetrics": [ + { + "Dimensions": [[ "ServiceName", "ServiceType" ]], + "Metrics": [{ "Name": "ProcessingTime", "Unit": "Milliseconds" }], + "Namespace": "aws-embedded-metrics" + } + ] + }, + "ServiceName": "example", + "ServiceType": "AWS::ECS::Container", + "Method": "GET", + "Url": "/test", + "containerId": "702e4bcf1345", + "createdAt": "2020-03-11T04:54:24.981207801Z", + "startedAt": "2020-03-11T04:54:25.594413051Z", + "image": ".dkr.ecr..amazonaws.com/emf-examples:latest", + "cluster": "emf-example", + "taskArn": "arn:aws:ecs:::task/2fe946f6-8a2e-41a4-8fec-c4983bad8f74", + "ProcessingTime": 5 +} +``` From 0c447f9489b317866f143275cec04d5f4719b70d Mon Sep 17 00:00:00 2001 From: Himtanaya Bhadada Date: Tue, 11 Jul 2023 13:06:26 -0700 Subject: [PATCH 04/11] bump version to 3.1.1 (#107) * bump version to 3.1.1 * Reverting to old stable debain version to let the build pass --------- Co-authored-by: Himtanaya Bhadada --- setup.py | 2 +- tests/integ/agent/Dockerfile | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 08a70cd..841b1e0 100644 --- a/setup.py +++ b/setup.py @@ -5,7 +5,7 @@ setup( name="aws-embedded-metrics", - version="3.1.0", + version="3.1.1", author="Amazon Web Services", author_email="jarnance@amazon.com", description="AWS Embedded Metrics Package", diff --git a/tests/integ/agent/Dockerfile b/tests/integ/agent/Dockerfile index d8078d9..eb443fe 100644 --- a/tests/integ/agent/Dockerfile +++ b/tests/integ/agent/Dockerfile @@ -1,4 +1,4 @@ -FROM debian:latest +FROM debian:bullseye RUN apt-get update && \ apt-get install -y ca-certificates curl && \ From 7137ad1bfa35a797433572e55bcbb8f9919d03b5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Anton=20Gr=C3=BCbel?= Date: Thu, 13 Jul 2023 16:33:15 +0200 Subject: [PATCH 05/11] add py.typed file (#104) Co-authored-by: Mark Kuhn --- aws_embedded_metrics/py.typed | 0 setup.py | 3 +++ 2 files changed, 3 insertions(+) create mode 100644 aws_embedded_metrics/py.typed diff --git a/aws_embedded_metrics/py.typed b/aws_embedded_metrics/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/setup.py b/setup.py index 841b1e0..43e81e9 100644 --- a/setup.py +++ b/setup.py @@ -22,6 +22,9 @@ "License :: OSI Approved :: Apache Software License", ], packages=find_packages(exclude=["tests*"]), + package_data={ + "aws_embedded_metrics": ["py.typed"], + }, include_package_data=True, install_requires=["aiohttp"], test_suite="tests", From 4c36304abcfb3f9e22a7c48c8c896163fd4ac51d Mon Sep 17 00:00:00 2001 From: Amruth Rayabagi <118546401+rayabagi@users.noreply.github.com> Date: Wed, 13 Sep 2023 14:47:43 -0700 Subject: [PATCH 06/11] Added support to set custom timestamp (#110) Added support to set custom timestamp Co-authored-by: Mark Kuhn --- README.md | 14 ++++++ aws_embedded_metrics/constants.py | 3 ++ aws_embedded_metrics/exceptions.py | 6 +++ .../logger/metrics_context.py | 23 ++++++++- aws_embedded_metrics/logger/metrics_logger.py | 5 ++ aws_embedded_metrics/utils.py | 8 ++++ aws_embedded_metrics/validator.py | 33 ++++++++++++- setup.py | 2 +- tests/integ/agent/test_end_to_end.py | 1 + tests/logger/test_metrics_context.py | 48 +++++++++++++++++-- tests/logger/test_metrics_logger.py | 18 ++++++- 
11 files changed, 152 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index 199161f..1903169 100644 --- a/README.md +++ b/README.md @@ -178,6 +178,20 @@ Examples: set_namespace("MyApplication") ``` +- **set_timestamp**(timestamp: datetime) -> MetricsLogger + +Sets the timestamp of the metrics. If not set, current time of the client will be used. + +Timestamp must meet CloudWatch requirements, otherwise a InvalidTimestampError will be thrown. See [Timestamps](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/cloudwatch_concepts.html#about_timestamp) for valid values. + +Examples: + +```py + set_timestamp(datetime.datetime.now()) +``` + + + - **flush**() Flushes the current MetricsContext to the configured sink and resets all properties and metric values. The namespace and default dimensions will be preserved across flushes. diff --git a/aws_embedded_metrics/constants.py b/aws_embedded_metrics/constants.py index af2ac16..45f1b6a 100644 --- a/aws_embedded_metrics/constants.py +++ b/aws_embedded_metrics/constants.py @@ -20,3 +20,6 @@ MAX_METRIC_NAME_LENGTH = 1024 MAX_NAMESPACE_LENGTH = 256 VALID_NAMESPACE_REGEX = '^[a-zA-Z0-9._#:/-]+$' +TIMESTAMP = "Timestamp" +MAX_TIMESTAMP_PAST_AGE = 14 * 24 * 60 * 60 * 1000 # 14 days +MAX_TIMESTAMP_FUTURE_AGE = 2 * 60 * 60 * 1000 # 2 hours diff --git a/aws_embedded_metrics/exceptions.py b/aws_embedded_metrics/exceptions.py index 2ca0f8d..22cfe94 100644 --- a/aws_embedded_metrics/exceptions.py +++ b/aws_embedded_metrics/exceptions.py @@ -33,3 +33,9 @@ class InvalidNamespaceError(Exception): def __init__(self, message: str) -> None: # Call the base class constructor with the parameters it needs super().__init__(message) + + +class InvalidTimestampError(Exception): + def __init__(self, message: str) -> None: + # Call the base class constructor with the parameters it needs + super().__init__(message) diff --git a/aws_embedded_metrics/logger/metrics_context.py b/aws_embedded_metrics/logger/metrics_context.py index 3e5303a..ac11ea9 100644 --- a/aws_embedded_metrics/logger/metrics_context.py +++ b/aws_embedded_metrics/logger/metrics_context.py @@ -12,7 +12,8 @@ # limitations under the License. -from aws_embedded_metrics import constants, utils +from datetime import datetime +from aws_embedded_metrics import constants, utils, validator from aws_embedded_metrics.config import get_config from aws_embedded_metrics.logger.metric import Metric from aws_embedded_metrics.validator import validate_dimension_set, validate_metric @@ -39,7 +40,7 @@ def __init__( self.default_dimensions: Dict[str, str] = default_dimensions or {} self.metrics: Dict[str, Metric] = {} self.should_use_default_dimensions = True - self.meta: Dict[str, Any] = {"Timestamp": utils.now()} + self.meta: Dict[str, Any] = {constants.TIMESTAMP: utils.now()} self.metric_name_and_resolution_map: Dict[str, StorageResolution] = {} def put_metric(self, key: str, value: float, unit: str = None, storage_resolution: StorageResolution = StorageResolution.STANDARD) -> None: @@ -176,3 +177,21 @@ def create_copy_with_context(self, preserve_dimensions: bool = False) -> "Metric @staticmethod def empty() -> "MetricsContext": return MetricsContext() + + def set_timestamp(self, timestamp: datetime) -> None: + """ + Set the timestamp of metrics emitted in this context. If not set, the timestamp will default to the time the context is constructed. + + Timestamp must meet CloudWatch requirements, otherwise a InvalidTimestampError will be thrown. 
+ See [Timestamps](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/cloudwatch_concepts.html#about_timestamp) + for valid values. + + Parameters: + timestamp (datetime): The timestamp value to be set. + + Raises: + InvalidTimestampError: If the provided timestamp is invalid. + + """ + validator.validate_timestamp(timestamp) + self.meta[constants.TIMESTAMP] = utils.convert_to_milliseconds(timestamp) diff --git a/aws_embedded_metrics/logger/metrics_logger.py b/aws_embedded_metrics/logger/metrics_logger.py index c25cae2..ebbe469 100644 --- a/aws_embedded_metrics/logger/metrics_logger.py +++ b/aws_embedded_metrics/logger/metrics_logger.py @@ -11,6 +11,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +from datetime import datetime from aws_embedded_metrics.environment import Environment from aws_embedded_metrics.logger.metrics_context import MetricsContext from aws_embedded_metrics.validator import validate_namespace @@ -114,6 +115,10 @@ def add_stack_trace(self, key: str, details: Any = None, exc_info: Tuple = None) self.set_property(key, trace_value) return self + def set_timestamp(self, timestamp: datetime) -> "MetricsLogger": + self.context.set_timestamp(timestamp) + return self + def new(self) -> "MetricsLogger": return MetricsLogger( self.resolve_environment, self.context.create_copy_with_context() diff --git a/aws_embedded_metrics/utils.py b/aws_embedded_metrics/utils.py index b73d10f..8753cd6 100644 --- a/aws_embedded_metrics/utils.py +++ b/aws_embedded_metrics/utils.py @@ -12,4 +12,12 @@ # limitations under the License. import time +from datetime import datetime def now() -> int: return int(round(time.time() * 1000)) + + +def convert_to_milliseconds(dt: datetime) -> int: + if dt == datetime.min: + return 0 + + return int(round(dt.timestamp() * 1000)) diff --git a/aws_embedded_metrics/validator.py b/aws_embedded_metrics/validator.py index 21f9c41..d6ac3fe 100644 --- a/aws_embedded_metrics/validator.py +++ b/aws_embedded_metrics/validator.py @@ -17,7 +17,9 @@ from aws_embedded_metrics.unit import Unit from aws_embedded_metrics.storage_resolution import StorageResolution from aws_embedded_metrics.exceptions import DimensionSetExceededError, InvalidDimensionError, InvalidMetricError, InvalidNamespaceError -import aws_embedded_metrics.constants as constants +from aws_embedded_metrics.exceptions import InvalidTimestampError +from datetime import datetime +from aws_embedded_metrics import constants, utils def validate_dimension_set(dimension_set: Dict[str, str]) -> None: @@ -114,3 +116,32 @@ def validate_namespace(namespace: str) -> None: if not re.match(constants.VALID_NAMESPACE_REGEX, namespace): raise InvalidNamespaceError(f"Namespace contains invalid characters: {namespace}") + + +def validate_timestamp(timestamp: datetime) -> None: + """ + Validates a given timestamp based on CloudWatch Timestamp guidelines. + + Timestamp must meet CloudWatch requirements, otherwise a InvalidTimestampError will be thrown. + See [Timestamps](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/cloudwatch_concepts.html#about_timestamp) + for valid values. + + Parameters: + timestamp (datetime): Datetime object representing the timestamp to validate. + + Raises: + InvalidTimestampError: If the timestamp is either None, too old, or too far in the future. 
+ """ + if not timestamp: + raise InvalidTimestampError("Timestamp must be a valid datetime object") + + given_time_in_milliseconds = utils.convert_to_milliseconds(timestamp) + current_time_in_milliseconds = utils.now() + + if given_time_in_milliseconds < (current_time_in_milliseconds - constants.MAX_TIMESTAMP_PAST_AGE): + raise InvalidTimestampError( + f"Timestamp {str(timestamp)} must not be older than {int(constants.MAX_TIMESTAMP_PAST_AGE/(24 * 60 * 60 * 1000))} days") + + if given_time_in_milliseconds > (current_time_in_milliseconds + constants.MAX_TIMESTAMP_FUTURE_AGE): + raise InvalidTimestampError( + f"Timestamp {str(timestamp)} must not be newer than {int(constants.MAX_TIMESTAMP_FUTURE_AGE/(60 * 60 * 1000))} hours") diff --git a/setup.py b/setup.py index 43e81e9..27c3417 100644 --- a/setup.py +++ b/setup.py @@ -5,7 +5,7 @@ setup( name="aws-embedded-metrics", - version="3.1.1", + version="3.2.0", author="Amazon Web Services", author_email="jarnance@amazon.com", description="AWS Embedded Metrics Package", diff --git a/tests/integ/agent/test_end_to_end.py b/tests/integ/agent/test_end_to_end.py index 9bb363e..8b45d39 100644 --- a/tests/integ/agent/test_end_to_end.py +++ b/tests/integ/agent/test_end_to_end.py @@ -42,6 +42,7 @@ async def do_work(metrics): metrics.put_dimensions({"Operation": "Agent"}) metrics.put_metric(metric_name, 100, "Milliseconds") metrics.set_property("RequestId", "422b1569-16f6-4a03-b8f0-fe3fd9b100f8") + metrics.set_timestamp(datetime.utcnow()) # act await do_work() diff --git a/tests/logger/test_metrics_context.py b/tests/logger/test_metrics_context.py index 86f62d0..a0ab133 100644 --- a/tests/logger/test_metrics_context.py +++ b/tests/logger/test_metrics_context.py @@ -1,15 +1,17 @@ +from faker import Faker +from importlib import reload +from datetime import datetime, timedelta import pytest import math import random -from aws_embedded_metrics import constants +from aws_embedded_metrics import constants, utils from aws_embedded_metrics.unit import Unit from aws_embedded_metrics.storage_resolution import StorageResolution from aws_embedded_metrics import config from aws_embedded_metrics.logger.metrics_context import MetricsContext -from aws_embedded_metrics.constants import DEFAULT_NAMESPACE +from aws_embedded_metrics.constants import DEFAULT_NAMESPACE, MAX_TIMESTAMP_FUTURE_AGE, MAX_TIMESTAMP_PAST_AGE from aws_embedded_metrics.exceptions import DimensionSetExceededError, InvalidDimensionError, InvalidMetricError -from importlib import reload -from faker import Faker +from aws_embedded_metrics.exceptions import InvalidTimestampError fake = Faker() @@ -458,6 +460,44 @@ def test_cannot_put_more_than_30_dimensions(): context.put_dimensions(dimension_set) +@pytest.mark.parametrize( + "timestamp", + [ + datetime.now(), + datetime.now() - timedelta(milliseconds=MAX_TIMESTAMP_PAST_AGE - 5000), + datetime.now() + timedelta(milliseconds=MAX_TIMESTAMP_FUTURE_AGE - 5000) + ] +) +def test_set_valid_timestamp_verify_timestamp(timestamp: datetime): + context = MetricsContext() + + context.set_timestamp(timestamp) + + assert context.meta[constants.TIMESTAMP] == utils.convert_to_milliseconds(timestamp) + + +@pytest.mark.parametrize( + "timestamp", + [ + None, + datetime.min, + datetime(1970, 1, 1, 0, 0, 0), + datetime.max, + datetime(9999, 12, 31, 23, 59, 59, 999999), + datetime(1, 1, 1, 0, 0, 0, 0, None), + datetime(1, 1, 1), + datetime(1, 1, 1, 0, 0), + datetime.now() - timedelta(milliseconds=MAX_TIMESTAMP_PAST_AGE + 1), + datetime.now() + 
timedelta(milliseconds=MAX_TIMESTAMP_FUTURE_AGE + 5000) + ] +) +def test_set_invalid_timestamp_raises_exception(timestamp: datetime): + context = MetricsContext() + + with pytest.raises(InvalidTimestampError): + context.set_timestamp(timestamp) + + # Test utility method diff --git a/tests/logger/test_metrics_logger.py b/tests/logger/test_metrics_logger.py index 8602c47..08bd971 100644 --- a/tests/logger/test_metrics_logger.py +++ b/tests/logger/test_metrics_logger.py @@ -1,4 +1,5 @@ -from aws_embedded_metrics import config +from datetime import datetime +from aws_embedded_metrics import config, utils from aws_embedded_metrics.logger import metrics_logger from aws_embedded_metrics.sinks import Sink from aws_embedded_metrics.environment import Environment @@ -493,6 +494,21 @@ async def test_configure_flush_to_preserve_dimensions(mocker): assert dimensions[0][dimension_key] == dimension_value +@pytest.mark.asyncio +async def test_can_set_timestamp(mocker): + # arrange + expected_value = datetime.now() + + logger, sink, env = get_logger_and_sink(mocker) + + # act + logger.set_timestamp(expected_value) + await logger.flush() + + # assert + context = get_flushed_context(sink) + assert context.meta[constants.TIMESTAMP] == utils.convert_to_milliseconds(expected_value) + # Test helper methods From 66e8b6b2ad7c02fa03ae094e9355b4f810d2512b Mon Sep 17 00:00:00 2001 From: Gordon Pham-Nguyen <4203037+gordonpn@users.noreply.github.com> Date: Thu, 6 Jun 2024 19:54:54 -0400 Subject: [PATCH 07/11] Add awsfirelens log driver to example documentation (#114) --- examples/README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/examples/README.md b/examples/README.md index 493bf86..4527755 100644 --- a/examples/README.md +++ b/examples/README.md @@ -8,7 +8,9 @@ With Docker images, using the `awslogs` log driver will send your container logs ## ECS and Fargate -With ECS and Fargate, you can use the `awslogs` log driver to have your logs sent to CloudWatch Logs on your behalf. After configuring your task to use the `awslogs` log driver, you may write your EMF logs to STDOUT and they will be processed. +With ECS and Fargate, you can use the `awsfirelens` (recommended) or `awslogs` log driver to have your logs sent to CloudWatch Logs on your behalf. After configuring the options for your preferred log driver, you may write your EMF logs to STDOUT and they will be processed. 
+ +[`awsfirelens` documentation](https://github.com/aws/amazon-cloudwatch-logs-for-fluent-bit) [ECS documentation on `awslogs` log driver](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/using_awslogs.html) From 1836dd89b626e622327c1307a17b87170f4e307b Mon Sep 17 00:00:00 2001 From: Alin Radu <5991570+acradu@users.noreply.github.com> Date: Wed, 2 Oct 2024 10:51:29 -0700 Subject: [PATCH 08/11] Fix @metric_scope for generator and async generator functions (#113) Co-authored-by: Alin RADU --- aws_embedded_metrics/metric_scope/__init__.py | 55 +++++++++++++++---- tests/metric_scope/test_metric_scope.py | 37 +++++++++++++ 2 files changed, 82 insertions(+), 10 deletions(-) diff --git a/aws_embedded_metrics/metric_scope/__init__.py b/aws_embedded_metrics/metric_scope/__init__.py index 47044bc..b185f38 100644 --- a/aws_embedded_metrics/metric_scope/__init__.py +++ b/aws_embedded_metrics/metric_scope/__init__.py @@ -18,35 +18,70 @@ def metric_scope(fn): # type: ignore + if inspect.isasyncgenfunction(fn): + @wraps(fn) + async def async_gen_wrapper(*args, **kwargs): # type: ignore + logger = create_metrics_logger() + if "metrics" in inspect.signature(fn).parameters: + kwargs["metrics"] = logger + + try: + fn_gen = fn(*args, **kwargs) + while True: + result = await fn_gen.__anext__() + await logger.flush() + yield result + except Exception as ex: + await logger.flush() + if not isinstance(ex, StopIteration): + raise + + return async_gen_wrapper + + elif inspect.isgeneratorfunction(fn): + @wraps(fn) + def gen_wrapper(*args, **kwargs): # type: ignore + logger = create_metrics_logger() + if "metrics" in inspect.signature(fn).parameters: + kwargs["metrics"] = logger + + try: + fn_gen = fn(*args, **kwargs) + while True: + result = next(fn_gen) + asyncio.run(logger.flush()) + yield result + except Exception as ex: + asyncio.run(logger.flush()) + if not isinstance(ex, StopIteration): + raise - if asyncio.iscoroutinefunction(fn): + return gen_wrapper + elif asyncio.iscoroutinefunction(fn): @wraps(fn) - async def wrapper(*args, **kwargs): # type: ignore + async def async_wrapper(*args, **kwargs): # type: ignore logger = create_metrics_logger() if "metrics" in inspect.signature(fn).parameters: kwargs["metrics"] = logger + try: return await fn(*args, **kwargs) - except Exception as e: - raise e finally: await logger.flush() - return wrapper - else: + return async_wrapper + else: @wraps(fn) def wrapper(*args, **kwargs): # type: ignore logger = create_metrics_logger() if "metrics" in inspect.signature(fn).parameters: kwargs["metrics"] = logger + try: return fn(*args, **kwargs) - except Exception as e: - raise e finally: - loop = asyncio.get_event_loop() - loop.run_until_complete(logger.flush()) + asyncio.run(logger.flush()) return wrapper diff --git a/tests/metric_scope/test_metric_scope.py b/tests/metric_scope/test_metric_scope.py index 20bf131..9ebd1f1 100644 --- a/tests/metric_scope/test_metric_scope.py +++ b/tests/metric_scope/test_metric_scope.py @@ -168,6 +168,43 @@ def my_handler(metrics): actual_timestamp_second = int(round(logger.context.meta["Timestamp"] / 1000)) assert expected_timestamp_second == actual_timestamp_second + +def test_sync_scope_iterates_generator(mock_logger): + expected_results = [1, 2] + + @metric_scope + def my_handler(): + yield from expected_results + raise Exception("test exception") + + actual_results = [] + with pytest.raises(Exception, match="test exception"): + for result in my_handler(): + actual_results.append(result) + + assert actual_results == expected_results 
+ assert InvocationTracker.invocations == 3 + + +@pytest.mark.asyncio +async def test_async_scope_iterates_async_generator(mock_logger): + expected_results = [1, 2] + + @metric_scope + async def my_handler(): + for item in expected_results: + yield item + await asyncio.sleep(1) + raise Exception("test exception") + + actual_results = [] + with pytest.raises(Exception, match="test exception"): + async for result in my_handler(): + actual_results.append(result) + + assert actual_results == expected_results + assert InvocationTracker.invocations == 3 + # Test helpers From 7bbcd3d137502a7f7acf4c36cd82dd81dbfa8188 Mon Sep 17 00:00:00 2001 From: seoberha Date: Wed, 5 Feb 2025 18:00:55 -0800 Subject: [PATCH 09/11] Bump version to 3.3.0 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 27c3417..7e8cc13 100644 --- a/setup.py +++ b/setup.py @@ -5,7 +5,7 @@ setup( name="aws-embedded-metrics", - version="3.2.0", + version="3.3.0", author="Amazon Web Services", author_email="jarnance@amazon.com", description="AWS Embedded Metrics Package", From a6043157977bc7b6bd964d38255bba76d107b8f9 Mon Sep 17 00:00:00 2001 From: Boxuan Hu Date: Fri, 7 Feb 2025 02:07:54 +0000 Subject: [PATCH 10/11] Replace User creds with Role creds for CWL agent --- bin/run-integ-tests.sh | 21 +++++++++++++++++++++ bin/start-agent.sh | 1 + tox.ini | 1 + 3 files changed, 23 insertions(+) diff --git a/bin/run-integ-tests.sh b/bin/run-integ-tests.sh index f8d5d98..51fb3de 100755 --- a/bin/run-integ-tests.sh +++ b/bin/run-integ-tests.sh @@ -18,6 +18,27 @@ status_code=0 # Configure and start the agent ################################### +# Check if IAM user credentials exist +if [ -z "$AWS_ACCESS_KEY_ID" ] || [ -z "$AWS_SECRET_ACCESS_KEY" ]; then + echo "No IAM user credentials found, assuming we are running on CodeBuild pipeline, falling back to IAM role..." + + # Store the AWS STS assume-role output and extract credentials + CREDS=$(aws sts assume-role \ + --role-arn $Code_Build_Execution_Role_ARN \ + --role-session-name "session-$(uuidgen)" \ + --query 'Credentials.[AccessKeyId,SecretAccessKey,SessionToken]' \ + --output text \ + --duration-seconds 3600) + + # Parse the output into separate variables + read AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_SESSION_TOKEN <<< $CREDS + + # Export the variables + export AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_SESSION_TOKEN +else + echo "Using provided IAM user credentials..." 
+fi + $rootdir/bin/start-agent.sh ################################### diff --git a/bin/start-agent.sh b/bin/start-agent.sh index 0fb09a8..e4b0c7d 100755 --- a/bin/start-agent.sh +++ b/bin/start-agent.sh @@ -22,6 +22,7 @@ cd $rootdir/tests/integ/agent echo "[AmazonCloudWatchAgent] aws_access_key_id = $AWS_ACCESS_KEY_ID aws_secret_access_key = $AWS_SECRET_ACCESS_KEY +aws_session_token = $AWS_SESSION_TOKEN " > ./.aws/credentials echo "[profile AmazonCloudWatchAgent] diff --git a/tox.ini b/tox.ini index fe51c57..a8a60fc 100644 --- a/tox.ini +++ b/tox.ini @@ -23,6 +23,7 @@ passenv = AWS_REGION AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY + AWS_SESSION_TOKEN [testenv:flake8] basepython = python3.7 From e03ffdf32cd48f46bc1e53f855a191ca533a886a Mon Sep 17 00:00:00 2001 From: ZahidMirza95 <57571948+ZahidMirza95@users.noreply.github.com> Date: Wed, 26 Feb 2025 10:55:51 -0800 Subject: [PATCH 11/11] changed cw-agent dockerhub image to use ecr --- tests/canary/agent/container-definitions.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/canary/agent/container-definitions.json b/tests/canary/agent/container-definitions.json index 3b21fe9..facf927 100644 --- a/tests/canary/agent/container-definitions.json +++ b/tests/canary/agent/container-definitions.json @@ -30,7 +30,7 @@ }, { "name": "cloudwatch-agent-python", - "image": "amazon/cloudwatch-agent:latest", + "image": "public.ecr.aws/cloudwatch-agent/cloudwatch-agent:latest", "logConfiguration": { "logDriver": "awslogs", "options": { @@ -47,4 +47,4 @@ } ] } -] \ No newline at end of file +]
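
Putting the series together: the `StorageResolution` import added to the README in [PATCH 01/11], the `set_timestamp` API from [PATCH 06/11], and the generator support for `metric_scope` from [PATCH 08/11] compose roughly as in the sketch below. This is an illustrative sketch, not code taken from the patches: the namespace, dimension, metric, and function names are made up, and the high-resolution `put_metric` call assumes the logger forwards `storage_resolution` to the context the same way `MetricsContext.put_metric` accepts it.

```py
import asyncio
from datetime import datetime

from aws_embedded_metrics import metric_scope
from aws_embedded_metrics.storage_resolution import StorageResolution


@metric_scope
async def do_work(metrics):
    # Namespace and dimensions apply to every metric emitted below.
    metrics.set_namespace("MyApplication")
    metrics.put_dimensions({"Operation": "Example"})

    # Standard-resolution metric, plus a high-resolution one
    # (StorageResolution argument assumed to mirror MetricsContext.put_metric).
    metrics.put_metric("ProcessingTime", 100, "Milliseconds")
    metrics.put_metric("Latency", 7, "Milliseconds", StorageResolution.HIGH)

    # High-cardinality context is attached to the log line as a property.
    metrics.set_property("RequestId", "422b1569-16f6-4a03-b8f0-fe3fd9b100f8")

    # New in PATCH 06: override the record timestamp; values outside the
    # CloudWatch-accepted window raise InvalidTimestampError.
    metrics.set_timestamp(datetime.now())


@metric_scope
def stream_records(records, metrics):
    # New in PATCH 08: generator functions are supported, and the logger is
    # flushed after every yield, so each record is emitted on its own EMF line.
    for record in records:
        metrics.put_metric("RecordsProcessed", 1, "Count")
        yield record


if __name__ == "__main__":
    asyncio.run(do_work())
    for _ in stream_records(["a", "b", "c"]):
        pass
```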