From 0125d469d474fc69b5eb63c373a8fe53ea65013c Mon Sep 17 00:00:00 2001
From: Tom McCarthy <thommcc@amazon.com>
Date: Wed, 10 Jun 2020 13:37:40 +0200
Subject: [PATCH] fix: default dimension creation now happens when metrics are
 serialized instead of on metrics constructor

---
 aws_lambda_powertools/metrics/base.py    | 10 ++++-
 aws_lambda_powertools/metrics/metrics.py |  9 ++--
 tests/functional/test_metrics.py         | 57 +++++++++++++++++++-----
 3 files changed, 59 insertions(+), 17 deletions(-)

diff --git a/aws_lambda_powertools/metrics/base.py b/aws_lambda_powertools/metrics/base.py
index a53f2ec2f63..d6529cf71e2 100644
--- a/aws_lambda_powertools/metrics/base.py
+++ b/aws_lambda_powertools/metrics/base.py
@@ -36,6 +36,8 @@ class MetricManager:
     ---------------------
     POWERTOOLS_METRICS_NAMESPACE : str
         metric namespace to be set for all metrics
+    POWERTOOLS_SERVICE_NAME : str
+        service name used for default dimension
 
     Raises
     ------
@@ -49,10 +51,13 @@ class MetricManager:
         When metric object fails EMF schema validation
     """
 
-    def __init__(self, metric_set: Dict[str, str] = None, dimension_set: Dict = None, namespace: str = None):
+    def __init__(
+        self, metric_set: Dict[str, str] = None, dimension_set: Dict = None, namespace: str = None, service: str = None
+    ):
         self.metric_set = metric_set if metric_set is not None else {}
         self.dimension_set = dimension_set if dimension_set is not None else {}
         self.namespace = namespace or os.getenv("POWERTOOLS_METRICS_NAMESPACE")
+        self.service = service or os.environ.get("POWERTOOLS_SERVICE_NAME")
         self._metric_units = [unit.value for unit in MetricUnit]
         self._metric_unit_options = list(MetricUnit.__members__)
 
@@ -158,6 +163,9 @@ def serialize_metric_set(self, metrics: Dict = None, dimensions: Dict = None) ->
         if dimensions is None:  # pragma: no cover
             dimensions = self.dimension_set
 
+        if self.service and not self.dimension_set.get("service"):
+            self.dimension_set["service"] = self.service
+
         logger.debug("Serializing...", {"metrics": metrics, "dimensions": dimensions})
 
         dimension_keys: List[str] = list(dimensions.keys())
diff --git a/aws_lambda_powertools/metrics/metrics.py b/aws_lambda_powertools/metrics/metrics.py
index de6be5c4c40..43cbeea2dc1 100644
--- a/aws_lambda_powertools/metrics/metrics.py
+++ b/aws_lambda_powertools/metrics/metrics.py
@@ -1,7 +1,6 @@
 import functools
 import json
 import logging
-import os
 import warnings
 from typing import Any, Callable
 
@@ -72,11 +71,11 @@ def do_something():
     def __init__(self, service: str = None, namespace: str = None):
         self.metric_set = self._metrics
         self.dimension_set = self._dimensions
-        self.service = service or os.environ.get("POWERTOOLS_SERVICE_NAME")
+        self.service = service
         self.namespace = namespace
-        if self.service:
-            self.dimension_set["service"] = self.service
-        super().__init__(metric_set=self.metric_set, dimension_set=self.dimension_set, namespace=self.namespace)
+        super().__init__(
+            metric_set=self.metric_set, dimension_set=self.dimension_set, namespace=self.namespace, service=self.service
+        )
 
     def clear_metrics(self):
         logger.debug("Clearing out existing metric set from memory")
diff --git a/tests/functional/test_metrics.py b/tests/functional/test_metrics.py
index f3b29bd7d1f..3e2ebe34fe1 100644
--- a/tests/functional/test_metrics.py
+++ b/tests/functional/test_metrics.py
@@ -466,7 +466,7 @@ def lambda_handler(evt, ctx):
 
     output = json.loads(capsys.readouterr().out.strip())
 
-    dimensions.insert(0, {"name": "service", "value": "test_service"})
+    dimensions.append({"name": "service", "value": "test_service"})
     expected = serialize_metrics(metrics=metrics, dimensions=dimensions, namespace=namespace)
 
     remove_timestamp(metrics=[output, expected])  # Timestamp will always be different
@@ -503,33 +503,31 @@ def lambda_handler(evt, ctx):
     assert expected == output
 
 
-def test_log_metrics_with_renamed_service(capsys, metrics):
+def test_log_metrics_with_renamed_service(capsys, metrics, metric):
     # GIVEN Metrics is initialized with service specified
     my_metrics = Metrics(service="test_service", namespace="test_application")
     for metric in metrics:
         my_metrics.add_metric(**metric)
 
-    # WHEN we manually call add_dimension to change the value of the service dimension
-    my_metrics.add_dimension(name="service", value="another_test_service")
-
     @my_metrics.log_metrics
     def lambda_handler(evt, ctx):
+        # WHEN we manually call add_dimension to change the value of the service dimension
+        my_metrics.add_dimension(name="service", value="another_test_service")
+        my_metrics.add_metric(**metric)
         return True
 
     lambda_handler({}, {})
 
     output = json.loads(capsys.readouterr().out.strip())
+    lambda_handler({}, {})
+    second_output = json.loads(capsys.readouterr().out.strip())
 
-    expected_dimensions = [{"name": "service", "value": "test_service"}]
-    expected = serialize_metrics(
-        metrics=metrics, dimensions=expected_dimensions, namespace={"name": "test_application"}
-    )
-
-    remove_timestamp(metrics=[output, expected])  # Timestamp will always be different
+    remove_timestamp(metrics=[output])  # Timestamp will always be different
 
     # THEN we should have no exceptions and the dimensions should be set to the name provided in the
     # add_dimension call
     assert output["service"] == "another_test_service"
+    assert second_output["service"] == "another_test_service"
 
 
 def test_log_metrics_with_namespace_overridden(capsys, metrics, dimensions):
@@ -649,3 +647,40 @@ def lambda_handler(evt, context):
         lambda_handler({}, {})
         assert len(w) == 1
         assert str(w[-1].message) == "No metrics to publish, skipping"
+
+
+def test_log_metrics_with_implicit_dimensions_called_twice(capsys, metrics):
+    # GIVEN Metrics is initialized with service specified
+    my_metrics = Metrics(service="test_service", namespace="test_application")
+
+    # WHEN we utilize log_metrics to serialize and don't explicitly add any dimensions,
+    # and the lambda function is called more than once
+    @my_metrics.log_metrics
+    def lambda_handler(evt, ctx):
+        for metric in metrics:
+            my_metrics.add_metric(**metric)
+        return True
+
+    lambda_handler({}, {})
+    output = json.loads(capsys.readouterr().out.strip())
+
+    lambda_handler({}, {})
+    second_output = json.loads(capsys.readouterr().out.strip())
+
+    expected_dimensions = [{"name": "service", "value": "test_service"}]
+    expected = serialize_metrics(
+        metrics=metrics, dimensions=expected_dimensions, namespace={"name": "test_application"}
+    )
+
+    remove_timestamp(metrics=[output, expected, second_output])  # Timestamp will always be different
+
+    # THEN we should have no exceptions, and the service dimension should be set to the
+    # service name passed to the Metrics constructor
+    assert output["service"] == "test_service"
+    assert second_output["service"] == "test_service"
+
+    for metric_record in output["_aws"]["CloudWatchMetrics"]:
+        assert ["service"] in metric_record["Dimensions"]
+
+    for metric_record in second_output["_aws"]["CloudWatchMetrics"]:
+        assert ["service"] in metric_record["Dimensions"]