
localstack / localstack, build 19844934392 (push via github, committer web-flow)
01 Dec 2025 07:55PM UTC, coverage: 86.945% (+0.1%) from 86.821%

Commit: Update ASF APIs, provider signatures, disable lambda patches (#13444)
Co-authored-by: LocalStack Bot <localstack-bot@users.noreply.github.com>
Co-authored-by: Silvio Vasiljevic <silvio.vasiljevic@gmail.com>

69707 of 80174 relevant lines covered (86.94%)
0.87 hits per line

Source File

/localstack-core/localstack/services/cloudwatch/provider_v2.py (90.42% covered)

import datetime
import json
import logging
import re
import threading
import uuid

from localstack.aws.api import CommonServiceException, RequestContext, handler
from localstack.aws.api.cloudwatch import (
    AccountId,
    ActionPrefix,
    AlarmName,
    AlarmNamePrefix,
    AlarmNames,
    AlarmTypes,
    AmazonResourceName,
    CloudwatchApi,
    ContributorId,
    DashboardBody,
    DashboardName,
    DashboardNamePrefix,
    DashboardNames,
    Datapoint,
    DeleteDashboardsOutput,
    DescribeAlarmHistoryOutput,
    DescribeAlarmsForMetricOutput,
    DescribeAlarmsOutput,
    DimensionFilters,
    Dimensions,
    EntityMetricDataList,
    ExtendedStatistic,
    ExtendedStatistics,
    GetDashboardOutput,
    GetMetricDataMaxDatapoints,
    GetMetricDataOutput,
    GetMetricStatisticsOutput,
    HistoryItemType,
    IncludeLinkedAccounts,
    InvalidParameterCombinationException,
    InvalidParameterValueException,
    LabelOptions,
    ListDashboardsOutput,
    ListMetricsOutput,
    ListTagsForResourceOutput,
    MaxRecords,
    MetricData,
    MetricDataQueries,
    MetricDataQuery,
    MetricDataResult,
    MetricDataResultMessages,
    MetricName,
    MetricStat,
    Namespace,
    NextToken,
    Period,
    PutCompositeAlarmInput,
    PutDashboardOutput,
    PutMetricAlarmInput,
    RecentlyActive,
    ResourceNotFound,
    ScanBy,
    StandardUnit,
    StateReason,
    StateReasonData,
    StateValue,
    Statistic,
    Statistics,
    StrictEntityValidation,
    TagKeyList,
    TagList,
    TagResourceOutput,
    Timestamp,
    UntagResourceOutput,
)
from localstack.aws.connect import connect_to
from localstack.http import Request
from localstack.services.cloudwatch.alarm_scheduler import AlarmScheduler
from localstack.services.cloudwatch.cloudwatch_database_helper import CloudwatchDatabase
from localstack.services.cloudwatch.models import (
    CloudWatchStore,
    LocalStackAlarm,
    LocalStackCompositeAlarm,
    LocalStackDashboard,
    LocalStackMetricAlarm,
    cloudwatch_stores,
)
from localstack.services.edge import ROUTER
from localstack.services.plugins import SERVICE_PLUGINS, ServiceLifecycleHook
from localstack.state import AssetDirectory, StateVisitor
from localstack.utils.aws import arns
from localstack.utils.aws.arns import extract_account_id_from_arn, lambda_function_name
from localstack.utils.collections import PaginatedList
from localstack.utils.json import CustomEncoder as JSONEncoder
from localstack.utils.strings import camel_to_snake_case
from localstack.utils.sync import poll_condition
from localstack.utils.threads import start_worker_thread
from localstack.utils.time import timestamp_millis

PATH_GET_RAW_METRICS = "/_aws/cloudwatch/metrics/raw"
MOTO_INITIAL_UNCHECKED_REASON = "Unchecked: Initial alarm creation"
LIST_METRICS_MAX_RESULTS = 500
# If the values in these fields are not the same, their values are added when generating labels
LABEL_DIFFERENTIATORS = ["Stat", "Period"]
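# Illustrative example (assumed, not part of the original source): if one GetMetricData
# query uses Stat "Average" and another uses Stat "Sum", the "Stat" values differ, so the
# stat is appended to each generated label, e.g. "CPUUtilization Average" vs. "CPUUtilization Sum".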
HISTORY_VERSION = "1.0"

LOG = logging.getLogger(__name__)
_STORE_LOCK = threading.RLock()
AWS_MAX_DATAPOINTS_ACCEPTED: int = 1440


class ValidationException(CommonServiceException):
    def __init__(self, message: str):
        super().__init__("ValidationError", message, 400, True)


def _validate_parameters_for_put_metric_data(metric_data: MetricData) -> None:
    for index, metric_item in enumerate(metric_data):
        indexplusone = index + 1
        if metric_item.get("Value") and metric_item.get("Values"):
            raise InvalidParameterCombinationException(
                f"The parameters MetricData.member.{indexplusone}.Value and MetricData.member.{indexplusone}.Values are mutually exclusive and you have specified both."
            )

        if metric_item.get("StatisticValues") and metric_item.get("Value"):
            raise InvalidParameterCombinationException(
                f"The parameters MetricData.member.{indexplusone}.Value and MetricData.member.{indexplusone}.StatisticValues are mutually exclusive and you have specified both."
            )

        if metric_item.get("Values") and metric_item.get("Counts"):
            values = metric_item.get("Values")
            counts = metric_item.get("Counts")
            if len(values) != len(counts):
                raise InvalidParameterValueException(
                    f"The parameters MetricData.member.{indexplusone}.Values and MetricData.member.{indexplusone}.Counts must be of the same size."
                )


class CloudwatchProvider(CloudwatchApi, ServiceLifecycleHook):
    """
    Cloudwatch provider.

    LIMITATIONS:
        - simplified composite alarm rule evaluation:
            - only OR operator is supported
            - only ALARM expression is supported
            - only metric alarms can be included in the rule and they should be referenced by ARN only
    """

    def __init__(self):
        self.alarm_scheduler: AlarmScheduler = None
        self.store = None
        self.cloudwatch_database = CloudwatchDatabase()

    @staticmethod
    def get_store(account_id: str, region: str) -> CloudWatchStore:
        return cloudwatch_stores[account_id][region]

    def accept_state_visitor(self, visitor: StateVisitor):
        visitor.visit(cloudwatch_stores)
        visitor.visit(AssetDirectory(self.service, CloudwatchDatabase.CLOUDWATCH_DATA_ROOT))

    def on_after_init(self):
        ROUTER.add(PATH_GET_RAW_METRICS, self.get_raw_metrics)

    def on_before_start(self):
        self.start_alarm_scheduler()

    def on_before_state_reset(self):
        self.shutdown_alarm_scheduler()
        self.cloudwatch_database.clear_tables()

    def on_after_state_reset(self):
        self.cloudwatch_database = CloudwatchDatabase()
        self.start_alarm_scheduler()

    def on_before_state_load(self):
        self.shutdown_alarm_scheduler()

    def on_after_state_load(self):
        self.start_alarm_scheduler()

        def restart_alarms(*args):
            poll_condition(lambda: SERVICE_PLUGINS.is_running("cloudwatch"))
            self.alarm_scheduler.restart_existing_alarms()

        start_worker_thread(restart_alarms)

    def on_before_stop(self):
        self.shutdown_alarm_scheduler()

    def start_alarm_scheduler(self):
        if not self.alarm_scheduler:
            LOG.debug("starting cloudwatch scheduler")
            self.alarm_scheduler = AlarmScheduler()

    def shutdown_alarm_scheduler(self):
        if self.alarm_scheduler:
            LOG.debug("stopping cloudwatch scheduler")
            self.alarm_scheduler.shutdown_scheduler()
            self.alarm_scheduler = None

    def delete_alarms(self, context: RequestContext, alarm_names: AlarmNames, **kwargs) -> None:
        """
        Delete alarms.
        """
        with _STORE_LOCK:
            for alarm_name in alarm_names:
                alarm_arn = arns.cloudwatch_alarm_arn(
                    alarm_name, account_id=context.account_id, region_name=context.region
                )  # obtain alarm ARN from alarm name
                self.alarm_scheduler.delete_scheduler_for_alarm(alarm_arn)
                store = self.get_store(context.account_id, context.region)
                store.alarms.pop(alarm_arn, None)

    def put_metric_data(
        self,
        context: RequestContext,
        namespace: Namespace,
        metric_data: MetricData = None,
        entity_metric_data: EntityMetricDataList = None,
        strict_entity_validation: StrictEntityValidation = None,
        **kwargs,
    ) -> None:
        # TODO add support for entity_metric_data and strict_entity_validation
        _validate_parameters_for_put_metric_data(metric_data)

        self.cloudwatch_database.add_metric_data(
            context.account_id, context.region, namespace, metric_data
        )

    def get_metric_data(
        self,
        context: RequestContext,
        metric_data_queries: MetricDataQueries,
        start_time: Timestamp,
        end_time: Timestamp,
        next_token: NextToken = None,
        scan_by: ScanBy = None,
        max_datapoints: GetMetricDataMaxDatapoints = None,
        label_options: LabelOptions = None,
        **kwargs,
    ) -> GetMetricDataOutput:
        results: list[MetricDataResult] = []
        limit = max_datapoints or 100_800
        messages: MetricDataResultMessages = []
        nxt: str | None = None
        label_additions = []

        for diff in LABEL_DIFFERENTIATORS:
            non_unique = []
            for query in metric_data_queries:
                non_unique.append(query["MetricStat"][diff])
            if len(set(non_unique)) > 1:
                label_additions.append(diff)

        for query in metric_data_queries:
            query_result = self.cloudwatch_database.get_metric_data_stat(
                account_id=context.account_id,
                region=context.region,
                query=query,
                start_time=start_time,
                end_time=end_time,
                scan_by=scan_by,
            )
            if query_result.get("messages"):
                messages.extend(query_result.get("messages"))

            label = query.get("Label") or f"{query['MetricStat']['Metric']['MetricName']}"
            # TODO: does this happen even if a label is set in the query?
            for label_addition in label_additions:
                label = f"{label} {query['MetricStat'][label_addition]}"

            timestamps = query_result.get("timestamps", {})
            values = query_result.get("values", {})

            # Paginate
            timestamp_value_dicts = [
                {
                    "Timestamp": timestamp,
                    "Value": float(value),
                }
                for timestamp, value in zip(timestamps, values, strict=False)
            ]

            pagination = PaginatedList(timestamp_value_dicts)
            timestamp_page, nxt = pagination.get_page(
                lambda item: str(item.get("Timestamp")),
                next_token=next_token,
                page_size=limit,
            )

            timestamps = [item.get("Timestamp") for item in timestamp_page]
            values = [item.get("Value") for item in timestamp_page]

            metric_data_result = {
                "Id": query.get("Id"),
                "Label": label,
                "StatusCode": "Complete",
                "Timestamps": timestamps,
                "Values": values,
            }
            results.append(MetricDataResult(**metric_data_result))

        return GetMetricDataOutput(MetricDataResults=results, NextToken=nxt, Messages=messages)

    def set_alarm_state(
        self,
        context: RequestContext,
        alarm_name: AlarmName,
        state_value: StateValue,
        state_reason: StateReason,
        state_reason_data: StateReasonData = None,
        **kwargs,
    ) -> None:
        if state_value not in ("OK", "ALARM", "INSUFFICIENT_DATA"):
            raise ValidationException(
                f"1 validation error detected: Value '{state_value}' at 'stateValue' failed to satisfy constraint: Member must satisfy enum value set: [INSUFFICIENT_DATA, ALARM, OK]"
            )

        try:
            if state_reason_data:
                state_reason_data = json.loads(state_reason_data)
        except ValueError:
            raise InvalidParameterValueException(
                "TODO: check right error message: Json was not correctly formatted"
            )
        with _STORE_LOCK:
            store = self.get_store(context.account_id, context.region)
            alarm = store.alarms.get(
                arns.cloudwatch_alarm_arn(
                    alarm_name, account_id=context.account_id, region_name=context.region
                )
            )
            if not alarm:
                raise ResourceNotFound()

            old_state = alarm.alarm["StateValue"]

            old_state_reason = alarm.alarm["StateReason"]
            old_state_update_timestamp = alarm.alarm["StateUpdatedTimestamp"]

            if old_state == state_value:
                return

            alarm.alarm["StateTransitionedTimestamp"] = datetime.datetime.now(datetime.UTC)
            # update startDate (=last ALARM date) - should only update when a new alarm is triggered
            # the date is only updated if we have a reason-data, which is set by an alarm
            if state_reason_data:
                state_reason_data["startDate"] = state_reason_data.get("queryDate")

            self._update_state(
                context,
                alarm,
                state_value,
                state_reason,
                state_reason_data,
            )

            self._evaluate_composite_alarms(context, alarm)

            if not alarm.alarm["ActionsEnabled"]:
                return
            if state_value == "OK":
                actions = alarm.alarm["OKActions"]
            elif state_value == "ALARM":
                actions = alarm.alarm["AlarmActions"]
            else:
                actions = alarm.alarm["InsufficientDataActions"]
            for action in actions:
                data = arns.parse_arn(action)
                # test for sns - can this be done in a more generic way?
                if data["service"] == "sns":
                    service = connect_to(
                        region_name=data["region"], aws_access_key_id=data["account"]
                    ).sns
                    subject = f"""{state_value}: "{alarm_name}" in {context.region}"""
                    message = create_message_response_update_state_sns(alarm, old_state)
                    service.publish(TopicArn=action, Subject=subject, Message=message)
                elif data["service"] == "lambda":
                    service = connect_to(
                        region_name=data["region"], aws_access_key_id=data["account"]
                    ).lambda_
                    message = create_message_response_update_state_lambda(
                        alarm, old_state, old_state_reason, old_state_update_timestamp
                    )
                    service.invoke(FunctionName=lambda_function_name(action), Payload=message)
                else:
                    # TODO: support other actions
                    LOG.warning(
                        "Action for service %s not implemented, action '%s' will not be triggered.",
                        data["service"],
                        action,
                    )

    def get_raw_metrics(self, request: Request):
        """This feature was introduced with https://github.com/localstack/localstack/pull/3535.
        It previously required a valid AWS header so that the account-id/region could be extracted;
        with the new implementation, we return all data and add the account-id/region as additional attributes.

        # TODO endpoint should be refactored or deprecated at some point
        #   - result should be paginated
        #   - include aggregated metrics (but we would also need to change/adapt the shape of "metrics" that we return)
        :returns: json {"metrics": [{"ns": "namespace", "n": "metric_name", "v": value, "t": timestamp,
        "d": [<dimensions-key-pair-values>], "account": account, "region": region}]}
        """
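        # Illustrative usage (assuming the default LocalStack gateway on localhost:4566):
        #   curl http://localhost:4566/_aws/cloudwatch/metrics/raw
        # which returns the JSON document described in the docstring above.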
        return {"metrics": self.cloudwatch_database.get_all_metric_data() or []}
1✔
407

408
    @handler("PutMetricAlarm", expand=False)
1✔
409
    def put_metric_alarm(self, context: RequestContext, request: PutMetricAlarmInput) -> None:
1✔
410
        # missing will be the default, when not set (but it will not explicitly be set)
411
        if request.get("TreatMissingData", "missing") not in [
1✔
412
            "breaching",
413
            "notBreaching",
414
            "ignore",
415
            "missing",
416
        ]:
417
            raise ValidationException(
×
418
                f"The value {request['TreatMissingData']} is not supported for TreatMissingData parameter. Supported values are [breaching, notBreaching, ignore, missing]."
419
            )
420
            # do some sanity checks:
421
        if request.get("Period"):
1✔
422
            # Valid values are 10, 30, and any multiple of 60.
423
            value = request.get("Period")
1✔
424
            if value not in (10, 30):
1✔
425
                if value % 60 != 0:
1✔
426
                    raise ValidationException("Period must be 10, 30 or a multiple of 60")
×
427
        if request.get("Statistic"):
1✔
428
            if request.get("Statistic") not in [
1✔
429
                "SampleCount",
430
                "Average",
431
                "Sum",
432
                "Minimum",
433
                "Maximum",
434
            ]:
435
                raise ValidationException(
×
436
                    f"Value '{request.get('Statistic')}' at 'statistic' failed to satisfy constraint: Member must satisfy enum value set: [Maximum, SampleCount, Sum, Minimum, Average]"
437
                )
438

439
        extended_statistic = request.get("ExtendedStatistic")
1✔
440
        if extended_statistic and not extended_statistic.startswith("p"):
1✔
441
            raise InvalidParameterValueException(
×
442
                f"The value {extended_statistic} for parameter ExtendedStatistic is not supported."
443
            )
444
        evaluate_low_sample_count_percentile = request.get("EvaluateLowSampleCountPercentile")
1✔
445
        if evaluate_low_sample_count_percentile and evaluate_low_sample_count_percentile not in (
1✔
446
            "evaluate",
447
            "ignore",
448
        ):
449
            raise ValidationException(
×
450
                f"Option {evaluate_low_sample_count_percentile} is not supported. "
451
                "Supported options for parameter EvaluateLowSampleCountPercentile are evaluate and ignore."
452
            )
453
        with _STORE_LOCK:
1✔
454
            store = self.get_store(context.account_id, context.region)
1✔
455
            metric_alarm = LocalStackMetricAlarm(context.account_id, context.region, {**request})
1✔
456
            alarm_arn = metric_alarm.alarm["AlarmArn"]
1✔
457
            store.alarms[alarm_arn] = metric_alarm
1✔
458
            self.alarm_scheduler.schedule_metric_alarm(alarm_arn)
1✔
459

460
    @handler("PutCompositeAlarm", expand=False)
1✔
461
    def put_composite_alarm(self, context: RequestContext, request: PutCompositeAlarmInput) -> None:
1✔
462
        with _STORE_LOCK:
1✔
463
            store = self.get_store(context.account_id, context.region)
1✔
464
            composite_alarm = LocalStackCompositeAlarm(
1✔
465
                context.account_id, context.region, {**request}
466
            )
467

468
            alarm_rule = composite_alarm.alarm["AlarmRule"]
1✔
469
            rule_expression_validation_result = self._validate_alarm_rule_expression(alarm_rule)
1✔
470
            [LOG.warning(w) for w in rule_expression_validation_result]
1✔
471

472
            alarm_arn = composite_alarm.alarm["AlarmArn"]
1✔
473
            store.alarms[alarm_arn] = composite_alarm
1✔
474

475
    def describe_alarms(
1✔
476
        self,
477
        context: RequestContext,
478
        alarm_names: AlarmNames = None,
479
        alarm_name_prefix: AlarmNamePrefix = None,
480
        alarm_types: AlarmTypes = None,
481
        children_of_alarm_name: AlarmName = None,
482
        parents_of_alarm_name: AlarmName = None,
483
        state_value: StateValue = None,
484
        action_prefix: ActionPrefix = None,
485
        max_records: MaxRecords = None,
486
        next_token: NextToken = None,
487
        **kwargs,
488
    ) -> DescribeAlarmsOutput:
489
        store = self.get_store(context.account_id, context.region)
1✔
490
        alarms = list(store.alarms.values())
1✔
491
        if action_prefix:
1✔
492
            alarms = [a.alarm for a in alarms if a.alarm["AlarmAction"].startswith(action_prefix)]
×
493
        elif alarm_name_prefix:
1✔
494
            alarms = [a.alarm for a in alarms if a.alarm["AlarmName"].startswith(alarm_name_prefix)]
×
495
        elif alarm_names:
1✔
496
            alarms = [a.alarm for a in alarms if a.alarm["AlarmName"] in alarm_names]
1✔
497
        elif state_value:
1✔
498
            alarms = [a.alarm for a in alarms if a.alarm["StateValue"] == state_value]
×
499
        else:
500
            alarms = [a.alarm for a in list(store.alarms.values())]
1✔
501

502
        # TODO: Pagination
503
        metric_alarms = [a for a in alarms if a.get("AlarmRule") is None]
1✔
504
        composite_alarms = [a for a in alarms if a.get("AlarmRule") is not None]
1✔
505
        return DescribeAlarmsOutput(CompositeAlarms=composite_alarms, MetricAlarms=metric_alarms)
1✔
506

507
    def describe_alarms_for_metric(
1✔
508
        self,
509
        context: RequestContext,
510
        metric_name: MetricName,
511
        namespace: Namespace,
512
        statistic: Statistic = None,
513
        extended_statistic: ExtendedStatistic = None,
514
        dimensions: Dimensions = None,
515
        period: Period = None,
516
        unit: StandardUnit = None,
517
        **kwargs,
518
    ) -> DescribeAlarmsForMetricOutput:
519
        store = self.get_store(context.account_id, context.region)
1✔
520
        alarms = [
1✔
521
            a.alarm
522
            for a in store.alarms.values()
523
            if isinstance(a, LocalStackMetricAlarm)
524
            and a.alarm.get("MetricName") == metric_name
525
            and a.alarm.get("Namespace") == namespace
526
        ]
527

528
        if statistic:
1✔
529
            alarms = [a for a in alarms if a.get("Statistic") == statistic]
1✔
530
        if dimensions:
1✔
531
            alarms = [a for a in alarms if a.get("Dimensions") == dimensions]
1✔
532
        if period:
1✔
533
            alarms = [a for a in alarms if a.get("Period") == period]
×
534
        if unit:
1✔
535
            alarms = [a for a in alarms if a.get("Unit") == unit]
×
536
        return DescribeAlarmsForMetricOutput(MetricAlarms=alarms)
1✔
537

538
    def list_tags_for_resource(
1✔
539
        self, context: RequestContext, resource_arn: AmazonResourceName, **kwargs
540
    ) -> ListTagsForResourceOutput:
541
        store = self.get_store(context.account_id, context.region)
1✔
542
        tags = store.TAGS.list_tags_for_resource(resource_arn)
1✔
543
        return ListTagsForResourceOutput(Tags=tags.get("Tags", []))
1✔
544

545
    def untag_resource(
1✔
546
        self,
547
        context: RequestContext,
548
        resource_arn: AmazonResourceName,
549
        tag_keys: TagKeyList,
550
        **kwargs,
551
    ) -> UntagResourceOutput:
552
        store = self.get_store(context.account_id, context.region)
1✔
553
        store.TAGS.untag_resource(resource_arn, tag_keys)
1✔
554
        return UntagResourceOutput()
1✔
555

556
    def tag_resource(
1✔
557
        self, context: RequestContext, resource_arn: AmazonResourceName, tags: TagList, **kwargs
558
    ) -> TagResourceOutput:
559
        store = self.get_store(context.account_id, context.region)
1✔
560
        store.TAGS.tag_resource(resource_arn, tags)
1✔
561
        return TagResourceOutput()
1✔
562

563
    def put_dashboard(
1✔
564
        self,
565
        context: RequestContext,
566
        dashboard_name: DashboardName,
567
        dashboard_body: DashboardBody,
568
        **kwargs,
569
    ) -> PutDashboardOutput:
570
        pattern = r"^[a-zA-Z0-9_-]+$"
1✔
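        # Illustrative (assumed) examples: "Prod-Dashboard_1" matches the pattern, while
        # names containing spaces, dots or slashes are rejected below.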
        if not re.match(pattern, dashboard_name):
            raise InvalidParameterValueException(
                "The value for field DashboardName contains invalid characters. "
                "It can only contain alphanumerics, dash (-) and underscore (_).\n"
            )

        store = self.get_store(context.account_id, context.region)
        store.dashboards[dashboard_name] = LocalStackDashboard(
            context.account_id, context.region, dashboard_name, dashboard_body
        )
        return PutDashboardOutput()

    def get_dashboard(
        self, context: RequestContext, dashboard_name: DashboardName, **kwargs
    ) -> GetDashboardOutput:
        store = self.get_store(context.account_id, context.region)
        dashboard = store.dashboards.get(dashboard_name)
        if not dashboard:
            raise InvalidParameterValueException(f"Dashboard {dashboard_name} does not exist.")

        return GetDashboardOutput(
            DashboardName=dashboard_name,
            DashboardBody=dashboard.dashboard_body,
            DashboardArn=dashboard.dashboard_arn,
        )

    def delete_dashboards(
        self, context: RequestContext, dashboard_names: DashboardNames, **kwargs
    ) -> DeleteDashboardsOutput:
        store = self.get_store(context.account_id, context.region)
        for dashboard_name in dashboard_names:
            store.dashboards.pop(dashboard_name, None)
        return DeleteDashboardsOutput()

    def list_dashboards(
        self,
        context: RequestContext,
        dashboard_name_prefix: DashboardNamePrefix = None,
        next_token: NextToken = None,
        **kwargs,
    ) -> ListDashboardsOutput:
        store = self.get_store(context.account_id, context.region)
        dashboard_names = list(store.dashboards.keys())
        dashboard_names = [
            name for name in dashboard_names if name.startswith(dashboard_name_prefix or "")
        ]

        entries = [
            {
                "DashboardName": name,
                "DashboardArn": store.dashboards[name].dashboard_arn,
                "LastModified": store.dashboards[name].last_modified,
                "Size": store.dashboards[name].size,
            }
            for name in dashboard_names
        ]
        return ListDashboardsOutput(
            DashboardEntries=entries,
        )

    def list_metrics(
        self,
        context: RequestContext,
        namespace: Namespace = None,
        metric_name: MetricName = None,
        dimensions: DimensionFilters = None,
        next_token: NextToken = None,
        recently_active: RecentlyActive = None,
        include_linked_accounts: IncludeLinkedAccounts = None,
        owning_account: AccountId = None,
        **kwargs,
    ) -> ListMetricsOutput:
        result = self.cloudwatch_database.list_metrics(
            context.account_id,
            context.region,
            namespace,
            metric_name,
            dimensions or [],
        )

        metrics = [
            {
                "Namespace": metric.get("namespace"),
                "MetricName": metric.get("metric_name"),
                "Dimensions": metric.get("dimensions"),
            }
            for metric in result.get("metrics", [])
        ]
        aliases_list = PaginatedList(metrics)
        page, nxt = aliases_list.get_page(
            lambda metric: f"{metric.get('Namespace')}-{metric.get('MetricName')}-{metric.get('Dimensions')}",
            next_token=next_token,
            page_size=LIST_METRICS_MAX_RESULTS,
        )
        return ListMetricsOutput(Metrics=page, NextToken=nxt)

    def get_metric_statistics(
        self,
        context: RequestContext,
        namespace: Namespace,
        metric_name: MetricName,
        start_time: Timestamp,
        end_time: Timestamp,
        period: Period,
        dimensions: Dimensions = None,
        statistics: Statistics = None,
        extended_statistics: ExtendedStatistics = None,
        unit: StandardUnit = None,
        **kwargs,
    ) -> GetMetricStatisticsOutput:
        start_time_unix = int(start_time.timestamp())
        end_time_unix = int(end_time.timestamp())

        if not start_time_unix < end_time_unix:
            raise InvalidParameterValueException(
                "The parameter StartTime must be less than the parameter EndTime."
            )

        expected_datapoints = (end_time_unix - start_time_unix) / period
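        # For reference (assumed example): a 24-hour window (86,400 s) at a 60 s period
        # yields 1,440 expected datapoints, exactly AWS_MAX_DATAPOINTS_ACCEPTED; a finer
        # period or a longer range would trip the check below.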

        if expected_datapoints > AWS_MAX_DATAPOINTS_ACCEPTED:
            raise InvalidParameterCombinationException(
                f"You have requested up to {int(expected_datapoints)} datapoints, which exceeds the limit of {AWS_MAX_DATAPOINTS_ACCEPTED}. "
                f"You may reduce the datapoints requested by increasing Period, or decreasing the time range."
            )

        stat_datapoints = {}

        units = (
            [unit]
            if unit
            else self.cloudwatch_database.get_units_for_metric_data_stat(
                account_id=context.account_id,
                region=context.region,
                start_time=start_time,
                end_time=end_time,
                metric_name=metric_name,
                namespace=namespace,
            )
        )

        for stat in statistics:
            for selected_unit in units:
                query_result = self.cloudwatch_database.get_metric_data_stat(
                    account_id=context.account_id,
                    region=context.region,
                    start_time=start_time,
                    end_time=end_time,
                    scan_by="TimestampDescending",
                    query=MetricDataQuery(
                        MetricStat=MetricStat(
                            Metric={
                                "MetricName": metric_name,
                                "Namespace": namespace,
                                "Dimensions": dimensions or [],
                            },
                            Period=period,
                            Stat=stat,
                            Unit=selected_unit,
                        )
                    ),
                )

                timestamps = query_result.get("timestamps", [])
                values = query_result.get("values", [])
                for i, timestamp in enumerate(timestamps):
                    stat_datapoints.setdefault(selected_unit, {})
                    stat_datapoints[selected_unit].setdefault(timestamp, {})
                    stat_datapoints[selected_unit][timestamp][stat] = float(values[i])
                    stat_datapoints[selected_unit][timestamp]["Unit"] = selected_unit

        datapoints: list[Datapoint] = []
        for selected_unit, results in stat_datapoints.items():
            for timestamp, stats in results.items():
                datapoints.append(
                    Datapoint(
                        Timestamp=timestamp,
                        SampleCount=stats.get("SampleCount"),
                        Average=stats.get("Average"),
                        Sum=stats.get("Sum"),
                        Minimum=stats.get("Minimum"),
                        Maximum=stats.get("Maximum"),
                        Unit="None" if selected_unit == "NULL_VALUE" else selected_unit,
                    )
                )

        return GetMetricStatisticsOutput(Datapoints=datapoints, Label=metric_name)

    def _update_state(
        self,
        context: RequestContext,
        alarm: LocalStackAlarm,
        state_value: str,
        state_reason: str,
        state_reason_data: dict = None,
    ):
        old_state = alarm.alarm["StateValue"]
        old_state_reason = alarm.alarm["StateReason"]
        store = self.get_store(context.account_id, context.region)
        current_time = datetime.datetime.now()
        # version is not present in state reason data for composite alarm, hence the check
        if state_reason_data and isinstance(alarm, LocalStackMetricAlarm):
            state_reason_data["version"] = HISTORY_VERSION
        history_data = {
            "version": HISTORY_VERSION,
            "oldState": {"stateValue": old_state, "stateReason": old_state_reason},
            "newState": {
                "stateValue": state_value,
                "stateReason": state_reason,
                "stateReasonData": state_reason_data,
            },
        }
        store.histories.append(
            {
                "Timestamp": timestamp_millis(alarm.alarm["StateUpdatedTimestamp"]),
                "HistoryItemType": HistoryItemType.StateUpdate,
                "AlarmName": alarm.alarm["AlarmName"],
                "HistoryData": json.dumps(history_data),
                "HistorySummary": f"Alarm updated from {old_state} to {state_value}",
                "AlarmType": "MetricAlarm"
                if isinstance(alarm, LocalStackMetricAlarm)
                else "CompositeAlarm",
            }
        )
        alarm.alarm["StateValue"] = state_value
        alarm.alarm["StateReason"] = state_reason
        if state_reason_data:
            alarm.alarm["StateReasonData"] = json.dumps(state_reason_data)
        alarm.alarm["StateUpdatedTimestamp"] = current_time

    def disable_alarm_actions(
        self, context: RequestContext, alarm_names: AlarmNames, **kwargs
    ) -> None:
        self._set_alarm_actions(context, alarm_names, enabled=False)

    def enable_alarm_actions(
        self, context: RequestContext, alarm_names: AlarmNames, **kwargs
    ) -> None:
        self._set_alarm_actions(context, alarm_names, enabled=True)

    def _set_alarm_actions(self, context, alarm_names, enabled):
        store = self.get_store(context.account_id, context.region)
        for name in alarm_names:
            alarm_arn = arns.cloudwatch_alarm_arn(
                name, account_id=context.account_id, region_name=context.region
            )
            alarm = store.alarms.get(alarm_arn)
            if alarm:
                alarm.alarm["ActionsEnabled"] = enabled

    def describe_alarm_history(
        self,
        context: RequestContext,
        alarm_name: AlarmName | None = None,
        alarm_contributor_id: ContributorId | None = None,
        alarm_types: AlarmTypes | None = None,
        history_item_type: HistoryItemType | None = None,
        start_date: Timestamp | None = None,
        end_date: Timestamp | None = None,
        max_records: MaxRecords | None = None,
        next_token: NextToken | None = None,
        scan_by: ScanBy | None = None,
        **kwargs,
    ) -> DescribeAlarmHistoryOutput:
        store = self.get_store(context.account_id, context.region)
        history = store.histories
        if alarm_name:
            history = [h for h in history if h["AlarmName"] == alarm_name]

        def _get_timestamp(input: dict):
            if timestamp_string := input.get("Timestamp"):
                return datetime.datetime.fromisoformat(timestamp_string)
            return None

        if start_date:
            history = [h for h in history if (date := _get_timestamp(h)) and date >= start_date]
        if end_date:
            history = [h for h in history if (date := _get_timestamp(h)) and date <= end_date]
        return DescribeAlarmHistoryOutput(AlarmHistoryItems=history)

    def _evaluate_composite_alarms(self, context: RequestContext, triggering_alarm):
        # TODO either pass store as a parameter or acquire RLock (with _STORE_LOCK:)
        # everything works ok now but better ensure protection of critical section in front of future changes
        store = self.get_store(context.account_id, context.region)
        alarms = list(store.alarms.values())
        composite_alarms = [a for a in alarms if isinstance(a, LocalStackCompositeAlarm)]
        for composite_alarm in composite_alarms:
            self._evaluate_composite_alarm(context, composite_alarm, triggering_alarm)

    def _evaluate_composite_alarm(self, context, composite_alarm, triggering_alarm):
        store = self.get_store(context.account_id, context.region)
        alarm_rule = composite_alarm.alarm["AlarmRule"]
        rule_expression_validation = self._validate_alarm_rule_expression(alarm_rule)
        if rule_expression_validation:
            LOG.warning(
                "Alarm rule contains unsupported expressions and will not be evaluated: %s",
                rule_expression_validation,
            )
            return
        new_state_value = StateValue.OK
        # assuming that a rule consists only of ALARM evaluations of metric alarms, with OR logic applied
        for metric_alarm_arn in self._get_alarm_arns(alarm_rule):
            metric_alarm = store.alarms.get(metric_alarm_arn)
            if not metric_alarm:
                LOG.warning(
                    "Alarm rule won't be evaluated as there is no alarm with ARN %s",
                    metric_alarm_arn,
                )
                return
            if metric_alarm.alarm["StateValue"] == StateValue.ALARM:
                triggering_alarm = metric_alarm
                new_state_value = StateValue.ALARM
                break
        old_state_value = composite_alarm.alarm["StateValue"]
        if old_state_value == new_state_value:
            return
        triggering_alarm_arn = triggering_alarm.alarm.get("AlarmArn")
        triggering_alarm_state = triggering_alarm.alarm.get("StateValue")
        triggering_alarm_state_change_timestamp = triggering_alarm.alarm.get(
            "StateTransitionedTimestamp"
        )
        state_reason_formatted_timestamp = triggering_alarm_state_change_timestamp.strftime(
            "%A %d %B, %Y %H:%M:%S %Z"
        )
        state_reason = (
            f"{triggering_alarm_arn} "
            f"transitioned to {triggering_alarm_state} "
            f"at {state_reason_formatted_timestamp}"
        )
        state_reason_data = {
            "triggeringAlarms": [
                {
                    "arn": triggering_alarm_arn,
                    "state": {
                        "value": triggering_alarm_state,
                        "timestamp": timestamp_millis(triggering_alarm_state_change_timestamp),
                    },
                }
            ]
        }
        self._update_state(
            context, composite_alarm, new_state_value, state_reason, state_reason_data
        )
        if composite_alarm.alarm["ActionsEnabled"]:
            self._run_composite_alarm_actions(
                context, composite_alarm, old_state_value, triggering_alarm
            )

    def _validate_alarm_rule_expression(self, alarm_rule):
        validation_result = []
        alarms_conditions = [alarm.strip() for alarm in alarm_rule.split("OR")]
        for alarm_condition in alarms_conditions:
            if not alarm_condition.startswith("ALARM"):
                validation_result.append(
                    f"Unsupported expression in alarm rule condition {alarm_condition}: Only ALARM expression is supported by Localstack as of now"
                )
        return validation_result

    def _get_alarm_arns(self, composite_alarm_rule):
        # regexp for everything within (" ")
1✔
932

933
    def _run_composite_alarm_actions(
1✔
934
        self, context, composite_alarm, old_state_value, triggering_alarm
935
    ):
936
        new_state_value = composite_alarm.alarm["StateValue"]
1✔
937
        if new_state_value == StateValue.OK:
1✔
938
            actions = composite_alarm.alarm["OKActions"]
1✔
939
        elif new_state_value == StateValue.ALARM:
1✔
940
            actions = composite_alarm.alarm["AlarmActions"]
1✔
941
        else:
942
            actions = composite_alarm.alarm["InsufficientDataActions"]
×
943
        for action in actions:
1✔
944
            data = arns.parse_arn(action)
1✔
945
            if data["service"] == "sns":
1✔
946
                service = connect_to(
1✔
947
                    region_name=data["region"], aws_access_key_id=data["account"]
948
                ).sns
949
                subject = f"""{new_state_value}: "{composite_alarm.alarm["AlarmName"]}" in {context.region}"""
1✔
950
                message = create_message_response_update_composite_alarm_state_sns(
1✔
951
                    composite_alarm, triggering_alarm, old_state_value
952
                )
953
                service.publish(TopicArn=action, Subject=subject, Message=message)
1✔
954
            else:
955
                # TODO: support other actions
956
                LOG.warning(
×
957
                    "Action for service %s not implemented, action '%s' will not be triggered.",
958
                    data["service"],
959
                    action,
960
                )
961

962

963
def create_metric_data_query_from_alarm(alarm: LocalStackMetricAlarm):
1✔
964
    # TODO may need to be adapted for other use cases
965
    #  verified return value with a snapshot test
966
    return [
1✔
967
        {
968
            "id": str(uuid.uuid4()),
969
            "metricStat": {
970
                "metric": {
971
                    "namespace": alarm.alarm["Namespace"],
972
                    "name": alarm.alarm["MetricName"],
973
                    "dimensions": alarm.alarm.get("Dimensions") or {},
974
                },
975
                "period": int(alarm.alarm["Period"]),
976
                "stat": alarm.alarm["Statistic"],
977
            },
978
            "returnData": True,
979
        }
980
    ]
981

982

983
def create_message_response_update_state_lambda(
1✔
984
    alarm: LocalStackMetricAlarm, old_state, old_state_reason, old_state_timestamp
985
):
986
    _alarm = alarm.alarm
1✔
987
    response = {
1✔
988
        "accountId": extract_account_id_from_arn(_alarm["AlarmArn"]),
989
        "alarmArn": _alarm["AlarmArn"],
990
        "alarmData": {
991
            "alarmName": _alarm["AlarmName"],
992
            "state": {
993
                "value": _alarm["StateValue"],
994
                "reason": _alarm["StateReason"],
995
                "timestamp": _alarm["StateUpdatedTimestamp"],
996
            },
997
            "previousState": {
998
                "value": old_state,
999
                "reason": old_state_reason,
1000
                "timestamp": old_state_timestamp,
1001
            },
1002
            "configuration": {
1003
                "description": _alarm.get("AlarmDescription", ""),
1004
                "metrics": _alarm.get(
1005
                    "Metrics", create_metric_data_query_from_alarm(alarm)
1006
                ),  # TODO: add test with metric_data_queries
1007
            },
1008
        },
1009
        "time": _alarm["StateUpdatedTimestamp"],
1010
        "region": alarm.region,
1011
        "source": "aws.cloudwatch",
1012
    }
1013
    return json.dumps(response, cls=JSONEncoder)
1✔
1014

1015

1016
def create_message_response_update_state_sns(alarm: LocalStackMetricAlarm, old_state: StateValue):
1✔
1017
    _alarm = alarm.alarm
1✔
1018
    response = {
1✔
1019
        "AWSAccountId": alarm.account_id,
1020
        "OldStateValue": old_state,
1021
        "AlarmName": _alarm["AlarmName"],
1022
        "AlarmDescription": _alarm.get("AlarmDescription"),
1023
        "AlarmConfigurationUpdatedTimestamp": _alarm["AlarmConfigurationUpdatedTimestamp"],
1024
        "NewStateValue": _alarm["StateValue"],
1025
        "NewStateReason": _alarm["StateReason"],
1026
        "StateChangeTime": _alarm["StateUpdatedTimestamp"],
1027
        # the long-name for 'region' should be used - as we don't have it, we use the short name
1028
        # which needs to be slightly changed to make snapshot tests work
1029
        "Region": alarm.region.replace("-", " ").capitalize(),
1030
        "AlarmArn": _alarm["AlarmArn"],
1031
        "OKActions": _alarm.get("OKActions", []),
1032
        "AlarmActions": _alarm.get("AlarmActions", []),
1033
        "InsufficientDataActions": _alarm.get("InsufficientDataActions", []),
1034
    }
1035

1036
    # collect trigger details
1037
    details = {
1✔
1038
        "MetricName": _alarm.get("MetricName", ""),
1039
        "Namespace": _alarm.get("Namespace", ""),
1040
        "Unit": _alarm.get("Unit", None),  # testing with AWS revealed this currently returns None
1041
        "Period": int(_alarm.get("Period", 0)),
1042
        "EvaluationPeriods": int(_alarm.get("EvaluationPeriods", 0)),
1043
        "ComparisonOperator": _alarm.get("ComparisonOperator", ""),
1044
        "Threshold": float(_alarm.get("Threshold", 0.0)),
1045
        "TreatMissingData": _alarm.get("TreatMissingData", ""),
1046
        "EvaluateLowSampleCountPercentile": _alarm.get("EvaluateLowSampleCountPercentile", ""),
1047
    }
1048

1049
    # Dimensions not serializable
1050
    dimensions = []
1✔
1051
    alarm_dimensions = _alarm.get("Dimensions", [])
1✔
1052
    if alarm_dimensions:
1✔
1053
        for d in _alarm["Dimensions"]:
1✔
1054
            dimensions.append({"value": d["Value"], "name": d["Name"]})
1✔
1055
    details["Dimensions"] = dimensions or ""
1✔
1056

1057
    alarm_statistic = _alarm.get("Statistic")
1✔
1058
    alarm_extended_statistic = _alarm.get("ExtendedStatistic")
1✔
1059

1060
    if alarm_statistic:
1✔
1061
        details["StatisticType"] = "Statistic"
1✔
1062
        details["Statistic"] = camel_to_snake_case(alarm_statistic).upper()  # AWS returns uppercase
1✔
1063
    elif alarm_extended_statistic:
×
1064
        details["StatisticType"] = "ExtendedStatistic"
×
1065
        details["ExtendedStatistic"] = alarm_extended_statistic
×
1066

1067
    response["Trigger"] = details
1✔
1068

1069
    return json.dumps(response, cls=JSONEncoder)
1✔
1070

1071

1072
def create_message_response_update_composite_alarm_state_sns(
1✔
1073
    composite_alarm: LocalStackCompositeAlarm,
1074
    triggering_alarm: LocalStackMetricAlarm,
1075
    old_state: StateValue,
1076
):
1077
    _alarm = composite_alarm.alarm
1✔
1078
    response = {
1✔
1079
        "AWSAccountId": composite_alarm.account_id,
1080
        "AlarmName": _alarm["AlarmName"],
1081
        "AlarmDescription": _alarm.get("AlarmDescription"),
1082
        "AlarmRule": _alarm.get("AlarmRule"),
1083
        "OldStateValue": old_state,
1084
        "NewStateValue": _alarm["StateValue"],
1085
        "NewStateReason": _alarm["StateReason"],
1086
        "StateChangeTime": _alarm["StateUpdatedTimestamp"],
1087
        # the long-name for 'region' should be used - as we don't have it, we use the short name
1088
        # which needs to be slightly changed to make snapshot tests work
1089
        "Region": composite_alarm.region.replace("-", " ").capitalize(),
1090
        "AlarmArn": _alarm["AlarmArn"],
1091
        "OKActions": _alarm.get("OKActions", []),
1092
        "AlarmActions": _alarm.get("AlarmActions", []),
1093
        "InsufficientDataActions": _alarm.get("InsufficientDataActions", []),
1094
    }
1095

1096
    triggering_children = [
1✔
1097
        {
1098
            "Arn": triggering_alarm.alarm.get("AlarmArn"),
1099
            "State": {
1100
                "Value": triggering_alarm.alarm["StateValue"],
1101
                "Timestamp": triggering_alarm.alarm["StateUpdatedTimestamp"],
1102
            },
1103
        }
1104
    ]
1105

1106
    response["TriggeringChildren"] = triggering_children
1✔
1107

1108
    return json.dumps(response, cls=JSONEncoder)
1✔