• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

localstack / localstack / 74eac421-9806-47ba-8f8c-92653ce53828

15 Apr 2025 06:51PM UTC coverage: 86.472% (+0.07%) from 86.407%
74eac421-9806-47ba-8f8c-92653ce53828

push

circleci

web-flow
CloudFormation Engine v2: Base Mappings and Conditions tests for Update Graph and PreProc (#12527)

2 of 2 new or added lines in 1 file covered. (100.0%)

69 existing lines in 6 files now uncovered.

63665 of 73625 relevant lines covered (86.47%)

0.86 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

92.28
/localstack-core/localstack/services/stepfunctions/provider.py
1
import copy
1✔
2
import datetime
1✔
3
import json
1✔
4
import logging
1✔
5
import re
1✔
6
import time
1✔
7
from typing import Final, Optional
1✔
8

9
from localstack.aws.api import CommonServiceException, RequestContext
1✔
10
from localstack.aws.api.stepfunctions import (
1✔
11
    ActivityDoesNotExist,
12
    AliasDescription,
13
    Arn,
14
    CharacterRestrictedName,
15
    ConflictException,
16
    CreateActivityOutput,
17
    CreateStateMachineAliasOutput,
18
    CreateStateMachineInput,
19
    CreateStateMachineOutput,
20
    Definition,
21
    DeleteActivityOutput,
22
    DeleteStateMachineAliasOutput,
23
    DeleteStateMachineOutput,
24
    DeleteStateMachineVersionOutput,
25
    DescribeActivityOutput,
26
    DescribeExecutionOutput,
27
    DescribeMapRunOutput,
28
    DescribeStateMachineAliasOutput,
29
    DescribeStateMachineForExecutionOutput,
30
    DescribeStateMachineOutput,
31
    EncryptionConfiguration,
32
    ExecutionDoesNotExist,
33
    ExecutionList,
34
    ExecutionRedriveFilter,
35
    ExecutionStatus,
36
    GetActivityTaskOutput,
37
    GetExecutionHistoryOutput,
38
    IncludedData,
39
    IncludeExecutionDataGetExecutionHistory,
40
    InspectionLevel,
41
    InvalidArn,
42
    InvalidDefinition,
43
    InvalidExecutionInput,
44
    InvalidLoggingConfiguration,
45
    InvalidName,
46
    InvalidToken,
47
    ListActivitiesOutput,
48
    ListExecutionsOutput,
49
    ListExecutionsPageToken,
50
    ListMapRunsOutput,
51
    ListStateMachineAliasesOutput,
52
    ListStateMachinesOutput,
53
    ListStateMachineVersionsOutput,
54
    ListTagsForResourceOutput,
55
    LoggingConfiguration,
56
    LogLevel,
57
    LongArn,
58
    MaxConcurrency,
59
    MissingRequiredParameter,
60
    Name,
61
    PageSize,
62
    PageToken,
63
    Publish,
64
    PublishStateMachineVersionOutput,
65
    ResourceNotFound,
66
    RevealSecrets,
67
    ReverseOrder,
68
    RevisionId,
69
    RoutingConfigurationList,
70
    SendTaskFailureOutput,
71
    SendTaskHeartbeatOutput,
72
    SendTaskSuccessOutput,
73
    SensitiveCause,
74
    SensitiveData,
75
    SensitiveError,
76
    StartExecutionOutput,
77
    StartSyncExecutionOutput,
78
    StateMachineAliasList,
79
    StateMachineAlreadyExists,
80
    StateMachineDoesNotExist,
81
    StateMachineList,
82
    StateMachineType,
83
    StateMachineTypeNotSupported,
84
    StepfunctionsApi,
85
    StopExecutionOutput,
86
    TagKeyList,
87
    TagList,
88
    TagResourceOutput,
89
    TaskDoesNotExist,
90
    TaskTimedOut,
91
    TaskToken,
92
    TestStateOutput,
93
    ToleratedFailureCount,
94
    ToleratedFailurePercentage,
95
    TraceHeader,
96
    TracingConfiguration,
97
    UntagResourceOutput,
98
    UpdateMapRunOutput,
99
    UpdateStateMachineAliasOutput,
100
    UpdateStateMachineOutput,
101
    ValidateStateMachineDefinitionDiagnostic,
102
    ValidateStateMachineDefinitionDiagnosticList,
103
    ValidateStateMachineDefinitionInput,
104
    ValidateStateMachineDefinitionOutput,
105
    ValidateStateMachineDefinitionResultCode,
106
    ValidateStateMachineDefinitionSeverity,
107
    ValidationException,
108
    VersionDescription,
109
)
110
from localstack.services.plugins import ServiceLifecycleHook
1✔
111
from localstack.services.stepfunctions.asl.component.state.state_execution.state_map.iteration.itemprocessor.map_run_record import (
1✔
112
    MapRunRecord,
113
)
114
from localstack.services.stepfunctions.asl.eval.callback.callback import (
1✔
115
    ActivityCallbackEndpoint,
116
    CallbackConsumerTimeout,
117
    CallbackNotifyConsumerError,
118
    CallbackOutcomeFailure,
119
    CallbackOutcomeSuccess,
120
)
121
from localstack.services.stepfunctions.asl.eval.event.logging import (
1✔
122
    CloudWatchLoggingConfiguration,
123
    CloudWatchLoggingSession,
124
)
125
from localstack.services.stepfunctions.asl.parse.asl_parser import (
1✔
126
    ASLParserException,
127
)
128
from localstack.services.stepfunctions.asl.static_analyser.express_static_analyser import (
1✔
129
    ExpressStaticAnalyser,
130
)
131
from localstack.services.stepfunctions.asl.static_analyser.static_analyser import (
1✔
132
    StaticAnalyser,
133
)
134
from localstack.services.stepfunctions.asl.static_analyser.test_state.test_state_analyser import (
1✔
135
    TestStateStaticAnalyser,
136
)
137
from localstack.services.stepfunctions.asl.static_analyser.usage_metrics_static_analyser import (
1✔
138
    UsageMetricsStaticAnalyser,
139
)
140
from localstack.services.stepfunctions.backend.activity import Activity, ActivityTask
1✔
141
from localstack.services.stepfunctions.backend.alias import Alias
1✔
142
from localstack.services.stepfunctions.backend.execution import Execution, SyncExecution
1✔
143
from localstack.services.stepfunctions.backend.state_machine import (
1✔
144
    StateMachineInstance,
145
    StateMachineRevision,
146
    StateMachineVersion,
147
    TestStateMachine,
148
)
149
from localstack.services.stepfunctions.backend.store import SFNStore, sfn_stores
1✔
150
from localstack.services.stepfunctions.backend.test_state.execution import (
1✔
151
    TestStateExecution,
152
)
153
from localstack.services.stepfunctions.stepfunctions_utils import (
1✔
154
    assert_pagination_parameters_valid,
155
    get_next_page_token_from_arn,
156
    normalise_max_results,
157
)
158
from localstack.state import StateVisitor
1✔
159
from localstack.utils.aws.arns import (
1✔
160
    ARN_PARTITION_REGEX,
161
    stepfunctions_activity_arn,
162
    stepfunctions_express_execution_arn,
163
    stepfunctions_standard_execution_arn,
164
    stepfunctions_state_machine_arn,
165
)
166
from localstack.utils.collections import PaginatedList
1✔
167
from localstack.utils.strings import long_uid, short_uid
1✔
168

169
LOG = logging.getLogger(__name__)
1✔
170

171

172
class StepFunctionsProvider(StepfunctionsApi, ServiceLifecycleHook):
1✔
173
    _TEST_STATE_MAX_TIMEOUT_SECONDS: Final[int] = 300  # 5 minutes.
1✔
174

175
    @staticmethod
1✔
176
    def get_store(context: RequestContext) -> SFNStore:
1✔
177
        return sfn_stores[context.account_id][context.region]
1✔
178

179
    def accept_state_visitor(self, visitor: StateVisitor):
1✔
180
        visitor.visit(sfn_stores)
×
181

182
    _STATE_MACHINE_ARN_REGEX: Final[re.Pattern] = re.compile(
1✔
183
        rf"{ARN_PARTITION_REGEX}:states:[a-z0-9-]+:[0-9]{{12}}:stateMachine:[a-zA-Z0-9-_.]+(:\d+)?(:[a-zA-Z0-9-_.]+)*$"
184
    )
185

186
    _STATE_MACHINE_EXECUTION_ARN_REGEX: Final[re.Pattern] = re.compile(
1✔
187
        rf"{ARN_PARTITION_REGEX}:states:[a-z0-9-]+:[0-9]{{12}}:(stateMachine|execution|express):[a-zA-Z0-9-_.]+(:\d+)?(:[a-zA-Z0-9-_.]+)*$"
188
    )
189

190
    _ACTIVITY_ARN_REGEX: Final[re.Pattern] = re.compile(
1✔
191
        rf"{ARN_PARTITION_REGEX}:states:[a-z0-9-]+:[0-9]{{12}}:activity:[a-zA-Z0-9-_\.]{{1,80}}$"
192
    )
193

194
    _ALIAS_ARN_REGEX: Final[re.Pattern] = re.compile(
1✔
195
        rf"{ARN_PARTITION_REGEX}:states:[a-z0-9-]+:[0-9]{{12}}:stateMachine:[A-Za-z0-9_.-]+:[A-Za-z_.-]+[A-Za-z0-9_.-]{{0,80}}$"
196
    )
197

198
    _ALIAS_NAME_REGEX: Final[re.Pattern] = re.compile(r"^(?=.*[a-zA-Z_\-\.])[a-zA-Z0-9_\-\.]+$")
1✔
199

200
    @staticmethod
1✔
201
    def _validate_state_machine_arn(state_machine_arn: str) -> None:
1✔
202
        # TODO: InvalidArn exception message do not communicate which part of the ARN is incorrect.
203
        if not StepFunctionsProvider._STATE_MACHINE_ARN_REGEX.match(state_machine_arn):
1✔
204
            raise InvalidArn(f"Invalid arn: '{state_machine_arn}'")
1✔
205

206
    @staticmethod
1✔
207
    def _raise_state_machine_does_not_exist(state_machine_arn: str) -> None:
1✔
208
        raise StateMachineDoesNotExist(f"State Machine Does Not Exist: '{state_machine_arn}'")
1✔
209

210
    @staticmethod
1✔
211
    def _validate_state_machine_execution_arn(execution_arn: str) -> None:
1✔
212
        # TODO: InvalidArn exception message do not communicate which part of the ARN is incorrect.
213
        if not StepFunctionsProvider._STATE_MACHINE_EXECUTION_ARN_REGEX.match(execution_arn):
1✔
214
            raise InvalidArn(f"Invalid arn: '{execution_arn}'")
1✔
215

216
    @staticmethod
1✔
217
    def _validate_activity_arn(activity_arn: str) -> None:
1✔
218
        # TODO: InvalidArn exception message do not communicate which part of the ARN is incorrect.
219
        if not StepFunctionsProvider._ACTIVITY_ARN_REGEX.match(activity_arn):
1✔
220
            raise InvalidArn(f"Invalid arn: '{activity_arn}'")
1✔
221

222
    @staticmethod
1✔
223
    def _validate_state_machine_alias_arn(state_machine_alias_arn: Arn) -> None:
1✔
224
        if not StepFunctionsProvider._ALIAS_ARN_REGEX.match(state_machine_alias_arn):
1✔
225
            raise InvalidArn(f"Invalid arn: '{state_machine_alias_arn}'")
×
226

227
    def _raise_state_machine_type_not_supported(self):
1✔
228
        raise StateMachineTypeNotSupported(
1✔
229
            "This operation is not supported by this type of state machine"
230
        )
231

232
    @staticmethod
1✔
233
    def _raise_resource_type_not_in_context(resource_type: str) -> None:
1✔
234
        lower_resource_type = resource_type.lower()
1✔
235
        raise InvalidArn(
1✔
236
            f"Invalid Arn: 'Resource type not valid in this context: {lower_resource_type}'"
237
        )
238

239
    @staticmethod
1✔
240
    def _validate_activity_name(name: str) -> None:
1✔
241
        # The activity name is validated according to the AWS StepFunctions documentation, the name should not contain:
242
        # - white space
243
        # - brackets < > { } [ ]
244
        # - wildcard characters ? *
245
        # - special characters " # % \ ^ | ~ ` $ & , ; : /
246
        # - control characters (U+0000-001F, U+007F-009F)
247
        # https://docs.aws.amazon.com/step-functions/latest/apireference/API_CreateActivity.html#API_CreateActivity_RequestSyntax
248
        if not (1 <= len(name) <= 80):
1✔
249
            raise InvalidName(f"Invalid Name: '{name}'")
×
250
        invalid_chars = set(' <>{}[]?*"#%\\^|~`$&,;:/')
1✔
251
        control_chars = {chr(i) for i in range(32)} | {chr(i) for i in range(127, 160)}
1✔
252
        invalid_chars |= control_chars
1✔
253
        for char in name:
1✔
254
            if char in invalid_chars:
1✔
255
                raise InvalidName(f"Invalid Name: '{name}'")
1✔
256

257
    @staticmethod
1✔
258
    def _validate_state_machine_alias_name(name: CharacterRestrictedName) -> None:
1✔
259
        len_name = len(name)
1✔
260
        if len_name > 80:
1✔
261
            raise ValidationException(
1✔
262
                f"1 validation error detected: Value '{name}' at 'name' failed to satisfy constraint: "
263
                f"Member must have length less than or equal to 80"
264
            )
265
        if not StepFunctionsProvider._ALIAS_NAME_REGEX.match(name):
1✔
266
            raise ValidationException(
1✔
267
                # TODO: explore more error cases in which more than one validation error may occur which results
268
                #  in the counter below being greater than 1.
269
                f"1 validation error detected: Value '{name}' at 'name' failed to satisfy constraint: "
270
                f"Member must satisfy regular expression pattern: ^(?=.*[a-zA-Z_\\-\\.])[a-zA-Z0-9_\\-\\.]+$"
271
            )
272

273
    def _get_execution(self, context: RequestContext, execution_arn: Arn) -> Execution:
1✔
274
        execution: Optional[Execution] = self.get_store(context).executions.get(execution_arn)
1✔
275
        if not execution:
1✔
276
            raise ExecutionDoesNotExist(f"Execution Does Not Exist: '{execution_arn}'")
1✔
277
        return execution
1✔
278

279
    def _get_executions(
1✔
280
        self,
281
        context: RequestContext,
282
        execution_status: Optional[ExecutionStatus] = None,
283
    ):
284
        store = self.get_store(context)
1✔
285
        execution: list[Execution] = list(store.executions.values())
1✔
286
        if execution_status:
1✔
287
            execution = list(
1✔
288
                filter(
289
                    lambda e: e.exec_status == execution_status,
290
                    store.executions.values(),
291
                )
292
            )
293
        return execution
1✔
294

295
    def _get_activity(self, context: RequestContext, activity_arn: Arn) -> Activity:
1✔
296
        maybe_activity: Optional[Activity] = self.get_store(context).activities.get(
1✔
297
            activity_arn, None
298
        )
299
        if maybe_activity is None:
1✔
300
            raise ActivityDoesNotExist(f"Activity Does Not Exist: '{activity_arn}'")
1✔
301
        return maybe_activity
1✔
302

303
    def _idempotent_revision(
1✔
304
        self,
305
        context: RequestContext,
306
        name: str,
307
        definition: Definition,
308
        state_machine_type: StateMachineType,
309
        logging_configuration: LoggingConfiguration,
310
        tracing_configuration: TracingConfiguration,
311
    ) -> Optional[StateMachineRevision]:
312
        # CreateStateMachine's idempotency check is based on the state machine name, definition, type,
313
        # LoggingConfiguration and TracingConfiguration.
314
        # If a following request has a different roleArn or tags, Step Functions will ignore these differences and
315
        # treat it as an idempotent request of the previous. In this case, roleArn and tags will not be updated, even
316
        # if they are different.
317
        state_machines: list[StateMachineInstance] = list(
1✔
318
            self.get_store(context).state_machines.values()
319
        )
320
        revisions = filter(lambda sm: isinstance(sm, StateMachineRevision), state_machines)
1✔
321
        for state_machine in revisions:
1✔
322
            check = all(
1✔
323
                [
324
                    state_machine.name == name,
325
                    state_machine.definition == definition,
326
                    state_machine.sm_type == state_machine_type,
327
                    state_machine.logging_config == logging_configuration,
328
                    state_machine.tracing_config == tracing_configuration,
329
                ]
330
            )
331
            if check:
1✔
332
                return state_machine
1✔
333
        return None
1✔
334

335
    def _idempotent_start_execution(
1✔
336
        self,
337
        execution: Optional[Execution],
338
        state_machine: StateMachineInstance,
339
        name: Name,
340
        input_data: SensitiveData,
341
    ) -> Optional[Execution]:
342
        # StartExecution is idempotent for STANDARD workflows. For a STANDARD workflow,
343
        # if you call StartExecution with the same name and input as a running execution,
344
        # the call succeeds and return the same response as the original request.
345
        # If the execution is closed or if the input is different,
346
        # it returns a 400 ExecutionAlreadyExists error. You can reuse names after 90 days.
347

348
        if not execution:
1✔
349
            return None
×
350

351
        match (name, input_data, execution.exec_status, state_machine.sm_type):
1✔
352
            case (
1✔
353
                execution.name,
354
                execution.input_data,
355
                ExecutionStatus.RUNNING,
356
                StateMachineType.STANDARD,
357
            ):
358
                return execution
1✔
359

360
        raise CommonServiceException(
1✔
361
            code="ExecutionAlreadyExists",
362
            message=f"Execution Already Exists: '{execution.exec_arn}'",
363
        )
364

365
    def _revision_by_name(
1✔
366
        self, context: RequestContext, name: str
367
    ) -> Optional[StateMachineInstance]:
368
        state_machines: list[StateMachineInstance] = list(
1✔
369
            self.get_store(context).state_machines.values()
370
        )
371
        for state_machine in state_machines:
1✔
372
            if isinstance(state_machine, StateMachineRevision) and state_machine.name == name:
1✔
373
                return state_machine
1✔
374
        return None
1✔
375

376
    @staticmethod
1✔
377
    def _validate_definition(definition: str, static_analysers: list[StaticAnalyser]) -> None:
1✔
378
        try:
1✔
379
            for static_analyser in static_analysers:
1✔
380
                static_analyser.analyse(definition)
1✔
381
        except ASLParserException as asl_parser_exception:
1✔
382
            invalid_definition = InvalidDefinition()
1✔
383
            invalid_definition.message = repr(asl_parser_exception)
1✔
384
            raise invalid_definition
1✔
385
        except Exception as exception:
1✔
386
            exception_name = exception.__class__.__name__
1✔
387
            exception_args = list(exception.args)
1✔
388
            invalid_definition = InvalidDefinition()
1✔
389
            invalid_definition.message = (
1✔
390
                f"Error={exception_name} Args={exception_args} in definition '{definition}'."
391
            )
392
            raise invalid_definition
1✔
393

394
    @staticmethod
1✔
395
    def _sanitise_logging_configuration(
1✔
396
        logging_configuration: LoggingConfiguration,
397
    ) -> None:
398
        level = logging_configuration.get("level")
1✔
399
        destinations = logging_configuration.get("destinations")
1✔
400

401
        if destinations is not None and len(destinations) > 1:
1✔
402
            raise InvalidLoggingConfiguration(
1✔
403
                "Invalid Logging Configuration: Must specify exactly one Log Destination."
404
            )
405

406
        # A LogLevel that is not OFF, should have a destination.
407
        if level is not None and level != LogLevel.OFF and not destinations:
1✔
408
            raise InvalidLoggingConfiguration(
1✔
409
                "Invalid Logging Configuration: Must specify exactly one Log Destination."
410
            )
411

412
        # Default for level is OFF.
413
        level = level or LogLevel.OFF
1✔
414

415
        # Default for includeExecutionData is False.
416
        include_flag = logging_configuration.get("includeExecutionData", False)
1✔
417

418
        # Update configuration object.
419
        logging_configuration["level"] = level
1✔
420
        logging_configuration["includeExecutionData"] = include_flag
1✔
421

422
    def create_state_machine(
1✔
423
        self, context: RequestContext, request: CreateStateMachineInput, **kwargs
424
    ) -> CreateStateMachineOutput:
425
        if not request.get("publish", False) and request.get("versionDescription"):
1✔
426
            raise ValidationException("Version description can only be set when publish is true")
1✔
427

428
        # Extract parameters and set defaults.
429
        state_machine_name = request["name"]
1✔
430
        state_machine_role_arn = request["roleArn"]
1✔
431
        state_machine_definition = request["definition"]
1✔
432
        state_machine_type = request.get("type") or StateMachineType.STANDARD
1✔
433
        state_machine_tracing_configuration = request.get("tracingConfiguration")
1✔
434
        state_machine_tags = request.get("tags")
1✔
435
        state_machine_logging_configuration = request.get(
1✔
436
            "loggingConfiguration", LoggingConfiguration()
437
        )
438
        self._sanitise_logging_configuration(
1✔
439
            logging_configuration=state_machine_logging_configuration
440
        )
441

442
        # CreateStateMachine is an idempotent API. Subsequent requests won’t create a duplicate resource if it was
443
        # already created.
444
        idem_state_machine: Optional[StateMachineRevision] = self._idempotent_revision(
1✔
445
            context=context,
446
            name=state_machine_name,
447
            definition=state_machine_definition,
448
            state_machine_type=state_machine_type,
449
            logging_configuration=state_machine_logging_configuration,
450
            tracing_configuration=state_machine_tracing_configuration,
451
        )
452
        if idem_state_machine is not None:
1✔
453
            return CreateStateMachineOutput(
1✔
454
                stateMachineArn=idem_state_machine.arn,
455
                creationDate=idem_state_machine.create_date,
456
            )
457

458
        # Assert this state machine name is unique.
459
        state_machine_with_name: Optional[StateMachineRevision] = self._revision_by_name(
1✔
460
            context=context, name=state_machine_name
461
        )
462
        if state_machine_with_name is not None:
1✔
463
            raise StateMachineAlreadyExists(
1✔
464
                f"State Machine Already Exists: '{state_machine_with_name.arn}'"
465
            )
466

467
        # Compute the state machine's Arn.
468
        state_machine_arn = stepfunctions_state_machine_arn(
1✔
469
            name=state_machine_name,
470
            account_id=context.account_id,
471
            region_name=context.region,
472
        )
473
        state_machines = self.get_store(context).state_machines
1✔
474

475
        # Reduce the logging configuration to a usable cloud watch representation, and validate the destinations
476
        # if any were given.
477
        cloud_watch_logging_configuration = (
1✔
478
            CloudWatchLoggingConfiguration.from_logging_configuration(
479
                state_machine_arn=state_machine_arn,
480
                logging_configuration=state_machine_logging_configuration,
481
            )
482
        )
483
        if cloud_watch_logging_configuration is not None:
1✔
484
            cloud_watch_logging_configuration.validate()
1✔
485

486
        # Run static analysers on the definition given.
487
        if state_machine_type == StateMachineType.EXPRESS:
1✔
488
            StepFunctionsProvider._validate_definition(
1✔
489
                definition=state_machine_definition,
490
                static_analysers=[ExpressStaticAnalyser()],
491
            )
492
        else:
493
            StepFunctionsProvider._validate_definition(
1✔
494
                definition=state_machine_definition, static_analysers=[StaticAnalyser()]
495
            )
496

497
        # Create the state machine and add it to the store.
498
        state_machine = StateMachineRevision(
1✔
499
            name=state_machine_name,
500
            arn=state_machine_arn,
501
            role_arn=state_machine_role_arn,
502
            definition=state_machine_definition,
503
            sm_type=state_machine_type,
504
            logging_config=state_machine_logging_configuration,
505
            cloud_watch_logging_configuration=cloud_watch_logging_configuration,
506
            tracing_config=state_machine_tracing_configuration,
507
            tags=state_machine_tags,
508
        )
509
        state_machines[state_machine_arn] = state_machine
1✔
510

511
        create_output = CreateStateMachineOutput(
1✔
512
            stateMachineArn=state_machine.arn, creationDate=state_machine.create_date
513
        )
514

515
        # Create the first version if the 'publish' flag is used.
516
        if request.get("publish", False):
1✔
517
            version_description = request.get("versionDescription")
1✔
518
            state_machine_version = state_machine.create_version(description=version_description)
1✔
519
            if state_machine_version is not None:
1✔
520
                state_machine_version_arn = state_machine_version.arn
1✔
521
                state_machines[state_machine_version_arn] = state_machine_version
1✔
522
                create_output["stateMachineVersionArn"] = state_machine_version_arn
1✔
523

524
        # Run static analyser on definition and collect usage metrics
525
        UsageMetricsStaticAnalyser.process(state_machine_definition)
1✔
526

527
        return create_output
1✔
528

529
    def _validate_state_machine_alias_routing_configuration(
1✔
530
        self, context: RequestContext, routing_configuration_list: RoutingConfigurationList
531
    ) -> None:
532
        # TODO: to match AWS's approach best validation exceptions could be
533
        #  built in a process decoupled from the provider.
534

535
        routing_configuration_list_len = len(routing_configuration_list)
1✔
536
        if not (1 <= routing_configuration_list_len <= 2):
1✔
537
            # Replicate the object string dump format:
538
            # [RoutingConfigurationListItem(stateMachineVersionArn=arn_no_quotes, weight=int), ...]
539
            routing_configuration_serialization_parts = []
1✔
540
            for routing_configuration in routing_configuration_list:
1✔
541
                routing_configuration_serialization_parts.append(
1✔
542
                    "".join(
543
                        [
544
                            "RoutingConfigurationListItem(stateMachineVersionArn=",
545
                            routing_configuration["stateMachineVersionArn"],
546
                            ", weight=",
547
                            str(routing_configuration["weight"]),
548
                            ")",
549
                        ]
550
                    )
551
                )
552
            routing_configuration_serialization_list = (
1✔
553
                f"[{', '.join(routing_configuration_serialization_parts)}]"
554
            )
555
            raise ValidationException(
1✔
556
                f"1 validation error detected: Value '{routing_configuration_serialization_list}' "
557
                "at 'routingConfiguration' failed to "
558
                "satisfy constraint: Member must have length less than or equal to 2"
559
            )
560

561
        routing_configuration_arn_list = [
1✔
562
            routing_configuration["stateMachineVersionArn"]
563
            for routing_configuration in routing_configuration_list
564
        ]
565
        if len(set(routing_configuration_arn_list)) < routing_configuration_list_len:
1✔
566
            arn_list_string = f"[{', '.join(routing_configuration_arn_list)}]"
1✔
567
            raise ValidationException(
1✔
568
                "Routing configuration must contain distinct state machine version ARNs. "
569
                f"Received: {arn_list_string}"
570
            )
571

572
        routing_weights = [
1✔
573
            routing_configuration["weight"] for routing_configuration in routing_configuration_list
574
        ]
575
        for i, weight in enumerate(routing_weights):
1✔
576
            # TODO: check for weight type.
577
            if weight < 0:
1✔
578
                raise ValidationException(
×
579
                    f"Invalid value for parameter routingConfiguration[{i + 1}].weight, value: {weight}, valid min value: 0"
580
                )
581
            if weight > 100:
1✔
582
                raise ValidationException(
1✔
583
                    f"1 validation error detected: Value '{weight}' at 'routingConfiguration.{i + 1}.member.weight' "
584
                    "failed to satisfy constraint: Member must have value less than or equal to 100"
585
                )
586
        routing_weights_sum = sum(routing_weights)
1✔
587
        if not routing_weights_sum == 100:
1✔
588
            raise ValidationException(
1✔
589
                f"Sum of routing configuration weights must equal 100. Received: {json.dumps(routing_weights)}"
590
            )
591

592
        store = self.get_store(context=context)
1✔
593
        state_machines = store.state_machines
1✔
594

595
        first_routing_qualified_arn = routing_configuration_arn_list[0]
1✔
596
        shared_state_machine_revision_arn = self._get_state_machine_arn_from_qualified_arn(
1✔
597
            qualified_arn=first_routing_qualified_arn
598
        )
599
        for routing_configuration_arn in routing_configuration_arn_list:
1✔
600
            maybe_state_machine_version = state_machines.get(routing_configuration_arn)
1✔
601
            if not isinstance(maybe_state_machine_version, StateMachineVersion):
1✔
602
                arn_list_string = f"[{', '.join(routing_configuration_arn_list)}]"
1✔
603
                raise ValidationException(
1✔
604
                    f"Routing configuration must contain state machine version ARNs. Received: {arn_list_string}"
605
                )
606
            state_machine_revision_arn = self._get_state_machine_arn_from_qualified_arn(
1✔
607
                qualified_arn=routing_configuration_arn
608
            )
609
            if state_machine_revision_arn != shared_state_machine_revision_arn:
1✔
610
                raise ValidationException("TODO")
×
611

612
    @staticmethod
1✔
613
    def _get_state_machine_arn_from_qualified_arn(qualified_arn: Arn) -> Arn:
1✔
614
        last_colon_index = qualified_arn.rfind(":")
1✔
615
        base_arn = qualified_arn[:last_colon_index]
1✔
616
        return base_arn
1✔
617

618
    def create_state_machine_alias(
1✔
619
        self,
620
        context: RequestContext,
621
        name: CharacterRestrictedName,
622
        routing_configuration: RoutingConfigurationList,
623
        description: AliasDescription = None,
624
        **kwargs,
625
    ) -> CreateStateMachineAliasOutput:
626
        # Validate the inputs.
627
        self._validate_state_machine_alias_name(name=name)
1✔
628
        self._validate_state_machine_alias_routing_configuration(
1✔
629
            context=context, routing_configuration_list=routing_configuration
630
        )
631

632
        # Determine the state machine arn this alias maps to,
633
        # do so unsafely as validation already took place before initialisation.
634
        first_routing_qualified_arn = routing_configuration[0]["stateMachineVersionArn"]
1✔
635
        state_machine_revision_arn = self._get_state_machine_arn_from_qualified_arn(
1✔
636
            qualified_arn=first_routing_qualified_arn
637
        )
638
        alias = Alias(
1✔
639
            state_machine_arn=state_machine_revision_arn,
640
            name=name,
641
            description=description,
642
            routing_configuration_list=routing_configuration,
643
        )
644
        state_machine_alias_arn = alias.state_machine_alias_arn
1✔
645

646
        store = self.get_store(context=context)
1✔
647

648
        aliases = store.aliases
1✔
649
        if maybe_idempotent_alias := aliases.get(state_machine_alias_arn):
1✔
650
            if alias.is_idempotent(maybe_idempotent_alias):
1✔
651
                return CreateStateMachineAliasOutput(
1✔
652
                    stateMachineAliasArn=state_machine_alias_arn, creationDate=alias.create_date
653
                )
654
            else:
655
                # CreateStateMachineAlias is an idempotent API. Idempotent requests won’t create duplicate resources.
656
                raise ConflictException(
1✔
657
                    "Failed to create alias because an alias with the same name and a "
658
                    "different routing configuration already exists."
659
                )
660
        aliases[state_machine_alias_arn] = alias
1✔
661

662
        state_machine_revision = store.state_machines.get(state_machine_revision_arn)
1✔
663
        if not isinstance(state_machine_revision, StateMachineRevision):
1✔
664
            # The state machine was deleted but not the version referenced in this context.
665
            raise RuntimeError(f"No state machine revision for arn '{state_machine_revision_arn}'")
×
666
        state_machine_revision.aliases.add(alias)
1✔
667

668
        return CreateStateMachineAliasOutput(
1✔
669
            stateMachineAliasArn=state_machine_alias_arn, creationDate=alias.create_date
670
        )
671

672
    def describe_state_machine(
1✔
673
        self,
674
        context: RequestContext,
675
        state_machine_arn: Arn,
676
        included_data: IncludedData = None,
677
        **kwargs,
678
    ) -> DescribeStateMachineOutput:
679
        self._validate_state_machine_arn(state_machine_arn)
1✔
680
        state_machine = self.get_store(context).state_machines.get(state_machine_arn)
1✔
681
        if state_machine is None:
1✔
682
            self._raise_state_machine_does_not_exist(state_machine_arn)
1✔
683
        return state_machine.describe()
1✔
684

685
    def describe_state_machine_alias(
1✔
686
        self, context: RequestContext, state_machine_alias_arn: Arn, **kwargs
687
    ) -> DescribeStateMachineAliasOutput:
688
        self._validate_state_machine_alias_arn(state_machine_alias_arn=state_machine_alias_arn)
1✔
689
        alias: Optional[Alias] = self.get_store(context=context).aliases.get(
1✔
690
            state_machine_alias_arn
691
        )
692
        if alias is None:
1✔
693
            # TODO: assemble the correct exception
694
            raise ValidationException()
×
695
        description = alias.to_description()
1✔
696
        return description
1✔
697

698
    def describe_state_machine_for_execution(
1✔
699
        self,
700
        context: RequestContext,
701
        execution_arn: Arn,
702
        included_data: IncludedData = None,
703
        **kwargs,
704
    ) -> DescribeStateMachineForExecutionOutput:
705
        self._validate_state_machine_execution_arn(execution_arn)
1✔
706
        execution: Execution = self._get_execution(context=context, execution_arn=execution_arn)
1✔
707
        return execution.to_describe_state_machine_for_execution_output()
1✔
708

709
    def send_task_heartbeat(
1✔
710
        self, context: RequestContext, task_token: TaskToken, **kwargs
711
    ) -> SendTaskHeartbeatOutput:
712
        running_executions: list[Execution] = self._get_executions(context, ExecutionStatus.RUNNING)
1✔
713
        for execution in running_executions:
1✔
714
            try:
1✔
715
                if execution.exec_worker.env.callback_pool_manager.heartbeat(
1✔
716
                    callback_id=task_token
717
                ):
718
                    return SendTaskHeartbeatOutput()
1✔
719
            except CallbackNotifyConsumerError as consumer_error:
×
720
                if isinstance(consumer_error, CallbackConsumerTimeout):
×
721
                    raise TaskTimedOut()
×
722
                else:
723
                    raise TaskDoesNotExist()
×
724
        raise InvalidToken()
×
725

726
    def send_task_success(
1✔
727
        self,
728
        context: RequestContext,
729
        task_token: TaskToken,
730
        output: SensitiveData,
731
        **kwargs,
732
    ) -> SendTaskSuccessOutput:
733
        outcome = CallbackOutcomeSuccess(callback_id=task_token, output=output)
1✔
734
        running_executions: list[Execution] = self._get_executions(context, ExecutionStatus.RUNNING)
1✔
735
        for execution in running_executions:
1✔
736
            try:
1✔
737
                if execution.exec_worker.env.callback_pool_manager.notify(
1✔
738
                    callback_id=task_token, outcome=outcome
739
                ):
740
                    return SendTaskSuccessOutput()
1✔
741
            except CallbackNotifyConsumerError as consumer_error:
×
742
                if isinstance(consumer_error, CallbackConsumerTimeout):
×
743
                    raise TaskTimedOut()
×
744
                else:
745
                    raise TaskDoesNotExist()
×
746
        raise InvalidToken("Invalid token")
1✔
747

748
    def send_task_failure(
1✔
749
        self,
750
        context: RequestContext,
751
        task_token: TaskToken,
752
        error: SensitiveError = None,
753
        cause: SensitiveCause = None,
754
        **kwargs,
755
    ) -> SendTaskFailureOutput:
756
        outcome = CallbackOutcomeFailure(callback_id=task_token, error=error, cause=cause)
1✔
757
        store = self.get_store(context)
1✔
758
        for execution in store.executions.values():
1✔
759
            try:
1✔
760
                if execution.exec_worker.env.callback_pool_manager.notify(
1✔
761
                    callback_id=task_token, outcome=outcome
762
                ):
763
                    return SendTaskFailureOutput()
1✔
764
            except CallbackNotifyConsumerError as consumer_error:
×
765
                if isinstance(consumer_error, CallbackConsumerTimeout):
×
766
                    raise TaskTimedOut()
×
767
                else:
768
                    raise TaskDoesNotExist()
×
769
        raise InvalidToken("Invalid token")
1✔
770

771
    def start_execution(
1✔
772
        self,
773
        context: RequestContext,
774
        state_machine_arn: Arn,
775
        name: Name = None,
776
        input: SensitiveData = None,
777
        trace_header: TraceHeader = None,
778
        **kwargs,
779
    ) -> StartExecutionOutput:
780
        self._validate_state_machine_arn(state_machine_arn)
1✔
781

782
        store = self.get_store(context=context)
1✔
783

784
        alias: Optional[Alias] = store.aliases.get(state_machine_arn)
1✔
785
        alias_sample_state_machine_version_arn = alias.sample() if alias is not None else None
1✔
786
        unsafe_state_machine: Optional[StateMachineInstance] = store.state_machines.get(
1✔
787
            alias_sample_state_machine_version_arn or state_machine_arn
788
        )
789
        if not unsafe_state_machine:
1✔
790
            self._raise_state_machine_does_not_exist(state_machine_arn)
1✔
791

792
        # Update event change parameters about the state machine and should not affect those about this execution.
793
        state_machine_clone = copy.deepcopy(unsafe_state_machine)
1✔
794

795
        if input is None:
1✔
796
            input_data = dict()
1✔
797
        else:
798
            try:
1✔
799
                input_data = json.loads(input)
1✔
800
            except Exception as ex:
1✔
801
                raise InvalidExecutionInput(str(ex))  # TODO: report parsing error like AWS.
1✔
802

803
        normalised_state_machine_arn = (
1✔
804
            state_machine_clone.source_arn
805
            if isinstance(state_machine_clone, StateMachineVersion)
806
            else state_machine_clone.arn
807
        )
808
        exec_name = name or long_uid()  # TODO: validate name format
1✔
809
        if state_machine_clone.sm_type == StateMachineType.STANDARD:
1✔
810
            exec_arn = stepfunctions_standard_execution_arn(normalised_state_machine_arn, exec_name)
1✔
811
        else:
812
            # Exhaustive check on STANDARD and EXPRESS type, validated on creation.
813
            exec_arn = stepfunctions_express_execution_arn(normalised_state_machine_arn, exec_name)
1✔
814

815
        if execution := store.executions.get(exec_arn):
1✔
816
            # Return already running execution if name and input match
817
            existing_execution = self._idempotent_start_execution(
1✔
818
                execution=execution,
819
                state_machine=state_machine_clone,
820
                name=name,
821
                input_data=input_data,
822
            )
823

824
            if existing_execution:
1✔
825
                return existing_execution.to_start_output()
1✔
826

827
        # Create the execution logging session, if logging is configured.
828
        cloud_watch_logging_session = None
1✔
829
        if state_machine_clone.cloud_watch_logging_configuration is not None:
1✔
830
            cloud_watch_logging_session = CloudWatchLoggingSession(
1✔
831
                execution_arn=exec_arn,
832
                configuration=state_machine_clone.cloud_watch_logging_configuration,
833
            )
834

835
        execution = Execution(
1✔
836
            name=exec_name,
837
            sm_type=state_machine_clone.sm_type,
838
            role_arn=state_machine_clone.role_arn,
839
            exec_arn=exec_arn,
840
            account_id=context.account_id,
841
            region_name=context.region,
842
            state_machine=state_machine_clone,
843
            state_machine_alias_arn=alias.state_machine_alias_arn if alias is not None else None,
844
            start_date=datetime.datetime.now(tz=datetime.timezone.utc),
845
            cloud_watch_logging_session=cloud_watch_logging_session,
846
            input_data=input_data,
847
            trace_header=trace_header,
848
            activity_store=self.get_store(context).activities,
849
        )
850

851
        store.executions[exec_arn] = execution
1✔
852

853
        execution.start()
1✔
854
        return execution.to_start_output()
1✔
855

856
    def start_sync_execution(
1✔
857
        self,
858
        context: RequestContext,
859
        state_machine_arn: Arn,
860
        name: Name = None,
861
        input: SensitiveData = None,
862
        trace_header: TraceHeader = None,
863
        included_data: IncludedData = None,
864
        **kwargs,
865
    ) -> StartSyncExecutionOutput:
866
        self._validate_state_machine_arn(state_machine_arn)
1✔
867
        unsafe_state_machine: Optional[StateMachineInstance] = self.get_store(
1✔
868
            context
869
        ).state_machines.get(state_machine_arn)
870
        if not unsafe_state_machine:
1✔
871
            self._raise_state_machine_does_not_exist(state_machine_arn)
×
872

873
        if unsafe_state_machine.sm_type == StateMachineType.STANDARD:
1✔
874
            self._raise_state_machine_type_not_supported()
1✔
875

876
        # Update event change parameters about the state machine and should not affect those about this execution.
877
        state_machine_clone = copy.deepcopy(unsafe_state_machine)
1✔
878

879
        if input is None:
1✔
880
            input_data = dict()
×
881
        else:
882
            try:
1✔
883
                input_data = json.loads(input)
1✔
884
            except Exception as ex:
×
885
                raise InvalidExecutionInput(str(ex))  # TODO: report parsing error like AWS.
×
886

887
        normalised_state_machine_arn = (
1✔
888
            state_machine_clone.source_arn
889
            if isinstance(state_machine_clone, StateMachineVersion)
890
            else state_machine_clone.arn
891
        )
892
        exec_name = name or long_uid()  # TODO: validate name format
1✔
893
        exec_arn = stepfunctions_express_execution_arn(normalised_state_machine_arn, exec_name)
1✔
894

895
        if exec_arn in self.get_store(context).executions:
1✔
896
            raise InvalidName()  # TODO
×
897

898
        # Create the execution logging session, if logging is configured.
899
        cloud_watch_logging_session = None
1✔
900
        if state_machine_clone.cloud_watch_logging_configuration is not None:
1✔
901
            cloud_watch_logging_session = CloudWatchLoggingSession(
×
902
                execution_arn=exec_arn,
903
                configuration=state_machine_clone.cloud_watch_logging_configuration,
904
            )
905

906
        execution = SyncExecution(
1✔
907
            name=exec_name,
908
            sm_type=state_machine_clone.sm_type,
909
            role_arn=state_machine_clone.role_arn,
910
            exec_arn=exec_arn,
911
            account_id=context.account_id,
912
            region_name=context.region,
913
            state_machine=state_machine_clone,
914
            start_date=datetime.datetime.now(tz=datetime.timezone.utc),
915
            cloud_watch_logging_session=cloud_watch_logging_session,
916
            input_data=input_data,
917
            trace_header=trace_header,
918
            activity_store=self.get_store(context).activities,
919
        )
920
        self.get_store(context).executions[exec_arn] = execution
1✔
921

922
        execution.start()
1✔
923
        return execution.to_start_sync_execution_output()
1✔
924

925
    def describe_execution(
1✔
926
        self,
927
        context: RequestContext,
928
        execution_arn: Arn,
929
        included_data: IncludedData = None,
930
        **kwargs,
931
    ) -> DescribeExecutionOutput:
932
        self._validate_state_machine_execution_arn(execution_arn)
1✔
933
        execution: Execution = self._get_execution(context=context, execution_arn=execution_arn)
1✔
934

935
        # Action only compatible with STANDARD workflows.
936
        if execution.sm_type != StateMachineType.STANDARD:
1✔
937
            self._raise_resource_type_not_in_context(resource_type=execution.sm_type)
1✔
938

939
        return execution.to_describe_output()
1✔
940

941
    @staticmethod
1✔
942
    def _list_execution_filter(
1✔
943
        ex: Execution, state_machine_arn: str, status_filter: Optional[str]
944
    ) -> bool:
945
        state_machine_reference_arn_set = {ex.state_machine_arn, ex.state_machine_version_arn}
1✔
946
        if state_machine_arn not in state_machine_reference_arn_set:
1✔
947
            return False
1✔
948

949
        if not status_filter:
1✔
950
            return True
1✔
951
        return ex.exec_status == status_filter
1✔
952

953
    def list_executions(
1✔
954
        self,
955
        context: RequestContext,
956
        state_machine_arn: Arn = None,
957
        status_filter: ExecutionStatus = None,
958
        max_results: PageSize = None,
959
        next_token: ListExecutionsPageToken = None,
960
        map_run_arn: LongArn = None,
961
        redrive_filter: ExecutionRedriveFilter = None,
962
        **kwargs,
963
    ) -> ListExecutionsOutput:
964
        self._validate_state_machine_arn(state_machine_arn)
1✔
965
        assert_pagination_parameters_valid(
1✔
966
            max_results=max_results,
967
            next_token=next_token,
968
            next_token_length_limit=3096,
969
        )
970
        max_results = normalise_max_results(max_results)
1✔
971

972
        state_machine = self.get_store(context).state_machines.get(state_machine_arn)
1✔
973
        if state_machine is None:
1✔
974
            self._raise_state_machine_does_not_exist(state_machine_arn)
1✔
975

976
        if state_machine.sm_type != StateMachineType.STANDARD:
1✔
977
            self._raise_state_machine_type_not_supported()
1✔
978

979
        # TODO: add support for paging
980

981
        allowed_execution_status = [
1✔
982
            ExecutionStatus.SUCCEEDED,
983
            ExecutionStatus.TIMED_OUT,
984
            ExecutionStatus.PENDING_REDRIVE,
985
            ExecutionStatus.ABORTED,
986
            ExecutionStatus.FAILED,
987
            ExecutionStatus.RUNNING,
988
        ]
989

990
        validation_errors = []
1✔
991

992
        if status_filter and status_filter not in allowed_execution_status:
1✔
993
            validation_errors.append(
1✔
994
                f"Value '{status_filter}' at 'statusFilter' failed to satisfy constraint: Member must satisfy enum value set: [{', '.join(allowed_execution_status)}]"
995
            )
996

997
        if not state_machine_arn and not map_run_arn:
1✔
998
            validation_errors.append("Must provide a StateMachine ARN or MapRun ARN")
×
999

1000
        if validation_errors:
1✔
1001
            errors_message = "; ".join(validation_errors)
1✔
1002
            message = f"{len(validation_errors)} validation {'errors' if len(validation_errors) > 1 else 'error'} detected: {errors_message}"
1✔
1003
            raise CommonServiceException(message=message, code="ValidationException")
1✔
1004

1005
        executions: ExecutionList = [
1✔
1006
            execution.to_execution_list_item()
1007
            for execution in self.get_store(context).executions.values()
1008
            if self._list_execution_filter(
1009
                execution,
1010
                state_machine_arn=state_machine_arn,
1011
                status_filter=status_filter,
1012
            )
1013
        ]
1014

1015
        executions.sort(key=lambda item: item["startDate"], reverse=True)
1✔
1016

1017
        paginated_executions = PaginatedList(executions)
1✔
1018
        page, token_for_next_page = paginated_executions.get_page(
1✔
1019
            token_generator=lambda item: get_next_page_token_from_arn(item.get("executionArn")),
1020
            page_size=max_results,
1021
            next_token=next_token,
1022
        )
1023

1024
        return ListExecutionsOutput(executions=page, nextToken=token_for_next_page)
1✔
1025

1026
    def list_state_machines(
1✔
1027
        self,
1028
        context: RequestContext,
1029
        max_results: PageSize = None,
1030
        next_token: PageToken = None,
1031
        **kwargs,
1032
    ) -> ListStateMachinesOutput:
1033
        assert_pagination_parameters_valid(max_results, next_token)
1✔
1034
        max_results = normalise_max_results(max_results)
1✔
1035

1036
        state_machines: StateMachineList = [
1✔
1037
            sm.itemise()
1038
            for sm in self.get_store(context).state_machines.values()
1039
            if isinstance(sm, StateMachineRevision)
1040
        ]
1041
        state_machines.sort(key=lambda item: item["name"])
1✔
1042

1043
        paginated_state_machines = PaginatedList(state_machines)
1✔
1044
        page, token_for_next_page = paginated_state_machines.get_page(
1✔
1045
            token_generator=lambda item: get_next_page_token_from_arn(item.get("stateMachineArn")),
1046
            page_size=max_results,
1047
            next_token=next_token,
1048
        )
1049

1050
        return ListStateMachinesOutput(stateMachines=page, nextToken=token_for_next_page)
1✔
1051

1052
    def list_state_machine_aliases(
1✔
1053
        self,
1054
        context: RequestContext,
1055
        state_machine_arn: Arn,
1056
        next_token: PageToken = None,
1057
        max_results: PageSize = None,
1058
        **kwargs,
1059
    ) -> ListStateMachineAliasesOutput:
1060
        assert_pagination_parameters_valid(max_results, next_token)
1✔
1061

1062
        self._validate_state_machine_arn(state_machine_arn)
1✔
1063
        state_machines = self.get_store(context).state_machines
1✔
1064
        state_machine_revision = state_machines.get(state_machine_arn)
1✔
1065
        if not isinstance(state_machine_revision, StateMachineRevision):
1✔
UNCOV
1066
            raise InvalidArn(f"Invalid arn: {state_machine_arn}")
×
1067

1068
        state_machine_aliases: StateMachineAliasList = list()
1✔
1069
        valid_token_found = next_token is None
1✔
1070

1071
        for alias in state_machine_revision.aliases:
1✔
1072
            state_machine_aliases.append(alias.to_item())
1✔
1073
            if alias.tokenized_state_machine_alias_arn == next_token:
1✔
1074
                valid_token_found = True
1✔
1075

1076
        if not valid_token_found:
1✔
1077
            raise InvalidToken("Invalid Token: 'Invalid token'")
1✔
1078

1079
        state_machine_aliases.sort(key=lambda item: item["creationDate"])
1✔
1080

1081
        paginated_list = PaginatedList(state_machine_aliases)
1✔
1082

1083
        paginated_aliases, next_token = paginated_list.get_page(
1✔
1084
            token_generator=lambda item: get_next_page_token_from_arn(
1085
                item.get("stateMachineAliasArn")
1086
            ),
1087
            next_token=next_token,
1088
            page_size=100 if max_results == 0 or max_results is None else max_results,
1089
        )
1090

1091
        return ListStateMachineAliasesOutput(
1✔
1092
            stateMachineAliases=paginated_aliases, nextToken=next_token
1093
        )
1094

1095
    def list_state_machine_versions(
1✔
1096
        self,
1097
        context: RequestContext,
1098
        state_machine_arn: Arn,
1099
        next_token: PageToken = None,
1100
        max_results: PageSize = None,
1101
        **kwargs,
1102
    ) -> ListStateMachineVersionsOutput:
1103
        self._validate_state_machine_arn(state_machine_arn)
1✔
1104
        assert_pagination_parameters_valid(max_results, next_token)
1✔
1105
        max_results = normalise_max_results(max_results)
1✔
1106

1107
        state_machines = self.get_store(context).state_machines
1✔
1108
        state_machine_revision = state_machines.get(state_machine_arn)
1✔
1109
        if not isinstance(state_machine_revision, StateMachineRevision):
1✔
UNCOV
1110
            raise InvalidArn(f"Invalid arn: {state_machine_arn}")
×
1111

1112
        state_machine_version_items = list()
1✔
1113
        for version_arn in state_machine_revision.versions.values():
1✔
1114
            state_machine_version = state_machines[version_arn]
1✔
1115
            if isinstance(state_machine_version, StateMachineVersion):
1✔
1116
                state_machine_version_items.append(state_machine_version.itemise())
1✔
1117
            else:
UNCOV
1118
                raise RuntimeError(
×
1119
                    f"Expected {version_arn} to be a StateMachine Version, but got '{type(state_machine_version)}'."
1120
                )
1121

1122
        state_machine_version_items.sort(key=lambda item: item["creationDate"], reverse=True)
1✔
1123

1124
        paginated_state_machine_versions = PaginatedList(state_machine_version_items)
1✔
1125
        page, token_for_next_page = paginated_state_machine_versions.get_page(
1✔
1126
            token_generator=lambda item: get_next_page_token_from_arn(
1127
                item.get("stateMachineVersionArn")
1128
            ),
1129
            page_size=max_results,
1130
            next_token=next_token,
1131
        )
1132

1133
        return ListStateMachineVersionsOutput(
1✔
1134
            stateMachineVersions=page, nextToken=token_for_next_page
1135
        )
1136

1137
    def get_execution_history(
1✔
1138
        self,
1139
        context: RequestContext,
1140
        execution_arn: Arn,
1141
        max_results: PageSize = None,
1142
        reverse_order: ReverseOrder = None,
1143
        next_token: PageToken = None,
1144
        include_execution_data: IncludeExecutionDataGetExecutionHistory = None,
1145
        **kwargs,
1146
    ) -> GetExecutionHistoryOutput:
1147
        # TODO: add support for paging, ordering, and other manipulations.
1148
        self._validate_state_machine_execution_arn(execution_arn)
1✔
1149
        execution: Execution = self._get_execution(context=context, execution_arn=execution_arn)
1✔
1150

1151
        # Action only compatible with STANDARD workflows.
1152
        if execution.sm_type != StateMachineType.STANDARD:
1✔
1153
            self._raise_resource_type_not_in_context(resource_type=execution.sm_type)
1✔
1154

1155
        history: GetExecutionHistoryOutput = execution.to_history_output()
1✔
1156
        if reverse_order:
1✔
1157
            history["events"].reverse()
1✔
1158
        return history
1✔
1159

1160
    def delete_state_machine(
1✔
1161
        self, context: RequestContext, state_machine_arn: Arn, **kwargs
1162
    ) -> DeleteStateMachineOutput:
1163
        # TODO: halt executions?
1164
        self._validate_state_machine_arn(state_machine_arn)
1✔
1165
        state_machines = self.get_store(context).state_machines
1✔
1166
        state_machine = state_machines.get(state_machine_arn)
1✔
1167
        if isinstance(state_machine, StateMachineRevision):
1✔
1168
            state_machines.pop(state_machine_arn)
1✔
1169
            for version_arn in state_machine.versions.values():
1✔
1170
                state_machines.pop(version_arn, None)
1✔
1171
        return DeleteStateMachineOutput()
1✔
1172

1173
    def delete_state_machine_alias(
1✔
1174
        self, context: RequestContext, state_machine_alias_arn: Arn, **kwargs
1175
    ) -> DeleteStateMachineAliasOutput:
1176
        self._validate_state_machine_alias_arn(state_machine_alias_arn=state_machine_alias_arn)
1✔
1177
        store = self.get_store(context=context)
1✔
1178
        aliases = store.aliases
1✔
1179
        if (alias := aliases.pop(state_machine_alias_arn, None)) is not None:
1✔
1180
            state_machines = store.state_machines
1✔
1181
            for routing_configuration in alias.get_routing_configuration_list():
1✔
1182
                state_machine_version_arn = routing_configuration["stateMachineVersionArn"]
1✔
1183
                if (
1✔
1184
                    state_machine_version := state_machines.get(state_machine_version_arn)
1185
                ) is None or not isinstance(state_machine_version, StateMachineVersion):
1186
                    continue
1✔
1187
                if (
1✔
1188
                    state_machine_revision := state_machines.get(state_machine_version.source_arn)
1189
                ) is None or not isinstance(state_machine_revision, StateMachineRevision):
UNCOV
1190
                    continue
×
1191
                state_machine_revision.aliases.discard(alias)
1✔
1192
        return DeleteStateMachineOutput()
1✔
1193

1194
    def delete_state_machine_version(
1✔
1195
        self, context: RequestContext, state_machine_version_arn: LongArn, **kwargs
1196
    ) -> DeleteStateMachineVersionOutput:
1197
        self._validate_state_machine_arn(state_machine_version_arn)
1✔
1198
        state_machines = self.get_store(context).state_machines
1✔
1199

1200
        if not (
1✔
1201
            state_machine_version := state_machines.get(state_machine_version_arn)
1202
        ) or not isinstance(state_machine_version, StateMachineVersion):
1203
            return DeleteStateMachineVersionOutput()
1✔
1204

1205
        if (
1✔
1206
            state_machine_revision := state_machines.get(state_machine_version.source_arn)
1207
        ) and isinstance(state_machine_revision, StateMachineRevision):
1208
            referencing_alias_names: list[str] = list()
1✔
1209
            for alias in state_machine_revision.aliases:
1✔
1210
                if alias.is_router_for(state_machine_version_arn=state_machine_version_arn):
1✔
1211
                    referencing_alias_names.append(alias.name)
1✔
1212
            if referencing_alias_names:
1✔
1213
                referencing_alias_names_list_body = ", ".join(referencing_alias_names)
1✔
1214
                raise ConflictException(
1✔
1215
                    "Version to be deleted must not be referenced by an alias. "
1216
                    f"Current list of aliases referencing this version: [{referencing_alias_names_list_body}]"
1217
                )
1218
            state_machine_revision.delete_version(state_machine_version_arn)
1✔
1219

1220
        state_machines.pop(state_machine_version.arn, None)
1✔
1221
        return DeleteStateMachineVersionOutput()
1✔
1222

1223
    def stop_execution(
1✔
1224
        self,
1225
        context: RequestContext,
1226
        execution_arn: Arn,
1227
        error: SensitiveError = None,
1228
        cause: SensitiveCause = None,
1229
        **kwargs,
1230
    ) -> StopExecutionOutput:
1231
        self._validate_state_machine_execution_arn(execution_arn)
1✔
1232
        execution: Execution = self._get_execution(context=context, execution_arn=execution_arn)
1✔
1233

1234
        # Action only compatible with STANDARD workflows.
1235
        if execution.sm_type != StateMachineType.STANDARD:
1✔
1236
            self._raise_resource_type_not_in_context(resource_type=execution.sm_type)
1✔
1237

1238
        stop_date = datetime.datetime.now(tz=datetime.timezone.utc)
1✔
1239
        execution.stop(stop_date=stop_date, cause=cause, error=error)
1✔
1240
        return StopExecutionOutput(stopDate=stop_date)
1✔
1241

1242
    def update_state_machine(
1✔
1243
        self,
1244
        context: RequestContext,
1245
        state_machine_arn: Arn,
1246
        definition: Definition = None,
1247
        role_arn: Arn = None,
1248
        logging_configuration: LoggingConfiguration = None,
1249
        tracing_configuration: TracingConfiguration = None,
1250
        publish: Publish = None,
1251
        version_description: VersionDescription = None,
1252
        encryption_configuration: EncryptionConfiguration = None,
1253
        **kwargs,
1254
    ) -> UpdateStateMachineOutput:
1255
        self._validate_state_machine_arn(state_machine_arn)
1✔
1256
        state_machines = self.get_store(context).state_machines
1✔
1257

1258
        state_machine = state_machines.get(state_machine_arn)
1✔
1259
        if not isinstance(state_machine, StateMachineRevision):
1✔
1260
            self._raise_state_machine_does_not_exist(state_machine_arn)
1✔
1261

1262
        # TODO: Add logic to handle metrics for when SFN definitions update
1263
        if not any([definition, role_arn, logging_configuration]):
1✔
1264
            raise MissingRequiredParameter(
1✔
1265
                "Either the definition, the role ARN, the LoggingConfiguration, "
1266
                "or the TracingConfiguration must be specified"
1267
            )
1268

1269
        if definition is not None:
1✔
1270
            self._validate_definition(definition=definition, static_analysers=[StaticAnalyser()])
1✔
1271

1272
        if logging_configuration is not None:
1✔
1273
            self._sanitise_logging_configuration(logging_configuration=logging_configuration)
1✔
1274

1275
        revision_id = state_machine.create_revision(
1✔
1276
            definition=definition,
1277
            role_arn=role_arn,
1278
            logging_configuration=logging_configuration,
1279
        )
1280

1281
        version_arn = None
1✔
1282
        if publish:
1✔
1283
            version = state_machine.create_version(description=version_description)
1✔
1284
            if version is not None:
1✔
1285
                version_arn = version.arn
1✔
1286
                state_machines[version_arn] = version
1✔
1287
            else:
1288
                target_revision_id = revision_id or state_machine.revision_id
1✔
1289
                version_arn = state_machine.versions[target_revision_id]
1✔
1290

1291
        update_output = UpdateStateMachineOutput(
1✔
1292
            updateDate=datetime.datetime.now(tz=datetime.timezone.utc)
1293
        )
1294
        if revision_id is not None:
1✔
1295
            update_output["revisionId"] = revision_id
1✔
1296
        if version_arn is not None:
1✔
1297
            update_output["stateMachineVersionArn"] = version_arn
1✔
1298
        return update_output
1✔
1299

1300
    def update_state_machine_alias(
1✔
1301
        self,
1302
        context: RequestContext,
1303
        state_machine_alias_arn: Arn,
1304
        description: AliasDescription = None,
1305
        routing_configuration: RoutingConfigurationList = None,
1306
        **kwargs,
1307
    ) -> UpdateStateMachineAliasOutput:
1308
        self._validate_state_machine_alias_arn(state_machine_alias_arn=state_machine_alias_arn)
1✔
1309
        if not any([description, routing_configuration]):
1✔
UNCOV
1310
            raise MissingRequiredParameter(
×
1311
                "Either the description or the RoutingConfiguration must be specified"
1312
            )
1313
        if routing_configuration is not None:
1✔
1314
            self._validate_state_machine_alias_routing_configuration(
1✔
1315
                context=context, routing_configuration_list=routing_configuration
1316
            )
1317
        store = self.get_store(context=context)
1✔
1318
        alias = store.aliases.get(state_machine_alias_arn)
1✔
1319
        if alias is None:
1✔
1320
            raise ResourceNotFound("Request references a resource that does not exist.")
1✔
1321

1322
        alias.update(description=description, routing_configuration_list=routing_configuration)
1✔
1323
        return UpdateStateMachineAliasOutput(updateDate=alias.update_date)
1✔
1324

1325
    def publish_state_machine_version(
1✔
1326
        self,
1327
        context: RequestContext,
1328
        state_machine_arn: Arn,
1329
        revision_id: RevisionId = None,
1330
        description: VersionDescription = None,
1331
        **kwargs,
1332
    ) -> PublishStateMachineVersionOutput:
1333
        self._validate_state_machine_arn(state_machine_arn)
1✔
1334
        state_machines = self.get_store(context).state_machines
1✔
1335

1336
        state_machine_revision = state_machines.get(state_machine_arn)
1✔
1337
        if not isinstance(state_machine_revision, StateMachineRevision):
1✔
1338
            self._raise_state_machine_does_not_exist(state_machine_arn)
1✔
1339

1340
        if revision_id is not None and state_machine_revision.revision_id != revision_id:
1✔
1341
            raise ConflictException(
1✔
1342
                f"Failed to publish the State Machine version for revision {revision_id}. "
1343
                f"The current State Machine revision is {state_machine_revision.revision_id}."
1344
            )
1345

1346
        state_machine_version = state_machine_revision.create_version(description=description)
1✔
1347
        if state_machine_version is not None:
1✔
1348
            state_machines[state_machine_version.arn] = state_machine_version
1✔
1349
        else:
1350
            target_revision_id = revision_id or state_machine_revision.revision_id
1✔
1351
            state_machine_version_arn = state_machine_revision.versions.get(target_revision_id)
1✔
1352
            state_machine_version = state_machines[state_machine_version_arn]
1✔
1353

1354
        return PublishStateMachineVersionOutput(
1✔
1355
            creationDate=state_machine_version.create_date,
1356
            stateMachineVersionArn=state_machine_version.arn,
1357
        )
1358

1359
    def tag_resource(
1✔
1360
        self, context: RequestContext, resource_arn: Arn, tags: TagList, **kwargs
1361
    ) -> TagResourceOutput:
1362
        # TODO: add tagging for activities.
1363
        state_machines = self.get_store(context).state_machines
1✔
1364
        state_machine = state_machines.get(resource_arn)
1✔
1365
        if not isinstance(state_machine, StateMachineRevision):
1✔
1366
            raise ResourceNotFound(f"Resource not found: '{resource_arn}'")
1✔
1367

1368
        state_machine.tag_manager.add_all(tags)
1✔
1369
        return TagResourceOutput()
1✔
1370

1371
    def untag_resource(
1✔
1372
        self, context: RequestContext, resource_arn: Arn, tag_keys: TagKeyList, **kwargs
1373
    ) -> UntagResourceOutput:
1374
        # TODO: add untagging for activities.
1375
        state_machines = self.get_store(context).state_machines
1✔
1376
        state_machine = state_machines.get(resource_arn)
1✔
1377
        if not isinstance(state_machine, StateMachineRevision):
1✔
UNCOV
1378
            raise ResourceNotFound(f"Resource not found: '{resource_arn}'")
×
1379

1380
        state_machine.tag_manager.remove_all(tag_keys)
1✔
1381
        return UntagResourceOutput()
1✔
1382

1383
    def list_tags_for_resource(
1✔
1384
        self, context: RequestContext, resource_arn: Arn, **kwargs
1385
    ) -> ListTagsForResourceOutput:
1386
        # TODO: add untagging for activities.
1387
        state_machines = self.get_store(context).state_machines
1✔
1388
        state_machine = state_machines.get(resource_arn)
1✔
1389
        if not isinstance(state_machine, StateMachineRevision):
1✔
UNCOV
1390
            raise ResourceNotFound(f"Resource not found: '{resource_arn}'")
×
1391

1392
        tags: TagList = state_machine.tag_manager.to_tag_list()
1✔
1393
        return ListTagsForResourceOutput(tags=tags)
1✔
1394

1395
    def describe_map_run(
1✔
1396
        self, context: RequestContext, map_run_arn: LongArn, **kwargs
1397
    ) -> DescribeMapRunOutput:
1398
        store = self.get_store(context)
1✔
1399
        for execution in store.executions.values():
1✔
1400
            map_run_record: Optional[MapRunRecord] = (
1✔
1401
                execution.exec_worker.env.map_run_record_pool_manager.get(map_run_arn)
1402
            )
1403
            if map_run_record is not None:
1✔
1404
                return map_run_record.describe()
1✔
UNCOV
1405
        raise ResourceNotFound()
×
1406

1407
    def list_map_runs(
1✔
1408
        self,
1409
        context: RequestContext,
1410
        execution_arn: Arn,
1411
        max_results: PageSize = None,
1412
        next_token: PageToken = None,
1413
        **kwargs,
1414
    ) -> ListMapRunsOutput:
1415
        # TODO: add support for paging.
1416
        execution = self._get_execution(context=context, execution_arn=execution_arn)
1✔
1417
        map_run_records: list[MapRunRecord] = (
1✔
1418
            execution.exec_worker.env.map_run_record_pool_manager.get_all()
1419
        )
1420
        return ListMapRunsOutput(
1✔
1421
            mapRuns=[map_run_record.list_item() for map_run_record in map_run_records]
1422
        )
1423

1424
    def update_map_run(
1✔
1425
        self,
1426
        context: RequestContext,
1427
        map_run_arn: LongArn,
1428
        max_concurrency: MaxConcurrency = None,
1429
        tolerated_failure_percentage: ToleratedFailurePercentage = None,
1430
        tolerated_failure_count: ToleratedFailureCount = None,
1431
        **kwargs,
1432
    ) -> UpdateMapRunOutput:
UNCOV
1433
        if tolerated_failure_percentage is not None or tolerated_failure_count is not None:
×
1434
            raise NotImplementedError(
1435
                "Updating of ToleratedFailureCount and ToleratedFailurePercentage is currently unsupported."
1436
            )
1437
        # TODO: investigate behaviour of empty requests.
UNCOV
1438
        store = self.get_store(context)
×
UNCOV
1439
        for execution in store.executions.values():
×
UNCOV
1440
            map_run_record: Optional[MapRunRecord] = (
×
1441
                execution.exec_worker.env.map_run_record_pool_manager.get(map_run_arn)
1442
            )
UNCOV
1443
            if map_run_record is not None:
×
UNCOV
1444
                map_run_record.update(
×
1445
                    max_concurrency=max_concurrency,
1446
                    tolerated_failure_count=tolerated_failure_count,
1447
                    tolerated_failure_percentage=tolerated_failure_percentage,
1448
                )
UNCOV
1449
                LOG.warning(
×
1450
                    "StepFunctions UpdateMapRun changes are currently not being reflected in the MapRun instances."
1451
                )
UNCOV
1452
                return UpdateMapRunOutput()
×
UNCOV
1453
        raise ResourceNotFound()
×
1454

1455
    def test_state(
1✔
1456
        self,
1457
        context: RequestContext,
1458
        definition: Definition,
1459
        role_arn: Arn = None,
1460
        input: SensitiveData = None,
1461
        inspection_level: InspectionLevel = None,
1462
        reveal_secrets: RevealSecrets = None,
1463
        variables: SensitiveData = None,
1464
        **kwargs,
1465
    ) -> TestStateOutput:
1466
        StepFunctionsProvider._validate_definition(
1✔
1467
            definition=definition, static_analysers=[TestStateStaticAnalyser()]
1468
        )
1469

1470
        name: Optional[Name] = f"TestState-{short_uid()}"
1✔
1471
        arn = stepfunctions_state_machine_arn(
1✔
1472
            name=name, account_id=context.account_id, region_name=context.region
1473
        )
1474
        state_machine = TestStateMachine(
1✔
1475
            name=name,
1476
            arn=arn,
1477
            role_arn=role_arn,
1478
            definition=definition,
1479
        )
1480
        exec_arn = stepfunctions_standard_execution_arn(state_machine.arn, name)
1✔
1481

1482
        input_json = json.loads(input)
1✔
1483
        execution = TestStateExecution(
1✔
1484
            name=name,
1485
            role_arn=role_arn,
1486
            exec_arn=exec_arn,
1487
            account_id=context.account_id,
1488
            region_name=context.region,
1489
            state_machine=state_machine,
1490
            start_date=datetime.datetime.now(tz=datetime.timezone.utc),
1491
            input_data=input_json,
1492
            activity_store=self.get_store(context).activities,
1493
        )
1494
        execution.start()
1✔
1495

1496
        test_state_output = execution.to_test_state_output(
1✔
1497
            inspection_level=inspection_level or InspectionLevel.INFO
1498
        )
1499

1500
        return test_state_output
1✔
1501

1502
    def create_activity(
1✔
1503
        self,
1504
        context: RequestContext,
1505
        name: Name,
1506
        tags: TagList = None,
1507
        encryption_configuration: EncryptionConfiguration = None,
1508
        **kwargs,
1509
    ) -> CreateActivityOutput:
1510
        self._validate_activity_name(name=name)
1✔
1511

1512
        activity_arn = stepfunctions_activity_arn(
1✔
1513
            name=name, account_id=context.account_id, region_name=context.region
1514
        )
1515
        activities = self.get_store(context).activities
1✔
1516
        if activity_arn not in activities:
1✔
1517
            activity = Activity(arn=activity_arn, name=name)
1✔
1518
            activities[activity_arn] = activity
1✔
1519
        else:
1520
            activity = activities[activity_arn]
1✔
1521

1522
        return CreateActivityOutput(activityArn=activity.arn, creationDate=activity.creation_date)
1✔
1523

1524
    def delete_activity(
1✔
1525
        self, context: RequestContext, activity_arn: Arn, **kwargs
1526
    ) -> DeleteActivityOutput:
1527
        self._validate_activity_arn(activity_arn)
1✔
1528
        self.get_store(context).activities.pop(activity_arn, None)
1✔
1529
        return DeleteActivityOutput()
1✔
1530

1531
    def describe_activity(
1✔
1532
        self, context: RequestContext, activity_arn: Arn, **kwargs
1533
    ) -> DescribeActivityOutput:
1534
        self._validate_activity_arn(activity_arn)
1✔
1535
        activity = self._get_activity(context=context, activity_arn=activity_arn)
1✔
1536
        return activity.to_describe_activity_output()
1✔
1537

1538
    def list_activities(
1✔
1539
        self,
1540
        context: RequestContext,
1541
        max_results: PageSize = None,
1542
        next_token: PageToken = None,
1543
        **kwargs,
1544
    ) -> ListActivitiesOutput:
1545
        activities: list[Activity] = list(self.get_store(context).activities.values())
1✔
1546
        return ListActivitiesOutput(
1✔
1547
            activities=[activity.to_activity_list_item() for activity in activities]
1548
        )
1549

1550
    def _send_activity_task_started(
1✔
1551
        self,
1552
        context: RequestContext,
1553
        task_token: TaskToken,
1554
        worker_name: Optional[Name],
1555
    ) -> None:
1556
        executions: list[Execution] = self._get_executions(context)
1✔
1557
        for execution in executions:
1✔
1558
            callback_endpoint = execution.exec_worker.env.callback_pool_manager.get(
1✔
1559
                callback_id=task_token
1560
            )
1561
            if isinstance(callback_endpoint, ActivityCallbackEndpoint):
1✔
1562
                callback_endpoint.notify_activity_task_start(worker_name=worker_name)
1✔
1563
                return
1✔
UNCOV
1564
        raise InvalidToken()
×
1565

1566
    @staticmethod
1✔
1567
    def _pull_activity_task(activity: Activity) -> Optional[ActivityTask]:
1✔
1568
        seconds_left = 60
1✔
1569
        while seconds_left > 0:
1✔
1570
            try:
1✔
1571
                return activity.get_task()
1✔
1572
            except IndexError:
1✔
1573
                time.sleep(1)
1✔
1574
                seconds_left -= 1
1✔
1575
        return None
×
1576

1577
    def get_activity_task(
1✔
1578
        self,
1579
        context: RequestContext,
1580
        activity_arn: Arn,
1581
        worker_name: Name = None,
1582
        **kwargs,
1583
    ) -> GetActivityTaskOutput:
1584
        self._validate_activity_arn(activity_arn)
1✔
1585

1586
        activity = self._get_activity(context=context, activity_arn=activity_arn)
1✔
1587
        maybe_task: Optional[ActivityTask] = self._pull_activity_task(activity=activity)
1✔
1588
        if maybe_task is not None:
1✔
1589
            self._send_activity_task_started(
1✔
1590
                context, maybe_task.task_token, worker_name=worker_name
1591
            )
1592
            return GetActivityTaskOutput(
1✔
1593
                taskToken=maybe_task.task_token, input=maybe_task.task_input
1594
            )
1595

UNCOV
1596
        return GetActivityTaskOutput(taskToken=None, input=None)
×
1597

1598
    def validate_state_machine_definition(
1✔
1599
        self, context: RequestContext, request: ValidateStateMachineDefinitionInput, **kwargs
1600
    ) -> ValidateStateMachineDefinitionOutput:
1601
        # TODO: increase parity of static analysers, current implementation is an unblocker for this API action.
1602
        # TODO: add support for ValidateStateMachineDefinitionSeverity
1603
        # TODO: add support for ValidateStateMachineDefinitionMaxResult
1604

1605
        state_machine_type: StateMachineType = request.get("type", StateMachineType.STANDARD)
1✔
1606
        definition: str = request["definition"]
1✔
1607

1608
        static_analysers = list()
1✔
1609
        if state_machine_type == StateMachineType.STANDARD:
1✔
1610
            static_analysers.append(StaticAnalyser())
1✔
1611
        else:
1612
            static_analysers.append(ExpressStaticAnalyser())
1✔
1613

1614
        diagnostics: ValidateStateMachineDefinitionDiagnosticList = list()
1✔
1615
        try:
1✔
1616
            StepFunctionsProvider._validate_definition(
1✔
1617
                definition=definition, static_analysers=static_analysers
1618
            )
1619
            validation_result = ValidateStateMachineDefinitionResultCode.OK
1✔
1620
        except InvalidDefinition as invalid_definition:
1✔
1621
            validation_result = ValidateStateMachineDefinitionResultCode.FAIL
1✔
1622
            diagnostics.append(
1✔
1623
                ValidateStateMachineDefinitionDiagnostic(
1624
                    severity=ValidateStateMachineDefinitionSeverity.ERROR,
1625
                    code="SCHEMA_VALIDATION_FAILED",
1626
                    message=invalid_definition.message,
1627
                )
1628
            )
UNCOV
1629
        except Exception as ex:
×
UNCOV
1630
            validation_result = ValidateStateMachineDefinitionResultCode.FAIL
×
UNCOV
1631
            LOG.error("Unknown error during validation %s", ex)
×
1632

1633
        return ValidateStateMachineDefinitionOutput(
1✔
1634
            result=validation_result, diagnostics=diagnostics, truncated=False
1635
        )
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc