• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

localstack / localstack / 4e27dc30-df7d-47cf-9ddb-0b539d612501

17 Apr 2025 08:11PM UTC coverage: 86.279% (-0.02%) from 86.294%
4e27dc30-df7d-47cf-9ddb-0b539d612501

push

circleci

web-flow
Step Functions: Surface Support for Mocked Responses (#12525)

200 of 245 new or added lines in 9 files covered. (81.63%)

201 existing lines in 15 files now uncovered.

63889 of 74049 relevant lines covered (86.28%)

0.86 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

92.25
/localstack-core/localstack/services/stepfunctions/provider.py
1
import copy
1✔
2
import datetime
1✔
3
import json
1✔
4
import logging
1✔
5
import re
1✔
6
import time
1✔
7
from typing import Final, Optional
1✔
8

9
from localstack.aws.api import CommonServiceException, RequestContext
1✔
10
from localstack.aws.api.stepfunctions import (
1✔
11
    ActivityDoesNotExist,
12
    AliasDescription,
13
    Arn,
14
    CharacterRestrictedName,
15
    ConflictException,
16
    CreateActivityOutput,
17
    CreateStateMachineAliasOutput,
18
    CreateStateMachineInput,
19
    CreateStateMachineOutput,
20
    Definition,
21
    DeleteActivityOutput,
22
    DeleteStateMachineAliasOutput,
23
    DeleteStateMachineOutput,
24
    DeleteStateMachineVersionOutput,
25
    DescribeActivityOutput,
26
    DescribeExecutionOutput,
27
    DescribeMapRunOutput,
28
    DescribeStateMachineAliasOutput,
29
    DescribeStateMachineForExecutionOutput,
30
    DescribeStateMachineOutput,
31
    EncryptionConfiguration,
32
    ExecutionDoesNotExist,
33
    ExecutionList,
34
    ExecutionRedriveFilter,
35
    ExecutionStatus,
36
    GetActivityTaskOutput,
37
    GetExecutionHistoryOutput,
38
    IncludedData,
39
    IncludeExecutionDataGetExecutionHistory,
40
    InspectionLevel,
41
    InvalidArn,
42
    InvalidDefinition,
43
    InvalidExecutionInput,
44
    InvalidLoggingConfiguration,
45
    InvalidName,
46
    InvalidToken,
47
    ListActivitiesOutput,
48
    ListExecutionsOutput,
49
    ListExecutionsPageToken,
50
    ListMapRunsOutput,
51
    ListStateMachineAliasesOutput,
52
    ListStateMachinesOutput,
53
    ListStateMachineVersionsOutput,
54
    ListTagsForResourceOutput,
55
    LoggingConfiguration,
56
    LogLevel,
57
    LongArn,
58
    MaxConcurrency,
59
    MissingRequiredParameter,
60
    Name,
61
    PageSize,
62
    PageToken,
63
    Publish,
64
    PublishStateMachineVersionOutput,
65
    ResourceNotFound,
66
    RevealSecrets,
67
    ReverseOrder,
68
    RevisionId,
69
    RoutingConfigurationList,
70
    SendTaskFailureOutput,
71
    SendTaskHeartbeatOutput,
72
    SendTaskSuccessOutput,
73
    SensitiveCause,
74
    SensitiveData,
75
    SensitiveError,
76
    StartExecutionOutput,
77
    StartSyncExecutionOutput,
78
    StateMachineAliasList,
79
    StateMachineAlreadyExists,
80
    StateMachineDoesNotExist,
81
    StateMachineList,
82
    StateMachineType,
83
    StateMachineTypeNotSupported,
84
    StepfunctionsApi,
85
    StopExecutionOutput,
86
    TagKeyList,
87
    TagList,
88
    TagResourceOutput,
89
    TaskDoesNotExist,
90
    TaskTimedOut,
91
    TaskToken,
92
    TestStateOutput,
93
    ToleratedFailureCount,
94
    ToleratedFailurePercentage,
95
    TraceHeader,
96
    TracingConfiguration,
97
    UntagResourceOutput,
98
    UpdateMapRunOutput,
99
    UpdateStateMachineAliasOutput,
100
    UpdateStateMachineOutput,
101
    ValidateStateMachineDefinitionDiagnostic,
102
    ValidateStateMachineDefinitionDiagnosticList,
103
    ValidateStateMachineDefinitionInput,
104
    ValidateStateMachineDefinitionOutput,
105
    ValidateStateMachineDefinitionResultCode,
106
    ValidateStateMachineDefinitionSeverity,
107
    ValidationException,
108
    VersionDescription,
109
)
110
from localstack.services.plugins import ServiceLifecycleHook
1✔
111
from localstack.services.stepfunctions.asl.component.state.state_execution.state_map.iteration.itemprocessor.map_run_record import (
1✔
112
    MapRunRecord,
113
)
114
from localstack.services.stepfunctions.asl.eval.callback.callback import (
1✔
115
    ActivityCallbackEndpoint,
116
    CallbackConsumerTimeout,
117
    CallbackNotifyConsumerError,
118
    CallbackOutcomeFailure,
119
    CallbackOutcomeSuccess,
120
)
121
from localstack.services.stepfunctions.asl.eval.event.logging import (
1✔
122
    CloudWatchLoggingConfiguration,
123
    CloudWatchLoggingSession,
124
)
125
from localstack.services.stepfunctions.asl.parse.asl_parser import (
1✔
126
    ASLParserException,
127
)
128
from localstack.services.stepfunctions.asl.static_analyser.express_static_analyser import (
1✔
129
    ExpressStaticAnalyser,
130
)
131
from localstack.services.stepfunctions.asl.static_analyser.static_analyser import (
1✔
132
    StaticAnalyser,
133
)
134
from localstack.services.stepfunctions.asl.static_analyser.test_state.test_state_analyser import (
1✔
135
    TestStateStaticAnalyser,
136
)
137
from localstack.services.stepfunctions.asl.static_analyser.usage_metrics_static_analyser import (
1✔
138
    UsageMetricsStaticAnalyser,
139
)
140
from localstack.services.stepfunctions.backend.activity import Activity, ActivityTask
1✔
141
from localstack.services.stepfunctions.backend.alias import Alias
1✔
142
from localstack.services.stepfunctions.backend.execution import Execution, SyncExecution
1✔
143
from localstack.services.stepfunctions.backend.state_machine import (
1✔
144
    StateMachineInstance,
145
    StateMachineRevision,
146
    StateMachineVersion,
147
    TestStateMachine,
148
)
149
from localstack.services.stepfunctions.backend.store import SFNStore, sfn_stores
1✔
150
from localstack.services.stepfunctions.backend.test_state.execution import (
1✔
151
    TestStateExecution,
152
)
153
from localstack.services.stepfunctions.mocking.mock_config import (
1✔
154
    MockTestCase,
155
    load_mock_test_case_for,
156
)
157
from localstack.services.stepfunctions.stepfunctions_utils import (
1✔
158
    assert_pagination_parameters_valid,
159
    get_next_page_token_from_arn,
160
    normalise_max_results,
161
)
162
from localstack.state import StateVisitor
1✔
163
from localstack.utils.aws.arns import (
1✔
164
    ARN_PARTITION_REGEX,
165
    stepfunctions_activity_arn,
166
    stepfunctions_express_execution_arn,
167
    stepfunctions_standard_execution_arn,
168
    stepfunctions_state_machine_arn,
169
)
170
from localstack.utils.collections import PaginatedList
1✔
171
from localstack.utils.strings import long_uid, short_uid
1✔
172

173
LOG = logging.getLogger(__name__)
1✔
174

175

176
class StepFunctionsProvider(StepfunctionsApi, ServiceLifecycleHook):
    """LocalStack provider implementing the AWS Step Functions service API."""

    # Upper bound applied to TestState execution timeouts.
    _TEST_STATE_MAX_TIMEOUT_SECONDS: Final[int] = 300  # 5 minutes.
178

179
    @staticmethod
    def get_store(context: RequestContext) -> SFNStore:
        """Return the Step Functions store for the request's account and region."""
        return sfn_stores[context.account_id][context.region]
182

183
    def accept_state_visitor(self, visitor: StateVisitor):
        # Expose the whole sfn_stores container to state persistence visitors.
        visitor.visit(sfn_stores)
185

186
    # Matches state machine ARNs, optionally qualified by a numeric version,
    # further name segments, or a trailing '#<suffix>' fragment.
    _STATE_MACHINE_ARN_REGEX: Final[re.Pattern] = re.compile(
        rf"{ARN_PARTITION_REGEX}:states:[a-z0-9-]+:[0-9]{{12}}:stateMachine:[a-zA-Z0-9-_.]+(:\d+)?(:[a-zA-Z0-9-_.]+)*(?:#[a-zA-Z0-9-_]+)?$"
    )

    # Matches state machine, standard execution, and express execution ARNs.
    _STATE_MACHINE_EXECUTION_ARN_REGEX: Final[re.Pattern] = re.compile(
        rf"{ARN_PARTITION_REGEX}:states:[a-z0-9-]+:[0-9]{{12}}:(stateMachine|execution|express):[a-zA-Z0-9-_.]+(:\d+)?(:[a-zA-Z0-9-_.]+)*$"
    )

    # Matches activity ARNs; the activity name is limited to 80 characters.
    _ACTIVITY_ARN_REGEX: Final[re.Pattern] = re.compile(
        rf"{ARN_PARTITION_REGEX}:states:[a-z0-9-]+:[0-9]{{12}}:activity:[a-zA-Z0-9-_\.]{{1,80}}$"
    )

    # Matches alias ARNs of the form '<state machine arn>:<alias name>'.
    _ALIAS_ARN_REGEX: Final[re.Pattern] = re.compile(
        rf"{ARN_PARTITION_REGEX}:states:[a-z0-9-]+:[0-9]{{12}}:stateMachine:[A-Za-z0-9_.-]+:[A-Za-z_.-]+[A-Za-z0-9_.-]{{0,80}}$"
    )

    # Alias names must contain at least one non-digit character (lookahead).
    _ALIAS_NAME_REGEX: Final[re.Pattern] = re.compile(r"^(?=.*[a-zA-Z_\-\.])[a-zA-Z0-9_\-\.]+$")
203

204
    @staticmethod
    def _validate_state_machine_arn(state_machine_arn: str) -> None:
        """Raise InvalidArn if the state machine ARN does not match the expected pattern."""
        # TODO: InvalidArn exception message do not communicate which part of the ARN is incorrect.
        matched = StepFunctionsProvider._STATE_MACHINE_ARN_REGEX.match(state_machine_arn)
        if matched is None:
            raise InvalidArn(f"Invalid arn: '{state_machine_arn}'")
209

210
    @staticmethod
    def _raise_state_machine_does_not_exist(state_machine_arn: str) -> None:
        """Unconditionally raise StateMachineDoesNotExist for the given ARN."""
        message = f"State Machine Does Not Exist: '{state_machine_arn}'"
        raise StateMachineDoesNotExist(message)
213

214
    @staticmethod
    def _validate_state_machine_execution_arn(execution_arn: str) -> None:
        """Raise InvalidArn if the execution ARN does not match the expected pattern."""
        # TODO: InvalidArn exception message do not communicate which part of the ARN is incorrect.
        matched = StepFunctionsProvider._STATE_MACHINE_EXECUTION_ARN_REGEX.match(execution_arn)
        if matched is None:
            raise InvalidArn(f"Invalid arn: '{execution_arn}'")
219

220
    @staticmethod
    def _validate_activity_arn(activity_arn: str) -> None:
        """Raise InvalidArn if the activity ARN does not match the expected pattern."""
        # TODO: InvalidArn exception message do not communicate which part of the ARN is incorrect.
        matched = StepFunctionsProvider._ACTIVITY_ARN_REGEX.match(activity_arn)
        if matched is None:
            raise InvalidArn(f"Invalid arn: '{activity_arn}'")
225

226
    @staticmethod
    def _validate_state_machine_alias_arn(state_machine_alias_arn: Arn) -> None:
        """Raise InvalidArn if the alias ARN does not match the expected pattern."""
        matched = StepFunctionsProvider._ALIAS_ARN_REGEX.match(state_machine_alias_arn)
        if matched is None:
            raise InvalidArn(f"Invalid arn: '{state_machine_alias_arn}'")
230

231
    def _raise_state_machine_type_not_supported(self):
        """Raise the canonical error for operations unsupported by this state machine type."""
        message = "This operation is not supported by this type of state machine"
        raise StateMachineTypeNotSupported(message)
235

236
    @staticmethod
    def _raise_resource_type_not_in_context(resource_type: str) -> None:
        """Raise InvalidArn for an ARN whose resource type is not valid in this context."""
        raise InvalidArn(
            f"Invalid Arn: 'Resource type not valid in this context: {resource_type.lower()}'"
        )
242

243
    @staticmethod
    def _validate_activity_name(name: str) -> None:
        r"""Validate an activity name per the AWS StepFunctions documentation.

        The name must be 1-80 characters long and must not contain:
        - white space
        - brackets < > { } [ ]
        - wildcard characters ? *
        - special characters " # % \ ^ | ~ ` $ & , ; : /
        - control characters (U+0000-001F, U+007F-009F)
        https://docs.aws.amazon.com/step-functions/latest/apireference/API_CreateActivity.html#API_CreateActivity_RequestSyntax
        """
        if len(name) < 1 or len(name) > 80:
            raise InvalidName(f"Invalid Name: '{name}'")
        forbidden = set(' <>{}[]?*"#%\\^|~`$&,;:/')
        forbidden.update(chr(code) for code in range(32))
        forbidden.update(chr(code) for code in range(127, 160))
        # Reject the name if it shares any character with the forbidden set.
        if forbidden & set(name):
            raise InvalidName(f"Invalid Name: '{name}'")
260

261
    @staticmethod
    def _validate_state_machine_alias_name(name: CharacterRestrictedName) -> None:
        """Validate the length and character set of a state machine alias name."""
        if len(name) > 80:
            raise ValidationException(
                f"1 validation error detected: Value '{name}' at 'name' failed to satisfy constraint: "
                f"Member must have length less than or equal to 80"
            )
        if StepFunctionsProvider._ALIAS_NAME_REGEX.match(name) is None:
            # TODO: explore more error cases in which more than one validation error may occur which results
            #  in the counter below being greater than 1.
            raise ValidationException(
                f"1 validation error detected: Value '{name}' at 'name' failed to satisfy constraint: "
                f"Member must satisfy regular expression pattern: ^(?=.*[a-zA-Z_\\-\\.])[a-zA-Z0-9_\\-\\.]+$"
            )
276

277
    def _get_execution(self, context: RequestContext, execution_arn: Arn) -> Execution:
        """Look up an execution by ARN, raising ExecutionDoesNotExist when absent."""
        store = self.get_store(context)
        execution: Optional[Execution] = store.executions.get(execution_arn)
        if not execution:
            raise ExecutionDoesNotExist(f"Execution Does Not Exist: '{execution_arn}'")
        return execution
282

283
    def _get_executions(
        self,
        context: RequestContext,
        execution_status: Optional[ExecutionStatus] = None,
    ):
        """Return all executions in the store, optionally filtered by status."""
        store = self.get_store(context)
        executions: list[Execution] = list(store.executions.values())
        if execution_status:
            executions = [
                candidate
                for candidate in store.executions.values()
                if candidate.exec_status == execution_status
            ]
        return executions
298

299
    def _get_activity(self, context: RequestContext, activity_arn: Arn) -> Activity:
        """Look up an activity by ARN, raising ActivityDoesNotExist when absent."""
        activities = self.get_store(context).activities
        activity: Optional[Activity] = activities.get(activity_arn, None)
        if activity is None:
            raise ActivityDoesNotExist(f"Activity Does Not Exist: '{activity_arn}'")
        return activity
306

307
    def _idempotent_revision(
        self,
        context: RequestContext,
        name: str,
        definition: Definition,
        state_machine_type: StateMachineType,
        logging_configuration: LoggingConfiguration,
        tracing_configuration: TracingConfiguration,
    ) -> Optional[StateMachineRevision]:
        """Return an existing revision matching every idempotency-relevant field, if any.

        CreateStateMachine's idempotency check is based on the state machine name,
        definition, type, LoggingConfiguration and TracingConfiguration. If a following
        request has a different roleArn or tags, Step Functions will ignore these
        differences and treat it as an idempotent request of the previous. In this
        case, roleArn and tags will not be updated, even if they are different.
        """
        for candidate in list(self.get_store(context).state_machines.values()):
            if not isinstance(candidate, StateMachineRevision):
                continue
            if (
                candidate.name == name
                and candidate.definition == definition
                and candidate.sm_type == state_machine_type
                and candidate.logging_config == logging_configuration
                and candidate.tracing_config == tracing_configuration
            ):
                return candidate
        return None
338

339
    def _idempotent_start_execution(
        self,
        execution: Optional[Execution],
        state_machine: StateMachineInstance,
        name: Name,
        input_data: SensitiveData,
    ) -> Optional[Execution]:
        """Return the running execution if this request is an idempotent repeat.

        StartExecution is idempotent for STANDARD workflows. For a STANDARD workflow,
        if you call StartExecution with the same name and input as a running execution,
        the call succeeds and return the same response as the original request.
        If the execution is closed or if the input is different, it returns a 400
        ExecutionAlreadyExists error. You can reuse names after 90 days.
        """
        if not execution:
            return None

        is_idempotent_repeat = (
            name == execution.name
            and input_data == execution.input_data
            and execution.exec_status == ExecutionStatus.RUNNING
            and state_machine.sm_type == StateMachineType.STANDARD
        )
        if is_idempotent_repeat:
            return execution

        raise CommonServiceException(
            code="ExecutionAlreadyExists",
            message=f"Execution Already Exists: '{execution.exec_arn}'",
        )
368

369
    def _revision_by_name(
        self, context: RequestContext, name: str
    ) -> Optional[StateMachineInstance]:
        """Return the state machine revision with the given name, or None if absent."""
        candidates = list(self.get_store(context).state_machines.values())
        return next(
            (
                candidate
                for candidate in candidates
                if isinstance(candidate, StateMachineRevision) and candidate.name == name
            ),
            None,
        )
379

380
    @staticmethod
    def _validate_definition(definition: str, static_analysers: list[StaticAnalyser]) -> None:
        """Run the given static analysers over a state machine definition.

        Raises:
            InvalidDefinition: if any analyser rejects the definition. Parser
                errors are surfaced via their repr; any other failure is
                reported with the exception name, args, and the definition.
        """
        try:
            for static_analyser in static_analysers:
                static_analyser.analyse(definition)
        except ASLParserException as asl_parser_exception:
            invalid_definition = InvalidDefinition()
            invalid_definition.message = repr(asl_parser_exception)
            # Chain explicitly so the parser error stays visible as the root
            # cause in tracebacks instead of only as implicit context.
            raise invalid_definition from asl_parser_exception
        except Exception as exception:
            exception_name = exception.__class__.__name__
            exception_args = list(exception.args)
            invalid_definition = InvalidDefinition()
            invalid_definition.message = (
                f"Error={exception_name} Args={exception_args} in definition '{definition}'."
            )
            raise invalid_definition from exception
397

398
    @staticmethod
    def _sanitise_logging_configuration(
        logging_configuration: LoggingConfiguration,
    ) -> None:
        """Validate and normalise a logging configuration in place.

        Ensures at most one log destination is configured, that non-OFF levels
        declare a destination, and fills in the defaults for 'level' (OFF) and
        'includeExecutionData' (False).
        """
        level = logging_configuration.get("level")
        destinations = logging_configuration.get("destinations")

        if destinations is not None and len(destinations) > 1:
            raise InvalidLoggingConfiguration(
                "Invalid Logging Configuration: Must specify exactly one Log Destination."
            )

        # A LogLevel that is not OFF, should have a destination.
        if level is not None and level != LogLevel.OFF and not destinations:
            raise InvalidLoggingConfiguration(
                "Invalid Logging Configuration: Must specify exactly one Log Destination."
            )

        # Update the configuration object with the defaults.
        logging_configuration["level"] = level or LogLevel.OFF
        logging_configuration["includeExecutionData"] = logging_configuration.get(
            "includeExecutionData", False
        )
425

426
    def create_state_machine(
        self, context: RequestContext, request: CreateStateMachineInput, **kwargs
    ) -> CreateStateMachineOutput:
        """Create a state machine revision, optionally publishing its first version.

        The call is idempotent: a repeat request matching on name, definition,
        type, logging and tracing configuration returns the existing revision.

        Raises:
            ValidationException: if 'versionDescription' is set without 'publish'.
            StateMachineAlreadyExists: if a different revision already uses the name.
            InvalidLoggingConfiguration / InvalidDefinition: on invalid inputs.
        """
        if not request.get("publish", False) and request.get("versionDescription"):
            raise ValidationException("Version description can only be set when publish is true")

        # Extract parameters and set defaults.
        state_machine_name = request["name"]
        state_machine_role_arn = request["roleArn"]
        state_machine_definition = request["definition"]
        state_machine_type = request.get("type") or StateMachineType.STANDARD
        state_machine_tracing_configuration = request.get("tracingConfiguration")
        state_machine_tags = request.get("tags")
        state_machine_logging_configuration = request.get(
            "loggingConfiguration", LoggingConfiguration()
        )
        # Normalises the logging configuration in place (defaults, validation).
        self._sanitise_logging_configuration(
            logging_configuration=state_machine_logging_configuration
        )

        # CreateStateMachine is an idempotent API. Subsequent requests won’t create a duplicate resource if it was
        # already created.
        idem_state_machine: Optional[StateMachineRevision] = self._idempotent_revision(
            context=context,
            name=state_machine_name,
            definition=state_machine_definition,
            state_machine_type=state_machine_type,
            logging_configuration=state_machine_logging_configuration,
            tracing_configuration=state_machine_tracing_configuration,
        )
        if idem_state_machine is not None:
            return CreateStateMachineOutput(
                stateMachineArn=idem_state_machine.arn,
                creationDate=idem_state_machine.create_date,
            )

        # Assert this state machine name is unique.
        state_machine_with_name: Optional[StateMachineRevision] = self._revision_by_name(
            context=context, name=state_machine_name
        )
        if state_machine_with_name is not None:
            raise StateMachineAlreadyExists(
                f"State Machine Already Exists: '{state_machine_with_name.arn}'"
            )

        # Compute the state machine's Arn.
        state_machine_arn = stepfunctions_state_machine_arn(
            name=state_machine_name,
            account_id=context.account_id,
            region_name=context.region,
        )
        state_machines = self.get_store(context).state_machines

        # Reduce the logging configuration to a usable cloud watch representation, and validate the destinations
        # if any were given.
        cloud_watch_logging_configuration = (
            CloudWatchLoggingConfiguration.from_logging_configuration(
                state_machine_arn=state_machine_arn,
                logging_configuration=state_machine_logging_configuration,
            )
        )
        if cloud_watch_logging_configuration is not None:
            cloud_watch_logging_configuration.validate()

        # Run static analysers on the definition given.
        if state_machine_type == StateMachineType.EXPRESS:
            StepFunctionsProvider._validate_definition(
                definition=state_machine_definition,
                static_analysers=[ExpressStaticAnalyser()],
            )
        else:
            StepFunctionsProvider._validate_definition(
                definition=state_machine_definition, static_analysers=[StaticAnalyser()]
            )

        # Create the state machine and add it to the store.
        state_machine = StateMachineRevision(
            name=state_machine_name,
            arn=state_machine_arn,
            role_arn=state_machine_role_arn,
            definition=state_machine_definition,
            sm_type=state_machine_type,
            logging_config=state_machine_logging_configuration,
            cloud_watch_logging_configuration=cloud_watch_logging_configuration,
            tracing_config=state_machine_tracing_configuration,
            tags=state_machine_tags,
        )
        state_machines[state_machine_arn] = state_machine

        create_output = CreateStateMachineOutput(
            stateMachineArn=state_machine.arn, creationDate=state_machine.create_date
        )

        # Create the first version if the 'publish' flag is used.
        if request.get("publish", False):
            version_description = request.get("versionDescription")
            state_machine_version = state_machine.create_version(description=version_description)
            if state_machine_version is not None:
                state_machine_version_arn = state_machine_version.arn
                state_machines[state_machine_version_arn] = state_machine_version
                create_output["stateMachineVersionArn"] = state_machine_version_arn

        # Run static analyser on definition and collect usage metrics
        UsageMetricsStaticAnalyser.process(state_machine_definition)

        return create_output
532

533
    def _validate_state_machine_alias_routing_configuration(
        self, context: RequestContext, routing_configuration_list: RoutingConfigurationList
    ) -> None:
        """Validate an alias routing configuration against AWS's constraints.

        Checks, in order: list length (1-2 entries), distinct version ARNs,
        per-entry weight range (0-100), weights summing to 100, that every ARN
        refers to a known state machine version, and that all versions belong
        to the same state machine revision. Error messages replicate AWS's
        exact serialization formats.
        """
        # TODO: to match AWS's approach best validation exceptions could be
        #  built in a process decoupled from the provider.

        routing_configuration_list_len = len(routing_configuration_list)
        if not (1 <= routing_configuration_list_len <= 2):
            # Replicate the object string dump format:
            # [RoutingConfigurationListItem(stateMachineVersionArn=arn_no_quotes, weight=int), ...]
            routing_configuration_serialization_parts = []
            for routing_configuration in routing_configuration_list:
                routing_configuration_serialization_parts.append(
                    "".join(
                        [
                            "RoutingConfigurationListItem(stateMachineVersionArn=",
                            routing_configuration["stateMachineVersionArn"],
                            ", weight=",
                            str(routing_configuration["weight"]),
                            ")",
                        ]
                    )
                )
            routing_configuration_serialization_list = (
                f"[{', '.join(routing_configuration_serialization_parts)}]"
            )
            raise ValidationException(
                f"1 validation error detected: Value '{routing_configuration_serialization_list}' "
                "at 'routingConfiguration' failed to "
                "satisfy constraint: Member must have length less than or equal to 2"
            )

        routing_configuration_arn_list = [
            routing_configuration["stateMachineVersionArn"]
            for routing_configuration in routing_configuration_list
        ]
        # Duplicate version ARNs are not allowed.
        if len(set(routing_configuration_arn_list)) < routing_configuration_list_len:
            arn_list_string = f"[{', '.join(routing_configuration_arn_list)}]"
            raise ValidationException(
                "Routing configuration must contain distinct state machine version ARNs. "
                f"Received: {arn_list_string}"
            )

        routing_weights = [
            routing_configuration["weight"] for routing_configuration in routing_configuration_list
        ]
        for i, weight in enumerate(routing_weights):
            # TODO: check for weight type.
            if weight < 0:
                raise ValidationException(
                    f"Invalid value for parameter routingConfiguration[{i + 1}].weight, value: {weight}, valid min value: 0"
                )
            if weight > 100:
                raise ValidationException(
                    f"1 validation error detected: Value '{weight}' at 'routingConfiguration.{i + 1}.member.weight' "
                    "failed to satisfy constraint: Member must have value less than or equal to 100"
                )
        routing_weights_sum = sum(routing_weights)
        if not routing_weights_sum == 100:
            raise ValidationException(
                f"Sum of routing configuration weights must equal 100. Received: {json.dumps(routing_weights)}"
            )

        store = self.get_store(context=context)
        state_machines = store.state_machines

        # All routed versions must belong to the same base state machine; the
        # first entry's base ARN is the reference the rest are compared against.
        first_routing_qualified_arn = routing_configuration_arn_list[0]
        shared_state_machine_revision_arn = self._get_state_machine_arn_from_qualified_arn(
            qualified_arn=first_routing_qualified_arn
        )
        for routing_configuration_arn in routing_configuration_arn_list:
            maybe_state_machine_version = state_machines.get(routing_configuration_arn)
            if not isinstance(maybe_state_machine_version, StateMachineVersion):
                arn_list_string = f"[{', '.join(routing_configuration_arn_list)}]"
                raise ValidationException(
                    f"Routing configuration must contain state machine version ARNs. Received: {arn_list_string}"
                )
            state_machine_revision_arn = self._get_state_machine_arn_from_qualified_arn(
                qualified_arn=routing_configuration_arn
            )
            if state_machine_revision_arn != shared_state_machine_revision_arn:
                # NOTE(review): placeholder message; the AWS-parity error text
                # for cross-revision routing has not been determined yet.
                raise ValidationException("TODO")
615

616
    @staticmethod
    def _get_state_machine_arn_from_qualified_arn(qualified_arn: Arn) -> Arn:
        """Strip the trailing qualifier (version or alias) from a qualified ARN.

        NOTE(review): assumes the input contains at least one ':'; for a
        colon-free string rfind returns -1 and the last character is dropped.
        """
        return qualified_arn[: qualified_arn.rfind(":")]
621

622
    def create_state_machine_alias(
        self,
        context: RequestContext,
        name: CharacterRestrictedName,
        routing_configuration: RoutingConfigurationList,
        description: AliasDescription = None,
        **kwargs,
    ) -> CreateStateMachineAliasOutput:
        """Create an alias over one or two versions of a state machine.

        The call is idempotent: repeating it with an identical configuration
        returns the existing alias, while a same-named alias with a different
        configuration raises ConflictException.

        Raises:
            ValidationException: on an invalid name or routing configuration.
            ConflictException: if a different alias with the same name exists.
        """
        # Validate the inputs.
        self._validate_state_machine_alias_name(name=name)
        self._validate_state_machine_alias_routing_configuration(
            context=context, routing_configuration_list=routing_configuration
        )

        # Determine the state machine arn this alias maps to,
        # do so unsafely as validation already took place before initialisation.
        first_routing_qualified_arn = routing_configuration[0]["stateMachineVersionArn"]
        state_machine_revision_arn = self._get_state_machine_arn_from_qualified_arn(
            qualified_arn=first_routing_qualified_arn
        )
        alias = Alias(
            state_machine_arn=state_machine_revision_arn,
            name=name,
            description=description,
            routing_configuration_list=routing_configuration,
        )
        state_machine_alias_arn = alias.state_machine_alias_arn

        store = self.get_store(context=context)

        aliases = store.aliases
        if maybe_idempotent_alias := aliases.get(state_machine_alias_arn):
            if alias.is_idempotent(maybe_idempotent_alias):
                return CreateStateMachineAliasOutput(
                    stateMachineAliasArn=state_machine_alias_arn, creationDate=alias.create_date
                )
            else:
                # CreateStateMachineAlias is an idempotent API. Idempotent requests won’t create duplicate resources.
                raise ConflictException(
                    "Failed to create alias because an alias with the same name and a "
                    "different routing configuration already exists."
                )
        aliases[state_machine_alias_arn] = alias

        # Register the alias with its owning revision so it can be listed/cleaned up.
        state_machine_revision = store.state_machines.get(state_machine_revision_arn)
        if not isinstance(state_machine_revision, StateMachineRevision):
            # The state machine was deleted but not the version referenced in this context.
            raise RuntimeError(f"No state machine revision for arn '{state_machine_revision_arn}'")
        state_machine_revision.aliases.add(alias)

        return CreateStateMachineAliasOutput(
            stateMachineAliasArn=state_machine_alias_arn, creationDate=alias.create_date
        )
675

676
    def describe_state_machine(
        self,
        context: RequestContext,
        state_machine_arn: Arn,
        included_data: IncludedData = None,
        **kwargs,
    ) -> DescribeStateMachineOutput:
        """Return the description of the state machine registered under the given ARN.

        Raises the state-machine-does-not-exist error (via the shared raise
        helper) when the ARN is unknown in this account/region store.
        """
        self._validate_state_machine_arn(state_machine_arn)
        target = self.get_store(context).state_machines.get(state_machine_arn)
        if target is None:
            self._raise_state_machine_does_not_exist(state_machine_arn)
        return target.describe()
    def describe_state_machine_alias(
        self, context: RequestContext, state_machine_alias_arn: Arn, **kwargs
    ) -> DescribeStateMachineAliasOutput:
        """Return the description of an existing state machine alias."""
        self._validate_state_machine_alias_arn(state_machine_alias_arn=state_machine_alias_arn)
        store = self.get_store(context=context)
        maybe_alias: Optional[Alias] = store.aliases.get(state_machine_alias_arn)
        if maybe_alias is None:
            # TODO: assemble the correct exception
            raise ValidationException()
        return maybe_alias.to_description()
    def describe_state_machine_for_execution(
        self,
        context: RequestContext,
        execution_arn: Arn,
        included_data: IncludedData = None,
        **kwargs,
    ) -> DescribeStateMachineForExecutionOutput:
        """Describe the state machine that was used by the given execution."""
        self._validate_state_machine_execution_arn(execution_arn)
        target_execution: Execution = self._get_execution(
            context=context, execution_arn=execution_arn
        )
        return target_execution.to_describe_state_machine_for_execution_output()
    def send_task_heartbeat(
        self, context: RequestContext, task_token: TaskToken, **kwargs
    ) -> SendTaskHeartbeatOutput:
        """Record a heartbeat for the open callback task identified by the token.

        Scans all RUNNING executions for a callback registered under the token.
        Raises TaskTimedOut when the callback consumer has already timed out,
        TaskDoesNotExist for other consumer errors, and InvalidToken when no
        running execution owns the token.
        """
        running_executions: list[Execution] = self._get_executions(context, ExecutionStatus.RUNNING)
        for execution in running_executions:
            try:
                if execution.exec_worker.env.callback_pool_manager.heartbeat(
                    callback_id=task_token
                ):
                    return SendTaskHeartbeatOutput()
            except CallbackNotifyConsumerError as consumer_error:
                if isinstance(consumer_error, CallbackConsumerTimeout):
                    raise TaskTimedOut()
                else:
                    raise TaskDoesNotExist()
        # Consistency fix: carry the same error message as send_task_success and
        # send_task_failure instead of raising a message-less InvalidToken().
        raise InvalidToken("Invalid token")
    def send_task_success(
        self,
        context: RequestContext,
        task_token: TaskToken,
        output: SensitiveData,
        **kwargs,
    ) -> SendTaskSuccessOutput:
        """Deliver a success outcome to the open callback task matching the token.

        Raises TaskTimedOut when the callback consumer has already timed out,
        TaskDoesNotExist for other consumer errors, and InvalidToken when no
        running execution owns the token.
        """
        success_outcome = CallbackOutcomeSuccess(callback_id=task_token, output=output)
        for execution in self._get_executions(context, ExecutionStatus.RUNNING):
            callback_manager = execution.exec_worker.env.callback_pool_manager
            try:
                delivered = callback_manager.notify(
                    callback_id=task_token, outcome=success_outcome
                )
            except CallbackNotifyConsumerError as consumer_error:
                if isinstance(consumer_error, CallbackConsumerTimeout):
                    raise TaskTimedOut()
                raise TaskDoesNotExist()
            if delivered:
                return SendTaskSuccessOutput()
        raise InvalidToken("Invalid token")
    def send_task_failure(
        self,
        context: RequestContext,
        task_token: TaskToken,
        error: SensitiveError = None,
        cause: SensitiveCause = None,
        **kwargs,
    ) -> SendTaskFailureOutput:
        """Deliver a failure outcome to the open callback task matching the token.

        Raises TaskTimedOut when the callback consumer has already timed out,
        TaskDoesNotExist for other consumer errors, and InvalidToken when no
        running execution owns the token.
        """
        outcome = CallbackOutcomeFailure(callback_id=task_token, error=error, cause=cause)
        # Consistency fix: scan only RUNNING executions, matching the sibling
        # send_task_success/send_task_heartbeat handlers, instead of iterating
        # every execution ever recorded in the store.
        running_executions: list[Execution] = self._get_executions(context, ExecutionStatus.RUNNING)
        for execution in running_executions:
            try:
                if execution.exec_worker.env.callback_pool_manager.notify(
                    callback_id=task_token, outcome=outcome
                ):
                    return SendTaskFailureOutput()
            except CallbackNotifyConsumerError as consumer_error:
                if isinstance(consumer_error, CallbackConsumerTimeout):
                    raise TaskTimedOut()
                else:
                    raise TaskDoesNotExist()
        raise InvalidToken("Invalid token")
    def start_execution(
        self,
        context: RequestContext,
        state_machine_arn: Arn,
        name: Name = None,
        input: SensitiveData = None,
        trace_header: TraceHeader = None,
        **kwargs,
    ) -> StartExecutionOutput:
        """Start an asynchronous execution of a state machine, version, or alias.

        The incoming ARN may carry a ``#<test-case-name>`` suffix that selects a
        mocked test case for this run. When the ARN targets an alias, a concrete
        version is sampled from the alias' routing configuration. Starting with
        a name that already exists is idempotent when the input also matches.

        Raises InvalidExecutionInput when ``input`` is not valid JSON, and
        InvalidName when a mock test case name cannot be resolved.
        """
        self._validate_state_machine_arn(state_machine_arn)

        # Split off an optional '#<test-case-name>' suffix used to select a
        # mocked test case for this execution.
        state_machine_arn_parts = state_machine_arn.split("#")
        state_machine_arn = state_machine_arn_parts[0]
        mock_test_case_name = (
            state_machine_arn_parts[1] if len(state_machine_arn_parts) == 2 else None
        )

        store = self.get_store(context=context)

        # If the ARN names an alias, sample a concrete version ARN from its
        # routing configuration; otherwise resolve the ARN directly.
        alias: Optional[Alias] = store.aliases.get(state_machine_arn)
        alias_sample_state_machine_version_arn = alias.sample() if alias is not None else None
        unsafe_state_machine: Optional[StateMachineInstance] = store.state_machines.get(
            alias_sample_state_machine_version_arn or state_machine_arn
        )
        if not unsafe_state_machine:
            self._raise_state_machine_does_not_exist(state_machine_arn)

        # Update event change parameters about the state machine and should not affect those about this execution.
        state_machine_clone = copy.deepcopy(unsafe_state_machine)

        # Default to an empty JSON object when no input is supplied.
        if input is None:
            input_data = dict()
        else:
            try:
                input_data = json.loads(input)
            except Exception as ex:
                raise InvalidExecutionInput(str(ex))  # TODO: report parsing error like AWS.

        # Execution ARNs are always derived from the revision ARN, even when a
        # version was resolved above.
        normalised_state_machine_arn = (
            state_machine_clone.source_arn
            if isinstance(state_machine_clone, StateMachineVersion)
            else state_machine_clone.arn
        )
        exec_name = name or long_uid()  # TODO: validate name format
        if state_machine_clone.sm_type == StateMachineType.STANDARD:
            exec_arn = stepfunctions_standard_execution_arn(normalised_state_machine_arn, exec_name)
        else:
            # Exhaustive check on STANDARD and EXPRESS type, validated on creation.
            exec_arn = stepfunctions_express_execution_arn(normalised_state_machine_arn, exec_name)

        if execution := store.executions.get(exec_arn):
            # Return already running execution if name and input match
            existing_execution = self._idempotent_start_execution(
                execution=execution,
                state_machine=state_machine_clone,
                name=name,
                input_data=input_data,
            )

            if existing_execution:
                return existing_execution.to_start_output()

        # Create the execution logging session, if logging is configured.
        cloud_watch_logging_session = None
        if state_machine_clone.cloud_watch_logging_configuration is not None:
            cloud_watch_logging_session = CloudWatchLoggingSession(
                execution_arn=exec_arn,
                configuration=state_machine_clone.cloud_watch_logging_configuration,
            )

        # Resolve the mocked test case, if one was requested via the ARN suffix.
        mock_test_case: Optional[MockTestCase] = None
        if mock_test_case_name is not None:
            state_machine_name = state_machine_clone.name
            mock_test_case = load_mock_test_case_for(
                state_machine_name=state_machine_name, test_case_name=mock_test_case_name
            )
            if mock_test_case is None:
                raise InvalidName(
                    f"Invalid mock test case name '{mock_test_case_name}' "
                    f"for state machine '{state_machine_name}'"
                )

        execution = Execution(
            name=exec_name,
            sm_type=state_machine_clone.sm_type,
            role_arn=state_machine_clone.role_arn,
            exec_arn=exec_arn,
            account_id=context.account_id,
            region_name=context.region,
            state_machine=state_machine_clone,
            state_machine_alias_arn=alias.state_machine_alias_arn if alias is not None else None,
            start_date=datetime.datetime.now(tz=datetime.timezone.utc),
            cloud_watch_logging_session=cloud_watch_logging_session,
            input_data=input_data,
            trace_header=trace_header,
            activity_store=self.get_store(context).activities,
            mock_test_case=mock_test_case,
        )

        # Register before starting so the execution is immediately visible to
        # other API calls.
        store.executions[exec_arn] = execution

        execution.start()
        return execution.to_start_output()
    def start_sync_execution(
        self,
        context: RequestContext,
        state_machine_arn: Arn,
        name: Name = None,
        input: SensitiveData = None,
        trace_header: TraceHeader = None,
        included_data: IncludedData = None,
        **kwargs,
    ) -> StartSyncExecutionOutput:
        """Start a synchronous execution of an EXPRESS state machine.

        STANDARD workflows are rejected via the shared type-not-supported raise
        helper. Raises InvalidExecutionInput when ``input`` is not valid JSON.
        """
        self._validate_state_machine_arn(state_machine_arn)
        unsafe_state_machine: Optional[StateMachineInstance] = self.get_store(
            context
        ).state_machines.get(state_machine_arn)
        if not unsafe_state_machine:
            self._raise_state_machine_does_not_exist(state_machine_arn)

        # Synchronous starts are only defined for EXPRESS workflows.
        if unsafe_state_machine.sm_type == StateMachineType.STANDARD:
            self._raise_state_machine_type_not_supported()

        # Update event change parameters about the state machine and should not affect those about this execution.
        state_machine_clone = copy.deepcopy(unsafe_state_machine)

        # Default to an empty JSON object when no input is supplied.
        if input is None:
            input_data = dict()
        else:
            try:
                input_data = json.loads(input)
            except Exception as ex:
                raise InvalidExecutionInput(str(ex))  # TODO: report parsing error like AWS.

        # Execution ARNs are always derived from the revision ARN, even when
        # the caller addressed a published version.
        normalised_state_machine_arn = (
            state_machine_clone.source_arn
            if isinstance(state_machine_clone, StateMachineVersion)
            else state_machine_clone.arn
        )
        exec_name = name or long_uid()  # TODO: validate name format
        exec_arn = stepfunctions_express_execution_arn(normalised_state_machine_arn, exec_name)

        # Unlike start_execution, a name clash is rejected outright.
        if exec_arn in self.get_store(context).executions:
            raise InvalidName()  # TODO

        # Create the execution logging session, if logging is configured.
        cloud_watch_logging_session = None
        if state_machine_clone.cloud_watch_logging_configuration is not None:
            cloud_watch_logging_session = CloudWatchLoggingSession(
                execution_arn=exec_arn,
                configuration=state_machine_clone.cloud_watch_logging_configuration,
            )

        execution = SyncExecution(
            name=exec_name,
            sm_type=state_machine_clone.sm_type,
            role_arn=state_machine_clone.role_arn,
            exec_arn=exec_arn,
            account_id=context.account_id,
            region_name=context.region,
            state_machine=state_machine_clone,
            start_date=datetime.datetime.now(tz=datetime.timezone.utc),
            cloud_watch_logging_session=cloud_watch_logging_session,
            input_data=input_data,
            trace_header=trace_header,
            activity_store=self.get_store(context).activities,
        )
        self.get_store(context).executions[exec_arn] = execution

        # SyncExecution.start blocks until the workflow completes, so the
        # output below reflects the finished run.
        execution.start()
        return execution.to_start_sync_execution_output()
    def describe_execution(
        self,
        context: RequestContext,
        execution_arn: Arn,
        included_data: IncludedData = None,
        **kwargs,
    ) -> DescribeExecutionOutput:
        """Return the description of a STANDARD execution.

        Non-STANDARD executions are rejected, as this action is only compatible
        with STANDARD workflows.
        """
        self._validate_state_machine_execution_arn(execution_arn)
        target_execution: Execution = self._get_execution(
            context=context, execution_arn=execution_arn
        )
        if target_execution.sm_type != StateMachineType.STANDARD:
            # Action only compatible with STANDARD workflows.
            self._raise_resource_type_not_in_context(resource_type=target_execution.sm_type)
        return target_execution.to_describe_output()
    @staticmethod
    def _list_execution_filter(
        ex: Execution, state_machine_arn: str, status_filter: Optional[str]
    ) -> bool:
        """Decide whether an execution belongs in a ListExecutions result.

        The requested ARN must match either the execution's state machine ARN
        or its version ARN; when a status filter is given, the execution status
        must equal it as well.
        """
        if state_machine_arn not in (ex.state_machine_arn, ex.state_machine_version_arn):
            return False
        return not status_filter or ex.exec_status == status_filter
    def list_executions(
        self,
        context: RequestContext,
        state_machine_arn: Arn = None,
        status_filter: ExecutionStatus = None,
        max_results: PageSize = None,
        next_token: ListExecutionsPageToken = None,
        map_run_arn: LongArn = None,
        redrive_filter: ExecutionRedriveFilter = None,
        **kwargs,
    ) -> ListExecutionsOutput:
        """List executions of a STANDARD state machine, newest first.

        Supports filtering by status and paging via ``next_token``; the
        ``map_run_arn`` and ``redrive_filter`` parameters are currently
        accepted but not used for filtering.
        """
        self._validate_state_machine_arn(state_machine_arn)
        assert_pagination_parameters_valid(
            max_results=max_results,
            next_token=next_token,
            next_token_length_limit=3096,
        )
        max_results = normalise_max_results(max_results)

        state_machine = self.get_store(context).state_machines.get(state_machine_arn)
        if state_machine is None:
            self._raise_state_machine_does_not_exist(state_machine_arn)

        # Listing executions is only supported for STANDARD workflows.
        if state_machine.sm_type != StateMachineType.STANDARD:
            self._raise_state_machine_type_not_supported()

        # TODO: add support for paging

        allowed_execution_status = [
            ExecutionStatus.SUCCEEDED,
            ExecutionStatus.TIMED_OUT,
            ExecutionStatus.PENDING_REDRIVE,
            ExecutionStatus.ABORTED,
            ExecutionStatus.FAILED,
            ExecutionStatus.RUNNING,
        ]

        validation_errors = []

        if status_filter and status_filter not in allowed_execution_status:
            validation_errors.append(
                f"Value '{status_filter}' at 'statusFilter' failed to satisfy constraint: Member must satisfy enum value set: [{', '.join(allowed_execution_status)}]"
            )

        # NOTE(review): this branch appears unreachable — a missing
        # state_machine_arn would already have failed the lookup above; confirm
        # whether the ARN checks should be moved after this validation block.
        if not state_machine_arn and not map_run_arn:
            validation_errors.append("Must provide a StateMachine ARN or MapRun ARN")

        if validation_errors:
            errors_message = "; ".join(validation_errors)
            message = f"{len(validation_errors)} validation {'errors' if len(validation_errors) > 1 else 'error'} detected: {errors_message}"
            raise CommonServiceException(message=message, code="ValidationException")

        # Keep executions that reference the requested state machine (by
        # revision or version ARN) and match the optional status filter.
        executions: ExecutionList = [
            execution.to_execution_list_item()
            for execution in self.get_store(context).executions.values()
            if self._list_execution_filter(
                execution,
                state_machine_arn=state_machine_arn,
                status_filter=status_filter,
            )
        ]

        # Most recent executions first.
        executions.sort(key=lambda item: item["startDate"], reverse=True)

        paginated_executions = PaginatedList(executions)
        page, token_for_next_page = paginated_executions.get_page(
            token_generator=lambda item: get_next_page_token_from_arn(item.get("executionArn")),
            page_size=max_results,
            next_token=next_token,
        )

        return ListExecutionsOutput(executions=page, nextToken=token_for_next_page)
    def list_state_machines(
        self,
        context: RequestContext,
        max_results: PageSize = None,
        next_token: PageToken = None,
        **kwargs,
    ) -> ListStateMachinesOutput:
        """List all state machine revisions in this store, sorted by name."""
        assert_pagination_parameters_valid(max_results, next_token)
        page_size = normalise_max_results(max_results)

        # Only top-level revisions are listed; published versions are excluded.
        revision_items: StateMachineList = sorted(
            (
                candidate.itemise()
                for candidate in self.get_store(context).state_machines.values()
                if isinstance(candidate, StateMachineRevision)
            ),
            key=lambda item: item["name"],
        )

        page, next_page_token = PaginatedList(revision_items).get_page(
            token_generator=lambda item: get_next_page_token_from_arn(item.get("stateMachineArn")),
            page_size=page_size,
            next_token=next_token,
        )
        return ListStateMachinesOutput(stateMachines=page, nextToken=next_page_token)
    def list_state_machine_aliases(
        self,
        context: RequestContext,
        state_machine_arn: Arn,
        next_token: PageToken = None,
        max_results: PageSize = None,
        **kwargs,
    ) -> ListStateMachineAliasesOutput:
        """List the aliases of a state machine revision, oldest first.

        Raises InvalidArn when the ARN does not name a revision, and
        InvalidToken when ``next_token`` matches none of the revision's aliases.
        """
        assert_pagination_parameters_valid(max_results, next_token)

        self._validate_state_machine_arn(state_machine_arn)
        state_machines = self.get_store(context).state_machines
        state_machine_revision = state_machines.get(state_machine_arn)
        if not isinstance(state_machine_revision, StateMachineRevision):
            raise InvalidArn(f"Invalid arn: {state_machine_arn}")

        state_machine_aliases: StateMachineAliasList = list()
        # A None token is trivially valid; otherwise it must match the
        # tokenized ARN of one of this revision's aliases.
        valid_token_found = next_token is None

        for alias in state_machine_revision.aliases:
            state_machine_aliases.append(alias.to_item())
            if alias.tokenized_state_machine_alias_arn == next_token:
                valid_token_found = True

        if not valid_token_found:
            raise InvalidToken("Invalid Token: 'Invalid token'")

        state_machine_aliases.sort(key=lambda item: item["creationDate"])

        paginated_list = PaginatedList(state_machine_aliases)

        # A zero/absent max_results falls back to the default page size of 100.
        paginated_aliases, next_token = paginated_list.get_page(
            token_generator=lambda item: get_next_page_token_from_arn(
                item.get("stateMachineAliasArn")
            ),
            next_token=next_token,
            page_size=100 if max_results == 0 or max_results is None else max_results,
        )

        return ListStateMachineAliasesOutput(
            stateMachineAliases=paginated_aliases, nextToken=next_token
        )
    def list_state_machine_versions(
        self,
        context: RequestContext,
        state_machine_arn: Arn,
        next_token: PageToken = None,
        max_results: PageSize = None,
        **kwargs,
    ) -> ListStateMachineVersionsOutput:
        """List the published versions of a state machine revision, newest first.

        Raises InvalidArn when the ARN does not name a revision.
        """
        self._validate_state_machine_arn(state_machine_arn)
        assert_pagination_parameters_valid(max_results, next_token)
        page_size = normalise_max_results(max_results)

        registered = self.get_store(context).state_machines
        revision = registered.get(state_machine_arn)
        if not isinstance(revision, StateMachineRevision):
            raise InvalidArn(f"Invalid arn: {state_machine_arn}")

        version_items = list()
        for version_arn in revision.versions.values():
            state_machine_version = registered[version_arn]
            if not isinstance(state_machine_version, StateMachineVersion):
                # The revision's version index points at a non-version entry:
                # internal store inconsistency.
                raise RuntimeError(
                    f"Expected {version_arn} to be a StateMachine Version, but got '{type(state_machine_version)}'."
                )
            version_items.append(state_machine_version.itemise())

        # Most recently published versions first.
        version_items.sort(key=lambda item: item["creationDate"], reverse=True)

        page, token_for_next_page = PaginatedList(version_items).get_page(
            token_generator=lambda item: get_next_page_token_from_arn(
                item.get("stateMachineVersionArn")
            ),
            page_size=page_size,
            next_token=next_token,
        )
        return ListStateMachineVersionsOutput(
            stateMachineVersions=page, nextToken=token_for_next_page
        )
    def get_execution_history(
        self,
        context: RequestContext,
        execution_arn: Arn,
        max_results: PageSize = None,
        reverse_order: ReverseOrder = None,
        next_token: PageToken = None,
        include_execution_data: IncludeExecutionDataGetExecutionHistory = None,
        **kwargs,
    ) -> GetExecutionHistoryOutput:
        """Return the event history of a STANDARD execution.

        Only the ``reverse_order`` flag is honoured here; paging, ordering, and
        the remaining parameters are not yet supported (TODO).
        """
        self._validate_state_machine_execution_arn(execution_arn)
        target: Execution = self._get_execution(context=context, execution_arn=execution_arn)

        # Action only compatible with STANDARD workflows.
        if target.sm_type != StateMachineType.STANDARD:
            self._raise_resource_type_not_in_context(resource_type=target.sm_type)

        history_output: GetExecutionHistoryOutput = target.to_history_output()
        if reverse_order:
            history_output["events"].reverse()
        return history_output
    def delete_state_machine(
        self, context: RequestContext, state_machine_arn: Arn, **kwargs
    ) -> DeleteStateMachineOutput:
        """Delete a state machine revision together with all its published versions.

        Deleting an ARN that does not resolve to a revision is a silent no-op.
        """
        # TODO: halt executions?
        self._validate_state_machine_arn(state_machine_arn)
        registered = self.get_store(context).state_machines
        revision = registered.get(state_machine_arn)
        if isinstance(revision, StateMachineRevision):
            registered.pop(state_machine_arn)
            # Drop every version published from this revision as well.
            for published_version_arn in revision.versions.values():
                registered.pop(published_version_arn, None)
        return DeleteStateMachineOutput()
    def delete_state_machine_alias(
        self, context: RequestContext, state_machine_alias_arn: Arn, **kwargs
    ) -> DeleteStateMachineAliasOutput:
        """Delete an alias and unlink it from the revisions that reference it.

        Deleting an unknown alias ARN is a silent no-op.
        """
        self._validate_state_machine_alias_arn(state_machine_alias_arn=state_machine_alias_arn)
        store = self.get_store(context=context)
        aliases = store.aliases
        if (alias := aliases.pop(state_machine_alias_arn, None)) is not None:
            state_machines = store.state_machines
            # Walk the alias' routing targets back to their owning revisions
            # and remove the alias from each revision's alias set.
            for routing_configuration in alias.get_routing_configuration_list():
                state_machine_version_arn = routing_configuration["stateMachineVersionArn"]
                if (
                    state_machine_version := state_machines.get(state_machine_version_arn)
                ) is None or not isinstance(state_machine_version, StateMachineVersion):
                    continue
                if (
                    state_machine_revision := state_machines.get(state_machine_version.source_arn)
                ) is None or not isinstance(state_machine_revision, StateMachineRevision):
                    continue
                state_machine_revision.aliases.discard(alias)
        # Bug fix: this endpoint previously returned DeleteStateMachineOutput,
        # the response type of DeleteStateMachine, not of this operation.
        return DeleteStateMachineAliasOutput()
    def delete_state_machine_version(
        self, context: RequestContext, state_machine_version_arn: LongArn, **kwargs
    ) -> DeleteStateMachineVersionOutput:
        """Delete a published state machine version.

        The call is a no-op for ARNs that do not resolve to a version. It fails
        with ConflictException when any alias of the owning revision still
        routes to this version.
        """
        self._validate_state_machine_arn(state_machine_version_arn)
        state_machines = self.get_store(context).state_machines

        version = state_machines.get(state_machine_version_arn)
        if not version or not isinstance(version, StateMachineVersion):
            # Unknown or non-version ARN: deletion is idempotent.
            return DeleteStateMachineVersionOutput()

        revision = state_machines.get(version.source_arn)
        if revision and isinstance(revision, StateMachineRevision):
            # A version still routed to by an alias must not be deleted.
            referencing_alias_names: list[str] = [
                alias.name
                for alias in revision.aliases
                if alias.is_router_for(state_machine_version_arn=state_machine_version_arn)
            ]
            if referencing_alias_names:
                referencing_alias_names_list_body = ", ".join(referencing_alias_names)
                raise ConflictException(
                    "Version to be deleted must not be referenced by an alias. "
                    f"Current list of aliases referencing this version: [{referencing_alias_names_list_body}]"
                )
            revision.delete_version(state_machine_version_arn)

        state_machines.pop(version.arn, None)
        return DeleteStateMachineVersionOutput()
    def stop_execution(
        self,
        context: RequestContext,
        execution_arn: Arn,
        error: SensitiveError = None,
        cause: SensitiveCause = None,
        **kwargs,
    ) -> StopExecutionOutput:
        """Stop a STANDARD execution and return the stop timestamp."""
        self._validate_state_machine_execution_arn(execution_arn)
        target: Execution = self._get_execution(context=context, execution_arn=execution_arn)

        # Action only compatible with STANDARD workflows.
        if target.sm_type != StateMachineType.STANDARD:
            self._raise_resource_type_not_in_context(resource_type=target.sm_type)

        halt_timestamp = datetime.datetime.now(tz=datetime.timezone.utc)
        target.stop(stop_date=halt_timestamp, cause=cause, error=error)
        return StopExecutionOutput(stopDate=halt_timestamp)
    def update_state_machine(
        self,
        context: RequestContext,
        state_machine_arn: Arn,
        definition: Definition = None,
        role_arn: Arn = None,
        logging_configuration: LoggingConfiguration = None,
        tracing_configuration: TracingConfiguration = None,
        publish: Publish = None,
        version_description: VersionDescription = None,
        encryption_configuration: EncryptionConfiguration = None,
        **kwargs,
    ) -> UpdateStateMachineOutput:
        """Create a new revision of a state machine and optionally publish it.

        At least one of ``definition``, ``role_arn``, or
        ``logging_configuration`` must be supplied. When ``publish`` is set,
        the updated revision is published as a version; if the revision is
        unchanged, the already-published version for it is returned instead.
        """
        self._validate_state_machine_arn(state_machine_arn)
        state_machines = self.get_store(context).state_machines

        state_machine = state_machines.get(state_machine_arn)
        if not isinstance(state_machine, StateMachineRevision):
            self._raise_state_machine_does_not_exist(state_machine_arn)

        # TODO: Add logic to handle metrics for when SFN definitions update
        if not any([definition, role_arn, logging_configuration]):
            raise MissingRequiredParameter(
                "Either the definition, the role ARN, the LoggingConfiguration, "
                "or the TracingConfiguration must be specified"
            )

        if definition is not None:
            self._validate_definition(definition=definition, static_analysers=[StaticAnalyser()])

        if logging_configuration is not None:
            self._sanitise_logging_configuration(logging_configuration=logging_configuration)

        revision_id = state_machine.create_revision(
            definition=definition,
            role_arn=role_arn,
            logging_configuration=logging_configuration,
        )

        version_arn = None
        if publish:
            version = state_machine.create_version(description=version_description)
            if version is not None:
                # A new version was created: register it in the store.
                version_arn = version.arn
                state_machines[version_arn] = version
            else:
                # No new version (revision unchanged): reuse the version
                # already published for the effective revision id.
                target_revision_id = revision_id or state_machine.revision_id
                version_arn = state_machine.versions[target_revision_id]

        update_output = UpdateStateMachineOutput(
            updateDate=datetime.datetime.now(tz=datetime.timezone.utc)
        )
        # Optional response fields are only set when they carry a value.
        if revision_id is not None:
            update_output["revisionId"] = revision_id
        if version_arn is not None:
            update_output["stateMachineVersionArn"] = version_arn
        return update_output
    def update_state_machine_alias(
        self,
        context: RequestContext,
        state_machine_alias_arn: Arn,
        description: AliasDescription = None,
        routing_configuration: RoutingConfigurationList = None,
        **kwargs,
    ) -> UpdateStateMachineAliasOutput:
        """Update the description and/or routing configuration of an alias.

        At least one of ``description`` or ``routing_configuration`` must be
        supplied. Raises ResourceNotFound for an unknown alias ARN.
        """
        self._validate_state_machine_alias_arn(state_machine_alias_arn=state_machine_alias_arn)
        if not (description or routing_configuration):
            raise MissingRequiredParameter(
                "Either the description or the RoutingConfiguration must be specified"
            )
        if routing_configuration is not None:
            self._validate_state_machine_alias_routing_configuration(
                context=context, routing_configuration_list=routing_configuration
            )
        target_alias = self.get_store(context=context).aliases.get(state_machine_alias_arn)
        if target_alias is None:
            raise ResourceNotFound("Request references a resource that does not exist.")

        target_alias.update(
            description=description, routing_configuration_list=routing_configuration
        )
        return UpdateStateMachineAliasOutput(updateDate=target_alias.update_date)
    def publish_state_machine_version(
        self,
        context: RequestContext,
        state_machine_arn: Arn,
        revision_id: RevisionId = None,
        description: VersionDescription = None,
        **kwargs,
    ) -> PublishStateMachineVersionOutput:
        """Publish the current revision of a state machine as a new version.

        If the targeted revision already has a published version, that version
        is returned instead of creating a duplicate. Raises ConflictException
        when revision_id does not match the state machine's current revision.
        """
        self._validate_state_machine_arn(state_machine_arn)
        state_machines = self.get_store(context).state_machines

        revision = state_machines.get(state_machine_arn)
        if not isinstance(revision, StateMachineRevision):
            self._raise_state_machine_does_not_exist(state_machine_arn)

        if revision_id is not None and revision.revision_id != revision_id:
            raise ConflictException(
                f"Failed to publish the State Machine version for revision {revision_id}. "
                f"The current State Machine revision is {revision.revision_id}."
            )

        version = revision.create_version(description=description)
        if version is None:
            # A version already exists for this revision: resolve and reuse it.
            lookup_revision_id = revision_id or revision.revision_id
            existing_version_arn = revision.versions.get(lookup_revision_id)
            version = state_machines[existing_version_arn]
        else:
            state_machines[version.arn] = version

        return PublishStateMachineVersionOutput(
            creationDate=version.create_date,
            stateMachineVersionArn=version.arn,
        )

1382
    def tag_resource(
        self, context: RequestContext, resource_arn: Arn, tags: TagList, **kwargs
    ) -> TagResourceOutput:
        """Attach the given tags to a state machine revision."""
        # TODO: add tagging for activities.
        target = self.get_store(context).state_machines.get(resource_arn)
        # Only state machine revisions are taggable; versions/aliases are not.
        if not isinstance(target, StateMachineRevision):
            raise ResourceNotFound(f"Resource not found: '{resource_arn}'")
        target.tag_manager.add_all(tags)
        return TagResourceOutput()
1393

1394
    def untag_resource(
        self, context: RequestContext, resource_arn: Arn, tag_keys: TagKeyList, **kwargs
    ) -> UntagResourceOutput:
        """Remove the tags with the given keys from a state machine revision."""
        # TODO: add untagging for activities.
        target = self.get_store(context).state_machines.get(resource_arn)
        # Only state machine revisions carry tags; anything else is not found.
        if not isinstance(target, StateMachineRevision):
            raise ResourceNotFound(f"Resource not found: '{resource_arn}'")
        target.tag_manager.remove_all(tag_keys)
        return UntagResourceOutput()
1405

1406
    def list_tags_for_resource(
        self, context: RequestContext, resource_arn: Arn, **kwargs
    ) -> ListTagsForResourceOutput:
        """Return all tags attached to a state machine revision."""
        # TODO: add tag listing for activities.
        target = self.get_store(context).state_machines.get(resource_arn)
        if not isinstance(target, StateMachineRevision):
            raise ResourceNotFound(f"Resource not found: '{resource_arn}'")
        tag_list: TagList = target.tag_manager.to_tag_list()
        return ListTagsForResourceOutput(tags=tag_list)
1417

1418
    def describe_map_run(
        self, context: RequestContext, map_run_arn: LongArn, **kwargs
    ) -> DescribeMapRunOutput:
        """Describe the Map Run identified by map_run_arn.

        Map Run records live in each execution worker's record pool, so all
        known executions are scanned; raises ResourceNotFound on no match.
        """
        for execution in self.get_store(context).executions.values():
            pool_manager = execution.exec_worker.env.map_run_record_pool_manager
            map_run_record: Optional[MapRunRecord] = pool_manager.get(map_run_arn)
            if map_run_record is not None:
                return map_run_record.describe()
        raise ResourceNotFound()
1429

1430
    def list_map_runs(
        self,
        context: RequestContext,
        execution_arn: Arn,
        max_results: PageSize = None,
        next_token: PageToken = None,
        **kwargs,
    ) -> ListMapRunsOutput:
        """List all Map Runs recorded for the given execution."""
        # TODO: add support for paging.
        execution = self._get_execution(context=context, execution_arn=execution_arn)
        records: list[MapRunRecord] = (
            execution.exec_worker.env.map_run_record_pool_manager.get_all()
        )
        list_items = [record.list_item() for record in records]
        return ListMapRunsOutput(mapRuns=list_items)
1446

1447
    def update_map_run(
        self,
        context: RequestContext,
        map_run_arn: LongArn,
        max_concurrency: MaxConcurrency = None,
        tolerated_failure_percentage: ToleratedFailurePercentage = None,
        tolerated_failure_count: ToleratedFailureCount = None,
        **kwargs,
    ) -> UpdateMapRunOutput:
        """Update the settings of an in-flight Map Run.

        Failure-tolerance updates are not supported yet; raises
        ResourceNotFound when no execution owns a matching Map Run record.
        """
        if tolerated_failure_percentage is not None or tolerated_failure_count is not None:
            raise NotImplementedError(
                "Updating of ToleratedFailureCount and ToleratedFailurePercentage is currently unsupported."
            )
        # TODO: investigate behaviour of empty requests.
        for execution in self.get_store(context).executions.values():
            record: Optional[MapRunRecord] = (
                execution.exec_worker.env.map_run_record_pool_manager.get(map_run_arn)
            )
            if record is None:
                continue
            record.update(
                max_concurrency=max_concurrency,
                tolerated_failure_count=tolerated_failure_count,
                tolerated_failure_percentage=tolerated_failure_percentage,
            )
            LOG.warning(
                "StepFunctions UpdateMapRun changes are currently not being reflected in the MapRun instances."
            )
            return UpdateMapRunOutput()
        raise ResourceNotFound()
1477

1478
    def test_state(
        self,
        context: RequestContext,
        definition: Definition,
        role_arn: Arn = None,
        input: SensitiveData = None,
        inspection_level: InspectionLevel = None,
        reveal_secrets: RevealSecrets = None,
        variables: SensitiveData = None,
        **kwargs,
    ) -> TestStateOutput:
        """Run a single state definition in isolation (the TestState API).

        Builds a throwaway single-state machine and execution, runs it to
        completion, and returns the output at the requested inspection level
        (INFO when inspection_level is not provided).
        """
        StepFunctionsProvider._validate_definition(
            definition=definition, static_analysers=[TestStateStaticAnalyser()]
        )

        name: Optional[Name] = f"TestState-{short_uid()}"
        arn = stepfunctions_state_machine_arn(
            name=name, account_id=context.account_id, region_name=context.region
        )
        state_machine = TestStateMachine(
            name=name,
            arn=arn,
            role_arn=role_arn,
            definition=definition,
        )
        exec_arn = stepfunctions_standard_execution_arn(state_machine.arn, name)

        # The input parameter is optional on the TestState API: default to an
        # empty JSON object instead of letting json.loads(None) raise TypeError.
        input_json = json.loads(input) if input is not None else dict()
        execution = TestStateExecution(
            name=name,
            role_arn=role_arn,
            exec_arn=exec_arn,
            account_id=context.account_id,
            region_name=context.region,
            state_machine=state_machine,
            start_date=datetime.datetime.now(tz=datetime.timezone.utc),
            input_data=input_json,
            activity_store=self.get_store(context).activities,
        )
        execution.start()

        test_state_output = execution.to_test_state_output(
            inspection_level=inspection_level or InspectionLevel.INFO
        )

        return test_state_output
1524

1525
    def create_activity(
        self,
        context: RequestContext,
        name: Name,
        tags: TagList = None,
        encryption_configuration: EncryptionConfiguration = None,
        **kwargs,
    ) -> CreateActivityOutput:
        """Create (or idempotently return) an activity with the given name."""
        self._validate_activity_name(name=name)

        activity_arn = stepfunctions_activity_arn(
            name=name, account_id=context.account_id, region_name=context.region
        )
        activities = self.get_store(context).activities
        # Creation is idempotent: reuse an existing activity with the same ARN.
        activity = activities.get(activity_arn)
        if activity is None:
            activity = Activity(arn=activity_arn, name=name)
            activities[activity_arn] = activity

        return CreateActivityOutput(activityArn=activity.arn, creationDate=activity.creation_date)
1546

1547
    def delete_activity(
        self, context: RequestContext, activity_arn: Arn, **kwargs
    ) -> DeleteActivityOutput:
        """Delete the activity with the given ARN; deleting an unknown ARN is a no-op."""
        self._validate_activity_arn(activity_arn)
        self.get_store(context).activities.pop(activity_arn, None)
        return DeleteActivityOutput()
1553

1554
    def describe_activity(
        self, context: RequestContext, activity_arn: Arn, **kwargs
    ) -> DescribeActivityOutput:
        """Return the description of the activity with the given ARN."""
        self._validate_activity_arn(activity_arn)
        return self._get_activity(
            context=context, activity_arn=activity_arn
        ).to_describe_activity_output()
1560

1561
    def list_activities(
        self,
        context: RequestContext,
        max_results: PageSize = None,
        next_token: PageToken = None,
        **kwargs,
    ) -> ListActivitiesOutput:
        """List all activities in the store (paging not yet supported)."""
        store_activities = self.get_store(context).activities.values()
        items = [activity.to_activity_list_item() for activity in store_activities]
        return ListActivitiesOutput(activities=items)
1572

1573
    def _send_activity_task_started(
        self,
        context: RequestContext,
        task_token: TaskToken,
        worker_name: Optional[Name],
    ) -> None:
        """Notify the activity callback endpoint bound to task_token that a
        worker (optionally named) has started the task.

        Raises InvalidToken when no execution holds a matching activity callback.
        """
        for execution in self._get_executions(context):
            endpoint = execution.exec_worker.env.callback_pool_manager.get(
                callback_id=task_token
            )
            # Only activity callbacks may be started this way; other callback
            # types bound to the same token id are ignored.
            if isinstance(endpoint, ActivityCallbackEndpoint):
                endpoint.notify_activity_task_start(worker_name=worker_name)
                return
        raise InvalidToken()
1588

1589
    @staticmethod
    def _pull_activity_task(activity: Activity) -> Optional[ActivityTask]:
        """Long-poll the activity's task queue for up to 60 seconds.

        Returns the first available task, or None when the poll times out.
        """
        for _ in range(60):
            try:
                return activity.get_task()
            except IndexError:
                # No task queued yet: wait a second before the next attempt.
                time.sleep(1)
        return None
1599

1600
    def get_activity_task(
        self,
        context: RequestContext,
        activity_arn: Arn,
        worker_name: Name = None,
        **kwargs,
    ) -> GetActivityTaskOutput:
        """Long-poll for a scheduled task of the given activity.

        On success, marks the task as started (attributing worker_name) and
        returns its token and input; returns empty fields on poll timeout.
        """
        self._validate_activity_arn(activity_arn)

        activity = self._get_activity(context=context, activity_arn=activity_arn)
        task: Optional[ActivityTask] = self._pull_activity_task(activity=activity)
        if task is None:
            # The 60s long poll elapsed without a scheduled task.
            return GetActivityTaskOutput(taskToken=None, input=None)

        self._send_activity_task_started(context, task.task_token, worker_name=worker_name)
        return GetActivityTaskOutput(taskToken=task.task_token, input=task.task_input)
1620

1621
    def validate_state_machine_definition(
        self, context: RequestContext, request: ValidateStateMachineDefinitionInput, **kwargs
    ) -> ValidateStateMachineDefinitionOutput:
        """Statically validate a state machine definition and report diagnostics.

        Chooses the analyser by machine type (STANDARD vs EXPRESS) and maps an
        InvalidDefinition failure to a SCHEMA_VALIDATION_FAILED diagnostic.
        """
        # TODO: increase parity of static analysers, current implementation is an unblocker for this API action.
        # TODO: add support for ValidateStateMachineDefinitionSeverity
        # TODO: add support for ValidateStateMachineDefinitionMaxResult

        state_machine_type: StateMachineType = request.get("type", StateMachineType.STANDARD)
        definition: str = request["definition"]

        if state_machine_type == StateMachineType.STANDARD:
            static_analysers = [StaticAnalyser()]
        else:
            static_analysers = [ExpressStaticAnalyser()]

        diagnostics: ValidateStateMachineDefinitionDiagnosticList = []
        try:
            StepFunctionsProvider._validate_definition(
                definition=definition, static_analysers=static_analysers
            )
            validation_result = ValidateStateMachineDefinitionResultCode.OK
        except InvalidDefinition as invalid_definition:
            validation_result = ValidateStateMachineDefinitionResultCode.FAIL
            diagnostics.append(
                ValidateStateMachineDefinitionDiagnostic(
                    severity=ValidateStateMachineDefinitionSeverity.ERROR,
                    code="SCHEMA_VALIDATION_FAILED",
                    message=invalid_definition.message,
                )
            )
        except Exception as ex:
            # Unexpected analyser failure: fail the validation but never the API call.
            validation_result = ValidateStateMachineDefinitionResultCode.FAIL
            LOG.error("Unknown error during validation %s", ex)

        return ValidateStateMachineDefinitionOutput(
            result=validation_result, diagnostics=diagnostics, truncated=False
        )
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc