rindPHI / isla, build 6381727864
02 Oct 2023 02:49PM UTC, coverage: 93.847% (-0.1%) from 93.961%
Pull Request #85: ISLa 2.0 (github / web-flow), merge 4d59d638d into 2c0157ed9

1022 of 1022 new or added lines in 10 files covered (100.0%)
6711 of 7151 relevant lines covered (93.85%)
0.94 hits per line

Source File: /src/isla/cli.py (96.28% covered)

1
# Copyright © 2022 CISPA Helmholtz Center for Information Security.
2
# Author: Dominic Steinhöfel.
3
#
4
# This file is part of ISLa.
5
#
6
# ISLa is free software: you can redistribute it and/or modify
7
# it under the terms of the GNU General Public License as published by
8
# the Free Software Foundation, either version 3 of the License, or
9
# (at your option) any later version.
10
#
11
# ISLa is distributed in the hope that it will be useful,
12
# but WITHOUT ANY WARRANTY; without even the implied warranty of
13
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14
# GNU General Public License for more details.
15
#
16
# You should have received a copy of the GNU General Public License
17
# along with ISLa.  If not, see <http://www.gnu.org/licenses/>.
18

19
import argparse
1✔
20
import collections.abc
1✔
21
import json
1✔
22
import logging
1✔
23
import os
1✔
24
import pathlib
1✔
25
import subprocess
1✔
26
import sys
1✔
27
from argparse import Namespace, ArgumentParser
1✔
28
from contextlib import redirect_stdout, redirect_stderr
1✔
29
from functools import lru_cache
1✔
30
from functools import partial
1✔
31
from io import TextIOWrapper
1✔
32
from typing import Dict, Tuple, List, Optional, Iterable, cast, Any, Set
1✔
33

34
import toml
1✔
35
from grammar_graph import gg
1✔
36
from returns.functions import tap
1✔
37
from returns.maybe import Nothing, Some
1✔
38
from returns.pipeline import is_successful
1✔
39
from returns.result import safe, Success, Result, Failure
1✔
40

41
from isla import __version__ as isla_version, language
1✔
42
from isla.derivation_tree import DerivationTree
1✔
43
from isla.helpers import (
1✔
44
    is_float,
45
    Maybe,
46
    get_isla_resource_file_content,
47
    eassert,
48
)
49
from isla.isla_predicates import (
1✔
50
    STANDARD_STRUCTURAL_PREDICATES,
51
    STANDARD_SEMANTIC_PREDICATES,
52
)
53
from isla.isla_shortcuts import true
1✔
54
from isla.language import parse_bnf, parse_isla, StructuralPredicate, SemanticPredicate
1✔
55
from isla.solver import (
1✔
56
    ISLaSolver,
57
    GrammarBasedBlackboxCostComputer,
58
    CostSettings,
59
    CostWeightVector,
60
    CostComputer,
61
    SemanticError,
62
)
63
from isla.type_defs import Grammar
1✔
64

65
# Exit Codes
66
USAGE_ERROR = 2
1✔
67
DATA_FORMAT_ERROR = 65
1✔
68

69

70
def main(*args: str, stdout=sys.stdout, stderr=sys.stderr):
1✔
71
    if "-O" in sys.argv:
1✔
72
        sys.argv.remove("-O")
×
73
        os.execl(sys.executable, sys.executable, "-O", *sys.argv)
×
74
        sys.exit(0)
×
75

76
    read_isla_rc_defaults()
1✔
77
    parser = create_parsers(stdout, stderr)
1✔
78

79
    with redirect_stdout(stdout):
1✔
80
        with redirect_stderr(stderr):
1✔
81
            args = parser.parse_args(args or sys.argv[1:])
1✔
82

83
    if not args.command and not args.version:
1✔
84
        parser.print_usage(file=stderr)
×
85
        print(
×
86
            "isla: error: You have to choose a global option or one of the commands "
87
            + "`solve`, `fuzz`, `check`, or `parse`",
88
            file=stderr,
89
        )
90
        exit(USAGE_ERROR)
×
91

92
    if args.version:
1✔
93
        print(f"ISLa version {isla_version}", file=stdout)
1✔
94
        sys.exit(0)
1✔
95

96
    level_mapping = {
1✔
97
        "ERROR": logging.ERROR,
98
        "WARNING": logging.WARNING,
99
        "INFO": logging.INFO,
100
        "DEBUG": logging.DEBUG,
101
    }
102

103
    if hasattr(args, "log_level"):
1✔
104
        logging.basicConfig(stream=stderr, level=level_mapping[args.log_level])
1✔
105
    else:
106
        get_default(stderr, args.command, "--log-level").map(
1✔
107
            tap(
108
                lambda level: logging.basicConfig(
109
                    stream=stderr,
110
                    level=level_mapping[level],
111
                )
112
            )
113
        )
114

115
    args.func(args)
1✔
116

117

118
def solve(stdout, stderr, parser: ArgumentParser, args: Namespace):
1✔
119
    files = read_files(args.files)
1✔
120
    ensure_grammar_present(stderr, parser, args, files)
1✔
121

122
    command = args.command
1✔
123

124
    output_dir = args.output_dir
1✔
125
    if output_dir:
1✔
126
        assert_path_is_dir(stderr, command, output_dir)
1✔
127

128
    grammar = parse_grammar(command, args.grammar, files, stderr)
1✔
129
    structural_predicates, semantic_predicates = read_predicates(files, stderr)
1✔
130
    constraint = parse_constraint(
1✔
131
        command,
132
        args.constraint,
133
        files,
134
        grammar,
135
        stderr,
136
        structural_predicates=structural_predicates,
137
        semantic_predicates=semantic_predicates,
138
    )
139
    cost_computer = parse_cost_computer_spec(
1✔
140
        command, grammar, args.k, stderr, args.weight_vector
141
    )
142

143
    solver = ISLaSolver(
1✔
144
        grammar,
145
        constraint,
146
        max_number_free_instantiations=args.free_instantiations,
147
        max_number_smt_instantiations=args.smt_instantiations,
148
        enforce_unique_trees_in_queue=args.unique_trees,
149
        cost_computer=cost_computer,
150
        timeout_seconds=args.timeout if args.timeout > 0 else None,
151
        activate_unsat_support=args.unsat_support,
152
        grammar_unwinding_threshold=args.unwinding_depth,
153
        structural_predicates=structural_predicates,
154
        semantic_predicates=semantic_predicates,
155
    )
156

157
    try:
1✔
158
        num_solutions = args.num_solutions
1✔
159
        i = 0
1✔
160
        while not (0 < num_solutions <= i):
1✔
161
            try:
1✔
162
                tree = solver.solve()
1✔
163
                result = (
1✔
164
                    derivation_tree_to_json(tree, args.pretty_print)
165
                    if args.tree
166
                    else str(tree)
167
                )
168

169
                if not output_dir:
1✔
170
                    print(
1✔
171
                        result,
172
                        flush=True,
173
                        file=stdout,
174
                    )
175
                else:
176
                    with open(
1✔
177
                        os.path.join(
178
                            output_dir, f"{i}.{'json' if args.tree else 'txt'}"
179
                        ),
180
                        "wb",
181
                    ) as out_file:
182
                        out_file.write(result.encode("utf-8"))
1✔
183
            except StopIteration:
1✔
184
                print("UNSAT", flush=True, file=stderr)
1✔
185
                break
1✔
186
            except TimeoutError:
1✔
187
                break
1✔
188
            except Exception as exc:
1✔
189
                print(
1✔
190
                    f"isla solve: error: An exception ({type(exc).__name__}) occurred "
191
                    + f"during constraint solving, message: `{exc}`",
192
                    file=stderr,
193
                )
194
                sys.exit(1)
1✔
195

196
            i += 1
1✔
197
    except KeyboardInterrupt:
1✔
198
        sys.exit(0)
×
199

200

201
def read_predicates(
1✔
202
    files: Dict[str, str], stderr
203
) -> Tuple[Set[StructuralPredicate], Set[SemanticPredicate]]:
204
    """
205
    Executes a Python extension file, if any, and extracts predicates defined there.
206
    Returns those together with the standard predicates.
207
    :param files: The passed files (mapping from names to contents).
208
    :param stderr: Standard error sink.
209
    :return: The predicates.
210
    """
211

212
    _, maybe_structural_predicates, maybe_semantic_predicates = (
1✔
213
        safe(
214
            lambda: next(
215
                file_content
216
                for file_name, file_content in files.items()
217
                if file_name.endswith(".py")
218
            )
219
        )()
220
        .map(
221
            lambda file_content: process_python_extension("solve", file_content, stderr)
222
        )
223
        .lash(lambda _: Success((Nothing, Nothing, Nothing)))
224
    ).unwrap()
225

226
    structural_predicates = (
1✔
227
        STANDARD_STRUCTURAL_PREDICATES | maybe_structural_predicates.value_or(set())
228
    )
229
    semantic_predicates = (
1✔
230
        STANDARD_SEMANTIC_PREDICATES | maybe_semantic_predicates.value_or(set())
231
    )
232

233
    return structural_predicates, semantic_predicates
1✔
234

235

236
def fuzz(_, stderr, parser: ArgumentParser, args: Namespace):
1✔
237
    input_ending = "_input.txt"
1✔
238
    stdout_ending = "_stdout.txt"
1✔
239
    stderr_ending = "_stderr.txt"
1✔
240
    status_ending = "_status.txt"
1✔
241

242
    files = read_files(args.files)
1✔
243
    ensure_grammar_present(stderr, parser, args, files)
1✔
244

245
    command = args.command
1✔
246

247
    output_dir = args.output_dir
1✔
248
    assert_path_is_dir(stderr, command, output_dir)
1✔
249

250
    grammar = parse_grammar(command, args.grammar, files, stderr)
1✔
251
    structural_predicates, semantic_predicates = read_predicates(files, stderr)
1✔
252
    constraint = parse_constraint(
1✔
253
        command,
254
        args.constraint,
255
        files,
256
        grammar,
257
        stderr,
258
        structural_predicates=structural_predicates,
259
        semantic_predicates=semantic_predicates,
260
    )
261
    cost_computer = parse_cost_computer_spec(
1✔
262
        command, grammar, args.k, stderr, args.weight_vector
263
    )
264

265
    solver = ISLaSolver(
1✔
266
        grammar,
267
        constraint,
268
        max_number_free_instantiations=args.free_instantiations,
269
        max_number_smt_instantiations=args.smt_instantiations,
270
        enforce_unique_trees_in_queue=args.unique_trees,
271
        cost_computer=cost_computer,
272
        timeout_seconds=args.timeout if args.timeout > 0 else None,
273
        activate_unsat_support=False,
274
        grammar_unwinding_threshold=args.unwinding_depth,
275
        structural_predicates=structural_predicates,
276
        semantic_predicates=semantic_predicates,
277
    )
278

279
    fuzz_command = get_fuzz_command(args, command, stderr)
1✔
280

281
    def inst_fuzz_command(inp_file: str) -> str:
1✔
282
        return fuzz_command.replace("{}", inp_file)
1✔
283

284
    try:
1✔
285
        num_solutions = args.num_solutions
1✔
286
        i = 0
1✔
287
        while not (0 < num_solutions <= i):
1✔
288
            istr = str(i).rjust(4, "0")
1✔
289

290
            try:
1✔
291
                result = solver.solve()
1✔
292
            except StopIteration:
1✔
293
                print("UNSAT", flush=True, file=stderr)
1✔
294
                break
1✔
295
            except TimeoutError:
×
296
                break
×
297

298
            # Write input file
299
            with open(
1✔
300
                os.path.join(output_dir, f"{istr}{input_ending}"), "wb"
301
            ) as inp_file:
302
                inp_file.write(str(result).encode("utf-8"))
1✔
303
                inp_file.seek(0)
1✔
304
                inp_file_name = inp_file.name
1✔
305

306
            try:
1✔
307
                # Execute fuzz target
308
                target_result = subprocess.run(
1✔
309
                    inst_fuzz_command(inp_file_name),
310
                    shell=True,
311
                    capture_output=True,
312
                    check=True,
313
                    text=True,
314
                )
315

316
                standard_output = target_result.stdout
1✔
317
                error_output = target_result.stderr
1✔
318
                return_code = target_result.returncode
1✔
319
            except subprocess.CalledProcessError as cpe:
1✔
320
                standard_output = cpe.stdout
1✔
321
                error_output = cpe.stderr
1✔
322
                return_code = cpe.returncode
1✔
323

324
            # Write results
325
            with open(
1✔
326
                os.path.join(output_dir, f"{istr}{stdout_ending}"), "wb"
327
            ) as stdout_file:
328
                stdout_file.write(standard_output.encode("utf-8"))
1✔
329

330
            with open(
1✔
331
                os.path.join(output_dir, f"{istr}{stderr_ending}"), "wb"
332
            ) as stderr_file:
333
                stderr_file.write(error_output.encode("utf-8"))
1✔
334

335
            with open(
1✔
336
                os.path.join(output_dir, f"{istr}{status_ending}"), "wb"
337
            ) as stat_file:
338
                stat_file.write(str(return_code).encode("utf-8"))
1✔
339

340
            i += 1
1✔
341
    except KeyboardInterrupt:
×
342
        sys.exit(0)
×
343

344

345
def get_fuzz_command(args: Namespace, command, stderr):
1✔
346
    fuzz_command: str = args.test_target
1✔
347
    if "{}" not in fuzz_command:
1✔
348
        print(
1✔
349
            f'isla {command}: warning: the placeholder "{{}}" was not found in '
350
            f'the fuzz command "{fuzz_command}"; the generated inputs will not be '
351
            f"accessible for the test target.",
352
            file=stderr,
353
        )
354
    return fuzz_command
1✔
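
# Illustrative sketch (not part of cli.py): the `{}` placeholder checked above is
# substituted with the path of each generated input file (every occurrence, via
# `str.replace` in `inst_fuzz_command`) before the command is run through
# `subprocess.run`. A hypothetical invocation, with target script and paths chosen
# purely for illustration:
#
#     isla fuzz -d /tmp/fuzz-out -n 10 'python3 my_target.py {}' grammar.bnf constraint.isla
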
355

356

357
def check(stdout, stderr, parser: ArgumentParser, args: Namespace):
1✔
358
    code, msg, _ = do_check(stdout, stderr, parser, args)
1✔
359
    print(msg, file=stdout)
1✔
360
    sys.exit(code)
1✔
361

362

363
def find(stdout, stderr, parser: ArgumentParser, args: Namespace):
1✔
364
    language_spec_files: List[TextIOWrapper] = [
1✔
365
        io_wrapper
366
        for io_wrapper in args.files or []
367
        if io_wrapper.name.endswith(".bnf")
368
        or io_wrapper.name.endswith(".py")
369
        or io_wrapper.name.endswith(".isla")
370
    ]
371

372
    input_files: List[TextIOWrapper] = [
1✔
373
        io_wrapper
374
        for io_wrapper in args.files or []
375
        if io_wrapper not in language_spec_files
376
    ]
377

378
    if not input_files:
1✔
379
        print("no files passed to `find`", file=stderr)
1✔
380
        sys.exit(USAGE_ERROR)
1✔
381

382
    success = False
1✔
383
    for input_file in input_files:
1✔
384
        args.files = language_spec_files + [input_file]
1✔
385
        code, _, _ = do_check(stdout, stderr, parser, args)
1✔
386
        if not code:
1✔
387
            success = True
1✔
388
            print(input_file.name, file=stdout)
1✔
389

390
    if success:
1✔
391
        sys.exit(0)
1✔
392
    else:
393
        sys.exit(1)
1✔
394

395

396
def parse(stdout, stderr, parser: ArgumentParser, args: Namespace):
1✔
397
    code, msg, maybe_tree = do_check(stdout, stderr, parser, args)
1✔
398
    if code:
1✔
399
        print(msg, file=stdout)
1✔
400
        sys.exit(code)
1✔
401

402
    def write_tree(tree: DerivationTree):
1✔
403
        json_str = derivation_tree_to_json(tree, args.pretty_print)
1✔
404
        if args.output_file:
1✔
405
            with open(args.output_file, "w") as file:
1✔
406
                file.write(json_str)
1✔
407
        else:
408
            print(json_str, file=stdout)
1✔
409

410
    maybe_tree.map(tap(write_tree))
1✔
411

412

413
def repair(stdout, stderr, parser: ArgumentParser, args: Namespace):
1✔
414
    files = read_files(args.files)
1✔
415
    ensure_grammar_present(stderr, parser, args, files)
1✔
416
    ensure_constraint_present(stderr, parser, args, files)
1✔
417
    command = args.command
1✔
418

419
    grammar = parse_grammar(command, args.grammar, files, stderr)
1✔
420
    structural_predicates, semantic_predicates = read_predicates(files, stderr)
1✔
421
    constraint = parse_constraint(
1✔
422
        command,
423
        args.constraint,
424
        files,
425
        grammar,
426
        stderr,
427
        structural_predicates=structural_predicates,
428
        semantic_predicates=semantic_predicates,
429
    )
430

431
    match get_input_string(command, stderr, args, files, grammar, constraint):
1✔
432
        case Failure(_):
1✔
433
            print("input could not be parsed", file=stderr)
1✔
434
            sys.exit(1)
1✔
435
        case Success(parsed_tree):
1✔
436
            inp = parsed_tree
1✔
437
        case _:
×
438
            assert False
×
439

440
    solver = ISLaSolver(
1✔
441
        grammar,
442
        constraint,
443
        structural_predicates=structural_predicates,
444
        semantic_predicates=semantic_predicates,
445
    )
446
    maybe_repaired = solver.repair(inp, fix_timeout_seconds=args.timeout)
1✔
447

448
    if not is_successful(maybe_repaired):
1✔
449
        print(
1✔
450
            "sorry, I could not repair this input (tip: try `isla mutate` instead)",
451
            file=stderr,
452
        )
453
        sys.exit(1)
1✔
454

455
    def write_result(tree: DerivationTree):
1✔
456
        if args.output_file:
1✔
457
            with open(args.output_file, "w") as file:
1✔
458
                file.write(str(tree))
1✔
459
        else:
460
            print(str(tree), file=stdout)
1✔
461

462
    maybe_repaired.map(tap(write_result))
1✔
463
    sys.exit(0)
1✔
464

465

466
def mutate(stdout, stderr, parser: ArgumentParser, args: Namespace):
1✔
467
    files = read_files(args.files)
1✔
468
    ensure_grammar_present(stderr, parser, args, files)
1✔
469
    ensure_constraint_present(stderr, parser, args, files)
1✔
470
    command = args.command
1✔
471

472
    grammar = parse_grammar(command, args.grammar, files, stderr)
1✔
473
    structural_predicates, semantic_predicates = read_predicates(files, stderr)
1✔
474
    constraint = parse_constraint(
1✔
475
        command,
476
        args.constraint,
477
        files,
478
        grammar,
479
        stderr,
480
        structural_predicates=structural_predicates,
481
        semantic_predicates=semantic_predicates,
482
    )
483

484
    match get_input_string(command, stderr, args, files, grammar, constraint):
1✔
485
        case Failure(_):
1✔
486
            print("input could not be parsed", file=stderr)
1✔
487
            sys.exit(1)
1✔
488
        case Success(parsed_tree):
1✔
489
            inp = parsed_tree
1✔
490
        case _:
×
491
            assert False
×
492

493
    solver = ISLaSolver(
1✔
494
        grammar,
495
        constraint,
496
        structural_predicates=structural_predicates,
497
        semantic_predicates=semantic_predicates,
498
    )
499

500
    mutated = solver.mutate(
1✔
501
        inp,
502
        fix_timeout_seconds=args.timeout,
503
        min_mutations=args.min_mutations,
504
        max_mutations=args.max_mutations,
505
    )
506

507
    if args.output_file:
1✔
508
        with open(args.output_file, "w") as file:
1✔
509
            file.write(str(mutated))
1✔
510
    else:
511
        print(str(mutated), file=stdout)
1✔
512

513
    sys.exit(0)
1✔
514

515

516
def do_check(
1✔
517
    stdout, stderr, parser: ArgumentParser, args: Namespace
518
) -> Tuple[int, str, Maybe[DerivationTree]]:
519
    files = read_files(args.files)
1✔
520
    ensure_grammar_present(stderr, parser, args, files)
1✔
521
    ensure_constraint_present(stderr, parser, args, files)
1✔
522
    command = args.command
1✔
523

524
    grammar = parse_grammar(command, args.grammar, files, stderr)
1✔
525
    structural_predicates, semantic_predicates = read_predicates(files, stderr)
1✔
526
    constraint = parse_constraint(
1✔
527
        command,
528
        args.constraint,
529
        files,
530
        grammar,
531
        stderr,
532
        structural_predicates=structural_predicates,
533
        semantic_predicates=semantic_predicates,
534
    )
535

536
    match get_input_string(command, stderr, args, files, grammar, constraint):
1✔
537
        case Failure(_):
1✔
538
            return (
1✔
539
                1,
540
                "input could not be parsed",
541
                Nothing,
542
            )
543
        case Success(parsed_tree):
1✔
544
            tree = parsed_tree
1✔
545
        case _:
×
546
            assert False
×
547

548
    try:
1✔
549
        solver = ISLaSolver(
1✔
550
            grammar,
551
            constraint,
552
            structural_predicates=structural_predicates,
553
            semantic_predicates=semantic_predicates,
554
        )
555

556
        if not solver.check(tree):
1✔
557
            raise SemanticError()
1✔
558
    except SemanticError:
1✔
559
        return 1, "input does not satisfy the ISLa constraint", Nothing
1✔
560

561
    return 0, "input satisfies the ISLa constraint", Some(tree)
1✔
562

563

564
def create(stdout, stderr, parser: ArgumentParser, args: Namespace):
1✔
565
    command = args.command
1✔
566
    out_dir = args.output_dir
1✔
567
    base_name = args.base_name
1✔
568

569
    assert_path_is_dir(stderr, command, out_dir)
1✔
570

571
    grammar_1_file = open(os.path.join(out_dir, f"{base_name}_grammar_1.bnf"), "w")
1✔
572
    grammar_2_file = open(os.path.join(out_dir, f"{base_name}_grammar_2.py"), "w")
1✔
573
    constraint_file = open(os.path.join(out_dir, f"{base_name}_constraint.isla"), "w")
1✔
574

575
    grammar_1_text = get_isla_resource_file_content("resources/cli_stubs/grammar.bnf")
1✔
576
    grammar_2_text = get_isla_resource_file_content("resources/cli_stubs/grammar.py")
1✔
577
    constraint_text = get_isla_resource_file_content(
1✔
578
        "resources/cli_stubs/constraint.isla"
579
    )
580

581
    readme_text = (
1✔
582
        get_isla_resource_file_content("resources/cli_stubs/README.md")
583
        .replace("{grammar_1_file.name}", grammar_1_file.name)
584
        .replace("{grammar_2_file.name}", grammar_2_file.name)
585
        .replace("{constraint_file.name}", constraint_file.name)
586
    )
587

588
    grammar_1_file.write(grammar_1_text)
1✔
589
    grammar_1_file.close()
1✔
590

591
    grammar_2_file.write(grammar_2_text)
1✔
592
    grammar_2_file.close()
1✔
593

594
    constraint_file.write(constraint_text)
1✔
595
    constraint_file.close()
1✔
596

597
    readme_path = os.path.join(out_dir, "README.md")
1✔
598
    with open(readme_path, "w") as readme_file:
1✔
599
        readme_file.write(readme_text)
1✔
600

601
    print(
1✔
602
        "`isla create` produced the following files: "
603
        + ", ".join(
604
            map(
605
                str,
606
                [
607
                    readme_path,
608
                    grammar_1_file.name,
609
                    grammar_2_file.name,
610
                    constraint_file.name,
611
                ],
612
            )
613
        ),
614
        file=stdout,
615
    )
616

617

618
def dump_config(stdout, stderr, parser: ArgumentParser, args: Namespace):
1✔
619
    config_file_content = get_isla_resource_file_content("resources/.islarc")
1✔
620

621
    if args.output_file:
1✔
622
        with open(args.output_file, "w") as file:
1✔
623
            file.write(config_file_content)
1✔
624
    else:
625
        print(config_file_content, file=stdout)
1✔
626

627

628
def create_parsers(stdout, stderr):
1✔
629
    parser = argparse.ArgumentParser(
1✔
630
        prog="isla",
631
        description="""
632
The ISLa command line interface.""",
633
    )
634

635
    parser.add_argument(
1✔
636
        "-v", "--version", help="Print the ISLa version number", action="store_true"
637
    )
638

639
    subparsers = parser.add_subparsers(title="Commands", dest="command", required=False)
1✔
640

641
    create_solve_parser(subparsers, stdout, stderr)
1✔
642
    create_fuzz_parser(subparsers, stdout, stderr)
1✔
643
    create_check_parser(subparsers, stdout, stderr)
1✔
644
    create_find_parser(subparsers, stdout, stderr)
1✔
645
    create_parse_parser(subparsers, stdout, stderr)
1✔
646
    create_repair_parser(subparsers, stdout, stderr)
1✔
647
    create_mutate_parser(subparsers, stdout, stderr)
1✔
648
    create_create_parser(subparsers, stdout, stderr)
1✔
649
    create_dump_config_parser(subparsers, stdout, stderr)
1✔
650

651
    return parser
1✔
652

653

654
def read_files(files: Iterable[TextIOWrapper]) -> Dict[str, str]:
1✔
655
    return {io_wrapper.name: io_wrapper.read() for io_wrapper in files}
1✔
656

657

658
def ensure_grammar_present(
1✔
659
    stderr, parser: ArgumentParser, args: Namespace, files: Dict[str, str]
660
) -> None:
661
    if not args.grammar and all(
1✔
662
        not file.endswith(".bnf") and not file.endswith(".py") for file in files
663
    ):
664
        parser.print_usage(file=stderr)
1✔
665
        print(
1✔
666
            "isla solve: error: You must specify a grammar by `--grammar` "
667
            "or FILES arguments with `.bnf` or `.py` ending.",
668
            file=stderr,
669
        )
670

671
        exit(USAGE_ERROR)
1✔
672

673

674
def ensure_constraint_present(
1✔
675
    stderr, parser: ArgumentParser, args: Namespace, files: Dict[str, str]
676
) -> None:
677
    if not args.constraint and all(not file.endswith(".isla") for file in files):
1✔
678
        parser.print_usage(file=stderr)
1✔
679
        print(
1✔
680
            "isla solve: error: You must specify a constraint by `--constraint` "
681
            "or FILES arguments with `.isla` ending.",
682
            file=stderr,
683
        )
684

685
        exit(USAGE_ERROR)
1✔
686

687

688
def parse_constraint(
1✔
689
    subcommand: str,
690
    constraint_arg: Optional[List[str]],
691
    files: Dict[str, str],
692
    grammar: Grammar,
693
    stderr,
694
    structural_predicates: Set[StructuralPredicate] = STANDARD_STRUCTURAL_PREDICATES,
695
    semantic_predicates: Set[SemanticPredicate] = STANDARD_SEMANTIC_PREDICATES,
696
) -> language.Formula:
697
    constraint_arg = constraint_arg or []
1✔
698
    assert isinstance(constraint_arg, list)
1✔
699
    assert all(isinstance(elem, str) for elem in constraint_arg)
1✔
700

701
    constraint = true()
1✔
702

703
    try:
1✔
704
        for constraint_str in constraint_arg:
1✔
705
            with redirect_stderr(stderr):
1✔
706
                constraint &= parse_isla(
1✔
707
                    constraint_str,
708
                    structural_predicates=structural_predicates,
709
                    semantic_predicates=semantic_predicates,
710
                    grammar=grammar,
711
                )
712

713
        for constraint_file_name in filter(lambda f: f.endswith(".isla"), files):
1✔
714
            with open(constraint_file_name, "r") as constraint_file:
1✔
715
                with redirect_stderr(stderr):
1✔
716
                    constraint &= parse_isla(
1✔
717
                        constraint_file.read(),
718
                        structural_predicates=structural_predicates,
719
                        semantic_predicates=semantic_predicates,
720
                        grammar=grammar,
721
                    )
722
    except Exception as exc:
1✔
723
        exc_string = str(exc)
1✔
724
        print(
1✔
725
            f"isla {subcommand}: error: A {type(exc).__name__} occurred "
726
            + f"while parsing the constraint{f' ({exc_string})' if exc_string else ''}",
727
            file=stderr,
728
        )
729
        sys.exit(DATA_FORMAT_ERROR)
1✔
730

731
    return constraint
1✔
732

733

734
def parse_grammar(
1✔
735
    subcommand: str, grammar_arg: str, files: Dict[str, str], stderr
736
) -> Grammar:
737
    try:
1✔
738
        if grammar_arg:
1✔
739
            with redirect_stderr(stderr):
1✔
740
                grammar = parse_bnf(grammar_arg)
1✔
741
        else:
742
            grammar = {}
1✔
743
            for grammar_file_name in filter(
1✔
744
                lambda f: f.endswith(".bnf") or f.endswith(".py"), files
745
            ):
746
                with open(grammar_file_name, "r") as grammar_file:
1✔
747
                    with redirect_stderr(stderr):
1✔
748
                        grammar_file_content = grammar_file.read()
1✔
749
                        if grammar_file_name.endswith(".bnf"):
1✔
750
                            grammar |= parse_bnf(grammar_file_content)
1✔
751
                        else:
752
                            grammar |= process_python_extension(
1✔
753
                                subcommand, grammar_file_content, stderr
754
                            )[0].value_or({})
755

756
            if not grammar:
1✔
757
                print(
1✔
758
                    "Could not find any grammar definition in the given files "
759
                    + f"{', '.join(files)}; either supply a BNF grammar as file "
760
                    + "(`*.bnf`) or command line argument, or a Python grammar "
761
                    + " as file(`*.py`) defining a variable `grammar`.",
762
                    file=stderr,
763
                )
764
                sys.exit(USAGE_ERROR)
1✔
765

766
    except Exception as exc:
1✔
767
        exc_string = str(exc)
1✔
768
        if exc_string == "None":
1✔
769
            exc_string = ""
1✔
770
        print(
1✔
771
            f"isla {subcommand}: error: A {type(exc).__name__} occurred "
772
            + "while processing a provided file"
773
            + (f" ({exc_string})" if exc_string else ""),
774
            file=stderr,
775
        )
776
        sys.exit(DATA_FORMAT_ERROR)
1✔
777

778
    return grammar
1✔
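
# Illustrative sketch (not part of cli.py): a small grammar in the BNF notation that
# `parse_bnf` accepts, as described in the FILES help text below (`<start>` expands
# to a single other nonterminal). The concrete rules are made up for illustration.
#
#     <start> ::= <digits>
#     <digits> ::= <digit> | <digit> <digits>
#     <digit> ::= "0" | "1" | "2" | "3" | "4" | "5" | "6" | "7" | "8" | "9"
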
779

780

781
def process_python_extension(
1✔
782
    subcommand: str, python_file_content: str, stderr
783
) -> Tuple[
784
    Maybe[Grammar], Maybe[Set[StructuralPredicate]], Maybe[Set[SemanticPredicate]]
785
]:
786
    query_program = """
1✔
787
try:
788
    grammar_ = grammar() if callable(grammar) else grammar
789
except NameError:
790
    grammar_ = None
791

792
try:
793
    predicates_ = predicates() if callable(predicates) else None
794
except NameError as err:
795
    predicates_ = None
796
    err_ = err
797
"""
798

799
    python_file_content = f"{python_file_content}\n{query_program}"
1✔
800

801
    new_symbols = {}
1✔
802
    exec(python_file_content, new_symbols)
1✔
803

804
    def assert_is_set_of_predicates(
1✔
805
        maybe_set_of_predicates: Any,
806
    ) -> Set[StructuralPredicate | SemanticPredicate]:
807
        if not isinstance(maybe_set_of_predicates, collections.abc.Iterable):
1✔
808
            print(
1✔
809
                f"isla {subcommand}: error: function `predicate` in Python extension "
810
                + "file does not return an iterable.",
811
                file=stderr,
812
            )
813
            sys.exit(DATA_FORMAT_ERROR)
1✔
814

815
        if any(
1✔
816
            not isinstance(elem, StructuralPredicate)
817
            and not isinstance(elem, SemanticPredicate)
818
            for elem in maybe_set_of_predicates
819
        ):
820
            print(
1✔
821
                f"isla {subcommand}: error: function `predicate` in Python extension "
822
                + "file does not return an iterable of predicates.",
823
                file=stderr,
824
            )
825
            sys.exit(DATA_FORMAT_ERROR)
1✔
826

827
        return set(maybe_set_of_predicates)
1✔
828

829
    def assert_is_valid_grammar(maybe_grammar: Any) -> Grammar:
1✔
830
        if (
1✔
831
            not isinstance(maybe_grammar, dict)
832
            or not all(isinstance(key, str) for key in maybe_grammar)
833
            or not all(
834
                isinstance(expansions, list) for expansions in maybe_grammar.values()
835
            )
836
            or not all(
837
                isinstance(expansion, str)
838
                for expansions in maybe_grammar.values()
839
                for expansion in expansions
840
            )
841
        ):
842
            print(
1✔
843
                f"isla {subcommand}: error: A grammar must be of type "
844
                + "`Dict[str, List[str]]`.",
845
                file=stderr,
846
            )
847
            sys.exit(DATA_FORMAT_ERROR)
1✔
848

849
        return maybe_grammar
1✔
850

851
    grammar = Maybe.from_optional(new_symbols["grammar_"]).map(assert_is_valid_grammar)
1✔
852

853
    predicates = Maybe.from_optional(new_symbols["predicates_"]).map(
1✔
854
        assert_is_set_of_predicates
855
    )
856

857
    structural_predicates = cast(
1✔
858
        Maybe[Set[StructuralPredicate]],
859
        predicates.map(partial(filter, StructuralPredicate.__instancecheck__)).map(set),
860
    )
861

862
    semantic_predicates = cast(
1✔
863
        Maybe[Set[SemanticPredicate]],
864
        predicates.map(partial(filter, SemanticPredicate.__instancecheck__)).map(set),
865
    )
866

867
    return grammar, structural_predicates, semantic_predicates
1✔
868

869

870
def parse_cost_computer_spec(
1✔
871
    command: str, grammar: Grammar, k_arg: int, stderr, weight_vector_arg: str
872
) -> CostComputer:
873
    weight_vector = weight_vector_arg.split(",")
1✔
874
    if len(weight_vector) != 5:
1✔
875
        print(
1✔
876
            f"isla {command}: error: Length of weight vector is "
877
            f"{len(weight_vector)}, expected 5",
878
            file=stderr,
879
        )
880
        sys.exit(DATA_FORMAT_ERROR)
1✔
881
    if any(not is_float(w) for w in weight_vector):
1✔
882
        print(
1✔
883
            f"isla {command}: error: non-numeric weight vector element encountered",
884
            file=stderr,
885
        )
886
        sys.exit(DATA_FORMAT_ERROR)
1✔
887
    weight_vector = list(map(float, weight_vector))
1✔
888
    cost_computer = GrammarBasedBlackboxCostComputer(
1✔
889
        CostSettings(
890
            CostWeightVector(
891
                tree_closing_cost=weight_vector[0],
892
                constraint_cost=weight_vector[1],
893
                derivation_depth_penalty=weight_vector[2],
894
                low_k_coverage_penalty=weight_vector[3],
895
                low_global_k_path_coverage_penalty=weight_vector[4],
896
            ),
897
            k=k_arg,
898
        ),
899
        gg.GrammarGraph.from_grammar(grammar),
900
    )
901
    return cost_computer
1✔
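
# Illustrative sketch (not part of cli.py): how a `--weight-vector` argument maps onto
# the `CostWeightVector` constructed above. The concrete numbers are arbitrary.
#
#     --weight-vector 7,1,4,2,19
#
#     tree_closing_cost                  = 7.0
#     constraint_cost                    = 1.0
#     derivation_depth_penalty           = 4.0
#     low_k_coverage_penalty             = 2.0
#     low_global_k_path_coverage_penalty = 19.0
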
902

903

904
def get_input_string(
1✔
905
    command: str,
906
    stderr,
907
    args: Namespace,
908
    files: Dict[str, str],
909
    grammar: Grammar,
910
    constraint: language.Formula,
911
) -> Result[DerivationTree, SyntaxError]:
912
    """
913
    Looks for a passed input (either via `args.input_string` or a file name) and
914
    parses it, if any. Terminates with a `USAGE_ERROR` if no input can be found.
915
    Returns a `Failure` if the input could not be parsed. If an input is recognized
916
    as a valid derivation tree in JSON format, it is treated as such and not parsed.
917

918
    :param command: The calling command.
919
    :param stderr: The standard error sink to use.
920
    :param args: The command line arguments.
921
    :param files: The passed files.
922
    :param grammar: The specified grammar.
923
    :param constraint: The specified constraint.
924
    :return: A derivation tree of the given input.
925
    """
926

927
    if hasattr(args, "input_string") and args.input_string:
1✔
928
        inp = args.input_string
1✔
929
    else:
930
        possible_inputs = [
1✔
931
            file
932
            for file in files
933
            if not file.endswith(".bnf")
934
            and not file.endswith(".isla")
935
            and not file.endswith(".py")
936
        ]
937

938
        if len(possible_inputs) != 1:
1✔
939
            print(
1✔
940
                f"isla {command}: error: you must specify exactly *one* input to check "
941
                + f"via `--input-string` or a file; found {len(possible_inputs)} "
942
                + "inputs",
943
                file=stderr,
944
            )
945
            sys.exit(USAGE_ERROR)
1✔
946

947
        inp = files[possible_inputs[0]]
1✔
948

949
        # Somehow, spurious newlines appear when reading files...
950
        inp = inp[:-1] if inp[-1] == "\n" else inp
1✔
951

952
    def solver():
1✔
953
        return ISLaSolver(grammar, constraint)
1✔
954

955
    def graph():
1✔
956
        return gg.GrammarGraph.from_grammar(grammar)
1✔
957

958
    return (
1✔
959
        safe(lambda: json.loads(inp))()
960
        .map(DerivationTree.from_parse_tree)
961
        .map(lambda tree: eassert(tree, graph().tree_is_valid(tree)))
962
        .lash(lambda _: safe(lambda: solver().parse(inp, skip_check=True))())
963
    )
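
# Illustrative sketch (not part of cli.py): `get_input_string` first tries to read the
# input as a JSON parse tree (the same nested `[label, children]` format that
# `derivation_tree_to_json` below emits) and only falls back to parsing the raw string
# against the grammar if that fails. A hypothetical JSON input for a grammar with
# `<start> ::= <digit>` might look like:
#
#     ["<start>", [["<digit>", [["7", []]]]]]
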
964

965

966
def create_solve_parser(subparsers, stdout, stderr):
1✔
967
    parser = subparsers.add_parser(
1✔
968
        "solve",
969
        help="create solutions to ISLa constraints or check their unsatisfiability",
970
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
971
        description="""
972
Create solutions to an ISLa constraint and a reference grammar.""",
973
    )
974
    parser.set_defaults(func=lambda *args: solve(stdout, stderr, parser, *args))
1✔
975

976
    grammar_arg(parser)
1✔
977
    constraint_arg(parser)
1✔
978
    output_dir_arg(parser)
1✔
979

980
    parser.add_argument(
1✔
981
        "-T",
982
        "--tree",
983
        action=argparse.BooleanOptionalAction,
984
        default=get_default(sys.stderr, "solve", "--tree").unwrap(),
985
        help="""
986
outputs derivation trees in JSON format instead of strings""",
987
    )
988

989
    parser.add_argument(
1✔
990
        "-p",
991
        "--pretty-print",
992
        type=bool,
993
        action=argparse.BooleanOptionalAction,
994
        default=get_default(stderr, "solve", "--pretty-print").unwrap(),
995
        help="""
996
If this flag is set, created JSON parse trees are printed on multiple lines with
997
indentation; otherwise the whole string is printed on a single line. Only relevant
998
if `--tree` is used.""",
999
    )
1000

1001
    num_solutions_arg(parser)
1✔
1002
    timeout_arg(parser)
1✔
1003
    parser.add_argument(
1✔
1004
        "--unsat-support",
1005
        action="store_true",
1006
        default=get_default(stderr, "solve", "--unsat-support").unwrap(),
1007
        help="""
1008
Activate support for unsatisfiable constraints. This can be required to make the
1009
analysis of unsatisfiable constraints terminate, but reduces the performance of the
1010
generator for satisfiable formulas""",
1011
    )
1012
    free_insts_arg(parser)
1✔
1013
    smt_insts_arg(parser)
1✔
1014
    unique_trees_arg(parser)
1✔
1015
    unwinding_depth_arg(parser)
1✔
1016
    weight_vector_arg(parser)
1✔
1017
    k_arg(parser)
1✔
1018
    log_level_arg(parser)
1✔
1019
    grammar_constraint_extension_files_arg(parser)
1✔
1020

1021

1022
def create_fuzz_parser(subparsers, stdout, stderr):
1✔
1023
    parser = subparsers.add_parser(
1✔
1024
        "fuzz",
1025
        help="pass solutions to an ISLa constraint to a test subject",
1026
        description="""
1027
Create solutions to an ISLa constraint and a reference grammar, and pass these to
1028
a test subject. An output directory must be specified (`-d`). Into this directory,
1029
ISLa writes four files per generated test input: (1) the input (`..._input.txt`),
1030
(2) the standard output of the fuzzed program (`..._stdout.txt`), (3) the standard
1031
error of the fuzzed program (`..._stderr.txt`), and (4) the returned status code of
1032
the fuzzed program (`..._status.txt`).""",
1033
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
1034
    )
1035
    parser.set_defaults(func=lambda *args: fuzz(stdout, stderr, parser, *args))
1✔
1036

1037
    parser.add_argument(
1✔
1038
        "test_target",
1039
        metavar="TEST_TARGET",
1040
        help="""
1041
A command to run the test target. The placeholder `{}` will be replaced by a path to
1042
the input file""",
1043
    )
1044

1045
    output_dir_arg(parser, required=True)
1✔
1046

1047
    parser.add_argument(
1✔
1048
        "-e",
1049
        "--ending",
1050
        metavar="FILE_ENDING",
1051
        default=get_default(stderr, "fuzz", "--ending").unwrap(),
1052
        help="""
1053
The file ending for the generated files that are passed to the test target, if the
1054
test target expects a particular format""",
1055
    )
1056

1057
    grammar_arg(parser)
1✔
1058
    constraint_arg(parser)
1✔
1059
    num_solutions_arg(parser)
1✔
1060
    timeout_arg(parser)
1✔
1061
    free_insts_arg(parser)
1✔
1062
    smt_insts_arg(parser)
1✔
1063
    unique_trees_arg(parser)
1✔
1064
    unwinding_depth_arg(parser)
1✔
1065
    weight_vector_arg(parser)
1✔
1066
    k_arg(parser)
1✔
1067
    log_level_arg(parser)
1✔
1068
    grammar_constraint_extension_files_arg(parser)
1✔
1069

1070

1071
def create_check_parser(subparsers, stdout, stderr):
1✔
1072
    parser = subparsers.add_parser(
1✔
1073
        "check",
1074
        help="check whether an input satisfies an ISLa constraint",
1075
        description="""
1076
Check whether an input is derivable from a grammar and satisfies an ISLa
1077
constraint.""",
1078
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
1079
    )
1080
    parser.set_defaults(func=lambda *args: check(stdout, stderr, parser, *args))
1✔
1081

1082
    input_string_arg(parser)
1✔
1083

1084
    grammar_arg(parser)
1✔
1085
    constraint_arg(parser)
1✔
1086
    log_level_arg(parser)
1✔
1087
    grammar_constraint_or_input_files_arg(parser)
1✔
1088

1089

1090
def create_find_parser(subparsers, stdout, stderr):
1✔
1091
    parser = subparsers.add_parser(
1✔
1092
        "find",
1093
        help="filter files satisfying syntactic & semantic constraints",
1094
        description="""
1095
From a list of passed files, lists those that can be parsed with the given grammar and
1096
satisfy the given ISLa constraint(s).""",
1097
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
1098
    )
1099
    parser.set_defaults(func=lambda *args: find(stdout, stderr, parser, *args))
1✔
1100

1101
    grammar_arg(parser)
1✔
1102
    constraint_arg(parser)
1✔
1103
    log_level_arg(parser)
1✔
1104
    grammar_constraint_or_input_files_arg(parser)
1✔
1105

1106

1107
def create_parse_parser(subparsers, stdout, stderr):
1✔
1108
    parser = subparsers.add_parser(
1✔
1109
        "parse",
1110
        help="parse an input into a derivation tree if it satisfies an ISLa constraint",
1111
        description="""
1112
Parse an input into a derivation tree if it is derivable from a grammar and
1113
satisfies an ISLa constraint.""",
1114
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
1115
    )
1116
    parser.set_defaults(func=lambda *args: parse(stdout, stderr, parser, *args))
1✔
1117

1118
    input_string_arg(parser)
1✔
1119

1120
    parser.add_argument(
1✔
1121
        "-o",
1122
        "--output-file",
1123
        default=get_default(stderr, "parse", "--output-file").value_or(None),
1124
        help="""
1125
The file into which to write the (JSON) derivation tree in case that the input
1126
could be successfully parsed and checked. If no file is given, the tree is printed
1127
to stdout""",
1128
    )
1129

1130
    parser.add_argument(
1✔
1131
        "-p",
1132
        "--pretty-print",
1133
        type=bool,
1134
        action=argparse.BooleanOptionalAction,
1135
        default=get_default(stderr, "parse", "--pretty-print").unwrap(),
1136
        help="""
1137
if this flag is set, the created JSON parse tree is printed on multiple lines with
1138
indentation; otherwise the whole string is printed on a single line""",
1139
    )
1140

1141
    grammar_arg(parser)
1✔
1142
    constraint_arg(parser)
1✔
1143
    log_level_arg(parser)
1✔
1144
    grammar_constraint_or_input_files_arg(parser)
1✔
1145

1146

1147
def create_repair_parser(subparsers, stdout, stderr):
1✔
1148
    parser = subparsers.add_parser(
1✔
1149
        "repair",
1150
        help="try to repair an existing input such that it satisfies an ISLa constraint",
1151
        description="""
1152
Parses an input and, if it does not already satisfy the specified constraint, gradually
1153
abstracts the input and tries to instantiate the abstracted parts such that the result
1154
satisfies the constraint. The input must be parseable using the grammar. Note that
1155
currently, no intensive structural manipulations are performed; we rather intend to fix
1156
the valuation of SMT-LIB and semantic predicate constraints.""",
1157
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
1158
    )
1159
    parser.set_defaults(func=lambda *args: repair(stdout, stderr, parser, *args))
1✔
1160

1161
    input_string_arg(parser)
1✔
1162

1163
    parser.add_argument(
1✔
1164
        "-o",
1165
        "--output-file",
1166
        default=get_default(stderr, "repair", "--output-file").value_or(None),
1167
        help="""
1168
The file into which to write the repaired result in case that the input could be
1169
successfully repaired. If no file is given, the result is printed to stdout""",
1170
    )
1171

1172
    grammar_arg(parser)
1✔
1173
    constraint_arg(parser)
1✔
1174

1175
    parser.add_argument(
1✔
1176
        "-t",
1177
        "--timeout",
1178
        type=float,
1179
        default=get_default(stderr, "repair", "--timeout").unwrap(),
1180
        help="""
1181
the number of (fractions of) seconds after which the solver should stop finding
1182
solutions when trying to repair an incomplete input""",
1183
    )
1184

1185
    log_level_arg(parser)
1✔
1186
    grammar_constraint_or_input_files_arg(parser)
1✔
1187

1188

1189
def create_mutate_parser(subparsers, stdout, stderr):
1✔
1190
    parser = subparsers.add_parser(
1✔
1191
        "mutate",
1192
        help="mutate an input such that the result satisfies an ISLa constraint",
1193
        description="""
1194
Performs structural mutations on the given input and repairs it afterward (see
1195
`isla repair`) such that the result satisfies the given constraint(s).""",
1196
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
1197
    )
1198
    parser.set_defaults(func=lambda *args: mutate(stdout, stderr, parser, *args))
1✔
1199

1200
    input_string_arg(parser)
1✔
1201

1202
    parser.add_argument(
1✔
1203
        "-o",
1204
        "--output-file",
1205
        default=get_default(stderr, "mutate", "--output-file").value_or(None),
1206
        help="""
1207
The file into which to write the mutated result. If no file is given, the result is
1208
printed to stdout""",
1209
    )
1210

1211
    grammar_arg(parser)
1✔
1212
    constraint_arg(parser)
1✔
1213

1214
    parser.add_argument(
1✔
1215
        "-x",
1216
        "--min-mutations",
1217
        type=int,
1218
        default=get_default(stderr, "mutate", "--min-mutations").unwrap(),
1219
        help="the minimum number of mutation steps to perform",
1220
    )
1221

1222
    parser.add_argument(
1✔
1223
        "-X",
1224
        "--max-mutations",
1225
        type=int,
1226
        default=get_default(stderr, "mutate", "--max-mutations").unwrap(),
1227
        help="the maximum number of mutation steps to perform",
1228
    )
1229

1230
    parser.add_argument(
1✔
1231
        "-t",
1232
        "--timeout",
1233
        type=float,
1234
        default=get_default(stderr, "mutate", "--timeout").unwrap(),
1235
        help="""
1236
the number of (fractions of) seconds after which the solver should stop finding
1237
solutions when trying to repair a mutated input""",
1238
    )
1239

1240
    log_level_arg(parser)
1✔
1241
    grammar_constraint_or_input_files_arg(parser)
1✔
1242

1243

1244
def create_create_parser(subparsers, stdout, stderr):
1✔
1245
    parser = subparsers.add_parser(
1✔
1246
        "create",
1247
        help="create grammar and constraint stubs",
1248
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
1249
        description="""
1250
Create grammar and constraint stub files to help kickstart a new
1251
specification project.""",
1252
    )
1253
    parser.set_defaults(func=lambda *args: create(stdout, stderr, parser, *args))
1✔
1254

1255
    parser.add_argument(
1✔
1256
        "-b",
1257
        "--base-name",
1258
        default=get_default(stderr, "create", "--base-name").unwrap(),
1259
        help="the base name for the created stubs",
1260
    )
1261

1262
    parser.add_argument(
1✔
1263
        "output_dir",
1264
        metavar="OUTPUT_DIR",
1265
        help="the directory into which to write the created stubs",
1266
    )
1267

1268

1269
def create_dump_config_parser(subparsers, stdout, stderr):
1✔
1270
    parser = subparsers.add_parser(
1✔
1271
        "config",
1272
        help="dumps the default configuration file",
1273
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
1274
        description="""
1275
Dumps the default `.islarc` configuration file.""",
1276
    )
1277
    parser.set_defaults(func=lambda *args: dump_config(stdout, stderr, parser, *args))
1✔
1278

1279
    parser.add_argument(
1✔
1280
        "-o",
1281
        "--output-file",
1282
        help="""
1283
The file into which to write the current default `.islarc`. If no file is given, the
1284
configuration is printed to stdout""",
1285
    )
1286

1287

1288
def grammar_constraint_extension_files_arg(parser):
1✔
1289
    parser.add_argument(
1✔
1290
        "files",
1291
        nargs="*",
1292
        metavar="FILES",
1293
        type=argparse.FileType("r", encoding="UTF-8"),
1294
        help="""
1295
Possibly multiple ISLa constraint (`*.isla`), BNF grammar (`*.bnf`) or Python
1296
extension (`*.py`) files. Multiple grammar files will be simply merged; multiple ISLa
1297
constraints will be combined to a disjunction. Grammars must declare a rule for a
1298
nonterminal "<start>" (the start symbol) expanding to a single other nonterminal.
1299
Python extension files can specify a grammar by declaring a variable `grammar` of type
1300
`Dict[str, List[str]]`, or (preferably) by specifying a function `grammar()` returning
1301
Dict objects of that type. Furthermore, they can specify additional structural or
1302
semantic predicates by declaring a function
1303

1304
```python
1305
def predicates() -> typing.Set[
1306
        isla.language.StructuralPredicate |
1307
        isla.language.SemanticPredicate]:
1308
    # ...
1309
```
1310

1311
Note that you can _either_ pass a grammar as a file _or_ via the `--grammar` option.
1312
For constraints, it is possible to use both the option and a file input. However, a
1313
grammar and a constraint must be specified somehow.""",
1314
    )
1315

1316

1317
def grammar_constraint_or_input_files_arg(parser):
1✔
1318
    parser.add_argument(
1✔
1319
        "files",
1320
        nargs="*",
1321
        metavar="FILES",
1322
        type=argparse.FileType("r", encoding="UTF-8"),
1323
        help="""
1324
Possibly multiple ISLa constraint (`*.isla`), BNF grammar (`*.bnf`), Python
1325
extension (`*.py`) files, and/or input files to process (currently, only the `find`
1326
command accepts more than one input file). Multiple grammar files will be simply merged;
1327
multiple ISLa constraints will be combined to a disjunction. Grammars must declare a
1328
rule for a nonterminal "<start>" (the start symbol) expanding to a single other
1329
nonterminal. Python extension files can specify a grammar by declaring a variable
1330
`grammar` of type `Dict[str, List[str]]`, or (preferably) by specifying a function
1331
`grammar()` returning Dict objects of that type. Furthermore, they can specify
1332
additional structural or semantic predicates by declaring a function
1333

1334
```python
1335
def predicates() -> typing.Set[
1336
        isla.language.StructuralPredicate |
1337
        isla.language.SemanticPredicate]:
1338
    # ...
1339
```
1340

1341
Note that you can _either_ pass a grammar as a file _or_ via the `--grammar` option.
1342
For constraints, it is possible to use both the option and a file input. However, a
1343
grammar and a constraint must be specified somehow.""",
1344
    )
1345

1346

1347
def log_level_arg(parser):
1✔
1348
    command = parser.prog.split(" ")[-1]
1✔
1349

1350
    parser.add_argument(
1✔
1351
        "-l",
1352
        "--log-level",
1353
        choices=["ERROR", "WARNING", "INFO", "DEBUG"],
1354
        default=get_default(sys.stderr, command, "--log-level").unwrap(),
1355
        help="set the logging level",
1356
    )
1357

1358

1359
def weight_vector_arg(parser):
1✔
1360
    command = parser.prog.split(" ")[-1]
1✔
1361

1362
    parser.add_argument(
1✔
1363
        "-w",
1364
        "--weight-vector",
1365
        help="""
1366
Set the ISLa weight vector. Expects a comma-separated list of floating point values
1367
for the following cost factors: (1) Tree closing cost, (2) constraint cost, (3)
1368
derivation depth penalty, (4) low per-input k-path coverage penalty, and (5)
1369
low global k-path coverage penalty""",
1370
        default=get_default(sys.stderr, command, "--weight-vector").unwrap(),
1371
    )
1372

1373

1374
def k_arg(parser):
1✔
1375
    command = parser.prog.split(" ")[-1]
1✔
1376

1377
    parser.add_argument(
1✔
1378
        "-k",
1379
        type=int,
1380
        help="""
1381
set the length of the k-paths to be considered for coverage computations""",
1382
        default=get_default(sys.stderr, command, "-k").unwrap(),
1383
    )
1384

1385

1386
def unwinding_depth_arg(parser):
1✔
1387
    command = parser.prog.split(" ")[-1]
1✔
1388

1389
    parser.add_argument(
1✔
1390
        "--unwinding-depth",
1391
        type=int,
1392
        default=get_default(sys.stderr, command, "--unwinding-depth").unwrap(),
1393
        help="""
1394
Set the depth until which nonregular grammar elements in SMT-LIB expressions are
1395
unwound to make them regular before the SMT solver is queried""",
1396
    )
1397

1398

1399
def unique_trees_arg(parser):
1✔
1400
    command = parser.prog.split(" ")[-1]
1✔
1401

1402
    parser.add_argument(
1✔
1403
        "--unique-trees",
1404
        action="store_true",
1405
        default=get_default(sys.stderr, command, "--unique-trees").unwrap(),
1406
        help="""
1407
Enforces the uniqueness of derivation trees in the solver queue. This setting can
1408
improve the generator performance, but can also lead to omitted interesting solutions
1409
in certain cases""",
1410
    )
1411

1412

1413
def smt_insts_arg(parser):
1✔
1414
    command = parser.prog.split(" ")[-1]
1✔
1415

1416
    parser.add_argument(
1✔
1417
        "-s",
1418
        "--smt-instantiations",
1419
        type=int,
1420
        default=get_default(sys.stderr, command, "--smt-instantiations").unwrap(),
1421
        help="""
1422
the number of solutions obtained from the SMT solver for atomic SMT-LIB formulas""",
1423
    )
1424

1425

1426
def free_insts_arg(parser):
1✔
1427
    command = parser.prog.split(" ")[-1]
1✔
1428

1429
    parser.add_argument(
1✔
1430
        "-f",
1431
        "--free-instantiations",
1432
        type=int,
1433
        default=get_default(sys.stderr, command, "--free-instantiations").unwrap(),
1434
        help="""
1435
the number of times an unconstrained nonterminal should be randomly instantiated
1436
""",
1437
    )
1438

1439

1440
def timeout_arg(parser):
1✔
1441
    command = parser.prog.split(" ")[-1]
1✔
1442

1443
    parser.add_argument(
1✔
1444
        "-t",
1445
        "--timeout",
1446
        type=float,
1447
        default=get_default(sys.stderr, command, "--timeout").unwrap(),
1448
        help="""
1449
The number of (fractions of) seconds after which the solver should stop finding
1450
solutions. Negative numbers imply that no timeout is set""",
1451
    )
1452

1453

1454
def num_solutions_arg(parser):
1✔
1455
    command = parser.prog.split(" ")[-1]
1✔
1456

1457
    parser.add_argument(
1✔
1458
        "-n",
1459
        "--num-solutions",
1460
        type=int,
1461
        default=get_default(sys.stderr, command, "--num-solutions").unwrap(),
1462
        help="""
1463
The number of solutions to generate. Negative numbers indicate an infinite number of
1464
solutions (you need to set a `--timeout` or forcefully stop ISLa)""",
1465
    )
1466

1467

1468
def output_dir_arg(parser: ArgumentParser, required: bool = False):
1✔
1469
    command = parser.prog.split(" ")[-1]
1✔
1470

1471
    parser.add_argument(
1✔
1472
        "-d",
1473
        "--output-dir",
1474
        default=get_default(sys.stderr, command, "--output-dir").value_or(None),
1475
        required=required,
1476
        help="a directory into which to place generated output files",
1477
    )
1478

1479

1480
def constraint_arg(parser):
1✔
1481
    parser.add_argument(
1✔
1482
        "-c",
1483
        "--constraint",
1484
        help="Add ISLa constraints to the solver. All constraints passed via "
1485
        "`--constraint` as well as constraints passed as file(s) "
1486
        "are combined to a single conjunction",
1487
        action="append",
1488
    )
1489

1490

1491
def grammar_arg(parser):
1✔
1492
    parser.add_argument(
1✔
1493
        "-g", "--grammar", help="the grammar in BNF (if not passed as a file)"
1494
    )
1495

1496

1497
def input_string_arg(parser):
1✔
1498
    parser.add_argument(
1✔
1499
        "-i",
1500
        "--input-string",
1501
        help="the input to check",
1502
    )
1503

1504

1505
def assert_path_is_dir(stderr, command: str, out_dir: str) -> None:
1✔
1506
    if not os.path.isdir(out_dir):
1✔
1507
        print(
1✔
1508
            f"isla {command}: error: path {out_dir} does not exist or is no directory",
1509
            file=stderr,
1510
        )
1511
        sys.exit(USAGE_ERROR)
1✔
1512

1513

1514
@lru_cache
1✔
1515
def read_isla_rc_defaults(
1✔
1516
    content: Maybe[str] = Nothing,
1517
) -> Dict[str, Dict[str, str | int | float | bool]]:
1518
    """
1519
    Attempts to read an `.islarc` configuration from the following sources, in the
1520
    given order:
1521

1522
    1. The `content` parameter
1523
    2. The file `./.islarc` (in the current working directory)
1524
    3. The file `~/.islarc` (in the current user's home directory)
1525
    4. The file `resources/.islarc` (bundled with the ISLa distribution)
1526

1527
    Returns a configuration dictionary. The keys are ISLa commands or "default" for a
1528
    fallback; the values are dictionaries from command line parameters to default
1529
    values. Configurations in the files listed above are merged; Defaults specified in
1530
    configuration sources earlier in the list take precedence in case of conflicts.
1531

1532
    :param content: An optional TOML configuration string (not a path!).
1533
    :return: The configuration dictionary.
1534
    """
1535

1536
    sources: List[str] = []
1✔
1537
    content.map(tap(lambda c: sources.append(c)))
1✔
1538

1539
    dirs = (os.getcwd(), pathlib.Path.home())
1✔
1540
    candidate_locations = [os.path.join(dir, ".islarc") for dir in dirs]
1✔
1541
    sources.extend(
1✔
1542
        [
1543
            pathlib.Path(location).read_text()
1544
            for location in candidate_locations
1545
            if os.path.exists(location)
1546
        ]
1547
    )
1548

1549
    sources.append(get_isla_resource_file_content("resources/.islarc"))
1✔
1550

1551
    all_defaults = [toml.loads(source).get("defaults", {}) for source in sources]
1✔
1552

1553
    result: Dict[str, Dict[str, str | int | float | bool]] = {}
1✔
1554

1555
    for defaults in all_defaults:
1✔
1556
        # Expecting something like
1557
        #
1558
        # {
1559
        #     "default": [{"--log-level": "WARNING"}],
1560
        #     "create": [{"--base-name": "project", "--output-dir": "."}],
1561
        #     ...
1562
        # }
1563

1564
        if (
1✔
1565
            not isinstance(defaults, dict)
1566
            or any(not isinstance(key, str) for key in defaults)
1567
            or not all(
1568
                isinstance(value, list)
1569
                and len(value) == 1
1570
                and isinstance(value[0], dict)
1571
                and all(isinstance(inner_key, str) for inner_key in value[0])
1572
                and all(
1573
                    isinstance(inner_value, str)
1574
                    or isinstance(inner_value, int)
1575
                    or isinstance(inner_value, float)
1576
                    or isinstance(inner_value, bool)
1577
                    for inner_value in value[0].values()
1578
                )
1579
                for value in defaults.values()
1580
            )
1581
        ):
1582
            raise RuntimeError(
1✔
1583
                "Unexpected .islarc format: defaults should be a "
1584
                + "non-nested array of tables"
1585
            )
1586

1587
        for key, value in defaults.items():
1✔
1588
            for inner_key, inner_value in value[0].items():
1✔
1589
                result.setdefault(key, {}).setdefault(inner_key, inner_value)
1✔
1590

1591
    return result
1✔
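
# Illustrative sketch (not part of cli.py): a minimal `.islarc` in the TOML shape
# expected above, i.e. a non-nested array of tables under `defaults` that `toml.loads`
# turns into the dictionary structure shown in the comment inside the loop. The
# concrete values are made up for illustration.
#
#     [[defaults.default]]
#     "--log-level" = "WARNING"
#
#     [[defaults.solve]]
#     "--tree" = false
#     "--pretty-print" = false
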
1592

1593

1594
def get_default(
1✔
1595
    stderr, command: str, argument: str, content: Maybe[str] = Nothing
1596
) -> Maybe[str | int | float | bool]:
1597
    try:
1✔
1598
        config = read_isla_rc_defaults(content)
1✔
1599
    except RuntimeError as err:
1✔
1600
        print(f"isla {command}: error: could not load .islarc ({err})", file=stderr)
1✔
1601
        sys.exit(1)
1✔
1602

1603
    default = config.get("default", {}).get(argument, None)
1✔
1604
    return Some(config.get(command, {}).get(argument, default))
1✔
1605

1606

1607
def derivation_tree_to_json(tree: DerivationTree, pretty_print: bool = False) -> str:
1✔
1608
    return json.dumps(tree.to_parse_tree(), indent=None if not pretty_print else 4)
1✔
1609

1610

1611
if __name__ == "__main__":
1✔
1612
    if "-O" in sys.argv:
×
1613
        sys.argv.remove("-O")
×
1614
        os.execl(sys.executable, sys.executable, "-O", *sys.argv)
×
1615
    else:
1616
        main()
×