• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

type-ruby / t-ruby / 20573109039

29 Dec 2025 12:40PM UTC coverage: 92.341% (+13.3%) from 79.076%
20573109039

Pull #30

github

web-flow
Merge 203008a55 into a7c451da7
Pull Request #30: feat: improve error messages with tsc-style diagnostics

571 of 640 new or added lines in 14 files covered. (89.22%)

4 existing lines in 2 files now uncovered.

8210 of 8891 relevant lines covered (92.34%)

1046.45 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

90.15
/lib/t_ruby/cache.rb
1
# frozen_string_literal: true

require "digest"
require "etc"
require "fileutils"
require "json"
6

7
module TRuby
1✔
8
  # Cache entry with metadata
  #
  # Wraps a cached value together with bookkeeping data: creation time,
  # time of last access and a hit counter.
  class CacheEntry
    attr_reader :key, :value, :created_at, :accessed_at, :hits

    def initialize(key, value)
      now = Time.now
      @key = key
      @value = value
      @created_at = now
      @accessed_at = now
      @hits = 0
    end

    # Record an access: bump the hit counter, refresh the access
    # timestamp and hand back the stored value.
    def access
      @hits += 1
      @accessed_at = Time.now
      @value
    end

    # True when the entry was created more than +max_age+ seconds ago.
    def stale?(max_age)
      (Time.now - @created_at) > max_age
    end

    # Serializable snapshot (creation time as epoch seconds).
    def to_h
      { key: @key, value: @value, created_at: @created_at.to_i, hits: @hits }
    end
  end
39

40
  # In-memory LRU cache
  #
  # Thread-safe least-recently-used cache. Recency is tracked with the
  # insertion order of the backing Hash: touching a key removes and
  # re-inserts it, so the hash's first key is always the least recently
  # used. This replaces the previous Array-based access list, whose
  # Array#delete scan made every get/set O(n) in the cache size.
  class MemoryCache
    attr_reader :max_size, :hits, :misses

    # max_size: number of entries retained before LRU eviction kicks in.
    def initialize(max_size: 1000)
      @max_size = max_size
      @cache = {} # key => CacheEntry; insertion order == recency order
      @hits = 0
      @misses = 0
      @mutex = Mutex.new
    end

    # Fetch the value for +key+, or nil on a miss. Updates the hit/miss
    # counters and marks the entry as most recently used.
    def get(key)
      @mutex.synchronize do
        if @cache.key?(key)
          @hits += 1
          touch(key)
          @cache[key].access
        else
          @misses += 1
          nil
        end
      end
    end

    # Store +value+ under +key+, evicting the LRU entry when inserting a
    # new key into a full cache. Returns the value.
    def set(key, value)
      @mutex.synchronize do
        evict if @cache.size >= @max_size && !@cache.key?(key)

        @cache[key] = CacheEntry.new(key, value)
        touch(key)
        value
      end
    end

    # Remove +key+; returns the key when an entry was removed, else nil.
    def delete(key)
      @mutex.synchronize do
        key if @cache.delete(key)
      end
    end

    # Drop every entry and reset the hit/miss statistics.
    def clear
      @mutex.synchronize do
        @cache.clear
        @hits = 0
        @misses = 0
      end
    end

    # Current number of cached entries.
    def size
      @cache.size
    end

    # Fraction of lookups that hit; 0.0 before any lookup happened.
    def hit_rate
      total = @hits + @misses
      return 0.0 if total.zero?

      @hits.to_f / total
    end

    # Snapshot of cache counters for reporting.
    def stats
      {
        size: size,
        max_size: @max_size,
        hits: @hits,
        misses: @misses,
        hit_rate: hit_rate,
      }
    end

    private

    # Mark +key+ as most recently used by moving it to the end of the
    # hash's insertion order (O(1), unlike the old Array#delete).
    def touch(key)
      entry = @cache.delete(key)
      @cache[key] = entry if entry
    end

    # Evict the least recently used entry (the hash's first key).
    def evict
      return if @cache.empty?

      oldest_key, = @cache.first
      @cache.delete(oldest_key)
    end
  end
128

129
  # File-based persistent cache
  #
  # Stores JSON-serialized values on disk, one file per key, named by a
  # truncated SHA-256 digest of the key. Entries expire after +max_age+
  # seconds, judged by the cache file's mtime.
  class FileCache
    attr_reader :cache_dir, :max_age

    def initialize(cache_dir: ".t-ruby-cache", max_age: 3600)
      @cache_dir = cache_dir
      @max_age = max_age
      FileUtils.mkdir_p(@cache_dir)
    end

    # Read a cached value (hashes come back with symbol keys). Returns
    # nil when the entry is absent, expired or corrupt; expired and
    # corrupt files are deleted on the way out.
    def get(key)
      path = cache_path(key)
      return nil unless File.exist?(path)

      if expired?(path)
        File.delete(path)
        nil
      else
        JSON.parse(File.read(path), symbolize_names: true)
      end
    rescue JSON::ParserError
      File.delete(path)
      nil
    end

    # Persist +value+ as JSON; returns the value.
    def set(key, value)
      File.write(cache_path(key), JSON.generate(value))
      value
    end

    # Remove one entry (no-op when absent).
    def delete(key)
      FileUtils.rm_f(cache_path(key))
    end

    # Drop every entry by recreating the cache directory.
    def clear
      FileUtils.rm_rf(@cache_dir)
      FileUtils.mkdir_p(@cache_dir)
    end

    # Delete only the entries that have outlived +max_age+.
    def prune
      Dir.glob(File.join(@cache_dir, "*.json")).each do |path|
        File.delete(path) if expired?(path)
      end
    end

    private

    # True when the file at +path+ is older than the configured max age.
    def expired?(path)
      File.mtime(path) < Time.now - @max_age
    end

    # Deterministic on-disk location for +key+ (16 hex chars of SHA-256).
    def cache_path(key)
      File.join(@cache_dir, "#{Digest::SHA256.hexdigest(key.to_s)[0, 16]}.json")
    end
  end
185

186
  # AST parse tree cache
  #
  # Two-tier cache keyed by a SHA-256 digest of the source text: a fast
  # in-memory tier backed by an optional persistent file tier.
  class ParseCache
    def initialize(memory_cache: nil, file_cache: nil)
      @memory_cache = memory_cache || MemoryCache.new(max_size: 500)
      @file_cache = file_cache
    end

    # Look up the parse result for +source+. The memory tier is checked
    # first; a file-tier hit is promoted back into memory. Returns nil
    # on a complete miss.
    def get(source)
      key = source_key(source)

      in_memory = @memory_cache.get(key)
      return in_memory if in_memory

      return nil unless @file_cache

      on_disk = @file_cache.get(key)
      return nil unless on_disk

      @memory_cache.set(key, on_disk)
      on_disk
    end

    # Store +parse_result+ in every configured tier; returns it.
    def set(source, parse_result)
      key = source_key(source)
      @memory_cache.set(key, parse_result)
      @file_cache&.set(key, parse_result)
      parse_result
    end

    # Drop any cached result for +source+ from all tiers.
    def invalidate(source)
      key = source_key(source)
      @memory_cache.delete(key)
      @file_cache&.delete(key)
    end

    # Statistics of the in-memory tier.
    def stats
      @memory_cache.stats
    end

    private

    # Content-addressed cache key for a source string.
    def source_key(source)
      Digest::SHA256.hexdigest(source)
    end
  end
237

238
  # Type resolution cache
  #
  # Thin memoization layer over a MemoryCache, mapping a type
  # expression to its resolved type.
  class TypeResolutionCache
    # Capacity of the underlying LRU store.
    MAX_ENTRIES = 2000

    def initialize
      @cache = MemoryCache.new(max_size: MAX_ENTRIES)
    end

    # Previously resolved type for +type_expression+, or nil.
    def get(type_expression)
      @cache.get(type_expression)
    end

    # Memoize +resolved_type+ under +type_expression+; returns it.
    def set(type_expression, resolved_type)
      @cache.set(type_expression, resolved_type)
    end

    # Forget every memoized resolution.
    def clear
      @cache.clear
    end

    # Underlying cache statistics.
    def stats
      @cache.stats
    end
  end
260

261
  # Declaration file cache
  #
  # Caches per-file declaration data. Cache keys embed the source
  # file's mtime, so any modification to the file naturally invalidates
  # stale entries. Backed by a small in-memory LRU plus a 24-hour
  # persistent file cache.
  class DeclarationCache
    def initialize(cache_dir: ".t-ruby-cache/declarations")
      @file_cache = FileCache.new(cache_dir: cache_dir, max_age: 86_400) # 24 hours
      @memory_cache = MemoryCache.new(max_size: 200)
    end

    # Declarations cached for the current version of +file_path+, or
    # nil when the file is missing or no fresh entry exists. File-tier
    # hits are promoted into the memory tier.
    def get(file_path)
      return nil unless File.exist?(file_path)

      key = mtime_key(file_path)

      cached = @memory_cache.get(key)
      return cached if cached

      persisted = @file_cache.get(key)
      return nil unless persisted

      @memory_cache.set(key, persisted)
      persisted
    end

    # Store +declarations+ for the current version of +file_path+ in
    # both tiers; returns the declarations.
    # NOTE(review): File.mtime raises if file_path does not exist —
    # callers appear to guarantee existence; confirm before relying on it.
    def set(file_path, declarations)
      key = mtime_key(file_path)

      @memory_cache.set(key, declarations)
      @file_cache.set(key, declarations)

      declarations
    end

    # Empty both tiers.
    def clear
      @memory_cache.clear
      @file_cache.clear
    end

    private

    # Cache key combining path and mtime so edits self-invalidate.
    def mtime_key(file_path)
      "#{file_path}:#{File.mtime(file_path).to_i}"
    end
  end
304

305
  # Incremental compilation support
  #
  # Tracks per-file content hashes (SHA-256) and inter-file dependencies
  # so that only changed files — or files whose dependencies changed —
  # are recompiled.
  class IncrementalCompiler
    attr_reader :file_hashes, :dependencies

    # compiler: object responding to #compile(file_path).
    # cache: parse cache (defaults to a fresh ParseCache).
    def initialize(compiler, cache: nil)
      @compiler = compiler
      @cache = cache || ParseCache.new
      @file_hashes = {}
      @dependencies = {}
      @compiled_files = {}
    end

    # Check if file needs recompilation: true when the file is missing,
    # its content hash changed since the last compile, or any transitive
    # dependency needs recompilation.
    #
    # _visited is internal: it guards the dependency walk so that cyclic
    # graphs (a -> b -> a) terminate instead of recursing forever, and
    # shared dependencies are not re-hashed exponentially. (Fix: the
    # previous implementation had no such guard and stack-overflowed on
    # cycles.)
    def needs_compile?(file_path, _visited = nil)
      _visited ||= {}
      return false if _visited.key?(file_path)

      _visited[file_path] = true

      return true unless File.exist?(file_path)

      current_hash = compute_file_hash(file_path)
      stored_hash = @file_hashes[file_path]

      return true if stored_hash.nil? || stored_hash != current_hash

      # Check dependencies
      deps = @dependencies[file_path] || []
      deps.any? { |dep| needs_compile?(dep, _visited) }
    end

    # Compile file with caching: delegates to @compiler, then records
    # the content hash and result. Returns the cached result when the
    # file (and its dependencies) are unchanged.
    def compile(file_path)
      return @compiled_files[file_path] unless needs_compile?(file_path)

      result = @compiler.compile(file_path)
      @file_hashes[file_path] = compute_file_hash(file_path)
      @compiled_files[file_path] = result

      result
    end

    # Compile multiple files, skipping unchanged ones. Returns a hash of
    # file_path => result for the files that were (re)compiled.
    def compile_all(file_paths)
      file_paths.select { |f| needs_compile?(f) }
                .each_with_object({}) { |file_path, results| results[file_path] = compile(file_path) }
    end

    # Register that +file_path+ depends on +depends_on+ (idempotent).
    def add_dependency(file_path, depends_on)
      deps = (@dependencies[file_path] ||= [])
      deps << depends_on unless deps.include?(depends_on)
    end

    # Clear compilation cache
    def clear
      @file_hashes.clear
      @dependencies.clear
      @compiled_files.clear
      @cache.stats # Just accessing for potential cleanup
    end

    # Update file hash after external compile (for watcher integration)
    def update_file_hash(file_path)
      @file_hashes[file_path] = compute_file_hash(file_path)
    end

    private

    # SHA-256 of the file's content; nil when the file does not exist.
    def compute_file_hash(file_path)
      return nil unless File.exist?(file_path)

      Digest::SHA256.hexdigest(File.read(file_path))
    end
  end
381

382
  # Parallel file processor
  #
  # Runs a user-supplied block over many files with a small pool of
  # threads (NOTE: on CRuby the GVL limits this to I/O concurrency).
  class ParallelProcessor
    attr_reader :thread_count

    # thread_count defaults to the machine's core count, capped at 8.
    def initialize(thread_count: nil)
      @thread_count = thread_count || determine_thread_count
    end

    # Process files in parallel by pre-splitting them into one batch per
    # thread. Returns the collected block results (order not guaranteed).
    def process_files(file_paths, &block)
      return [] if file_paths.empty?

      # Split into batches
      batches = file_paths.each_slice(batch_size(file_paths.length)).to_a

      results = []
      mutex = Mutex.new

      threads = batches.map do |batch|
        Thread.new do
          batch_results = batch.map { |file| block.call(file) }
          mutex.synchronize { results.concat(batch_results) }
        end
      end

      threads.each(&:join)
      results
    end

    # Process with work stealing: workers pull files off a shared queue,
    # balancing load better than fixed batches when per-file cost varies.
    # Returns the collected results (order not guaranteed).
    def process_with_queue(file_paths, &block)
      queue = Queue.new
      file_paths.each { |f| queue << f }

      results = []
      mutex = Mutex.new

      threads = @thread_count.times.map do
        Thread.new do
          loop do
            file = begin
              queue.pop(true)
            rescue ThreadError
              # Non-blocking pop raises ThreadError on an empty queue —
              # that is this worker's termination signal. (Fix: was
              # `rescue StandardError`, which also swallowed unrelated
              # errors and silently ended the worker.)
              break
            end
            result = block.call(file)
            mutex.synchronize { results << result }
          end
        end
      end

      threads.each(&:join)
      results
    end

    private

    # Number of CPU cores, capped at 8; falls back to 4 if Etc is
    # unavailable. (Fix: "etc" is now required at the top of the file —
    # previously it was never required, so Etc raised NameError here and
    # the fallback of 4 was always used.)
    def determine_thread_count
      # Use number of CPU cores, max 8
      [Etc.nprocessors, 8].min
    rescue StandardError
      4
    end

    # Files per batch so work splits roughly evenly across threads.
    def batch_size(total)
      [total / @thread_count, 1].max
    end
  end
450

451
  # Cross-file Type Checker
  #
  # Aggregates type-alias, interface and function declarations from many
  # files into a global registry, then checks for duplicate definitions
  # and unresolved type references across the whole program.
  class CrossFileTypeChecker
    attr_reader :errors, :warnings, :file_types

    # Built-in type names that always resolve. Frozen constant so the
    # array is not re-allocated on every lookup.
    BUILTIN_TYPES = %w[String Integer Float Boolean Array Hash Symbol void nil Object Numeric
                       Enumerable].freeze

    def initialize(type_checker: nil)
      @type_checker = type_checker || TypeChecker.new
      @file_types = {} # file_path => { types: [], functions: [], interfaces: [] }
      @global_registry = {} # name => { file: path, kind: :type/:func/:interface, definition: ... }
      @errors = []
      @warnings = []
    end

    # Register types from a file: walks the IR declarations and records
    # every type alias, interface and method both per-file and in the
    # global registry.
    def register_file(file_path, ir_program)
      types = []
      functions = []
      interfaces = []

      ir_program.declarations.each do |decl|
        case decl
        when IR::TypeAlias
          types << { name: decl.name, definition: decl.definition }
          register_global(decl.name, file_path, :type, decl)
        when IR::Interface
          interfaces << { name: decl.name, members: decl.members }
          register_global(decl.name, file_path, :interface, decl)
        when IR::MethodDef
          functions << { name: decl.name, params: decl.params, return_type: decl.return_type }
          register_global(decl.name, file_path, :function, decl)
        end
      end

      @file_types[file_path] = { types: types, functions: functions, interfaces: interfaces }
    end

    # Check cross-file type consistency over all registered files.
    # Returns { success:, errors:, warnings: }.
    def check_all
      # Fix: do not reset @warnings here — register_file records
      # multiple-definition warnings before check_all runs, and the old
      # `@warnings = []` silently discarded them.
      @errors = []

      # Check for duplicate definitions
      check_duplicate_definitions

      # Check for unresolved type references
      check_unresolved_references

      # Check interface implementations
      check_interface_implementations

      {
        success: @errors.empty?,
        errors: @errors,
        warnings: @warnings,
      }
    end

    # Check a specific file against global types. Returns an array of
    # { file:, message: } error hashes (empty when clean).
    def check_file(file_path, ir_program)
      file_errors = []

      ir_program.declarations.each do |decl|
        case decl
        when IR::MethodDef
          # Check parameter types
          decl.params.each do |param|
            next unless param.type_annotation && !type_exists?(param.type_annotation)

            file_errors << {
              file: file_path,
              message: "Unknown type '#{type_name(param.type_annotation)}' in parameter '#{param.name}'",
            }
          end

          # Check return type
          if decl.return_type && !type_exists?(decl.return_type)
            file_errors << {
              file: file_path,
              message: "Unknown return type '#{type_name(decl.return_type)}' in function '#{decl.name}'",
            }
          end
        end
      end

      file_errors
    end

    # Names of all registered types/functions/interfaces.
    def all_types
      @global_registry.keys
    end

    # Find where a type is defined; returns the registry entry or nil.
    def find_definition(name)
      @global_registry[name]
    end

    # Clear all registrations and accumulated diagnostics.
    def clear
      @file_types.clear
      @global_registry.clear
      @errors.clear
      @warnings.clear
    end

    private

    # Record +name+ in the global registry, warning when it was already
    # defined in a different file (last definition wins).
    def register_global(name, file_path, kind, definition)
      if @global_registry[name] && @global_registry[name][:file] != file_path
        # Duplicate definition from different file
        @warnings << {
          message: "#{kind.to_s.capitalize} '#{name}' defined in multiple files",
          files: [@global_registry[name][:file], file_path],
        }
      end

      @global_registry[name] = { file: file_path, kind: kind, definition: definition }
    end

    # Flag names declared more than once within the same file.
    def check_duplicate_definitions
      # Fix: the previous implementation grouped @global_registry by
      # file, but the registry holds at most one entry per name, so
      # `names.count(n) > 1` could never be true and in-file duplicates
      # were silently missed. Scan the per-file declaration lists
      # instead — they retain every occurrence.
      @file_types.each do |file, info|
        names = (info[:types] + info[:functions] + info[:interfaces]).map { |d| d[:name] }
        duplicates = names.select { |n| names.count(n) > 1 }.uniq

        duplicates.each do |name|
          @errors << {
            file: file,
            message: "Duplicate definition of '#{name}'",
          }
        end
      end
    end

    # Flag type aliases whose definitions reference unknown types.
    def check_unresolved_references
      @file_types.each do |file_path, info|
        # Check type alias definitions for unresolved types
        info[:types].each do |type_info|
          referenced_types = extract_type_references(type_info[:definition])
          referenced_types.each do |ref|
            next if type_exists_by_name?(ref)

            @errors << {
              file: file_path,
              message: "Unresolved type reference '#{ref}' in type alias '#{type_info[:name]}'",
            }
          end
        end
      end
    end

    def check_interface_implementations
      # For future: check that classes implement all interface methods
    end

    # True when an IR type node resolves against the built-ins or the
    # global registry; compound types are checked recursively.
    def type_exists?(type_node)
      case type_node
      when IR::SimpleType
        type_exists_by_name?(type_node.name)
      when IR::GenericType
        type_exists_by_name?(type_node.base)
      when IR::UnionType
        type_node.types.all? { |t| type_exists?(t) }
      when IR::IntersectionType
        type_node.types.all? { |t| type_exists?(t) }
      when IR::NullableType
        type_exists?(type_node.inner_type)
      else
        true # Assume valid for unknown types
      end
    end

    # True for built-in names or anything in the global registry.
    def type_exists_by_name?(name)
      return true if BUILTIN_TYPES.include?(name)
      return true if @global_registry[name]

      false
    end

    # Human-readable name for an IR type node (used in messages).
    def type_name(type_node)
      case type_node
      when IR::SimpleType
        type_node.name
      when IR::GenericType
        "#{type_node.base}<...>"
      else
        type_node.to_s
      end
    end

    # All type names referenced (recursively) by an IR type definition.
    def extract_type_references(definition)
      return [] unless definition

      case definition
      when IR::SimpleType
        [definition.name]
      when IR::GenericType
        [definition.base] + definition.type_args.flat_map { |t| extract_type_references(t) }
      when IR::UnionType
        definition.types.flat_map { |t| extract_type_references(t) }
      when IR::IntersectionType
        definition.types.flat_map { |t| extract_type_references(t) }
      when IR::NullableType
        extract_type_references(definition.inner_type)
      else
        []
      end
    end
  end
660

661
  # Enhanced Incremental Compiler with IR and Cross-file support
  #
  # Extends IncrementalCompiler with a per-file IR cache and optional
  # cross-file type checking, reporting problems in the unified
  # Diagnostic format.
  class EnhancedIncrementalCompiler < IncrementalCompiler
    attr_reader :cross_file_checker, :ir_cache

    def initialize(compiler, cache: nil, enable_cross_file: true)
      super(compiler, cache: cache)
      @ir_cache = {} # file_path => IR::Program
      @cross_file_checker = CrossFileTypeChecker.new if enable_cross_file
    end

    # Compile with IR caching. Returns the cached result when the file
    # and its dependencies are unchanged.
    def compile_with_ir(file_path)
      return @compiled_files[file_path] unless needs_compile?(file_path)

      # Get IR from compiler
      ir_program = @compiler.compile_to_ir(file_path)
      @ir_cache[file_path] = ir_program

      # Register with cross-file checker
      @cross_file_checker&.register_file(file_path, ir_program)

      # Compile from IR
      result = @compiler.compile(file_path)
      # Fix: use the inherited compute_file_hash instead of a private
      # byte-for-byte duplicate (the old `file_hash` method), keeping
      # hashing consistent with IncrementalCompiler.
      @file_hashes[file_path] = compute_file_hash(file_path)
      @compiled_files[file_path] = result

      result
    end

    # Compile all with cross-file checking
    # Returns diagnostics using unified Diagnostic format:
    # { results:, diagnostics:, success: }.
    def compile_all_with_checking(file_paths)
      results = {}
      all_diagnostics = []

      # First pass: compile and register all files
      file_paths.each do |file_path|
        source = File.exist?(file_path) ? File.read(file_path) : nil

        begin
          results[file_path] = compile_with_ir(file_path)
        rescue TypeCheckError => e
          all_diagnostics << Diagnostic.from_type_check_error(e, file: file_path, source: source)
        rescue ParseError => e
          all_diagnostics << Diagnostic.from_parse_error(e, file: file_path, source: source)
        rescue Scanner::ScanError => e
          all_diagnostics << Diagnostic.from_scan_error(e, file: file_path, source: source)
        rescue StandardError => e
          # Fallback: wrap unexpected failures in a generic diagnostic so
          # one bad file cannot abort the whole batch.
          all_diagnostics << Diagnostic.new(
            code: "TR0001",
            message: e.message,
            file: file_path,
            line: 1,
            column: 1
          )
        end
      end

      # Second pass: cross-file type checking
      if @cross_file_checker
        check_result = @cross_file_checker.check_all
        check_result[:errors].each do |e|
          all_diagnostics << Diagnostic.new(
            code: "TR2002",
            message: e[:message],
            file: e[:file],
            line: 1,
            column: 1
          )
        end
      end

      {
        results: results,
        diagnostics: all_diagnostics,
        success: all_diagnostics.empty?,
      }
    end

    # Get cached IR for a file (nil unless compiled via compile_with_ir).
    def get_ir(file_path)
      @ir_cache[file_path]
    end

    # Clear all caches
    def clear
      super
      @ir_cache.clear
      @cross_file_checker&.clear
    end
  end
760

761
  # Compilation profiler
  #
  # Accumulates monotonic wall-clock time and call counts per named
  # section, for ad-hoc profiling of compilation phases.
  class CompilationProfiler
    def initialize
      @timings = {}
      @call_counts = {}
    end

    # Time a block under +name+, accumulating elapsed seconds and the
    # call count. Returns the block's result.
    def profile(name, &block)
      started_at = Process.clock_gettime(Process::CLOCK_MONOTONIC)
      outcome = block.call
      elapsed = Process.clock_gettime(Process::CLOCK_MONOTONIC) - started_at

      @timings[name] = @timings.fetch(name, 0.0) + elapsed
      @call_counts[name] = @call_counts.fetch(name, 0) + 1

      outcome
    end

    # Print a per-section summary to stdout, slowest first.
    def report
      puts "=== Compilation Profile ==="
      @timings.sort_by { |_, total| -total }.each do |name, time|
        calls = @call_counts[name]
        avg = time / calls
        puts "#{name}: #{format("%.3f", time)}s total, #{calls} calls, #{format("%.3f", avg * 1000)}ms avg"
      end
    end

    # Forget all recorded timings and counts.
    def reset
      @timings.clear
      @call_counts.clear
    end

    # Structured snapshot: one entry per profiled name.
    def to_h
      @timings.map do |name, total|
        calls = @call_counts[name]
        {
          name: name,
          total_time: total,
          call_count: calls,
          avg_time: total / calls,
        }
      end
    end
  end
807
end
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2025 Coveralls, Inc