• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

gcivil-nyu-org / team4-wed-fall25 / 55

03 Nov 2025 03:34PM UTC coverage: 56.582% (+7.8%) from 48.812%
55

push

travis-pro

web-flow
Syncing the develop and main branch

Syncing Prod and Dev

201 of 388 new or added lines in 11 files covered. (51.8%)

5 existing lines in 1 file now uncovered.

447 of 790 relevant lines covered (56.58%)

0.57 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

42.86
/note2webapp/utils.py
1
import importlib.util
1✔
2
import traceback
1✔
3
import json
1✔
4
import os
1✔
5
import torch
1✔
6

7
# Maps schema type-name strings (as they appear in a version's JSON schema)
# to the Python types used for dummy-input generation and output validation.
TYPE_MAP = {"float": float, "int": int, "str": str, "bool": bool}
8

9

10
def generate_dummy_input(schema_path):
    """Build a dummy input dict from a model's JSON schema file.

    The schema file must contain an ``"input"`` mapping of field name to
    type name (``"float"``, ``"int"``, ``"str"`` or ``"bool"``). Each field
    is assigned a fixed sample value of the matching type.

    Args:
        schema_path: Path to the JSON schema file.

    Returns:
        Tuple ``(dummy_input, output_schema)`` where ``dummy_input`` maps
        each input field to its sample value and ``output_schema`` is the
        schema's ``"output"`` mapping (``{}`` if absent).

    Raises:
        ValueError: If a field declares an unsupported type name.
    """
    # One representative sample value per supported schema type name.
    # Direct lookup replaces the previous if/elif chain of type-object
    # comparisons; the produced values are identical.
    sample_values = {"float": 1.0, "int": 42, "str": "example", "bool": True}

    with open(schema_path, "r") as f:
        schema = json.load(f)

    dummy = {}
    for key, typ in schema.get("input", {}).items():
        if typ not in sample_values:
            raise ValueError(f"Unsupported type: {typ}")
        dummy[key] = sample_values[typ]
    return dummy, schema.get("output", {})
29

30

31
def validate_model(version):
    """Validate a model version by running its predict() on dummy input.

    Dynamically imports the version's predict.py, builds a dummy input from
    its JSON schema, calls ``predict()``, and checks the output keys and
    value types against the schema's ``"output"`` section. The outcome is
    recorded on the version object (``status`` "PASS"/"FAIL" plus a
    human-readable ``log``) and persisted via ``save()``.

    Args:
        version: Model-version object exposing ``model_file``,
            ``predict_file`` and ``schema_file`` file fields plus
            ``status``/``log`` attributes and a ``save()`` method
            (presumably a Django model instance — confirm against caller).

    Returns:
        The same version object, after ``save()``.
    """
    # predict.py may load model.pt via a relative path, so we chdir into the
    # model's directory. The cwd is process-global state shared by the whole
    # app, so it MUST be restored afterwards (the previous version leaked it).
    original_cwd = os.getcwd()
    try:
        # Change to directory containing model.pt
        model_dir = os.path.dirname(version.model_file.path)
        os.chdir(model_dir)

        # Dynamically import predict.py
        spec = importlib.util.spec_from_file_location(
            "predict", version.predict_file.path
        )
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)

        if not hasattr(module, "predict"):
            raise Exception("predict() function missing in predict.py")

        # Load schema + generate dummy input
        if not version.schema_file:
            raise Exception("No schema file provided")
        dummy_input, expected_output = generate_dummy_input(version.schema_file.path)

        # Call predict with ONLY dummy input
        output = module.predict(dummy_input)

        # Validate output: must be a dict carrying every schema key with the
        # declared type.
        if not isinstance(output, dict):
            raise Exception("predict() must return a dict")

        for key, typ in expected_output.items():
            if key not in output:
                raise Exception(f"Missing key in output: {key}")
            if not isinstance(output[key], TYPE_MAP.get(typ)):
                raise Exception(
                    f"Wrong type for '{key}': expected {typ}, got {type(output[key]).__name__}"
                )

        version.status = "PASS"
        version.log = f"Validation successful ✅\nInput: {json.dumps(dummy_input)}\nOutput: {json.dumps(output, indent=2)}"

    except Exception:
        # Any failure (import error, missing schema, bad output, ...) marks
        # the version FAIL and stores the full traceback for debugging.
        version.status = "FAIL"
        version.log = traceback.format_exc()
    finally:
        # Undo the chdir so the rest of the process is unaffected, even when
        # validation raised.
        os.chdir(original_cwd)

    version.save()
    return version
76

77

78
def test_model_on_cpu(version, input_data):
    """
    Run the selected version's predict() on CPU.

    Dynamically imports the version's predict.py, replaces its
    ``_load_model()`` with one that loads THIS version's model weights on
    CPU, then calls ``predict(input_data)``.

    Args:
        version: Model-version object exposing ``predict_file`` and
            ``model_file`` file fields (presumably a Django model instance).
        input_data: Input passed straight through to the module's predict().

    Returns:
        ``{"status": "ok", "output": ...}`` on success, or
        ``{"status": "error", "error": ..., "trace": ...}`` on any failure
        (this function never raises).
    """
    try:
        predict_path = version.predict_file.path
        model_path = version.model_file.path

        # Dynamically import the version's predict.py.
        spec = importlib.util.spec_from_file_location("predict_module", predict_path)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)

        if not hasattr(module, "_load_model"):
            raise Exception("predict.py missing _load_model()")

        # Force the model loader to use this version's model file.
        def patched_load_model():
            # NOTE(review): this assumes predict.py defines a TinyRegressor
            # with in_features=4 — guard explicitly so a mismatched
            # predict.py fails with a clear message instead of an opaque
            # AttributeError deep inside predict().
            if not hasattr(module, "TinyRegressor"):
                raise Exception("predict.py missing TinyRegressor class")
            m = module.TinyRegressor(in_features=4)
            m.load_state_dict(torch.load(model_path, map_location="cpu"))
            m.eval()  # inference mode: disable dropout / batch-norm updates
            return m

        module._load_model = patched_load_model

        output = module.predict(input_data)
        return {"status": "ok", "output": output}
    except Exception as e:
        # Uses the module-level traceback import; the previous redundant
        # local `import traceback` that shadowed it has been removed.
        return {"status": "error", "error": str(e), "trace": traceback.format_exc()}
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc