MilesCranmer / SymbolicRegression.jl / build 9639805727

24 Jun 2024 05:00AM UTC coverage: 94.475% (-0.1%) from 94.617%

Pull Request #326: BREAKING: Change expression types to `DynamicExpressions.Expression` (from `DynamicExpressions.Node`)
Merge 3ba1556f8 into ceddaa424

239 of 250 new or added lines in 15 files covered (95.6%).

4 existing lines in 3 files now uncovered.

2548 of 2697 relevant lines covered (94.48%).

46539295.05 hits per line.
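For reference, the headline figures above are consistent with plain ratios of the reported line counts (a quick check, assuming the percentages are computed as simple ratios and rounded for display):

# Quick arithmetic check of the reported coverage figures (assumption: plain ratios).
239 / 250        # ≈ 0.956  → 95.6% of new or added lines covered
2548 / 2697      # ≈ 0.9448 → 94.48% of relevant lines covered
94.475 - 94.617  # ≈ -0.14, shown rounded as the -0.1% change from 94.617%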

Source File: /src/ConstantOptimization.jl (97.96% of lines covered)
module ConstantOptimizationModule

using LineSearches: LineSearches
using Optim: Optim
using DifferentiationInterface: value_and_gradient
using DynamicExpressions: Expression, Node, count_constants, get_constants, set_constants!
using ..CoreModule: Options, Dataset, DATA_TYPE, LOSS_TYPE
using ..UtilsModule: get_birth_order
using ..LossFunctionsModule: eval_loss, loss_to_score, batch_sample
using ..PopMemberModule: PopMember

function optimize_constants(
    dataset::Dataset{T,L}, member::P, options::Options
)::Tuple{P,Float64} where {T<:DATA_TYPE,L<:LOSS_TYPE,P<:PopMember{T,L}}
    if options.batching
        dispatch_optimize_constants(
            dataset, member, options, batch_sample(dataset, options)
        )
    else
        dispatch_optimize_constants(dataset, member, options, nothing)
    end
end
function dispatch_optimize_constants(
    dataset::Dataset{T,L}, member::P, options::Options, idx
) where {T<:DATA_TYPE,L<:LOSS_TYPE,P<:PopMember{T,L}}
    nconst = count_constants_for_optimization(member.tree)
    nconst == 0 && return (member, 0.0)
    if nconst == 1 && !(T <: Complex)
        algorithm = Optim.Newton(; linesearch=LineSearches.BackTracking())
        return _optimize_constants(
            dataset, member, options, algorithm, options.optimizer_options, idx
        )
    end
    return _optimize_constants(
        dataset,
        member,
        options,
        options.optimizer_algorithm,
        options.optimizer_options,
        idx,
    )
end
count_constants_for_optimization(ex::Expression) = count_constants(ex)

function _optimize_constants(
    dataset, member::P, options, algorithm, optimizer_options, idx
)::Tuple{P,Float64} where {T,L,P<:PopMember{T,L}}
    tree = member.tree
    eval_fraction = options.batching ? (options.batch_size / dataset.n) : 1.0
    f = Evaluator(dataset, options, idx)
    fg! = GradEvaluator(f)
    obj = if algorithm isa Optim.Newton || options.autodiff_backend === nothing
        f
    else
        Optim.only_fg!(fg!)
    end
    baseline = f(tree)
    x0, refs = get_constants(tree)
    result = Optim.optimize(obj, tree, algorithm, optimizer_options)
    num_evals = result.f_calls * eval_fraction
    # Try other initial conditions:
    for _ in 1:(options.optimizer_nrestarts)
        tmptree = copy(tree)
        eps = randn(T, size(x0)...)
        xt = @. x0 * (T(1) + T(1//2) * eps)
        set_constants!(tmptree, xt, refs)
        tmpresult = Optim.optimize(
            obj, tmptree, algorithm, optimizer_options; make_copy=false
        )
        num_evals += tmpresult.f_calls * eval_fraction
        # TODO: Does this need to take into account h_calls?

        if tmpresult.minimum < result.minimum
            result = tmpresult
        end
    end

    if result.minimum < baseline
        member.tree = result.minimizer::typeof(member.tree)
        member.loss = eval_loss(member.tree, dataset, options; regularization=true, idx=idx)
        member.score = loss_to_score(
            member.loss, dataset.use_baseline, dataset.baseline_loss, member, options
        )
        member.birth = get_birth_order(; deterministic=options.deterministic)
        num_evals += eval_fraction
    end

    return member, num_evals
end

struct Evaluator{D<:Dataset,O<:Options,I} <: Function
    dataset::D
    options::O
    idx::I
end
(e::Evaluator)(t) = eval_loss(t, e.dataset, e.options; regularization=false, idx=e.idx)
struct GradEvaluator{F<:Evaluator} <: Function
    f::F
end
function (g::GradEvaluator)(F, G, t)
    (val, grad) = value_and_gradient(g.f, g.f.options.autodiff_backend, t)
    if G !== nothing && grad !== nothing && grad.tree !== nothing
        G .= grad.tree.gradient  # new in this PR; uncovered in this build
    end
    return val
end

end
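The GradEvaluator functor above follows Optim.jl's fused objective-and-gradient interface: wrapped in Optim.only_fg!, it is called as fg!(F, G, x), writing the gradient into G in place when G !== nothing and returning the objective value. A minimal, self-contained toy sketch of that protocol (illustrative only; the quadratic objective and starting point below are not part of SymbolicRegression.jl):

using Optim

function fg!(F, G, x)
    # Toy objective: f(x) = (x₁ - 1)² + 4(x₂ - 2)²
    if G !== nothing
        # Fill the gradient buffer in place when Optim asks for it.
        G[1] = 2 * (x[1] - 1)
        G[2] = 8 * (x[2] - 2)
    end
    if F !== nothing
        # Return the objective value when Optim asks for it.
        return (x[1] - 1)^2 + 4 * (x[2] - 2)^2
    end
    return nothing
end

result = Optim.optimize(Optim.only_fg!(fg!), [0.0, 0.0], Optim.BFGS())
Optim.minimizer(result)  # ≈ [1.0, 2.0]

In _optimize_constants, this wrapping is only used when options.autodiff_backend is set and the algorithm is not Optim.Newton; otherwise the plain Evaluator functor is passed to Optim.optimize directly.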