
MilesCranmer / SymbolicRegression.jl / 9704727222

27 Jun 2024 11:01PM UTC coverage: 95.922% (+1.3%) from 94.617%
Pull Request #326: BREAKING: Change expression types to `DynamicExpressions.Expression` (from `DynamicExpressions.Node`)
Merge 1f104aaf8 into ceddaa424
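For context on this change: `DynamicExpressions.Node` is a bare expression-tree node, while `DynamicExpressions.Expression` wraps a tree together with its operators and variable names. A minimal illustrative sketch, not taken from this PR; the `Expression` constructor keywords are assumptions about the current DynamicExpressions API:

using DynamicExpressions

operators = OperatorEnum(; binary_operators=[+, -, *, /], unary_operators=[cos])
x1 = Node{Float64}(; feature=1)    # old interface: a bare tree node
c = Node{Float64}(; val=2.5)
tree = x1 * c                      # build `x1 * 2.5` via the overloaded operators
ex = Expression(tree; operators=operators, variable_names=["x1"])  # new wrapper type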

301 of 307 new or added lines in 17 files covered. (98.05%)

1 existing line in 1 file now uncovered.

2611 of 2722 relevant lines covered (95.92%)

35611300.15 hits per line

Source File: /src/ConstantOptimization.jl (97.96% covered)
module ConstantOptimizationModule

using LineSearches: LineSearches
using Optim: Optim
using DifferentiationInterface: value_and_gradient
using DynamicExpressions: Expression, Node, count_constants, get_constants, set_constants!
using ..CoreModule: Options, Dataset, DATA_TYPE, LOSS_TYPE
using ..UtilsModule: get_birth_order
using ..LossFunctionsModule: eval_loss, loss_to_score, batch_sample
using ..PopMemberModule: PopMember

function optimize_constants(
    dataset::Dataset{T,L}, member::P, options::Options
)::Tuple{P,Float64} where {T<:DATA_TYPE,L<:LOSS_TYPE,P<:PopMember{T,L}}
    if options.batching
        dispatch_optimize_constants(
            dataset, member, options, batch_sample(dataset, options)
        )
    else
        dispatch_optimize_constants(dataset, member, options, nothing)
    end
end
function dispatch_optimize_constants(
    dataset::Dataset{T,L}, member::P, options::Options, idx
) where {T<:DATA_TYPE,L<:LOSS_TYPE,P<:PopMember{T,L}}
    nconst = count_constants_for_optimization(member.tree)
    nconst == 0 && return (member, 0.0)
    if nconst == 1 && !(T <: Complex)
        algorithm = Optim.Newton(; linesearch=LineSearches.BackTracking())
        return _optimize_constants(
            dataset, member, options, algorithm, options.optimizer_options, idx
        )
    end
    return _optimize_constants(
        dataset,
        member,
        options,
        options.optimizer_algorithm,
        options.optimizer_options,
        idx,
    )
end
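Illustrative sketch (not part of this file): the single-constant special case above uses Optim.jl's Newton method with a backtracking line search from LineSearches.jl. On a hypothetical one-parameter objective that looks like:

using Optim, LineSearches

target(x) = (x[1] - 3.0)^2    # hypothetical one-constant objective
algorithm = Optim.Newton(; linesearch=LineSearches.BackTracking())
result = Optim.optimize(target, [0.0], algorithm; autodiff=:forward)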
count_constants_for_optimization(ex::Expression) = count_constants(ex)

function _optimize_constants(
    dataset, member::P, options, algorithm, optimizer_options, idx
)::Tuple{P,Float64} where {T,L,P<:PopMember{T,L}}
    tree = member.tree
    eval_fraction = options.batching ? (options.batch_size / dataset.n) : 1.0
    f = Evaluator(dataset, options, idx)
    fg! = GradEvaluator(f)
    obj = if algorithm isa Optim.Newton || options.autodiff_backend === nothing
        f
    else
        Optim.only_fg!(fg!)
    end
    baseline = f(tree)
    x0, refs = get_constants(tree)
    result = Optim.optimize(obj, tree, algorithm, optimizer_options)
    num_evals = result.f_calls * eval_fraction
    # Try other initial conditions:
    for _ in 1:(options.optimizer_nrestarts)
        tmptree = copy(tree)
        eps = randn(T, size(x0)...)
        xt = @. x0 * (T(1) + T(1//2) * eps)
        set_constants!(tmptree, xt, refs)
        tmpresult = Optim.optimize(
            obj, tmptree, algorithm, optimizer_options; make_copy=false
        )
        num_evals += tmpresult.f_calls * eval_fraction
        # TODO: Does this need to take into account h_calls?

        if tmpresult.minimum < result.minimum
            result = tmpresult
        end
    end

    if result.minimum < baseline
        member.tree = result.minimizer::typeof(member.tree)
        member.loss = eval_loss(member.tree, dataset, options; regularization=true, idx=idx)
        member.score = loss_to_score(
            member.loss, dataset.use_baseline, dataset.baseline_loss, member, options
        )
        member.birth = get_birth_order(; deterministic=options.deterministic)
        num_evals += eval_fraction
    end

    return member, num_evals
end
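Illustrative sketch (not part of this file): each restart above multiplicatively jitters the incumbent constants x0 by roughly +/-50% before re-optimizing from the perturbed point:

x0 = [1.0, -3.2, 0.5]             # hypothetical incumbent constants
eps = randn(Float64, size(x0)...)
xt = @. x0 * (1.0 + 0.5 * eps)    # perturbed starting point for the next restart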

struct Evaluator{D<:Dataset,O<:Options,I} <: Function
    dataset::D
    options::O
    idx::I
end
(e::Evaluator)(t) = eval_loss(t, e.dataset, e.options; regularization=false, idx=e.idx)
struct GradEvaluator{F<:Evaluator} <: Function
    f::F
end
function (g::GradEvaluator)(F, G, t)
    (val, grad) = value_and_gradient(g.f, g.f.options.autodiff_backend, t)
    if G !== nothing && grad !== nothing && grad.tree !== nothing
        G .= grad.tree.gradient  # new line in this PR; not covered by tests in this build
    end
    return val
end

end
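GradEvaluator follows Optim.jl's (F, G, x) convention: a single callback fills the gradient G in place when it is requested and returns the objective value when F is requested, and Optim.only_fg! wraps that callback so value and gradient come from one pass. A minimal plain-vector sketch with a hand-written gradient (names and objective are illustrative, not from this package):

using Optim

function fg!(F, G, x)
    # Fill in the gradient when Optim asks for it.
    if G !== nothing
        G[1] = 2 * (x[1] - 1.0)
        G[2] = 2 * (x[2] + 2.0)
    end
    # Return the objective value when Optim asks for it.
    if F !== nothing
        return (x[1] - 1.0)^2 + (x[2] + 2.0)^2
    end
    return nothing
end

result = Optim.optimize(Optim.only_fg!(fg!), [0.0, 0.0], Optim.BFGS())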