MilesCranmer / SymbolicRegression.jl (build 9686354911)

26 Jun 2024 08:31PM UTC coverage: 93.22% (-1.40% from 94.617%)

Pull Request #326 (via github, web-flow): Merge 6f8229c9f into ceddaa424
BREAKING: Change expression types to `DynamicExpressions.Expression` (from `DynamicExpressions.Node`)

275 of 296 new or added lines in 17 files covered (92.91%).
34 existing lines in 5 files are now uncovered.
2530 of 2714 relevant lines covered (93.22%).
32081968.55 hits per line.

Source file: /src/ConstantOptimization.jl (97.96% covered)
module ConstantOptimizationModule

using LineSearches: LineSearches
using Optim: Optim
using DifferentiationInterface: value_and_gradient
using DynamicExpressions: Expression, Node, count_constants, get_constants, set_constants!
using ..CoreModule: Options, Dataset, DATA_TYPE, LOSS_TYPE
using ..UtilsModule: get_birth_order
using ..LossFunctionsModule: eval_loss, loss_to_score, batch_sample
using ..PopMemberModule: PopMember

function optimize_constants(
    dataset::Dataset{T,L}, member::P, options::Options
)::Tuple{P,Float64} where {T<:DATA_TYPE,L<:LOSS_TYPE,P<:PopMember{T,L}}
    if options.batching
        dispatch_optimize_constants(
            dataset, member, options, batch_sample(dataset, options)
        )
    else
        dispatch_optimize_constants(dataset, member, options, nothing)
    end
end
function dispatch_optimize_constants(
    dataset::Dataset{T,L}, member::P, options::Options, idx
) where {T<:DATA_TYPE,L<:LOSS_TYPE,P<:PopMember{T,L}}
    nconst = count_constants_for_optimization(member.tree)
    nconst == 0 && return (member, 0.0)
    if nconst == 1 && !(T <: Complex)
        algorithm = Optim.Newton(; linesearch=LineSearches.BackTracking())
        return _optimize_constants(
            dataset, member, options, algorithm, options.optimizer_options, idx
        )
    end
    return _optimize_constants(
        dataset,
        member,
        options,
        options.optimizer_algorithm,
        options.optimizer_options,
        idx,
    )
end
count_constants_for_optimization(ex::Expression) = count_constants(ex)

function _optimize_constants(
    dataset, member::P, options, algorithm, optimizer_options, idx
)::Tuple{P,Float64} where {T,L,P<:PopMember{T,L}}
    tree = member.tree
    eval_fraction = options.batching ? (options.batch_size / dataset.n) : 1.0
    f = Evaluator(dataset, options, idx)
    fg! = GradEvaluator(f)
    obj = if algorithm isa Optim.Newton || options.autodiff_backend === nothing
        f
    else
        Optim.only_fg!(fg!)
    end
    baseline = f(tree)
    x0, refs = get_constants(tree)
    result = Optim.optimize(obj, tree, algorithm, optimizer_options)
    num_evals = result.f_calls * eval_fraction
    # Try other initial conditions:
    for _ in 1:(options.optimizer_nrestarts)
        tmptree = copy(tree)
        eps = randn(T, size(x0)...)
        xt = @. x0 * (T(1) + T(1//2) * eps)
        set_constants!(tmptree, xt, refs)
        tmpresult = Optim.optimize(
            obj, tmptree, algorithm, optimizer_options; make_copy=false
        )
        num_evals += tmpresult.f_calls * eval_fraction
        # TODO: Does this need to take into account h_calls?

        if tmpresult.minimum < result.minimum
            result = tmpresult
        end
    end

    if result.minimum < baseline
        member.tree = result.minimizer::typeof(member.tree)
        member.loss = eval_loss(member.tree, dataset, options; regularization=true, idx=idx)
        member.score = loss_to_score(
            member.loss, dataset.use_baseline, dataset.baseline_loss, member, options
        )
        member.birth = get_birth_order(; deterministic=options.deterministic)
        num_evals += eval_fraction
    end

    return member, num_evals
end

struct Evaluator{D<:Dataset,O<:Options,I} <: Function
    dataset::D
    options::O
    idx::I
end
(e::Evaluator)(t) = eval_loss(t, e.dataset, e.options; regularization=false, idx=e.idx)
struct GradEvaluator{F<:Evaluator} <: Function
    f::F
end
function (g::GradEvaluator)(F, G, t)
    (val, grad) = value_and_gradient(g.f, g.f.options.autodiff_backend, t)
    if G !== nothing && grad !== nothing && grad.tree !== nothing
        G .= grad.tree.gradient
    end
    return val
end

end
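
The `GradEvaluator` functor above is written against Optim.jl's `only_fg!` calling convention: a single callback `fg!(F, G, x)` that fills the gradient into `G` in place when `G !== nothing` and returns the objective value when `F !== nothing`, so the objective and gradient can share one evaluation. A minimal sketch of that convention on an ordinary vector objective (the quadratic below is purely illustrative and not part of this package):

using Optim

# fg!(F, G, x): write the gradient into G in place (if requested) and
# return the objective value (if requested), sharing a single pass over x.
function fg!(F, G, x)
    if G !== nothing
        @. G = 2 * x          # gradient of sum(x .^ 2)
    end
    if F !== nothing
        return sum(abs2, x)   # objective value
    end
    return nothing
end

x0 = [1.0, -2.0, 3.0]
result = Optim.optimize(Optim.only_fg!(fg!), x0, Optim.LBFGS())

In `_optimize_constants` above, this combined form is only selected when an autodiff backend is configured and the algorithm is not `Optim.Newton`; otherwise the plain `Evaluator` callable is passed to `Optim.optimize` on its own.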