types.go
// Copyright ©2014 The gonum Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package optimize

import (
	"errors"
	"fmt"
	"math"
	"time"

	"github.com/gonum/matrix/mat64"
)

const defaultGradientAbsTol = 1e-6

// Operation represents the set of operations commanded by Method at each
// iteration. It is a bitmap of various Iteration and Evaluation constants.
// Individual constants must NOT be combined with the bitwise OR operator,
// except for the Evaluation operations.
type Operation uint64

// Supported Operations.
const (
	// NoOperation specifies that no evaluation or convergence check should
	// take place.
	NoOperation Operation = 0
	// InitIteration is sent to Recorder to indicate the initial location.
	// All fields of the location to record must be valid.
	// Method must not return it.
	InitIteration Operation = 1 << (iota - 1)
	// PostIteration is sent to Recorder to indicate the final location
	// reached during an optimization run.
	// All fields of the location to record must be valid.
	// Method must not return it.
	PostIteration
	// MajorIteration indicates that the next candidate location for
	// an optimum has been found and convergence should be checked.
	MajorIteration
	// FuncEvaluation specifies that the objective function
	// should be evaluated.
	FuncEvaluation
	// GradEvaluation specifies that the gradient
	// of the objective function should be evaluated.
	GradEvaluation
	// HessEvaluation specifies that the Hessian
	// of the objective function should be evaluated.
	HessEvaluation

	// Mask for the evaluating operations.
	evalMask = FuncEvaluation | GradEvaluation | HessEvaluation
)

// isEvaluation reports whether op consists of one or more evaluation
// operations and no other operations.
func (op Operation) isEvaluation() bool {
	return op&evalMask != 0 && op&^evalMask == 0
}

func (op Operation) String() string {
	if op&evalMask != 0 {
		return fmt.Sprintf("Evaluation(Func: %t, Grad: %t, Hess: %t, Extra: 0b%b)",
			op&FuncEvaluation != 0,
			op&GradEvaluation != 0,
			op&HessEvaluation != 0,
			op&^(evalMask))
	}
	s, ok := operationNames[op]
	if ok {
		return s
	}
	return fmt.Sprintf("Operation(%d)", op)
}

var operationNames = map[Operation]string{
	NoOperation:    "NoOperation",
	InitIteration:  "InitIteration",
	MajorIteration: "MajorIteration",
	PostIteration:  "PostIteration",
}
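
// exampleOperationBitmap is an illustrative sketch, not part of the original
// file: it shows how the Evaluation constants may be combined with bitwise OR
// to command several evaluations at once, and how the resulting bitmap can be
// inspected afterwards.
func exampleOperationBitmap() {
	op := FuncEvaluation | GradEvaluation      // command both a Func and a Grad evaluation
	fmt.Println(op.isEvaluation())             // true: only evaluation bits are set
	fmt.Println(op&GradEvaluation != 0)        // true: the gradient was requested
	fmt.Println(MajorIteration.isEvaluation()) // false: iteration operations stand alone
}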

// Location represents a location in the optimization procedure.
type Location struct {
	X        []float64       // Location in the input space.
	F        float64         // Value of the objective function at X.
	Gradient []float64       // Gradient of the objective function at X.
	Hessian  *mat64.SymDense // Hessian of the objective function at X.
}

// Result represents the answer of an optimization run. It contains the optimum
// location as well as the Status at convergence and Statistics taken during the
// run.
type Result struct {
	Location
	Stats
	Status Status
}

// Stats contains the statistics of the run.
type Stats struct {
	MajorIterations int           // Total number of major iterations
	FuncEvaluations int           // Number of evaluations of Func
	GradEvaluations int           // Number of evaluations of Grad
	HessEvaluations int           // Number of evaluations of Hess
	Runtime         time.Duration // Total runtime of the optimization
}

// complementEval returns an evaluating operation that evaluates fields of loc
// not evaluated by eval.
func complementEval(loc *Location, eval Operation) (complEval Operation) {
	if eval&FuncEvaluation == 0 {
		complEval = FuncEvaluation
	}
	if loc.Gradient != nil && eval&GradEvaluation == 0 {
		complEval |= GradEvaluation
	}
	if loc.Hessian != nil && eval&HessEvaluation == 0 {
		complEval |= HessEvaluation
	}
	return complEval
}
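
// exampleComplementEval is an illustrative sketch, not part of the original
// file: for a Location that carries gradient storage but no Hessian, and an
// eval operation that only computed Func, complementEval reports that Grad is
// the remaining field that still needs to be filled in.
func exampleComplementEval() {
	loc := &Location{
		X:        []float64{1, 2},
		Gradient: make([]float64, 2), // gradient storage exists, so it must be kept in sync
		// Hessian is nil, so HessEvaluation is never requested.
	}
	compl := complementEval(loc, FuncEvaluation)
	fmt.Println(compl == GradEvaluation) // true
}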

// Problem describes the optimization problem to be solved.
type Problem struct {
	// Func evaluates the objective function at the given location. Func
	// must not modify x.
	Func func(x []float64) float64

	// Grad evaluates the gradient at x and stores the result in-place in grad.
	// Grad must not modify x.
	Grad func(grad []float64, x []float64)

	// Hess evaluates the Hessian at x and stores the result in-place in hess.
	// Hess must not modify x.
	Hess func(hess mat64.MutableSymmetric, x []float64)

	// Status reports the status of the objective function being optimized and any
	// error. This can be used to terminate early, for example when the function is
	// not able to evaluate itself. The user can use one of the pre-provided Status
	// constants, or may call NewStatus to create a custom Status value.
	Status func() (Status, error)
}
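
// exampleProblem is an illustrative sketch, not part of the original file: it
// defines Func and Grad for the simple quadratic f(x) = Σ xᵢ², whose gradient
// is 2x. A Method that needs only function and gradient information could be
// run against a Problem built like this.
func exampleProblem() Problem {
	return Problem{
		Func: func(x []float64) float64 {
			var sum float64
			for _, v := range x {
				sum += v * v
			}
			return sum
		},
		Grad: func(grad, x []float64) {
			for i, v := range x {
				grad[i] = 2 * v // store the gradient in place; x is left untouched
			}
		},
	}
}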

// TODO(btracey): Think about making this an exported function when the
// constraint interface is designed.
func (p Problem) satisfies(method Needser) error {
	if method.Needs().Gradient && p.Grad == nil {
		return errors.New("optimize: problem does not provide needed Grad function")
	}
	if method.Needs().Hessian && p.Hess == nil {
		return errors.New("optimize: problem does not provide needed Hess function")
	}
	return nil
}

// Settings represents settings of the optimization run. It contains initial
// settings, convergence information, and Recorder information. In general, users
// should use DefaultSettings rather than constructing a Settings literal.
//
// If UseInitialData is true, InitialValue, InitialGradient and InitialHessian
// specify function information at the initial location.
//
// If Recorder is nil, no information will be recorded.
type Settings struct {
	UseInitialData  bool            // Use supplied information about the conditions at the initial x.
	InitialValue    float64         // Function value at the initial x.
	InitialGradient []float64       // Gradient at the initial x.
	InitialHessian  *mat64.SymDense // Hessian at the initial x.

	// FunctionThreshold is the threshold for acceptably small values of the
	// objective function. FunctionThreshold status is returned if
	// the objective function is less than this value.
	// The default value is -inf.
	FunctionThreshold float64

	// GradientThreshold determines the accuracy to which the minimum is found.
	// GradientThreshold status is returned if the infinity norm of
	// the gradient is less than this value.
	// Has no effect if gradient information is not used.
	// The default value is 1e-6.
	GradientThreshold float64

	// FunctionConverge tests that the function value decreases by a
	// significant amount over the specified number of iterations.
	//
	// If f < f_best and
	//  f_best - f > FunctionConverge.Relative * maxabs(f, f_best) + FunctionConverge.Absolute
	// then a significant decrease has occurred, and f_best is updated.
	// A worked instance of this test appears in the sketch after this struct.
	//
	// If there is no significant decrease for FunctionConverge.Iterations
	// major iterations, FunctionConvergence status is returned.
	//
	// If this is nil or if FunctionConverge.Iterations == 0, it has no effect.
	FunctionConverge *FunctionConverge

	// MajorIterations is the maximum number of major iterations allowed.
	// IterationLimit status is returned if the number of major iterations
	// equals or exceeds this value.
	// If it equals zero, this setting has no effect.
	// The default value is 0.
	MajorIterations int

	// Runtime is the maximum runtime allowed. RuntimeLimit status is returned
	// if the duration of the run is longer than this value. Runtime is only
	// checked at iterations of the Method.
	// If it equals zero, this setting has no effect.
	// The default value is 0.
	Runtime time.Duration

	// FuncEvaluations is the maximum allowed number of function evaluations.
	// FunctionEvaluationLimit status is returned if the total number of calls
	// to Func equals or exceeds this number.
	// If it equals zero, this setting has no effect.
	// The default value is 0.
	FuncEvaluations int

	// GradEvaluations is the maximum allowed number of gradient evaluations.
	// GradientEvaluationLimit status is returned if the total number of calls
	// to Grad equals or exceeds this number.
	// If it equals zero, this setting has no effect.
	// The default value is 0.
	GradEvaluations int

	// HessEvaluations is the maximum allowed number of Hessian evaluations.
	// HessianEvaluationLimit status is returned if the total number of calls
	// to Hess equals or exceeds this number.
	// If it equals zero, this setting has no effect.
	// The default value is 0.
	HessEvaluations int

	// Recorder records the progress of the optimization. If nil, no
	// information is recorded.
	Recorder Recorder

	// Concurrent represents how many concurrent evaluations are possible.
	Concurrent int
}
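
// exampleFunctionConvergeCheck is an illustrative sketch, not part of the
// original file: it spells out the significant-decrease test documented on the
// FunctionConverge field above, using hypothetical Relative and Absolute
// values chosen only for demonstration.
func exampleFunctionConvergeCheck() {
	const (
		relative = 1e-6
		absolute = 1e-10
	)
	fBest, f := 10.0, 9.0
	significant := f < fBest &&
		fBest-f > relative*math.Max(math.Abs(f), math.Abs(fBest))+absolute
	fmt.Println(significant) // true: 1.0 exceeds 1e-6*10 + 1e-10, so f_best would be updated
}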

// DefaultSettings returns a new Settings struct containing the default settings.
func DefaultSettings() *Settings {
	return &Settings{
		GradientThreshold: defaultGradientAbsTol,
		FunctionThreshold: math.Inf(-1),
		FunctionConverge: &FunctionConverge{
			Absolute:   1e-10,
			Iterations: 20,
		},
	}
}
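
// exampleSettings is an illustrative sketch, not part of the original file: it
// starts from DefaultSettings and then tightens the limits, which is the
// intended pattern rather than constructing a Settings literal by hand.
func exampleSettings() *Settings {
	s := DefaultSettings()
	s.MajorIterations = 100      // stop after at most 100 major iterations
	s.Runtime = 30 * time.Second // stop if the run exceeds 30 seconds
	s.GradientThreshold = 1e-8   // require a tighter gradient norm than the 1e-6 default
	return s
}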

// resize takes x and returns a slice of length dim. It returns a resliced x
// if cap(x) >= dim, and a new slice otherwise.
func resize(x []float64, dim int) []float64 {
	if dim > cap(x) {
		return make([]float64, dim)
	}
	return x[:dim]
}

// resizeSymDense returns a dim×dim symmetric matrix, reusing the backing data
// of m when it has sufficient capacity and allocating a new matrix otherwise.
func resizeSymDense(m *mat64.SymDense, dim int) *mat64.SymDense {
	if m == nil || cap(m.RawSymmetric().Data) < dim*dim {
		return mat64.NewSymDense(dim, nil)
	}
	return mat64.NewSymDense(dim, m.RawSymmetric().Data[:dim*dim])
}
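
// exampleResize is an illustrative sketch, not part of the original file: it
// shows that resize reuses the backing array of a slice when its capacity is
// large enough, and only allocates when it is not.
func exampleResize() {
	buf := make([]float64, 0, 8)
	small := resize(buf, 4)             // reslices buf: length 4, no allocation
	large := resize(buf, 16)            // capacity too small, so a fresh slice is allocated
	fmt.Println(len(small), cap(small)) // 4 8
	fmt.Println(len(large), cap(large)) // 16 16
}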