Owl Opt Library
The entry point of this library is Owl_opt.
Workflow
- define a record type 'a t in some module Prms
- apply the ppx deriver @@deriving prms to the type 'a t, so that Prms has type Owl_opt.Prms.PT
- pass Prms through your favourite algorithm functor to create an optimisation module O
- define an objective function f that takes your record type 'a t as input
- define initial parameters prms0
- define a learning rate lr with Owl_opt.Lr
- initialise the optimisation state s with O.init ~prms0 ()
- define a stopping criterion function stop : float -> state -> bool
- minimise f for the optimisation session with O.min ~stop ~f s
Example
module Prms = struct
type 'a t = {a: 'a; b: 'a} [@@deriving prms]
end
(* make an Adam optimisation module for the parameter definition Prms *)
module O = Owl_opt.D.Adam.Make (Prms)
(* x and y are the data; assumed given, drawn at random here purely for illustration *)
let x = Owl.Algodiff.D.Mat.gaussian 5 1
let y = Owl.Algodiff.D.Mat.gaussian 5 1
(* define the objective function *)
let f _ prms = Owl.Algodiff.D.Maths.(l2norm' (y - ((prms.a *@ x) + prms.b)))
(* define initial parameters *)
let prms0 = {a = Owl.Algodiff.D.Mat.gaussian 5 5; b = Owl.Algodiff.D.Mat.gaussian 5 1}
(* define fixed learning rate *)
let lr = Owl_opt.Lr.(Fix 1E-4)
(* initialise an optimisation session *)
let s = O.init ~prms0 ~beta1:0.99 ~beta2:0.999 ~lr ()
(* define the stopping criterion: stop when the function value falls below 1E-4 *)
let stop fv _s = fv < 1E-4
(* minimise [f] for session [s] and return the final loss *)
let fv = O.min ~f ~stop s
(* get the optimised prms *)
let prms = O.prms s
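The result prms is a plain record of your parameter type, so each field can be unpacked and inspected with the usual Owl functions. A minimal sketch, assuming the session above has been run:

(* unpack the optimised Algodiff value [prms.a] into a dense matrix and print it *)
let () = Owl.Mat.print (Owl.Algodiff.D.unpack_arr prms.a)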
Important modules
1. Parameters
   - module Owl_opt.Prms and the module type Owl_opt.Prms.PT
2. Learning rate
   - module Owl_opt.Lr
3. Double-precision
   - Vanilla gradient descent: Owl_opt.D.Gd.Make
   - Adam: Owl_opt.D.Adam.Make
   - Rmsprop: Owl_opt.D.Rmsprop.Make
   - Lbfgs (see Owl Opt Lbfgs)
4. Single-precision
   - Vanilla gradient descent: Owl_opt.S.Gd.Make
   - Adam: Owl_opt.S.Adam.Make
   - Rmsprop: Owl_opt.S.Rmsprop.Make
   - Lbfgs (see Owl Opt Lbfgs)
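All of these algorithm functors follow the same pattern as the Adam example above: apply Make to your Prms module, then drive the resulting module with init and min. A minimal sketch of swapping in double-precision vanilla gradient descent, assuming Gd's init accepts ~prms0 and ~lr like Adam's (with Prms, prms0, f, lr and stop as defined in the example):

(* build a vanilla gradient descent optimiser for the same parameter record *)
module O = Owl_opt.D.Gd.Make (Prms)
let s = O.init ~prms0 ~lr ()
let fv = O.min ~f ~stop s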