From 1bf561d0f385806943e68fbc0c679ddd5c7145d0 Mon Sep 17 00:00:00 2001 From: Will Tebbutt Date: Fri, 21 Feb 2020 16:06:58 +0000 Subject: [PATCH 01/42] Basic GP examples --- examples/getting_started/Project.toml | 10 ++++ examples/getting_started/basic_operations.jl | 57 ++++++++++++++++++++ examples/getting_started/easy_plotting.jl | 29 ++++++++++ 3 files changed, 96 insertions(+) create mode 100644 examples/getting_started/Project.toml create mode 100644 examples/getting_started/basic_operations.jl create mode 100644 examples/getting_started/easy_plotting.jl diff --git a/examples/getting_started/Project.toml b/examples/getting_started/Project.toml new file mode 100644 index 00000000..5aae799d --- /dev/null +++ b/examples/getting_started/Project.toml @@ -0,0 +1,10 @@ +[deps] +LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" +Plots = "91a5bcdd-55d7-5caf-9e0b-520d859cae80" +Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" +Stheno = "8188c328-b5d6-583d-959b-9690869a5511" + +[compat] +Plots = "= 0.28.4" +Stheno = "= 0.6.1" +julia = "1.3" diff --git a/examples/getting_started/basic_operations.jl b/examples/getting_started/basic_operations.jl new file mode 100644 index 00000000..5398eb17 --- /dev/null +++ b/examples/getting_started/basic_operations.jl @@ -0,0 +1,57 @@ +# +# This file just shows how some of the basic manipulations you can do in Stheno work in +# practice. See the main documentation and the rest of this examples folder for more info. +# + +# Set up the environment to run this example. Make sure you're within the folder that this +# file lives in. +using Pkg +Pkg.activate(@__DIR__) +Pkg.instantiate() + +# The time-to-first-plot issue means that this might take a little while. +using LinearAlgebra, Plots, Random, Stheno +rng = MersenneTwister(123456) + +# Construct a Matern-5/2 kernel with lengthscale 0.5 and variance 1.2. +k = kernel(Matern52(); l=0.5, s=1.2) + +# Construct a zero-mean GP with a simple kernel. Don't worry about the GPC object. +f = GP(k, GPC()) + +# Specify some locations at which to consider the GP. +N = 50 +x = rand(rng, N) * 10 + +# Specify the variance of the noise under which we'll make observations of the GP. +Σ = Diagonal(rand(rng, N) .+ 0.1) + +# Construct marginal distribution over `f` at `x` added to some independent zero-mean +# Gaussian noise with covariance matrix `Σ`. +fx = f(x, Σ) + +# Generate a sample from the prior. +y = rand(rng, fx) + +# Compute the log marginal probability of the sample under the prior. +logpdf(fx, y) + +# Do inference: compute the posterior distribution over `f` given we observe it + noise to +# be `y` at locations `x`. +f_post = f | Obs(fx, y) + +# Specify some points at which to plot the posterior. +Npr = 1000 +xpr = range(-3.0, 13.0; length=Npr) + +# Construct the posterior predictive distribution at `xpr`. Add some jitter. +fx_post = f_post(xpr, 1e-9) + +# Draw samples from the posterior. +y_post = rand(rng, fx_post) + +# Compute the marginal posterior predictive probability of the samples. +logpdf(fx_post, y_post) + +# Compute the posterior marginal distributions. (We could equally have done this with `fx`). +post_marginals = marginals(fx_post) diff --git a/examples/getting_started/easy_plotting.jl b/examples/getting_started/easy_plotting.jl new file mode 100644 index 00000000..90732d73 --- /dev/null +++ b/examples/getting_started/easy_plotting.jl @@ -0,0 +1,29 @@ +# +# Here we demonstrate how to use Stheno's built in plotting functionality to make +# visualising 1D examples straightforward. 
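+# Stheno provides plot recipes, so a `FiniteGP` such as `f_post(xpr)` can be passed to
+# `plot!` directly, as is done below.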
+#

+# Load some other code and construct some basic objects. Please see the file for reference.
+include("basic_operations.jl")
+
+# Specify our plotting backend.
+gr();
+
+# Construct a new plot object.
+posterior_plot = plot();
+
+# Plot the posterior distribution.
+plot!(posterior_plot, f_post(xpr); samples=5, color=:blue, label="posterior");
+
+# Plot the observations.
+scatter!(posterior_plot, x, y;
+    markercolor=:blue,
+    markershape=:circle,
+    markerstrokewidth=0.0,
+    markersize=4,
+    markeralpha=0.7,
+    label="",
+);
+
+# Show the plot.
+display(posterior_plot);

From aa412c945c66db394c2df6f0dbe7ef1b5dc5f5ba Mon Sep 17 00:00:00 2001
From: willtebbutt
Date: Fri, 21 Feb 2020 17:12:48 +0000
Subject: [PATCH 02/42] Relax version requirement

---
 examples/getting_started/Project.toml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/examples/getting_started/Project.toml b/examples/getting_started/Project.toml
index 5aae799d..f59e8ee7 100644
--- a/examples/getting_started/Project.toml
+++ b/examples/getting_started/Project.toml
@@ -6,5 +6,5 @@ Stheno = "8188c328-b5d6-583d-959b-9690869a5511"
 
 [compat]
 Plots = "= 0.28.4"
-Stheno = "= 0.6.1"
-julia = "1.3"
+Stheno = "0.6"
+julia = "1"

From 331605e75fdfe620f35ef6b1a4d996e6925998d2 Mon Sep 17 00:00:00 2001
From: willtebbutt
Date: Fri, 21 Feb 2020 18:59:34 +0000
Subject: [PATCH 03/42] Complete plotting basics

---
 examples/getting_started/basic_operations.jl  |  2 +-
 ...asy_plotting.jl => high_level_plotting.jl} |  2 +-
 .../getting_started/low_level_plotting.jl     | 50 +++++++++++++++++++
 3 files changed, 52 insertions(+), 2 deletions(-)
 rename examples/getting_started/{easy_plotting.jl => high_level_plotting.jl} (88%)
 create mode 100644 examples/getting_started/low_level_plotting.jl

diff --git a/examples/getting_started/basic_operations.jl b/examples/getting_started/basic_operations.jl
index 5398eb17..e2ae475e 100644
--- a/examples/getting_started/basic_operations.jl
+++ b/examples/getting_started/basic_operations.jl
@@ -54,4 +54,4 @@ y_post = rand(rng, fx_post)
 logpdf(fx_post, y_post)
 
 # Compute the posterior marginal distributions. (We could equally have done this with `fx`).
-post_marginals = marginals(fx_post)
+posterior_marginals = marginals(fx_post)

diff --git a/examples/getting_started/easy_plotting.jl b/examples/getting_started/high_level_plotting.jl
similarity index 88%
rename from examples/getting_started/easy_plotting.jl
rename to examples/getting_started/high_level_plotting.jl
index 90732d73..51fda961 100644
--- a/examples/getting_started/easy_plotting.jl
+++ b/examples/getting_started/high_level_plotting.jl
@@ -13,7 +13,7 @@ gr();
 posterior_plot = plot();
 
 # Plot the posterior distribution.
-plot!(posterior_plot, f_post(xpr); samples=5, color=:blue, label="posterior");
+plot!(posterior_plot, f_post(xpr); samples=5, color=:blue);

diff --git a/examples/getting_started/low_level_plotting.jl b/examples/getting_started/low_level_plotting.jl
new file mode 100644
index 00000000..7b1e2ae8
--- /dev/null
+++ b/examples/getting_started/low_level_plotting.jl
@@ -0,0 +1,50 @@
+#
+# Here we demonstrate how to build up a plot manually using just the Plots.jl interface and
+# the quantities that are easily computable using Stheno. This approach involves a
+# reasonable amount of boilerplate, but demonstrates how to work at a lower level in case the
+# high-level plotting functionality demonstrated in high_level_plotting.jl is insufficient
+# for your use case.
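+# Everything used below (`rng`, `x`, `y`, `xpr`, and `posterior_marginals`) is constructed
+# by basic_operations.jl, which is included just beneath this comment block.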
+# + +# Load some other code and construct some basic objects. Please see the file for reference. +include("basic_operations.jl") + +# Specify our plotting backend. +gr(); + +# Construct a new plot object. +posterior_plot = plot(legend=nothing); + +# Generate several samples from the posterior predictive distribution. +Y = rand(rng, f_post(xpr), 5) +m = mean.(posterior_marginals) +σ = std.(posterior_marginals) + +plot!(posterior_plot, xpr, m; + linecolor=:blue, + linewidth=2, +); +plot!(posterior_plot, xpr, [m m]; + linewidth=0.0, + linecolor=:blue, + fillrange=[m .- 3 .* σ, m .+ 3 * σ], + fillalpha=0.3, + fillcolor=:blue, +); +plot!(posterior_plot, xpr, Y; + linewidth=0.5, + linealpha=0.5, + linecolor=:blue, +); + +# Plot the observations. +scatter!(posterior_plot, x, y; + markercolor=:blue, + markershape=:circle, + markerstrokewidth=0.0, + markersize=4, + markeralpha=0.7, +); + +# Show the plot. +display(posterior_plot); From 2841ba8bbc7e61f871575d9dd4e161a0cc8b8284 Mon Sep 17 00:00:00 2001 From: willtebbutt Date: Fri, 21 Feb 2020 19:04:27 +0000 Subject: [PATCH 04/42] Document examples --- examples/getting_started/README.md | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 examples/getting_started/README.md diff --git a/examples/getting_started/README.md b/examples/getting_started/README.md new file mode 100644 index 00000000..822c6194 --- /dev/null +++ b/examples/getting_started/README.md @@ -0,0 +1,5 @@ +# Examples: Getting Started + +These examples demonstrate how the basic manipulations of Gaussian processes available in this package in `basic_operations.jl`. + +`high_level_plotting.jl` shows how to effectively combine `Plots.jl` with this package to quickly generate plots for 1D input spaces, while `low_level_plotting.jl` provides an example of the same plotting command, but without any of the high-level plotting utilities provided by this package -- this is important if the high-level plotting utilities aren't suitable for your application. From e5bedfe85b5912b0e6def10b231fd62e3aa01c29 Mon Sep 17 00:00:00 2001 From: willtebbutt Date: Fri, 21 Feb 2020 19:38:41 +0000 Subject: [PATCH 05/42] Demonstrate approximate inference with Titsias --- examples/pseudo_points/Project.toml | 9 ++ examples/pseudo_points/basic_operations.jl | 108 +++++++++++++++++++++ 2 files changed, 117 insertions(+) create mode 100644 examples/pseudo_points/Project.toml create mode 100644 examples/pseudo_points/basic_operations.jl diff --git a/examples/pseudo_points/Project.toml b/examples/pseudo_points/Project.toml new file mode 100644 index 00000000..57ff421b --- /dev/null +++ b/examples/pseudo_points/Project.toml @@ -0,0 +1,9 @@ +[deps] +Plots = "91a5bcdd-55d7-5caf-9e0b-520d859cae80" +Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" +Stheno = "8188c328-b5d6-583d-959b-9690869a5511" + +[compat] +Plots = "= 0.28.4" +Stheno = "0.6" +julia = "1" diff --git a/examples/pseudo_points/basic_operations.jl b/examples/pseudo_points/basic_operations.jl new file mode 100644 index 00000000..280dc2b3 --- /dev/null +++ b/examples/pseudo_points/basic_operations.jl @@ -0,0 +1,108 @@ +# +# This file just illustrates the nuts-and-bolts of the approximate inference API provided +# by Stheno. It's worth mentally contrasting this with what you would have written to +# perform exact inference (hint: it's remarkably similar). +# + +# Set up the environment to run this example. Make sure you're within the folder that this +# file lives in. 
+using Pkg
+Pkg.activate(@__DIR__)
+Pkg.instantiate()
+
+# The time-to-first-plot issue means that this might take a little while.
+using BenchmarkTools, LinearAlgebra, Plots, Random, Stheno
+rng = MersenneTwister(123456)
+
+
+# Construct a Matern-5/2 kernel with lengthscale 0.5 and variance 1.2.
+k = kernel(Matern52(); l=0.5, s=1.2)
+
+# Construct a zero-mean GP with a simple kernel. Don't worry about the GPC object.
+f = GP(k, GPC())
+
+# Specify some locations at which to consider the GP.
+N = 5_000
+x = vcat(rand(rng, div(N, 2)) * 3, rand(rng, div(N, 2)) * 4 .+ 6)
+
+# Specify the variance of the noise under which we'll make observations of the GP.
+Σ = Diagonal(rand(rng, N) .+ 0.1)
+
+# Construct the marginal distribution over `f` at `x`, added to some independent zero-mean
+# Gaussian noise with covariance matrix `Σ`.
+fx = f(x, Σ)
+
+# Generate a sample from the prior.
+y = rand(rng, fx)
+
+# Now pretend that we can't perform exact inference because `N` is too large.
+
+# Specify some pseudo-point locations.
+z = range(-3.0, 13.0; length=150)
+
+# Specify inducing points. Add a small amount of jitter for numerical stability.
+u = f(z, 1e-6)
+
+# Compute the Evidence Lower BOund.
+println("The elbo is reasonably tight.")
+@show elbo(fx, y, u), logpdf(fx, y)
+
+println("Benchmark logpdf")
+display(@benchmark logpdf($fx, $y))
+println()
+
+println("Benchmark elbo")
+display(@benchmark elbo($fx, $y, $u))
+println()
+
+# Compute the approximate posterior process.
+f_post = f | Stheno.PseudoObs(Obs(fx, y), u)
+
+# Specify some points at which to plot the approximate posterior.
+Npr = 1000
+xpr = range(-3.0, 13.0; length=Npr)
+
+# It's possible to work efficiently with the approximate posterior marginals.
+# Unfortunately it's not possible to work efficiently with the entire posterior process --
+# this is a limitation of pseudo-point approximations generally, as opposed to a limitation
+# of Stheno.
+posterior_marginals = marginals(f_post(xpr))
+
+# Visualise the posterior. At the time of writing, it remains important to do this manually
+# for the sake of efficiency. If you would like to have a high-level interface similar to
+# the one available in the exact inference setting, please feel free to raise an issue, or
+# implement it yourself and open a PR!
+
+# Specify our plotting backend.
+gr();
+
+# Construct a new plot object.
+posterior_plot = plot(legend=nothing);
+
+# Compute the posterior marginal means and standard deviations.
+m = mean.(posterior_marginals)
+σ = std.(posterior_marginals)
+
+plot!(posterior_plot, xpr, m;
+    linecolor=:blue,
+    linewidth=2,
+);
+plot!(posterior_plot, xpr, [m m];
+    linewidth=0.0,
+    linecolor=:blue,
+    fillrange=[m .- 3 .* σ, m .+ 3 * σ],
+    fillalpha=0.3,
+    fillcolor=:blue,
+);
+
+# Plot the observations.
+scatter!(posterior_plot, x, y;
+    markercolor=:blue,
+    markershape=:circle,
+    markerstrokewidth=0.0,
+    markersize=1,
+    markeralpha=0.2,
+);
+
+# Show the plot.
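+# (If you'd rather save it to disk, Plots.jl's `savefig` works here too, e.g.
+# `savefig(posterior_plot, "pseudo_points.png")` -- the filename is just an example.)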
+display(posterior_plot);

From bbccdabbade7c73b189a6bfeb2a53a2465176e63 Mon Sep 17 00:00:00 2001
From: willtebbutt
Date: Fri, 21 Feb 2020 19:47:50 +0000
Subject: [PATCH 06/42] Documentation

---
 examples/pseudo_points/README.md | 7 +++++++
 1 file changed, 7 insertions(+)
 create mode 100644 examples/pseudo_points/README.md

diff --git a/examples/pseudo_points/README.md b/examples/pseudo_points/README.md
new file mode 100644
index 00000000..6917bce8
--- /dev/null
+++ b/examples/pseudo_points/README.md
@@ -0,0 +1,7 @@
+# Approximate inference with Pseudo Points
+
+Also known as sparse approximations, inducing-point approximations, and probably a few more names.
+
+Here we demonstrate how to perform efficient approximate inference in large data sets when a small number of pseudo-observations can be utilised to represent the posterior.
+
+Titsias, 2009, is the de-facto standard approach to this type of approximation, and is the algorithm utilised here. There are, however, plenty of other pseudo-point approximations in the literature, and they wouldn't be particularly difficult to add. If you would like to see them implemented, please either do so and open a PR, or raise an issue on the matter!

From 40a87257a5e6b2e97b2043e4ceca99da01ece0df Mon Sep 17 00:00:00 2001
From: willtebbutt
Date: Fri, 21 Feb 2020 20:01:22 +0000
Subject: [PATCH 07/42] Further docs improvements

---
 README.md                          | 6 ++++--
 examples/getting_started/README.md | 6 ++++--
 examples/pseudo_points/README.md   | 6 +++++-
 3 files changed, 13 insertions(+), 5 deletions(-)

diff --git a/README.md b/README.md
index f0ed8d46..a0b7d48d 100644
--- a/README.md
+++ b/README.md
@@ -17,7 +17,9 @@ __Installation__ - `] add Stheno`.
 
 ## A Couple of Examples
 
-We have a [model zoo](https://github.com/willtebbutt/stheno_models), but here are a couple of examples to get you started.
+The primary sources of information regarding this package are the [documentation](https://willtebbutt.github.io/Stheno.jl/stable) and the examples folder, but here are a couple of flashy examples to get started with.
+
+Please raise an issue immediately if either of these examples doesn't work -- they're not currently included in CI, so there's a higher chance of them being outdated than of the package internals.
 
 In this first example we define a simple Gaussian process, make observations of different bits of it, and visualise the posterior. We are trivially able to condition on observations of both `f₁` _and_ `f₃`, which is a very non-standard capability.
 ```julia
@@ -214,7 +216,7 @@ Stheno doesn't currently have support for non-Gaussian likelihoods, and as such
 
 The plan is not to support the combination of GPs and Deep Learning explicitly, but rather to ensure that Stheno and [Flux.jl](https://github.com/FluxML/Flux.jl) play nicely with one another. Both packages now work with [Zygote.jl](https://github.com/FluxML/Zygote.jl), so you can use that to sort out gradient information.
 
-## Things that are definitely up for grabs
+## Things that are up for grabs
 Obviously, improvements to code documentation are always welcome, and if you want to write some more unit / integration tests, please feel free. In terms of larger items that require some attention, here are some thoughts:
 - An implementation of SVI from [Gaussian Processes for Big Data](https://arxiv.org/abs/1309.6835).
 - Kronecker-factored matrices: this is quite a general issue which might best be addressed by the creation of a separate package.
It would be very helpful to have an implementation of the `AbstractMatrix` interface which implements multiplication, inversion, eigenfactorisation etc, which can then be utilised in Stheno.

diff --git a/examples/getting_started/README.md b/examples/getting_started/README.md
index 822c6194..04c19467 100644
--- a/examples/getting_started/README.md
+++ b/examples/getting_started/README.md
@@ -1,5 +1,7 @@
 # Examples: Getting Started
 
-These examples demonstrate how the basic manipulations of Gaussian processes available in this package in `basic_operations.jl`.
+Here we provide a terse but reasonably comprehensive introduction to the API made available in Stheno. This is a helpful introduction if you're looking to build on Stheno in your own code. Where appropriate, all operations should be algorithmically differentiable.
+
+`basic_operations.jl` illustrates the basic manipulations of Gaussian processes available in this package.
 
-`high_level_plotting.jl` shows how to effectively combine `Plots.jl` with this package to quickly generate plots for 1D input spaces, while `low_level_plotting.jl` provides an example of the same plotting command, but without any of the high-level plotting utilities provided by this package -- this is important if the high-level plotting utilities aren't suitable for your application.
+`high_level_plotting.jl` shows how to effectively combine `Plots.jl` with this package to quickly generate plots for 1D input spaces, while `low_level_plotting.jl` provides an example of the same plotting functionality, but without any of the high-level plotting utilities provided by this package -- this is important if the high-level plotting utilities aren't suitable for your application.

diff --git a/examples/pseudo_points/README.md b/examples/pseudo_points/README.md
index 6917bce8..ca1c8a95 100644
--- a/examples/pseudo_points/README.md
+++ b/examples/pseudo_points/README.md
@@ -2,6 +2,10 @@
 
 Also known as sparse approximations, inducing-point approximations, and probably a few more names.
 
-Here we demonstrate how to perform efficient approximate inference in large data sets when a small number of pseudo-observations can be utilised to represent the posterior.
+Here we demonstrate how to perform efficient approximate inference in large data sets when a small number of pseudo-observations can be utilised to represent the posterior. Again, this introduction is somewhat terse, but should be useful if you're looking to build on top of Stheno in your own code.
 
 Titsias, 2009, is the de-facto standard approach to this type of approximation, and is the algorithm utilised here. There are, however, plenty of other pseudo-point approximations in the literature, and they wouldn't be particularly difficult to add. If you would like to see them implemented, please either do so and open a PR, or raise an issue on the matter!
+
+Bibliography
+
+Titsias, Michalis (2009). "Variational learning of inducing variables in sparse Gaussian processes". In: International Conference on Artificial Intelligence and Statistics, pp. 567-574.
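+
+As a rough sketch of what this looks like in code (see `basic_operations.jl` in this directory for the full, runnable version -- `f`, `x`, `Σ`, `y`, and `z` are as defined there):
+
+```julia
+u = f(z, 1e-6)                                     # pseudo-points at locations `z`, plus jitter
+elbo(f(x, Σ), y, u)                                # lower bound on logpdf(f(x, Σ), y)
+f_post = f | Stheno.PseudoObs(Obs(f(x, Σ), y), u)  # approximate posterior process
+```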
From 64a3a59e62620b64e5519159276a49f69215e1d9 Mon Sep 17 00:00:00 2001
From: willtebbutt
Date: Fri, 21 Feb 2020 20:21:34 +0000
Subject: [PATCH 08/42] More docs and the process decomposition example

---
 README.md                                    |   6 +-
 examples/README.md                           |  11 ++
 examples/basic_gppp/process_decomposition.jl | 115 +++++++++++++++++++
 3 files changed, 129 insertions(+), 3 deletions(-)
 create mode 100644 examples/README.md
 create mode 100644 examples/basic_gppp/process_decomposition.jl

diff --git a/README.md b/README.md
index a0b7d48d..b605439f 100644
--- a/README.md
+++ b/README.md
@@ -5,7 +5,7 @@
 [![](https://img.shields.io/badge/docs-stable-blue.svg)](https://willtebbutt.github.io/Stheno.jl/stable)
 [![](https://img.shields.io/badge/docs-dev-blue.svg)](https://willtebbutt.github.io/Stheno.jl/dev)
 
-Stheno is designed to make doing non-standard things with Gaussian processes straightforward. It has an intuitive modeling syntax, is inherently able to handle both multi-input and multi-output problems, and trivially supports interdomain pseudo-point approximations.
+Stheno is designed to make doing non-standard things with Gaussian processes straightforward. It has an intuitive modeling syntax, is inherently able to handle both multi-input and multi-output problems, and trivially supports interdomain pseudo-point approximations. We call this Gaussian process Probabilistic Programming (GPPP).
 
 [We also have a Python version of the package](https://github.com/wesselb/stheno)
 
@@ -118,10 +118,10 @@ display(posterior_plot);
 ```
 ![](https://github.com/willtebbutt/stheno_models/blob/master/exact/process_decomposition.png)
 
-[Model Zoo Link](https://github.com/willtebbutt/stheno_models/blob/master/exact/process_decomposition.jl)
-
 In the above figure, we have visualised the posterior distribution of all of the processes. Bold lines are posterior means, and shaded areas are three posterior standard deviations from these means. Thin lines are samples from the posterior processes.
 
+This example can also be found in the `examples/basic_gppp/process_decomposition.jl`, which contains other toy examples of GPPP in action.
+
 In this next example we make observations of two different noisy versions of the same latent process. Again, this is just about doable in existing GP packages if you know what you're doing, but isn't straightforward.

diff --git a/examples/README.md b/examples/README.md
new file mode 100644
index 00000000..40d9125c
--- /dev/null
+++ b/examples/README.md
@@ -0,0 +1,11 @@
+# Examples Overview
+
+There are numerous examples of things that can be done with Stheno.jl in this directory. Start with the `getting_started` sub-directory: once you're comfortable with its contents, you should have the pre-requisites to understand the other folders, each of which builds on `getting_started` in some way.
+
+Note that each sub-directory contains its own environment that specifies a particular version of Stheno and other dependencies. As such, each example _should_ certainly be runnable. It might not, however, be runnable on the most recent version of Stheno. If you encounter this, it likely means that someone forgot to check that all of the examples still run when a breaking change was made. This should be considered a bug, and an issue raised or a PR opened to fix the problem!
+
+Below we provide a brief description of each of the sub-directories.
+
+- `getting_started`: the most fundamental Stheno.jl functionality. If you're not comfortable with the content of this folder, you likely won't be with the rest of them.
+- `pseudo_points`: covers inducing-point / sparse / pseudo-point approximations. +- `basic_gppp`: basic toy examples of the functionality that we call Gaussian process Probabilistic Programming (GPPP). diff --git a/examples/basic_gppp/process_decomposition.jl b/examples/basic_gppp/process_decomposition.jl new file mode 100644 index 00000000..3e70e83b --- /dev/null +++ b/examples/basic_gppp/process_decomposition.jl @@ -0,0 +1,115 @@ +using Stheno, Plots, Random, Statistics +using Stheno: @model, EQ + + + +########################### Define our model ########################### + +# Define a distribution over f₁, f₂, and f₃, where f₃(x) = f₁(x) + f₂(x). +@model function model() + f₁ = GP(randn(), EQ()) + f₂ = GP(EQ()) + f₃ = f₁ + f₂ + return f₁, f₂, f₃ +end + +# Randomly sample `N₁` locations at which to measure `f` using `y1`, and `N2` locations +# at which to measure `f` using `y2`. +rng, N₁, N₃ = MersenneTwister(123546), 10, 11; +X₁, X₃ = sort(rand(rng, N₁) * 10), sort(rand(rng, N₃) * 10); +f₁, f₂, f₃ = model(); + +# Generate some toy observations of `f₁` and `f₃`. +ŷ₁, ŷ₃ = rand(rng, [f₁(X₁), f₃(X₃)]); + +# Compute the posterior processes. +(f₁′, f₂′, f₃′) = (f₁, f₂, f₃) | (f₁(X₁)←ŷ₁, f₃(X₃)←ŷ₃); + +# Define some plotting stuff. +Np, S = 500, 25; +Xp = range(-2.5, stop=12.5, length=Np); + +# Sample jointly from the posterior over each process. +f₁′Xp, f₂′Xp, f₃′Xp = rand(rng, [f₁′(Xp, 1e-9), f₂′(Xp, 1e-9), f₃′(Xp, 1e-9)], S); + +# Compute posterior marginals. +ms1 = marginals(f₁′(Xp)); +ms2 = marginals(f₂′(Xp)); +ms3 = marginals(f₃′(Xp)); + +μf₁′, σf₁′ = mean.(ms1), std.(ms1); +μf₂′, σf₂′ = mean.(ms2), std.(ms2); +μf₃′, σf₃′ = mean.(ms3), std.(ms3); + + + +########################### Plot results ########################### + +gr(); +posterior_plot = plot(); + +# Plot posterior marginal variances +plot!(posterior_plot, Xp, [μf₁′ μf₁′]; + linewidth=0.0, + fillrange=[μf₁′ .- 3 .* σf₁′, μf₁′ .+ 3 * σf₁′], + fillalpha=0.3, + fillcolor=:red, + label=""); +plot!(posterior_plot, Xp, [μf₂′ μf₂′]; + linewidth=0.0, + fillrange=[μf₂′ .- 3 .* σf₂′, μf₂′ .+ 3 * σf₂′], + fillalpha=0.3, + fillcolor=:green, + label=""); +plot!(posterior_plot, Xp, [μf₃′ μf₃′]; + linewidth=0.0, + fillrange=[μf₃′ .- 3 .* σf₃′, μf₃′ .+ 3 * σf₃′], + fillalpha=0.3, + fillcolor=:blue, + label=""); + +# Plot joint posterior samples +plot!(posterior_plot, Xp, f₁′Xp, + linecolor=:red, + linealpha=0.2, + label=""); +plot!(posterior_plot, Xp, f₂′Xp, + linecolor=:green, + linealpha=0.2, + label=""); +plot!(posterior_plot, Xp, f₃′Xp, + linecolor=:blue, + linealpha=0.2, + label=""); + +# Plot posterior means +plot!(posterior_plot, Xp, μf₁′; + linecolor=:red, + linewidth=2.0, + label="f1"); +plot!(posterior_plot, Xp, μf₂′; + linecolor=:green, + linewidth=2.0, + label="f2"); +plot!(posterior_plot, Xp, μf₃′; + linecolor=:blue, + linewidth=2.0, + label="f3"); + +# Plot observations +scatter!(posterior_plot, X₁, ŷ₁; + markercolor=:red, + markershape=:circle, + markerstrokewidth=0.0, + markersize=4, + markeralpha=0.7, + label=""); +scatter!(posterior_plot, X₃, ŷ₃; + markercolor=:blue, + markershape=:circle, + markerstrokewidth=0.0, + markersize=4, + markeralpha=0.7, + label=""); + +display(posterior_plot); From b2bd74e47bc637052f956249bbc56dd392f2d767 Mon Sep 17 00:00:00 2001 From: willtebbutt Date: Fri, 21 Feb 2020 20:37:43 +0000 Subject: [PATCH 09/42] More docs, more examples --- README.md | 2 +- examples/basic_gppp/Project.toml | 9 ++ examples/basic_gppp/process_decomposition.jl | 96 ++++++++++++-------- 3 files changed, 67 
insertions(+), 40 deletions(-)
 create mode 100644 examples/basic_gppp/Project.toml

diff --git a/README.md b/README.md
index b605439f..7c81f6f8 100644
--- a/README.md
+++ b/README.md
@@ -120,7 +120,7 @@ display(posterior_plot);
 
 In the above figure, we have visualised the posterior distribution of all of the processes. Bold lines are posterior means, and shaded areas are three posterior standard deviations from these means. Thin lines are samples from the posterior processes.
 
-This example can also be found in the `examples/basic_gppp/process_decomposition.jl`, which contains other toy examples of GPPP in action.
+This example can also be found in `examples/basic_gppp/process_decomposition.jl`, which also contains other toy examples of GPPP in action.
 
 In this next example we make observations of two different noisy versions of the same latent process. Again, this is just about doable in existing GP packages if you know what you're doing, but isn't straightforward.

diff --git a/examples/basic_gppp/Project.toml b/examples/basic_gppp/Project.toml
new file mode 100644
index 00000000..57ff421b
--- /dev/null
+++ b/examples/basic_gppp/Project.toml
@@ -0,0 +1,9 @@
+[deps]
+Plots = "91a5bcdd-55d7-5caf-9e0b-520d859cae80"
+Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
+Stheno = "8188c328-b5d6-583d-959b-9690869a5511"
+
+[compat]
+Plots = "= 0.28.4"
+Stheno = "0.6"
+julia = "1"

diff --git a/examples/basic_gppp/process_decomposition.jl b/examples/basic_gppp/process_decomposition.jl
index 3e70e83b..53652727 100644
--- a/examples/basic_gppp/process_decomposition.jl
+++ b/examples/basic_gppp/process_decomposition.jl
@@ -1,4 +1,9 @@
-using Stheno, Plots, Random, Statistics
+# Please run from the `basic_gppp` directory.
+using Pkg
+Pkg.activate(@__DIR__)
+Pkg.instantiate()
+
+using Random, Plots, Stheno
 using Stheno: @model, EQ
 
@@ -13,33 +18,35 @@ using Stheno: @model, EQ
     return f₁, f₂, f₃
 end
 
-# Randomly sample `N₁` locations at which to measure `f` using `y1`, and `N2` locations
-# at which to measure `f` using `y2`.
-rng, N₁, N₃ = MersenneTwister(123546), 10, 11;
-X₁, X₃ = sort(rand(rng, N₁) * 10), sort(rand(rng, N₃) * 10);
-f₁, f₂, f₃ = model();
+# Construct the processes in the model.
+f₁, f₂, f₃ = model()
+
+# Randomly sample `N₁` and `N₃` locations at which to observe `f₁` and `f₃` respectively.
+rng, N₁, N₃ = MersenneTwister(123546), 10, 11
+x₁, x₃ = sort(rand(rng, N₁) * 10), sort(rand(rng, N₃) * 10)
 
-# Generate some toy observations of `f₁` and `f₃`.
-ŷ₁, ŷ₃ = rand(rng, [f₁(X₁), f₃(X₃)]);
+# Generate some toy observations of `f₁` and `f₃`, `y₁` and `y₃` respectively.
+y₁, y₃ = rand(rng, [f₁(x₁), f₃(x₃)])
 
 # Compute the posterior processes.
-(f₁′, f₂′, f₃′) = (f₁, f₂, f₃) | (f₁(X₁)←ŷ₁, f₃(X₃)←ŷ₃);
+(f₁_post, f₂_post, f₃_post) = (f₁, f₂, f₃) | (f₁(x₁)←y₁, f₃(x₃)←y₃)
 
 # Define some plotting stuff.
 Np, S = 500, 25;
-Xp = range(-2.5, stop=12.5, length=Np);
+xp = range(-2.5, stop=12.5, length=Np);
 
 # Sample jointly from the posterior over each process.
-f₁′Xp, f₂′Xp, f₃′Xp = rand(rng, [f₁′(Xp, 1e-9), f₂′(Xp, 1e-9), f₃′(Xp, 1e-9)], S);
+f₁_post_xp, f₂_post_xp, f₃_post_xp =
+    rand(rng, [f₁_post(xp, 1e-9), f₂_post(xp, 1e-9), f₃_post(xp, 1e-9)], S);
 
 # Compute posterior marginals.
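+# (`marginals` returns a vector of per-point marginal distributions, which is why the
+# `mean.` and `std.` broadcasts below work elementwise over it.)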
-ms1 = marginals(f₁′(Xp)); -ms2 = marginals(f₂′(Xp)); -ms3 = marginals(f₃′(Xp)); +ms₁ = marginals(f₁_post(xp)); +ms₂ = marginals(f₂_post(xp)); +ms₃ = marginals(f₃_post(xp)); -μf₁′, σf₁′ = mean.(ms1), std.(ms1); -μf₂′, σf₂′ = mean.(ms2), std.(ms2); -μf₃′, σf₃′ = mean.(ms3), std.(ms3); +μf₁′, σf₁′ = mean.(ms₁), std.(ms₁); +μf₂′, σf₂′ = mean.(ms₂), std.(ms₂); +μf₃′, σf₃′ = mean.(ms₃), std.(ms₃); @@ -49,67 +56,78 @@ gr(); posterior_plot = plot(); # Plot posterior marginal variances -plot!(posterior_plot, Xp, [μf₁′ μf₁′]; +plot!(posterior_plot, xp, [μf₁′ μf₁′]; linewidth=0.0, fillrange=[μf₁′ .- 3 .* σf₁′, μf₁′ .+ 3 * σf₁′], fillalpha=0.3, fillcolor=:red, - label=""); -plot!(posterior_plot, Xp, [μf₂′ μf₂′]; + label="", +); +plot!(posterior_plot, xp, [μf₂′ μf₂′]; linewidth=0.0, fillrange=[μf₂′ .- 3 .* σf₂′, μf₂′ .+ 3 * σf₂′], fillalpha=0.3, fillcolor=:green, - label=""); -plot!(posterior_plot, Xp, [μf₃′ μf₃′]; + label="", +); +plot!(posterior_plot, xp, [μf₃′ μf₃′]; linewidth=0.0, fillrange=[μf₃′ .- 3 .* σf₃′, μf₃′ .+ 3 * σf₃′], fillalpha=0.3, fillcolor=:blue, - label=""); + label="", +); # Plot joint posterior samples -plot!(posterior_plot, Xp, f₁′Xp, +plot!(posterior_plot, xp, f₁_post_xp, linecolor=:red, linealpha=0.2, - label=""); -plot!(posterior_plot, Xp, f₂′Xp, + label="", +); +plot!(posterior_plot, xp, f₂_post_xp, linecolor=:green, linealpha=0.2, - label=""); -plot!(posterior_plot, Xp, f₃′Xp, + label="", +); +plot!(posterior_plot, xp, f₃_post_xp, linecolor=:blue, linealpha=0.2, - label=""); + label="", +); # Plot posterior means -plot!(posterior_plot, Xp, μf₁′; +plot!(posterior_plot, xp, μf₁′; linecolor=:red, linewidth=2.0, - label="f1"); -plot!(posterior_plot, Xp, μf₂′; + label="f1", +); +plot!(posterior_plot, xp, μf₂′; linecolor=:green, linewidth=2.0, - label="f2"); -plot!(posterior_plot, Xp, μf₃′; + label="f2", +); +plot!(posterior_plot, xp, μf₃′; linecolor=:blue, linewidth=2.0, - label="f3"); + label="f3", +); # Plot observations -scatter!(posterior_plot, X₁, ŷ₁; +scatter!(posterior_plot, x₁, y₁; markercolor=:red, markershape=:circle, markerstrokewidth=0.0, markersize=4, markeralpha=0.7, - label=""); -scatter!(posterior_plot, X₃, ŷ₃; + label="", +); +scatter!(posterior_plot, x₃, y₃; markercolor=:blue, markershape=:circle, markerstrokewidth=0.0, markersize=4, markeralpha=0.7, - label=""); + label="", +); display(posterior_plot); From c2d8d1d9da3177c14307856169f5da3a63b2af6d Mon Sep 17 00:00:00 2001 From: willtebbutt Date: Fri, 21 Feb 2020 20:48:36 +0000 Subject: [PATCH 10/42] Sensor fusion --- examples/basic_gppp/sensor_fusion.jl | 147 +++++++++++++++++++++++++++ 1 file changed, 147 insertions(+) create mode 100644 examples/basic_gppp/sensor_fusion.jl diff --git a/examples/basic_gppp/sensor_fusion.jl b/examples/basic_gppp/sensor_fusion.jl new file mode 100644 index 00000000..987914cc --- /dev/null +++ b/examples/basic_gppp/sensor_fusion.jl @@ -0,0 +1,147 @@ +# Please run from the `basic_gppp` directory. +using Pkg +Pkg.activate(@__DIR__) +Pkg.instantiate() + +using Stheno, Random, Plots, Statistics +using Stheno: @model, EQ, Noise + +########################### Define and inspect our model ########################### + +rng = MersenneTwister(123456); + +#= +In this example, `f` is an unknown real-valued function that we wish to infer. To achieve +this, we have access to two sensors. 
The first returns noisy estimates of `f`, where we have +been reliably informed by whoever designed the sensor that the mean of the noise is given by +`sin(x) - 5 + sqrt(abs(x))`, and that it's variance is low (1e-2). (how the designer +estimated this function, is why a sensor might possibly have such a weird mean error is +beyond the scope of this example) The second returns biased measurements of `f`, where the +bias is known to be 3.5. The model below specifies a model for this scenario. +=# +@model function model() + + # Define a smooth latent process that we wish to infer. + f = GP(EQ()) + + # Define the two noise processes described. + noise1 = sqrt(1e-2) * GP(Noise()) + (x->sin.(x) .- 5.0 .+ sqrt.(abs.(x))) + noise2 = sqrt(1e-1) * GP(3.5, Noise()) + + # Define the processes that we get to observe. + y1 = f + noise1 + y2 = f + noise2 + + return f, noise1, noise2, y1, y2 +end +f, noise₁, noise₂, y₁, y₂ = model(); + +# Generate some toy observations of `y₁` and `y₂`. +x₁, x₂ = sort(rand(rng, 3) * 10), sort(rand(rng, 10) * 10); +ŷ₁, ŷ₂ = rand(rng, [y₁(x₁), y₂(x₂)]); + +# Compute the posterior processes. +(f′, y₁′, y₂′) = (f, y₁, y₂) | (y₁(x₁)←ŷ₁, y₂(x₂)←ŷ₂); + +# Sample jointly from the posterior processes and compute posterior marginals. +xp = range(-2.5, stop=12.5, length=500); +f′xp, y₁′xp, y₂′xp = rand(rng, [f′(xp, 1e-9), y₁′(xp, 1e-9), y₂′(xp, 1e-9)], 100); + +ms₁ = marginals(f′(xp)); +ms₂ = marginals(y₁′(xp)); +ms₃ = marginals(y₂′(xp)); + +μf′, σf′ = mean.(ms₁), std.(ms₁) +μy₁′, σy₁′ = mean.(ms₂), std.(ms₂) +μy₂′, σy₂′ = mean.(ms₃), std.(ms₃) + + + +########################### Plot results ########################### + +gr(); + +posterior_plot = plot(); + +# Plot posterior marginal std. dev. +plot!(posterior_plot, xp, [μy₁′ μy₁′]; + linewidth=0.0, + fillrange=[μy₁′ .- 3 .* σy₁′, μy₁′ .+ 3 * σy₁′], + fillalpha=0.3, + fillcolor=:red, + label="", +); +plot!(posterior_plot, xp, [μy₂′ μy₂′]; + linewidth=0.0, + fillrange=[μy₂′ .- 3 .* σy₂′, μy₂′ .+ 3 * σy₂′], + fillalpha=0.3, + fillcolor=:green, + label="", +); +plot!(posterior_plot, xp, [μf′ μf′]; + linewidth=0.0, + fillrange=[μf′.- 3 .* σf′ μf′ .+ 3 .* σf′], + fillalpha=0.5, + fillcolor=:blue, + label="", +); + +# Plot posterior marginal samples. +scatter!(posterior_plot, xp, y₁′xp, + markercolor=:red, + markershape=:circle, + markerstrokewidth=0.0, + markersize=0.5, + markeralpha=0.3, + label="", +); +scatter!(posterior_plot, xp, y₂′xp, + markercolor=:green, + markershape=:circle, + markerstrokewidth=0.0, + markersize=0.5, + markeralpha=0.3, + label="", +); +plot!(posterior_plot, xp, f′xp; + linecolor=:blue, + linealpha=0.2, + label="", +); + +# Plot posterior means. +plot!(posterior_plot, xp, μy₁′; + linecolor=:red, + linewidth=2.0, + label="", +); +plot!(posterior_plot, xp, μy₂′; + linecolor=:green, + linewidth=2.0, + label="", +); +plot!(posterior_plot, xp, μf′; + linecolor=:blue, + linewidth=2.0, + label="Latent Function", +); + +# Plot samples on which we conditioned. 
+scatter!(posterior_plot, x₁, ŷ₁; + markercolor=:red, + markershape=:circle, + markerstrokewidth=0.0, + markersize=4, + markeralpha=0.8, + label="Sensor 1", +); +scatter!(posterior_plot, x₂, ŷ₂; + markercolor=:green, + markershape=:circle, + markerstrokewidth=0.0, + markersize=4, + markeralpha=0.8, + label="Sensor 2", +); + +display(posterior_plot); From 5c172bc2fae2f2650b5b5c063a3c9cea56600a65 Mon Sep 17 00:00:00 2001 From: willtebbutt Date: Fri, 21 Feb 2020 20:50:16 +0000 Subject: [PATCH 11/42] Tweak docs --- examples/basic_gppp/sensor_fusion.jl | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/examples/basic_gppp/sensor_fusion.jl b/examples/basic_gppp/sensor_fusion.jl index 987914cc..3e9417cb 100644 --- a/examples/basic_gppp/sensor_fusion.jl +++ b/examples/basic_gppp/sensor_fusion.jl @@ -14,9 +14,9 @@ rng = MersenneTwister(123456); In this example, `f` is an unknown real-valued function that we wish to infer. To achieve this, we have access to two sensors. The first returns noisy estimates of `f`, where we have been reliably informed by whoever designed the sensor that the mean of the noise is given by -`sin(x) - 5 + sqrt(abs(x))`, and that it's variance is low (1e-2). (how the designer -estimated this function, is why a sensor might possibly have such a weird mean error is -beyond the scope of this example) The second returns biased measurements of `f`, where the +`sin(x) - 5 + sqrt(abs(x))`, and that it's variance is low (1e-2). How the designer +estimated this function, and why a sensor might possibly have such a strange mean error, is +beyond the scope of this example. The second returns biased measurements of `f`, where the bias is known to be 3.5. The model below specifies a model for this scenario. =# @model function model() From 73b87f44984633223500b755e0d944b11f206fc1 Mon Sep 17 00:00:00 2001 From: willtebbutt Date: Fri, 21 Feb 2020 21:16:04 +0000 Subject: [PATCH 12/42] More docs and more examples --- README.md | 5 +- examples/basic_gppp/Project.toml | 2 + examples/basic_gppp/README.md | 7 + examples/basic_gppp/time_varying_blr.jl | 150 +++++++++++++++++++ examples/getting_started/basic_operations.jl | 1 + examples/pseudo_points/basic_operations.jl | 1 + 6 files changed, 163 insertions(+), 3 deletions(-) create mode 100644 examples/basic_gppp/README.md create mode 100644 examples/basic_gppp/time_varying_blr.jl diff --git a/README.md b/README.md index 7c81f6f8..72eb6f5d 100644 --- a/README.md +++ b/README.md @@ -120,7 +120,7 @@ display(posterior_plot); In the above figure, we have visualised the posterior distribution of all of the processes. Bold lines are posterior means, and shaded areas are three posterior standard deviations from these means. Thin lines are samples from the posterior processes. -This example can also be found in the `examples/basic_gppp/process_decomposition.jl`, which also contains other toy examples of GPPP in action. +This example can also be found in `examples/basic_gppp/process_decomposition.jl`, which also contains other toy examples of GPPP in action. In this next example we make observations of two different noisy versions of the same latent process. Again, this is just about doable in existing GP packages if you know what you're doing, but isn't straightforward. 
@@ -196,10 +196,9 @@ display(posterior_plot);
 ```
 ![](https://github.com/willtebbutt/stheno_models/blob/master/exact/simple_sensor_fusion.png)
 
-[Model Zoo Link](https://github.com/willtebbutt/stheno_models/blob/master/exact/simple_sensor_fusion.jl)
-
 As before, we visualise the posterior distribution through its marginal statistics and joint samples. Note that the posterior samples over the unobserved process are (unsurprisingly) smooth, whereas the posterior samples over the noisy processes still look uncorrelated and noise-like.
 
+As before, this example can also be found in `examples/basic_gppp/sensor_fusion.jl`.
 
 ## Hyperparameter learning and inference

diff --git a/examples/basic_gppp/Project.toml b/examples/basic_gppp/Project.toml
index 57ff421b..81165fc5 100644
--- a/examples/basic_gppp/Project.toml
+++ b/examples/basic_gppp/Project.toml
@@ -1,4 +1,6 @@
 [deps]
+ColorTypes = "3da002f7-5984-5a60-b8a6-cbb66c0b333f"
+FixedPointNumbers = "53c48c17-4a7d-5ca2-90c5-79b7896eea93"
 Plots = "91a5bcdd-55d7-5caf-9e0b-520d859cae80"
 Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
 Stheno = "8188c328-b5d6-583d-959b-9690869a5511"

diff --git a/examples/basic_gppp/README.md b/examples/basic_gppp/README.md
new file mode 100644
index 00000000..66cc4dbb
--- /dev/null
+++ b/examples/basic_gppp/README.md
@@ -0,0 +1,7 @@
+# Examples: Basic GPPP
+
+Various simple toy GPPP examples.
+
+- `process_decomposition.jl`: additive GP where we inspect the posterior over the processes
+- `sensor_fusion.jl`: integrate multiple types of observations of a process made under different types of noise.
+- `time_varying_blr.jl`: Bayesian Linear Regression (BLR) in which the coefficients vary throughout the input space, each according to an independent Gaussian process. Here it's 1D, so we call it time.

diff --git a/examples/basic_gppp/time_varying_blr.jl b/examples/basic_gppp/time_varying_blr.jl
new file mode 100644
index 00000000..3c9641e7
--- /dev/null
+++ b/examples/basic_gppp/time_varying_blr.jl
@@ -0,0 +1,150 @@
+# Please run from the `basic_gppp` directory.
+using Pkg
+Pkg.activate(@__DIR__)
+Pkg.instantiate()
+
+using Stheno, Plots, Random, ColorTypes, FixedPointNumbers
+using Stheno: @model
+
+########################### Define and inspect our model ###########################
+
+#=
+g1 and g2 are fixed basis functions of time. In a real application, these might be
+specified by data.
+
+w1 and w2 are the time-varying coefficients for these bases, specified by slowly
+varying GPs.
+
+f is the prediction of the regressor.
+
+y is the addition of f and rough temporally-correlated "noise".
+=#
+@model function model()
+    g1, g2 = x->x / 4, cos
+    w1, w2 = stretch(GP(EQ()), 0.2), stretch(GP(EQ()), 1)
+    f = g1 * w1 + g2 * w2
+    y = f + 0.3 * GP(Matern12())
+    return w1, w2, f, y
+end
+
+# Sample from the prior for plotting and for conditioning.
+rng, N, Nplot, S = MersenneTwister(123456), 250, 500, 100;
+X, Xp = sort(rand(rng, N) * 10), range(-2.5, stop=12.5, length=Nplot);
+w1, w2, f, y = model();
+w1s, w2s, fs, ŷ = rand(rng, [w1(Xp, 1e-9), w2(Xp, 1e-9), f(Xp, 1e-9), y(X, 1e-9)]);
+
+# Compute the posterior processes.
+w1′, w2′, f′, y′ = (w1, w2, f, y) | (y(X) ← ŷ);
+
+# Sample from the posterior.
+w1′s, w2′s, y′s = rand(rng, [w1′(Xp, 1e-9), w2′(Xp, 1e-9), y′(Xp, 1e-9)], S);
+
+# Compute the posterior marginals of w1′, w2′, and y′ for plotting.
+
+ms_w1′ = marginals(w1′(Xp));
+ms_w2′ = marginals(w2′(Xp));
+ms_y′ = marginals(y′(Xp));
+
+μw1′, σw1′ = mean.(ms_w1′), std.(ms_w1′);
+μw2′, σw2′ = mean.(ms_w2′), std.(ms_w2′);
+μy′, σy′ = mean.(ms_y′), std.(ms_y′);
+
+
+
+########################### Plot results ###########################
+
+gr();
+posterior_plot = plot(
+    legend=:topright,
+    legendfont=Plots.Font(
+        "sans-serif",
+        10,
+        :hcenter,
+        :vcenter,
+        0.0,
+        RGB{Normed{UInt8, 8}}(0.0,0.0,0.0)
+    ),
+    background_color_legend=RGBA(1, 1, 1, 0),
+    foreground_color_legend=RGBA(1, 1, 1, 0),
+);
+
+
+# Plot posterior over w1.
+plot!(posterior_plot, Xp, μw1′;
+    linecolor=:green,
+    linewidth=2.0,
+    label="w1",
+);
+plot!(posterior_plot, Xp, [μw1′ μw1′];
+    linewidth=0.0,
+    fillrange=[μw1′ .- 3 .* σw1′, μw1′ .+ 3 * σw1′],
+    fillalpha=0.2,
+    fillcolor=:green,
+    label="",
+);
+plot!(posterior_plot, Xp, w1′s;
+    linecolor=:green,
+    linealpha=0.1,
+    label="",
+);
+
+# Plot posterior over w2.
+plot!(posterior_plot, Xp, μw2′;
+    linecolor=:magenta,
+    linewidth=2.0,
+    label="w2",
+);
+plot!(posterior_plot, Xp, [μw2′ μw2′];
+    linewidth=0.0,
+    fillrange=[μw2′ .- 3 .* σw2′, μw2′ .+ 3 * σw2′],
+    fillalpha=0.2,
+    fillcolor=:magenta,
+    label="",
+);
+plot!(posterior_plot, Xp, w2′s;
+    linecolor=:magenta,
+    linealpha=0.1,
+    label="",
+);
+
+
+# Plot the true basis functions g1 and g2.
+plot!(posterior_plot, Xp, (x->x / 4).(Xp);
+    linecolor=:black,
+    linewidth=1.0,
+    label="x / 4",
+);
+plot!(posterior_plot, Xp, cos.(Xp);
+    linecolor=:black,
+    linewidth=1.0,
+    linestyle=:dash,
+    label="cos",
+);
+
+# Plot samples against which we're regressing.
+scatter!(posterior_plot, X, ŷ;
+    markercolor=:red,
+    markershape=:circle,
+    markerstrokewidth=0.0,
+    markersize=4,
+    markeralpha=0.7,
+    label="",
+);
+
+# Plot posterior over `y`.
+plot!(posterior_plot, Xp, μy′;
+    linecolor=:blue,
+    linewidth=2.0,
+    label="y",
+);
+plot!(posterior_plot, Xp, [μy′ μy′];
+    linewidth=0.0,
+    fillrange=[μy′ .- 3 .* σy′, μy′ .+ 3 * σy′],
+    fillalpha=0.3,
+    fillcolor=:blue,
+    label="",
+);
+
+# savefig(posterior_plot, "plotting_research_talk/tv_blr.pdf");
+
+display(posterior_plot);

diff --git a/examples/getting_started/basic_operations.jl b/examples/getting_started/basic_operations.jl
index e2ae475e..c7f85e69 100644
--- a/examples/getting_started/basic_operations.jl
+++ b/examples/getting_started/basic_operations.jl
@@ -24,6 +24,7 @@ N = 50
 x = rand(rng, N) * 10
 
 # Specify the variance of the noise under which we'll make observations of the GP.
+# We could also have made this a `Real` to specify isotropic noise.
 Σ = Diagonal(rand(rng, N) .+ 0.1)
 
 # Construct marginal distribution over `f` at `x` added to some independent zero-mean

diff --git a/examples/pseudo_points/basic_operations.jl b/examples/pseudo_points/basic_operations.jl
index 280dc2b3..1d95ebe8 100644
--- a/examples/pseudo_points/basic_operations.jl
+++ b/examples/pseudo_points/basic_operations.jl
@@ -26,6 +26,7 @@ x = vcat(rand(rng, div(N, 2)) * 3, rand(rng, div(N, 2)) * 4 .+ 6)
 
 # Specify the variance of the noise under which we'll make observations of the GP.
+# We could also have made this a `Real` to specify isotropic noise.
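+# (For example, `fx = f(x, 0.1)` in place of `fx = f(x, Σ)` would correspond to isotropic
+# noise of variance 0.1 -- the value is just an illustration.)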
 Σ = Diagonal(rand(rng, N) .+ 0.1)
 
 # Construct marginal distribution over `f` at `x` added to some independent zero-mean
 # Gaussian noise with covariance matrix `Σ`.
 fx = f(x, Σ)

From 731654a3f87c14307856169f5da3a63b2af6d Mon Sep 17 00:00:00 2001
From: willtebbutt
Date: Fri, 21 Feb 2020 21:25:17 +0000
Subject: [PATCH 13/42] More examples, more docs

---
 examples/basic_gppp/Project.toml     |  2 +
 examples/basic_gppp/README.md        |  1 +
 examples/basic_gppp/non_iid_noise.jl | 98 ++++++++++++++++++++++
 3 files changed, 101 insertions(+)
 create mode 100644 examples/basic_gppp/non_iid_noise.jl

diff --git a/examples/basic_gppp/Project.toml b/examples/basic_gppp/Project.toml
index 81165fc5..2aa0abe8 100644
--- a/examples/basic_gppp/Project.toml
+++ b/examples/basic_gppp/Project.toml
@@ -6,6 +6,8 @@ Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
 Stheno = "8188c328-b5d6-583d-959b-9690869a5511"
 
 [compat]
+ColorTypes = "= 0.9.1"
+FixedPointNumbers = "= 0.6.1"
 Plots = "= 0.28.4"
 Stheno = "0.6"
 julia = "1"

diff --git a/examples/basic_gppp/README.md b/examples/basic_gppp/README.md
index 66cc4dbb..76828259 100644
--- a/examples/basic_gppp/README.md
+++ b/examples/basic_gppp/README.md
@@ -5,3 +5,4 @@ Various simple toy GPPP examples.
 - `process_decomposition.jl`: additive GP where we inspect the posterior over the processes
 - `sensor_fusion.jl`: integrate multiple types of observations of a process made under different types of noise.
 - `time_varying_blr.jl`: Bayesian Linear Regression (BLR) in which the coefficients vary throughout the input space, each according to an independent Gaussian process. Here it's 1D, so we call it time.
+- `non_iid_noise.jl`: An interesting noise model for GP regression. Play around with the model as discussed to see the effects of additional observations.

diff --git a/examples/basic_gppp/non_iid_noise.jl b/examples/basic_gppp/non_iid_noise.jl
new file mode 100644
index 00000000..6236c97b
--- /dev/null
+++ b/examples/basic_gppp/non_iid_noise.jl
@@ -0,0 +1,98 @@
+# Please run from the `basic_gppp` directory.
+using Pkg
+Pkg.activate(@__DIR__)
+Pkg.instantiate()
+
+using Stheno, Plots, Random
+using Stheno: @model, Noise
+
+########################### Define and inspect our model ###########################
+
+#=
+We wish to perform inference in some latent process `f`, but it is corrupted by both some
+iid noise and some non-iid noise. Specifically, ω is some periodic behaviour in which we
+are uninterested. We wish to use observations of `ω + f + GP(0.1 * Noise())` to infer `f`.
+In this context, `ω + GP(0.1 * Noise())` is regarded as our "noise process".
+
+The described model is unidentifiable. Conditioning on an observation of the periodic
+process at some point, however, allows us to infer a particular `f`. Use the
+`model_identifiable` function rather than `model` to see the results of this. Note the
+example of sequential conditioning.
+=#
+@model function model()
+    f = GP(EQ())
+    ω = periodic(GP(EQ()), 1)
+    return f, ω, ω + f, ω + f + GP(0.1 * Noise())
+end
+@model function model_identifiable()
+    f = GP(EQ())
+    t = periodic(GP(EQ()), 1.0)
+    ω = t | (t([0])←[0.0])
+    return f, ω, ω + f, ω + f + GP(0.001 * Noise())
+end
+
+# Select some input locations and sample from the prior.
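+# Note that passing a vector of `FiniteGP`s to `rand`, as below, draws a single consistent
+# joint sample across all of the processes.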
+rng, N, Nplot, S = MersenneTwister(123456), 100, 500, 100 +X, Xp = sort(rand(rng, N) * 10), range(-2.5, stop=12.5, length=Nplot) +f, ω, fpω, y = model() +fs, ωs, ys, ŷ = rand(rng, [f(Xp, 1e-9), ω(Xp, 1e-9), fpω(Xp, 1e-9), y(X, 1e-9)]) + +# Compute posterior distribution over f′. +f′ = f | (y(X) ← ŷ) + +# Sample from the posterior and write to file. +f′Xp = rand(rng, f′(Xp, 1e-9), S) + +# Get posterior mean and marginals f′ and y′ and write them for plotting. +ms = marginals(f′(Xp)) +μf′, σf′ = mean.(ms), std.(ms) + + + +########################### Plot results ########################### + +gr(); +posterior_plot = plot(); + +# Prior over `f`. +plot!(posterior_plot, Xp, ys; + linewidth=1.0, + linecolor=:red, + label="y", +); +plot!(posterior_plot, Xp, fs; + linewidth=2.0, + linecolor=:black, + label="f", +); + +# Posterior over `f`. +plot!(posterior_plot, Xp, μf′; + linecolor=:blue, + linewidth=2.0, + label="f1", +); +plot!(posterior_plot, Xp, [μf′ μf′]; + linewidth=0.0, + fillrange=[μf′ .- 3 .* σf′, μf′ .+ 3 * σf′], + fillalpha=0.3, + fillcolor=:blue, + label="", +); +scatter!(posterior_plot, X, ŷ; + markercolor=:red, + markershape=:circle, + markerstrokewidth=0.0, + markersize=4, + markeralpha=0.7, + label="", +); +plot!(posterior_plot, Xp, f′Xp, + linecolor=:blue, + linealpha=0.2, + label="", +); + +display(posterior_plot); From 7b6b0df9e89515b192971fc7fff4016bc9ed505f Mon Sep 17 00:00:00 2001 From: willtebbutt Date: Fri, 21 Feb 2020 22:11:22 +0000 Subject: [PATCH 14/42] WIP on GPPP + Pseudo-Points --- examples/README.md | 1 + .../gppp_and_pseudo_points/1d_additive.jl | 254 ++++++++++++++++++ examples/gppp_and_pseudo_points/Project.toml | 4 + examples/gppp_and_pseudo_points/README.md | 3 + 4 files changed, 262 insertions(+) create mode 100644 examples/gppp_and_pseudo_points/1d_additive.jl create mode 100644 examples/gppp_and_pseudo_points/Project.toml create mode 100644 examples/gppp_and_pseudo_points/README.md diff --git a/examples/README.md b/examples/README.md index 40d9125c..01da6662 100644 --- a/examples/README.md +++ b/examples/README.md @@ -9,3 +9,4 @@ Below we provide a brief description of each of the sub-directories. - `getting_started`: the most fundamental Stheno.jl functionality. If you're not comfortable with the content of this folder, you likely won't be with the rest of them. - `pseudo_points`: covers inducing-point / sparse / pseudo-point approximations. - `basic_gppp`: basic toy examples of the functionality that we call Gaussian process Probabilistic Programming (GPPP). +- `gppp_and_pseudo_points`: combine GPPP and pseudo-point approximations to do interesting things. This is a WIP -- it doesn't work properly yet. diff --git a/examples/gppp_and_pseudo_points/1d_additive.jl b/examples/gppp_and_pseudo_points/1d_additive.jl new file mode 100644 index 00000000..9c55c49b --- /dev/null +++ b/examples/gppp_and_pseudo_points/1d_additive.jl @@ -0,0 +1,254 @@ +# Set up the environment to run this example. Make sure you're within the folder that this +# file lives in. +using Pkg +Pkg.activate(@__DIR__) +Pkg.instantiate() + +using Stheno, Plots, Random +gr() + +# Define model. +σ², ω, T = 1e0, 1.0, 25.0 +gpc = GPC() +f₁ = periodic(GP(EQ(), gpc), ω) +f₂ = GP(0.1 * EQ(), gpc) +f₃ = f₁ + f₂ + +# Sample from marginal process to generate toy data. +rng = MersenneTwister(123456) +S = 25 +x = range(0.0, T; length=100) +xp = range(-2.5, T + 2.5; length=500) +fx = f₃(x, σ²) +y = rand(rng, fx) + +# Plots for stuff. 
+f₁′_plot, f₂′_plot, f₃′_plot = plot(), plot(), plot();
+
+posterior_plot = plot();
+approx_in_marginal_posterior_plot = plot();
+approx_in_latents_posterior_plot = plot();
+
+
+
+##################################### Exact Inference ######################################
+
+# Compute the posterior processes, sample from them, and compute marginals.
+@show logpdf(fx, y)
+f₁′, f₂′, f₃′ = (f₁, f₂, f₃) | (fx ← y)
+f₁′xp, f₂′xp, f₃′xp = rand(rng, [f₁′(xp, 1e-6), f₂′(xp, 1e-6), f₃′(xp, 1e-6)], S)
+
+ms₁ = marginals(f₁′(xp))
+ms₂ = marginals(f₂′(xp))
+ms₃ = marginals(f₃′(xp))
+μ₁′, σ₁′ = mean.(ms₁), std.(ms₁)
+μ₂′, σ₂′ = mean.(ms₂), std.(ms₂)
+μ₃′, σ₃′ = mean.(ms₃), std.(ms₃)
+
+items = [
+    (μ₁′, σ₁′, f₁′xp, :red, "exact", f₁′_plot),
+    (μ₂′, σ₂′, f₂′xp, :red, "", f₂′_plot),
+    (μ₃′, σ₃′, f₃′xp, :red, "", f₃′_plot),
+]
+
+# Posterior marginal variance.
+for (μ, σ, _, colour, name, plt) in items
+    plot!(plt, xp, [μ, μ];
+        linewidth=0.0,
+        fillrange=[μ .- 3 .* σ, μ .+ 3 * σ],
+        fillalpha=0.3,
+        fillcolor=colour,
+        label="",
+    );
+end
+
+# Posterior samples.
+for (μ, σ, f, colour, name, plt) in items
+    plot!(plt, xp, f,
+        linecolor=colour,
+        linealpha=0.2,
+        label="",
+    );
+end
+
+# Posterior mean.
+for (μ, σ, f, colour, name, plt) in items
+    plot!(plt, xp, μ;
+        linecolor=colour,
+        linewidth=2.0,
+        label=name,
+    );
+end
+
+# Plot observations.
+scatter!(posterior_plot, x, y;
+    markercolor=:blue,
+    markershape=:circle,
+    markerstrokewidth=0.0,
+    markersize=4,
+    markeralpha=0.7,
+    label="",
+);
+
+
+
+##################### Approximate inference with pseudo-data in f₃ #########################
+
+# Compute the approximate posterior process. `M₃` is derived from `Z₃` so that the two
+# remain consistent; previous choices of `Z₃` are left commented out.
+# Z₃ = range(0.0, T; length=15)
+# Z₃ = 0:T
+Z₃ = 0:0.5:T
+M₃ = length(Z₃)
+u₃ = f₃(Z₃);
+# μ′u₃, Σ′u₃ = Stheno.optimal_q(f₃(x), y, u₃, σ_noise);
+# q_u₃ = Stheno.Titsias(u₃, μ′u₃, Σ′u₃, gpc);
+pseudo_obs = Stheno.PseudoObs(f₃(x) ← y, u₃)
+f₁′u₃ = f₁ | pseudo_obs
+f₂′u₃ = f₂ | pseudo_obs
+f₃′u₃ = f₃ | pseudo_obs
+f₁′u₃xp, f₂′u₃xp, f₃′u₃xp = rand(rng, [f₁′u₃(xp), f₂′u₃(xp), f₃′u₃(xp)], S);
+
+μ₁′u₃, σ₁′u₃ = mean.(marginals(f₁′u₃(xp))), std.(marginals(f₁′u₃(xp)))
+μ₂′u₃, σ₂′u₃ = mean.(marginals(f₂′u₃(xp))), std.(marginals(f₂′u₃(xp)))
+μ₃′u₃, σ₃′u₃ = mean.(marginals(f₃′u₃(xp))), std.(marginals(f₃′u₃(xp)))
+
+@show elbo(fx, y, u₃);
+
+items = [
+    (μ₁′u₃, σ₁′u₃, f₁′u₃xp, :green, "Z in f3", f₁′_plot),
+    (μ₂′u₃, σ₂′u₃, f₂′u₃xp, :green, "", f₂′_plot),
+    (μ₃′u₃, σ₃′u₃, f₃′u₃xp, :green, "", f₃′_plot),
+];
+
+# Posterior marginal variance.
+for (μ, σ, _, colour, name, plt) in items
+    plot!(plt, xp, [μ, μ];
+        linewidth=0.0,
+        fillrange=[μ .- 3 .* σ, μ .+ 3 * σ],
+        fillalpha=0.3,
+        fillcolor=colour,
+        label="");
+end
+
+# # Posterior samples.
+# for (μ, σ, f, colour, name, plt) in items
+#     plot!(plt, xp, f,
+#         linecolor=colour,
+#         linealpha=0.2,
+#         label="");
+# end
+
+# Posterior mean.
+for (μ, σ, f, colour, name, plt) in items
+    plot!(plt, xp, μ;
+        linecolor=colour,
+        linewidth=2.0,
+        label=name,
+    );
+end
+
+# Plot observations and pseudo-input locations.
+scatter!(approx_in_marginal_posterior_plot, x, y;
+    markercolor=:blue,
+    markershape=:circle,
+    markerstrokewidth=0.0,
+    markersize=4,
+    markeralpha=0.7,
+    label="",
+);
+scatter!(approx_in_marginal_posterior_plot, Z₃, zeros(M₃);
+    markercolor=:black,
+    markershape=:circle,
+    markerstrokewidth=0.0,
+    markersize=4,
+    markeralpha=0.8,
+    label="Z",
+);
+
+
+
+############# Perform approximate inference by placing pseudo-data in f₁ and f₂ ############
+
+# Compute the approximate posterior process.
+M₁, M₂ = 10, 5
+Z₁, Z₂ = range(0.0, 1 / ω; length=M₁), range(0.0, T; length=M₂)
+u₁₂ = BlockGP([f₁(Z₁), f₂(Z₂)])
+μ′u, Σ′u = Stheno.optimal_q(f₃(x), y, u₁₂, σ²)
+conditioner = Stheno.Titsias(u₁₂, μ′u, Σ′u, gpc)
+f₁′u₁₂, f₂′u₁₂, f₃′u₁₂ = (f₁, f₂, f₃) | conditioner
+
+f₁′u₁₂xp, f₂′u₁₂xp, f₃′u₁₂xp = rand(rng, [f₁′u₁₂(xp), f₂′u₁₂(xp), f₃′u₁₂(xp)], S)
+μ₁′u₁₂, σ₁′u₁₂ = mean.(marginals(f₁′u₁₂(xp))), std.(marginals(f₁′u₁₂(xp)))
+μ₂′u₁₂, σ₂′u₁₂ = mean.(marginals(f₂′u₁₂(xp))), std.(marginals(f₂′u₁₂(xp)))
+μ₃′u₁₂, σ₃′u₁₂ = mean.(marginals(f₃′u₁₂(xp))), std.(marginals(f₃′u₁₂(xp)))
+
+@show elbo(f₃(x), y, u₁₂, σ²)
+
+items = [
+    (μ₁′u₁₂, σ₁′u₁₂, f₁′u₁₂xp, :blue, "Z in f1 and f2", f₁′_plot),
+    (μ₂′u₁₂, σ₂′u₁₂, f₂′u₁₂xp, :blue, "", f₂′_plot),
+    (μ₃′u₁₂, σ₃′u₁₂, f₃′u₁₂xp, :blue, "", f₃′_plot),
+]
+
+# Posterior marginal variance.
+for (μ, σ, _, colour, name, plt) in items
+    plot!(plt, xp, [μ, μ];
+        linewidth=0.0,
+        fillrange=[μ .- 3 .* σ, μ .+ 3 * σ],
+        fillalpha=0.3,
+        fillcolor=colour,
+        label="",
+    )
+end
+
+# # Posterior samples.
+# for (μ, σ, f, colour, name, plt) in items
+#     plot!(plt, xp, f,
+#         linecolor=colour,
+#         linealpha=0.2,
+#         label="");
+# end
+
+# Posterior mean.
+for (μ, σ, f, colour, name, plt) in items
+    plot!(plt, xp, μ;
+        linecolor=colour,
+        linewidth=2.0,
+        label=name,
+    )
+end
+
+# Plot observations and pseudo-input locations.
+scatter!(approx_in_latents_posterior_plot, x, y;
+    markercolor=:blue,
+    markershape=:circle,
+    markerstrokewidth=0.0,
+    markersize=4,
+    markeralpha=0.7,
+    label="",
+);
+scatter!(approx_in_latents_posterior_plot, Z₁, zeros(M₁);
+    markercolor=:black,
+    markershape=:circle,
+    markerstrokewidth=0.0,
+    markersize=4,
+    markeralpha=0.8,
+    label="Z₁",
+);
+scatter!(approx_in_latents_posterior_plot, Z₂, zeros(M₂);
+    markercolor=:magenta,
+    markershape=:circle,
+    markerstrokewidth=0.0,
+    markersize=4,
+    markeralpha=0.8,
+    label="Z₂",
+);
+
+
+
+# display(posterior_plot);
+# display(approx_in_marginal_posterior_plot);
+# display(approx_in_latents_posterior_plot);
+
+plot(f₁′_plot, f₂′_plot, f₃′_plot; layout=(3, 1))

diff --git a/examples/gppp_and_pseudo_points/Project.toml b/examples/gppp_and_pseudo_points/Project.toml
new file mode 100644
index 00000000..d261faa1
--- /dev/null
+++ b/examples/gppp_and_pseudo_points/Project.toml
@@ -0,0 +1,4 @@
+[deps]
+Plots = "91a5bcdd-55d7-5caf-9e0b-520d859cae80"
+Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
+Stheno = "8188c328-b5d6-583d-959b-9690869a5511"

diff --git a/examples/gppp_and_pseudo_points/README.md b/examples/gppp_and_pseudo_points/README.md
new file mode 100644
index 00000000..3e1f6681
--- /dev/null
+++ b/examples/gppp_and_pseudo_points/README.md
@@ -0,0 +1,3 @@
+# Examples: Gaussian process Probabilistic Programming and Pseudo-Points
+
+This is a WIP. In the past, this functionality worked. At some point, it stopped working.
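+
+For reference, the intended pattern -- which `1d_additive.jl` works towards, using the names defined in that file -- looks roughly like:
+
+```julia
+pseudo_obs = Stheno.PseudoObs(f₃(x) ← y, f₃(Z₃))  # pseudo-points placed in the sum process
+f₁_approx = f₁ | pseudo_obs                       # approximate posterior over a latent process
+```
+
+This is a sketch rather than tested code; if you get the example running against a current version of Stheno, a PR would be very welcome!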
From 8f32481432ef50ed16331dff725b4e8fd554daf0 Mon Sep 17 00:00:00 2001
From: hongbinren
Date: Tue, 25 Feb 2020 12:02:18 +0800
Subject: [PATCH 15/42] add fnn example

---
 .../flux_integration/simple_fnn/Project.toml  |  12 ++
 .../flux_integration/simple_fnn/README.md     |  25 ++++
 .../simple_fnn/fit_step_function.jl           | 117 ++++++++++++++++++
 examples/flux_integration/simple_fnn/loss.png | Bin 0 -> 14228 bytes
 .../flux_integration/simple_fnn/predict.png   | Bin 0 -> 14520 bytes
 5 files changed, 154 insertions(+)
 create mode 100644 examples/flux_integration/simple_fnn/Project.toml
 create mode 100644 examples/flux_integration/simple_fnn/README.md
 create mode 100644 examples/flux_integration/simple_fnn/fit_step_function.jl
 create mode 100644 examples/flux_integration/simple_fnn/loss.png
 create mode 100644 examples/flux_integration/simple_fnn/predict.png

diff --git a/examples/flux_integration/simple_fnn/Project.toml b/examples/flux_integration/simple_fnn/Project.toml
new file mode 100644
index 00000000..12b07b0e
--- /dev/null
+++ b/examples/flux_integration/simple_fnn/Project.toml
@@ -0,0 +1,12 @@
+[deps]
+Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c"
+Plots = "91a5bcdd-55d7-5caf-9e0b-520d859cae80"
+Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
+Stheno = "8188c328-b5d6-583d-959b-9690869a5511"
+Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"
+
+[compat]
+Flux = "0.10"
+Stheno = "0.6"
+Zygote = "0.4.6"
+julia = "1"
diff --git a/examples/flux_integration/simple_fnn/README.md b/examples/flux_integration/simple_fnn/README.md
new file mode 100644
index 00000000..03d35614
--- /dev/null
+++ b/examples/flux_integration/simple_fnn/README.md
@@ -0,0 +1,25 @@
+# Neural network + Gaussian process (step function)
+## Introduction
+A Gaussian process is typically used as a smoothing device. Here we show that, combined with a simple fully connected neural network, it is able to learn sharp discontinuities correctly.
+
+Target function:
+$$
+\begin{gather}
+y=F(x)+\epsilon,\;\epsilon\sim\mathcal{N}(0, 0.01^2)\\
+F(x)=\left\{\begin{array}{rl}
+    0.0 & \text{if } x\le0.0\\
+    1.0 & \text{if } x>0.0
+  \end{array}\right.
+\end{gather}
+$$
+
+The neural network is used as a feature extractor: it can be viewed as a nonlinear transformation $T:\,x\to z$. A Gaussian process with an Automatic Relevance Determination (ARD) kernel is then used to map $z\to y$, so the combined mapping can be represented as $y=f(T(x))$.
+
+The result is shown below:
+![result](./predict.png)
+
+
+## Reference
+[1] [Deep kernel learning](https://arxiv.org/abs/1511.02222)
+
+[2] [Manifold Gaussian Processes for Regression](https://arxiv.org/abs/1402.5876)
diff --git a/examples/flux_integration/simple_fnn/fit_step_function.jl b/examples/flux_integration/simple_fnn/fit_step_function.jl
new file mode 100644
index 00000000..366929b6
--- /dev/null
+++ b/examples/flux_integration/simple_fnn/fit_step_function.jl
@@ -0,0 +1,117 @@
+# Set up the environment to run this example. Make sure you're within the folder that this
+# file lives in.
+using Pkg
+Pkg.activate(@__DIR__)
+Pkg.instantiate()
+
+
+using Plots, Random, Stheno, Flux, Zygote
+rng = MersenneTwister(5)
+T = Float64
+
+
+# define the step function
+# step_func(x) = 0.0 if x<=0, 1.0 if x>0
+step_func(x) = (ϵ=T(0.01)*randn(rng, T); x>T(0.0) ? T(1.0)+ϵ : T(0.0)+ϵ)
+
+# prepare data
+## training data, drawn from Normal(0, 1)
+train_X = randn(rng, T, 100)
+## Flux requires data to be at least two-dimensional
+Xtrain = reshape(train_X, 1, :)
+## evaluate to get the function value
+train_y = step_func.(train_X)
+
+## test data drawn from Uniform(-5, 5)
+test_X = Array(-5.0:0.02:5.0)
+Xtest = reshape(test_X, 1, :)
+test_y = step_func.(test_X)
+
+
+
+# Build a MLP that perform domain transformation to input data
+# for Flux usage, please refer to: https://fluxml.ai/Flux.jl/stable/
+mlp = Chain(Dense(1, 6, relu), Dense(6, 2, relu)) |> (T==Float32 ? f32 : f64)
+# extract MLP parameters
+θ_mlp = params(mlp)
+
+# Build a GP model with Stheno
+# here we consider using an anisotropic EQ kernel; this kernel contains two hyperparameters: length scale l & scaling factor γ
+# these hyperparameters are positive, so we enforce this restriction by parametrising them in log-scale
+# for Stheno usage, please refer to: https://github.com/willtebbutt/Stheno.jl/tree/wct/example-revamp/examples/getting_started
+logl = randn(rng, T, 2)
+logγ = T[0.0]
+
+function build_gp(logl, logγ)
+ ard_se_kernel = exp(T(2.0)*logγ[1])*stretch(EQ(), exp.(-logl))
+ gp = GP(T(0.0), ard_se_kernel, GPC())
+ gp
+end
+
+# Since we always assume our data to be noisy, we model this noise by λ, also in log-scale
+logλ = T[0.01]
+
+# Collect MLP and GP parameters
+ps = Params([logl, logγ, logλ, θ_mlp...])
+
+
+
+# When training, we always specify a loss function to optimize; for a GP we use the negative log-likelihood
+# !!! NOTE:
+# Stheno and Flux have slightly different conventions regarding their data. In particular
+# - Flux assumes that you'll provide a `Matrix` of data, in which each column is an
+#   observation.
+# - Stheno assumes that you'll provide an `AbstractVector`, where each element is an
+#   observation. To handle multi-dimensional inputs we have the `ColVecs` object, that
+#   literally just wraps a `Matrix` and tells Stheno to pretend it is a vector of vectors. This
+#   is helpful to remove some ambiguities that arrise if you don't do this.
+function NLL(X, y)
+ Z = mlp(X)
+ gp = build_gp(logl, logγ)
+ margin_lik = gp(ColVecs(Z), exp(T(2.0)*logλ[1]))
+ nll = -logpdf(margin_lik, y)
+ nll
+end
+
+
+
+# Training the overall model with Flux optimizers
+using Flux.Optimise: update!
+
+train_data = (Xtrain, train_y)
+opt = ADAGrad()
+nlls = []
+for i in 1:1500
+ nll = NLL(train_data...)
+ push!(nlls, nll)
+ gs = gradient(()->NLL(train_data...), ps)
+ for p in ps
+  update!(opt, p, gs[p])
+ end
+end
+
+loss_plot = plot(xlabel="Epochs", ylabel="Negative log-likelihood", legend=false)
+plot!(loss_plot, nlls)
+png(loss_plot, "loss.png")
+
+
+
+# Visualize the performance of our model
+function predict(X, Xtrain, ytrain)
+ Z = mlp(X); Ztrain = mlp(Xtrain)
+ gp = build_gp(logl, logγ)
+ noisy_prior = gp(ColVecs(Ztrain), exp(T(2.0)*logλ[1]))
+ posterior = gp | Obs(noisy_prior, ytrain)
+ posterior(ColVecs(Z))
+end
+
+posterior = predict(Xtest, Xtrain, train_y)
+post_dist = marginals(posterior)
+pred_y = mean.(post_dist)
+var_y = std.(post_dist)
+
+
+predict_plot = plot(legend=true, xlabel="x", ylabel="y", ylim=(-0.5, 1.5))
+plot!(predict_plot, test_X, pred_y, ribbons=3*var_y, st=:line, fillalpha=0.3, color=:blue, label="NNGP")
+plot!(predict_plot, train_X, train_y, st=:scatter, color=:red, lw=5, label="Training set")
+png(predict_plot, "predict.png")
diff --git a/examples/flux_integration/simple_fnn/loss.png b/examples/flux_integration/simple_fnn/loss.png
new file mode 100644
index 0000000000000000000000000000000000000000..55452d34c56878110ab74b1ece0a249cd7f4d52e
GIT binary patch
literal 14228
(binary data omitted)
diff --git a/examples/flux_integration/simple_fnn/predict.png b/examples/flux_integration/simple_fnn/predict.png
new file mode 100644
index 0000000000000000000000000000000000000000..e3688812c052a10b3c342ef7496c3db6822a2d57
GIT binary patch
literal 14520
(binary data omitted)
From d7e70b6e05b8d8cca74f11ca32655aae65db1b5b Mon Sep 17 00:00:00 2001
From: hongbinren
Date: Tue, 25 Feb 2020 13:50:04 +0800
Subject: [PATCH 16/42] add classification example

---
 examples/turing_integration/Project.toml      |  12 ++
 .../binary_classification.jl                  | 104 ++++++++++++++++++
 2 files changed, 116 insertions(+)
 create mode 100644 examples/turing_integration/Project.toml
 create mode 100644 examples/turing_integration/binary_classification.jl

diff --git a/examples/turing_integration/Project.toml b/examples/turing_integration/Project.toml
new file mode 100644
index 00000000..1ac6b1f0
--- /dev/null
+++ b/examples/turing_integration/Project.toml
@@ -0,0 +1,12 @@
+[deps]
+Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
+Plots = "91a5bcdd-55d7-5caf-9e0b-520d859cae80"
+RDatasets = "ce6b1742-4840-55fa-b093-852dadbb1d8b"
+Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
"9a3f8284-a2c9-5f02-9a11-845980a1fd5c" +Stheno = "8188c328-b5d6-583d-959b-9690869a5511" +Turing = "fce5fe82-541a-59a6-adf8-730c64b5f9a0" + +[compat] +Stheno = "0.6" +Turing = "0.7" +julia = "1" diff --git a/examples/turing_integration/binary_classification.jl b/examples/turing_integration/binary_classification.jl new file mode 100644 index 00000000..f37c4107 --- /dev/null +++ b/examples/turing_integration/binary_classification.jl @@ -0,0 +1,104 @@ +# Set up the environment to run this example. Make sure you're within the folder that this +# file lives in. +using Pkg +Pkg.activate(@__DIR__) +Pkg.instantiate() + + +using Turing, Stheno, RDatasets, Random, Plots, Statistics +rng = MersenneTwister(5) +T = Float64 + + +# Use RDatasets to load a binary classification dataset, we use features from this dataset to predict it's color +# The crabs data frame has 200 rows and 8 columns, describing 5 morphological measurements on 50 crabs +# each of two colour forms and both sexes, of the species Leptograpsus variegatus collected at Fremantle, W. Australia. +crabs = dataset("MASS","crabs") +crabs = crabs[shuffle(1:size(crabs, 1)), :] +train = crabs[1:div(end,2), :] +test = crabs[div(end,2)+1:end, :] + +train_y = Array{Bool}(undef,size(train, 1)) +train_y[train.Sp.=="B"].=0 +train_y[train.Sp.=="O"].=1 +train_x = Matrix(transpose(convert(Array,train[:,4:end]))) + +test_y = Array{Bool}(undef, size(test, 1)) +test_y[test.Sp.=="B"].=0 +test_y[test.Sp.=="O"].=1 +test_x = Matrix(transpose(convert(Array, test[:, 4:end]))) + + + +# The probabilistic model for the binary classification problem can be formulated as: +# f ~ GP(μ, K) +# y|f ~ Bernoulli(sigmoid(f)) +# This model is build using Turing.jl, please refer to https://turing.ml/dev/docs/using-turing/get-started for details. 
+
+σ(x) = T(1.0) / (T(1.0)+exp(-x))
+
+function build_gp(logl, σ², X)
+    ard_eq_kernel = σ² * stretch(EQ(), exp.(-logl))
+    gp = GP(ard_eq_kernel, GPC())
+    prior = gp(ColVecs(X), T(0.01))
+    gp, prior
+end
+
+# The Turing model used to estimate the posterior distribution;
+# the latent variable is f & the parameter is logl.
+@model gpc_learn(X, y) = begin
+    logl ~ Normal(T(0.0), T(2.0))
+    _, prior = build_gp(logl, T(1.0), X)
+    f ~ prior
+    for i in eachindex(y)
+        y[i] ~ Bernoulli(σ(f[i]))
+    end
+end
+
+# Function used to infer the labels for new inputs.
+# NOTE: for simplicity, we average the posterior mean of f over the MCMC samples,
+# rather than integrating over the latent function and the parameters exactly.
+function gpc_infer(x, logl, Xtrain, fsamples)
+    nsamples = size(fsamples, 2)
+    fxs = []
+    for i in 1:nsamples
+        gp, prior = build_gp(logl[i], T(1.0), Xtrain)
+        conditioned_gp = gp | Obs(prior, fsamples[:, i])
+        posterior = conditioned_gp(ColVecs(x))
+        push!(fxs, mean.(marginals(posterior)))
+    end
+    fx_mean = vec(mean(hcat(fxs...), dims=2))
+    p = σ.(fx_mean)
+    y = Int.(p .> T(0.5))
+    y
+end
+
+# Marginalising over a non-Gaussian likelihood is intractable,
+# so we use MCMC to compute an approximate posterior.
+model = gpc_learn(train_x, train_y)
+mcmc_samples = sample(model, HMC(0.01, 10), 5000);
+
+# store the sampled parameter & latent variable, discard the first 1000 samples
+logl_df = mcmc_samples[:logl]
+logl = vec(logl_df.value.data)
+logl = Array{T}(logl)
+reserve_logl = logl[1001:end]
+
+fsamples_df = mcmc_samples[:f]
+fsamples = Matrix(transpose(dropdims(fsamples_df.value.data, dims=3)))
+fsamples = convert.(T, fsamples)
+reserve_fsamples = fsamples[:, 1001:end]
+
+
+# prediction
+pred_y = gpc_infer(test_x, reserve_logl, train_x, reserve_fsamples)
+# determine the accuracy of our prediction
+function accuracy(pred_y, y)
+    N = length(y)
+    N_neq = sum(abs.(pred_y .- y))
+    r = T(1.0) - N_neq / N
+    r
+end
+
+@show accuracy(pred_y, test_y)

From dabddaf8204d09413af1fdccf3a9ff91ffe4602f Mon Sep 17 00:00:00 2001
From: hongbinren
Date: Wed, 26 Feb 2020 00:36:55 +0800
Subject: [PATCH 17/42] correct some syntax

---
 .../simple_fnn/fit_step_function.jl           | 39 +++++++++---------
 examples/flux_integration/simple_fnn/loss.png | Bin 14228 -> 18072 bytes
 .../flux_integration/simple_fnn/predict.png   | Bin 14520 -> 15053 bytes
 .../binary_classification.jl                  | 22 +++++-----
 4 files changed, 31 insertions(+), 30 deletions(-)

diff --git a/examples/flux_integration/simple_fnn/fit_step_function.jl b/examples/flux_integration/simple_fnn/fit_step_function.jl
index 366929b6..3683a6e8 100644
--- a/examples/flux_integration/simple_fnn/fit_step_function.jl
+++ b/examples/flux_integration/simple_fnn/fit_step_function.jl
@@ -12,7 +12,10 @@ T = Float64
 
 # define the step function
 # step_func(x) = 0.0 if x<=0, 1.0 if x>0
-step_func(x) = (ϵ=T(0.01)*randn(rng, T); x>T(0.0) ? T(1.0)+ϵ : T(0.0)+ϵ)
+function step_func(x)
+    ϵ = T(0.01)*randn(rng, T)
+    return x>zero(T) ? one(T) + ϵ : zero(T) + ϵ
+end
 
 # prepare data
 ## training data, drawn from Normal(0, 1)
@@ -23,13 +26,13 @@ Xtrain = reshape(train_X, 1, :)
 train_y = step_func.(train_X)
 
 ## test data drawn from Uniform(-5, 5)
-test_X = Array(-5.0:0.02:5.0)
+test_X = collect(-5.0:0.02:5.0)
 Xtest = reshape(test_X, 1, :)
 test_y = step_func.(test_X)
 
 
 
-# Build a MLP that perform domain transformation to input data
+# Build an MLP that transforms the input data
 # for Flux usage, please refer to: https://fluxml.ai/Flux.jl/stable/
 mlp = Chain(Dense(1, 6, relu), Dense(6, 2, relu)) |> (T==Float32 ? f32 : f64)
 # extract MLP parameters
 θ_mlp = params(mlp)
 
@@ -43,9 +46,8 @@ logl = randn(rng, T, 2)
 logγ = T[0.0]
 
 function build_gp(logl, logγ)
- ard_se_kernel = exp(T(2.0)*logγ[1])*stretch(EQ(), exp.(-logl))
- gp = GP(T(0.0), ard_se_kernel, GPC())
- gp
+    ard_eq_kernel = exp(T(2.0) * logγ[1]) * stretch(EQ(), exp.(-logl))
+    return GP(T(0.0), ard_eq_kernel, GPC())
 end
 
 # Since we always assume our data to be noisy, we model this noise by λ, also in log-scale
@@ -64,13 +66,12 @@ ps = Params([logl, logγ, logλ, θ_mlp...])
 # - Stheno assumes that you'll provide an `AbstractVector`, where each element is an
 #   observation. To handle multi-dimensional inputs we have the `ColVecs` object, that
 #   literally just wraps a `Matrix` and tells Stheno to pretend it is a vector of vectors. This
-#   is helpful to remove some ambiguities that arrise if you don't do this.
+#   is helpful to remove some ambiguities that arise if you don't do this.
 function NLL(X, y)
- Z = mlp(X)
- gp = build_gp(logl, logγ)
- margin_lik = gp(ColVecs(Z), exp(T(2.0)*logλ[1]))
- nll = -logpdf(margin_lik, y)
- nll
+    Z = mlp(X)
+    gp = build_gp(logl, logγ)
+    gp_Z = gp(ColVecs(Z), exp(T(2.0)*logλ[1]))
+    return -logpdf(gp_Z, y)
 end
 
 
 
@@ -82,12 +83,12 @@ train_data = (Xtrain, train_y)
 opt = ADAGrad()
 nlls = []
 for i in 1:1500
- nll = NLL(train_data...)
- push!(nlls, nll)
- gs = gradient(()->NLL(train_data...), ps)
- for p in ps
-  update!(opt, p, gs[p])
- end
+    nll = NLL(train_data...)
+    push!(nlls, nll)
+    gs = gradient(()->NLL(train_data...), ps)
+    for p in ps
+        update!(opt, p, gs[p])
+    end
 end
 
 loss_plot = plot(xlabel="Epochs", ylabel="Negative log-likelihood", legend=false)
 plot!(loss_plot, nlls)
 png(loss_plot, "loss.png")
 
 # Visualize the performance of our model
 function predict(X, Xtrain, ytrain)
  Z = mlp(X); Ztrain = mlp(Xtrain)
  gp = build_gp(logl, logγ)
  noisy_prior = gp(ColVecs(Ztrain), exp(T(2.0)*logλ[1]))
  posterior = gp | Obs(noisy_prior, ytrain)
- posterior(ColVecs(Z))
+ return posterior(ColVecs(Z))
 end
diff --git a/examples/flux_integration/simple_fnn/loss.png b/examples/flux_integration/simple_fnn/loss.png
index 55452d34c56878110ab74b1ece0a249cd7f4d52e..fd2f6d83200aaca1d4930db12712af120d5e12a4 100644
GIT binary patch
literal 18072
(binary data omitted)
zxz@nnyFt|WuxX#vuUANv3hb0d7UetL^=YB;&g!4fuX3Jcfoj+NHbdiEN`TP;PBSzG z(%&z{|6IiMY~tE2w<1)kaJ6T};zG_N#R_~q{NZ2}oDl>dayh`xmzbpoaSW~V`W=Rm+j=f!ES zDerbN`|O*1g&x?C2pVrsyr)imaZ29viX&_0MwP|kFmr9zEBTkaAl2y$h4l9Q=Hbl3 z$E^7+=!aS&;vDZK?{V5xSnz&#;N7y}9iC;D|9)3LIdM|?4~g&c9ZLVYlilHX zA!|Y+B4F`QsK@X`UENBMvmkE^eEG67PM`@&A-1nI`+Ug&NX9}Z#zH@wkrSLf`%ZpF zmaNbE4X=nrR{-3GTSQS>ZZ1XrJqu3Uo4L`>t>db-T5-?AZAc42>#ExubN}!33?a^c z8U29v>*EkLdQUBq*)~pbORJdY|R^+%`8v>hoR)QCtos=sv8@psARHSPE-L(qG_k7 z4`~UM@Ulnwfp>pYjDapqsND8hl_-2H-hOs&HwzM+;$n>y)DhijP$c8|4{=zUYTp?- zHT16C^LQ@%bUC`AC!o@ehG~~S(4DqXBD9vBZH2YS2~acpRBpT&x6|35mnzbhwm3iB%Mu;6ft` z>SL^LTRf)oqbq&={D1-KRs~FpMCWB@=4j>U>+8c@Z`E-X-XjcXfQ-}w)xXZ>CbGXb zjNKJ5+|Y#`V~X2wNI&`LjkFx}wTV3!Fdl=fr|0Ke>`|KjV6&8XYQNULj`8u0pG`+c zM~eE`gV>#03Lm=%1_pdPEvwK_%r)&;0X_}_U-kaf>-_wDRVdejL>PQ0t!rj}9v=q> z8htEEhTI-5nowtSeZemWl+B_dTl`*)+H_Ym5KZKw?6x~^{yJbrsV89A>9Yob1Mn}6 zLUFR0pI;m@>&=wf%x2bpWHHz)uwNg=qltu*OY8F3>JWxjQb;DGPPXq2mp%!;3cB=_ zD~};P2c`%;Utkk=+QvhVM`LBU&Ia&vXBHf??LWTbR}9D%ZwS8JT5H^T^?F<@+4yqih8YUK%JUv+A?j zUH&ac&csv)A{Yp*W!3|>z!c$`gLFc-VH0D8Nk%1Qb=B@doz#+s72zZIQvv_~D?yPKpiMN9Wxh;UoKbYYP}E5|I}IF7IR z9E->%UKQlf(5}-EOvd(20<;h6d4kz#DDCV$3;VU(8tT*}{VYWGDaC#2Y~Rd>eb6X7 zE6*;VA*A{d@9R~yxEG(4AMtiE_#Jqkzt4I(m5}BBDWBbhi%{S4KiVz#z9+N!^q*Qi zu_gTUTjxXhD}(dq+DSqc4svp>?%}I$<-CMdqN1%^5>28LcLYabgvFx6)|O4bvN`#L zpGW5BGSI@f2c$R85>iZh{I3aX^f9S_nrV(o+hd#*30$RawqM%{n0sDIc5o07ohXUr z@_OF&Rz+8FXD}KkLfnybV0LVRQ;;cYbQ?cdZ!Pv%ayg5!kTJ=eFo)}yEmkry16oE4 zfhYb)+qts5%>G{8*~$83Bh=5XZ?3NKWOO)~jHz-BJvc(p*&y5HJgwnBKWM{hJ}Vg;Pi}Qe zB_}0u&wYiKB*Fk}JBqlhtSrzg=5k_udN_D^o;fKuYc=?*VT&BkC>&~du36AKnQI*Byv-Y%{yEOl`~T{Czq#m8Z-OJ! z+|tsuv;&=SAlK}DvFypJZ45O;ZpXZTF9FIgRM@uk`Cd}vkIIZX=RVnGe0GXfQdE2$ z8hW2S3LqE%5`9D^ePL`vgKW|~6{ilb!%Q<^Y`m~$j(hnA(ZnAp#R#}855`xST;jV_ z=~ua=GrTOhdMpE(Y*x=g(4EX4!0x;HmvofC-K$(yNJ&XSZ-qYncu(kZ(;T1t!Q+#* z1o8=_kRAVJCXW8T=-4tU5*)0f9`U!oe*J35EgSbTlu`~a3439^4~e}Q;N!5CuVju&!N!!r#=C><)wNF}4WJ-}QKq<93erpdb0-^Q6nCJigQbzpNnh>GU zOQ^3dK2cRxF4-r_GpxJ8%loTXm1sf<-r?wu6@QlBdj&yAd{NTupld}|;vi^fb{|a0 z&CLaI8`NUYe|7l)0v-gBuvig)Jl*b5b*s6-#+G?MM++omdR`;xj+pN5Zs<#jPE2H= zrITNlp$gLcmKW0wN-@K@JWMpeT8+j)(@xc0r%Q)9D|$I5;SE z1B0>5WG}bHUg!__1cwH40gTvRDs>J?m>P3i;>xy0A`&reM0CRWV?3kKTLa3>%S#Ma z1No!BQf|5?jaR!ms;NbY)?2i(uuD?kE%CvEJ2U+C@Q!<{8z z%kFwMy~5cz4xPDyR|<=&>ICXcUi&=t@LP1hOv znLW-cLpwk>4d5up#xOO0*a|3CPEAgNLTnUgM_q3K?+>MStw#n9Q4Q_x>|B9{gUeg` zj)r-=jf*FdAS)dNhKH+;&O^(a+o-?e)2Hg}QEN~R1FbQJ>!H?z2M6no#Hw-pS2#J9 zw##rQo`V(dW`T?k6BH`T)=HuP?dIrcU?fBLq#C+LuuE}N&WNBO!r5{7v50O)`u2m zXfy)hj)IBFP)jRA?JAU;0K}Z3rvVzcIXn}PT|`nAu{?%#?UX`&jI=9XzY2;oa32WM z2P3rRiMX0<&U7tzRVFtrdw|u~=)C&gY=jNbA+X2eJbHCHgFdhj4 zt*vc-UELWJ;!RCVR)sO|0<$%$PfF%Q$BvXfXffoC6pzSH> zs)U3G_&?bGm+nridPaE9V_9(}7aO6g7lk_9|9#giP#!Knemo2HXz1tyT~3<&UoWI# h-VFNx#Ke*_{3F8~DME_o@(@^&ySG&2@@0*D{|{vtwRZpj diff --git a/examples/flux_integration/simple_fnn/predict.png b/examples/flux_integration/simple_fnn/predict.png index e3688812c052a10b3c342ef7496c3db6822a2d57..6c5975c4e65513a8860f5cc94d95d25a2ac6c321 100644 GIT binary patch literal 15053 zcmd6OcR1Gl`?pQ@-Yz0UX7=8xC_+}&CD~codtXGU6b&;;GE4S$A!Qaq$V!yGH_v&| z{rx`2_jer6bNrs;_s8SDkL$i&KA-nI->>sLU+3!`t9w1f-6mA^iD{^)(y6UJTNo9XZfp!#@^W)mN-{@3 zeEgV}o!v55kGo-PZ0zxO=qu-GFPHOR@kB8aeOdf}@Tvi`i=H>OOFq z>dVzHs#*Q)!B@^Y=qwo$5)#*9?R}aq6+IzBs)okRMwXi5;^KA|HsQ`{T=%zFeo0J3 zBt6GlAfw`Y`O0(gXHu?dYja<+G!qaCyPjHg%S|Nxc}7dUISEcT7rvJ+cn9nx`5v;$ zc$_(ZUeC3qEv(wQ^O^Q!WpYZ}=c^5?2YUCM_yUhG~$4&V_LrE2T$P8Yq| zS{JmJZ#ehD4F zBlC-d)3=IXQo0y*~0+-a)1FuBQQr(S-d8(mu7WU!F|LA9D#~!rp`$p+#f30lT zMXdU+hh6#b!NEai;ba9u!C%0*I77ZHpM#c4#?H^r&&0%Jsdg)EC~EtzYu%3RXqDIW 
z2Y39;m>4-#Rn_kw9Bz8k)me|#1x+(V%3k>Ik9LyG+S>Yu|6BpJpq>3#ZNQcqg+gDB zPI~ac?v)g2_qAW0U8utP!`-=}`omThS5_&P5%!RMckB?H-m^p+>NU*)OSR=bOFzDT z$a(#`>!~QldfJl&1Lg|KU20ZZhf3^$kAIBcS!*NvdPZVUm5VuH7S=r( zDtBhON{1KhZd+D){w^s0;oo~jDhh={MMaqhER?#tyAzVLp0(hRm#=Xhs|z_g*ow*> zF(X!`q7oK$8&^i5-B+_4bX>+tos6~?N6nLef6Z=xa9Ztdy@L1r;O8gj=D>&x@r@{} zX3JvQc0}#SS)?p7g07@#&vbFK;Nsv+PEPV@KkdoZVi+Kmt5lGXkdT)j*?F_}0Qqjx zRk-r!qfd#v$zut+?wI*@!P&6?`V(g%FZe*eT*|TUjb(lC+Dx~w`&=FM!nJGHK7IN` z#v+La#~9bf&TeaAxP1FY`b*4aDt#}{2?{8Q3(ZtRxeOK~H~#e7*}INbZ9JE7TCAEY zoYa)EDDHA@$-0SI7w)rX?I)XKwUUJt)b_?3BL?zK)*1=fZES6AZEON}f9ISt%wJwx z`=Y-5h5d$la~xQ4K5T=XebKe7R7PdDp}4-crlJl@@>%k0AR`kGQ8TZ8csYz`VA_F-G>kf9{Z?+q$|WtBKjO zVLqbwkc=>f*&nq57j<=0mzUex+9uGbrIE_s`KrJ;+k1Coo}wK~$=Mb6-2(Tsdf0GI zFU)?SND{1eZ@HV3B;-}QYUdEoBxVb?8;YKAOQV7{K?Am>*4Sukckl_b->LF0%E9!c zX=Y~Dzr8(e#pjqykd^6!M0ST9Ra?~h!(ljg@a?Wna7=9MrHdD((T{Km$>no4*Q~$3 zx^(AP2Vea{a#ZvM4R!5eyY4g{n5aoIt$-m3{g5-5)$|7p*rn_5`MvX+`?fml%ftse ztX=Fe{W;7#VA%mt%d98msyJ=#t;NT~^DD2Yu4?1k4LAV41Li(0t*sciZn;ZWF;W=} z@OZN|{`Wx9*xpioNc3h(N{ZjwuZ^X#qs?L0qscho%OeGoYu!rhB38=v!3T=lLG_MVB^;lwcr3$-Wc2YbfrsyE|dV`G%>Dd2PxPE^X1_j2L`1GlLNosNT4VTImz;B1%*Tu!lqzf!kE@tm- zU@wT=U<5~7^+#Uj+jq|u`Yx0X0MMi`IBRKZkJR|xS;+#&!PlPPhQu5z?H)kw+c_b9 z*!4R*rS@N53RJ%_E#+U{o=PAjAlRJ!GId{WaeK90*qq^wS>-C9k-!&l1cFIZsPj5? zYArVt6gGQxl!8`T7}A4xf2YXYX=-Xp5-`!L_P)&_EG&HPT%LeZQ&c7f^l8>%R}LOL zi0_rJA}*|0tTHWid=jL2qs*D!L=(}H&$ zO7hMos-eh-oFz;0ma!n5U}9pjHJV`3)6<*xt{%GX8#+Hf50>%7oePlN2{69Bf(nHe z_4?D-O5e`8O{Ql>)7;q7pI3CFyfB_cDWPs}u^NJynE{A4Fnby_;j}#-L9x63TmLGH zQ36;CD}CH_EN`0e;Z%assQbr)WXm5_h6RMO-lQS(@0t|?s(hAQyuFS}g7!9N(nAhs zVY%+J`b|GEDy;g$Uvf^iTioP=db9TJ;j*BOek1UN+Az(-`I7GYtTN}%o_zx@V#a@= z{H~75*-7rMAo9qIxdv|y96mqhvo*SQUtFAjlAD-Br_L}rCM7jow^=L2_wt9B$4Qv~ zeWQ7H5&>A2Tx?1g$+@UYdDk~K9H-Q(gYS)pW`g>Sz_@=_mZp z%u(GJwX|L+h13mQ-@Xedo;=nHZ(B5pU|M0g2zZ(1I~h9`yw^Fe`q{|EW%2i-u6vlq zg$ouiffEm%>_(7j#uywN>@(ty8oK@%mhkbLH*Y2bMEuUNanY_&lcQjKY?wi0A~Jmy z@}30kNiS{f_V>JrNrqvTdq`ir!p#@kXTVA>>%}R*ALwF;82-5&<RJLjPDh7$^<$dj&X88+az}K(rl+sx>b1<@9kD4H+Q!4*E$-f*&P8N3{$8z<1tqo1Ek?61Z- z&>%0v^r(d0m&em7l=`N+!c$gy=)I42X2vJbV7xnk!#~{6WJ5zk6bkFQt6{j3M4`iE zS#0cZYHX#Tds8`)egPC{pIoT@gaEIO;9>y zWMmwK5D!2mJ(K2HVq)UMhu6$r%`l5vHxDWur5LX-oIz!c?(YDs#ofCnQIb!?CfC{+ z!74v1Div48NGF-46Na;=K91iSq{tg*ZVR8WI^`T@T1>usL4E!Az|v^>82S6*wG2bg z_1di;a&F@d5TRrJ{{7p?-0S+LYyBw6mtv+Lr$}zB{(9kByYa28KQBO)K9GrZq(%+J zbLII5rvVoTTLzp*Wm#~rjmf9p8VpqiY>^1p6tCyzw92q6YJfou50h{aC=W5727j}p2(xcKL zDg^`>gV6KEKw*I+qu;}anEgog`Ely;V20*4#Kzyv{T-Mqx?=sQ>R9BrAyE@dP)O?? zjHPo)uKP39D8u-8;u!BlnDY!j|gj#ijg>Oyap%}s|F1#6wA2b*+`#p zI}T~z)v1J!EiI)kqp~-L%PhxVz6=T?I8ar!URxWBhcHPz?wIV z3R8?eyuv=n`2CpgIAk>4TpAmDeLe4^*!e{QrERuom4Q<Mh8a&qG0>w%-K z_TXV|%6WbFu%5~(UjFU`;ltFX?d`Iuh$M1C@4Zv^P~>cHuVuX+4Xz2UW_(60^j|ilmyxltOH2*gk4s6Rse3Ay{PgMj#kMC7YFT-P1-sy-6CXV? 
zW0{(o%6j?owN84S@5&_IIm7Lyg*x@mE;C)J7=J@7rG{ODUEj!IjN5J{u^x#mrV~Py zPdHZKr{P+O4O+2w2z~!f#7Im_EAUi2S1Ie%wIm_D0Bm&Lz9 z)2iVK?`w`8a*KmMc~nAtM4qVjajuJzL}#%Gjde(yaAG2D@+8F*WG5jYXWKwxJRA9d7#a$^)iC0hnUTo60W9O+$s(86Gu zBIRna8-^QQq2-YsV`0QVEG6Q{ z_)NN1Wz-Z&#o`v}eFWKC(n%BErML*^343dg5nZbcknHI{XqI zeo14DJQUlF+aS7rLc$ooTa9vC#dVMZUqft~`ZPOFN<|}KhFE;)_V)J0nDBSS%+?Lq zQ;j$`Gf779D|<-x8`!H~KqRES?U3^S95dKZXZQ)u-Zt>WJ zqg$W37%WE`9;RDbI}0%1M|8HeNl&aDd}J@bhpKa<?#9wDhCqGM~f4y`J7E`Bu<{6(o`&i%A zA)n@kjU9PEKwHOydyAhh#z&ek^Jtr%;VR}yn@ zSa;|1e)6jdXH9acXLdztsI6&>tjmp7x1_F?uS}Y6?@-a&M51UAE|F18WGsjyPWhB# z5iWraB2t0=Lb5CaqoJ;@GD5;+Z^|;r+UyQq6&ROXAVa&6zh;S^aFgyTwz0Rj-=IlI zA|ND8FACcxY0x`nlhWRZK|n(BeppS%IUk$q0nRfw33=1LAFPU@t&~JwnM|y>g!@?L z=7wWLMxkuM6mK>YEXT*l=Xhd5rDqA^;^(7gnL$^;$ecz zd^O}*SZ70Hb;*QKn&Y>Ml^Ki)6~^7)kPS4*R*+0uDF}oGoMtdBCAR<4grN5xOLcmk zA03B1i0$4}G^Wt`BHa!5{*@R0!4f!sZL9rB3z0InnMqx9XL-CZXs=9)JdX8CMNszT30;d4 zG2kvB+37!DkKMhtx0RwueYwQGg}4dwI`HG)3T|w*J>=@wQ91{7S`p-n2n7=!DJdxd z0d0k*lGU}f64L@C5^2kz=N=UueW9iXYH+`N()DwX8D6==%F@5 zr!G^=<8L4dZB!Yyzjh11@tC$ODRB*xHzwb(5;i0KJe=v&pQA-lAN&3qy zWwGM<92Q@(i}`HT4tMgWFak1FmrCyKpY_XH`HP;bY^7!>yWtNGWCf{lPmH%7Yoyi0 z=?i(UUOm_k&oemX-bp=hzN+gfhGZkI42WltgGv8!pdc7mRxpHzA*rl!IRw ztM)nA-53j8>r^7mqO(!yq>;K#lV40VU(6zeYo#Dhx>YUkjSvy3p-V@uDnuG7z$jtc z0Spp>on4MHw=Zs06S9KQB4Q>(2+PY#xVhn=_jP~4je#9~omANk$m53Vf&z4Pb-^R+ z`0d_wb6XyKceCDae*U-ASTgyHC#V^*39+OJuJO{DkUYvj{A^Ir zrQtUZ+5iH8|N2``ZuwIQv}E1U;cUC$xQkD%zRXJBn=kp@n^Wx~#>x2-$Ti62M30S zGgDo&w?@Xi3Q_^jrK{~pW`Uv;6En!K4t~9-Hh%T^MP|QRBhum%6fBtFzQ^9K_?=KF zYyc_H7izK0^Z|v+pmz@a?bd9adoAxrlBM=ik z{-+;^1jAbY4e+lNERq6}lHaV~$wS)y5%^b$dtaX7Ru!vpE0l5p2k{U zyS>eYsn&bLD+}e<9?(*I3Gr%Vc@kEMk>Yl`QHNbx%DH!)r!w9 z-r&rPr|QA)L1!BqxAobTqxN72-y+$}AGe8B$1*ttYMI39fU^v^!7%TF7pq&ThPsr< zbIaD203Uy0sAOi?wO+wN0*g5CQ%Ml-nDvc12Nc<0Tmu_^YKWvI|=V`yr7$HvL6MO>x3M;N;y34F1~NtFNyQ zQN!1!CaCl*#u_D6E_Z2BCOt@8tJm%5@SI@{#6NwOC@40(_%UgO&yP<}rA0k|b+9*~ zwz^KTg++`GK{7_5| zyI8%{by+(fUk(>1dcx`t?f@tkyE*z)M9%1yV)GdJG`wjgOCKlk-+r zR~NtieSLp3+rz_yHZt1)p)&i~c4|=cYj1Z!$r}zl{7m6%lu3`!jnzxGBVI-T2DuzF zz0A??;57C1^}W9G!mz-!f8OIzQ^nYSnk|wWIE3}HnhB56u6l1eFvu;b7+(W2_o9h3 z@AR;9O7OR~DBE|&%#zN&tCV?dzomtR?yR(6MMWKvw8WEk`C9v#V7P#=J2oF(c(+Hc z-lNIQ2oI0B%gImNFd?lN{yeSgOV&(?C2$L_R*Z}U7!IW7K{`8wd9wxsTm8tUpHha= zwJt8&hwnv5=e_q2zT7ftnM>VLOY-lH@x?{qzUZW;bn7xQrl$f45|h<{&$pBeM`S#nh>86!;Jy7J=uoHLg(kEAH5 zQq*)O#OwgE@uNYccnCu$ksr4%T|6xKhw5&H%E@z04a(isdCy~hW1dw0NTf0HnuB(Q zAUdq`HBgH_z+~2hp@GYZd4YK#W0o;<$ScN|1Je{{GFrM1{`I>X`Mp2#s#H?P?_7Pu z;Qj)*AQpRjgJw80RIVA-Ix)u+;1+i>XjZ^{HO|U>o$~KcJYyxO%8xl0*qRDW%;+%B zoxHmCcU;s^4?1SPq<>M*+~DBufl*bKY3g>)8eFa zU2$>8r{*y+GuJSyYi@GkxG!N0qq=oG1)lc5A;fIlscN=V#nUIxyCJyxY?%Ho5pYXR-`>Q? zVaO*{U}sKxMPozfBbk^@Fg*ACJpajbi-%X%#02O|<|tQ`kL|2vVhJ-tvg2RM&%rCN zFt13m1p7ji-Wm{9{cB^GO)oG|k%r8?Tj@w1=Y|i}OKYZ98*i>LaFfv$N4DHMZY?OY!)Y zCZ{51q067}6U}s|hpf+jS;E8*uJx%BPKH)iR#3?U8v0A2d38gBveYIsJ3BiaokY;? 
zdP^Kbk@34pVC>z>SpHVOmq<-R%=y(`c|vWW`F1w3>iEkzp+BS507A7G!W$Sp*ZKqR z)_A5T=Zxz9yv>8A-CO9e-cC+nVKIASv;ama`u4Y>Mg&C@z(MHenPQN?JLSKs=RDlp zr4GHeO_8VI&%8x0jIB^0Ir^$4Tq$V*bwtR+1cS(aeQs}W@9ez(HT}AaOJS$&ouwtg zbUj^4y!$mZW-qP~9(Hp|NkMvshxd51w>w!zDOVs(zK@ccdUD1mNvfr?mKKU&@B^N@ zy(`erUxotRyS~0Yxd|Vw_gKb&O@TU{ z*$4YCTEeyBK)35By#rbW(Ems2yDKjwyKejA<@Gz3nzE6jhN28SC0}&zm_0MUs9{%1 z5ISmG;s~{MVD+m<3mL9ENRI^}*diMn8|}rZq3_%6q9jqg?!_CWURA>fnmK@^{ z!KJIIsp;$K(Fmn6uH|!eaZzl1119{-mh=3pBu=03#kYhtMu{&ie2cy0!ZPc z=g&>S9dc>jHz=~GtqAH)Q!vacg^?*+S!ED3t6#pf#YO9W0~)kE+BF&|G>2dyu>X;Sdrcw;y^x4Xkqxtg(Gh29S5X z;Ak$MA}6$aLCL|HW7}1eTU=bsn;!VJ^j7C{iO?47*n5~CiEMtV?rfGda7rO+fE5_{ zIK}`s3+pTDBN-Spg2L>{p&wk>-f;j&fT;|)T9xGf12x4%L`1p?Clv$NaW@P$oPZ3} z3xp=VxIUH%b#KP7{@&<{&{;sjm} zpWs=|%gk#S1jQ6dTr;%tV~^d@3Y_0p0?-+vTSojX1}~2t0tWCf^ZD3o!}Rby#v9{q z{PjYmchxmbQDxiMj)T}BoqtA zgz?3QZpZ$|;HqG8d?nK2rfK~U&E|=!c2+;|H;(kXZ#{D zJA+xAso(Pdv<*-GXLqm$C-cn7y!LklU>uL5j$edTy@i?2zgHuN=8wi_Rm{+xCHD_j zdaF&E1^wq@eC5qOZ~Sc<0*sR%N3`nyf?$L-X5sbiMqJ_HAt0^5ow%Kz^S{?$z{__>oAbRjY^&R!F| zWJ7Je)|sk*`^YpS2{&$mOzu6%2@fJ;w^mHgw^Y%SNS->=JV50f*U z|M`(UL?DNaDO#vXP>+XL_2F9ggj;iKYu$OrKJ)aTGMo0Kyd>d-g+2!$`nxoZ@{PZ| zyZI3SM%>}+Pk@iEt}dbWy7|p11yxv(0GtjXiiC64lq*?G5h5VL^P z1f&|4kYcIM5)q1tr9YL(+nY&u{r-VZT6)lnj2ASzx7EbF;jM#N2W>amd$p64Rme08*_l0%*@RG zoe~3P{=c(%g&#b6^d68N-s^vJEem8b0PWJFubm}_UuwiD9&S%TDH7@|@|%M0Y5dI#n!i9MPy_uRU!yL))k6m+=42ITpay|hxB21OycFcq2*)2)ld5UW3{r_*Q4&S zNRcDtD}c3wS{5_pmzLGu_$sm;;r9)l#{AY0B&UFs_!u`J3l$+RQPGI`P2LIXL4vIX zt(xDjE%^98@h%vuceO4nL(#+^CAbMfoN>R#HNew3^($#YdKv!63kWIrN! zGkL&QTSJV}HO)q@3A(0qoUGX~f{M28B3Rok<iZPL}A+KJ)hRpLmlUOS|&S_?s(C4}| zsvJbYQFt-YEE9^eymRG6*;@BY<-Es>Q<%xH!8n9!+h9_v=knWlB%MhJ* zJJ*aRw#U0NPPa|{OVz8mL3f+LBCHAmD!p93yespux$Vme3k{?1xl^3KjoIvQ%T^vR{1 z<5!KLdYYiPYwtb<(Jytfz>ou@Fgk9T#ij{(+XAz11%%~IEhq|M(+?Qo)+eo``AAh` zlCT~;Jq{XMbiW9e1?s5jPBD2JDj@?~dOTp8EU8hnxPk5gg^rqlK{RhBe_vFR8(j^l zl=A5jQcE?&KKbYbx$!C74Dz379!9cS4)VHaw{5(`aNyjXKX+R`d@TOeC4`DRbz7WX z)z{Z|XaBBx%`=#x6dV_jC}m`1Cbp+(?$CKT1}elC3nG7zY>@(QP< zbxw0a<6$SV+j7Sme5o!>NdBLU3AyUAD0y@mm3z*Ti&kJ(1R-%Z)G1Nv*{=g`r+< zWb#;fcU$@6M#+nmX<{M>$El1d*sRF*2`HMfmx^)t_7d z8d*gptR>Oz4AeVPOM`o-< zww9yg^qR@;j;`)F5%K$g)x5dD&mBJ>pC(k%yg$fB&Lz@gTWPC=aRv(! 
zW?!r-Yew#)rf+6PwWEM z_ytBBGAk18=x;`rG;D0CiHUc=tz`vnME5)9>2cEv%?(R=uR|EkXF9L2O|egOXMCX?0&05F56(=r0eja5j1Slx zLzT3{csYnzi?KG95fIL{@3xdtZ_rlNi1Ih4B@+L@mBQ8~!$H!_N2X^`b?|?d&v}77 zQ$w8Gpt*dv{wTtGPsErA7s-|%wv-`-)j1LL)We#4jA7#_{A&JbT8|0IVijEbQ%=FV z?tqq1Ef?-jTttUQM1X2oz4;SOK=vz8H?_50OdxOiDzd^n9VlGu*J-hha~wqn4- zMG6@=9yJU25J%Zr>Dv6%=zSejfVS%GXR2IzT072|Dye7auO2FLv~x^&5+xa`0f$`$YoFyCwpBzql1JWe zvM##Ghea~Qg;hiS>PKtFUn6J%Ik^DPyn;B!4(Q_??Pt^wkI>LSHQ{;Uu#Sb#iE0FK z@X^)L(Gj*!T-;RE(eV#((@uSFzCZ>V8j8gMnYUx9Ar#(ync(<$$t?wfa1r#+Mize4 zAP%_!?@9;AK~`xn$>rFU6)RZcNw?Wq*Y`R8fFq4eD0d z!XqF6l7Ng^{On4*@C&r0Jz3*&MEz0QDH;&eRYJ7$7M5hT=QKu(_d-%k?0l;-);Ei>kbXDGMw%2H0T}IkI2(ezcROLea0NFS& z^@nydU8+h-S{E-`So~m*{?c=ct5UbWTbNgA%rCq?Ujv>CvEdE_$b{>v=dfLrFz_{?_NmAlF2pqAjr5GED7f^>q}yR;wV+ zg`7SbIRad_aEdQtg5SRPb?1)n<(iZ(E&&C0nS`DX2&Bmy%~`pti&RM{VELD_lPz|k zkOQkB6}Y_&3Z};^Mxb$m0K~5V`U)U2k(*#9CL^wGIhmUL+ z_%tz@U(RrE+)2)ckC(v}!RgXl4cR8d*?)68j=u>Oq>m?$K}z{INyDUs>x)z?zK4ex zniZ2tk$Iv|{Y}Xs4Lqi2DQRuI@i`G%n7=|Magy?5Si55?8HR9@!T<9(Py_$hN1RT` z?*H+d3jeeB9{zuA&Hg@+W^A&aZS(x{%AnJO45_2ANh}_rAS));3tu+Cy%vCVM{l|` zB=C=hdHoQ4n}_lRbbkK)98M%uCrv-tivk(Dyu3V#YE59TzcX`nW<#6SE}3`(I+pT4|!*uU~-* zhMPg4kAmAGV#uaQ2s!m#PRq#bBwGf%-_A8H{qvi?FcjE3N5{8fOVgwmgN%ZgKfgt?W^yepl zgn%)SK_1O!C8R6_Y|fj!xB2w%8%#saWl+>JyWO4Dtdg;}iS%n3&}Zjt-Dzkdv1g z3xLkx3viui5g;8qg_?1HPESWgM$T$@8Twy)VqsyCmUC2lIGt=+<~+38(Qa0Om;m)c z>eMPgjVLL4$6JCFE)~!U_x=}lGZ!un>@*dZ#>ALmF$fWv=QsdIC@3KT!nrCa5n}%A zzL%yDkZ*`lMu!2peeelxV@-Fa^zwot{69RsI){={2n48rGN6yZ_>1qQd4D%#h7sxQ zy_s)PatO&a@V`3G&U1^4i}Ucb%Ggx-Z~TFNr+@zRob8zafF&a%t6lzQBQ@03`H9R; zTE)SIEsR!|4_!C#oEs=&SMc|AcAk$sMdL}m&Ufq9Err!KE|65GO5Qd#G)x+rHn8`X zNu?RviPxii&IK;HD+NRl_^_0jKnZuJ%1H?c34z@}rV0N_RKL92=I3`81X>^29kVm57*B_z4Cz;VDP4uNcUe0+SauzIl}4A<7kfC?!2 zOr3Pau6F%=%laQDEnMy%P|WJ6d>@~HQOARtv1KpaY~Jv}{efS(x~M|oAX>$ma!p7u0U+-Ax~Ohk?Qi-v)C_zVvZMz7oDdehn( zbNLAB9#0KSqo#Jvn4(u;Jo6)!YY#OMxuABBy?)&$r&>PhaYnK}-_E$*S%E&kuX8f01 zo;-c}6lA64u49w*mcBY^d2mUl8`c|O0M_iO`PG)}BeMOdI*6+;jbV}F`a8CMR=vU#>r%yw;BQ5O%h(DzJY)rMD=z>pOF^X?pM)N6Vm!wT6hk`~ z14Qk^b%HbzWH`WJi?!c)7$Mjx)LG0+=BQWBJO>sRuI~ZqOtsGRIE!HSJewut-}8o9(=_U literal 14520 zcmbVz1z1)6)+gO5(hUkIEl5bW0s;ntNF#{Sf^@eaVt|0O0Vplq9U=+}N_R^gO8PKs zgYUg}X1;HpnL(c;o^$s8_gd>0`>~Fe8YvM25e5bZ>6OdM*D)}#^f53npAlfeJBNCY zyWk%@a}70Rj3eYvT21Cl3=B4mE6NHt-IJC_?s=b{K_4CUWhh;)$0492yR3$T^8xF6 zlp5|;`FBKNT;Xhi9FFg@ve=l3uWyFqlGm}B>v8aKTrR99REc22dfqIL^^QAnWOuCJ zVVzuN<&;>_zOQUKWnOaf?%krI)tbBnn^to=JPrO zf!$U`r1Tf216vC%+El6ai>;sEA)=zB%%7Zlm78Z&^=HVT?&{^sJcHJBs)~w=wzjqj z7c4PMOiappxi~nCTV!M!4N4x!Y<4Trie|if*Kb>T?ncJu%81vmoZATr3HBeZLMI}; zJcF+$iq(`0U%XgiKT;Vjy?%M7BQ}oj_WIspzIMWenHSn}z6U$ul$=;tSP^1c@J{KF zJ@4~}8uGrrwfXt^E#^*c?C+CD?8Qlxny~d^pDP#}`|8Jg{jqlbp7?Pm=T>%IU0qeS zqx6IGazlqY#zEv_{d-l#` ziSyp(LP3Gha)e%NY^-I8!IwC})fGkcDihzeE9&a214T*E(ck>5B04)Y6Ti|hF#KLo z)Ew{qa3hviPi#x?&!mf_`ZWrcOD^pXN=6@k;rFkrJEE=))#rQF8pSBLGal4eSZTv2 zBqYScb6LS%*R^V-uTPwZr+p+O#ubg;+26;<#XU!m-(giWU@KPhA);yRisf%MzNgm< zyPDhDuq7oWyJYv5U~zJ8Eq2864wcx^H$6N|hay5kO@U{UFeZ>g>jp)HJAq|-xbuH}^*i&; zOr2&zB1KBdwFmIpmClkLPKDe>Ug_=eCE?+E$c0#21S&}xY^Pyk!Rs2+Z^{UXhU9I| z)6Z>Gl-~9YJm*yN-11?N+0^uuEJXpt_Tw{t*qU7d*Mw_#dF9rw{ za$UQ24O(2m<=3;8b4QKe(3YWQW*#ZJ*TtRc{Scaa8iwBO@A`rBu-rTOJ*8Y%C;toy+HP)aBnp`| z?yOBg_wI-`3D|3EX^{~UI!XVQxT~LqsuLe^>3MhMrTU5`&ckkcZu8bs z!=Ti~jS@DuKmVOl@Ac2zde#nu#S42|i)v4qZtu+hTKtv3@8j|W7C1|dM_yjO{-w5T zg?UQ^Z}IkGpTpta7OT(ZH3NgefuehLJCl(xy!{>b`yB2I`TUbth&Hqpc*sZ@#g;~E z_NfPXbw3uvoU{5JICZBfOtnVsFPDwc2pH;5J)Fu-(Sj|_kun}=;l-);)a8k{ao!>8 z(N0^=?V5Q>-SIQOD+UN1m(hK&hYWsj@Jk5Z?C$!8I7vki;8e3JNu?WKlfJv zHhxM(ln>q|f4O%_k{jPUVz+aQWt>|g-sqwEo>FqEGD*3T6bvmtr4J~1@YCSa-Onu= 
zswyf*>+8aCHrwtnqdhm!3BAFDROg#p^RP4&G$csERy;h6&|CSiU^Y>6}xco7)#vHr!u0mPx0 z?u;;URy1;jQI%`#aEH-5Wbj0aRJV()uCgZbqb@|d22V3f33$Yfm`6lJ?9C=Ru(*#0 zgj)m{eM3|m+S7&FExWi}=xvT`7zx+D% zUSO4O&YiM6xJ1V5&%ylLX#d{cUV0I$&EG$5U;^Q6YF)X4nW3FZcDknc{)abgr-fx^ zQt6qc-3!pvwAL8-t3OA@ma0eekEv93-d%=`pISC2ePNuJ9yzNVAJsF!*L4R9`p12{NgNRmq)`>zO7H_ z>pxk3G2DHm{`4v3@j5=<+1;Hnal+)?*{#^5l(FtkBceXfXH>a4-Vn6dmmeJ+-PI-M zkhneKmfB)|ACjoC@hJ6R86-{EVQFb>rl|1-6W?9S?;(kz_t#U84!F;s?{I1;-Pze; zmU2BUYf$U$q30X2sHDKdF^LC>Vbo_=Z5fpvt^HOt%5XqAgoxUIF4b>zY^)B#Yf~uc zP=T4!cifIw{9d2Ajl1oO2KZhbNyT`qF`rNITB%6&L+>uaWKTv4!nE9gI5YDlPA}Jh zy8YWXEp2VL8NG5@QPK0}o|XH-^fHj`Sn+U^YdYWBq<=PY)BSM0Ws7uqxT47K2yHi5 zyt_M>ikbNJk6T}N#?{2KQBPiRadAPx)nvEPit+DZES^~petsl}TEe035U`jeGe02aIh}@^moQDSGOu$)MiK~n8lUbhDR*<>-7 z`ECfx>l0zDS{fQQ-`|8#In${|F~r8j_2rw?U3;r~(Y9YmLZW>BtP-;zx_W21{(z zl$9|cu*Zwp4Z=S4Nw(S9nTj6s-CvHDLzkbqmFJ_{>>Q7>$<-^;=M$_(HGWL>-Sg-# zz_+ihxF36d%AkzQW2It8DNJ3P2+M1G#b@iMURRPNQuv6Ex=%foT5)t!>5JETMM>$D zIr}xSZg)0uC2vw4kBn(&{fi(u{e?4ZY~xS~9#!@E@*YB5&&Ygf`A#|Z+zqSs>CY>9 zZ*cBg--HYcIUX|XNsbqmflyJd2-}du_$m_VAm#t_-yUvd=`hdWz8mW59S6G`BSs-* zj%ysUdQcj`5gps=D==%KHFlrqGx2-m#(tKUmv`}@TUPnE#>U1=u1orgyn0Ylke!`; zG&MCP0*mRUcWv^;ix*J+60J5&e>lzte>;O5`EcIj4EUJmzm3W#g#bjy$4D0^C1b)l zw4nGoDPH1JR%F>HiHyvzHup@=%vj#L7bj>kIuasrPEKyD4Sv*2dIb3)tBZ-bLTZC8 z;Wp;uR%G>c*y#DfLN3>&BpJ)F&m8P>3MRw1iXoRC{mwD0SP#Tq-`EI%R8qb9Lj$s& zae--riQl2_sn`RUYUJQS`OS2PF9S})A9zQQmB~={%=AhXY#lGRE4@|^AO_mNss2U6 zV)zJsP@I{%GWw`R8?P{yD>X?wYjAw*RbM( zlvLG~mt35xPqMDRCn~V%%Y#BDG&s1}rcdC)g&&2}pI>sbLdYI5@%7+6z#M+~1*wXD z<=(-TL+?x>MbKn3VVdjmASr`r`C?ubEE*ZBY&8TU2w`Q$p7#q!MwxRwJb0%jFL`f| zwAu{`^i6&K`qc-bu(6?HxF z+!_p)r=7kZent0ZB=xycnG66b8&IGN{3g&&L@O|<3%TC+yCOM#_NkVVl3m>AJVzk4sUrY4|dr3ur!SsIm;AR6hnv}KA1*K^}`Jv>Mxr9OWX>LY2l zm0{evb;!ezr`_ZZ@LwrPS@-1`L0!`#_ziMorOU!8+d^eUef|Dwj}532-PVY%PP9Mg z6!$+o&|A1T5226H4kqNgeOX+0hX%PDo|&&N8j?AUyuuO?hL%f)5&yZ1w!x3&}y zK}}d-fyDJ2QG`_J#mf!&6OXJ(@@c(wQUcuAPpK-ZDoi=Xli>BIOUDEm7Z8mGRIxEN zVq?#BCO~-!Y3Vxi^+@k^UqS^11t0dZv>BY6VNz^#4;0Y5slC{yOZ|nExZf9d$_N|N zS%(Sd*oY6<=MK16f+$ei+vEZ2vFE?W3v+UFx2^GR|Ngo7nL}fc9gj;p#k+ym8LllL zqNWKVC6U0%H9go_>*FWr>FLQgduBUUTQjgL_#88}2@aY^u9y?1AfaLzGpAvd>+)2z zT*|X&I>o|bVq&o0#KcM;&U8SX;i_lHG5c`78{KEj@`JWjQE-REH>9?J1&u4-poXGg z3Q07h!q{%m##r+Ac0VoaUn@i~Ii8B@8Xm@_;ubKj-iaQm3Tq;D#25{A8&@ruo#qSm z55)SYc(0!rZy2w3F0K!&7GkM_&il(2_*vwsjn@fqSqp-;4!CC(>GX4c-RyV$-TP{x z=V&Y657V{kaHC7k?_>K5ns^MzU!QUFI{~r&T3s5rMnUXjdS;kJgI4f3>EhO%hcw#3 zn|~8PuKMN6yWQPLZ7A*WW9#RytZP%xT3-10T1fZi-0saY()VMMz;p?`Dj*PUW*P8vHB9ElvzA&z=s`@jQmzlFnAg>fQid##wG?51` zf0b!%{%RAs+|uOhnxrAESN8!Sv_zddcMh_+yt10f)!7_Q&4lyNIRt2A^06AcQYu|k!%UYS zncIzH>nJL5aJTY^)pb#fAcU}+NwYEtYp+G2l;0+QbiT02;y zf$132jV~$4#G7vF6VX@`C#H!=lk28a*#va&{%E(xOde{AI}4|a5PoY%`s@(k9SnVPZyGAWjK4#=eZ z@$vD8l)7$*vHHbc#q;rVH=Li=e{1F9%Yh2zxP2X{8Ax&8S_NhF4$^ zNaE+m@Fp8esw-_Xlf;-|C-C<&<&q|)*MY6jiXxOI$#2rk!rW&g9c;v7P4)j|xqbKU zF#bRR(Z{B1JNRcBnY(FI!b(uA<>aONyDfDi9}F(~HN{IfTlp3xh&X)f=m?>r=KYo= zaSR(MFnTJu9xh=<^u6xlUZl{uz=Blk^6+;;*i4^t-bZeR!!?MS;vJRHYF{s z-8&4qztvy7_{7Pfs4*t~-R2;Sm!DH>ZEa0HQOs^~a+0JeJTlT|s5G}K@{}-UWXpQL z1r>f%df=<8D2I7cVMn41f}ZMlOCeWK#QmnEH$(5F;|>#zn2HVK*U3YKF!5efiEMdD z=kJo`jsAE(h`Kb8l#E8yJ~VTJIsO{zJ~JtM0Zwh;eH6`(;w2JhVTkK*SntYXRFldz zVeXKz_?wJdqQV#_c$6EadNQDp|GIVuVH&ran+N0$PKV^DqH$ zHq4JoT+&3b>6&d%oo73NWbL#=acJ_>rI6w=Zu&pK?Ha)wc>X-!d+R4`%*Z?uQPE<% zp;CT>;__(~3?{%oTTc4@7aW;vt_1p0Y;-FUf2S7rN#E@P)EU=K`SFwM*#hvZr!uA9PwJM*&6rF!dz3C-TfKL z?ugeaPfrl|ih?XUS+0+tR51+#KN^jO1zR2{ngj5|Ox4>O%__Hf5JIbjds*PZ6betS z4zRAo;oj0fQKnKD*+e&mVo+d;=kE^?wQV+jn2aJY{OEAI4~PY*1A#&>9+a>lAt4ESbY>49K6H04e@bbrd+XM#pARmsZoSQq?#UdeO^%iw+(GZZ 
zeLc&PgT1Cxs+$e_wzA3$Sf(Er;C5(WdoP~)*0VSD}r8BWIKi_pLw^CA2ko1R5ue;saB;3DZ z=luJO3@&bNC<>(n1-nkrX1SunvzQnFWY_j!d=^BC+d339FQK&4N;YpJ6SM0WF8%8F zXDRh5v(zo17y$akH5b2p_wF4q1inCjg_?;vj8+49hw@<%3hnQ2)u3t`1s)P%B7iwT zhi82&qdUMzNYba?L0#6kQT8Xg{|;WyAsk|>0P4clX+%)!Lk z+8XNhgTwrp&y*k%5H*@#r+8&dq_~>S#z&?`k#{G~WBT$ye(ibw)Z3wEOfsH8##AT- z;XyI%=q7n2hikUxafRYl-U1cJWbu-%#)sEd=7st|9z0s3$i964hQT)mYD(JJQh<}h z2oERu)M0vBKQJi2G%MUt82}!8FXw5er@+i^GRY{yHxN#ng+uBNFnr$t_r)7G2rDY8 zqhxV7STvBYe=6di=}2%eCM$bBrb)V zbyV}D?8-b;ZQdlou3~0ol|EFR%OJi-M~AHO9~ z$$vbyg$1}E?|79bq!lSonE&~^^e6q+=Kd01u2}UHrr`61znunR#3|%>bad$_3Ei1zS~ji!6XaUG`_S%-aS?n=8-Fk7sK7L5c&WaP7klN%}r$HkshRE zX#Z`kV5m^v<V_!ZI3MmCBw8P;F zXK3>fy}z33^9R`4FQZOv%m9Avf4uuVMwAPZK__5IHa50Pmpp;(3Q>rE@xr`lAVV|Z z#j|JKz}&A*wc+F77#_GZ36(mGp+SwLwX5FvCY25O=YV0DS4v8XmsjJ8pYHA3w|94U zzxj-J3?=N6(2L~+=8eJNCgAt;lqayAbsv`0B_;b5pyn%ZwgUg${Yj-`*UuKv|YN|@SwyX_g2-&3L;t_9C)|c z%)oCY2i|gtq)qKzU2`AztEcDYhECMQ!tI1hNFASFsaY5(Bq!%LR_g;(I0ABxxw$z^ z;hmBPTab?cCti?{kT`elJ~FMa4~ng?3G<4OG_7?ogkBRRfmH(5T)M5R2`GP1SkI=e zOilH`qCjG$c6NTRV{dQ2yFT-iv--TBpI`P*gqL=sz5WpwA3KJh@n)^Jqr870>iogM zfvwmbYwN5kV&MP~_||8>Bwiwje5Ln~F*2&CP{KTr&3#RDgW3#;?@ypbAw+}gF>lr87j2FY_I8rEF0@B$;$3L{1%xiB zolj9AKU1$hDr>6iA<@W{8KNrVMg$TUCkdJ0upMV3&HQn?^r}S)VeaFUv!XUX_4cKu z@=5f*bX&FgK7>L|ZDNOTRABciBEsY zV91i`*N~|67J1VMSmWbw_CO%kL&7@JZn%V+S9JJEq(YJnaLvauny}2(REpyuw~548 zB-#x#iF4kBM0%VZkntVg4bmS%4E5<*FDN2wK82BR(esbavLv0w(%zZUW4_BB0zH z16`iYfXYMwnXg1n0v~TAt|PfR<2Y^nJMJT0L;~~i-4}d=IRt({gPusQj|T_B%JF9i znLCLT5fuw>hEDFAVxd@bGEWtfp25DpD|t%I#m7y6@{i19=T#66%P%8Mgi!td3G;Pa z?)+=wL}3ia1O3;~kr5yFIpv>xH;jDyc>a(PL?XA>>*f^wXOnbZg?)5##W6v?EWhpJHJ6U;Uq%TZFn`4kobkt5AFAJ3~pgK6Y~hWWuG~o~E%O8v@xSWg_-+UzV?5I63OIYByT#0pyb3-U5Izod@UB)BTas309@z&yuT`re-Ij zQrOdj-`O)#Zli?{bTt#jz|P?&T?ys_prC=buQjgm1nB3Ysv62WbjexuUPpL&a~C*f z+|xiz*@*_T1!W+HHCfrvvQdinCy@F!=DJKoELu6NN&Pb3*9h6%3VXB&@rZ_wPTz&} z{{8z9Q#!tWwF6Bk_Pnk@1$E+ASBML#ep&gZIki%l%Mc|9G#lPx~%S=|)!E^FpZbRf?=ynyz95 zH+pqj_T_>3gp5%v$8n<1*lW4s!wtq1DO#MmoqckTgEb__?k1O&*$Dzh38j&=QR!4o>pRw=P?R`9Zrr~>gyNX~P6AjI)q@>|Ag&>H;z&(S~*)ulA zDnv3>*v8@o8fu>F{wjs%wJYf;Soaa|*4%ve9pAGarU^hd(mG@XQV;^RvGK9ksI3%i zvP;^kJ-TplWsVLuLb@(FO*R`{yXIq8N;|EBd`M4EKld%!*^PSkS$w>8*$@o!vGW}YK(YU({9zC4*$JvE%zGdz0?Av`vwXpq zD2RCV^7u#^K?7=MbT}w(S38o-BE`ld;n`C+$o&Oo17!m~&WUSTgf$a9H~o*OJywJX zf&k*3<>m&MA4MnJNPED3Y9+Kohdz)EF8uUi}%r?82}q)5w<`1A_F5MBa~2#jCa6V16~i45+iASP)WdxrB}Zwgwfg{ zkVOP1TF`Jd&cv5c(qp}rql+jc%aNSkQNm0y9R4(@-i9^ z`$z5=Xs6%3pyGJ`SxPAP-t}z{iW(>1Vk7)vD&`VAilXr*%&+Hir`(m zHVxo7!qvZ3r5X`KQ-9iB~^_%MNF0cog`UOdK_B7rwq zUDI86oYI-;9IeZA=0*IWb>IBxH`tU#h*Z$BAZ83hE%t<71Wa;o-oDM$P6cVNer7Q8 z0?8CO+{U~&u6}O~C1nf^2{Ed6d~jZ{TIV-eh}FF~QlbldG>KKj=cu75sY)c7YT)3X z8K;atSp4oZ<-P2Rg>BasooVd-dpfEcYxbMZ#j}1qsMx3vaz*^V{E_Z2f$j@#ZF$Hdr3y53K6O)FDr?F`II|)&XDe>x)*ELeUo^ya>QhmV z#W%Sc5rUQA)HEs#>huYfl8eDO7#WIZ^x3KHL)FqSb1>UjOV14h^#^3m*EvG_ClohLI8Z zz=AnXOf(EqGoufRcEA2?_kXj|Sf+IIW_B!Zp~1$NIP%av8P8^tqn)W4+(;jcBXZwB zNryrzP^KSyl z8)`6ugXC<34pAb#0k?4z=DJBk>k}QRyonrk_GHb9WN;$W$ZZ3+P}nZ`<1{-IT&X6X z{{*5<>US^VDA8;NdS?+vvD>dkgX7B%P)4D#iy&lG>!JZgB|+IjJ%O0GI9eJSD{w=S z!QLk!CkL|`-rq_;yEGC%^2w7W1}c%$KZp%vgJ#fZVp;*d%6`NMS8n-*p_}q|!v+f1 z8@Fu4*9#aviv(bhu7)=9mP|L|%o8k|rbkClRfsq2>rg#$U4AX@P)qiWW}xGCcn}#z z|Ad71{9;vZI~HpLdpOoO28NHRfPTaF0iPQ|Xh^p>-%{xPp4y>LGjH)ov4}$-NGTH2 zVXt9qhK!)F^f507P3X{yDe~A#`jt3!V`wQ}Sn}5u;*=_iA0tzF#{+WQ|ICo{Q?Ro7 zeRf^LLp+VU6HPXIJ1&8tLV34x1;uT%z%cjSQc+1s+-}gtxytpPsf0B4YJDfzX9bK5 z$~&pFrJW!Pllpk8Hdu61OmcE|sj9>#iY~Y-HV!TR+I_Te>SoyrX|mKiScN;C2T=#*Ctut#1 z1$*d_eU%+xeqajAO=Un+JOtta5a!~qT)}t=zW_k1W5#u8(W*mi38IVaM6*iDBb65) 
z^v;9-4=CE-tE;I{0)P{4hm_GiV5Ivr)`XhL82eNkm|*ULDgpPk23L}SfkC9zSF0o$ zDXfEWKOq7w-4CBYQU=4urBZh&fwaWOys-`0YP#72(2qS%a>v`;51zgtD&LpNb9KVVrFddUd(cHLx{kpPp za2t8l1H~FIYZfzM>SP@g#T56+$E_&bGxc2a1TvnR<~_j4a4Vc5*0?=|9JdAPW^ zjH^4LoP6`EQ1QA zu(&pB$*FTrfM3Ydl~0h>aE#vx7P@0bah~!7--?6v2mvrGieNoLkaPuN2t^>|KhWmC z_yobYfVobr`_03>lz$=EKVT}{{UUOw51fB-BjUV5fZT}z`(FUo8Sye5!$0_p7Lb>8 z@TqIZVE5mjAb=UU^q+Sog-P6K*^VjO|F{o;X}HfMJ9whLT|*%4y7u3z{@YJ_Vga$k zIR84q#2?}OMb2t8f9K{Oj~F07#33szqmp*)(#ilBcnp4(5WDsT;ys&&HzW9vH z+JCzE7h-E_Q=Y)`Fde495&&4s5%8@EuHxf+{1=}b^Va{G8kzrf0{&_2q>T{^aEybR z6jzlk6i{AXepcgEIarpyQ3e3MP}k6KcH`$ledbQh(JyQq(T#6XBA2;aVt^F+8_r*D zfingKmm0vZApZg20dMg;R65|Bm9e_9HD^GhkPqfw5orv9BeAHc2z+>i2O!h|{sVvV z`(#c6AZlO5#m%i4XpRHxNJvPylGn&2<}hkoG%$=H32lnj)>c(zWzAGsAIRW9*|CV( z#=jjAdJ-HQ+~41S!LkDbigo*Q+RaHPduxTT@&B5YeMtKF@#CZYv7?C6n;?8WB#u)Q zOXQc79C*I{5Eo$0mv7%>J=WhrT?_o*AMLAK7fCq5^Z~BCk(n4*fHj~kdETJD0mP<` z&NFyA#26krIXuXF;X>2o*i_cOzf$D@j`x6ofQpI=c&=bPGu8LEwH#=mKtr;6Oh1R` zEdcnQ0hSb)S6aElM2Ff6u&fDKlaP{LyhbrQJ&l8hXW-)5^b-hH#?Xn?RpiN$i&cks zO+1NlZlXNxCJrC7f4~DO!1{hg`}lye43^y5^$UIZY7%$bDcZ^pfZ!u-a$L#_H3Q|a z(ei)eRrP+B*yuUXmC{f?UmV}VHT&LE+}PCg^5sjQs9k~6j-ckHDP{f7$4}~NKRph= ze8stI^D!7k!L>&pO8GBplcgPa%|8S8qM4u_YTTmr2_YF-Eqdm0c=)!XHgfc0PE~$- zG6Y&(F=Sn^gcc8h5e-m@g}FJ%xA|AP<6=@$WWn&pBI60P48M%b2&~crPpL!x(GB|d zY%bAHo;(2oae8*Pvg+A&eqmv!mEj61E={l-)qsu&>%F)Vq0ZShbI#nt0>Ejt=hg$a zxEr1e%bR8HKkvl^c9TLSfY@5MkC&keoIV0Zm7><&Y(he)ys8zT1cIx0&qqPl1q;-2 zxpWH%o+CzD3-8Nrta%LGi}A|io0>sv#vpe}t&}$*kLZ+*xDMHsjo`t9O<*g_gko$Z zFH2PY$`u$n26zsx-?;Igw*XUX6L<vlg$36aAI4=q)A%v8@Ua>tqAt_8m_FZv0wFw#& z&x6*$UrqjaK+sG7)GNN*rzj|dV2=ZP^TnzN0qP^vvDZDNBpWnj-}Z9G{NM#k3p;x{ z@*D>LXn{7`2+R)PcDx0gHd91HM;I zQ>{RKp||Uf9=REDSiLDyMIJc<%J9T9{r~j3e1yG=SBsTv69vyEVqjcR(NfM+G!6V8 DhwtUa diff --git a/examples/turing_integration/binary_classification.jl b/examples/turing_integration/binary_classification.jl index f37c4107..487113be 100644 --- a/examples/turing_integration/binary_classification.jl +++ b/examples/turing_integration/binary_classification.jl @@ -35,24 +35,24 @@ test_x = Matrix(transpose(convert(Array, test[:, 4:end]))) # y|f ~ Bernoulli(sigmoid(f)) # This model is build using Turing.jl, please refer to https://turing.ml/dev/docs/using-turing/get-started for details. 
-σ(x) = T(1.0) / (T(1.0)+exp(-x)) +σ(x) = one(T) / (one(T)+exp(-x)) function build_gp(logl, σ², X) - ard_eq_kernel = σ² * stretch(EQ(), exp.(-logl)) - gp = GP(ard_eq_kernel, GPC()) - prior = gp(ColVecs(X), T(0.01)) - gp, prior + ard_eq_kernel = σ² * stretch(EQ(), exp.(-logl)) + gp = GP(ard_eq_kernel, GPC()) + prior = gp(ColVecs(X), T(0.01)) + gp, prior end # The Turing model used to estimate the posterior distribution, # the latent variable is f & the parameter is logl @model gpc_learn(X, y) = begin - logl ~ Normal(T(0.0), T(2.0)) - _, prior = build_gp(logl, T(1.0), X) - f ~ prior - for i in eachindex(y) - y[i] ~ Bernoulli(σ(f[i])) - end + logl ~ Normal(T(0.0), T(2.0)) + _, prior = build_gp(logl, T(1.0), X) + f ~ prior + for i in eachindex(y) + y[i] ~ Bernoulli(σ(f[i])) + end end # Function used to infer the label for newly inputs From 8f3d5990811ecde6a649b99057418cd06b248d28 Mon Sep 17 00:00:00 2001 From: hongbin <35309324+HamletWantToCode@users.noreply.github.com> Date: Wed, 26 Feb 2020 00:46:59 +0800 Subject: [PATCH 18/42] correct indentation --- .../simple_fnn/fit_step_function.jl | 30 +++++++++---------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/examples/flux_integration/simple_fnn/fit_step_function.jl b/examples/flux_integration/simple_fnn/fit_step_function.jl index 3683a6e8..67d159b0 100644 --- a/examples/flux_integration/simple_fnn/fit_step_function.jl +++ b/examples/flux_integration/simple_fnn/fit_step_function.jl @@ -13,8 +13,8 @@ T = Float64 # define the step function # step_func(x) = 0.0 if x<=0, 1.0 if x>0 function step_func(x) - ϵ=T(0.01)*randn(rng, T) - return x>zero(T) ? one(T) + ϵ : zero(T) + ϵ + ϵ=T(0.01)*randn(rng, T) + return x>zero(T) ? one(T) + ϵ : zero(T) + ϵ end # prepare data @@ -46,8 +46,8 @@ logl = randn(rng, T, 2) logγ = T[0.0] function build_gp(logl, logγ) - ard_eq_kernel = exp(T(2.0) * logγ[1]) * stretch(EQ(), exp.(-logl)) - return GP(T(0.0), ard_eq_kernel, GPC()) + ard_eq_kernel = exp(T(2.0) * logγ[1]) * stretch(EQ(), exp.(-logl)) + return GP(T(0.0), ard_eq_kernel, GPC()) end # Since we always assume our data to be noisy, we model this noise by λ, also in log-scale @@ -68,10 +68,10 @@ ps = Params([logl, logγ, logλ, θ_mlp...]) # literally just wraps a `Matrix` and tells Stheno to pretend is a vector of vectors. This # is helpful to remove some ambiguities that arise if you don't do this. function NLL(X, y) - Z = mlp(X) - gp = build_gp(logl, logγ) - gp_Z = gp(ColVecs(Z), exp(T(2.0)*logλ[1])) - return -logpdf(gp_Z, y) + Z = mlp(X) + gp = build_gp(logl, logγ) + gp_Z = gp(ColVecs(Z), exp(T(2.0)*logλ[1])) + return -logpdf(gp_Z, y) end @@ -83,12 +83,12 @@ train_data = (Xtrain, train_y) opt = ADAGrad() nlls = [] for i in 1:1500 - nll = NLL(train_data...) - push!(nlls, nll) - gs = gradient(()->NLL(train_data...), ps) - for p in ps - update!(opt, p, gs[p]) - end + nll = NLL(train_data...) 
+    push!(nlls, nll)
+    gs = gradient(()->NLL(train_data...), ps)
+    for p in ps
+        update!(opt, p, gs[p])
+    end
 end
 
 loss_plot = plot(xlabel="Epoches", ylabel="Negative log-likelihood", legend=false)
@@ -101,7 +101,7 @@ png(loss_plot, "loss.png")
 function predict(X, Xtrain, ytrain)
     Z = mlp(X); Ztrain = mlp(Xtrain)
     gp = build_gp(logl, logγ)
-    noisy_prior = gp(ColVecs(Ztrain), exp(T(2.0)*logλ[1]))
+    noisy_prior = gp(ColVecs(Ztrain), exp(T(2.0)*logλ[1]))
     posterior = gp | Obs(noisy_prior, ytrain)
     return posterior(ColVecs(Z))
 end

From d0bd5f81e6fe4349c0747dbbba18bd13e90d98a5 Mon Sep 17 00:00:00 2001
From: hongbin <35309324+HamletWantToCode@users.noreply.github.com>
Date: Wed, 26 Feb 2020 00:49:06 +0800
Subject: [PATCH 19/42] correct indentation

---
 .../binary_classification.jl                 | 24 +++++++++----------
 1 file changed, 12 insertions(+), 12 deletions(-)

diff --git a/examples/turing_integration/binary_classification.jl b/examples/turing_integration/binary_classification.jl
index 487113be..200d11f3 100644
--- a/examples/turing_integration/binary_classification.jl
+++ b/examples/turing_integration/binary_classification.jl
@@ -38,21 +38,21 @@ test_x = Matrix(transpose(convert(Array, test[:, 4:end])))
 σ(x) = one(T) / (one(T)+exp(-x))
 
 function build_gp(logl, σ², X)
-    ard_eq_kernel = σ² * stretch(EQ(), exp.(-logl))
-    gp = GP(ard_eq_kernel, GPC())
-    prior = gp(ColVecs(X), T(0.01))
-    gp, prior
+    ard_eq_kernel = σ² * stretch(EQ(), exp.(-logl))
+    gp = GP(ard_eq_kernel, GPC())
+    prior = gp(ColVecs(X), T(0.01))
+    gp, prior
 end
 
 # The Turing model used to estimate the posterior distribution,
 # the latent variable is f & the parameter is logl
 @model gpc_learn(X, y) = begin
-    logl ~ Normal(T(0.0), T(2.0))
-    _, prior = build_gp(logl, T(1.0), X)
-    f ~ prior
-    for i in eachindex(y)
+    logl ~ Normal(T(0.0), T(2.0))
+    _, prior = build_gp(logl, T(1.0), X)
+    f ~ prior
+    for i in eachindex(y)
         y[i] ~ Bernoulli(σ(f[i]))
-    end
+    end
 end
 
 # Function used to infer the label for newly inputs
@@ -61,14 +61,14 @@ function gpc_infer(x, logl, Xtrain, fsamples)
     nsamples = size(fsamples, 2)
     fxs = []
     for i in 1:nsamples
-        gp, prior = build_gp(logl[i], T(1.0), Xtrain)
+        gp, prior = build_gp(logl[i], T(1.0), Xtrain)
         conditioned_gp = gp | Obs(prior, fsamples[:, i])
         posterior = conditioned_gp(ColVecs(x))
         push!(fxs, mean.(marginals(posterior)))
     end
     fx_mean = vec(mean(hcat(fxs...), dims=2))
     p = σ.(fx_mean)
-    y = Int.(p .> T(0.5))
+    y = Int.(p .> T(0.5))
     y
 end
 
@@ -95,7 +95,7 @@ pred_y = gpc_infer(test_x, reserve_logl, train_x, reserve_fsamples)
 function accuracy(pred_y, y)
     N = length(y)
     N_neq = sum(abs.(pred_y .- y))
-    r = T(1.0) - N_neq / N
+    r = T(1.0) - N_neq / N
     r
 end

From 7bcf9e6353796a90dfe31349c5c1dacb02a16ca4 Mon Sep 17 00:00:00 2001
From: hongbinren
Date: Wed, 26 Feb 2020 12:59:20 +0800
Subject: [PATCH 20/42] add readme entries on new examples

---
 examples/README.md | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/examples/README.md b/examples/README.md
index 01da6662..4c153f7f 100644
--- a/examples/README.md
+++ b/examples/README.md
@@ -10,3 +10,5 @@ Below we provide a brief description of each of the sub-directories.
 - `pseudo_points`: covers inducing-point / sparse / pseudo-point approximations.
 - `basic_gppp`: basic toy examples of the functionality that we call Gaussian process Probabilistic Programming (GPPP).
 - `gppp_and_pseudo_points`: combine GPPP and pseudo-point approximations to do interesting things. This is a WIP -- it doesn't work properly yet.
+- `flux_integration`: demonstrates how Stheno.jl works with Flux.jl. Currently it contains an example that uses a Flux neural network as a feature extractor, with a GP doing regression on the extracted features; Stheno's power is not limited to this, and we are working on adding more functionality and more examples.
+- `turing_integration`: a binary classification example showing how Stheno.jl, with the help of Turing.jl, can be used on problems with non-Gaussian likelihoods.

From 84bc524783deb95e5f377cb7dfd54716ab32885c Mon Sep 17 00:00:00 2001
From: hongbinren
Date: Sun, 1 Mar 2020 00:31:56 +0800
Subject: [PATCH 21/42] add neural kernel network

---
 Project.toml                                  |   1 +
 .../neural_kernel_network/AirPassengers.csv   | 145 ++++++++++++++++++
 .../neural_kernel_network/Project.toml        |  12 ++
 .../neural_kernel_network/time_series.jl      | 123 +++++++++++++++
 src/Stheno.jl                                 |   1 +
 src/gp/kernel.jl                              |  80 +++++-----
 src/gp/neural_kernel_network.jl               |  83 ++++++++++
 7 files changed, 409 insertions(+), 36 deletions(-)
 create mode 100644 examples/flux_integration/neural_kernel_network/AirPassengers.csv
 create mode 100644 examples/flux_integration/neural_kernel_network/Project.toml
 create mode 100644 examples/flux_integration/neural_kernel_network/time_series.jl
 create mode 100644 src/gp/neural_kernel_network.jl

diff --git a/Project.toml b/Project.toml
index e76c7502..b5d48f56 100644
--- a/Project.toml
+++ b/Project.toml
@@ -7,6 +7,7 @@ BlockArrays = "8e7c35d0-a365-5155-bbbb-fb81a777f24e"
 Distances = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7"
 Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
 FillArrays = "1a297f60-69ca-5386-bcde-b61e274b549b"
+Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
 MacroTools = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09"
 Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
diff --git a/examples/flux_integration/neural_kernel_network/AirPassengers.csv b/examples/flux_integration/neural_kernel_network/AirPassengers.csv
new file mode 100644
index 00000000..f7ef8177
--- /dev/null
+++ b/examples/flux_integration/neural_kernel_network/AirPassengers.csv
@@ -0,0 +1,145 @@
+"","time","value"
+"1",1949,112
+"2",1949.08333333333,118
+"3",1949.16666666667,132
+"4",1949.25,129
+"5",1949.33333333333,121
+"6",1949.41666666667,135
+"7",1949.5,148
+"8",1949.58333333333,148
+"9",1949.66666666667,136
+"10",1949.75,119
+"11",1949.83333333333,104
+"12",1949.91666666667,118
+"13",1950,115
+"14",1950.08333333333,126
+"15",1950.16666666667,141
+"16",1950.25,135
+"17",1950.33333333333,125
+"18",1950.41666666667,149
+"19",1950.5,170
+"20",1950.58333333333,170
+"21",1950.66666666667,158
+"22",1950.75,133
+"23",1950.83333333333,114
+"24",1950.91666666667,140
+"25",1951,145
+"26",1951.08333333333,150
+"27",1951.16666666667,178
+"28",1951.25,163
+"29",1951.33333333333,172
+"30",1951.41666666667,178
+"31",1951.5,199
+"32",1951.58333333333,199
+"33",1951.66666666667,184
+"34",1951.75,162
+"35",1951.83333333333,146
+"36",1951.91666666667,166
+"37",1952,171
+"38",1952.08333333333,180
+"39",1952.16666666667,193
+"40",1952.25,181
+"41",1952.33333333333,183
+"42",1952.41666666667,218
+"43",1952.5,230
+"44",1952.58333333333,242
+"45",1952.66666666667,209
+"46",1952.75,191
+"47",1952.83333333333,172
+"48",1952.91666666667,194
+"49",1953,196
+"50",1953.08333333333,196
+"51",1953.16666666667,236
+"52",1953.25,235
+"53",1953.33333333333,229
+"54",1953.41666666667,243
+"55",1953.5,264
+"56",1953.58333333333,272
+"57",1953.66666666667,237
+"58",1953.75,211
+"59",1953.83333333333,180
+"60",1953.91666666667,201
+"61",1954,204
+"62",1954.08333333333,188
+"63",1954.16666666667,235
+"64",1954.25,227 +"65",1954.33333333333,234 +"66",1954.41666666667,264 +"67",1954.5,302 +"68",1954.58333333333,293 +"69",1954.66666666667,259 +"70",1954.75,229 +"71",1954.83333333333,203 +"72",1954.91666666667,229 +"73",1955,242 +"74",1955.08333333334,233 +"75",1955.16666666667,267 +"76",1955.25,269 +"77",1955.33333333334,270 +"78",1955.41666666667,315 +"79",1955.5,364 +"80",1955.58333333334,347 +"81",1955.66666666667,312 +"82",1955.75,274 +"83",1955.83333333334,237 +"84",1955.91666666667,278 +"85",1956,284 +"86",1956.08333333334,277 +"87",1956.16666666667,317 +"88",1956.25,313 +"89",1956.33333333334,318 +"90",1956.41666666667,374 +"91",1956.5,413 +"92",1956.58333333334,405 +"93",1956.66666666667,355 +"94",1956.75,306 +"95",1956.83333333334,271 +"96",1956.91666666667,306 +"97",1957,315 +"98",1957.08333333334,301 +"99",1957.16666666667,356 +"100",1957.25,348 +"101",1957.33333333334,355 +"102",1957.41666666667,422 +"103",1957.5,465 +"104",1957.58333333334,467 +"105",1957.66666666667,404 +"106",1957.75,347 +"107",1957.83333333334,305 +"108",1957.91666666667,336 +"109",1958,340 +"110",1958.08333333334,318 +"111",1958.16666666667,362 +"112",1958.25,348 +"113",1958.33333333334,363 +"114",1958.41666666667,435 +"115",1958.5,491 +"116",1958.58333333334,505 +"117",1958.66666666667,404 +"118",1958.75,359 +"119",1958.83333333334,310 +"120",1958.91666666667,337 +"121",1959,360 +"122",1959.08333333334,342 +"123",1959.16666666667,406 +"124",1959.25,396 +"125",1959.33333333334,420 +"126",1959.41666666667,472 +"127",1959.5,548 +"128",1959.58333333334,559 +"129",1959.66666666667,463 +"130",1959.75,407 +"131",1959.83333333334,362 +"132",1959.91666666667,405 +"133",1960,417 +"134",1960.08333333334,391 +"135",1960.16666666667,419 +"136",1960.25,461 +"137",1960.33333333334,472 +"138",1960.41666666667,535 +"139",1960.5,622 +"140",1960.58333333334,606 +"141",1960.66666666667,508 +"142",1960.75,461 +"143",1960.83333333334,390 +"144",1960.91666666667,432 diff --git a/examples/flux_integration/neural_kernel_network/Project.toml b/examples/flux_integration/neural_kernel_network/Project.toml new file mode 100644 index 00000000..12b07b0e --- /dev/null +++ b/examples/flux_integration/neural_kernel_network/Project.toml @@ -0,0 +1,12 @@ +[deps] +Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c" +Plots = "91a5bcdd-55d7-5caf-9e0b-520d859cae80" +Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" +Stheno = "8188c328-b5d6-583d-959b-9690869a5511" +Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f" + +[compat] +Flux = "0.10" +Stheno = "0.6" +Zygote = "0.4.6" +julia = "1" diff --git a/examples/flux_integration/neural_kernel_network/time_series.jl b/examples/flux_integration/neural_kernel_network/time_series.jl new file mode 100644 index 00000000..2a5bed4a --- /dev/null +++ b/examples/flux_integration/neural_kernel_network/time_series.jl @@ -0,0 +1,123 @@ +# Set up the environment to run this example. Make sure you're within the folder that this +# file lives in. 
+using Pkg +Pkg.activate(@__DIR__) +Pkg.instantiate() + + +using Stheno +using Plots; pyplot(); +using Random; Random.seed!(4); +using Flux +using Zygote +using DelimitedFiles +using Statistics + + +# read AirPass data +data = readdlm("AirPassengers.csv", ',') +year = data[2:end,2]; passengers = data[2:end,3]; +# Split the data into training and testing data +oxtrain = year[year.<1958]; oytrain = passengers[year.<1958]; +oxtest = year[year.>=1958]; oytest = passengers[year.>=1958]; + +#data preprocessing +## standardize X and y +xtrain_mean = mean(oxtrain) +ytrain_mean = mean(oytrain) +xtrain_std = std(oxtrain) +ytrain_std = std(oytrain) +xtrain = @. (oxtrain-xtrain_mean)/xtrain_std +ytrain = @. (oytrain-ytrain_mean)/ytrain_std + +xtest = @. (oxtest-xtrain_mean)/xtrain_std +ytest = @. (oytest-ytrain_mean)/ytrain_std + +## input data +Xtrain = reshape(xtrain, 1, length(xtrain)); +Xtest = reshape(xtest, 1, length(xtest)); +Year = hcat(Xtrain, Xtest); +Passengers = vcat(ytrain, ytest) + + +# kernel parameter initialization +function median_distance_local(x) + n = length(x) + dist = [] + for i in 1:n + for j in i:n + push!(dist, abs(x[j]-x[i])) + end + end + median(dist) +end +l = median_distance_local(xtrain) + + + +# construct kernels +iso_lin_kernel1 = stretch(Linear(), [0.0]) +iso_per_kernel1 = [log(1.0)] * stretch(PerEQ([log(l)]), [log(l)]) +iso_eq_kernel1 = [log(1.0)] * stretch(EQ(), [log(l/4.0)]) +iso_rq_kernel1 = [log(1.0)] * stretch(RQ([log(0.2)]), [log(2.0*l)]) +iso_lin_kernel2 = stretch(Linear(), [0.0]) +iso_rq_kernel2 = [log(1.0)] * stretch(RQ([log(0.1)]), [log(l)]) +iso_eq_kernel2 = [log(1.0)] * stretch(EQ(), [log(l)]) +iso_per_kernel2 = [log(1.0)] * stretch(PerEQ([log(l/4.0)]), [log(l/4.0)]) + + +# sum product network +linear1 = LinearLayer(8, 8) +linear2 = LinearLayer(4, 4) +linear3 = LinearLayer(2, 1) + +# NKN +player = Primitive(iso_lin_kernel1, iso_per_kernel1, iso_eq_kernel1, iso_rq_kernel1, + iso_lin_kernel2, iso_rq_kernel2, iso_eq_kernel2, iso_per_kernel2) +nn = Chain(linear1, Product, linear2, Product, linear3) +nkn = NeuralKernelNetwork(player, nn) + + +# build GP model +σ²_n = 0.1 +gp = GP(nkn, GPC()) +gp_Xtrain = gp(ColVecs(Xtrain), σ²_n) +ps = params(nkn) + +# optimize +using Flux.Optimise: update! + +optimizer = ADAM(0.001) +loss = [] +for i in 1:5000 + ll = .-logpdf(gp_Xtrain, ytrain) + push!(loss, ll) + if i==1 || i%100 == 0 + @info "step=$i, loss=$ll" + end + gs = gradient(()->.-logpdf(gp_Xtrain, ytrain), ps) + for p in ps + update!(optimizer, p, gs[p]) + end +end + +display(plot(loss)) + + +# predict +function predict(X, Xtrain, ytrain) + noisy_prior = gp(ColVecs(Xtrain), σ²_n) + posterior = gp | Obs(noisy_prior, ytrain) + posterior(ColVecs(X)) +end + +posterior = predict(Year, Xtrain, ytrain) +pred_y = mean(posterior) +pred_oy = @. pred_y*ytrain_std+ytrain_mean + +plt = plot(xlabel="Year", ylabel="Airline Passenger number", legend=true) +plot!(plt, year, pred_oy, title="Time series prediction",label="95% predictive confidence region") +scatter!(plt, oxtest, oytest, label="Observations(test)", color=:red) +scatter!(plt, oxtrain, oytrain, label="Observations(train)", color=:black) +display(plt) + diff --git a/src/Stheno.jl b/src/Stheno.jl index 5f1076bb..54317fa1 100644 --- a/src/Stheno.jl +++ b/src/Stheno.jl @@ -41,6 +41,7 @@ module Stheno # Atomic GP objects. 
include(joinpath("gp", "mean.jl")) include(joinpath("gp", "kernel.jl")) + include(joinpath("gp", "neural_kernel_network.jl")) include(joinpath("gp", "gp.jl")) # Composite GPs, constructed via affine transformation of CompositeGPs and GPs. diff --git a/src/gp/kernel.jl b/src/gp/kernel.jl index e438d84f..acfc5052 100644 --- a/src/gp/kernel.jl +++ b/src/gp/kernel.jl @@ -2,6 +2,8 @@ import Base: +, *, zero, cos using Distances: sqeuclidean, SqEuclidean, Euclidean using Base.Broadcast: broadcast_shape using LinearAlgebra: isposdef, checksquare +using Flux +using Flux: @functor abstract type Kernel end @@ -9,7 +11,7 @@ abstract type Kernel end export Kernel, kernel, elementwise, pairwise, ew, pw # Kernel exports -export EQ, Exp, Matern12, Matern32, Matern52, RQ, Cosine, Linear, Poly, GammaExp, Wiener, +export EQ, Exp, PerEQ, Matern12, Matern32, Matern52, RQ, Cosine, Linear, Poly, GammaExp, Wiener, WienerVelocity, Precomputed @@ -106,15 +108,20 @@ The usual periodic kernel derived by mapping the input domain onto the unit circ For length scales etc see [`stretch`](@ref), for variance see [`*`](@ref). """ -struct PerEQ <: Kernel end +struct PerEQ{LT<:AV{<:Real}} <: Kernel + logl::LT +end +@functor PerEQ + +_pereq(d, logl) = exp(-2.0*sin(π*d)^2 / exp(2.0*logl)) # Binary methods. -ew(k::PerEQ, x::AV{<:Real}, x′::AV{<:Real}) = exp.(.-2 .* sin.(π .* abs.(x .- x′)).^2) -pw(k::PerEQ, x::AV{<:Real}, x′::AV{<:Real}) = exp.(.-2 .* sin.(π .* abs.(x .- x′')).^2) +ew(k::PerEQ, x::AV, x′::AV) = _pereq.(ew(Euclidean(), x, x′), k.logl[1]) +pw(k::PerEQ, x::AV, x′::AV) = _pereq.(pw(Euclidean(), x, x′), k.logl[1]) # Unary methods. -ew(::PerEQ, x::AV{<:Real}) = ones(eltype(x), length(x)) -pw(k::PerEQ, x::AV{<:Real}) = pw(k, x, x) +ew(k::PerEQ, x::AV) = _pereq.(ew(Euclidean(), x), k.logl[1]) +pw(k::PerEQ, x::AV) = _pereq.(pw(Euclidean(), x), k.logl[1]) @@ -213,19 +220,20 @@ The standardised Rational Quadratic, with kurtosis `α`. For length scales etc see [`stretch`](@ref), for variance see [`*`](@ref). """ -struct RQ{Tα<:Real} <: Kernel - α::Tα +struct RQ{Tα<:AV{<:Real}} <: Kernel + logα::Tα end +@functor RQ _rq(d, α) = (1 + d / (2α))^(-α) # Binary methods. -ew(k::RQ, x::AV, x′::AV) = _rq.(ew(SqEuclidean(), x, x′), k.α) -pw(k::RQ, x::AV, x′::AV) = _rq.(pw(SqEuclidean(), x, x′), k.α) +ew(k::RQ, x::AV, x′::AV) = _rq.(ew(SqEuclidean(), x, x′), exp(k.logα[1])) +pw(k::RQ, x::AV, x′::AV) = _rq.(pw(SqEuclidean(), x, x′), exp(k.logα[1])) # Unary methods. -ew(k::RQ, x::AV) = _rq.(ew(SqEuclidean(), x), k.α) -pw(k::RQ, x::AV) = _rq.(pw(SqEuclidean(), x), k.α) +ew(k::RQ, x::AV) = _rq.(ew(SqEuclidean(), x), exp(k.logα[1])) +pw(k::RQ, x::AV) = _rq.(pw(SqEuclidean(), x), exp(k.logα[1])) @@ -487,11 +495,11 @@ pw(k::Product, x::AV) = pw(k.kl, x) .* pw(k.kr, x) Scale the variance of `Kernel` `k` by `σ²` s.t. `(σ² * k)(x, x′) = σ² * k(x, x′)`. """ -struct Scaled{Tσ²<:Real, Tk<:Kernel} <: Kernel - σ²::Tσ² +struct Scaled{Tσ²<:AV{<:Real}, Tk<:Kernel} <: Kernel + logσ²::Tσ² k::Tk end - +@functor Scaled """ *(σ²::Real, k::Kernel) *(k::Kernel, σ²::Real) @@ -507,16 +515,16 @@ julia> pw(0.5 * k, x) == 0.5 .* Stheno.pw(k, x) true ``` """ -*(σ²::Real, k::Kernel) = Scaled(σ², k) -*(k::Kernel, σ²) = σ² * k +*(logσ²::AV{<:Real}, k::Kernel) = Scaled(logσ², k) +*(k::Kernel, logσ²) = logσ² * k # Binary methods. 
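# A sketch of the log-scale convention being introduced here (illustrative, assuming
# some vector of inputs `x`): the docstring example above would now be written as
#
#     k_scaled = [log(0.5)] * EQ()    # Scaled kernel with σ² = exp(log(0.5)) = 0.5
#     pw(k_scaled, x) ≈ 0.5 .* pw(EQ(), x)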
-ew(k::Scaled, x::AV, x′::AV) = k.σ² .* ew(k.k, x, x′) -pw(k::Scaled, x::AV, x′::AV) = k.σ² .* pw(k.k, x, x′) +ew(k::Scaled, x::AV, x′::AV) = exp(k.logσ²[1]) .* ew(k.k, x, x′) +pw(k::Scaled, x::AV, x′::AV) = exp(k.logσ²[1]) .* pw(k.k, x, x′) # Unary methods. -ew(k::Scaled, x::AV) = k.σ² .* ew(k.k, x) -pw(k::Scaled, x::AV) = k.σ² .* pw(k.k, x) +ew(k::Scaled, x::AV) = exp(k.logσ²[1]) .* ew(k.k, x) +pw(k::Scaled, x::AV) = exp(k.logσ²[1]) .* pw(k.k, x) @@ -526,10 +534,10 @@ pw(k::Scaled, x::AV) = k.σ² .* pw(k.k, x) Apply a length scale to a kernel. Specifically, `k(x, x′) = k(a * x, a * x′)`. """ struct Stretched{Ta<:Union{Real, AV{<:Real}, AM{<:Real}}, Tk<:Kernel} <: Kernel - a::Ta + loga::Ta k::Tk end - +@functor Stretched """ stretch(k::Kernel, a::Union{Real, AbstractVecOrMat{<:Real}) @@ -608,39 +616,39 @@ K = pairwise(k, xs, ys) 1.40202e-8 0.293658 0.0808585 ``` """ -stretch(k::Kernel, a::Union{Real, AbstractVecOrMat{<:Real}}) = Stretched(a, k) +stretch(k::Kernel, loga::Union{Real, AbstractVecOrMat{<:Real}}) = Stretched(loga, k) # Binary methods (scalar `a`, scalar-valued input) -ew(k::Stretched{<:Real}, x::AV{<:Real}, x′::AV{<:Real}) = ew(k.k, k.a .* x, k.a .* x′) -pw(k::Stretched{<:Real}, x::AV{<:Real}, x′::AV{<:Real}) = pw(k.k, k.a .* x, k.a .* x′) +ew(k::Stretched{<:Real}, x::AV{<:Real}, x′::AV{<:Real}) = ew(k.k, exp.(k.loga) .* x, exp.(k.loga) .* x′) +pw(k::Stretched{<:Real}, x::AV{<:Real}, x′::AV{<:Real}) = pw(k.k, exp.(k.loga) .* x, exp.(k.loga) .* x′) # Unary methods (scalar) -ew(k::Stretched{<:Real}, x::AV{<:Real}) = ew(k.k, k.a .* x) -pw(k::Stretched{<:Real}, x::AV{<:Real}) = pw(k.k, k.a .* x) +ew(k::Stretched{<:Real}, x::AV{<:Real}) = ew(k.k, exp.(k.loga) .* x) +pw(k::Stretched{<:Real}, x::AV{<:Real}) = pw(k.k, exp.(k.loga) .* x) # Binary methods (scalar and vector `a`, vector-valued input) function ew(k::Stretched{<:Union{Real, AV{<:Real}}}, x::ColVecs, x′::ColVecs) - return ew(k.k, ColVecs(k.a .* x.X), ColVecs(k.a .* x′.X)) + return ew(k.k, ColVecs(exp.(k.loga) .* x.X), ColVecs(exp.(k.loga) .* x′.X)) end function pw(k::Stretched{<:Union{Real, AV{<:Real}}}, x::ColVecs, x′::ColVecs) - return pw(k.k, ColVecs(k.a .* x.X), ColVecs(k.a .* x′.X)) + return pw(k.k, ColVecs(exp.(k.loga) .* x.X), ColVecs(exp.(k.loga) .* x′.X)) end # Unary methods (scalar and vector `a`, vector-valued input) -ew(k::Stretched{<:Union{Real, AV{<:Real}}}, x::ColVecs) = ew(k.k, ColVecs(k.a .* x.X)) -pw(k::Stretched{<:Union{Real, AV{<:Real}}}, x::ColVecs) = pw(k.k, ColVecs(k.a .* x.X)) +ew(k::Stretched{<:Union{Real, AV{<:Real}}}, x::ColVecs) = ew(k.k, ColVecs(exp.(k.loga) .* x.X)) +pw(k::Stretched{<:Union{Real, AV{<:Real}}}, x::ColVecs) = pw(k.k, ColVecs(exp.(k.loga) .* x.X)) # Binary methods (matrix `a`, vector-valued input) function ew(k::Stretched{<:AM{<:Real}}, x::ColVecs, x′::ColVecs) - return ew(k.k, ColVecs(k.a * x.X), ColVecs(k.a * x′.X)) + return ew(k.k, ColVecs(exp.(k.loga) * x.X), ColVecs(exp.(k.loga) * x′.X)) end function pw(k::Stretched{<:AM{<:Real}}, x::ColVecs, x′::ColVecs) - return pw(k.k, ColVecs(k.a * x.X), ColVecs(k.a * x′.X)) + return pw(k.k, ColVecs(exp.(k.loga) * x.X), ColVecs(exp.(k.loga) * x′.X)) end # Unary methods (scalar and vector `a`, vector-valued input) -ew(k::Stretched{<:AM{<:Real}}, x::ColVecs) = ew(k.k, ColVecs(k.a * x.X)) -pw(k::Stretched{<:AM{<:Real}}, x::ColVecs) = pw(k.k, ColVecs(k.a * x.X)) +ew(k::Stretched{<:AM{<:Real}}, x::ColVecs) = ew(k.k, ColVecs(exp.(k.loga) * x.X)) +pw(k::Stretched{<:AM{<:Real}}, x::ColVecs) = pw(k.k, ColVecs(exp.(k.loga) * x.X)) diff --git 
a/src/gp/neural_kernel_network.jl b/src/gp/neural_kernel_network.jl
new file mode 100644
index 00000000..96a32ed2
--- /dev/null
+++ b/src/gp/neural_kernel_network.jl
@@ -0,0 +1,83 @@
+export LinearLayer, Product, Primitive, NeuralKernelNetwork, ew, pw
+
+using Flux
+using Flux: softplus, @functor
+import Flux: functor
+
+
+# Linear layer: performs a linear transformation of the input array,
+# x₁ = W * x₀
+struct LinearLayer{T, MT<:AbstractArray{T}}
+    W::MT
+end
+@functor LinearLayer
+LinearLayer(in_dim, out_dim) = LinearLayer(randn(out_dim, in_dim))
+(lin::LinearLayer)(x) = softplus.(lin.W) * x
+
+function Base.show(io::IO, layer::LinearLayer)
+    print(io, "LinearLayer(", size(layer.W, 2), ", ", size(layer.W, 1), ")")
+end
+
+
+# Product function, given an 2d array whose size is M×N, product layer will
+# multiply every m neighboring rows of the array elementwisely to obtain
+# an new array of size (M÷m)×N
+function Product(x, step=2)
+    m, n = size(x)
+    m%step == 0 || error("the first dimension of inputs must be multiple of step")
+    new_x = reshape(x, step, m÷step, n)
+    .*([new_x[i, :, :] for i in 1:step]...)
+end
+
+
+# Primitive layer: mainly acts as a container holding the basic kernels for the neural kernel network
+struct Primitive{T}
+    kernels::T
+    Primitive(ks...) = new{typeof(ks)}(ks)
+end
+functor(p::Primitive) = p.kernels, ks -> Primitive(ks...)
+
+# flatten k kernel matrices of size Mk×Nk, and concatenate these 1d arrays into a k×(Mk*Nk) 2d array
+_cat_kernel_array(x) = vcat([reshape(x[i], 1, :) for i in 1:length(x)]...)
+
+# NOTE: though we implement `ew` & `pw` for Primitive, it isn't a subtype of the Kernel type;
+# this is done because it facilitates writing NeuralKernelNetwork
+ew(p::Primitive, x) = _cat_kernel_array(map(k->ew(k, x), p.kernels))
+pw(p::Primitive, x) = _cat_kernel_array(map(k->pw(k, x), p.kernels))
+
+ew(p::Primitive, x, x′) = _cat_kernel_array(map(k->ew(k, x, x′), p.kernels))
+pw(p::Primitive, x, x′) = _cat_kernel_array(map(k->pw(k, x, x′), p.kernels))
+
+function Base.show(io::IO, layer::Primitive)
+    print(io, "Primitive(")
+    join(io, layer.kernels, ", ")
+    print(io, ")")
+end
+
+
+# Neural Kernel Network: since kernel space (for stationary kernels) is closed under linear combination
+# (with positive coefficients) and element-wise multiplication, we can use a neural-network-like structure
+# to build composite kernels. This type contains a `Primitive` layer which holds basic kernels and a specialised
+# neural network architecture to perform kernel composition. It should function like a normal `Stheno` kernel.
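# To make the composition concrete, a minimal sketch of intended usage; the kernel
# choices and layer sizes below are illustrative assumptions, not code from this patch:
#
#     player = Primitive(EQ(), Linear())                      # two basic kernels
#     nkn = NeuralKernelNetwork(player, Chain(LinearLayer(2, 1)))
#     K = pw(nkn, x)    # an n×n matrix: a softplus-weighted, hence positive,
#                       # combination of pw(EQ(), x) and pw(Linear(), x)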
+struct NeuralKernelNetwork{PT, CT} <: Kernel + player::PT + chain::CT +end +@functor NeuralKernelNetwork + +# use this function to reshape the 1d array back to kernel matrix +_rebuild_kernel(x, n, m) = reshape(x, n, m) + +ew(nkn::NeuralKernelNetwork, x) = nkn.chain(ew(nkn.player, x)) +pw(nkn::NeuralKernelNetwork, x) = _rebuild_kernel(nkn.chain(pw(nkn.player, x)), length(x), length(x)) + +ew(nkn::NeuralKernelNetwork, x, x′) = nkn.chain(ew(nkn.player, x, x′)) +pw(nkn::NeuralKernelNetwork, x, x′) = _rebuild_kernel(nkn.chain(pw(nkn.player, x, x′)), length(x), length(x′)) + +function Base.show(io::IO, kernel::NeuralKernelNetwork) + print(io, "NeuralKernelNetwork(") + join(io, [kernel.player, kernel.chain], ", ") + print(io, ")") +end + + From 07f4291a3e1ccb674b28f39801ec908e01f971a0 Mon Sep 17 00:00:00 2001 From: hongbin <35309324+HamletWantToCode@users.noreply.github.com> Date: Sun, 1 Mar 2020 00:43:41 +0800 Subject: [PATCH 22/42] correct indentation --- src/gp/kernel.jl | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/gp/kernel.jl b/src/gp/kernel.jl index acfc5052..1078c7b5 100644 --- a/src/gp/kernel.jl +++ b/src/gp/kernel.jl @@ -628,10 +628,10 @@ pw(k::Stretched{<:Real}, x::AV{<:Real}) = pw(k.k, exp.(k.loga) .* x) # Binary methods (scalar and vector `a`, vector-valued input) function ew(k::Stretched{<:Union{Real, AV{<:Real}}}, x::ColVecs, x′::ColVecs) - return ew(k.k, ColVecs(exp.(k.loga) .* x.X), ColVecs(exp.(k.loga) .* x′.X)) + return ew(k.k, ColVecs(exp.(k.loga) .* x.X), ColVecs(exp.(k.loga) .* x′.X)) end function pw(k::Stretched{<:Union{Real, AV{<:Real}}}, x::ColVecs, x′::ColVecs) - return pw(k.k, ColVecs(exp.(k.loga) .* x.X), ColVecs(exp.(k.loga) .* x′.X)) + return pw(k.k, ColVecs(exp.(k.loga) .* x.X), ColVecs(exp.(k.loga) .* x′.X)) end # Unary methods (scalar and vector `a`, vector-valued input) @@ -640,10 +640,10 @@ pw(k::Stretched{<:Union{Real, AV{<:Real}}}, x::ColVecs) = pw(k.k, ColVecs(exp.(k # Binary methods (matrix `a`, vector-valued input) function ew(k::Stretched{<:AM{<:Real}}, x::ColVecs, x′::ColVecs) - return ew(k.k, ColVecs(exp.(k.loga) * x.X), ColVecs(exp.(k.loga) * x′.X)) + return ew(k.k, ColVecs(exp.(k.loga) * x.X), ColVecs(exp.(k.loga) * x′.X)) end function pw(k::Stretched{<:AM{<:Real}}, x::ColVecs, x′::ColVecs) - return pw(k.k, ColVecs(exp.(k.loga) * x.X), ColVecs(exp.(k.loga) * x′.X)) + return pw(k.k, ColVecs(exp.(k.loga) * x.X), ColVecs(exp.(k.loga) * x′.X)) end # Unary methods (scalar and vector `a`, vector-valued input) From 7f6bb36770f71fcea211495f1e4461b35d5d5e22 Mon Sep 17 00:00:00 2001 From: hongbin <35309324+HamletWantToCode@users.noreply.github.com> Date: Sun, 1 Mar 2020 00:44:46 +0800 Subject: [PATCH 23/42] Update neural_kernel_network.jl --- src/gp/neural_kernel_network.jl | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/src/gp/neural_kernel_network.jl b/src/gp/neural_kernel_network.jl index 96a32ed2..5f7281ce 100644 --- a/src/gp/neural_kernel_network.jl +++ b/src/gp/neural_kernel_network.jl @@ -15,7 +15,7 @@ LinearLayer(in_dim, out_dim) = LinearLayer(randn(out_dim, in_dim)) (lin::LinearLayer)(x) = softplus.(lin.W) * x function Base.show(io::IO, layer::LinearLayer) - print(io, "LinearLayer(", size(layer.W, 2), ", ", size(layer.W, 1), ")") + print(io, "LinearLayer(", size(layer.W, 2), ", ", size(layer.W, 1), ")") end @@ -23,10 +23,10 @@ end # multiply every m neighboring rows of the array elementwisely to obtain # an new array of size (M÷m)×N function Product(x, step=2) - m, n = size(x) - 
m%step == 0 || error("the first dimension of inputs must be multiple of step")
-    new_x = reshape(x, step, m÷step, n)
-    .*([new_x[i, :, :] for i in 1:step]...)
+    m, n = size(x)
+    m%step == 0 || error("the first dimension of inputs must be multiple of step")
+    new_x = reshape(x, step, m÷step, n)
+    .*([new_x[i, :, :] for i in 1:step]...)
 end
 
 
@@ -49,9 +49,9 @@ ew(p::Primitive, x, x′) = _cat_kernel_array(map(k->ew(k, x, x′), p.kernels))
 pw(p::Primitive, x, x′) = _cat_kernel_array(map(k->pw(k, x, x′), p.kernels))
 
 function Base.show(io::IO, layer::Primitive)
-    print(io, "Primitive(")
-    join(io, layer.kernels, ", ")
-    print(io, ")")
+    print(io, "Primitive(")
+    join(io, layer.kernels, ", ")
+    print(io, ")")
 end
 
@@ -75,9 +75,9 @@ ew(nkn::NeuralKernelNetwork, x, x′) = nkn.chain(ew(nkn.player, x, x′))
 pw(nkn::NeuralKernelNetwork, x, x′) = _rebuild_kernel(nkn.chain(pw(nkn.player, x, x′)), length(x), length(x′))
 
 function Base.show(io::IO, kernel::NeuralKernelNetwork)
-    print(io, "NeuralKernelNetwork(")
-    join(io, [kernel.player, kernel.chain], ", ")
-    print(io, ")")
+    print(io, "NeuralKernelNetwork(")
+    join(io, [kernel.player, kernel.chain], ", ")
+    print(io, ")")
 end
 

From 3e1c2d6b8e1cd3c9a629b94376655c19c6ed5d82 Mon Sep 17 00:00:00 2001
From: hongbinren
Date: Sun, 1 Mar 2020 21:20:20 +0800
Subject: [PATCH 24/42] update, fix NKN's ew method, modify parameter's type of
 some kernel in kernels.jl

---
 src/gp/kernel.jl                | 59 +++++++++++++++++++++------------
 src/gp/neural_kernel_network.jl |  7 ++--
 2 files changed, 42 insertions(+), 24 deletions(-)

diff --git a/src/gp/kernel.jl b/src/gp/kernel.jl
index acfc5052..a01f0be0 100644
--- a/src/gp/kernel.jl
+++ b/src/gp/kernel.jl
@@ -108,20 +108,31 @@ The usual periodic kernel derived by mapping the input domain onto the unit circ
 
 For length scales etc see [`stretch`](@ref), for variance see [`*`](@ref).
 """
+# The type of the parameter l is changed to `AbstractVector`, since Flux's `params` method
+# requires the fields of a type to be arrays. I also use a log scale for the scale and stretch
+# parameters: these parameters should remain positive during optimization, and working on the
+# log scale safely removes that constraint (this is also adopted in GaussianProcesses.jl).
+#
+# NOTE: The above implementations break Stheno's current convention, but we could discuss
+# it later to find out whether we should use Flux's `params` method or write our own, and
+# how to deal with the constraints.
+#
+# NOTE: It seems that the current `PerEQ` kernel doesn't work properly (I noticed it isn't exported yet),
+# so I reimplemented `PerEQ`.
 struct PerEQ{LT<:AV{<:Real}} <: Kernel
     logl::LT
 end
 @functor PerEQ
 
-_pereq(d, logl) = exp(-2.0*sin(π*d)^2 / exp(2.0*logl))
+_pereq(d, l) = exp(-2.0*sin(π*d)^2 / l^2)
 
 # Binary methods.
-ew(k::PerEQ, x::AV, x′::AV) = _pereq.(ew(Euclidean(), x, x′), k.logl[1])
-pw(k::PerEQ, x::AV, x′::AV) = _pereq.(pw(Euclidean(), x, x′), k.logl[1])
+ew(k::PerEQ, x::AV, x′::AV) = _pereq.(ew(Euclidean(), x, x′), exp(k.logl[1]))
+pw(k::PerEQ, x::AV, x′::AV) = _pereq.(pw(Euclidean(), x, x′), exp(k.logl[1]))
 
 # Unary methods.
-ew(k::PerEQ, x::AV) = _pereq.(ew(Euclidean(), x), k.logl[1])
-pw(k::PerEQ, x::AV) = _pereq.(pw(Euclidean(), x), k.logl[1])
+ew(k::PerEQ, x::AV) = _pereq.(ew(Euclidean(), x), exp(k.logl[1]))
+pw(k::PerEQ, x::AV) = _pereq.(pw(Euclidean(), x), exp(k.logl[1]))
 
 
 
@@ -220,6 +231,7 @@ The standardised Rational Quadratic, with kurtosis `α`.
 
 For length scales etc see [`stretch`](@ref), for variance see [`*`](@ref).
 """
+# α is also in log scale and accepts an AV type !!!
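# For example, under this convention (an illustrative sketch, not part of the patch):
#
#     k = RQ([log(1.5)])    # kurtosis α = exp(log(1.5)) = 1.5; the `ew`/`pw`
#                           # methods below recover it via exp(k.logα[1])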
struct RQ{Tα<:AV{<:Real}} <: Kernel logα::Tα end @@ -491,7 +503,7 @@ pw(k::Product, x::AV) = pw(k.kl, x) .* pw(k.kr, x) """ - Scaled{Tσ²<:Real, Tk<:Kernel} <: Kernel +Scaled{Tσ²<:AV{<:Real}, Tk<:Kernel} <: Kernel Scale the variance of `Kernel` `k` by `σ²` s.t. `(σ² * k)(x, x′) = σ² * k(x, x′)`. """ @@ -515,6 +527,7 @@ julia> pw(0.5 * k, x) == 0.5 .* Stheno.pw(k, x) true ``` """ +# NOTE: σ² is in log scale !!! *(logσ²::AV{<:Real}, k::Kernel) = Scaled(logσ², k) *(k::Kernel, logσ²) = logσ² * k @@ -533,7 +546,8 @@ pw(k::Scaled, x::AV) = exp(k.logσ²[1]) .* pw(k.k, x) Apply a length scale to a kernel. Specifically, `k(x, x′) = k(a * x, a * x′)`. """ -struct Stretched{Ta<:Union{Real, AV{<:Real}, AM{<:Real}}, Tk<:Kernel} <: Kernel +# NOTE: Real type is removed in Union !!! +struct Stretched{Ta<:Union{AV{<:Real}, AM{<:Real}}, Tk<:Kernel} <: Kernel loga::Ta k::Tk end @@ -616,39 +630,40 @@ K = pairwise(k, xs, ys) 1.40202e-8 0.293658 0.0808585 ``` """ -stretch(k::Kernel, loga::Union{Real, AbstractVecOrMat{<:Real}}) = Stretched(loga, k) +stretch(k::Kernel, loga::AbstractVecOrMat{<:Real}) = Stretched(loga, k) +# NOTE: `a` is not scalar any more !!! # Binary methods (scalar `a`, scalar-valued input) -ew(k::Stretched{<:Real}, x::AV{<:Real}, x′::AV{<:Real}) = ew(k.k, exp.(k.loga) .* x, exp.(k.loga) .* x′) -pw(k::Stretched{<:Real}, x::AV{<:Real}, x′::AV{<:Real}) = pw(k.k, exp.(k.loga) .* x, exp.(k.loga) .* x′) +ew(k::Stretched{<:AV{<:Real}}, x::AV{<:Real}, x′::AV{<:Real}) = ew(k.k, exp.(-k.loga) .* x, exp.(-k.loga) .* x′) +pw(k::Stretched{<:AV{<:Real}}, x::AV{<:Real}, x′::AV{<:Real}) = pw(k.k, exp.(-k.loga) .* x, exp.(-k.loga) .* x′) # Unary methods (scalar) -ew(k::Stretched{<:Real}, x::AV{<:Real}) = ew(k.k, exp.(k.loga) .* x) -pw(k::Stretched{<:Real}, x::AV{<:Real}) = pw(k.k, exp.(k.loga) .* x) +ew(k::Stretched{<:AV{<:Real}}, x::AV{<:Real}) = ew(k.k, exp.(-k.loga) .* x) +pw(k::Stretched{<:AV{<:Real}}, x::AV{<:Real}) = pw(k.k, exp.(-k.loga) .* x) # Binary methods (scalar and vector `a`, vector-valued input) -function ew(k::Stretched{<:Union{Real, AV{<:Real}}}, x::ColVecs, x′::ColVecs) - return ew(k.k, ColVecs(exp.(k.loga) .* x.X), ColVecs(exp.(k.loga) .* x′.X)) +function ew(k::Stretched{<:AV{<:Real}}, x::ColVecs, x′::ColVecs) + return ew(k.k, ColVecs(exp.(-k.loga) .* x.X), ColVecs(exp.(.-k.loga) .* x′.X)) end -function pw(k::Stretched{<:Union{Real, AV{<:Real}}}, x::ColVecs, x′::ColVecs) - return pw(k.k, ColVecs(exp.(k.loga) .* x.X), ColVecs(exp.(k.loga) .* x′.X)) +function pw(k::Stretched{<:AV{<:Real}}, x::ColVecs, x′::ColVecs) + return pw(k.k, ColVecs(exp.(-k.loga) .* x.X), ColVecs(exp.(.-k.loga) .* x′.X)) end # Unary methods (scalar and vector `a`, vector-valued input) -ew(k::Stretched{<:Union{Real, AV{<:Real}}}, x::ColVecs) = ew(k.k, ColVecs(exp.(k.loga) .* x.X)) -pw(k::Stretched{<:Union{Real, AV{<:Real}}}, x::ColVecs) = pw(k.k, ColVecs(exp.(k.loga) .* x.X)) +ew(k::Stretched{<:AV{<:Real}}, x::ColVecs) = ew(k.k, ColVecs(exp.(.-k.loga) .* x.X)) +pw(k::Stretched{<:AV{<:Real}}, x::ColVecs) = pw(k.k, ColVecs(exp.(.-k.loga) .* x.X)) # Binary methods (matrix `a`, vector-valued input) function ew(k::Stretched{<:AM{<:Real}}, x::ColVecs, x′::ColVecs) - return ew(k.k, ColVecs(exp.(k.loga) * x.X), ColVecs(exp.(k.loga) * x′.X)) + return ew(k.k, ColVecs(exp.(-k.loga) * x.X), ColVecs(exp.(.-k.loga) * x′.X)) end function pw(k::Stretched{<:AM{<:Real}}, x::ColVecs, x′::ColVecs) - return pw(k.k, ColVecs(exp.(k.loga) * x.X), ColVecs(exp.(k.loga) * x′.X)) + return pw(k.k, ColVecs(exp.(-k.loga) * x.X), ColVecs(exp.(.-k.loga) 
 end
 
 # Unary methods (scalar and vector `a`, vector-valued input)
-ew(k::Stretched{<:AM{<:Real}}, x::ColVecs) = ew(k.k, ColVecs(exp.(k.loga) * x.X))
-pw(k::Stretched{<:AM{<:Real}}, x::ColVecs) = pw(k.k, ColVecs(exp.(k.loga) * x.X))
+ew(k::Stretched{<:AM{<:Real}}, x::ColVecs) = ew(k.k, ColVecs(exp.(.-k.loga) * x.X))
+pw(k::Stretched{<:AM{<:Real}}, x::ColVecs) = pw(k.k, ColVecs(exp.(.-k.loga) * x.X))
diff --git a/src/gp/neural_kernel_network.jl b/src/gp/neural_kernel_network.jl
index 96a32ed2..fdaf7119 100644
--- a/src/gp/neural_kernel_network.jl
+++ b/src/gp/neural_kernel_network.jl
@@ -67,11 +67,14 @@ end
 # use this function to reshape the 1d array back to kernel matrix
 _rebuild_kernel(x, n, m) = reshape(x, n, m)
 
+# `ew` should return a 1d array, but a Flux neural network produces a 2d array,
+# so we reshape the network's output back to 1d.
+_rebuild_diag(x) = reshape(x, :)
 
-ew(nkn::NeuralKernelNetwork, x) = nkn.chain(ew(nkn.player, x))
+ew(nkn::NeuralKernelNetwork, x) = _rebuild_diag(nkn.chain(ew(nkn.player, x)))
 pw(nkn::NeuralKernelNetwork, x) = _rebuild_kernel(nkn.chain(pw(nkn.player, x)), length(x), length(x))
 
-ew(nkn::NeuralKernelNetwork, x, x′) = nkn.chain(ew(nkn.player, x, x′))
+ew(nkn::NeuralKernelNetwork, x, x′) = _rebuild_diag(nkn.chain(ew(nkn.player, x, x′)))
 pw(nkn::NeuralKernelNetwork, x, x′) = _rebuild_kernel(nkn.chain(pw(nkn.player, x, x′)), length(x), length(x′))
 
 function Base.show(io::IO, kernel::NeuralKernelNetwork)

From 86d33b89f58f5050a8877fadce583d49e70aaf89 Mon Sep 17 00:00:00 2001
From: hongbinren
Date: Sun, 1 Mar 2020 21:42:11 +0800
Subject: [PATCH 25/42] update example

---
 .../neural_kernel_network/time_series.jl      | 38 +++++++++++--------
 1 file changed, 22 insertions(+), 16 deletions(-)

diff --git a/examples/flux_integration/neural_kernel_network/time_series.jl b/examples/flux_integration/neural_kernel_network/time_series.jl
index 2a5bed4a..dcaa8693 100644
--- a/examples/flux_integration/neural_kernel_network/time_series.jl
+++ b/examples/flux_integration/neural_kernel_network/time_series.jl
@@ -54,16 +54,15 @@ end
 l = median_distance_local(xtrain)
 
-
 # construct kernels
 iso_lin_kernel1 = stretch(Linear(), [0.0])
-iso_per_kernel1 = [log(1.0)] * stretch(PerEQ([log(l)]), [log(l)])
-iso_eq_kernel1 = [log(1.0)] * stretch(EQ(), [log(l/4.0)])
-iso_rq_kernel1 = [log(1.0)] * stretch(RQ([log(0.2)]), [log(2.0*l)])
+iso_per_kernel1 = [0.0] * stretch(PerEQ([log(l)]), [log(l)])
+iso_eq_kernel1 = [0.0] * stretch(EQ(), [log(l/4.0)])
+iso_rq_kernel1 = [0.0] * stretch(RQ([log(0.2)]), [log(2.0*l)])
 
 iso_lin_kernel2 = stretch(Linear(), [0.0])
-iso_rq_kernel2 = [log(1.0)] * stretch(RQ([log(0.1)]), [log(l)])
-iso_eq_kernel2 = [log(1.0)] * stretch(EQ(), [log(l)])
-iso_per_kernel2 = [log(1.0)] * stretch(PerEQ([log(l/4.0)]), [log(l/4.0)])
+iso_rq_kernel2 = [0.0] * stretch(RQ([log(0.1)]), [log(l)])
+iso_eq_kernel2 = [0.0] * stretch(EQ(), [log(l)])
+iso_per_kernel2 = [0.0] * stretch(PerEQ([log(l/4.0)]), [log(l/4.0)])
 
 
 # sum product network
@@ -101,23 +100,30 @@ for i in 1:5000
     end
 end
 
-display(plot(loss))
+plt1 = plot(legend=false, xlabel="Epochs", ylabel="Negative log-likelihood")
+plot!(plt1, loss)
+png(plt1, "loss.png")
 
 # predict
 function predict(X, Xtrain, ytrain)
-    noisy_prior = gp(ColVecs(Xtrain), σ²_n)
-    posterior = gp | Obs(noisy_prior, ytrain)
+    gp = GP(nkn, GPC())
+    gp_Xtrain = gp(ColVecs(Xtrain), σ²_n)
+    posterior = gp | Obs(gp_Xtrain, ytrain)
     posterior(ColVecs(X))
 end
 
 posterior = predict(Year, Xtrain, ytrain)
-pred_y = mean(posterior)
+post_dist = marginals(posterior)
+pred_y = mean.(post_dist)
+std_y = std.(post_dist)
+
 pred_oy = @. pred_y*ytrain_std+ytrain_mean
+pred_oσ = @. std_y*ytrain_std
 
-plt = plot(xlabel="Year", ylabel="Airline Passenger number", legend=true)
-plot!(plt, year, pred_oy, title="Time series prediction",label="95% predictive confidence region")
-scatter!(plt, oxtest, oytest, label="Observations(test)", color=:red)
-scatter!(plt, oxtrain, oytrain, label="Observations(train)", color=:black)
-display(plt)
+plt2 = plot(xlabel="Year", ylabel="Airline Passenger number", legend=true)
+plot!(plt2, year, pred_oy, ribbons=3*pred_oσ, title="Time series prediction", label="99.7% predictive confidence region")
+scatter!(plt2, oxtest, oytest, label="Observations(test)", color=:red)
+scatter!(plt2, oxtrain, oytrain, label="Observations(train)", color=:black)
+png(plt2, "time_series.png")

From fe3d483e518fee521b303cf1f626eccf6af78319 Mon Sep 17 00:00:00 2001
From: hongbinren
Date: Tue, 3 Mar 2020 12:53:32 +0800
Subject: [PATCH 26/42] design a tree structure for handling model parameters

---
 Project.toml     |   1 -
 src/Stheno.jl    |   2 +-
 src/gp/kernel.jl | 233 ++++++++++++++++++++++++++++++++++-------------
 3 files changed, 169 insertions(+), 67 deletions(-)

diff --git a/Project.toml b/Project.toml
index 30b9b9c9..1d3a51fc 100644
--- a/Project.toml
+++ b/Project.toml
@@ -7,7 +7,6 @@ BlockArrays = "8e7c35d0-a365-5155-bbbb-fb81a777f24e"
 Distances = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7"
 Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
 FillArrays = "1a297f60-69ca-5386-bcde-b61e274b549b"
-Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
 MacroTools = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09"
 Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
diff --git a/src/Stheno.jl b/src/Stheno.jl
index 54317fa1..f5b87aca 100644
--- a/src/Stheno.jl
+++ b/src/Stheno.jl
@@ -41,7 +41,7 @@ module Stheno
     # Atomic GP objects.
     include(joinpath("gp", "mean.jl"))
     include(joinpath("gp", "kernel.jl"))
-    include(joinpath("gp", "neural_kernel_network.jl"))
+# include(joinpath("gp", "neural_kernel_network.jl"))
     include(joinpath("gp", "gp.jl"))
 
     # Composite GPs, constructed via affine transformation of CompositeGPs and GPs.
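To see what this "tree structure" buys us, here is a minimal sketch of the intended round trip, assuming the `parameters`/`dispatch!` API defined in the kernel.jl diff below; the kernel and the values are purely illustrative:

```julia
using Stheno

k = 2.0 * stretch(EQ(), 0.5)   # every hyperparameter lives in a leaf array, on the log scale

θ = parameters(k)              # flat vector walking the tree: [log(2.0), log(0.5)]
dispatch!(k, θ .+ 0.1)         # write (e.g. optimiser-updated) parameters back into k
parameters(k) == θ .+ 0.1      # true
```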
diff --git a/src/gp/kernel.jl b/src/gp/kernel.jl index a01f0be0..b3b0c520 100644 --- a/src/gp/kernel.jl +++ b/src/gp/kernel.jl @@ -2,13 +2,74 @@ import Base: +, *, zero, cos using Distances: sqeuclidean, SqEuclidean, Euclidean using Base.Broadcast: broadcast_shape using LinearAlgebra: isposdef, checksquare -using Flux -using Flux: @functor + abstract type Kernel end +function get_iparam(::Kernel) end +function child(::Kernel) end +parameters(x::Kernel) = parameters!(parameter_eltype(x)[], x) +# parameters(x::Kernel) = parameters!(Params(), x) +function parameters!(out, x::Kernel) + append!(out, get_iparam(x)) + # push!(out, get_iparam(x)) + for x_child in child(x) + parameters!(out, x_child) + end + return out +end +function parameter_eltype(x::Kernel) + T = eltype(get_iparam(x)) + for each in child(x) + T = promote_type(T, parameter_eltype(each)) + end + return T +end +get_nparameter(x::Kernel) = length(parameters(x)) + +function dispatch!(k::Kernel, v::AV) + nθ_k = get_nparameter(k) + nθ_k == length(v) || throw(DimensionMismatch("expect $(nθ_k) parameters, got $(length(v))")) + θ = get_iparam(k) + copyto!(θ, 1, v, 1, length(θ)) + loc = 1 + length(θ) + for k′ in child(k) + nθ_k′ = get_nparameter(k′) + dispatch!(k′, v[loc:loc+nθ_k′-1]) + loc += nθ_k′ + end + return k +end +extract_gradient(k::Kernel, G::NamedTuple) = extract_gradient!(parameter_eltype(k)[], G) +function extract_gradient!(out, G::NamedTuple) + for each in values(G) + if each isa NamedTuple + extract_gradient!(out, each) + elseif each isa AV + append!(out, each) + end + end + return out +end + + + +""" +Definition of a particular kernel should contains: + + struct KernelName <: Kernel end + function kernelname end + get_iparam(::KernelName) + child(::KernelName) + parameter_eltype(::KernelName) + ew(::KernelName, x) + ew(::KernelName, x, x′) + pw(::KernelName, x) + pw(::KernelName, x, x′) +""" + # API exports -export Kernel, kernel, elementwise, pairwise, ew, pw +export Kernel, kernel, elementwise, pairwise, ew, pw, parameters, dispatch!, extract_gradient # Kernel exports export EQ, Exp, PerEQ, Matern12, Matern32, Matern52, RQ, Cosine, Linear, Poly, GammaExp, Wiener, @@ -28,6 +89,8 @@ A rank 0 `Kernel` that always returns zero. struct ZeroKernel{T<:Real} <: Kernel end ZeroKernel() = ZeroKernel{Float64}() zero(::Kernel) = ZeroKernel() +get_iparam(::ZeroKernel) = Union{}[] +child(::ZeroKernel) = () # Binary methods. ew(k::ZeroKernel{T}, x::AV, x′::AV) where {T} = zeros(T, broadcast_shape(size(x), size(x′))) @@ -47,6 +110,8 @@ but (almost certainly) shouldn't be used as a base `Kernel`. """ struct OneKernel{T<:Real} <: Kernel end OneKernel() = OneKernel{Float64}() +get_iparam(::OneKernel) = Union{}[] +child(::OneKernel) = () # Binary methods. ew(k::OneKernel{T}, x::AV, x′::AV) where {T} = ones(T, broadcast_shape(size(x), size(x′))) @@ -63,16 +128,18 @@ pw(k::OneKernel{T}, x::AV) where {T} = ones(T, length(x), length(x)) A rank 1 kernel that returns the same value `c` everywhere. """ -struct ConstKernel{T} <: Kernel - c::T +struct ConstKernel{T, cT<:AV{T}} <: Kernel + c::cT end +get_iparam(c::ConstKernel) = c.c +child(::ConstKernel) = () # Binary methods. -ew(k::ConstKernel, x::AV, x′::AV) = fill(k.c, broadcast_shape(size(x), size(x′))...) -pw(k::ConstKernel, x::AV, x′::AV) = fill(k.c, length(x), length(x′)) +ew(k::ConstKernel, x::AV, x′::AV) = fill(k.c[1], broadcast_shape(size(x), size(x′))...) +pw(k::ConstKernel, x::AV, x′::AV) = fill(k.c[1], length(x), length(x′)) # Unary methods. 
-ew(k::ConstKernel, x::AV) = fill(k.c, length(x)) +ew(k::ConstKernel, x::AV) = fill(k.c[1], length(x)) pw(k::ConstKernel, x::AV) = pw(k, x, x) @@ -88,6 +155,8 @@ Squared Exponential kernel. For length scales etc see [`stretch`](@ref), for variance see [`*`](@ref). """ struct EQ <: Kernel end +get_iparam(::EQ) = Union{}[] +child(::EQ) = () # Binary methods. ew(::EQ, x::AV, x′::AV) = exp.(.-ew(SqEuclidean(), x, x′) ./ 2) @@ -104,25 +173,19 @@ pw(::EQ, x::AV) = exp.(.-pw(SqEuclidean(), x) ./ 2) The usual periodic kernel derived by mapping the input domain onto the unit circle. -`` k(x, x^\prime) = \exp (-2 \sin (\pi | x - x^\prime |^2)`` +`` k(x, x^\prime) = \exp (-2 (\sin (\pi | x - x^\prime |) / l)^2)`` For length scales etc see [`stretch`](@ref), for variance see [`*`](@ref). """ -# The type of parameter l is changed to `AbstractVector`, since Flux's `params` method -# requires the fields of a type to be array, also I use log scale for scale and stretch -# parameters, since these parameters should remain positive during optimization, use the -# log scale will safely remove that constraint ( this is also adopted in GaussianProcess.jl ). -# -# NOTE: The above implementations break Stheno's current convention, but we could discuss -# it later to find out whether we should use Flux's `params` method or write our own, and -# how to deal with the constraints. -# -# NOTE: It seems that current `PerEQ` kernel don't work properly ( I noticed it isn't exported yet ), -# so I reimplemented `PerEQ`. -struct PerEQ{LT<:AV{<:Real}} <: Kernel +struct PerEQ{T, LT<:AV{T}} <: Kernel logl::LT end -@functor PerEQ +function PerEQ(l::Real) + l > 0.0 || throw(ArgumentError("l should be positive")) + PerEQ(typeof(l)[log(l)]) +end +get_iparam(per::PerEQ) = per.logl +child(::PerEQ) = () _pereq(d, l) = exp(-2.0*sin(π*d)^2 / l^2) @@ -146,6 +209,8 @@ The standardised Matern-1/2 / Exponential kernel: For length scales etc see [`stretch`](@ref), for variance see [`*`](@ref). """ struct Matern12 <: Kernel end +get_iparam(::Matern12) = Union{}[] +child(::Matern12) = () # Binary methods ew(k::Matern12, x::AV, x′::AV) = exp.(.-ew(Euclidean(), x, x′)) @@ -176,6 +241,8 @@ The standardised Matern kernel with ν = 3 / 2. For length scales etc see [`stretch`](@ref), for variance see [`*`](@ref). """ struct Matern32 <: Kernel end +get_iparam(::Matern32) = Union{}[] +child(::Matern32) = () function _matern32(d::Real) d = sqrt(3) * d @@ -200,6 +267,8 @@ The standardised Matern kernel with ν = 5 / 2. For length scales etc see [`stretch`](@ref), for variance see [`*`](@ref). """ struct Matern52 <: Kernel end +get_iparam(::Matern52) = Union{}[] +child(::Matern52) = () function _Matern52(d::Real) λ = sqrt(5) * d @@ -231,11 +300,15 @@ The standardised Rational Quadratic, with kurtosis `α`. For length scales etc see [`stretch`](@ref), for variance see [`*`](@ref). """ -# α is also in log scale and accept AV type !!! -struct RQ{Tα<:AV{<:Real}} <: Kernel +struct RQ{T, Tα<:AV{T}} <: Kernel logα::Tα end -@functor RQ +function RQ(α::Real) + α > 0.0 || throw(ArgumentError("α should be positive")) + RQ(typeof(α)[log(α)]) +end +get_paramter(rq::RQ) = rq.logα +child(::RQ) = () _rq(d, α) = (1 + d / (2α))^(-α) @@ -254,17 +327,23 @@ pw(k::RQ, x::AV) = _rq.(pw(SqEuclidean(), x), exp(k.logα[1])) Cosine Kernel with period parameter `p`. 
""" -struct Cosine{Tp<:Real} <: Kernel - p::Tp +struct Cosine{T, Tp<:AV{T}} <: Kernel + logp::Tp +end +function Cosine(p::Real) + p > 0.0 || throw(ArgumentError("p should be positive")) + Cosine(typeof(p)[log(p)]) end +get_paramter(c::Cosine) = c.logp +child(::Cosine) = () # Binary methods. -ew(k::Cosine, x::AV{<:Real}, x′::AV{<:Real}) = cos.(pi.*ew(Euclidean(), x, x′) ./k.p) -pw(k::Cosine, x::AV{<:Real}, x′::AV{<:Real}) = cos.(pi.*pw(Euclidean(), x, x′) ./k.p) +ew(k::Cosine, x::AV{<:Real}, x′::AV{<:Real}) = cos.(pi.*ew(Euclidean(), x, x′) ./exp(k.logp[1])) +pw(k::Cosine, x::AV{<:Real}, x′::AV{<:Real}) = cos.(pi.*pw(Euclidean(), x, x′) ./exp(k.logp[1])) # Unary methods. ew(k::Cosine, x::AV{<:Real}) = 1 .+ ew(Euclidean(), x) -pw(k::Cosine, x::AV{<:Real}) = cos.(pi .* pw(Euclidean(), x) ./ k.p) +pw(k::Cosine, x::AV{<:Real}) = cos.(pi .* pw(Euclidean(), x) ./exp(k.logp[1])) @@ -274,6 +353,8 @@ pw(k::Cosine, x::AV{<:Real}) = cos.(pi .* pw(Euclidean(), x) ./ k.p) The standardised linear kernel / dot-product kernel. """ struct Linear <: Kernel end +get_iparam(::Linear) = Union{}[] +child(::Linear) = () # Binary methods ew(k::Linear, x::AV{<:Real}, x′::AV{<:Real}) = x .* x′ @@ -298,10 +379,15 @@ defined as k(xl, xr) = (dot(xl, xr) + σ²)^p ``` """ -struct Poly{p, Tσ²<:Real} <: Kernel - σ²::Tσ² +struct Poly{p, T, Tσ²<:AV{T}} <: Kernel + logσ²::Tσ² end -Poly(p::Int, σ²::Real) = Poly{p, typeof(σ²)}(σ²) +function Poly(p::Int, σ²::Real) + σ²>0.0 || throw(ArgumentError("σ² should be positive")) + Poly{p, typeof(σ²), AV{typeof(σ²)}}(typeof(σ²)[σ²]) +end +get_iparam(p::Poly) = p.logσ² +child(::Poly) = () _poly(k, σ², p) = (σ² + k)^p Zygote.@adjoint function _poly(k, σ², p) @@ -313,12 +399,12 @@ Zygote.@adjoint function _poly(k, σ², p) end # Binary methods -ew(k::Poly{p}, x::AV, x′::AV) where {p} = _poly.(ew(Linear(), x, x′), k.σ², p) -pw(k::Poly{p}, x::AV, x′::AV) where {p} = _poly.(pw(Linear(), x, x′), k.σ², p) +ew(k::Poly{p}, x::AV, x′::AV) where {p} = _poly.(ew(Linear(), x, x′), exp(k.logσ²[1]), p) +pw(k::Poly{p}, x::AV, x′::AV) where {p} = _poly.(pw(Linear(), x, x′), exp(k.logσ²[1]), p) # Unary methods -ew(k::Poly{p}, x::AV) where {p} = _poly.(ew(Linear(), x), k.σ², p) -pw(k::Poly{p}, x::AV) where {p} = _poly.(pw(Linear(), x), k.σ², p) +ew(k::Poly{p}, x::AV) where {p} = _poly.(ew(Linear(), x), exp(k.logσ²), p) +pw(k::Poly{p}, x::AV) where {p} = _poly.(pw(Linear(), x), exp(k.logσ²), p) @@ -347,6 +433,8 @@ pw(k::GammaExp, x::AV) = exp.(.-pw(Euclidean(), x).^k.γ) The standardised stationary Wiener-process kernel. """ struct Wiener <: Kernel end +get_iparam(::Wiener) = Union{}[] +child(::Wiener) = () _wiener(x::Real, x′::Real) = min(x, x′) @@ -366,6 +454,8 @@ pw(k::Wiener, x::AV{<:Real}) = pw(k, x, x) The standardised WienerVelocity kernel. """ struct WienerVelocity <: Kernel end +get_iparam(::WienerVelocity) = Union{}[] +child(::WienerVelocity) = () _wiener_vel(x::Real, x′::Real) = min(x, x′)^3 / 3 + abs(x - x′) * min(x, x′)^2 / 2 @@ -386,6 +476,8 @@ The standardised aleatoric white-noise kernel. Isn't really a kernel, but never """ struct Noise{T<:Real} <: Kernel end Noise() = Noise{Int}() +get_iparam(::Noise) = Union{}[] +child(::Noise) = () # Binary methods. ew(k::Noise{T}, x::AV, x′::AV) where {T} = zeros(T, broadcast_shape(size(x), size(x′))...) 
@@ -439,7 +531,8 @@ struct Sum{Tkl<:Kernel, Tkr<:Kernel} <: Kernel kl::Tkl kr::Tkr end - +get_iparam(::Sum) = Union{}[] +child(s::Sum) = (s.kl, s.kr) """ +(kl::Kernel, kr::Kernel) @@ -475,7 +568,8 @@ struct Product{Tkl<:Kernel, Tkr<:Kernel} <: Kernel kl::Tkl kr::Tkr end - +get_iparam(::Product) = Union{}[] +child(p::Product) = (p.kl, p.kr) """ +(kl::Kernel, kr::Kernel) @@ -507,11 +601,16 @@ Scaled{Tσ²<:AV{<:Real}, Tk<:Kernel} <: Kernel Scale the variance of `Kernel` `k` by `σ²` s.t. `(σ² * k)(x, x′) = σ² * k(x, x′)`. """ -struct Scaled{Tσ²<:AV{<:Real}, Tk<:Kernel} <: Kernel +struct Scaled{T, Tσ²<:AV{T}, Tk<:Kernel} <: Kernel logσ²::Tσ² k::Tk end -@functor Scaled +function Scaled(σ²::Real, k::Kernel) + σ²>0.0 || throw(ArgumentError("σ² should be positive")) + Scaled(typeof(σ²)[log(σ²)], k) +end +get_iparam(s::Scaled) = s.logσ² +child(s::Scaled) = (s.k,) """ *(σ²::Real, k::Kernel) *(k::Kernel, σ²::Real) @@ -528,8 +627,8 @@ true ``` """ # NOTE: σ² is in log scale !!! -*(logσ²::AV{<:Real}, k::Kernel) = Scaled(logσ², k) -*(k::Kernel, logσ²) = logσ² * k +*(σ²::Real, k::Kernel) = Scaled(σ², k) +*(k::Kernel, σ²) = σ² * k # Binary methods. ew(k::Scaled, x::AV, x′::AV) = exp(k.logσ²[1]) .* ew(k.k, x, x′) @@ -546,12 +645,12 @@ pw(k::Scaled, x::AV) = exp(k.logσ²[1]) .* pw(k.k, x) Apply a length scale to a kernel. Specifically, `k(x, x′) = k(a * x, a * x′)`. """ -# NOTE: Real type is removed in Union !!! -struct Stretched{Ta<:Union{AV{<:Real}, AM{<:Real}}, Tk<:Kernel} <: Kernel +struct Stretched{T, Ta<:Union{AV{T}, AM{T}}, Tk<:Kernel} <: Kernel loga::Ta k::Tk end -@functor Stretched +get_iparam(s::Stretched) = s.loga +child(s::Stretched) = (s.k,) """ stretch(k::Kernel, a::Union{Real, AbstractVecOrMat{<:Real}) @@ -630,40 +729,44 @@ K = pairwise(k, xs, ys) 1.40202e-8 0.293658 0.0808585 ``` """ -stretch(k::Kernel, loga::AbstractVecOrMat{<:Real}) = Stretched(loga, k) +stretch(k::Kernel, a::Real) = stretch(k, typeof(a)[a]) +function stretch(k::Kernel, a::AbstractVecOrMat{<:Real}) + all(a.>0.0) || throw(ArgumentError("all element of a should be positive")) + Stretched(log.(a), k) +end # NOTE: `a` is not scalar any more !!! 
# Binary methods (scalar `a`, scalar-valued input) -ew(k::Stretched{<:AV{<:Real}}, x::AV{<:Real}, x′::AV{<:Real}) = ew(k.k, exp.(-k.loga) .* x, exp.(-k.loga) .* x′) -pw(k::Stretched{<:AV{<:Real}}, x::AV{<:Real}, x′::AV{<:Real}) = pw(k.k, exp.(-k.loga) .* x, exp.(-k.loga) .* x′) +ew(k::Stretched{<:Real}, x::AV{<:Real}, x′::AV{<:Real}) = ew(k.k, exp.(k.loga) .* x, exp.(k.loga) .* x′) +pw(k::Stretched{<:Real}, x::AV{<:Real}, x′::AV{<:Real}) = pw(k.k, exp.(k.loga) .* x, exp.(k.loga) .* x′) # Unary methods (scalar) -ew(k::Stretched{<:AV{<:Real}}, x::AV{<:Real}) = ew(k.k, exp.(-k.loga) .* x) -pw(k::Stretched{<:AV{<:Real}}, x::AV{<:Real}) = pw(k.k, exp.(-k.loga) .* x) +ew(k::Stretched{<:Real}, x::AV{<:Real}) = ew(k.k, exp.(k.loga) .* x) +pw(k::Stretched{<:Real}, x::AV{<:Real}) = pw(k.k, exp.(k.loga) .* x) # Binary methods (scalar and vector `a`, vector-valued input) -function ew(k::Stretched{<:AV{<:Real}}, x::ColVecs, x′::ColVecs) - return ew(k.k, ColVecs(exp.(-k.loga) .* x.X), ColVecs(exp.(.-k.loga) .* x′.X)) +function ew(k::Stretched{<:Real}, x::ColVecs, x′::ColVecs) + return ew(k.k, ColVecs(exp.(k.loga) .* x.X), ColVecs(exp.(k.loga) .* x′.X)) end -function pw(k::Stretched{<:AV{<:Real}}, x::ColVecs, x′::ColVecs) - return pw(k.k, ColVecs(exp.(-k.loga) .* x.X), ColVecs(exp.(.-k.loga) .* x′.X)) +function pw(k::Stretched{<:Real}, x::ColVecs, x′::ColVecs) + return pw(k.k, ColVecs(exp.(k.loga) .* x.X), ColVecs(exp.(k.loga) .* x′.X)) end # Unary methods (scalar and vector `a`, vector-valued input) -ew(k::Stretched{<:AV{<:Real}}, x::ColVecs) = ew(k.k, ColVecs(exp.(.-k.loga) .* x.X)) -pw(k::Stretched{<:AV{<:Real}}, x::ColVecs) = pw(k.k, ColVecs(exp.(.-k.loga) .* x.X)) +ew(k::Stretched{<:Real}, x::ColVecs) = ew(k.k, ColVecs(exp.(k.loga) .* x.X)) +pw(k::Stretched{<:Real}, x::ColVecs) = pw(k.k, ColVecs(exp.(k.loga) .* x.X)) # Binary methods (matrix `a`, vector-valued input) -function ew(k::Stretched{<:AM{<:Real}}, x::ColVecs, x′::ColVecs) - return ew(k.k, ColVecs(exp.(-k.loga) * x.X), ColVecs(exp.(.-k.loga) * x′.X)) -end -function pw(k::Stretched{<:AM{<:Real}}, x::ColVecs, x′::ColVecs) - return pw(k.k, ColVecs(exp.(-k.loga) * x.X), ColVecs(exp.(.-k.loga) * x′.X)) -end +# function ew(k::Stretched{<:AM{<:Real}}, x::ColVecs, x′::ColVecs) +# return ew(k.k, ColVecs(exp.(-k.loga) * x.X), ColVecs(exp.(.-k.loga) * x′.X)) +# end +# function pw(k::Stretched{<:AM{<:Real}}, x::ColVecs, x′::ColVecs) +# return pw(k.k, ColVecs(exp.(-k.loga) * x.X), ColVecs(exp.(.-k.loga) * x′.X)) +# end # Unary methods (scalar and vector `a`, vector-valued input) -ew(k::Stretched{<:AM{<:Real}}, x::ColVecs) = ew(k.k, ColVecs(exp.(.-k.loga) * x.X)) -pw(k::Stretched{<:AM{<:Real}}, x::ColVecs) = pw(k.k, ColVecs(exp.(.-k.loga) * x.X)) +# ew(k::Stretched{<:AM{<:Real}}, x::ColVecs) = ew(k.k, ColVecs(exp.(.-k.loga) * x.X)) +# pw(k::Stretched{<:AM{<:Real}}, x::ColVecs) = pw(k.k, ColVecs(exp.(.-k.loga) * x.X)) From 2cf9d6e1ba1fcef7e402b4cac22f357a564f9d50 Mon Sep 17 00:00:00 2001 From: hongbinren Date: Wed, 4 Mar 2020 00:20:30 +0800 Subject: [PATCH 27/42] add AbstractModel type, add neural network specified for GP --- src/Stheno.jl | 4 ++ src/abstract_gp.jl | 2 +- src/abstract_model.jl | 54 +++++++++++++++++++ src/gp/gp.jl | 3 ++ src/gp/kernel.jl | 92 ++++++++++++++++----------------- src/gp/mean.jl | 12 +++-- src/gp/neural_kernel_network.jl | 41 +++------------ src/neural_network/basic.jl | 42 +++++++++++++++ 8 files changed, 166 insertions(+), 84 deletions(-) create mode 100644 src/abstract_model.jl create mode 100644 src/neural_network/basic.jl 
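Downstream, the `extract_gradient` helper that moves into `abstract_model.jl` below is meant to pair with Zygote's structural gradients. A sketch of that pairing, assuming Zygote differentiates cleanly through these kernels and that the `AbstractModel` API lands as written in this patch (kernel and inputs illustrative):

```julia
using Stheno, Zygote

k = 2.0 * stretch(EQ(), 0.5)
x = rand(10)

# Zygote returns a NamedTuple that mirrors the kernel's struct tree...
g = Zygote.gradient(k -> sum(pw(k, x)), k)[1]

# ...which extract_gradient flattens into a vector aligned with parameters(k).
ḡ = extract_gradient(k, g)
length(ḡ) == length(parameters(k))  # true
```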
diff --git a/src/Stheno.jl b/src/Stheno.jl index f5b87aca..c98acab4 100644 --- a/src/Stheno.jl +++ b/src/Stheno.jl @@ -37,6 +37,7 @@ module Stheno # Supertype for GPs. include("abstract_gp.jl") + include("abstract_model.jl") # Atomic GP objects. include(joinpath("gp", "mean.jl")) @@ -56,6 +57,9 @@ module Stheno # include(joinpath("composite", "gradient.jl")) # include(joinpath("composite", "integrate.jl")) + # Neural network used in gp + include(joinpath("neural_network", "basic.jl")) + # Various stuff for convenience. include(joinpath("util", "model.jl")) include(joinpath("util", "plotting.jl")) diff --git a/src/abstract_gp.jl b/src/abstract_gp.jl index 7d60c56a..deeae51e 100644 --- a/src/abstract_gp.jl +++ b/src/abstract_gp.jl @@ -1,6 +1,6 @@ export GPC -abstract type AbstractGP end +abstract type AbstractGP <: AbstractModel end # A collection of GPs (GPC == "GP Collection"). Used to keep track of GPs. mutable struct GPC diff --git a/src/abstract_model.jl b/src/abstract_model.jl new file mode 100644 index 00000000..a2e27fc5 --- /dev/null +++ b/src/abstract_model.jl @@ -0,0 +1,54 @@ +export AbstractModel, parameters, parameter_eltype, dispatch!, extract_gradient + +const AVM = AbstractVecOrMat + +abstract type AbstractModel end + +function get_iparam(::AbstractModel) end +function child(::AbstractModel) end +parameters(x::AbstractModel) = parameters!(parameter_eltype(x)[], x) +function parameters!(out, x::AbstractModel) + append!(out, get_iparam(x)) + for x_child in child(x) + parameters!(out, x_child) + end + return out +end +function parameter_eltype(x::AbstractModel) + T = eltype(get_iparam(x)) + for each in child(x) + T = promote_type(T, parameter_eltype(each)) + end + return T +end +get_nparameter(x::AbstractModel) = length(parameters(x)) + +function dispatch!(k::AbstractModel, v::AV) + nθ_k = get_nparameter(k) + nθ_k == length(v) || throw(DimensionMismatch("expect $(nθ_k) parameters, got $(length(v))")) + θ = get_iparam(k) + copyto!(θ, 1, v, 1, length(θ)) + loc = 1 + length(θ) + for k′ in child(k) + nθ_k′ = get_nparameter(k′) + dispatch!(k′, v[loc:loc+nθ_k′-1]) + loc += nθ_k′ + end + return k +end +extract_gradient(k::AbstractModel, G::NamedTuple) = extract_gradient!(parameter_eltype(k)[], G) +function extract_gradient!(out, G::NamedTuple) + for (_, val) in pairs(G) + if val isa AVM + append!(out, val) + elseif val isa NamedTuple + extract_gradient!(out, val) + elseif val isa Tuple && eltype(val) == NamedTuple + foreach(x->extract_gradient!(out, x), val) + end + end + return out +end + + + diff --git a/src/gp/gp.jl b/src/gp/gp.jl index 6314866e..6baba994 100644 --- a/src/gp/gp.jl +++ b/src/gp/gp.jl @@ -64,6 +64,9 @@ struct GP{Tm<:MeanFunction, Tk<:Kernel} <: AbstractGP return gp end end +get_iparam(::GP) = Union{}[] +child(gp::GP) = (gp.m, gp.k) + GP(m::Tm, k::Tk, gpc::GPC) where {Tm<:MeanFunction, Tk<:Kernel} = GP{Tm, Tk}(m, k, gpc) GP(f, k::Kernel, gpc::GPC) = GP(CustomMean(f), k, gpc) diff --git a/src/gp/kernel.jl b/src/gp/kernel.jl index b3b0c520..944d2606 100644 --- a/src/gp/kernel.jl +++ b/src/gp/kernel.jl @@ -4,52 +4,52 @@ using Base.Broadcast: broadcast_shape using LinearAlgebra: isposdef, checksquare -abstract type Kernel end -function get_iparam(::Kernel) end -function child(::Kernel) end -parameters(x::Kernel) = parameters!(parameter_eltype(x)[], x) +abstract type Kernel <: AbstractModel end +# function get_iparam(::Kernel) end +# function child(::Kernel) end +# parameters(x::Kernel) = parameters!(parameter_eltype(x)[], x) # parameters(x::Kernel) = parameters!(Params(), 
x) -function parameters!(out, x::Kernel) - append!(out, get_iparam(x)) - # push!(out, get_iparam(x)) - for x_child in child(x) - parameters!(out, x_child) - end - return out -end -function parameter_eltype(x::Kernel) - T = eltype(get_iparam(x)) - for each in child(x) - T = promote_type(T, parameter_eltype(each)) - end - return T -end -get_nparameter(x::Kernel) = length(parameters(x)) - -function dispatch!(k::Kernel, v::AV) - nθ_k = get_nparameter(k) - nθ_k == length(v) || throw(DimensionMismatch("expect $(nθ_k) parameters, got $(length(v))")) - θ = get_iparam(k) - copyto!(θ, 1, v, 1, length(θ)) - loc = 1 + length(θ) - for k′ in child(k) - nθ_k′ = get_nparameter(k′) - dispatch!(k′, v[loc:loc+nθ_k′-1]) - loc += nθ_k′ - end - return k -end -extract_gradient(k::Kernel, G::NamedTuple) = extract_gradient!(parameter_eltype(k)[], G) -function extract_gradient!(out, G::NamedTuple) - for each in values(G) - if each isa NamedTuple - extract_gradient!(out, each) - elseif each isa AV - append!(out, each) - end - end - return out -end +# function parameters!(out, x::Kernel) +# append!(out, get_iparam(x)) +# # push!(out, get_iparam(x)) +# for x_child in child(x) +# parameters!(out, x_child) +# end +# return out +# end +# function parameter_eltype(x::Kernel) +# T = eltype(get_iparam(x)) +# for each in child(x) +# T = promote_type(T, parameter_eltype(each)) +# end +# return T +# end +# get_nparameter(x::Kernel) = length(parameters(x)) +# +# function dispatch!(k::Kernel, v::AV) +# nθ_k = get_nparameter(k) +# nθ_k == length(v) || throw(DimensionMismatch("expect $(nθ_k) parameters, got $(length(v))")) +# θ = get_iparam(k) +# copyto!(θ, 1, v, 1, length(θ)) +# loc = 1 + length(θ) +# for k′ in child(k) +# nθ_k′ = get_nparameter(k′) +# dispatch!(k′, v[loc:loc+nθ_k′-1]) +# loc += nθ_k′ +# end +# return k +# end +# extract_gradient(k::Kernel, G::NamedTuple) = extract_gradient!(parameter_eltype(k)[], G) +# function extract_gradient!(out, G::NamedTuple) +# for each in values(G) +# if each isa NamedTuple +# extract_gradient!(out, each) +# elseif each isa AV +# append!(out, each) +# end +# end +# return out +# end @@ -69,7 +69,7 @@ Definition of a particular kernel should contains: # API exports -export Kernel, kernel, elementwise, pairwise, ew, pw, parameters, dispatch!, extract_gradient +export Kernel, kernel, elementwise, pairwise, ew, pw # Kernel exports export EQ, Exp, PerEQ, Matern12, Matern32, Matern52, RQ, Cosine, Linear, Poly, GammaExp, Wiener, diff --git a/src/gp/mean.jl b/src/gp/mean.jl index b6ddaabd..47c453ff 100644 --- a/src/gp/mean.jl +++ b/src/gp/mean.jl @@ -1,6 +1,6 @@ import Base: zero -abstract type MeanFunction end +abstract type MeanFunction <: AbstractModel end @@ -10,6 +10,8 @@ abstract type MeanFunction end Returns `zero(T)` everywhere. """ struct ZeroMean{T<:Real} <: MeanFunction end +get_iparam(::ZeroMean) = Union{}[] +child(::ZeroMean) = () ZeroMean() = ZeroMean{Float64}() ew(::ZeroMean{T}, x::AV) where T = zeros(T, length(x)) zero(::MeanFunction) = ZeroMean() @@ -22,6 +24,8 @@ zero(::MeanFunction) = ZeroMean() Return `one(T)` everywhere. """ struct OneMean{T<:Real} <: MeanFunction end +get_iparam(::OneMean) = Union{}[] +child(::OneMean) = () OneMean() = OneMean{Float64}() ew(::OneMean{T}, x::AV) where T = ones(T, length(x)) @@ -32,9 +36,11 @@ ew(::OneMean{T}, x::AV) where T = ones(T, length(x)) Returns `c` everywhere. 
""" -struct ConstMean{T<:Real} <: MeanFunction - c::T +struct ConstMean{T, cT<:AV{T}} <: MeanFunction + c::cT end +get_iparam(c::ConstMean) = c.c +child(::ConstMean) = () ew(m::ConstMean, x::AV) = fill(m.c, length(x)) diff --git a/src/gp/neural_kernel_network.jl b/src/gp/neural_kernel_network.jl index 64e683fb..a98ebc07 100644 --- a/src/gp/neural_kernel_network.jl +++ b/src/gp/neural_kernel_network.jl @@ -1,41 +1,13 @@ -export LinearLayer, Product, Primitive, NeuralKernelNetwork, ew, pw - -using Flux -using Flux: softplus, @functor -import Flux: functor - - -# Linear layer, perform linear transformation to input array -# x₁ = W * x₀ -struct LinearLayer{T, MT<:AbstractArray{T}} - W::MT -end -@functor LinearLayer -LinearLayer(in_dim, out_dim) = LinearLayer(randn(out_dim, in_dim)) -(lin::LinearLayer)(x) = softplus.(lin.W) * x - -function Base.show(io::IO, layer::LinearLayer) - print(io, "LinearLayer(", size(layer.W, 2), ", ", size(layer.W, 1), ")") -end - - -# Product function, given an 2d array whose size is M×N, product layer will -# multiply every m neighboring rows of the array elementwisely to obtain -# an new array of size (M÷m)×N -function Product(x, step=2) - m, n = size(x) - m%step == 0 || error("the first dimension of inputs must be multiple of step") - new_x = reshape(x, step, m÷step, n) - .*([new_x[i, :, :] for i in 1:step]...) -end +export Primitive, NeuralKernelNetwork # Primitive layer, mainly act as a container to hold basic kernels for the neural kernel network -struct Primitive{T} +struct Primitive{T} <: AbstractModel kernels::T Primitive(ks...) = new{typeof(ks)}(ks) end -functor(p::Primitive) = p.kernels, ks -> Primitive(ks...) +get_iparam(p::Primitive) = Union{}[] +child(p::Primitive) = p.kernels # flatten k kernel matrices of size Mk×Nk, and concatenate these 1d array into a k×(Mk*Nk) 2d array _cat_kernel_array(x) = vcat([reshape(x[i], 1, :) for i in 1:length(x)]...) @@ -59,11 +31,12 @@ end # ( with positive coefficient ) and element-wise multiplication, we can use a neural network like structure # to build composite kernels. This type contains a `Primitive` layer which holds basic kerenls and a specialised # nerual network architecture to perform kernel composition. It should function like a normal `Stheno` kernel. -struct NeuralKernelNetwork{PT, CT} <: Kernel +struct NeuralKernelNetwork{PT<:Primitive, CT<:Chain} <: Kernel player::PT chain::CT end -@functor NeuralKernelNetwork +get_iparam(nkn::NeuralKernelNetwork) = Union{}[] +child(nkn::NeuralKernelNetwork) = (nkn.player, nkn.chain) # use this function to reshape the 1d array back to kernel matrix _rebuild_kernel(x, n, m) = reshape(x, n, m) diff --git a/src/neural_network/basic.jl b/src/neural_network/basic.jl new file mode 100644 index 00000000..70b364a3 --- /dev/null +++ b/src/neural_network/basic.jl @@ -0,0 +1,42 @@ +export LinearLayer, Product, chain + +using Base: tail + + +softplus(x) = log(1+exp(x)) +struct LinearLayer{T, MT<:AM{T}} <: AbstractModel + W::MT +end +get_iparam(l::LinearLayer) = l.W +child(l::LinearLayer) = () +LinearLayer(in_dim, out_dim) = LinearLayer(randn(out_dim, in_dim)) +(lin::LinearLayer)(x) = softplus.(lin.W) * x + +function Base.show(io::IO, layer::LinearLayer) + print(io, "LinearLayer(", size(layer.W, 2), ", ", size(layer.W, 1), ")") +end + + +struct Product <: AbstractModel + list::Tuple{Vararg{AV{Int}}} + Product(Is...) 
= new(Is) +end +get_iparam(::Product) = Union{}[] +child(::Product) = () +function (p::Product)(x) + res = [prod(x[indices, :], dims=1) for indices in p.list] + return vcat(res...) +end + + +struct Chain <: AbstractModel + models::Tuple{Vararg{AbstractModel}} + Chain(ms...) = new(ms) +end +get_iparam(::Chain) = Union{}[] +child(c::Chain) = c.models +applychain(::Tuple{}, x) = x +applychain(fs::Tuple, x) = applychain(tail(fs), first(fs)(x)) +(c::Chain)(x) = applychain(c.models, x) +chain(ms...) = Chain(ms...) + From 9da0fd48e17a5787b3dff6c33a600e547381315c Mon Sep 17 00:00:00 2001 From: hongbinren Date: Wed, 4 Mar 2020 11:39:58 +0800 Subject: [PATCH 28/42] fix bug --- src/Stheno.jl | 10 +++--- src/abstract_model.jl | 8 +++-- src/gp/kernel.jl | 67 ++++++------------------------------- src/neural_network/basic.jl | 21 +++++++----- 4 files changed, 33 insertions(+), 73 deletions(-) diff --git a/src/Stheno.jl b/src/Stheno.jl index c98acab4..9e8d573f 100644 --- a/src/Stheno.jl +++ b/src/Stheno.jl @@ -36,13 +36,16 @@ module Stheno include(joinpath("util", "proper_type_piracy.jl")) # Supertype for GPs. - include("abstract_gp.jl") include("abstract_model.jl") + include("abstract_gp.jl") + + # Neural network used in gp + include(joinpath("neural_network", "basic.jl")) # Atomic GP objects. include(joinpath("gp", "mean.jl")) include(joinpath("gp", "kernel.jl")) -# include(joinpath("gp", "neural_kernel_network.jl")) + include(joinpath("gp", "neural_kernel_network.jl")) include(joinpath("gp", "gp.jl")) # Composite GPs, constructed via affine transformation of CompositeGPs and GPs. @@ -57,9 +60,6 @@ module Stheno # include(joinpath("composite", "gradient.jl")) # include(joinpath("composite", "integrate.jl")) - # Neural network used in gp - include(joinpath("neural_network", "basic.jl")) - # Various stuff for convenience. 
include(joinpath("util", "model.jl")) include(joinpath("util", "plotting.jl")) diff --git a/src/abstract_model.jl b/src/abstract_model.jl index a2e27fc5..ff5ef489 100644 --- a/src/abstract_model.jl +++ b/src/abstract_model.jl @@ -43,8 +43,12 @@ function extract_gradient!(out, G::NamedTuple) append!(out, val) elseif val isa NamedTuple extract_gradient!(out, val) - elseif val isa Tuple && eltype(val) == NamedTuple - foreach(x->extract_gradient!(out, x), val) + elseif val isa Tuple + for each in val + if each isa NamedTuple + extract_gradient!(out, each) + end + end end end return out diff --git a/src/gp/kernel.jl b/src/gp/kernel.jl index 944d2606..fa7710c3 100644 --- a/src/gp/kernel.jl +++ b/src/gp/kernel.jl @@ -5,53 +5,6 @@ using LinearAlgebra: isposdef, checksquare abstract type Kernel <: AbstractModel end -# function get_iparam(::Kernel) end -# function child(::Kernel) end -# parameters(x::Kernel) = parameters!(parameter_eltype(x)[], x) -# parameters(x::Kernel) = parameters!(Params(), x) -# function parameters!(out, x::Kernel) -# append!(out, get_iparam(x)) -# # push!(out, get_iparam(x)) -# for x_child in child(x) -# parameters!(out, x_child) -# end -# return out -# end -# function parameter_eltype(x::Kernel) -# T = eltype(get_iparam(x)) -# for each in child(x) -# T = promote_type(T, parameter_eltype(each)) -# end -# return T -# end -# get_nparameter(x::Kernel) = length(parameters(x)) -# -# function dispatch!(k::Kernel, v::AV) -# nθ_k = get_nparameter(k) -# nθ_k == length(v) || throw(DimensionMismatch("expect $(nθ_k) parameters, got $(length(v))")) -# θ = get_iparam(k) -# copyto!(θ, 1, v, 1, length(θ)) -# loc = 1 + length(θ) -# for k′ in child(k) -# nθ_k′ = get_nparameter(k′) -# dispatch!(k′, v[loc:loc+nθ_k′-1]) -# loc += nθ_k′ -# end -# return k -# end -# extract_gradient(k::Kernel, G::NamedTuple) = extract_gradient!(parameter_eltype(k)[], G) -# function extract_gradient!(out, G::NamedTuple) -# for each in values(G) -# if each isa NamedTuple -# extract_gradient!(out, each) -# elseif each isa AV -# append!(out, each) -# end -# end -# return out -# end - - """ Definition of a particular kernel should contains: @@ -307,7 +260,7 @@ function RQ(α::Real) α > 0.0 || throw(ArgumentError("α should be positive")) RQ(typeof(α)[log(α)]) end -get_paramter(rq::RQ) = rq.logα +get_iparam(rq::RQ) = rq.logα child(::RQ) = () _rq(d, α) = (1 + d / (2α))^(-α) @@ -334,7 +287,7 @@ function Cosine(p::Real) p > 0.0 || throw(ArgumentError("p should be positive")) Cosine(typeof(p)[log(p)]) end -get_paramter(c::Cosine) = c.logp +get_iparam(c::Cosine) = c.logp child(::Cosine) = () # Binary methods. @@ -737,24 +690,24 @@ end # NOTE: `a` is not scalar any more !!! 
# Binary methods (scalar `a`, scalar-valued input) -ew(k::Stretched{<:Real}, x::AV{<:Real}, x′::AV{<:Real}) = ew(k.k, exp.(k.loga) .* x, exp.(k.loga) .* x′) -pw(k::Stretched{<:Real}, x::AV{<:Real}, x′::AV{<:Real}) = pw(k.k, exp.(k.loga) .* x, exp.(k.loga) .* x′) +ew(k::Stretched{<:Real}, x::AV{<:Real}, x′::AV{<:Real}) = ew(k.k, exp.(.-k.loga) .* x, exp.(.-k.loga) .* x′) +pw(k::Stretched{<:Real}, x::AV{<:Real}, x′::AV{<:Real}) = pw(k.k, exp.(.-k.loga) .* x, exp.(.-k.loga) .* x′) # Unary methods (scalar) -ew(k::Stretched{<:Real}, x::AV{<:Real}) = ew(k.k, exp.(k.loga) .* x) -pw(k::Stretched{<:Real}, x::AV{<:Real}) = pw(k.k, exp.(k.loga) .* x) +ew(k::Stretched{<:Real}, x::AV{<:Real}) = ew(k.k, exp.(.-k.loga) .* x) +pw(k::Stretched{<:Real}, x::AV{<:Real}) = pw(k.k, exp.(.-k.loga) .* x) # Binary methods (scalar and vector `a`, vector-valued input) function ew(k::Stretched{<:Real}, x::ColVecs, x′::ColVecs) - return ew(k.k, ColVecs(exp.(k.loga) .* x.X), ColVecs(exp.(k.loga) .* x′.X)) + return ew(k.k, ColVecs(exp.(.-k.loga) .* x.X), ColVecs(exp.(.-k.loga) .* x′.X)) end function pw(k::Stretched{<:Real}, x::ColVecs, x′::ColVecs) - return pw(k.k, ColVecs(exp.(k.loga) .* x.X), ColVecs(exp.(k.loga) .* x′.X)) + return pw(k.k, ColVecs(exp.(.-k.loga) .* x.X), ColVecs(exp.(.-k.loga) .* x′.X)) end # Unary methods (scalar and vector `a`, vector-valued input) -ew(k::Stretched{<:Real}, x::ColVecs) = ew(k.k, ColVecs(exp.(k.loga) .* x.X)) -pw(k::Stretched{<:Real}, x::ColVecs) = pw(k.k, ColVecs(exp.(k.loga) .* x.X)) +ew(k::Stretched{<:Real}, x::ColVecs) = ew(k.k, ColVecs(exp.(.-k.loga) .* x.X)) +pw(k::Stretched{<:Real}, x::ColVecs) = pw(k.k, ColVecs(exp.(.-k.loga) .* x.X)) # Binary methods (matrix `a`, vector-valued input) # function ew(k::Stretched{<:AM{<:Real}}, x::ColVecs, x′::ColVecs) diff --git a/src/neural_network/basic.jl b/src/neural_network/basic.jl index 70b364a3..dff826dd 100644 --- a/src/neural_network/basic.jl +++ b/src/neural_network/basic.jl @@ -1,4 +1,4 @@ -export LinearLayer, Product, chain +export LinearLayer, ProductLayer, chain using Base: tail @@ -17,15 +17,18 @@ function Base.show(io::IO, layer::LinearLayer) end -struct Product <: AbstractModel - list::Tuple{Vararg{AV{Int}}} - Product(Is...) = new(Is) +# when writing ProductLayer, we don't use `prod`, because broadcasting problem will +# results in gradient evaluation problem. +struct ProductLayer <: AbstractModel + step::Int end -get_iparam(::Product) = Union{}[] -child(::Product) = () -function (p::Product)(x) - res = [prod(x[indices, :], dims=1) for indices in p.list] - return vcat(res...) +get_iparam(::ProductLayer) = Union{}[] +child(::ProductLayer) = () +function (p::ProductLayer)(x) + m, n = size(x) + x1 = reshape(x, p.step, m÷p.step, n) + res = .*([x1[i, :, :] for i in 1:p.step]...) 
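+    # `res` stacks the elementwise products of each group of `step` consecutive
+    # rows of `x`, so an m×n input collapses to an (m÷step)×n array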
+ return res end From 237d99b80e05a5c9e589c705c32b39e302d4f99a Mon Sep 17 00:00:00 2001 From: hongbinren Date: Thu, 5 Mar 2020 01:15:14 +0800 Subject: [PATCH 29/42] fix bug, pass tests --- src/abstract_model.jl | 26 ++++--- src/gp/kernel.jl | 155 +++++++++++++++++++++--------------------- src/gp/mean.jl | 5 +- test/gp/kernel.jl | 4 +- 4 files changed, 99 insertions(+), 91 deletions(-) diff --git a/src/abstract_model.jl b/src/abstract_model.jl index ff5ef489..5b5576ed 100644 --- a/src/abstract_model.jl +++ b/src/abstract_model.jl @@ -4,9 +4,21 @@ const AVM = AbstractVecOrMat abstract type AbstractModel end -function get_iparam(::AbstractModel) end -function child(::AbstractModel) end +get_iparam(m::AbstractModel) = throw(UndefVarError("get_iparam method not defined for $m")) +child(m::AbstractModel) = throw(UndefVarError("child method not defined for $m")) + +parameter_eltype(::Any) = Union{} +function parameter_eltype(x::AbstractModel) + T = eltype(get_iparam(x)) + for each in child(x) + T = promote_type(T, parameter_eltype(each)) + end + return T +end + + parameters(x::AbstractModel) = parameters!(parameter_eltype(x)[], x) +parameters!(out, ::Any) = out function parameters!(out, x::AbstractModel) append!(out, get_iparam(x)) for x_child in child(x) @@ -14,15 +26,11 @@ function parameters!(out, x::AbstractModel) end return out end -function parameter_eltype(x::AbstractModel) - T = eltype(get_iparam(x)) - for each in child(x) - T = promote_type(T, parameter_eltype(each)) - end - return T -end + + get_nparameter(x::AbstractModel) = length(parameters(x)) + function dispatch!(k::AbstractModel, v::AV) nθ_k = get_nparameter(k) nθ_k == length(v) || throw(DimensionMismatch("expect $(nθ_k) parameters, got $(length(v))")) diff --git a/src/gp/kernel.jl b/src/gp/kernel.jl index fa7710c3..91e18085 100644 --- a/src/gp/kernel.jl +++ b/src/gp/kernel.jl @@ -84,6 +84,7 @@ A rank 1 kernel that returns the same value `c` everywhere. struct ConstKernel{T, cT<:AV{T}} <: Kernel c::cT end +ConstKernel(c::Real) = ConstKernel(typeof(c)[c]) get_iparam(c::ConstKernel) = c.c child(::ConstKernel) = () @@ -131,24 +132,21 @@ The usual periodic kernel derived by mapping the input domain onto the unit circ For length scales etc see [`stretch`](@ref), for variance see [`*`](@ref). """ struct PerEQ{T, LT<:AV{T}} <: Kernel - logl::LT + l::LT end -function PerEQ(l::Real) - l > 0.0 || throw(ArgumentError("l should be positive")) - PerEQ(typeof(l)[log(l)]) -end -get_iparam(per::PerEQ) = per.logl +PerEQ(l::Real) = PerEQ(typeof(l)[l]) +get_iparam(per::PerEQ) = per.l child(::PerEQ) = () _pereq(d, l) = exp(-2.0*sin(π*d)^2 / l^2) # Binary methods. -ew(k::PerEQ, x::AV, x′::AV) = _pereq.(ew(Euclidean(), x, x′), exp(k.logl[1])) -pw(k::PerEQ, x::AV, x′::AV) = _pereq.(pw(Euclidean(), x, x′), exp(k.logl[1])) +ew(k::PerEQ, x::AV, x′::AV) = _pereq.(ew(Euclidean(), x, x′), k.l[1]) +pw(k::PerEQ, x::AV, x′::AV) = _pereq.(pw(Euclidean(), x, x′), k.l[1]) # Unary methods. -ew(k::PerEQ, x::AV) = _pereq.(ew(Euclidean(), x), exp(k.logl[1])) -pw(k::PerEQ, x::AV) = _pereq.(pw(Euclidean(), x), exp(k.logl[1])) +ew(k::PerEQ, x::AV) = _pereq.(ew(Euclidean(), x), k.l[1]) +pw(k::PerEQ, x::AV) = _pereq.(pw(Euclidean(), x), k.l[1]) @@ -254,24 +252,32 @@ The standardised Rational Quadratic, with kurtosis `α`. For length scales etc see [`stretch`](@ref), for variance see [`*`](@ref). 
""" struct RQ{T, Tα<:AV{T}} <: Kernel - logα::Tα -end -function RQ(α::Real) - α > 0.0 || throw(ArgumentError("α should be positive")) - RQ(typeof(α)[log(α)]) + α::Tα end -get_iparam(rq::RQ) = rq.logα +RQ(α::Real) = RQ(typeof(α)[α]) +get_iparam(rq::RQ) = rq.α child(::RQ) = () _rq(d, α) = (1 + d / (2α))^(-α) +# define gradient for _rq manually, since automatic backprop by Zygote +# will results in type unstable +@adjoint function _rq(d::dT, α::αT) where {dT<:Real, αT<:Real} + y = _rq(d, α) + y, function (ȳ) + T = promote_type(dT, αT) + x = 1 + d / (2α) + -0.5*ȳ*y/x, ȳ*y*(d / (x*(2α)) - log(x+eps(T))) + end +end + # Binary methods. -ew(k::RQ, x::AV, x′::AV) = _rq.(ew(SqEuclidean(), x, x′), exp(k.logα[1])) -pw(k::RQ, x::AV, x′::AV) = _rq.(pw(SqEuclidean(), x, x′), exp(k.logα[1])) +ew(k::RQ, x::AV, x′::AV) = _rq.(ew(SqEuclidean(), x, x′), k.α[1]) +pw(k::RQ, x::AV, x′::AV) = _rq.(pw(SqEuclidean(), x, x′), k.α[1]) # Unary methods. -ew(k::RQ, x::AV) = _rq.(ew(SqEuclidean(), x), exp(k.logα[1])) -pw(k::RQ, x::AV) = _rq.(pw(SqEuclidean(), x), exp(k.logα[1])) +ew(k::RQ, x::AV) = _rq.(ew(SqEuclidean(), x), k.α[1]) +pw(k::RQ, x::AV) = _rq.(pw(SqEuclidean(), x), k.α[1]) @@ -281,22 +287,19 @@ pw(k::RQ, x::AV) = _rq.(pw(SqEuclidean(), x), exp(k.logα[1])) Cosine Kernel with period parameter `p`. """ struct Cosine{T, Tp<:AV{T}} <: Kernel - logp::Tp -end -function Cosine(p::Real) - p > 0.0 || throw(ArgumentError("p should be positive")) - Cosine(typeof(p)[log(p)]) + p::Tp end -get_iparam(c::Cosine) = c.logp +Cosine(p::Real) = Cosine(typeof(p)[p]) +get_iparam(c::Cosine) = c.p child(::Cosine) = () # Binary methods. -ew(k::Cosine, x::AV{<:Real}, x′::AV{<:Real}) = cos.(pi.*ew(Euclidean(), x, x′) ./exp(k.logp[1])) -pw(k::Cosine, x::AV{<:Real}, x′::AV{<:Real}) = cos.(pi.*pw(Euclidean(), x, x′) ./exp(k.logp[1])) +ew(k::Cosine, x::AV{<:Real}, x′::AV{<:Real}) = cos.(pi.*ew(Euclidean(), x, x′) ./ k.p[1]) +pw(k::Cosine, x::AV{<:Real}, x′::AV{<:Real}) = cos.(pi.*pw(Euclidean(), x, x′) ./ k.p[1]) # Unary methods. 
ew(k::Cosine, x::AV{<:Real}) = 1 .+ ew(Euclidean(), x) -pw(k::Cosine, x::AV{<:Real}) = cos.(pi .* pw(Euclidean(), x) ./exp(k.logp[1])) +pw(k::Cosine, x::AV{<:Real}) = cos.(pi .* pw(Euclidean(), x) ./ k.p[1]) @@ -333,13 +336,10 @@ k(xl, xr) = (dot(xl, xr) + σ²)^p ``` """ struct Poly{p, T, Tσ²<:AV{T}} <: Kernel - logσ²::Tσ² + σ²::Tσ² end -function Poly(p::Int, σ²::Real) - σ²>0.0 || throw(ArgumentError("σ² should be positive")) - Poly{p, typeof(σ²), AV{typeof(σ²)}}(typeof(σ²)[σ²]) -end -get_iparam(p::Poly) = p.logσ² +Poly(p::Int, σ²::Real) = Poly{p, typeof(σ²), AV{typeof(σ²)}}(typeof(σ²)[σ²]) +get_iparam(p::Poly) = p.σ² child(::Poly) = () _poly(k, σ², p) = (σ² + k)^p @@ -352,12 +352,12 @@ Zygote.@adjoint function _poly(k, σ², p) end # Binary methods -ew(k::Poly{p}, x::AV, x′::AV) where {p} = _poly.(ew(Linear(), x, x′), exp(k.logσ²[1]), p) -pw(k::Poly{p}, x::AV, x′::AV) where {p} = _poly.(pw(Linear(), x, x′), exp(k.logσ²[1]), p) +ew(k::Poly{p}, x::AV, x′::AV) where {p} = _poly.(ew(Linear(), x, x′), k.σ²[1], p) +pw(k::Poly{p}, x::AV, x′::AV) where {p} = _poly.(pw(Linear(), x, x′), k.σ²[1], p) # Unary methods -ew(k::Poly{p}, x::AV) where {p} = _poly.(ew(Linear(), x), exp(k.logσ²), p) -pw(k::Poly{p}, x::AV) where {p} = _poly.(pw(Linear(), x), exp(k.logσ²), p) +ew(k::Poly{p}, x::AV) where {p} = _poly.(ew(Linear(), x), k.σ²[1], p) +pw(k::Poly{p}, x::AV) where {p} = _poly.(pw(Linear(), x), k.σ²[1], p) @@ -366,17 +366,20 @@ pw(k::Poly{p}, x::AV) where {p} = _poly.(pw(Linear(), x), exp(k.logσ²), p) The γ-Exponential kernel, 0 < γ ⩽ 2, is given by `k(xl, xr) = exp(-||xl - xr||^γ)`. """ -struct GammaExp{Tγ<:Real} <: Kernel +struct GammaExp{T, Tγ<:AV{T}} <: Kernel γ::Tγ end +GammaExp(γ::Real) = GammaExp(typeof(γ)[γ]) +get_iparam(g::GammaExp) = g.γ +child(::GammaExp) = () # Binary methods -ew(k::GammaExp, x::AV, x′::AV) = exp.(.-ew(Euclidean(), x, x′).^k.γ) -pw(k::GammaExp, x::AV, x′::AV) = exp.(.-pw(Euclidean(), x, x′).^k.γ) +ew(k::GammaExp, x::AV, x′::AV) = exp.(.-ew(Euclidean(), x, x′).^k.γ[1]) +pw(k::GammaExp, x::AV, x′::AV) = exp.(.-pw(Euclidean(), x, x′).^k.γ[1]) # Unary methods -ew(k::GammaExp, x::AV) = exp.(.-ew(Euclidean(), x).^k.γ) -pw(k::GammaExp, x::AV) = exp.(.-pw(Euclidean(), x).^k.γ) +ew(k::GammaExp, x::AV) = exp.(.-ew(Euclidean(), x).^k.γ[1]) +pw(k::GammaExp, x::AV) = exp.(.-pw(Euclidean(), x).^k.γ[1]) @@ -555,14 +558,11 @@ Scaled{Tσ²<:AV{<:Real}, Tk<:Kernel} <: Kernel Scale the variance of `Kernel` `k` by `σ²` s.t. `(σ² * k)(x, x′) = σ² * k(x, x′)`. """ struct Scaled{T, Tσ²<:AV{T}, Tk<:Kernel} <: Kernel - logσ²::Tσ² + σ²::Tσ² k::Tk end -function Scaled(σ²::Real, k::Kernel) - σ²>0.0 || throw(ArgumentError("σ² should be positive")) - Scaled(typeof(σ²)[log(σ²)], k) -end -get_iparam(s::Scaled) = s.logσ² +Scaled(σ²::Real, k::Kernel) = Scaled(typeof(σ²)[σ²], k) +get_iparam(s::Scaled) = s.σ² child(s::Scaled) = (s.k,) """ *(σ²::Real, k::Kernel) @@ -584,12 +584,12 @@ true *(k::Kernel, σ²) = σ² * k # Binary methods. -ew(k::Scaled, x::AV, x′::AV) = exp(k.logσ²[1]) .* ew(k.k, x, x′) -pw(k::Scaled, x::AV, x′::AV) = exp(k.logσ²[1]) .* pw(k.k, x, x′) +ew(k::Scaled, x::AV, x′::AV) = k.σ²[1] .* ew(k.k, x, x′) +pw(k::Scaled, x::AV, x′::AV) = k.σ²[1] .* pw(k.k, x, x′) # Unary methods. -ew(k::Scaled, x::AV) = exp(k.logσ²[1]) .* ew(k.k, x) -pw(k::Scaled, x::AV) = exp(k.logσ²[1]) .* pw(k.k, x) +ew(k::Scaled, x::AV) = k.σ²[1] .* ew(k.k, x) +pw(k::Scaled, x::AV) = k.σ²[1] .* pw(k.k, x) @@ -598,11 +598,11 @@ pw(k::Scaled, x::AV) = exp(k.logσ²[1]) .* pw(k.k, x) Apply a length scale to a kernel. 
Specifically, `k(x, x′) = k(a * x, a * x′)`. """ -struct Stretched{T, Ta<:Union{AV{T}, AM{T}}, Tk<:Kernel} <: Kernel - loga::Ta +struct Stretched{T, Ta<:AVM{T}, Tk<:Kernel} <: Kernel + a::Ta k::Tk end -get_iparam(s::Stretched) = s.loga +get_iparam(s::Stretched) = s.a child(s::Stretched) = (s.k,) """ stretch(k::Kernel, a::Union{Real, AbstractVecOrMat{<:Real}) @@ -683,43 +683,40 @@ K = pairwise(k, xs, ys) ``` """ stretch(k::Kernel, a::Real) = stretch(k, typeof(a)[a]) -function stretch(k::Kernel, a::AbstractVecOrMat{<:Real}) - all(a.>0.0) || throw(ArgumentError("all element of a should be positive")) - Stretched(log.(a), k) -end +stretch(k::Kernel, a::AVM{<:Real}) = Stretched(a, k) # NOTE: `a` is not scalar any more !!! # Binary methods (scalar `a`, scalar-valued input) -ew(k::Stretched{<:Real}, x::AV{<:Real}, x′::AV{<:Real}) = ew(k.k, exp.(.-k.loga) .* x, exp.(.-k.loga) .* x′) -pw(k::Stretched{<:Real}, x::AV{<:Real}, x′::AV{<:Real}) = pw(k.k, exp.(.-k.loga) .* x, exp.(.-k.loga) .* x′) +ew(k::Stretched{<:Real, <:AV{<:Real}}, x::AV{<:Real}, x′::AV{<:Real}) = ew(k.k, k.a .* x, k.a .* x′) +pw(k::Stretched{<:Real, <:AV{<:Real}}, x::AV{<:Real}, x′::AV{<:Real}) = pw(k.k, k.a .* x, k.a .* x′) # Unary methods (scalar) -ew(k::Stretched{<:Real}, x::AV{<:Real}) = ew(k.k, exp.(.-k.loga) .* x) -pw(k::Stretched{<:Real}, x::AV{<:Real}) = pw(k.k, exp.(.-k.loga) .* x) +ew(k::Stretched{<:Real, <:AV{<:Real}}, x::AV{<:Real}) = ew(k.k, k.a .* x) +pw(k::Stretched{<:Real, <:AV{<:Real}}, x::AV{<:Real}) = pw(k.k, k.a .* x) # Binary methods (scalar and vector `a`, vector-valued input) -function ew(k::Stretched{<:Real}, x::ColVecs, x′::ColVecs) - return ew(k.k, ColVecs(exp.(.-k.loga) .* x.X), ColVecs(exp.(.-k.loga) .* x′.X)) +function ew(k::Stretched{<:Real, <:AV{<:Real}}, x::ColVecs, x′::ColVecs) + return ew(k.k, ColVecs(k.a .* x.X), ColVecs(k.a .* x′.X)) end -function pw(k::Stretched{<:Real}, x::ColVecs, x′::ColVecs) - return pw(k.k, ColVecs(exp.(.-k.loga) .* x.X), ColVecs(exp.(.-k.loga) .* x′.X)) +function pw(k::Stretched{<:Real, <:AV{<:Real}}, x::ColVecs, x′::ColVecs) + return pw(k.k, ColVecs(k.a .* x.X), ColVecs(k.a .* x′.X)) end # Unary methods (scalar and vector `a`, vector-valued input) -ew(k::Stretched{<:Real}, x::ColVecs) = ew(k.k, ColVecs(exp.(.-k.loga) .* x.X)) -pw(k::Stretched{<:Real}, x::ColVecs) = pw(k.k, ColVecs(exp.(.-k.loga) .* x.X)) +ew(k::Stretched{<:Real, <:AV{<:Real}}, x::ColVecs) = ew(k.k, ColVecs(k.a .* x.X)) +pw(k::Stretched{<:Real, <:AV{<:Real}}, x::ColVecs) = pw(k.k, ColVecs(k.a .* x.X)) # Binary methods (matrix `a`, vector-valued input) -# function ew(k::Stretched{<:AM{<:Real}}, x::ColVecs, x′::ColVecs) -# return ew(k.k, ColVecs(exp.(-k.loga) * x.X), ColVecs(exp.(.-k.loga) * x′.X)) -# end -# function pw(k::Stretched{<:AM{<:Real}}, x::ColVecs, x′::ColVecs) -# return pw(k.k, ColVecs(exp.(-k.loga) * x.X), ColVecs(exp.(.-k.loga) * x′.X)) -# end +function ew(k::Stretched{<:Real, <:AM{<:Real}}, x::ColVecs, x′::ColVecs) + return ew(k.k, ColVecs(k.a * x.X), ColVecs(k.a * x′.X)) +end +function pw(k::Stretched{<:Real, <:AM{<:Real}}, x::ColVecs, x′::ColVecs) + return pw(k.k, ColVecs(k.a * x.X), ColVecs(k.a * x′.X)) +end # Unary methods (scalar and vector `a`, vector-valued input) -# ew(k::Stretched{<:AM{<:Real}}, x::ColVecs) = ew(k.k, ColVecs(exp.(.-k.loga) * x.X)) -# pw(k::Stretched{<:AM{<:Real}}, x::ColVecs) = pw(k.k, ColVecs(exp.(.-k.loga) * x.X)) +ew(k::Stretched{<:Real, <:AM{<:Real}}, x::ColVecs) = ew(k.k, ColVecs(k.a * x.X)) +pw(k::Stretched{<:Real, <:AM{<:Real}}, x::ColVecs) = pw(k.k, 
ColVecs(k.a * x.X))

diff --git a/src/gp/mean.jl b/src/gp/mean.jl
index 47c453ff..5651c0bb 100644
--- a/src/gp/mean.jl
+++ b/src/gp/mean.jl
@@ -39,9 +39,10 @@ Returns `c` everywhere.
 struct ConstMean{T, cT<:AV{T}} <: MeanFunction
     c::cT
 end
+ConstMean(c::Real) = ConstMean(typeof(c)[c])
 get_iparam(c::ConstMean) = c.c
 child(::ConstMean) = ()
-ew(m::ConstMean, x::AV) = fill(m.c, length(x))
+ew(m::ConstMean, x::AV) = fill(m.c[1], length(x))
@@ -53,4 +54,6 @@ A wrapper around whatever unary function you fancy.
 struct CustomMean{Tf} <: MeanFunction
     f::Tf
 end
+get_iparam(::CustomMean) = Union{}[]
+child(c::CustomMean) = (c.f,)
 ew(f::CustomMean, x::AV) = map(f.f, x)
diff --git a/test/gp/kernel.jl b/test/gp/kernel.jl
index 906c8d69..7f334d3d 100644
--- a/test/gp/kernel.jl
+++ b/test/gp/kernel.jl
@@ -42,7 +42,7 @@ using LinearAlgebra
     end
 
     @timedtestset "PerEQ" begin
-        differentiable_kernel_tests(PerEQ(), ȳ, Ȳ, Ȳ_sq, x0, x1, x2; atol=1e-6)
+        differentiable_kernel_tests(PerEQ(1.0), ȳ, Ȳ, Ȳ_sq, x0, x1, x2; atol=1e-6)
     end
 
     @timedtestset "Exp" begin
@@ -73,7 +73,7 @@ using LinearAlgebra
         differentiable_kernel_tests(RQ(100.0), ȳ, Ȳ, Ȳ_sq, x0, x1, x2)
         differentiable_kernel_tests(RQ(100.0), ȳ, Ȳ, Ȳ_sq, X0, X1, X2)
     end
-    @timedtestset "single-input" begin
+    @timedtestset "single-input" begin # Zygote returns Complex{Float64}; should be Float64
        adjoint_test((α, x, x′)->ew(RQ(α), x, x′), ȳ, 1.5, x0, x1)
        adjoint_test((α, x, x′)->pw(RQ(α), x, x′), Ȳ, 1.5, x0, x2)
        adjoint_test((α, x)->ew(RQ(α), x), ȳ, 1.5, x0)

From 4a98ba8dc9f76f5fbb226ad48b169df5cd9ca0bf Mon Sep 17 00:00:00 2001
From: hongbinren
Date: Thu, 5 Mar 2020 10:51:29 +0800
Subject: [PATCH 30/42] add kernel parameter constraint, redefine an interface
 for Scaled, update example

---
 .../neural_kernel_network/predict.png         | Bin 0 -> 56190 bytes
 .../neural_kernel_network/time_series.jl      | 142 ++++++++++--------
 src/gp/kernel.jl                              | 121 ++++++++-------
 test/gp/kernel.jl                             |   6 +-
 4 files changed, 153 insertions(+), 116 deletions(-)
 create mode 100644 examples/flux_integration/neural_kernel_network/predict.png

diff --git a/examples/flux_integration/neural_kernel_network/predict.png b/examples/flux_integration/neural_kernel_network/predict.png
new file mode 100644
index 0000000000000000000000000000000000000000..3304863c2872032a3f90eb9f7d8a06e16b1978c0
GIT binary patch
literal 56190
[56190 bytes of base85-encoded image data for predict.png omitted]
z?<#!_iZ5V)&l=c)S+r(N53rDG0cO>_-EYl+8}B;z@3#k?pbFY546JN{7rxE{-cbDI z`-fHm<_?ve+=^_yzyi+h_ZwqimAn%eq&tDPo`Gmf;1zP9x|ji&xz|K*?_0D;33OEK zKj3<`Pr%b&rUH97b#i+JfGuGoUT6~#qb*28#{65KGs(7GBHB!y0SG)@{an^LB{Ts5 Du?L*L literal 0 HcmV?d00001 diff --git a/examples/flux_integration/neural_kernel_network/time_series.jl b/examples/flux_integration/neural_kernel_network/time_series.jl index dcaa8693..6201dc67 100644 --- a/examples/flux_integration/neural_kernel_network/time_series.jl +++ b/examples/flux_integration/neural_kernel_network/time_series.jl @@ -1,28 +1,20 @@ -# Set up the environment to run this example. Make sure you're within the folder that this -# file lives in. -using Pkg -Pkg.activate(@__DIR__) -Pkg.instantiate() -using Stheno +using LinearAlgebra, Stheno, Flux, Zygote, DelimitedFiles, Statistics using Plots; pyplot(); using Random; Random.seed!(4); -using Flux -using Zygote -using DelimitedFiles -using Statistics - -# read AirPass data +###################################################### +# Data loading +## read AirPass data data = readdlm("AirPassengers.csv", ',') year = data[2:end,2]; passengers = data[2:end,3]; -# Split the data into training and testing data +## Split the data into training and testing data oxtrain = year[year.<1958]; oytrain = passengers[year.<1958]; oxtest = year[year.>=1958]; oytest = passengers[year.>=1958]; -#data preprocessing -## standardize X and y +##data preprocessing +### standardize X and y xtrain_mean = mean(oxtrain) ytrain_mean = mean(oytrain) xtrain_std = std(oxtrain) @@ -34,13 +26,21 @@ xtest = @. (oxtest-xtrain_mean)/xtrain_std ytest = @. (oytest-ytrain_mean)/ytrain_std ## input data -Xtrain = reshape(xtrain, 1, length(xtrain)); -Xtest = reshape(xtest, 1, length(xtest)); -Year = hcat(Xtrain, Xtest); +Xtrain = reshape(xtrain, 1, length(xtrain)) +Xtest = reshape(xtest, 1, length(xtest)) +Year = hcat(Xtrain, Xtest) Passengers = vcat(ytrain, ytest) +###################################################### + +plt = plot(xlabel="Year", ylabel="Airline Passenger number", legend=true) +scatter!(plt, oxtrain, oytrain, label="Observations(train)", color=:black) + + -# kernel parameter initialization +###################################################### +# Build kernel with Neural Kernel Network +## kernel length scale initialization function median_distance_local(x) n = length(x) dist = [] @@ -53,67 +53,85 @@ function median_distance_local(x) end l = median_distance_local(xtrain) +## kernel parameter constraint +g1(x) = exp(-x) +g2(x) = exp(x) -# construct kernels -iso_lin_kernel1 = stretch(Linear(), [0.0]) -iso_per_kernel1 = [0.0] * stretch(PerEQ([log(l)]), [log(l)]) -iso_eq_kernel1 = [0.0] * stretch(EQ(), [log(l/4.0)]) -iso_rq_kernel1 = [0.0] * stretch(RQ([log(0.2)]), [log(2.0*l)]) -iso_lin_kernel2 = stretch(Linear(), [0.0]) -iso_rq_kernel2 = [0.0] * stretch(RQ([log(0.1)]), [log(l)]) -iso_eq_kernel2 = [0.0] * stretch(EQ(), [log(l)]) -iso_per_kernel2 = [0.0] * stretch(PerEQ([log(l/4.0)]), [log(l/4.0)]) +## define kernels +iso_lin_kernel1 = stretch(Linear(), log(1.0), g1) +iso_per_kernel1 = scale(stretch(PerEQ(log(l), g2), log(l), g1), log(1.0), g2) +iso_eq_kernel1 = scale(stretch(EQ(), log(l/4.0), g1), log(1.0), g2) +iso_rq_kernel1 = scale(stretch(RQ(log(0.2), g2), log(2.0*l), g1), log(1.0), g2) +iso_lin_kernel2 = stretch(Linear(), log(1.0), g1) +iso_rq_kernel2 = scale(stretch(RQ(log(0.1), g2), log(l), g1), log(1.0), g2) +iso_eq_kernel2 = scale(stretch(EQ(), log(l), g1), log(1.0), g2) +iso_per_kernel2 = scale(stretch(PerEQ(log(l/4.0), g2), log(l/4.0), g1), 
log(1.0), g2)

-# sum product network
+# define network
 linear1 = LinearLayer(8, 8)
+prod1 = ProductLayer(2)
 linear2 = LinearLayer(4, 4)
+prod2 = ProductLayer(2)
 linear3 = LinearLayer(2, 1)

-# NKN
+## NKN
 player = Primitive(iso_lin_kernel1, iso_per_kernel1, iso_eq_kernel1, iso_rq_kernel1,
-                  iso_lin_kernel2, iso_rq_kernel2, iso_eq_kernel2, iso_per_kernel2)
-nn = Chain(linear1, Product, linear2, Product, linear3)
+    iso_lin_kernel2, iso_rq_kernel2, iso_eq_kernel2, iso_per_kernel2)
+nn = chain(linear1, prod1, linear2, prod2, linear3)
 nkn = NeuralKernelNetwork(player, nn)
+#############################################################

-# build GP model
-σ²_n = 0.1
-gp = GP(nkn, GPC())
-gp_Xtrain = gp(ColVecs(Xtrain), σ²_n)
-ps = params(nkn)
+# Do some common calculations
+σ²_n = 0.1 # specify Gaussian noise
+gp = GP(nkn, GPC()) # define GP
+loss(m, x, y) = -logpdf(m(ColVecs(x), σ²_n), y) # define loss & compute negative log likelihood
+loss(gp, Xtrain, ytrain)
+∂gp, = gradient(m->loss(m, Xtrain, ytrain), gp) # compute derivative of loss w.r.t. GP parameters

-# optimize
+# extract all parameters from the GP model
+l_ps = parameters(gp) |> length
+# extract the corresponding gradients from the derivative (or conjugate of the GP model)
+l_∂ps = extract_gradient(gp, ∂gp) |> length
+# make sure parameters and gradients are in one-to-one correspondence
+@assert l_ps == l_∂ps
+
+
+#############################################################
+# Optimize GP parameters w.r.t. training data
 using Flux.Optimise: update!

 optimizer = ADAM(0.001)
-loss = []
+L = []
 for i in 1:5000
-    ll = .-logpdf(gp_Xtrain, ytrain)
-    push!(loss, ll)
-    if i==1 || i%100 == 0
-        @info "step=$i, loss=$ll"
-    end
-    gs = gradient(()->.-logpdf(gp_Xtrain, ytrain), ps)
-    for p in ps
-        update!(optimizer, p, gs[p])
+    nll = loss(gp, Xtrain, ytrain)
+    push!(L, nll)
+    if i==1 || i%200 == 0
+        @info "step=$i, loss=$nll"
     end
+    ps = parameters(gp)
+    ∂gp, = gradient(m->loss(m, Xtrain, ytrain), gp)
+
+    Δps = extract_gradient(gp, ∂gp)
+    update!(optimizer, ps, Δps)
+    dispatch!(gp, ps) # dispatch! writes the updated parameters back into the GP model
 end

-plt1 = plot(legend=false, xlabel="Epoches", ylabel="Negative log-likelihood")
-plot!(plt1, loss)
-png(plt1, "loss.png")
+# you can view the loss curve
+# plot(L, legend=false)
+#############################################################

-# predict
-function predict(X, Xtrain, ytrain)
-    gp = GP(nkn, GPC())
+#############################################################
+# make prediction
+function predict(gp, X, Xtrain, ytrain)
     gp_Xtrain = gp(ColVecs(Xtrain), σ²_n)
     posterior = gp | Obs(gp_Xtrain, ytrain)
     posterior(ColVecs(X))
 end

-posterior = predict(Year, Xtrain, ytrain)
+posterior = predict(gp, Year, Xtrain, ytrain)
 post_dist = marginals(posterior)
 pred_y = mean.(post_dist)
 var_y = std.(post_dist)
@@ -121,9 +139,15 @@ var_y = std.(post_dist)
 pred_oy = @. pred_y*ytrain_std+ytrain_mean
 pred_oσ = @. var_y*ytrain_std

-plt2 = plot(xlabel="Year", ylabel="Airline Passenger number", legend=true)
-plot!(plt2, year, pred_oy, ribbons=3*pred_oσ, title="Time series prediction", label="95% predictive confidence region")
-scatter!(plt2, oxtest, oytest, label="Observations(test)", color=:red)
-scatter!(plt2, oxtrain, oytrain, label="Observations(train)", color=:black)
-png(plt2, "time_series.png")
+plot!(plt, year, pred_oy, ribbons=3*pred_oσ, title="Time series prediction", label="95% predictive confidence region")
+scatter!(plt, oxtest, oytest, label="Observations(test)", color=:red)
+display(plt)
+##############################################################
+
+
+
+
+
+
+
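The updated example swaps Flux's implicit `params` tracking for an explicit flatten/unflatten cycle. One optimisation step, condensed from the loop above (a sketch reusing the example's own names, not additional patch content):

```julia
θ = parameters(gp)                                 # flat vector of raw (log-scale) parameters
∂gp, = gradient(m -> loss(m, Xtrain, ytrain), gp)  # differentiate w.r.t. the model itself
Δθ = extract_gradient(gp, ∂gp)                     # flatten the gradients to line up with θ
update!(optimizer, θ, Δθ)                          # ADAM step on the flat vector
dispatch!(gp, θ)                                   # write the updated values back into the model
```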
diff --git a/src/gp/kernel.jl b/src/gp/kernel.jl
index 91e18085..ce91bd07 100644
--- a/src/gp/kernel.jl
+++ b/src/gp/kernel.jl
@@ -28,7 +28,7 @@ export Kernel, kernel, elementwise, pairwise, ew, pw
 export EQ, Exp, PerEQ, Matern12, Matern32, Matern52, RQ, Cosine, Linear, Poly, GammaExp,
     Wiener, WienerVelocity, Precomputed

-
+export scale, stretch

 #
 # Base Kernels
@@ -131,22 +131,24 @@ The usual periodic kernel derived by mapping the input domain onto the unit circ

 For length scales etc see [`stretch`](@ref), for variance see [`*`](@ref).
 """
-struct PerEQ{T, LT<:AV{T}} <: Kernel
+struct PerEQ{T, LT<:AV{T}, fT} <: Kernel
     l::LT
+    f::fT
 end
-PerEQ(l::Real) = PerEQ(typeof(l)[l])
+PerEQ(l::Real, f) = PerEQ(typeof(l)[l], f)
+PerEQ(l::Real) = PerEQ(l, identity)
 get_iparam(per::PerEQ) = per.l
 child(::PerEQ) = ()

 _pereq(d, l) = exp(-2.0*sin(π*d)^2 / l^2)

 # Binary methods.
-ew(k::PerEQ, x::AV, x′::AV) = _pereq.(ew(Euclidean(), x, x′), k.l[1])
-pw(k::PerEQ, x::AV, x′::AV) = _pereq.(pw(Euclidean(), x, x′), k.l[1])
+ew(k::PerEQ, x::AV, x′::AV) = _pereq.(ew(Euclidean(), x, x′), k.f(k.l[1]))
+pw(k::PerEQ, x::AV, x′::AV) = _pereq.(pw(Euclidean(), x, x′), k.f(k.l[1]))

 # Unary methods.
-ew(k::PerEQ, x::AV) = _pereq.(ew(Euclidean(), x), k.l[1])
-pw(k::PerEQ, x::AV) = _pereq.(pw(Euclidean(), x), k.l[1])
+ew(k::PerEQ, x::AV) = _pereq.(ew(Euclidean(), x), k.f(k.l[1]))
+pw(k::PerEQ, x::AV) = _pereq.(pw(Euclidean(), x), k.f(k.l[1]))

@@ -251,10 +253,12 @@ The standardised Rational Quadratic, with kurtosis `α`.

 For length scales etc see [`stretch`](@ref), for variance see [`*`](@ref).
 """
-struct RQ{T, Tα<:AV{T}} <: Kernel
+struct RQ{T, Tα<:AV{T}, fT} <: Kernel
     α::Tα
+    f::fT
 end
-RQ(α::Real) = RQ(typeof(α)[α])
+RQ(α::Real, f) = RQ(typeof(α)[α], f)
+RQ(α::Real) = RQ(α, identity)
 get_iparam(rq::RQ) = rq.α
 child(::RQ) = ()

@@ -272,12 +276,12 @@ _rq(d, α) = (1 + d / (2α))^(-α)
 end

 # Binary methods.
-ew(k::RQ, x::AV, x′::AV) = _rq.(ew(SqEuclidean(), x, x′), k.α[1])
-pw(k::RQ, x::AV, x′::AV) = _rq.(pw(SqEuclidean(), x, x′), k.α[1])
+ew(k::RQ, x::AV, x′::AV) = _rq.(ew(SqEuclidean(), x, x′), k.f(k.α[1]))
+pw(k::RQ, x::AV, x′::AV) = _rq.(pw(SqEuclidean(), x, x′), k.f(k.α[1]))

 # Unary methods.
-ew(k::RQ, x::AV) = _rq.(ew(SqEuclidean(), x), k.α[1])
-pw(k::RQ, x::AV) = _rq.(pw(SqEuclidean(), x), k.α[1])
+ew(k::RQ, x::AV) = _rq.(ew(SqEuclidean(), x), k.f(k.α[1]))
+pw(k::RQ, x::AV) = _rq.(pw(SqEuclidean(), x), k.f(k.α[1]))

@@ -286,20 +290,22 @@ pw(k::RQ, x::AV) = _rq.(pw(SqEuclidean(), x), k.α[1])

 Cosine Kernel with period parameter `p`.
 """
-struct Cosine{T, Tp<:AV{T}} <: Kernel
+struct Cosine{T, Tp<:AV{T}, fT} <: Kernel
     p::Tp
+    f::fT
 end
-Cosine(p::Real) = Cosine(typeof(p)[p])
+Cosine(p::Real, f) = Cosine(typeof(p)[p], f)
+Cosine(p::Real) = Cosine(p, identity)
 get_iparam(c::Cosine) = c.p
 child(::Cosine) = ()

 # Binary methods.
-ew(k::Cosine, x::AV{<:Real}, x′::AV{<:Real}) = cos.(pi.*ew(Euclidean(), x, x′) ./ k.p[1]) -pw(k::Cosine, x::AV{<:Real}, x′::AV{<:Real}) = cos.(pi.*pw(Euclidean(), x, x′) ./ k.p[1]) +ew(k::Cosine, x::AV{<:Real}, x′::AV{<:Real}) = cos.(pi.*ew(Euclidean(), x, x′) ./ k.f(k.p[1])) +pw(k::Cosine, x::AV{<:Real}, x′::AV{<:Real}) = cos.(pi.*pw(Euclidean(), x, x′) ./ k.f(k.p[1])) # Unary methods. ew(k::Cosine, x::AV{<:Real}) = 1 .+ ew(Euclidean(), x) -pw(k::Cosine, x::AV{<:Real}) = cos.(pi .* pw(Euclidean(), x) ./ k.p[1]) +pw(k::Cosine, x::AV{<:Real}) = cos.(pi .* pw(Euclidean(), x) ./ k.f(k.p[1])) @@ -335,10 +341,12 @@ defined as k(xl, xr) = (dot(xl, xr) + σ²)^p ``` """ -struct Poly{p, T, Tσ²<:AV{T}} <: Kernel +struct Poly{p, T, Tσ²<:AV{T}, fT} <: Kernel σ²::Tσ² + f::fT end -Poly(p::Int, σ²::Real) = Poly{p, typeof(σ²), AV{typeof(σ²)}}(typeof(σ²)[σ²]) +Poly(p::Int, σ²::Real, f) = Poly{p, typeof(σ²), AV{typeof(σ²)}, typeof(f)}(typeof(σ²)[σ²], f) +Poly(p::Int, σ²::Real) = Poly(p, σ², identity) get_iparam(p::Poly) = p.σ² child(::Poly) = () @@ -352,12 +360,12 @@ Zygote.@adjoint function _poly(k, σ², p) end # Binary methods -ew(k::Poly{p}, x::AV, x′::AV) where {p} = _poly.(ew(Linear(), x, x′), k.σ²[1], p) -pw(k::Poly{p}, x::AV, x′::AV) where {p} = _poly.(pw(Linear(), x, x′), k.σ²[1], p) +ew(k::Poly{p}, x::AV, x′::AV) where {p} = _poly.(ew(Linear(), x, x′), k.f(k.σ²[1]), p) +pw(k::Poly{p}, x::AV, x′::AV) where {p} = _poly.(pw(Linear(), x, x′), k.f(k.σ²[1]), p) # Unary methods -ew(k::Poly{p}, x::AV) where {p} = _poly.(ew(Linear(), x), k.σ²[1], p) -pw(k::Poly{p}, x::AV) where {p} = _poly.(pw(Linear(), x), k.σ²[1], p) +ew(k::Poly{p}, x::AV) where {p} = _poly.(ew(Linear(), x), k.f(k.σ²[1]), p) +pw(k::Poly{p}, x::AV) where {p} = _poly.(pw(Linear(), x), k.f(k.σ²[1]), p) @@ -366,20 +374,22 @@ pw(k::Poly{p}, x::AV) where {p} = _poly.(pw(Linear(), x), k.σ²[1], p) The γ-Exponential kernel, 0 < γ ⩽ 2, is given by `k(xl, xr) = exp(-||xl - xr||^γ)`. """ -struct GammaExp{T, Tγ<:AV{T}} <: Kernel +struct GammaExp{T, Tγ<:AV{T}, fT} <: Kernel γ::Tγ + f::fT end -GammaExp(γ::Real) = GammaExp(typeof(γ)[γ]) +GammaExp(γ::Real, f) = GammaExp(typeof(γ)[γ], f) +GammaExp(γ::Real) = GammaExp(γ, identity) get_iparam(g::GammaExp) = g.γ child(::GammaExp) = () # Binary methods -ew(k::GammaExp, x::AV, x′::AV) = exp.(.-ew(Euclidean(), x, x′).^k.γ[1]) -pw(k::GammaExp, x::AV, x′::AV) = exp.(.-pw(Euclidean(), x, x′).^k.γ[1]) +ew(k::GammaExp, x::AV, x′::AV) = exp.(.-ew(Euclidean(), x, x′).^(k.f(k.γ[1]))) +pw(k::GammaExp, x::AV, x′::AV) = exp.(.-pw(Euclidean(), x, x′).^(k.f(k.γ[1]))) # Unary methods -ew(k::GammaExp, x::AV) = exp.(.-ew(Euclidean(), x).^k.γ[1]) -pw(k::GammaExp, x::AV) = exp.(.-pw(Euclidean(), x).^k.γ[1]) +ew(k::GammaExp, x::AV) = exp.(.-ew(Euclidean(), x).^(k.f(k.γ[1]))) +pw(k::GammaExp, x::AV) = exp.(.-pw(Euclidean(), x).^(k.f(k.γ[1]))) @@ -557,11 +567,13 @@ Scaled{Tσ²<:AV{<:Real}, Tk<:Kernel} <: Kernel Scale the variance of `Kernel` `k` by `σ²` s.t. `(σ² * k)(x, x′) = σ² * k(x, x′)`. """ -struct Scaled{T, Tσ²<:AV{T}, Tk<:Kernel} <: Kernel +struct Scaled{T, Tσ²<:AV{T}, Tk<:Kernel, fT} <: Kernel σ²::Tσ² k::Tk + f::fT end -Scaled(σ²::Real, k::Kernel) = Scaled(typeof(σ²)[σ²], k) +scale(k::Kernel, σ²::Real, f) = Scaled(typeof(σ²)[σ²], k, f) +scale(k::Kernel, σ²::Real) = scale(k, σ², identity) get_iparam(s::Scaled) = s.σ² child(s::Scaled) = (s.k,) """ @@ -580,16 +592,16 @@ true ``` """ # NOTE: σ² is in log scale !!! 
-*(σ²::Real, k::Kernel) = Scaled(σ², k) +*(σ²::Real, k::Kernel) = scale(k, σ²) *(k::Kernel, σ²) = σ² * k # Binary methods. -ew(k::Scaled, x::AV, x′::AV) = k.σ²[1] .* ew(k.k, x, x′) -pw(k::Scaled, x::AV, x′::AV) = k.σ²[1] .* pw(k.k, x, x′) +ew(k::Scaled, x::AV, x′::AV) = k.f(k.σ²[1]) .* ew(k.k, x, x′) +pw(k::Scaled, x::AV, x′::AV) = k.f(k.σ²[1]) .* pw(k.k, x, x′) # Unary methods. -ew(k::Scaled, x::AV) = k.σ²[1] .* ew(k.k, x) -pw(k::Scaled, x::AV) = k.σ²[1] .* pw(k.k, x) +ew(k::Scaled, x::AV) = k.f(k.σ²[1]) .* ew(k.k, x) +pw(k::Scaled, x::AV) = k.f(k.σ²[1]) .* pw(k.k, x) @@ -598,9 +610,10 @@ pw(k::Scaled, x::AV) = k.σ²[1] .* pw(k.k, x) Apply a length scale to a kernel. Specifically, `k(x, x′) = k(a * x, a * x′)`. """ -struct Stretched{T, Ta<:AVM{T}, Tk<:Kernel} <: Kernel +struct Stretched{T, Ta<:AVM{T}, Tk<:Kernel, fT} <: Kernel a::Ta k::Tk + f::fT end get_iparam(s::Stretched) = s.a child(s::Stretched) = (s.k,) @@ -682,43 +695,43 @@ K = pairwise(k, xs, ys) 1.40202e-8 0.293658 0.0808585 ``` """ -stretch(k::Kernel, a::Real) = stretch(k, typeof(a)[a]) -stretch(k::Kernel, a::AVM{<:Real}) = Stretched(a, k) +stretch(k::Kernel, a::AVM{<:Real}, f) = Stretched(a, k, f) +stretch(k::Kernel, a::AVM{<:Real}) = stretch(k, a, identity) +stretch(k::Kernel, a::Real, f) = stretch(k, typeof(a)[a], f) +stretch(k::Kernel, a::Real) = stretch(k, a, identity) # NOTE: `a` is not scalar any more !!! # Binary methods (scalar `a`, scalar-valued input) -ew(k::Stretched{<:Real, <:AV{<:Real}}, x::AV{<:Real}, x′::AV{<:Real}) = ew(k.k, k.a .* x, k.a .* x′) -pw(k::Stretched{<:Real, <:AV{<:Real}}, x::AV{<:Real}, x′::AV{<:Real}) = pw(k.k, k.a .* x, k.a .* x′) +ew(k::Stretched{<:Real, <:AV{<:Real}}, x::AV{<:Real}, x′::AV{<:Real}) = ew(k.k, k.f.(k.a) .* x, k.f.(k.a) .* x′) +pw(k::Stretched{<:Real, <:AV{<:Real}}, x::AV{<:Real}, x′::AV{<:Real}) = pw(k.k, k.f.(k.a) .* x, k.f.(k.a) .* x′) # Unary methods (scalar) -ew(k::Stretched{<:Real, <:AV{<:Real}}, x::AV{<:Real}) = ew(k.k, k.a .* x) -pw(k::Stretched{<:Real, <:AV{<:Real}}, x::AV{<:Real}) = pw(k.k, k.a .* x) +ew(k::Stretched{<:Real, <:AV{<:Real}}, x::AV{<:Real}) = ew(k.k, k.f.(k.a) .* x) +pw(k::Stretched{<:Real, <:AV{<:Real}}, x::AV{<:Real}) = pw(k.k, k.f.(k.a) .* x) # Binary methods (scalar and vector `a`, vector-valued input) function ew(k::Stretched{<:Real, <:AV{<:Real}}, x::ColVecs, x′::ColVecs) - return ew(k.k, ColVecs(k.a .* x.X), ColVecs(k.a .* x′.X)) + return ew(k.k, ColVecs(k.f.(k.a) .* x.X), ColVecs(k.f.(k.a) .* x′.X)) end function pw(k::Stretched{<:Real, <:AV{<:Real}}, x::ColVecs, x′::ColVecs) - return pw(k.k, ColVecs(k.a .* x.X), ColVecs(k.a .* x′.X)) + return pw(k.k, ColVecs(k.f.(k.a) .* x.X), ColVecs(k.f.(k.a) .* x′.X)) end # Unary methods (scalar and vector `a`, vector-valued input) -ew(k::Stretched{<:Real, <:AV{<:Real}}, x::ColVecs) = ew(k.k, ColVecs(k.a .* x.X)) -pw(k::Stretched{<:Real, <:AV{<:Real}}, x::ColVecs) = pw(k.k, ColVecs(k.a .* x.X)) +ew(k::Stretched{<:Real, <:AV{<:Real}}, x::ColVecs) = ew(k.k, ColVecs(k.f.(k.a) .* x.X)) +pw(k::Stretched{<:Real, <:AV{<:Real}}, x::ColVecs) = pw(k.k, ColVecs(k.f.(k.a) .* x.X)) # Binary methods (matrix `a`, vector-valued input) function ew(k::Stretched{<:Real, <:AM{<:Real}}, x::ColVecs, x′::ColVecs) - return ew(k.k, ColVecs(k.a * x.X), ColVecs(k.a * x′.X)) + return ew(k.k, ColVecs(k.f.(k.a) * x.X), ColVecs(k.f.(k.a) * x′.X)) end function pw(k::Stretched{<:Real, <:AM{<:Real}}, x::ColVecs, x′::ColVecs) - return pw(k.k, ColVecs(k.a * x.X), ColVecs(k.a * x′.X)) + return pw(k.k, ColVecs(k.f.(k.a) * x.X), ColVecs(k.f.(k.a) * 
x′.X))
 end

 # Unary methods (scalar and vector `a`, vector-valued input)
-ew(k::Stretched{<:Real, <:AM{<:Real}}, x::ColVecs) = ew(k.k, ColVecs(k.a * x.X))
-pw(k::Stretched{<:Real, <:AM{<:Real}}, x::ColVecs) = pw(k.k, ColVecs(k.a * x.X))
-
-
+ew(k::Stretched{<:Real, <:AM{<:Real}}, x::ColVecs) = ew(k.k, ColVecs(k.f.(k.a) * x.X))
+pw(k::Stretched{<:Real, <:AM{<:Real}}, x::ColVecs) = pw(k.k, ColVecs(k.f.(k.a) * x.X))

 """
diff --git a/test/gp/kernel.jl b/test/gp/kernel.jl
index 7f334d3d..753c5a35 100644
--- a/test/gp/kernel.jl
+++ b/test/gp/kernel.jl
@@ -170,9 +170,9 @@ end

     @timedtestset "Scaled" begin
-        differentiable_kernel_tests(Scaled(0.5, EQ()), ȳ, Ȳ, Ȳ_sq, x0, x1, x2)
-        differentiable_kernel_tests(Scaled(0.5, EQ()), ȳ, Ȳ, Ȳ_sq, X0, X1, X2)
-        adjoint_test(σ²->pw(Scaled(σ², EQ()), X0), Ȳ_sq, 0.5)
+        differentiable_kernel_tests(scale(EQ(), 0.5), ȳ, Ȳ, Ȳ_sq, x0, x1, x2)
+        differentiable_kernel_tests(scale(EQ(), 0.5), ȳ, Ȳ, Ȳ_sq, X0, X1, X2)
+        adjoint_test(σ²->pw(scale(EQ(), σ²), X0), Ȳ_sq, 0.5)
         @test 0.5 * EQ() isa Scaled
         @test EQ() * 0.5 isa Scaled
     end
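The test changes above pin down the redefined interface: `Scaled` is no longer constructed directly, `scale(k, σ²[, f])` is the entry point, and `*` forwards to it with the identity transform. A quick sketch of the resulting equivalences (`x` is an arbitrary input vector chosen for illustration):

```julia
using Stheno

x = randn(10)
k1 = scale(EQ(), 0.5)            # replaces the old Scaled(0.5, EQ())
k2 = 0.5 * EQ()                  # forwards to scale(EQ(), 0.5)
@assert pw(k1, x) == pw(k2, x)   # identical covariance matrices

# With a transform, the stored value is raw and the constraint is applied lazily:
k3 = scale(EQ(), log(0.5), exp)  # effective variance exp(log(0.5)) ≈ 0.5
```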
+end + CompositeGP(args::Targs, gpc::GPC) where {Targs} = CompositeGP{Targs}(args, gpc) mean_vector(f::CompositeGP, x::AV) = mean_vector(f.args, x) From bc05b77508c9ff058ab1d5369d66cc9d03188e55 Mon Sep 17 00:00:00 2001 From: hongbinren Date: Fri, 6 Mar 2020 22:11:14 +0800 Subject: [PATCH 33/42] add annotations --- src/Stheno.jl | 126 ++++++++++++++++---------------- src/abstract_model.jl | 73 +++++++++++++++++- src/composite/composite_gp.jl | 10 +-- src/gp/kernel.jl | 43 +++++------ src/gp/neural_kernel_network.jl | 4 +- src/neural_network/basic.jl | 15 +++- 6 files changed, 171 insertions(+), 100 deletions(-) diff --git a/src/Stheno.jl b/src/Stheno.jl index 9e8d573f..dd7752e6 100644 --- a/src/Stheno.jl +++ b/src/Stheno.jl @@ -1,66 +1,66 @@ module Stheno - using Distributions, Distances, BlockArrays, Statistics, Random, FillArrays, - LinearAlgebra, Zygote - import Base: length, map - import Base.Broadcast: broadcasted, materialize, broadcast_shape - import Statistics: mean, cov - using LinearAlgebra: AbstractTriangular - using ZygoteRules: @adjoint - using Zygote: @nograd - using BlockArrays: _BlockArray - import LinearAlgebra: cholesky, cross - import Distances: pairwise, colwise - - const AV{T} = AbstractVector{T} - const AM{T} = AbstractMatrix{T} - const AVM{T} = AbstractVecOrMat{T} - - const BlockLowerTriangular{T} = LowerTriangular{T, <:BlockMatrix{T}} - const BlockUpperTriangular{T} = UpperTriangular{T, <:BlockMatrix{T}} - const BlockTriangular{T} = Union{BlockLowerTriangular{T}, BlockUpperTriangular{T}} - - function elementwise end - - const pw = pairwise - const ew = elementwise - - # Various bits of utility that aren't inherently GP-related. Often very type-piratic. - include(joinpath("util", "zygote_rules.jl")) - include(joinpath("util", "covariance_matrices.jl")) - include(joinpath("util", "block_arrays", "dense.jl")) - include(joinpath("util", "block_arrays", "diagonal.jl")) - include(joinpath("util", "block_arrays", "triangular.jl")) - include(joinpath("util", "abstract_data_set.jl")) - include(joinpath("util", "distances.jl")) - include(joinpath("util", "proper_type_piracy.jl")) - - # Supertype for GPs. - include("abstract_model.jl") - include("abstract_gp.jl") - - # Neural network used in gp - include(joinpath("neural_network", "basic.jl")) - - # Atomic GP objects. - include(joinpath("gp", "mean.jl")) - include(joinpath("gp", "kernel.jl")) - include(joinpath("gp", "neural_kernel_network.jl")) - include(joinpath("gp", "gp.jl")) - - # Composite GPs, constructed via affine transformation of CompositeGPs and GPs. - include(joinpath("composite", "composite_gp.jl")) - include(joinpath("composite", "indexing.jl")) - include(joinpath("composite", "cross.jl")) - include(joinpath("composite", "conditioning.jl")) - include(joinpath("composite", "approximate_conditioning.jl")) - include(joinpath("composite", "product.jl")) - include(joinpath("composite", "addition.jl")) - include(joinpath("composite", "compose.jl")) - # include(joinpath("composite", "gradient.jl")) - # include(joinpath("composite", "integrate.jl")) - - # Various stuff for convenience. 
- include(joinpath("util", "model.jl")) - include(joinpath("util", "plotting.jl")) + using Distributions, Distances, BlockArrays, Statistics, Random, FillArrays, + LinearAlgebra, Zygote + import Base: length, map + import Base.Broadcast: broadcasted, materialize, broadcast_shape + import Statistics: mean, cov + using LinearAlgebra: AbstractTriangular + using ZygoteRules: @adjoint + using Zygote: @nograd + using BlockArrays: _BlockArray + import LinearAlgebra: cholesky, cross + import Distances: pairwise, colwise + + const AV{T} = AbstractVector{T} + const AM{T} = AbstractMatrix{T} + const AVM{T} = AbstractVecOrMat{T} + + const BlockLowerTriangular{T} = LowerTriangular{T, <:BlockMatrix{T}} + const BlockUpperTriangular{T} = UpperTriangular{T, <:BlockMatrix{T}} + const BlockTriangular{T} = Union{BlockLowerTriangular{T}, BlockUpperTriangular{T}} + + function elementwise end + + const pw = pairwise + const ew = elementwise + + # Various bits of utility that aren't inherently GP-related. Often very type-piratic. + include(joinpath("util", "zygote_rules.jl")) + include(joinpath("util", "covariance_matrices.jl")) + include(joinpath("util", "block_arrays", "dense.jl")) + include(joinpath("util", "block_arrays", "diagonal.jl")) + include(joinpath("util", "block_arrays", "triangular.jl")) + include(joinpath("util", "abstract_data_set.jl")) + include(joinpath("util", "distances.jl")) + include(joinpath("util", "proper_type_piracy.jl")) + + # Supertype for GPs. + include("abstract_model.jl") + include("abstract_gp.jl") + + # Neural network used for building neural kernel network + include(joinpath("neural_network", "basic.jl")) + + # Atomic GP objects + include(joinpath("gp", "mean.jl")) + include(joinpath("gp", "kernel.jl")) + include(joinpath("gp", "neural_kernel_network.jl")) + include(joinpath("gp", "gp.jl")) + + # Composite GPs, constructed via affine transformation of CompositeGPs and GPs. + include(joinpath("composite", "composite_gp.jl")) + include(joinpath("composite", "indexing.jl")) + include(joinpath("composite", "cross.jl")) + include(joinpath("composite", "conditioning.jl")) + include(joinpath("composite", "approximate_conditioning.jl")) + include(joinpath("composite", "product.jl")) + include(joinpath("composite", "addition.jl")) + include(joinpath("composite", "compose.jl")) + # include(joinpath("composite", "gradient.jl")) + # include(joinpath("composite", "integrate.jl")) + + # Various stuff for convenience. + include(joinpath("util", "model.jl")) + include(joinpath("util", "plotting.jl")) end # module diff --git a/src/abstract_model.jl b/src/abstract_model.jl index 5b5576ed..addc68e5 100644 --- a/src/abstract_model.jl +++ b/src/abstract_model.jl @@ -1,15 +1,61 @@ export AbstractModel, parameters, parameter_eltype, dispatch!, extract_gradient + +""" +Here I introduce an `AbstractModel` type, acting as the root type of all types that contain +learnable parameters, such as `AbstractGP`, `Kernel` & `Meanfunction`. This gives +our model a tree structure, and facilitate for collecting and redistributing parameters. + +Here is an example of how our GP model now looks like: + + GP + / \ + / \ + ConstantMean Scaled + (c) (σ) + | + Stretched + (l) + | + EQ() + +Parameters for this model are `c`, `σ` & `l`, we can use: +```julia +θ = parameters(GP) +``` +to extract all parameters contained in the model, and +```julia +dispatch!(GP, θ₁) +``` +to redistribute the updated parameters ( maybe returned from some optimizer ) back to the model. 
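+
+For example, a minimal gradient-descent step might look like this (an illustrative sketch
+only: `x`, `y` and the step size are assumed to be defined, Zygote provides `gradient`, and
+`GP` stands for the model above):
+```julia
+θ = parameters(GP)
+G = gradient(m -> -logpdf(m(x, 0.1), y), GP)[1]      # NamedTuple of gradients
+dispatch!(GP, θ .- 0.01 .* extract_gradient(GP, G))  # one update step
+```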
+
+Enabling the parameter collection and dispatch features leaves Stheno's current type implementation and
+APIs unchanged; one only needs to subtype the existing types to `AbstractModel` and implement the two
+interfaces `get_iparam` & `child` for each of them, for example:
+```julia
+struct EQ <: AbstractModel end
+get_iparam(::EQ) = Union{}[]
+child(::EQ) = ()
+```
+"""
+
+
 const AVM = AbstractVecOrMat
 
 abstract type AbstractModel end
 
+# Return parameters contained inside a model
 get_iparam(m::AbstractModel) = throw(UndefVarError("get_iparam method not defined for $m"))
+# Return the models contained in another model, e.g. `Stretched` contains a kernel
 child(m::AbstractModel) = throw(UndefVarError("child method not defined for $m"))
+
+# parameter_eltype returns the element type of the parameters inside a model. For types that
+# are not subtypes of `AbstractModel`, and for models that contain no parameters, e.g. the EQ kernel,
+# it will return `Union{}`.
 parameter_eltype(::Any) = Union{}
 function parameter_eltype(x::AbstractModel)
-    T = eltype(get_iparam(x)) 
+    T = eltype(get_iparam(x))
     for each in child(x)
         T = promote_type(T, parameter_eltype(each))
     end
@@ -17,6 +63,7 @@ function parameter_eltype(x::AbstractModel)
 end
 
 
+# Extract all parameters of a model to a 1D array
 parameters(x::AbstractModel) = parameters!(parameter_eltype(x)[], x)
 parameters!(out, ::Any) = out
 function parameters!(out, x::AbstractModel)
@@ -28,22 +75,40 @@ function parameters!(out, x::AbstractModel)
 end
 
 
+# Return the number of parameters contained inside a model
 get_nparameter(x::AbstractModel) = length(parameters(x))
 
 
+# dispatch! allows us to update the parameters inside a model: it accepts a model and a 1D
+# array, and assigns the values inside the array to the corresponding parameters of the model.
 function dispatch!(k::AbstractModel, v::AV)
-    nθ_k = get_nparameter(k) 
+    nθ_k = get_nparameter(k)
     nθ_k == length(v) || throw(DimensionMismatch("expect $(nθ_k) parameters, got $(length(v))"))
     θ = get_iparam(k)
    copyto!(θ, 1, v, 1, length(θ))
     loc = 1 + length(θ)
     for k′ in child(k)
-        nθ_k′ = get_nparameter(k′) 
-        dispatch!(k′, v[loc:loc+nθ_k′-1]) 
+        nθ_k′ = get_nparameter(k′)
+        dispatch!(k′, v[loc:loc+nθ_k′-1])
         loc += nθ_k′
     end
     return k
 end
+
+
+# Zygote is able to compute gradients w.r.t. a parametrized type, for example:
+# ```
+# struct Linear
+#     W
+#     b
+# end
+#
+# (l::Linear)(x) = l.W * x .+ l.b
+# x = rand(5)
+# model = Linear(rand(2, 5), rand(2))
+# dmodel = gradient(model -> sum(model(x)), model)[1]
+# ```
+# the result is a `NamedTuple`. The `extract_gradient` function is used
+# to extract the value of those gradients to a 1D array.
 extract_gradient(k::AbstractModel, G::NamedTuple) = extract_gradient!(parameter_eltype(k)[], G)
 function extract_gradient!(out, G::NamedTuple)
     for (_, val) in pairs(G)
         if val isa AVM
             append!(out, val)
         elseif val isa NamedTuple
             extract_gradient!(out, val)
         elseif val isa Tuple
             for each in val
                 if each isa NamedTuple
                     extract_gradient!(out, each)
                 end
             end
         end
     end
     return out
 end
diff --git a/src/composite/composite_gp.jl b/src/composite/composite_gp.jl
index 83034f82..4ac8c172 100644
--- a/src/composite/composite_gp.jl
+++ b/src/composite/composite_gp.jl
@@ -15,13 +15,13 @@ struct CompositeGP{Targs} <: AbstractGP
 end
 get_iparam(::CompositeGP) = Union{}[]
 function child(c::CompositeGP)
-    models = []
+    models = []
     for i in eachindex(c.args)
-        if c.args[i] isa AbstractModel
-            push!(models, c.args[i])
-        end
+        if c.args[i] isa AbstractModel
+            push!(models, c.args[i])
+        end
     end
-    tuple(models...)
+    tuple(models...)
end
 
 CompositeGP(args::Targs, gpc::GPC) where {Targs} = CompositeGP{Targs}(args, gpc)
 
 mean_vector(f::CompositeGP, x::AV) = mean_vector(f.args, x)
diff --git a/src/gp/kernel.jl b/src/gp/kernel.jl
index ce91bd07..f0323764 100644
--- a/src/gp/kernel.jl
+++ b/src/gp/kernel.jl
@@ -7,17 +7,12 @@ using LinearAlgebra: isposdef, checksquare
 abstract type Kernel <: AbstractModel end
 
 """
-Definition of a particular kernel should contains:
-
-    struct KernelName <: Kernel end
-    function kernelname end
-    get_iparam(::KernelName)
-    child(::KernelName)
-    parameter_eltype(::KernelName)
-    ew(::KernelName, x)
-    ew(::KernelName, x, x′)
-    pw(::KernelName, x)
-    pw(::KernelName, x, x′)
+Changes:
+    1. Kernel parameters are now stored as vectors or matrices; the scalar API is maintained by
+    defining additional constructors.
+    2. A new field `f` is added to kernels that contain parameters. `f` acts as a constraint on
+    the parameters: e.g. σ² in `Scaled` must remain positive during the calculation, which we
+    can enforce by setting `f(x) = exp(x)`.
 """
 
@@ -255,7 +250,7 @@ For length scales etc see [`stretch`](@ref), for variance see [`*`](@ref).
 """
 struct RQ{T, Tα<:AV{T}, fT} <: Kernel
     α::Tα
-    f::fT 
+    f::fT
 end
 RQ(α::Real, f) = RQ(typeof(α)[α], f)
 RQ(α::Real) = RQ(α, identity)
 get_iparam(rq::RQ) = rq.α
 child(::RQ) = ()
 
 _rq(d, α) = (1 + d / (2α))^(-α)
 
-# define gradient for _rq manually, since automatic backprop by Zygote
-# will results in type unstable
+# I redefine the adjoint for `_rq` by hand, since Zygote's automatically-derived gradient
+# of `_rq` can return a number of `Complex` type.
 @adjoint function _rq(d::dT, α::αT) where {dT<:Real, αT<:Real}
     y = _rq(d, α)
-    y, function (ȳ)
-    T = promote_type(dT, αT)
-    x = 1 + d / (2α)
-    -0.5*ȳ*y/x, ȳ*y*(d / (x*(2α)) - log(x+eps(T)))
-    end
+    y, function (ȳ)
+        T = promote_type(dT, αT)
+        x = 1 + d / (2α)
+        -0.5*ȳ*y/x, ȳ*y*(d / (x*(2α)) - log(x+eps(T)))
+    end
 end
 
 # Binary methods.
@@ -292,7 +287,7 @@ Cosine Kernel with period parameter `p`.
 """
 struct Cosine{T, Tp<:AV{T}, fT} <: Kernel
     p::Tp
-    f::fT 
+    f::fT
 end
 Cosine(p::Real, f) = Cosine(typeof(p)[p], f)
 Cosine(p::Real) = Cosine(p, identity)
 get_iparam(c::Cosine) = c.p
@@ -343,7 +338,7 @@ k(xl, xr) = (dot(xl, xr) + σ²)^p
 """
 struct Poly{p, T, Tσ²<:AV{T}, fT} <: Kernel
     σ²::Tσ²
-    f::fT 
+    f::fT
 end
 Poly(p::Int, σ²::Real, f) = Poly{p, typeof(σ²), AV{typeof(σ²)}, typeof(f)}(typeof(σ²)[σ²], f)
 Poly(p::Int, σ²::Real) = Poly(p, σ², identity)
 get_iparam(p::Poly) = p.σ²
@@ -376,7 +371,7 @@ The γ-Exponential kernel, 0 < γ ⩽ 2, is given by `k(xl, xr) = exp(-||xl - xr
 """
 struct GammaExp{T, Tγ<:AV{T}, fT} <: Kernel
     γ::Tγ
-    f::fT 
+    f::fT
 end
 GammaExp(γ::Real, f) = GammaExp(typeof(γ)[γ], f)
 GammaExp(γ::Real) = GammaExp(γ, identity)
 get_iparam(g::GammaExp) = g.γ
@@ -570,7 +565,7 @@ Scale the variance of `Kernel` `k` by `σ²` s.t. `(σ² * k)(x, x′) = σ² *
 struct Scaled{T, Tσ²<:AV{T}, Tk<:Kernel, fT} <: Kernel
     σ²::Tσ²
     k::Tk
-    f::fT 
+    f::fT
 end
 scale(k::Kernel, σ²::Real, f) = Scaled(typeof(σ²)[σ²], k, f)
 scale(k::Kernel, σ²::Real) = scale(k, σ², identity)
@@ -613,7 +608,7 @@ Apply a length scale to a kernel. 
Specifically, `k(x, x′) = k(a * x, a * x′)
 struct Stretched{T, Ta<:AVM{T}, Tk<:Kernel, fT} <: Kernel
     a::Ta
     k::Tk
-    f::fT 
+    f::fT
 end
 get_iparam(s::Stretched) = s.a
 child(s::Stretched) = (s.k,)
diff --git a/src/gp/neural_kernel_network.jl b/src/gp/neural_kernel_network.jl
index a98ebc07..9783fd2d 100644
--- a/src/gp/neural_kernel_network.jl
+++ b/src/gp/neural_kernel_network.jl
@@ -9,7 +9,7 @@ end
 get_iparam(p::Primitive) = Union{}[]
 child(p::Primitive) = p.kernels
 
-# flatten k kernel matrices of size Mk×Nk, and concatenate these 1d array into a k×(Mk*Nk) 2d array
+# flatten k kernel matrices of size Mk×Nk to 1×(Mk*Nk), and concatenate these arrays into a k×(Mk*Nk) 2d array
 _cat_kernel_array(x) = vcat([reshape(x[i], 1, :) for i in 1:length(x)]...)
 
 # NOTE: though we implement `ew` & `pw` functions for Primitive, it isn't a subtype of Kernel type,
@@ -30,7 +30,7 @@ end
 # Neural Kernel Network: since the space of stationary kernels is closed under linear combination
 # (with positive coefficients) and element-wise multiplication, we can use a neural-network-like structure
 # to build composite kernels. This type contains a `Primitive` layer which holds basic kernels and a specialised
-# neural network architecture to perform kernel composition. It should function like a normal `Stheno` kernel.
+# neural network architecture to perform kernel composition. It should work like a normal `Stheno` kernel.
 struct NeuralKernelNetwork{PT<:Primitive, CT<:Chain} <: Kernel
     player::PT
     chain::CT
diff --git a/src/neural_network/basic.jl b/src/neural_network/basic.jl
index dff826dd..fe27fc9a 100644
--- a/src/neural_network/basic.jl
+++ b/src/neural_network/basic.jl
@@ -2,6 +2,19 @@ export LinearLayer, ProductLayer, chain
 
 using Base: tail
 
+"""
+This file contains components needed by the neural kernel network:
+1. LinearLayer: a linear transformation (no bias term) with positive weights
+2. ProductLayer: computes element-wise multiplication between specific kernels
+3. Chain: chains `LinearLayer`s and `ProductLayer`s together to build a sum-product network
+
+The reason I don't use Flux's neural network modules is that the type of layer that can
+be used in a neural kernel network is limited (to my knowledge, only linear layers with positive
+weights, product layers & dropout layers are eligible). Therefore, I think we can maintain a
+minimal neural network module specifically for the neural kernel network.
+"""
+
+
 softplus(x) = log(1+exp(x))
 
 struct LinearLayer{T, MT<:AM{T}} <: AbstractModel
@@ -17,8 +30,6 @@ function Base.show(io::IO, layer::LinearLayer)
 end
 
 
-# when writing ProductLayer, we don't use `prod`, because broadcasting problem will
-# results in gradient evaluation problem.
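+# A quick sketch of how these layers compose (illustrative only; the sizes here are
+# hypothetical, not part of any exported API):
+#
+#     lin = LinearLayer(2, 2)   # mixes 2 kernel rows into 2 positively-weighted sums
+#     prod = ProductLayer(2)    # multiplies consecutive pairs of rows element-wise
+#     nn = chain(lin, prod)     # maps a 2×N kernel array to a 1×N kernel array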
struct ProductLayer <: AbstractModel
     step::Int
 end
 
From 0b46ba28ba6eb18694ec93c20eb7da47e02f65bc Mon Sep 17 00:00:00 2001
From: hongbin <35309324+HamletWantToCode@users.noreply.github.com>
Date: Fri, 6 Mar 2020 22:45:49 +0800
Subject: [PATCH 34/42] Update composite_gp.jl

---
 src/composite/composite_gp.jl | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/src/composite/composite_gp.jl b/src/composite/composite_gp.jl
index 4ac8c172..b2fa1e80 100644
--- a/src/composite/composite_gp.jl
+++ b/src/composite/composite_gp.jl
@@ -15,13 +15,13 @@ struct CompositeGP{Targs} <: AbstractGP
 end
 get_iparam(::CompositeGP) = Union{}[]
 function child(c::CompositeGP)
-    models = []
+    models = []
     for i in eachindex(c.args)
-        if c.args[i] isa AbstractModel
-            push!(models, c.args[i])
-        end
+        if c.args[i] isa AbstractModel
+            push!(models, c.args[i])
+        end
     end
-    tuple(models...)
+    tuple(models...)
 end
 
From 325016490184205a0578c64e8e1956d7b21d7faf Mon Sep 17 00:00:00 2001
From: hongbin <35309324+HamletWantToCode@users.noreply.github.com>
Date: Fri, 6 Mar 2020 23:34:02 +0800
Subject: [PATCH 35/42] Update kernel.jl

fix indentation
---
 src/gp/kernel.jl | 42 +++++++++++++++++++++---------------------
 1 file changed, 21 insertions(+), 21 deletions(-)

diff --git a/src/gp/kernel.jl b/src/gp/kernel.jl
index f0323764..b6445ffd 100644
--- a/src/gp/kernel.jl
+++ b/src/gp/kernel.jl
@@ -9,10 +9,10 @@ abstract type Kernel <: AbstractModel end
 """
 Changes:
     1. Kernel parameters are now stored as vectors or matrices; the scalar API is maintained by
-    defining additional constructors.
-    2. A new field `f` is added to kernels that contain parameters. `f` acts as a constraint on
-    the parameters: e.g. σ² in `Scaled` must remain positive during the calculation, which we
-    can enforce by setting `f(x) = exp(x)`.
+       defining additional constructors.
+    2. A new field `f` is added to kernels that contain parameters. `f` acts as a constraint on
+       the parameters: e.g. σ² in `Scaled` must remain positive during the calculation, which we
+       can enforce by setting `f(x) = exp(x)`.
 """
 
@@ -127,8 +127,8 @@ The usual periodic kernel derived by mapping the input domain onto the unit circ
 For length scales etc see [`stretch`](@ref), for variance see [`*`](@ref).
 """
 struct PerEQ{T, LT<:AV{T}, fT} <: Kernel
-    l::LT
-    f::fT
+    l::LT
+    f::fT
 end
 PerEQ(l::Real, f) = PerEQ(typeof(l)[l], f)
 PerEQ(l::Real) = PerEQ(l, identity)
 get_iparam(per::PerEQ) = per.l
@@ -250,7 +250,7 @@ For length scales etc see [`stretch`](@ref), for variance see [`*`](@ref).
 """
 struct RQ{T, Tα<:AV{T}, fT} <: Kernel
     α::Tα
-    f::fT
+    f::fT
 end
 RQ(α::Real, f) = RQ(typeof(α)[α], f)
 RQ(α::Real) = RQ(α, identity)
 get_iparam(rq::RQ) = rq.α
@@ -263,11 +263,11 @@ _rq(d, α) = (1 + d / (2α))^(-α)
 
 # I redefine the adjoint for `_rq` by hand, since Zygote's automatically-derived gradient
 # of `_rq` can return a number of `Complex` type.
 @adjoint function _rq(d::dT, α::αT) where {dT<:Real, αT<:Real}
     y = _rq(d, α)
-    y, function (ȳ)
-    T = promote_type(dT, αT)
-    x = 1 + d / (2α)
-    -0.5*ȳ*y/x, ȳ*y*(d / (x*(2α)) - log(x+eps(T)))
-    end
+    return y, function (ȳ)
+        T = promote_type(dT, αT)
+        x = 1 + d / (2α)
+        -0.5*ȳ*y/x, ȳ*y*(d / (x*(2α)) - log(x+eps(T)))
+    end
 end
@@ -287,7 +287,7 @@ Cosine Kernel with period parameter `p`. 
""" struct Cosine{T, Tp<:AV{T}, fT} <: Kernel p::Tp - f::fT + f::fT end Cosine(p::Real, f) = Cosine(typeof(p)[p], f) Cosine(p::Real) = Cosine(p, identity) @@ -338,7 +338,7 @@ k(xl, xr) = (dot(xl, xr) + σ²)^p """ struct Poly{p, T, Tσ²<:AV{T}, fT} <: Kernel σ²::Tσ² - f::fT + f::fT end Poly(p::Int, σ²::Real, f) = Poly{p, typeof(σ²), AV{typeof(σ²)}, typeof(f)}(typeof(σ²)[σ²], f) Poly(p::Int, σ²::Real) = Poly(p, σ², identity) @@ -371,7 +371,7 @@ The γ-Exponential kernel, 0 < γ ⩽ 2, is given by `k(xl, xr) = exp(-||xl - xr """ struct GammaExp{T, Tγ<:AV{T}, fT} <: Kernel γ::Tγ - f::fT + f::fT end GammaExp(γ::Real, f) = GammaExp(typeof(γ)[γ], f) GammaExp(γ::Real) = GammaExp(γ, identity) @@ -565,7 +565,7 @@ Scale the variance of `Kernel` `k` by `σ²` s.t. `(σ² * k)(x, x′) = σ² * struct Scaled{T, Tσ²<:AV{T}, Tk<:Kernel, fT} <: Kernel σ²::Tσ² k::Tk - f::fT + f::fT end scale(k::Kernel, σ²::Real, f) = Scaled(typeof(σ²)[σ²], k, f) scale(k::Kernel, σ²::Real) = scale(k, σ², identity) @@ -608,7 +608,7 @@ Apply a length scale to a kernel. Specifically, `k(x, x′) = k(a * x, a * x′) struct Stretched{T, Ta<:AVM{T}, Tk<:Kernel, fT} <: Kernel a::Ta k::Tk - f::fT + f::fT end get_iparam(s::Stretched) = s.a child(s::Stretched) = (s.k,) @@ -706,10 +706,10 @@ pw(k::Stretched{<:Real, <:AV{<:Real}}, x::AV{<:Real}) = pw(k.k, k.f.(k.a) .* x) # Binary methods (scalar and vector `a`, vector-valued input) function ew(k::Stretched{<:Real, <:AV{<:Real}}, x::ColVecs, x′::ColVecs) - return ew(k.k, ColVecs(k.f.(k.a) .* x.X), ColVecs(k.f.(k.a) .* x′.X)) + return ew(k.k, ColVecs(k.f.(k.a) .* x.X), ColVecs(k.f.(k.a) .* x′.X)) end function pw(k::Stretched{<:Real, <:AV{<:Real}}, x::ColVecs, x′::ColVecs) - return pw(k.k, ColVecs(k.f.(k.a) .* x.X), ColVecs(k.f.(k.a) .* x′.X)) + return pw(k.k, ColVecs(k.f.(k.a) .* x.X), ColVecs(k.f.(k.a) .* x′.X)) end # Unary methods (scalar and vector `a`, vector-valued input) @@ -718,10 +718,10 @@ pw(k::Stretched{<:Real, <:AV{<:Real}}, x::ColVecs) = pw(k.k, ColVecs(k.f.(k.a) . # Binary methods (matrix `a`, vector-valued input) function ew(k::Stretched{<:Real, <:AM{<:Real}}, x::ColVecs, x′::ColVecs) - return ew(k.k, ColVecs(k.f.(k.a) * x.X), ColVecs(k.f.(k.a) * x′.X)) + return ew(k.k, ColVecs(k.f.(k.a) * x.X), ColVecs(k.f.(k.a) * x′.X)) end function pw(k::Stretched{<:Real, <:AM{<:Real}}, x::ColVecs, x′::ColVecs) - return pw(k.k, ColVecs(k.f.(k.a) * x.X), ColVecs(k.f.(k.a) * x′.X)) + return pw(k.k, ColVecs(k.f.(k.a) * x.X), ColVecs(k.f.(k.a) * x′.X)) end # Unary methods (scalar and vector `a`, vector-valued input) From 325016490184205a0578c64e8e1956d7b21d7faf Mon Sep 17 00:00:00 2001 From: hongbin <35309324+HamletWantToCode@users.noreply.github.com> Date: Fri, 6 Mar 2020 23:35:36 +0800 Subject: [PATCH 36/42] Update basic.jl fix indentation --- src/neural_network/basic.jl | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/neural_network/basic.jl b/src/neural_network/basic.jl index fe27fc9a..55d959c6 100644 --- a/src/neural_network/basic.jl +++ b/src/neural_network/basic.jl @@ -31,21 +31,21 @@ end struct ProductLayer <: AbstractModel - step::Int + step::Int end get_iparam(::ProductLayer) = Union{}[] child(::ProductLayer) = () function (p::ProductLayer)(x) - m, n = size(x) - x1 = reshape(x, p.step, m÷p.step, n) - res = .*([x1[i, :, :] for i in 1:p.step]...) - return res + m, n = size(x) + x1 = reshape(x, p.step, m÷p.step, n) + res = .*([x1[i, :, :] for i in 1:p.step]...) + return res end struct Chain <: AbstractModel - models::Tuple{Vararg{AbstractModel}} - Chain(ms...) 
= new(ms) + models::Tuple{Vararg{AbstractModel}} + Chain(ms...) = new(ms) end get_iparam(::Chain) = Union{}[] child(c::Chain) = c.models From 9d143538359ddc9bf72a9ea170acc964c397947b Mon Sep 17 00:00:00 2001 From: hongbin <35309324+HamletWantToCode@users.noreply.github.com> Date: Fri, 6 Mar 2020 23:38:02 +0800 Subject: [PATCH 37/42] Update abstract_model.jl fix indentation --- src/abstract_model.jl | 52 +++++++++++++++++++++---------------------- 1 file changed, 26 insertions(+), 26 deletions(-) diff --git a/src/abstract_model.jl b/src/abstract_model.jl index addc68e5..9b47ad58 100644 --- a/src/abstract_model.jl +++ b/src/abstract_model.jl @@ -9,15 +9,15 @@ our model a tree structure, and facilitate for collecting and redistributing par Here is an example of how our GP model now looks like: GP - / \ + / \ / \ - ConstantMean Scaled - (c) (σ) - | - Stretched - (l) - | - EQ() + ConstantMean Scaled + (c) (σ) + | + Stretched + (l) + | + EQ() Parameters for this model are `c`, `σ` & `l`, we can use: ```julia @@ -55,7 +55,7 @@ child(m::AbstractModel) = throw(UndefVarError("child method not defined for $m") # it will return `Union{}`. parameter_eltype(::Any) = Union{} function parameter_eltype(x::AbstractModel) - T = eltype(get_iparam(x)) + T = eltype(get_iparam(x)) for each in child(x) T = promote_type(T, parameter_eltype(each)) end @@ -68,7 +68,7 @@ parameters(x::AbstractModel) = parameters!(parameter_eltype(x)[], x) parameters!(out, ::Any) = out function parameters!(out, x::AbstractModel) append!(out, get_iparam(x)) - for x_child in child(x) + for x_child in child(x) parameters!(out, x_child) end return out @@ -82,14 +82,14 @@ get_nparameter(x::AbstractModel) = length(parameters(x)) # dispatch! allows us to update parameters inside a model, it accept a model and a 1D # array, it will assign values inside the array to the corresponding parameter of the model. function dispatch!(k::AbstractModel, v::AV) - nθ_k = get_nparameter(k) - nθ_k == length(v) || throw(DimensionMismatch("expect $(nθ_k) parameters, got $(length(v))")) + nθ_k = get_nparameter(k) + nθ_k == length(v) || throw(DimensionMismatch("expect $(nθ_k) parameters, got $(length(v))")) θ = get_iparam(k) copyto!(θ, 1, v, 1, length(θ)) loc = 1 + length(θ) for k′ in child(k) - nθ_k′ = get_nparameter(k′) - dispatch!(k′, v[loc:loc+nθ_k′-1]) + nθ_k′ = get_nparameter(k′) + dispatch!(k′, v[loc:loc+nθ_k′-1]) loc += nθ_k′ end return k @@ -111,20 +111,20 @@ end # to extract the value of those gradients to a 1D array. 
extract_gradient(k::AbstractModel, G::NamedTuple) = extract_gradient!(parameter_eltype(k)[], G) function extract_gradient!(out, G::NamedTuple) - for (_, val) in pairs(G) - if val isa AVM - append!(out, val) - elseif val isa NamedTuple - extract_gradient!(out, val) - elseif val isa Tuple - for each in val - if each isa NamedTuple - extract_gradient!(out, each) - end - end + for (_, val) in pairs(G) + if val isa AVM + append!(out, val) + elseif val isa NamedTuple + extract_gradient!(out, val) + elseif val isa Tuple + for each in val + if each isa NamedTuple + extract_gradient!(out, each) end + end end - return out + end + return out end From cf39c971e49db9035a20c1532da9662e3b9b8b8e Mon Sep 17 00:00:00 2001 From: hongbinren Date: Sat, 7 Mar 2020 00:40:19 +0800 Subject: [PATCH 38/42] fix bug --- src/abstract_model.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/abstract_model.jl b/src/abstract_model.jl index 9b47ad58..f24de4bf 100644 --- a/src/abstract_model.jl +++ b/src/abstract_model.jl @@ -9,8 +9,8 @@ our model a tree structure, and facilitate for collecting and redistributing par Here is an example of how our GP model now looks like: GP - / \ - / \ + | | + | | ConstantMean Scaled (c) (σ) | From 3c6ef99a157af5302ff77cb6faf94bf8401e4380 Mon Sep 17 00:00:00 2001 From: hongbinren Date: Sat, 7 Mar 2020 09:17:34 +0800 Subject: [PATCH 39/42] fix bug --- src/abstract_model.jl | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/abstract_model.jl b/src/abstract_model.jl index f24de4bf..940a4ac9 100644 --- a/src/abstract_model.jl +++ b/src/abstract_model.jl @@ -40,8 +40,6 @@ child(::EQ) = () """ -const AVM = AbstractVecOrMat - abstract type AbstractModel end # Return parameters contained inside a model @@ -112,7 +110,7 @@ end extract_gradient(k::AbstractModel, G::NamedTuple) = extract_gradient!(parameter_eltype(k)[], G) function extract_gradient!(out, G::NamedTuple) for (_, val) in pairs(G) - if val isa AVM + if val isa AbstractVecOrMat append!(out, val) elseif val isa NamedTuple extract_gradient!(out, val) From cecd7b507a274a0cd5a1ed4c05aca188af3c03db Mon Sep 17 00:00:00 2001 From: hongbinren Date: Fri, 13 Mar 2020 23:57:30 +0800 Subject: [PATCH 40/42] remove AbstractModel, add tests for NKN --- src/Stheno.jl | 126 +++++++++--------- src/abstract_gp.jl | 2 +- src/composite/composite_gp.jl | 12 +- src/gp/kernel.jl | 30 +---- src/gp/mean.jl | 8 +- src/gp/neural_kernel_network.jl | 24 ++-- src/neural_network/basic.jl | 12 +- .../parameter_handler.jl} | 67 +++++----- test/gp/kernel.jl | 41 ++++++ 9 files changed, 155 insertions(+), 167 deletions(-) rename src/{abstract_model.jl => util/parameter_handler.jl} (68%) diff --git a/src/Stheno.jl b/src/Stheno.jl index dd7752e6..6202c1db 100644 --- a/src/Stheno.jl +++ b/src/Stheno.jl @@ -1,66 +1,66 @@ module Stheno - using Distributions, Distances, BlockArrays, Statistics, Random, FillArrays, - LinearAlgebra, Zygote - import Base: length, map - import Base.Broadcast: broadcasted, materialize, broadcast_shape - import Statistics: mean, cov - using LinearAlgebra: AbstractTriangular - using ZygoteRules: @adjoint - using Zygote: @nograd - using BlockArrays: _BlockArray - import LinearAlgebra: cholesky, cross - import Distances: pairwise, colwise - - const AV{T} = AbstractVector{T} - const AM{T} = AbstractMatrix{T} - const AVM{T} = AbstractVecOrMat{T} - - const BlockLowerTriangular{T} = LowerTriangular{T, <:BlockMatrix{T}} - const BlockUpperTriangular{T} = UpperTriangular{T, <:BlockMatrix{T}} - const 
BlockTriangular{T} = Union{BlockLowerTriangular{T}, BlockUpperTriangular{T}} - - function elementwise end - - const pw = pairwise - const ew = elementwise - - # Various bits of utility that aren't inherently GP-related. Often very type-piratic. - include(joinpath("util", "zygote_rules.jl")) - include(joinpath("util", "covariance_matrices.jl")) - include(joinpath("util", "block_arrays", "dense.jl")) - include(joinpath("util", "block_arrays", "diagonal.jl")) - include(joinpath("util", "block_arrays", "triangular.jl")) - include(joinpath("util", "abstract_data_set.jl")) - include(joinpath("util", "distances.jl")) - include(joinpath("util", "proper_type_piracy.jl")) - - # Supertype for GPs. - include("abstract_model.jl") - include("abstract_gp.jl") - - # Neural network used for building neural kernel network - include(joinpath("neural_network", "basic.jl")) - - # Atomic GP objects - include(joinpath("gp", "mean.jl")) - include(joinpath("gp", "kernel.jl")) - include(joinpath("gp", "neural_kernel_network.jl")) - include(joinpath("gp", "gp.jl")) - - # Composite GPs, constructed via affine transformation of CompositeGPs and GPs. - include(joinpath("composite", "composite_gp.jl")) - include(joinpath("composite", "indexing.jl")) - include(joinpath("composite", "cross.jl")) - include(joinpath("composite", "conditioning.jl")) - include(joinpath("composite", "approximate_conditioning.jl")) - include(joinpath("composite", "product.jl")) - include(joinpath("composite", "addition.jl")) - include(joinpath("composite", "compose.jl")) - # include(joinpath("composite", "gradient.jl")) - # include(joinpath("composite", "integrate.jl")) - - # Various stuff for convenience. - include(joinpath("util", "model.jl")) - include(joinpath("util", "plotting.jl")) + using Distributions, Distances, BlockArrays, Statistics, Random, FillArrays, + LinearAlgebra, Zygote + import Base: length, map + import Base.Broadcast: broadcasted, materialize, broadcast_shape + import Statistics: mean, cov + using LinearAlgebra: AbstractTriangular + using ZygoteRules: @adjoint + using Zygote: @nograd + using BlockArrays: _BlockArray + import LinearAlgebra: cholesky, cross + import Distances: pairwise, colwise + + const AV{T} = AbstractVector{T} + const AM{T} = AbstractMatrix{T} + const AVM{T} = AbstractVecOrMat{T} + + const BlockLowerTriangular{T} = LowerTriangular{T, <:BlockMatrix{T}} + const BlockUpperTriangular{T} = UpperTriangular{T, <:BlockMatrix{T}} + const BlockTriangular{T} = Union{BlockLowerTriangular{T}, BlockUpperTriangular{T}} + + function elementwise end + + const pw = pairwise + const ew = elementwise + + # Various bits of utility that aren't inherently GP-related. Often very type-piratic. + include(joinpath("util", "zygote_rules.jl")) + include(joinpath("util", "covariance_matrices.jl")) + include(joinpath("util", "block_arrays", "dense.jl")) + include(joinpath("util", "block_arrays", "diagonal.jl")) + include(joinpath("util", "block_arrays", "triangular.jl")) + include(joinpath("util", "abstract_data_set.jl")) + include(joinpath("util", "distances.jl")) + include(joinpath("util", "proper_type_piracy.jl")) + include(joinpath("util", "parameter_handler.jl")) + + # Supertype for GPs. 
+ include("abstract_gp.jl") + + # Neural network used for building neural kernel network + include(joinpath("neural_network", "basic.jl")) + + # Atomic GP objects + include(joinpath("gp", "mean.jl")) + include(joinpath("gp", "kernel.jl")) + include(joinpath("gp", "neural_kernel_network.jl")) + include(joinpath("gp", "gp.jl")) + + # Composite GPs, constructed via affine transformation of CompositeGPs and GPs. + include(joinpath("composite", "composite_gp.jl")) + include(joinpath("composite", "indexing.jl")) + include(joinpath("composite", "cross.jl")) + include(joinpath("composite", "conditioning.jl")) + include(joinpath("composite", "approximate_conditioning.jl")) + include(joinpath("composite", "product.jl")) + include(joinpath("composite", "addition.jl")) + include(joinpath("composite", "compose.jl")) + # include(joinpath("composite", "gradient.jl")) + # include(joinpath("composite", "integrate.jl")) + + # Various stuff for convenience. + include(joinpath("util", "model.jl")) + include(joinpath("util", "plotting.jl")) end # module diff --git a/src/abstract_gp.jl b/src/abstract_gp.jl index deeae51e..7d60c56a 100644 --- a/src/abstract_gp.jl +++ b/src/abstract_gp.jl @@ -1,6 +1,6 @@ export GPC -abstract type AbstractGP <: AbstractModel end +abstract type AbstractGP end # A collection of GPs (GPC == "GP Collection"). Used to keep track of GPs. mutable struct GPC diff --git a/src/composite/composite_gp.jl b/src/composite/composite_gp.jl index b2fa1e80..facc3ce2 100644 --- a/src/composite/composite_gp.jl +++ b/src/composite/composite_gp.jl @@ -13,16 +13,8 @@ struct CompositeGP{Targs} <: AbstractGP return gp end end -get_iparam(::CompositeGP) = Union{}[] -function child(c::CompositeGP) - models = [] - for i in eachindex(c.args) - if c.args[i] isa AbstractModel - push!(models, c.args[i]) - end - end - tuple(models...) -end +get_iparam(::CompositeGP) = throw(UndefVarError("get_iparam method currently not defined for composite GP")) +child(::CompositeGP) = throw(UndefVarError("child method currently not defined for composite GP")) CompositeGP(args::Targs, gpc::GPC) where {Targs} = CompositeGP{Targs}(args, gpc) diff --git a/src/gp/kernel.jl b/src/gp/kernel.jl index b6445ffd..df584959 100644 --- a/src/gp/kernel.jl +++ b/src/gp/kernel.jl @@ -4,7 +4,7 @@ using Base.Broadcast: broadcast_shape using LinearAlgebra: isposdef, checksquare -abstract type Kernel <: AbstractModel end +abstract type Kernel end """ Changes: @@ -37,8 +37,6 @@ A rank 0 `Kernel` that always returns zero. struct ZeroKernel{T<:Real} <: Kernel end ZeroKernel() = ZeroKernel{Float64}() zero(::Kernel) = ZeroKernel() -get_iparam(::ZeroKernel) = Union{}[] -child(::ZeroKernel) = () # Binary methods. ew(k::ZeroKernel{T}, x::AV, x′::AV) where {T} = zeros(T, broadcast_shape(size(x), size(x′))) @@ -58,8 +56,6 @@ but (almost certainly) shouldn't be used as a base `Kernel`. """ struct OneKernel{T<:Real} <: Kernel end OneKernel() = OneKernel{Float64}() -get_iparam(::OneKernel) = Union{}[] -child(::OneKernel) = () # Binary methods. ew(k::OneKernel{T}, x::AV, x′::AV) where {T} = ones(T, broadcast_shape(size(x), size(x′))) @@ -81,7 +77,6 @@ struct ConstKernel{T, cT<:AV{T}} <: Kernel end ConstKernel(c::Real) = ConstKernel(typeof(c)[c]) get_iparam(c::ConstKernel) = c.c -child(::ConstKernel) = () # Binary methods. ew(k::ConstKernel, x::AV, x′::AV) = fill(k.c[1], broadcast_shape(size(x), size(x′))...) @@ -104,8 +99,6 @@ Squared Exponential kernel. For length scales etc see [`stretch`](@ref), for variance see [`*`](@ref). 
""" struct EQ <: Kernel end -get_iparam(::EQ) = Union{}[] -child(::EQ) = () # Binary methods. ew(::EQ, x::AV, x′::AV) = exp.(.-ew(SqEuclidean(), x, x′) ./ 2) @@ -133,7 +126,6 @@ end PerEQ(l::Real, f) = PerEQ(typeof(l)[l], f) PerEQ(l::Real) = PerEQ(l, identity) get_iparam(per::PerEQ) = per.l -child(::PerEQ) = () _pereq(d, l) = exp(-2.0*sin(π*d)^2 / l^2) @@ -157,8 +149,6 @@ The standardised Matern-1/2 / Exponential kernel: For length scales etc see [`stretch`](@ref), for variance see [`*`](@ref). """ struct Matern12 <: Kernel end -get_iparam(::Matern12) = Union{}[] -child(::Matern12) = () # Binary methods ew(k::Matern12, x::AV, x′::AV) = exp.(.-ew(Euclidean(), x, x′)) @@ -189,8 +179,6 @@ The standardised Matern kernel with ν = 3 / 2. For length scales etc see [`stretch`](@ref), for variance see [`*`](@ref). """ struct Matern32 <: Kernel end -get_iparam(::Matern32) = Union{}[] -child(::Matern32) = () function _matern32(d::Real) d = sqrt(3) * d @@ -215,8 +203,6 @@ The standardised Matern kernel with ν = 5 / 2. For length scales etc see [`stretch`](@ref), for variance see [`*`](@ref). """ struct Matern52 <: Kernel end -get_iparam(::Matern52) = Union{}[] -child(::Matern52) = () function _Matern52(d::Real) λ = sqrt(5) * d @@ -255,7 +241,6 @@ end RQ(α::Real, f) = RQ(typeof(α)[α], f) RQ(α::Real) = RQ(α, identity) get_iparam(rq::RQ) = rq.α -child(::RQ) = () _rq(d, α) = (1 + d / (2α))^(-α) @@ -292,7 +277,6 @@ end Cosine(p::Real, f) = Cosine(typeof(p)[p], f) Cosine(p::Real) = Cosine(p, identity) get_iparam(c::Cosine) = c.p -child(::Cosine) = () # Binary methods. ew(k::Cosine, x::AV{<:Real}, x′::AV{<:Real}) = cos.(pi.*ew(Euclidean(), x, x′) ./ k.f(k.p[1])) @@ -310,8 +294,6 @@ pw(k::Cosine, x::AV{<:Real}) = cos.(pi .* pw(Euclidean(), x) ./ k.f(k.p[1])) The standardised linear kernel / dot-product kernel. """ struct Linear <: Kernel end -get_iparam(::Linear) = Union{}[] -child(::Linear) = () # Binary methods ew(k::Linear, x::AV{<:Real}, x′::AV{<:Real}) = x .* x′ @@ -343,7 +325,6 @@ end Poly(p::Int, σ²::Real, f) = Poly{p, typeof(σ²), AV{typeof(σ²)}, typeof(f)}(typeof(σ²)[σ²], f) Poly(p::Int, σ²::Real) = Poly(p, σ², identity) get_iparam(p::Poly) = p.σ² -child(::Poly) = () _poly(k, σ², p) = (σ² + k)^p Zygote.@adjoint function _poly(k, σ², p) @@ -376,7 +357,6 @@ end GammaExp(γ::Real, f) = GammaExp(typeof(γ)[γ], f) GammaExp(γ::Real) = GammaExp(γ, identity) get_iparam(g::GammaExp) = g.γ -child(::GammaExp) = () # Binary methods ew(k::GammaExp, x::AV, x′::AV) = exp.(.-ew(Euclidean(), x, x′).^(k.f(k.γ[1]))) @@ -394,8 +374,6 @@ pw(k::GammaExp, x::AV) = exp.(.-pw(Euclidean(), x).^(k.f(k.γ[1]))) The standardised stationary Wiener-process kernel. """ struct Wiener <: Kernel end -get_iparam(::Wiener) = Union{}[] -child(::Wiener) = () _wiener(x::Real, x′::Real) = min(x, x′) @@ -415,8 +393,6 @@ pw(k::Wiener, x::AV{<:Real}) = pw(k, x, x) The standardised WienerVelocity kernel. """ struct WienerVelocity <: Kernel end -get_iparam(::WienerVelocity) = Union{}[] -child(::WienerVelocity) = () _wiener_vel(x::Real, x′::Real) = min(x, x′)^3 / 3 + abs(x - x′) * min(x, x′)^2 / 2 @@ -437,8 +413,6 @@ The standardised aleatoric white-noise kernel. Isn't really a kernel, but never """ struct Noise{T<:Real} <: Kernel end Noise() = Noise{Int}() -get_iparam(::Noise) = Union{}[] -child(::Noise) = () # Binary methods. ew(k::Noise{T}, x::AV, x′::AV) where {T} = zeros(T, broadcast_shape(size(x), size(x′))...) 
@@ -492,7 +466,6 @@ struct Sum{Tkl<:Kernel, Tkr<:Kernel} <: Kernel
     kl::Tkl
     kr::Tkr
 end
-get_iparam(::Sum) = Union{}[]
 child(s::Sum) = (s.kl, s.kr)
 """
     +(kl::Kernel, kr::Kernel)
@@ -529,9 +502,6 @@ struct Product{Tkl<:Kernel, Tkr<:Kernel} <: Kernel
     kl::Tkl
     kr::Tkr
 end
-get_iparam(::Product) = Union{}[]
 child(p::Product) = (p.kl, p.kr)
 """
     *(kl::Kernel, kr::Kernel)
diff --git a/src/gp/mean.jl b/src/gp/mean.jl
index 5651c0bb..2e2d0699 100644
--- a/src/gp/mean.jl
+++ b/src/gp/mean.jl
@@ -1,6 +1,6 @@
 import Base: zero
 
-abstract type MeanFunction <: AbstractModel end
+abstract type MeanFunction end
 
 
 
@@ -10,8 +10,6 @@ abstract type MeanFunction end
 Returns `zero(T)` everywhere.
 """
 struct ZeroMean{T<:Real} <: MeanFunction end
-get_iparam(::ZeroMean) = Union{}[]
-child(::ZeroMean) = ()
 ZeroMean() = ZeroMean{Float64}()
 ew(::ZeroMean{T}, x::AV) where T = zeros(T, length(x))
 zero(::MeanFunction) = ZeroMean()
@@ -24,8 +22,6 @@ zero(::MeanFunction) = ZeroMean()
 Return `one(T)` everywhere.
 """
 struct OneMean{T<:Real} <: MeanFunction end
-get_iparam(::OneMean) = Union{}[]
-child(::OneMean) = ()
 OneMean() = OneMean{Float64}()
 ew(::OneMean{T}, x::AV) where T = ones(T, length(x))
 
@@ -41,7 +37,6 @@ struct ConstMean{T, cT<:AV{T}} <: MeanFunction
 end
 ConstMean(c::Real) = ConstMean(typeof(c)[c])
 get_iparam(c::ConstMean) = c.c
-child(::ConstMean) = ()
 
 ew(m::ConstMean, x::AV) = fill(m.c[1], length(x))
 
@@ -54,6 +49,5 @@ A wrapper around whatever unary function you fancy.
 struct CustomMean{Tf} <: MeanFunction
     f::Tf
 end
-get_iparam(::CustomMean) = Union{}[]
 child(c::CustomMean) = (c.f,)
 ew(f::CustomMean, x::AV) = map(f.f, x)
diff --git a/src/gp/neural_kernel_network.jl b/src/gp/neural_kernel_network.jl
index a98ebc07..a59a2ed7 100644
--- a/src/gp/neural_kernel_network.jl
+++ b/src/gp/neural_kernel_network.jl
@@ -2,11 +2,10 @@ export Primitive, NeuralKernelNetwork
 
 
 # Primitive layer, mainly acts as a container to hold basic kernels for the neural kernel network
-struct Primitive{T} <: AbstractModel
+struct Primitive{T}
     kernels::T
     Primitive(ks...) = new{typeof(ks)}(ks)
 end
-get_iparam(p::Primitive) = Union{}[]
 child(p::Primitive) = p.kernels
 
 # flatten k kernel matrices of size Mk×Nk to 1×(Mk*Nk), and concatenate these arrays into a k×(Mk*Nk) 2d array
 _cat_kernel_array(x) = vcat([reshape(x[i], 1, :) for i in 1:length(x)]...)
 
 # NOTE: though we implement `ew` & `pw` functions for Primitive, it isn't a subtype of Kernel type,
@@ -17,7 +16,7 @@
 ew(p::Primitive, x) = _cat_kernel_array(map(k->ew(k, x), p.kernels))
 pw(p::Primitive, x) = _cat_kernel_array(map(k->pw(k, x), p.kernels))
 
-ew(p::Primitive, x, x′) = _cat_kernel_array(map(k-ew(k, x, x′), p.kernels))
+ew(p::Primitive, x, x′) = _cat_kernel_array(map(k->ew(k, x, x′), p.kernels))
 pw(p::Primitive, x, x′) = _cat_kernel_array(map(k->pw(k, x, x′), p.kernels))
 
 function Base.show(io::IO, layer::Primitive)
@@ -30,7 +29,7 @@ end
 # Neural Kernel Network: since the space of stationary kernels is closed under linear combination
 # (with positive coefficients) and element-wise multiplication, we can use a neural-network-like structure
 # to build composite kernels. This type contains a `Primitive` layer which holds basic kernels and a specialised
 # neural network architecture to perform kernel composition. It should work like a normal `Stheno` kernel.
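+#
+# An illustrative construction (a sketch under the post-patch API; the particular kernels
+# and layer sizes are hypothetical):
+#
+#     primitives = Primitive(0.5 * stretch(EQ(), 0.1), 1.0 * stretch(Matern52(), 0.2))
+#     nkn = NeuralKernelNetwork(primitives, chain(LinearLayer(2, 2), ProductLayer(2)))
+#     pw(nkn, x)   # an N×N kernel matrix, just like any other `Stheno` kernel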
-struct NeuralKernelNetwork{PT<:Primitive, CT<:Chain} <: Kernel - player::PT - chain::CT +struct NeuralKernelNetwork{PT<:Primitive, NNT<:Union{LinearLayer, ProductLayer, Chain}} <: Kernel + primitives::PT + nn::NNT end -get_iparam(nkn::NeuralKernelNetwork) = Union{}[] -child(nkn::NeuralKernelNetwork) = (nkn.player, nkn.chain) +child(nkn::NeuralKernelNetwork) = (nkn.primitives, nkn.nn) # use this function to reshape the 1d array back to kernel matrix _rebuild_kernel(x, n, m) = reshape(x, n, m) @@ -44,15 +42,15 @@ _rebuild_kernel(x, n, m) = reshape(x, n, m) # therefore, we reshape it to 1d _rebuild_diag(x) = reshape(x, :) -ew(nkn::NeuralKernelNetwork, x) = _rebuild_diag(nkn.chain(ew(nkn.player, x))) -pw(nkn::NeuralKernelNetwork, x) = _rebuild_kernel(nkn.chain(pw(nkn.player, x)), length(x), length(x)) +ew(nkn::NeuralKernelNetwork, x) = _rebuild_diag(nkn.nn(ew(nkn.primitives, x))) +pw(nkn::NeuralKernelNetwork, x) = _rebuild_kernel(nkn.nn(pw(nkn.primitives, x)), length(x), length(x)) -ew(nkn::NeuralKernelNetwork, x, x′) = _rebuild_diag(nkn.chain(ew(nkn.player, x, x′))) -pw(nkn::NeuralKernelNetwork, x, x′) = _rebuild_kernel(nkn.chain(pw(nkn.player, x, x′)), length(x), length(x′)) +ew(nkn::NeuralKernelNetwork, x, x′) = _rebuild_diag(nkn.nn(ew(nkn.primitives, x, x′))) +pw(nkn::NeuralKernelNetwork, x, x′) = _rebuild_kernel(nkn.nn(pw(nkn.primitives, x, x′)), length(x), length(x′)) function Base.show(io::IO, kernel::NeuralKernelNetwork) print(io, "NeuralKernelNetwork(") - join(io, [kernel.player, kernel.chain], ", ") + join(io, [kernel.primitives, kernel.nn], ", ") print(io, ")") end diff --git a/src/neural_network/basic.jl b/src/neural_network/basic.jl index 55d959c6..fe25d4e2 100644 --- a/src/neural_network/basic.jl +++ b/src/neural_network/basic.jl @@ -17,11 +17,10 @@ minimum neural network module specifically for neural kernel network. softplus(x) = log(1+exp(x)) -struct LinearLayer{T, MT<:AM{T}} <: AbstractModel +struct LinearLayer{T, MT<:AM{T}} W::MT end get_iparam(l::LinearLayer) = l.W -child(l::LinearLayer) = () LinearLayer(in_dim, out_dim) = LinearLayer(randn(out_dim, in_dim)) (lin::LinearLayer)(x) = softplus.(lin.W) * x @@ -30,11 +29,9 @@ function Base.show(io::IO, layer::LinearLayer) end -struct ProductLayer <: AbstractModel +struct ProductLayer step::Int end -get_iparam(::ProductLayer) = Union{}[] -child(::ProductLayer) = () function (p::ProductLayer)(x) m, n = size(x) x1 = reshape(x, p.step, m÷p.step, n) @@ -43,11 +40,10 @@ function (p::ProductLayer)(x) end -struct Chain <: AbstractModel - models::Tuple{Vararg{AbstractModel}} +struct Chain + models::Tuple Chain(ms...) 
= new(ms) end -get_iparam(::Chain) = Union{}[] child(c::Chain) = c.models applychain(::Tuple{}, x) = x applychain(fs::Tuple, x) = applychain(tail(fs), first(fs)(x)) diff --git a/src/abstract_model.jl b/src/util/parameter_handler.jl similarity index 68% rename from src/abstract_model.jl rename to src/util/parameter_handler.jl index 940a4ac9..8c0b7914 100644 --- a/src/abstract_model.jl +++ b/src/util/parameter_handler.jl @@ -1,4 +1,4 @@ -export AbstractModel, parameters, parameter_eltype, dispatch!, extract_gradient +export parameters, parameter_eltype, dispatch!, extract_gradient """ @@ -8,16 +8,16 @@ our model a tree structure, and facilitate for collecting and redistributing par Here is an example of how our GP model now looks like: - GP - | | - | | - ConstantMean Scaled - (c) (σ) - | - Stretched - (l) - | - EQ() + GP + | | + | | + ConstantMean Scaled + (c) (σ) + | + Stretched + (l) + | + EQ() Parameters for this model are `c`, `σ` & `l`, we can use: ```julia @@ -39,20 +39,16 @@ child(::EQ) = () ``` """ - -abstract type AbstractModel end - # Return parameters contained inside a model -get_iparam(m::AbstractModel) = throw(UndefVarError("get_iparam method not defined for $m")) +get_iparam(::Any) = Union{}[] # Return model that contained in another model, e.g. `Stretched` contains kernel -child(m::AbstractModel) = throw(UndefVarError("child method not defined for $m")) +child(::Any) = () # parameter_eltype will return the type of each paramters inside a model, for those types that # are not subtype of `AbstractModel`, and cases where a model contains no parameters, e.g. EQ kernel, # it will return `Union{}`. -parameter_eltype(::Any) = Union{} -function parameter_eltype(x::AbstractModel) +function parameter_eltype(x) T = eltype(get_iparam(x)) for each in child(x) T = promote_type(T, parameter_eltype(each)) @@ -62,9 +58,8 @@ end # Extract all parameters of a model to a 1D array -parameters(x::AbstractModel) = parameters!(parameter_eltype(x)[], x) -parameters!(out, ::Any) = out -function parameters!(out, x::AbstractModel) +parameters(x) = parameters!(parameter_eltype(x)[], x) +function parameters!(out, x) append!(out, get_iparam(x)) for x_child in child(x) parameters!(out, x_child) @@ -74,20 +69,20 @@ end # Return number of parameters contained inside a model -get_nparameter(x::AbstractModel) = length(parameters(x)) +get_nparameter(x) = length(parameters(x)) # dispatch! allows us to update parameters inside a model, it accept a model and a 1D # array, it will assign values inside the array to the corresponding parameter of the model. -function dispatch!(k::AbstractModel, v::AV) +function dispatch!(k, v::AV) nθ_k = get_nparameter(k) nθ_k == length(v) || throw(DimensionMismatch("expect $(nθ_k) parameters, got $(length(v))")) θ = get_iparam(k) copyto!(θ, 1, v, 1, length(θ)) loc = 1 + length(θ) for k′ in child(k) - nθ_k′ = get_nparameter(k′) - dispatch!(k′, v[loc:loc+nθ_k′-1]) + nθ_k′ = get_nparameter(k′) + dispatch!(k′, v[loc:loc+nθ_k′-1]) loc += nθ_k′ end return k @@ -107,20 +102,20 @@ end # ``` # the results is a `NamedTuple`. `extract_gradient` function is used # to extract the value of those gradients to a 1D array. 
-extract_gradient(k::AbstractModel, G::NamedTuple) = extract_gradient!(parameter_eltype(k)[], G)
+extract_gradient(k, G::NamedTuple) = extract_gradient!(parameter_eltype(k)[], G)
 function extract_gradient!(out, G::NamedTuple)
     for (_, val) in pairs(G)
         if val isa AbstractVecOrMat
-            append!(out, val)
-        elseif val isa NamedTuple
-            extract_gradient!(out, val)
-        elseif val isa Tuple
-            for each in val
-                if each isa NamedTuple
-                    extract_gradient!(out, each)
-                end
-            end
-        end
+            append!(out, val)
+        elseif val isa NamedTuple
+            extract_gradient!(out, val)
+        elseif val isa Tuple
+            for each in val
+                if each isa NamedTuple
+                    extract_gradient!(out, each)
+                end
+            end
+        end
     end
     return out
 end
diff --git a/test/gp/kernel.jl b/test/gp/kernel.jl
index 753c5a35..d1e5afec 100644
--- a/test/gp/kernel.jl
+++ b/test/gp/kernel.jl
@@ -192,6 +192,53 @@ using LinearAlgebra
             differentiable_kernel_tests(k, ȳ, Ȳ, Ȳ_sq, X0, X1, X2; atol=1e-7, rtol=1e-7)
         end
     end
+
+    @timedtestset "NeuralKernelNetwork" begin
+        @timedtestset "general test" begin
+            k1 = 0.5 * stretch(EQ(), 0.1)
+            k2 = 1.0 * stretch(PerEQ(1.0), 0.2)
+            prim_layer = Primitive(k1, k2)
+
+            lin = LinearLayer(2, 2)
+            prod = ProductLayer(2)
+            nn = chain(lin, prod)
+
+            nkn = NeuralKernelNetwork(prim_layer, nn)
+            differentiable_kernel_tests(nkn, ȳ, Ȳ, Ȳ_sq, x0, x1, x2; atol=1e-7, rtol=1e-7)
+            differentiable_kernel_tests(nkn, ȳ, Ȳ, Ȳ_sq, X0, X1, X2; atol=1e-7, rtol=1e-7)
+        end
+        @timedtestset "kernel composition test" begin
+            k1 = 0.5 * stretch(EQ(), 0.1)
+            k2 = 1.0 * stretch(PerEQ(1.0), 0.2)
+            prim_layer = Primitive(k1, k2)
+            lin = LinearLayer(ones(1, 2))
+            prod = ProductLayer(2)
+            nkn_add_kernel = NeuralKernelNetwork(prim_layer, lin)
+            nkn_prod_kernel = NeuralKernelNetwork(prim_layer, prod)
+
+            sum_k = Stheno.softplus(1.0)*k1 + Stheno.softplus(1.0)*k2
+            prod_k = k1 * k2
+
+            # vector input
+            @test ew(nkn_add_kernel, x0) ≈ ew(sum_k, x0)
+            @test ew(nkn_add_kernel, x0, x1) ≈ ew(sum_k, x0, x1)
+            @test pw(nkn_add_kernel, x0) ≈ pw(sum_k, x0)
+            @test pw(nkn_add_kernel, x0, x1) ≈ pw(sum_k, x0, x1)
+
+            # ColVecs input
+            @test ew(nkn_add_kernel, ColVecs(X0)) ≈ ew(sum_k, ColVecs(X0))
+            @test ew(nkn_add_kernel, ColVecs(X0), ColVecs(X1)) ≈ ew(sum_k, ColVecs(X0), ColVecs(X1))
+            @test pw(nkn_add_kernel, ColVecs(X0)) ≈ pw(sum_k, ColVecs(X0))
+            @test pw(nkn_add_kernel, ColVecs(X0), ColVecs(X1)) ≈ pw(sum_k, ColVecs(X0), ColVecs(X1))
+
+            # product kernel
+            @test ew(nkn_prod_kernel, x0) ≈ ew(prod_k, x0)
+            @test ew(nkn_prod_kernel, x0, x1) ≈ ew(prod_k, x0, x1)
+            @test pw(nkn_prod_kernel, x0) ≈ pw(prod_k, x0)
+            @test pw(nkn_prod_kernel, x0, x1) ≈ pw(prod_k, x0, x1)
+        end
+    end
+
     @timedtestset "kernel" begin
         x = randn(11)
         @test pw(EQ(), x) == pw(kernel(EQ()), x)
 
From c4874609f7249adb106fb7ace26282fa511b Mon Sep 17 00:00:00 2001
From: hongbinren
Date: Sat, 14 Mar 2020 00:04:51 +0800
Subject: [PATCH 41/42] Fix ColVecs inputs in NKN composition tests

---
 test/gp/kernel.jl | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/test/gp/kernel.jl b/test/gp/kernel.jl
index d1e5afec..8db5a3c9 100644
--- a/test/gp/kernel.jl
+++ b/test/gp/kernel.jl
@@ -226,10 +226,10 @@ using LinearAlgebra
             @test pw(nkn_add_kernel, x0, x1) ≈ pw(sum_k, x0, x1)
 
             # ColVecs input
-            @test ew(nkn_add_kernel, ColVecs(X0)) ≈ ew(sum_k, ColVecs(X0))
-            @test ew(nkn_add_kernel, ColVecs(X0), ColVecs(X1)) ≈ ew(sum_k, ColVecs(X0), ColVecs(X1))
-            @test pw(nkn_add_kernel, ColVecs(X0)) ≈ pw(sum_k, ColVecs(X0))
-            @test pw(nkn_add_kernel, ColVecs(X0), ColVecs(X1)) ≈ pw(sum_k, ColVecs(X0), ColVecs(X1))
+            @test ew(nkn_add_kernel, X0) ≈ ew(sum_k, X0)
+            @test ew(nkn_add_kernel, X0, X1) ≈ ew(sum_k, X0, X1)
+            @test pw(nkn_add_kernel, X0) ≈ pw(sum_k, X0)
+            @test pw(nkn_add_kernel, X0, X1) ≈ pw(sum_k, X0, X1)
 
             # product kernel
             @test ew(nkn_prod_kernel, x0) ≈ ew(prod_k, x0)
 
From 38c5786ab0f72a4c011120722ca7176a50 Mon Sep 17 00:00:00 2001
From: hongbinren
Date: Sat, 14 Mar 2020 01:15:17 +0800
Subject: [PATCH 42/42] Remove stale log-scale note for `σ²`

---
 src/gp/kernel.jl | 1 -
 1 file changed, 1 deletion(-)

diff --git a/src/gp/kernel.jl b/src/gp/kernel.jl
index df584959..92e18067 100644
--- a/src/gp/kernel.jl
+++ b/src/gp/kernel.jl
@@ -558,7 +558,6 @@ julia> pw(0.5 * k, x) == 0.5 .* Stheno.pw(k, x)
 true
 ```
 """
-# NOTE: σ² is in log scale !!!
 *(σ²::Real, k::Kernel) = scale(k, σ²)
 *(k::Kernel, σ²) = σ² * k