Skip to content

Commit

Permalink
make pgpr vs svgp comparisons non-sparse
Browse files Browse the repository at this point in the history
  • Loading branch information
GiovanniPasserello committed May 28, 2021
1 parent 23b3b36 commit ef9cff5
Show file tree
Hide file tree
Showing 3 changed files with 16 additions and 21 deletions.
18 changes: 8 additions & 10 deletions shgp/classification/demos/demo_pgpr_vs_svgp_1D.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@
import tensorflow as tf

from shgp.data.dataset import PlatformDataset
from shgp.inducing.initialisation_methods import uniform_subsample
from shgp.likelihoods.pg_bernoulli import PolyaGammaBernoulli
from shgp.utilities.general import invlink
from shgp.utilities.train_pgpr import train_pgpr
Expand All @@ -18,24 +17,20 @@ def model_comparison():
# Model Optimisation #
######################

num_inducing = 10

# PGPR
pgpr, pgpr_elbo = train_pgpr(
X, Y,
10, 1000, 10,
kernel_type=gpflow.kernels.Matern52,
M=num_inducing,
init_method=uniform_subsample
20, 2000, 20,
kernel_type=gpflow.kernels.SquaredExponential
)
print("pgpr trained")

# TODO: This comparison of Bernoulli vs PG is worth showing in `Evaluation'.
# SVGP (choose Bernoulli or PG likelihood for comparison)
# likelihood = gpflow.likelihoods.Bernoulli(invlink=tf.sigmoid)
likelihood = PolyaGammaBernoulli()
likelihood = gpflow.likelihoods.Bernoulli(invlink=tf.sigmoid)
# likelihood = PolyaGammaBernoulli()
svgp = gpflow.models.SVGP(
kernel=gpflow.kernels.Matern52(),
kernel=gpflow.kernels.SquaredExponential(),
likelihood=likelihood,
inducing_variable=pgpr.inducing_variable.Z
)
Expand Down Expand Up @@ -72,7 +67,10 @@ def model_comparison():
# Meta
plt.title('PGPR vs SVGP - Platform Dataset')
plt.xlim((-2, 2))
plt.xlabel('x')
plt.ylim((-0.5, 1.5))
plt.yticks([0, 0.5, 1])
plt.ylabel('p( y=1 | x )')

# Display
plt.legend(loc='upper right')
Expand Down
15 changes: 6 additions & 9 deletions shgp/classification/demos/demo_pgpr_vs_svgp_2D.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@
import tensorflow as tf

from shgp.data.dataset import BananaDataset
from shgp.inducing.initialisation_methods import uniform_subsample
from shgp.likelihoods.pg_bernoulli import PolyaGammaBernoulli
from shgp.utilities.general import invlink
from shgp.utilities.train_pgpr import train_pgpr
Expand All @@ -18,22 +17,18 @@ def model_comparison():
# Model Optimisation #
######################

num_inducing = 20

# PGPR
pgpr, pgpr_elbo = train_pgpr(
X, Y,
20, 250, 10,
kernel_type=gpflow.kernels.SquaredExponential,
M=num_inducing,
init_method=uniform_subsample
20, 2000, 20,
kernel_type=gpflow.kernels.SquaredExponential
)
print("pgpr trained")

# TODO: This comparison of Bernoulli vs PG is worth showing in `Evaluation'.
# SVGP (choose Bernoulli or PG likelihood for comparison)
# likelihood = gpflow.likelihoods.Bernoulli(invlink=tf.sigmoid)
likelihood = PolyaGammaBernoulli()
likelihood = gpflow.likelihoods.Bernoulli(invlink=tf.sigmoid)
# likelihood = PolyaGammaBernoulli()
svgp = gpflow.models.SVGP(
kernel=gpflow.kernels.SquaredExponential(),
likelihood=likelihood,
Expand Down Expand Up @@ -87,6 +82,8 @@ def model_comparison():
c1.collections[0].set_label('PGPR ({:.2f})'.format(pgpr_elbo))
c2.collections[0].set_label('SVGP ({:.2f})'.format(svgp_elbo))

plt.xlabel('x1')
plt.ylabel('x2', rotation=1)
plt.title('PGPR vs SVGP - Banana Dataset')
plt.legend(loc='upper left')
plt.show()
Expand Down
4 changes: 2 additions & 2 deletions shgp/utilities/train_pgpr_svgp.py
Original file line number Diff line number Diff line change
Expand Up @@ -115,7 +115,7 @@ def _try_train_pgpr_svgp(X, Y, M, opt_iters, kernel_type, init_method, reinit_me
# return result(svgp, svgp.elbo((X, Y)), X_test, Y_test)


# TODO: Interleaved training (good, but slow and *very* non-monotonic and unreliable)
# Interleaved training (good, but slow and *very* non-monotonic and unreliable)
# def _train_sparse_pgpr_svgp(X, Y, M, opt_iters, kernel_type, reinit_method, reinit_metadata, optimise_Z, X_test, Y_test):
# """
# Train a sparse PGPR-SVGP model with a given reinitialisation method.
Expand Down Expand Up @@ -197,7 +197,7 @@ def _try_train_pgpr_svgp(X, Y, M, opt_iters, kernel_type, init_method, reinit_me
# return pgpr_svgp, np.max(elbos) # return the highest ELBO


# TODO: With greedy variance reinit (no PGPR)
# With greedy variance reinit (no PGPR)
def _train_sparse_pgpr_svgp(X, Y, M, opt_iters, kernel_type, reinit_method, reinit_metadata, optimise_Z, X_test, Y_test):
"""
Train a sparse PGPR-SVGP model with a given reinitialisation method.
Expand Down

0 comments on commit ef9cff5

Please sign in to comment.