I've just started learning Edward. I worked through the Bayesian linear regression example and then tried a small extension: I'd like to put a conjugate prior on the noise variance of y and estimate it with Gibbs sampling.
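Concretely, the model I have in mind (mirroring the code below) is roughly:

    w ~ Normal(0, I_D)
    b ~ Normal(0, 1)
    sigma_y ~ InverseGamma(1, 1)
    y | X ~ Normal(X w + b, sigma_y)

where sigma_y is the noise parameter I want to sample with Gibbs.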
%matplotlib inline
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import edward as ed
import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
from edward.models import Normal, Empirical
plt.style.use('ggplot')
def build_toy_dataset(N, w):
  D = len(w)
  x = np.random.normal(0.0, 2.0, size=(N, D))
  y = np.dot(x, w) + np.random.normal(0.0, 0.01, size=N)
  return x, y
ed.set_seed(42)
N = 40 # number of data points
D = 10 # number of features
w_true = np.random.randn(D) * 0.5
X_train, y_train = build_toy_dataset(N, w_true)
X_test, y_test = build_toy_dataset(N, w_true)
from edward.models import InverseGamma, Gamma
X = tf.placeholder(tf.float32, [N, D])
w = Normal(loc=tf.zeros(D), scale=tf.ones(D))
b = Normal(loc=tf.zeros(1), scale=tf.ones(1))
sigma_y = InverseGamma(tf.ones(N), tf.ones(N), sample_shape=1)
y = Normal(loc=ed.dot(X, w) + b, scale=sigma_y)
T = 500 # number of MCMC samples
qsigma_y = Empirical(tf.get_variable(
    "qsigma_y/params", [T, 1, N],
    initializer=tf.ones_initializer()))
inference = ed.Gibbs({sigma_y: qsigma_y}, data={X: X_train, y: y_train})
inference.run(n_sample=1, n_iter=250)
Running this, I get the following error:
---------------------------------------------------------------------------
NotImplementedError Traceback (most recent call last)
<ipython-input-8-b6599950810a> in <module>()
----> 1 inference = ed.Gibbs({sigma_y: qsigma_y}, data={X: X_train, y: y_train})
2 inference.run(n_sample=1, n_iter=250)
~/.virtualenvs/tensorflow-gpu/lib/python3.6/site-packages/edward/inferences/gibbs.py in __init__(self, latent_vars, proposal_vars, data)
44 if proposal_vars is None:
45 proposal_vars = {z: complete_conditional(z)
---> 46 for z in six.iterkeys(latent_vars)}
47 else:
48 check_latent_vars(proposal_vars)
~/.virtualenvs/tensorflow-gpu/lib/python3.6/site-packages/edward/inferences/gibbs.py in <dictcomp>(.0)
44 if proposal_vars is None:
45 proposal_vars = {z: complete_conditional(z)
---> 46 for z in six.iterkeys(latent_vars)}
47 else:
48 check_latent_vars(proposal_vars)
~/.virtualenvs/tensorflow-gpu/lib/python3.6/site-packages/edward/inferences/conjugacy/conjugacy.py in complete_conditional(rv, cond_set)
128 'statistics %s, but no available '
129 'exponential-family distribution has those '
--> 130 'sufficient statistics.' % str(dist_key))
131
132 # Swap sufficient statistics for placeholders, then take gradients
NotImplementedError: Conditional distribution has sufficient statistics (('#CPow-1.0000e+00', ('#x',)), ('#CPow-2.0000e+00', ('#x',)), ('#Log', ('#x',))), but no available exponential-family distribution has those sufficient statistics.
What is the problem, and what should I change to make this work?
Thank you!