LDA with collapsed Gibbs Sampling

Dear All,

I would like to verify my first Edward code, which is in fact a transformation of an example of Gaussian mixture inference via Gibbs sampling. Technically the code works; however, I am a bit unsatisfied with the inference quality:

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from time import time

import edward as ed
import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
from edward.models import (
    Dirichlet, Categorical, ParamMixture, Multinomial)

D = 10   # unused below
N = 200  # number of data points
K = 4    # number of mixture components

sess = ed.get_session()

# model definition

alpha = tf.ones(K)  # symmetric Dirichlet prior over the K mixing weights
theta = Dirichlet(alpha)
gamma = tf.ones(N)  # symmetric Dirichlet prior over each component's N categories
beta = Dirichlet(gamma, sample_shape=K)
mix = ParamMixture(theta, {'probs': beta}, Categorical, sample_shape=N)
z = mix.cat  # latent component assignments
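
For clarity, the generative process the model block above encodes (my reading of it) is

$$\theta \sim \mathrm{Dirichlet}(\mathbf{1}_K), \qquad \beta_k \sim \mathrm{Dirichlet}(\mathbf{1}_N), \quad k = 1, \dots, K,$$

$$z_n \sim \mathrm{Categorical}(\theta), \qquad x_n \mid z_n \sim \mathrm{Categorical}(\beta_{z_n}), \quad n = 1, \dots, N.$$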

# analytical complete conditionals; Edward derives these symbolically from the graph

theta_cond = ed.complete_conditional(theta)
beta_cond = ed.complete_conditional(beta)
z_cond = ed.complete_conditional(z)
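
As a sanity check on what complete_conditional should return here (my own derivation from Dirichlet-Categorical conjugacy, not taken from the Edward docs):

$$p(\theta \mid z) = \mathrm{Dirichlet}(\alpha + c), \qquad c_k = \sum_n \mathbb{1}[z_n = k],$$

$$p(\beta_k \mid x, z) = \mathrm{Dirichlet}(\gamma + m_k), \qquad m_{k,v} = \sum_n \mathbb{1}[z_n = k,\, x_n = v],$$

$$p(z_n = k \mid \theta, \beta, x_n) \propto \theta_k\, \beta_{k, x_n}.$$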

# test distribution: same shapes as the model, but with random parameters

arr = np.ones(K) * 4.0
arr = np.divide(arr, np.sum(arr))  # normalize; all entries equal 1/K

alpha_test = arr.astype(np.float32)

theta_test = Dirichlet(alpha_test)

arr = np.random.rand(N) * 4.0
arr = np.divide(arr, np.sum(arr))  # random concentrations, normalized

gamma_test = arr.astype(np.float32)
beta_test = Dirichlet(gamma_test,sample_shape=K)
mix_test = ParamMixture(theta_test, {'probs': beta_test}, Categorical, sample_shape=N)
z_test = mix_test.cat

# initialization: draw the chain's initial state from the prior and simulate data from the test model

theta_est,beta_est,z_est = sess.run([theta, beta, z])

mix_data, z_data, theta_true = sess.run([mix_test, z_test, theta_test])  # fetch theta_test in the same run so it is the draw that generated the data

mix_data = np.reshape(mix_data,(N,))
theta_est = np.reshape(np.asarray(theta_est),(K,))
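
A quick sanity check one could insert here (hypothetical, reusing z_data and theta_true from above): the empirical frequencies of the simulated assignments should roughly match the mixing weights that generated them.

# Hypothetical check: empirical component frequencies in the simulated
# assignments versus the mixing weights that generated them.
z_counts = np.bincount(np.asarray(z_data).astype(np.int64), minlength=K)
print('empirical z frequencies:', z_counts / float(N))
print('generating theta:', theta_true)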

print('Test parameters:')
print('theta:', theta_true)

print('Initial parameters:')
print('theta:', theta_est)
# print('gamma:', gamma_est)
print()

# inference: Gibbs sampler

cond_dict = {theta: theta_est, beta: beta_est,
             z: z_est, mix: mix_data}
t0 = time()
T = 50000
for t in range(T):
    z_est = sess.run(z_cond, cond_dict)
    cond_dict[z] = z_est
    theta_est = np.reshape(np.asarray(sess.run([theta_cond], cond_dict)), (K,))
    cond_dict[theta] = theta_est
    beta_est = sess.run(beta_cond, cond_dict)
    cond_dict[beta] = beta_est
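
One variant I am wondering about (my own guess, not something from the docs): discard a burn-in period and average theta over the remaining sweeps, instead of reporting only the final draw, which is a single noisy sample. A minimal sketch of the loop rewritten that way (burn_in is an arbitrary choice):

# Sketch only: same sweep as above, but keep theta draws after burn-in
# and report their average as the estimate.
burn_in = 10000
theta_samples = []
for t in range(T):
    z_est = sess.run(z_cond, cond_dict)
    cond_dict[z] = z_est
    theta_est = sess.run(theta_cond, cond_dict)
    cond_dict[theta] = theta_est
    beta_est = sess.run(beta_cond, cond_dict)
    cond_dict[beta] = beta_est
    if t >= burn_in:
        theta_samples.append(theta_est)
theta_mean = np.mean(theta_samples, axis=0)  # posterior-mean estimate of theta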

print('took %.3f seconds to run %d iterations' % (time() - t0, T))

print()
print('Final parameters:')
print('theta:', theta_est)

plt.figure(figsize=[10, 10])
plt.subplot(2, 1, 1)
plt.hist(mix_data, 50)
plt.title('Empirical Distribution of x')
plt.xlabel('x')
plt.ylabel('frequency')
xl = plt.xlim()
plt.subplot(2, 1, 2)
plt.hist(sess.run(mix, {theta: theta_est, beta: beta_est, z: z_est}), 50)
plt.title('Predictive distribution')
plt.xlabel('x')
plt.ylabel('frequency')
plt.xlim(xl)
plt.show()
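
To diagnose mixing, I would probably also look at a trace plot of theta over sweeps (again a sketch, using theta_samples from the burn-in variant above):

# Sketch: trace of each mixing weight across post-burn-in sweeps.
trace = np.asarray(theta_samples)  # shape (T - burn_in, K)
plt.figure()
for k in range(K):
    plt.plot(trace[:, k], label='theta[%d]' % k)
plt.xlabel('sweep (after burn-in)')
plt.ylabel('theta_k')
plt.legend()
plt.show()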

Mixture of Gaussians result: [attached histogram: mixture_gaussians]

LDA result: [attached histogram: LDA]

The Gibbs sampler has hardly learnt the latent z or the distribution parameters.

Any hints would be welcome.

M.