Implementation same as the Example of Bayesian Neural Networks in PyMC3 document


I am trying to implement a Bayesian neural network classification model, the same as the one in the PyMC3 documentation.
This is the code I implemented.

def neural_network(X):
    """Two-tanh-hidden-layer network; final layer is squashed with a sigmoid.

    Uses the module-level weight random variables W_0, W_1, W_2.
    Returns a flat 1-D tensor of per-example outputs in (0, 1).
    """
    hidden_1 = tf.tanh(tf.matmul(X, W_0))
    hidden_2 = tf.tanh(tf.matmul(hidden_1, W_1))
    out = tf.sigmoid(tf.matmul(hidden_2, W_2))
    # Collapse the [N, 1] column into shape [N].
    return tf.reshape(out, [-1])

n_hidden = 5
with tf.name_scope("model"):
    # Standard-normal priors over all weight matrices.
    W_0 = Normal(loc=tf.zeros([X_all.shape[1], n_hidden]), scale=tf.ones([X_all.shape[1], n_hidden]), name="W_0")
    W_1 = Normal(loc=tf.zeros([n_hidden, n_hidden]), scale=tf.ones([n_hidden, n_hidden]), name="W_1")
    W_2 = Normal(loc=tf.zeros([n_hidden, 1]), scale=tf.ones([n_hidden, 1]), name="W_2")
    X = tf.placeholder(tf.float32, [None, X_all.shape[1]], name="X")
    f = neural_network(X)
    # BUG FIX: `neural_network` already applies tf.sigmoid, so `f` holds
    # probabilities, not logits. Edward's Bernoulli interprets its first
    # positional argument as `logits`, which squashed the output a second
    # time and is why the classifier failed to learn the nonlinear boundary.
    # Pass the value explicitly as `probs` instead.
    Y = Bernoulli(probs=f, name="Y")

def _variational_normal(scope_name, shape):
    """Build a fully-factorized Normal with trainable loc and softplus-scale
    variables created under the given variable scope."""
    with tf.variable_scope(scope_name):
        loc = tf.get_variable("loc", shape)
        scale = tf.nn.softplus(tf.get_variable("scale", shape))
        return Normal(loc=loc, scale=scale)

# Mean-field variational posteriors, one Normal per weight matrix.
with tf.variable_scope("posterior"):
    qW_0 = _variational_normal("qW_0", [X_all.shape[1], n_hidden])
    qW_1 = _variational_normal("qW_1", [n_hidden, n_hidden])
    qW_2 = _variational_normal("qW_2", [n_hidden, 1])

inference = ed.KLqp({W_0: qW_0, W_1: qW_1, W_2: qW_2}, data={X: X_train, Y: Y_train}), n_iter=10000)

But result is this.

Test data.

In the PyMC3 documentation, the nonlinear classification works well, but my implementation does not. What's wrong?