Rejection sampling variational inference #819

Open · wants to merge 43 commits into master from rejection-sampling-variational-inference

Changes from 1 commit (of 43 commits)
4efb780  fix typos in docstring (Jan 3, 2018)
7e43d1b  add multinomial-dirichlet test, empty `RejectionSamplingKLqp` class (Jan 7, 2018)
d673763  Merge branch 'master' into rejection-sampling-variational-inference (Jan 12, 2018)
7a5f90e  remove `sample_shape=1` (Jan 12, 2018)
94a1bc3  add poisson-gamma test (Jan 14, 2018)
a4c87cc  WIP: begin to implement RSVI logic (Jan 15, 2018)
163414c  WIP: implement RSVI gradients (Jan 15, 2018)
f162135  add scrap notebook with gradient update algo (Jan 19, 2018)
2f96076  unit test gradient update algo in notebook (Jan 20, 2018)
2c1162b  unit test gradient update algo to 3 iterations (Jan 20, 2018)
ad25f6d  `test_kucukelbir_grad` passes (Jan 20, 2018)
7e4a9ce  correction: `test_kucukelbir_grad` passes (Jan 20, 2018)
8dc4f4f  cleanup (still skeptical this test works, as it seems almost stochastic) (Jan 20, 2018)
0aae8ed  move `test_kucukelbir_grad` to separate file (Jan 20, 2018)
70172fb  add `KucukelbirOptimizer` (Jan 20, 2018)
929e25c  pass `n`, `s_n` into `KucukelbirOptimizer` constructor (Jan 20, 2018)
95d9774  looking forward to seeing if this passes CI. locally, i have no idea … (Jan 20, 2018)
c212858  slightly more confidence (Jan 20, 2018)
81637fb  set trainable=False (Jan 20, 2018)
7aec66c  initialize `n` to 0 (Jan 21, 2018)
dda7f26  assert in loop (Jan 21, 2018)
2a4ccc8  add dummy parameter `global_step` for temporary compatibility (Jan 21, 2018)
8f69548  add `KucukelbirOptimizer` (Jan 21, 2018)
26f8ed8  2-space indent (Jan 21, 2018)
c7f3ea1  use `KucukelbirOptimizer` (Jan 21, 2018)
435ec01  cleanup (Jan 21, 2018)
45b17b8  test `qalpha`, `qbeta` values (Jan 21, 2018)
ed6e266  delete blank line (Jan 21, 2018)
80cee16  add `GammaRejectionSampler` (Jan 23, 2018)
ef45bc3  add `log_prob_s` to `GammaRejectionSampler` (Jan 23, 2018)
b94ef73  add citation to docstring (Jan 23, 2018)
a136f9d  add guts of RSVI, integrating w.r.t. z (Jan 23, 2018)
680894b  parametrize sampler with density (Jan 24, 2018)
47ba81c  pass density to rejection sampler; return gradients (Jan 24, 2018)
26f0c32  dict_swap[z] comes from rejection sampler, not `qz` (Jan 24, 2018)
7b997e1  delete gamma_rejection_sampler_vars (Jan 24, 2018)
6108125  delete TODO (Jan 24, 2018)
77e9a6c  WIP: _test_build_rejection_sampling_loss_and_gradients (Jan 30, 2018)
3846fa6  WIP: _test_build_rejection_sampling_loss_and_gradients (Jan 30, 2018)
23c33af  WIP: _test_build_rejection_sampling_loss_and_gradients (Jan 30, 2018)
4c481a0  WIP: _test_build_rejection_sampling_loss_and_gradients (Jan 30, 2018)
00c9325  WIP: _test_build_rejection_sampling_loss_and_gradients (Jan 30, 2018)
40d3808  pep8 (Jan 30, 2018)
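The arc of these commits is an implementation of rejection sampling variational inference (RSVI; Naesseth et al., 2017): samples are drawn by a reparameterized rejection sampler rather than from `qz` directly, so gradients can flow through the accepted proposal (see `add GammaRejectionSampler` and `dict_swap[z] comes from rejection sampler, not qz` above). For a Gamma(alpha, 1) variational family the standard choice is Marsaglia and Tsang's sampler, which pushes a standard normal epsilon through h(epsilon, alpha) = (alpha - 1/3) * (1 + epsilon / sqrt(9 * alpha - 3))**3 and accepts or rejects. A minimal NumPy sketch for orientation only, not code from this PR:

    import numpy as np

    def sample_gamma(alpha, rng=np.random):
      # Marsaglia-Tsang rejection sampler for Gamma(alpha, 1), alpha >= 1.
      # Returns the accepted z and the epsilon that produced it, since
      # RSVI differentiates z = h(epsilon, alpha) at the accepted epsilon.
      d = alpha - 1.0 / 3.0
      c = 1.0 / np.sqrt(9.0 * d)      # so that z = d * (1 + c * eps)**3
      while True:
        eps = rng.standard_normal()
        v = (1.0 + c * eps) ** 3
        if v <= 0.0:                  # proposal fell outside the support
          continue
        u = rng.uniform()
        if np.log(u) < 0.5 * eps ** 2 + d - d * v + d * np.log(v):
          return d * v, eps

The RSVI gradient then combines the pathwise derivative through h with a correction term accounting for the acceptance step.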
add KucukelbirOptimizer
William Wolf committed Jan 20, 2018
commit 70172fbc55c5da4f7953786334ca2f205f21cf3e
edward/optimizers/__init__.py (2 changes: 1 addition & 1 deletion)

@@ -9,7 +9,7 @@
 from tensorflow.python.util.all_util import remove_undocumented

 _allowed_symbols = [
-    'alp_optimizer_apply_gradients',
+    'KucukelbirOptimizer',
 ]

 remove_undocumented(__name__, allowed_exception_list=_allowed_symbols)
edward/optimizers/sgd.py (35 changes: 23 additions & 12 deletions)

@@ -5,15 +5,26 @@
 import tensorflow as tf


-def alp_optimizer_apply_gradients(n, s_n, grads_and_vars):
-  ops = []
-  for i, (grad, var) in enumerate(grads_and_vars):
-    updated_s_n = s_n[i].assign((t * grad**2) + (1 - t) * s_n[i])
-
-    p_n_first = eta * n**(-.5 + delta)
-    p_n_second = (1 + tf.sqrt(updated_s_n[i]))**(-1)
-    p_n = p_n_first * p_n_second
-
-    updated_var = var.assign_add(-p_n * grad)
-    ops.append(updated_var)
-  return ops
+class KucukelbirOptimizer:
+  """Adaptive step-size optimizer.
+
+  Implements the step-size sequence of Kucukelbir et al. (2017),
+  "Automatic Differentiation Variational Inference":
+    s_n = t * grad**2 + (1 - t) * s_{n-1}
+    p_n = eta * n**(-1/2 + delta) * (1 + sqrt(s_n))**(-1)
+  """
+
+  def __init__(self, t, delta, eta):
+    self.t = t
+    self.delta = delta
+    self.eta = eta
+
+  def apply_gradients(self, n, s_n, grads_and_vars):
+    ops = []
+    for i, (grad, var) in enumerate(grads_and_vars):
+      updated_s_n = s_n[i].assign((self.t * grad**2) + (1 - self.t) * s_n[i])
+
+      p_n_first = self.eta * n**(-.5 + self.delta)
+      p_n_second = (1 + tf.sqrt(updated_s_n[i]))**(-1)
+      p_n = p_n_first * p_n_second
+
+      updated_var = var.assign_add(-p_n * grad)
+      ops.append(updated_var)
+    return ops
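For intuition, `apply_gradients` smooths the squared gradient into s_n and anneals the step p_n roughly as n**(-1/2). A scalar NumPy sketch of one such update, illustrative only and not part of the diff:

    import numpy as np

    def kucukelbir_step(grad, s_prev, n, t=0.1, delta=1e-2, eta=1e-1):
      # s_n = t * grad**2 + (1 - t) * s_{n-1}          (smoothed squared gradient)
      # p_n = eta * n**(-1/2 + delta) / (1 + sqrt(s_n))  (annealed step size)
      s_n = t * grad ** 2 + (1 - t) * s_prev
      p_n = eta * n ** (-0.5 + delta) / (1.0 + np.sqrt(s_n))
      return -p_n * grad, s_n

    # First step from s_0 = 0, n = 1, gradient 2.0: s_1 = 0.4 and the
    # parameter moves by about -0.123.
    step, s_1 = kucukelbir_step(grad=2.0, s_prev=0.0, n=1.0)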
tests/optimizers/test_sgd.py (19 changes: 4 additions & 15 deletions)

@@ -4,7 +4,7 @@

 import tensorflow as tf

-from edward.optimizers import alp_optimizer_apply_gradients
+from edward.optimizers import KucukelbirOptimizer


 class test_sgd_class(tf.test.TestCase):
@@ -15,23 +15,11 @@ def test_kucukelbir_grad(self):
       [(2.7902498, 0.8434107), (1.241244, 1.8959416)],
       [(2.6070995, 0.7563643), (1.0711095, 1.8410041)]
     ]

     t = 0.1
     delta = 10e-3
     eta = 1e-1

-    def alp_optimizer_apply_gradients(n, s_n, grads_and_vars):
-      ops = []
-      for i, (grad, var) in enumerate(grads_and_vars):
-        updated_s_n = s_n[i].assign((t * grad**2) + (1 - t) * s_n[i])
-
-        p_n_first = eta * n**(-.5 + delta)
-        p_n_second = (1 + tf.sqrt(updated_s_n[i]))**(-1)
-        p_n = p_n_first * p_n_second
-
-        updated_var = var.assign_add(-p_n * grad)
-        ops.append(updated_var)
-      return ops
-
     w1 = tf.Variable(tf.constant(1.))
     w2 = tf.Variable(tf.constant(2.))
     var_list = [w1, w2]
@@ -47,7 +35,8 @@ def alp_optimizer_apply_gradients(n, s_n, grads_and_vars):
     s_n = tf.Variable(tf.zeros(2))
     n = tf.Variable(tf.constant(1.))

-    train = alp_optimizer_apply_gradients(n, s_n, grads_and_vars)
+    optimizer = KucukelbirOptimizer(t=t, delta=delta, eta=eta)
+    train = optimizer.apply_gradients(n, s_n, grads_and_vars)
     increment_n = n.assign_add(1.)

     actual_grads_and_vars = []
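The hunk ends at the comparison loop; the remaining test lines (not shown) presumably run `train` and `increment_n` for three iterations and compare against the expected values listed above. A hedged sketch of what such a TF 1.x driver loop looks like, with `expected_values` as a hypothetical name for that list:

    expected_values = [...]  # the three iterates of (grad, var) pairs shown above
    init = tf.global_variables_initializer()
    with tf.Session() as sess:
      sess.run(init)
      for expected in expected_values:
        sess.run(train)        # one KucukelbirOptimizer update to w1, w2
        sess.run(increment_n)  # advance the step counter n
        # compare sess.run(...) output against `expected` here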