Skip to content
Snippets Groups Projects
Commit 81148d84 authored by SebastianBruijns's avatar SebastianBruijns
Browse files

made gibbs_sample.py look pretty

parent fb9c974e
No related branches found
No related tags found
No related merge requests found
"""
Perform a Gibbs sampling using my function of the test data.
Also perform maximum likelihood estimation of the same weights.
"""
import numpy as np
import pyhsmm.basic.distributions as distributions
from scipy.optimize import minimize
import pickle
# Sampling params
n_samples = 100000  # number of Gibbs sweeps to draw (was assigned twice; kept once)

# Data params
T = 16           # number of time steps in each session
n_inputs = 3     # number of regressors / weight dimensions
step_size = 0.2  # scales the random-walk innovation covariance Q of the weights

# Setup: identity innovation covariance replicated for every time step
Q = np.tile(np.eye(n_inputs), (T, 1, 1))

# Load the test data; a context manager closes the handle deterministically
# instead of leaking it to the garbage collector.
# NOTE(review): pickle.load is only safe on trusted local fixture files.
with open('test_data', 'rb') as f:
    sample = pickle.load(f)

learn = distributions.Dynamic_GLM(n_inputs=n_inputs, T=T, P_0=4 * np.eye(n_inputs), Q=Q * step_size, prior_mean=np.zeros(n_inputs))
# NOTE: an undocumented duplicate definition of wrapper() previously lived
# here. It was re-defined verbatim (with a docstring) later in the file
# before its first use, so the earlier copy was dead code and has been
# removed.
# Draw samples: each Gibbs sweep resamples the weight trajectories given the
# data, and we record a copy of the weights after every sweep.
# (The original also created a `pseudo_samples` list that was never appended
# to nor saved — removed as dead code.)
samples = []
for sweep in range(n_samples):
    if sweep % 1000 == 0:
        print(sweep)  # lightweight progress indicator
    learn.resample(sample)
    samples.append(learn.weights.copy())
def wrapper(w, t):
    """Objective for maximum-likelihood estimation at time step t.

    Broadcasts the flat weight vector ``w`` to every one of the T time
    steps, installs it on the model, and returns the negative summed
    log-likelihood of the data at time ``t`` (so minimizing it yields
    the ML estimate).
    """
    stacked = np.tile(w, (T, 1))
    learn.weights = stacked
    ll = learn.log_likelihood(sample[t], t)
    return -np.sum(ll)
# Compute maximum-likelihood estimates of the weights, one independent
# optimization per time step, each started from the zero vector.
# (The original ran the identical minimize() call twice per t, immediately
# overwriting the first result — one call suffices.)
LL_weights = np.zeros((T, n_inputs))
for t in range(T):
    LL_weights[t] = minimize(lambda w: wrapper(w, t), np.zeros(n_inputs)).x
# Save the Gibbs posterior samples and the ML estimates. A context manager
# guarantees the file is flushed and closed even if pickling raises.
with open('gibbs_posterior', 'wb') as f:
    pickle.dump((samples, LL_weights), f)
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment