@@ -84,8 +84,11 @@ class Dynamic_GLM(GibbsSampling):
returnoutputs
deflog_likelihood(self,input,timepoint):
"""
Given input from a single session (a matrix containing regressors and responses) and the timepoint information from that session, return log-likelihoods for observations.
"""
predictors,responses=input[:,:-1],input[:,-1]
nans=np.isnan(responses)
nans=np.isnan(responses)# can come from cross-validation
probs=np.zeros((input.shape[0],2))
out=np.zeros(input.shape[0])
# NOTE: we might avoid computing 1 / (...) — since the value is logged it is just -log(...) — but the other half of `probs` still needs the full computation.
...
...
@@ -99,6 +102,11 @@ class Dynamic_GLM(GibbsSampling):
# Gibbs sampling
defresample(self,data=[]):
"""
Resampling of dynamic logistic random variables.
We follow the resampling scheme of Windle: "Efficient Data Augmentation in Dynamic Models for Binary and Count Data".
This makes use of the forward filter backwards sample algorithm. Which uses Kalman filtering, for which we use Anderson & Moore 1979
"""
# TODO: Clean up this method — callers currently must invoke delete_obs_data because of all the cached state this leaves behind.