\[
\begin{aligned}
y_t &\sim \text{Pois}(\lambda_t) \\
\lambda_t &= \exp(a + rt) \\
a &\sim \mathcal{N}(0, 1000^2) \\
r &\sim \mathcal{N}(0, 1000^2)
\end{aligned}
\]

# Set-up
library(rstan)
library(brms)
library(tidyverse)
library(bayesplot)
Length of the data:
n <- 10
Observation vector:
y <- c(rep(0, n - 1), 1)
y
## [1] 0 0 0 0 0 0 0 0 0 1
Time covariate:
t <- 1:n
t
## [1] 1 2 3 4 5 6 7 8 9 10
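As a side note, with this data (nine zeros followed by a single one) the Poisson log-likelihood of the model above reduces to
\[
\ell(a, r) = \sum_{t=1}^{10} \left[ y_t (a + r t) - e^{a + r t} - \log(y_t!) \right]
= a + 10 r - \sum_{t=1}^{10} e^{a + r t},
\]
since only \(y_{10} = 1\) contributes to the first term and \(\log(y_t!) = 0\) for every \(y_t \in \{0, 1\}\). Along the ridge \(a + 10r \approx 0\) this surface keeps increasing as \(r\) grows and \(a\) drops, so with essentially flat priors very steep lines stay plausible.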
Use brms to fit the model with very flat priors:
fit <- brm(
  y ~ t,
  data = list(y = y, t = t),
  family = poisson,
  refresh = 0,
  prior = prior(normal(0, 1000), class = "Intercept") +
    prior(normal(0, 1000), class = "b")
)
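As an optional aside, brms translates this call into a Stan program (rstan, loaded above, is the default backend); the generated model and data can be inspected with the brms helpers:

stancode(fit)  # Stan program generated by brms for this model
standata(fit)  # data list that brms passes to Stan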
bayesplot::mcmc_trace(fit)
bayesplot::mcmc_hist(fit)
## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
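To focus on just the two regression coefficients, both bayesplot calls accept a `pars` argument; this sketch assumes passing the brmsfit directly works as above and uses the brms parameter names `b_Intercept` and `b_t` seen below:

bayesplot::mcmc_trace(fit, pars = c("b_Intercept", "b_t"))
bayesplot::mcmc_hist(fit, pars = c("b_Intercept", "b_t"))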
Extract the draws from the Markov chain for \(r\) and compute the fraction \(> 0\):
draws <- as_draws_df(fit) %>%
  rownames_to_column()
sum(draws$b_t > 0) / length(draws$b_t)
## [1] 1
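The same proportion can also be written as the mean of an indicator, and the model summary reports the posterior for both coefficients:

mean(draws$b_t > 0)  # same quantity as above, written as a proportion
summary(fit)         # posterior means and credible intervals for a and r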
Extract a few draws and plot the corresponding straight lines \(a + rt\) (i.e. \(\log \lambda_t\)):
n_line <- 15
sample_draws <- draws[sample(nrow(draws), size = n_line), ]
ggplot(sample_draws) +
  geom_abline(aes(intercept = b_Intercept, slope = b_t, col = as.factor(rowname))) +
  xlim(c(0, 10)) +
  ylim(c(-1000, 1)) +
  theme_minimal() +
  labs(col = "MCMC iteration")
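A natural follow-up is a posterior predictive check, which brms provides via `pp_check()`; it overlays datasets simulated from the posterior on the observed counts:

pp_check(fit)  # compare replicated y to the observed y under this fit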