Model-based empirical likelihood example¶

Estimating the parameters of AR(2) process.

First load the required packages and classes. The non-system packages are:

  1. arma_model — a class of ARMA models. Can be found here.
  2. empirical_likelihood — classes for EL calculations (see link).
In [1]:
%matplotlib inline
import matplotlib.pyplot as plt
import numpy as np

# Project-local modules: ARMA simulation and empirical-likelihood (EL) tools.
# NOTE(review): not on PyPI — see the links in the introduction above.
import arma_model as am
import empirical_likelihood as el 
# Default figure size for every plot in this notebook.
plt.rcParams['figure.figsize'] = (10.0, 6.0)

Simulate the model for the given parameters and show a visualization.

In [2]:
# Fix the RNG seed so the simulated series (and all results below) are reproducible.
np.random.seed(54321)
n = 500            # length of the simulated series
phi = [0.4,-.5]    # true AR coefficients (phi_1, phi_2)
theta = [0.]       # MA part set to zero -> pure AR(2) process

# Simulate the process and wrap the series for EL computations.
# NOTE(review): assumes arma_model exposes the simulated series as `.data`
# and el.data wraps it for later `get_el` calls — confirm in the project modules.
model = am.arma_model(phi, theta, size=n)
data = el.data(model.data)

model.plot()
No description has been provided for this image

Prior distribution¶

To continue with Bayesian inference, place a prior distribution on the parameters:

  • $\pi(\psi_1)$ and $\pi(\psi_2)$
In [9]:
# Number of prior draws and the hyper-parameters of the normal priors.
samples = 2000
prop_mean = [0.39,-0.48]
prop_std = [.1,.1]

# Draw independent normal prior samples for each AR coefficient.
prop_phi_1 = np.random.normal(loc=prop_mean[0], scale=prop_std[0],size=samples)
prop_phi_2 = np.random.normal(loc=prop_mean[1], scale=prop_std[1],size=samples)

# Show the two prior samples side by side.
# FIX: raw strings so '\p' in the LaTeX labels is not treated as an (invalid)
# escape sequence; also split the comma-joined expressions into statements.
plt.subplot(1,2,1)
plt.hist(prop_phi_1, 20, label=r'$\psi_1$')
plt.grid(True)
plt.legend()

plt.subplot(1,2,2)
plt.hist(prop_phi_2, 20, label=r'$\psi_2$')
plt.grid(True)
plt.legend()
plt.show()
No description has been provided for this image

Sampler 1: Bayesian weighted sampler¶

  • Sample from the prior and assign weights equal to EL
  • Self normalize the weights
  • Calculate parameter estimates
In [13]:
# Evaluate the empirical likelihood (EL) of every prior draw; the EL value is
# the (unnormalized) importance weight of that parameter pair.
els = np.array([el.parameter([p1, p2], estim_eq='ar-least').get_el(data)
                for p1, p2 in zip(prop_phi_1, prop_phi_2)])
els_norm = els/els.sum()  # self-normalized importance weights

# Weighted posterior means of the two coefficients.
phi_1_estim = np.dot(prop_phi_1,els_norm)
phi_2_estim = np.dot(prop_phi_2,els_norm)
# BUG FIX: E[x^2] - E[x]^2 is the variance; take the square root so the
# variables actually hold the standard deviation their names promise.
phi_1_std = np.sqrt(np.dot(prop_phi_1**2,els_norm) - phi_1_estim**2)
phi_2_std = np.sqrt(np.dot(prop_phi_2**2,els_norm) - phi_2_estim**2)

print('phi_1_hat = %.3f' % phi_1_estim)
print('phi_2_hat = %.3f' % phi_2_estim)
phi_1_hat = 0.389
phi_2_hat = -0.486

Plot the weighted samples

In [14]:
# Scatter the normalized EL weight of each prior draw against its value.
# FIX: raw strings for all TeX labels so '\p' is not parsed as an (invalid)
# escape sequence — consistent with the r'...' labels already used below.
fig = plt.figure()
ax = fig.add_subplot(1,2,1)
ax.plot(prop_phi_1,els_norm,'ob')
plt.grid(True)
plt.title(r'Model-based EL sampler $\phi_1$')
plt.xlabel(r'$\phi_1$')
plt.ylabel(r'$\mathcal{R}_b(\phi_1,\phi_2)$')
plt.xticks(rotation=70)

ax = fig.add_subplot(1,2,2)
ax.plot(prop_phi_2,els_norm,'ob')
plt.grid(True)
plt.title(r'Model-based EL sampler of $\phi_2$')
plt.xlabel(r'$\phi_2$')
plt.ylabel(r'$\mathcal{R}_b(\phi_1,\phi_2)$')
plt.xticks(rotation=70)
plt.tight_layout()

plt.show()
No description has been provided for this image

Sampler 2: Bayesian importance resampling¶

  • perform the importance resampling by
    1. Sample resample_size samples from previous algorithm by multinomial distribution, where probabilities are EL weights
    2. Add a random walk of white noise with low variance
    3. Re-run the Bayesian sampler
  • Normalize weights and calculate estimates
In [17]:
resample_size = samples
noise_std = .001

# 1. Multinomial resampling of the prior draws, proportional to the EL weights
#    obtained by sampler 1.
resampled_phi_1 = np.random.choice(prop_phi_1,resample_size,p=els_norm)
resampled_phi_2 = np.random.choice(prop_phi_2,resample_size,p=els_norm)

# 2. Jitter each coordinate with low-variance white noise (random-walk step).
noise = np.random.normal(loc=0,scale=noise_std,size=resample_size)
resampled_phi_1 += noise

noise = np.random.normal(loc=0,scale=noise_std,size=resample_size)
resampled_phi_2 += noise

# 3. Re-run the EL evaluation on the jittered sample.
rels = np.array([el.parameter([p1, p2], estim_eq='ar-least').get_el(data)
                 for p1, p2 in zip(resampled_phi_1, resampled_phi_2)])
rels_norm = rels/rels.sum()

res_phi_1_estim = np.dot(resampled_phi_1,rels_norm)
res_phi_2_estim = np.dot(resampled_phi_2,rels_norm)

# BUG FIX: the second moments must be weighted by the NEW weights (rels_norm),
# not the stale els_norm from sampler 1; and E[x^2] - E[x]^2 is a variance,
# so take the square root to report a standard deviation.
res_phi_1_std = np.sqrt(np.dot(resampled_phi_1**2,rels_norm) - res_phi_1_estim**2)
res_phi_2_std = np.sqrt(np.dot(resampled_phi_2**2,rels_norm) - res_phi_2_estim**2)

print('phi_1_hat = %.3f' % res_phi_1_estim)
print('phi_2_hat = %.3f' % res_phi_2_estim)

print('phi_1_std = %.3f' % res_phi_1_std)
print('phi_2_std = %.3f' % res_phi_2_std)
phi_1_hat = 0.390
phi_2_hat = -0.487
phi_1_std = 0.000
phi_2_std = 0.001

Plot the weights after employing the importance resampling

In [19]:
# Scatter the reweighted samples against their normalized EL weights.
# FIX: raw strings for all TeX labels so '\p' is not parsed as an (invalid)
# escape sequence — consistent with the r'...' labels already used below.
fig = plt.figure()
ax = fig.add_subplot(1,2,1)
ax.plot(resampled_phi_1,rels_norm,'ob')
plt.grid(True)
plt.title(r'Reweighted samples $\phi_1$')
plt.xlabel(r'$\phi_1$')
plt.ylabel(r'$\mathcal{R}_b(\phi_1,\phi_2)$')
plt.xticks(rotation=70)

ax = fig.add_subplot(1,2,2)
ax.plot(resampled_phi_2,rels_norm,'ob')
plt.grid(True)
plt.title(r'Reweighted samples of $\phi_2$')
plt.xlabel(r'$\phi_2$')
plt.ylabel(r'$\mathcal{R}_b(\phi_1,\phi_2)$')
plt.xticks(rotation=70)
plt.tight_layout()

plt.show()
No description has been provided for this image

Sampler 3: The MCMC sampler via model-based EL¶

  1. Redefine the prior and its evaluation method required by the MCMC algorithm
In [20]:
import scipy.stats as ss


# Hyper-parameters of the independent normal priors on (phi_1, phi_2).
prior_phi = [0.39,-0.48]
prior_std = [.1,.1]

def eval_prior(psi):
	"""Return the log prior density of psi = [phi_1, phi_2].

	The prior is a product of two independent normals, so the log density
	is the sum of the two marginal log-pdfs.
	"""
	lp1 = ss.norm.logpdf(psi[0], loc=prior_phi[0], scale=prior_std[0])
	lp2 = ss.norm.logpdf(psi[1], loc=prior_phi[1], scale=prior_std[1])
	return lp1 + lp2

Actual MCMC sampler via the EL.

Estimates the parameters directly from the posterior distribution

In [22]:
def prop_posterior(psi,data):
    """Log of the unnormalized EL posterior at psi.

    Combines the log empirical likelihood of the data under the AR
    least-squares estimating equations with the log prior density.
    """
    candidate = el.parameter(value=psi, estim_eq='ar-least')
    log_el = np.log(candidate.get_el(data))
    return log_el + eval_prior(candidate.value)

# Metropolis random-walk sampler targeting the EL posterior.
samples = 10000          # total MCMC iterations
burnin = 400             # iterations discarded from the start of the chain
thinin = 5               # keep every 5th sample after burn-in
phi_sample = prior_phi   # chain state, initialized at the prior mean
sample_std = [.1,.1]     # proposal standard deviations

# Log-posterior of the current state (denominator of the acceptance ratio).
h_denom = prop_posterior(phi_sample, data)

phi_log = []

for _ in range(samples):
    # Gaussian random-walk proposal around the current state.
    phi_prop = [np.random.normal(phi_sample[0], sample_std[0]),
                np.random.normal(phi_sample[1], sample_std[1])]

    h_num = prop_posterior(phi_prop, data)
    # Metropolis accept/reject: ratio of unnormalized posterior densities.
    if np.random.uniform() < np.exp(h_num - h_denom):
        phi_sample, h_denom = phi_prop, h_num
    phi_log.append(phi_sample)

# Drop burn-in and thin BEFORE transposing; both arrays end up with one
# parameter per row for the plotting cells below.
phi_log = np.array(phi_log)
phi_log_clean = phi_log[burnin::thinin].T
phi_log = phi_log.T

print('psi_1_mean = %.3f' % np.mean(phi_log_clean[0]))
print('psi_1_std = %.3f' % np.std(phi_log_clean[0]))

print('psi_2_mean = %.3f' % np.mean(phi_log_clean[1]))
print('psi_2_std = %.3f' % np.std(phi_log_clean[1]))
psi_1_mean = 0.390
psi_1_std = 0.035
psi_2_mean = -0.487
psi_2_std = 0.036

Visualize the results¶

  1. Show the evolution of the samples with and without burn-in and thinning. Plot histograms of the EL posterior distributions for both parameters.
In [24]:
# Trace plots: the raw chain (top) vs the burned-in, thinned chain (bottom).
# FIX: raw strings so '\p' and '\m' in the TeX are not parsed as (invalid)
# escape sequences.
fig=plt.figure(1)
plt.subplot(2,1,1)
plt.plot(phi_log[0],'-r',label=r'$\phi_1$')
plt.plot(phi_log[1],'-b',label=r'$\phi_2$')
plt.title(r'Raw samples from $\pi(\phi_1,\phi_2|\mathbf{x})$')
plt.xlabel(r'$n$')
plt.legend(loc='upper left')
plt.tight_layout()
plt.grid(True)

plt.subplot(2,1,2)
plt.plot(phi_log_clean[0],'-r',label=r'$\phi_1$')
plt.plot(phi_log_clean[1],'-b',label=r'$\phi_2$')
plt.title(r'Thinned samples from $\pi(\phi_1,\phi_2|\mathbf{x})$')
plt.xlabel(r'$n$')
plt.legend(loc='upper left')
plt.grid(True)
plt.tight_layout()
plt.show()
No description has been provided for this image

Show the histogram of EL posterior distribution for both parameters

In [25]:
# Evaluate the prior densities on a grid, to overlay on the posterior histograms.
x1 = np.linspace(0.23,0.55,500)
y1 = ss.distributions.norm.pdf(x1,loc=prior_phi[0],scale=prior_std[0])

x2 = np.linspace(-0.65,-.30,500)
y2 = ss.distributions.norm.pdf(x2,loc=prior_phi[1],scale=prior_std[1])

# Histogram of each marginal EL posterior, with the prior pdf on a twin axis.
# FIX: raw strings so '\p' and '\m' in the TeX are not parsed as (invalid)
# escape sequences.
fig = plt.figure(1)
ax = fig.add_subplot(1,2,1)
ax.hist(phi_log_clean[0],30)
ax2 = ax.twinx()
ax2.plot(x1,y1,'-r',label=r'$\pi(\psi_1)$')
ax2.legend(loc='best')
plt.xlim(min(x1),max(x1))
plt.grid(True)
plt.title(r'Histogram of $\pi(\phi_1|\mathbf{x})$')
plt.xlabel(r'$\phi_1$')
plt.xticks(rotation=70)

ax = fig.add_subplot(1,2,2)
ax.hist(phi_log_clean[1],30)
ax2 = ax.twinx()
ax2.plot(x2,y2,'-r',label=r'$\pi(\psi_2)$')
ax2.legend(loc='best')
plt.xlim(min(x2),max(x2))
plt.grid(True)
plt.title(r'Histogram of $\pi(\phi_2|\mathbf{x})$')
plt.xlabel(r'$\phi_2$')
plt.xticks(rotation=70)
plt.tight_layout()

plt.show()
No description has been provided for this image