# -*- coding: utf-8 -*-
"""Exercises Chapter 6.ipynb

Automatically generated by Colaboratory.

Original file is located at
    https://colab.research.google.com/drive/18zQTq29ji2wLDHrhwu62X6Ij8F0-VvSE
"""

# Section 6.2

# Exercise 6.3

import numpy as np
from scipy.stats import gamma, norm, gaussian_kde
import matplotlib.pyplot as plt

m = 100000
xi = 0.04
xi_sigma = 0.01
alpha = 10
beta = 0.07

# Simulate X = xi + beta*(G-1) + xi_sigma*Z/sqrt(G) with G ~ Gamma(alpha, scale=1/alpha).
G = gamma.rvs(alpha, size=m, scale=1/alpha)
X = xi + beta*(G-1) + xi_sigma*norm.rvs(size=m)/np.sqrt(G)
Mean = np.mean(X)
Sd = np.std(X)

density = gaussian_kde(X)
x = np.linspace(-0.05, 0.15, 200)
y = density(x)
plt.plot(x, y)

# Compare the sample moments with the theoretical mean and standard deviation.
Mean_theoretical = xi
Sd_theoretical = np.sqrt(beta**2/alpha + alpha*xi_sigma**2/(alpha-1))
print(Mean, Sd)
print(Mean_theoretical, Sd_theoretical)

# Repeat with a negative beta.
beta = -0.1
G = gamma.rvs(alpha, size=m, scale=1/alpha)
X = xi + beta*(G-1) + xi_sigma*norm.rvs(size=m)/np.sqrt(G)
Mean = np.mean(X)
Sd = np.std(X)

density = gaussian_kde(X)
x = np.linspace(-0.05, 0.15, 200)
y = density(x)
plt.plot(x, y)

Mean_theoretical = xi
Sd_theoretical = np.sqrt(beta**2/alpha + alpha*xi_sigma**2/(alpha-1))
print(Mean, Sd)
print(Mean_theoretical, Sd_theoretical)

"""# Exercise 6.4"""

m = 100000
p = 0.5
xi1 = 0.02
xi2 = 0.08
sigma1 = 0.12
sigma2 = 0.25
xi = [xi1, xi2]
sigma = [sigma1, sigma2]

from scipy.stats import binom, lognorm

# Mixture of two lognormals: C picks the component, log R ~ N(xi[C], sigma[C]^2).
# Note that scipy's lognorm takes the log-mean through scale=exp(xi), not loc.
C = binom.rvs(1, p, size=m)
R = np.zeros(m)
for i in range(m):
    R[i] = lognorm.rvs(sigma[int(C[i])], scale=np.exp(xi[int(C[i])]))

Mean = np.mean(R)
Sd = np.std(R)

density = gaussian_kde(R)
x = np.linspace(0.0, 3.0, 200)
y = density(x)
plt.plot(x, y)

"""# Exercise 6.6"""

xi = 0.05
sigma = np.sqrt(np.log(1.01))
K = 25
T = 1
J = 10000
m = 20

from scipy.stats import poisson, norm

# Mixed Poisson claim counts: a lognormal intensity mu per cell, N ~ Poisson(J*mu*T).
mu = xi*np.exp(-sigma**2/2 + sigma*norm.rvs(size=(K, m)))
Ncal = poisson.rvs(J*mu*T, size=(K, m))
plt.plot(np.arange(K), Ncal)
print(np.std(Ncal)/np.mean(Ncal))

# A larger portfolio reduces the coefficient of variation.
J = 1000000
Ncal = poisson.rvs(J*mu*T, size=(K, m))
plt.plot(np.arange(K), Ncal)
print(np.std(Ncal)/np.mean(Ncal))

"""# Exercise 6.7"""

from scipy.stats import lognorm

theta = 0
sigma = 1
n = 20
m = 10000

# Simulate lognormal samples with log Z ~ N(theta, sigma^2), estimate the
# parameters from each sample, and draw from the fitted distribution.
z = lognorm.rvs(sigma, size=(n, m), scale=np.exp(theta))
thetaHat = np.mean(np.log(z), axis=0)
sigmaHat = np.std(np.log(z), axis=0)
ZHat = lognorm.rvs(sigmaHat, size=m, scale=np.exp(thetaHat))
print(np.exp(theta+0.5*sigma**2), np.mean(ZHat))
print(np.exp(theta+0.5*sigma**2)*np.sqrt(np.exp(sigma**2)-1), np.std(ZHat))

# Larger sample size: the effect of parameter uncertainty shrinks.
n = 100
z = lognorm.rvs(sigma, size=(n, m), scale=np.exp(theta))
thetaHat = np.mean(np.log(z), axis=0)
sigmaHat = np.std(np.log(z), axis=0)
ZHat = lognorm.rvs(sigmaHat, size=m, scale=np.exp(thetaHat))
print(np.exp(theta+0.5*sigma**2), np.mean(ZHat))
print(np.exp(theta+0.5*sigma**2)*np.sqrt(np.exp(sigma**2)-1), np.std(ZHat))

"""# Exercise 6.8"""

xi = 0.0002
sigma = 0.01
n = 250
K = 250
m = 10000

# Daily log-returns log(1+r) ~ N(xi, sigma^2); the K-day gross return is then
# lognormal with parameters K*xi and K*sigma^2.
r = lognorm.rvs(sigma, scale=np.exp(xi), size=(n, m)) - 1
xiHat = np.mean(np.log(r+1), axis=0)
sigmaHat = np.std(np.log(r+1), axis=0)
RHat = lognorm.rvs(np.sqrt(K)*sigmaHat, scale=np.exp(K*xiHat), size=m) - 1
print(np.exp(K*xi+0.5*K*sigma**2)-1, np.mean(RHat))
print(np.exp(K*xi+0.5*K*sigma**2)*np.sqrt(np.exp(K*sigma**2)-1), np.std(RHat))

# Larger estimation sample.
n = 1000
r = lognorm.rvs(sigma, scale=np.exp(xi), size=(n, m)) - 1
xiHat = np.mean(np.log(r+1), axis=0)
sigmaHat = np.std(np.log(r+1), axis=0)
RHat = lognorm.rvs(np.sqrt(K)*sigmaHat, scale=np.exp(K*xiHat), size=m) - 1
print(np.exp(K*xi+0.5*K*sigma**2)-1, np.mean(RHat))
print(np.exp(K*xi+0.5*K*sigma**2)*np.sqrt(np.exp(K*sigma**2)-1), np.std(RHat))
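# Quick sanity check of the scipy.stats.lognorm parameterization used in the
# exercises above: if log Z ~ N(mu, s^2), the matching call is
# lognorm(s, scale=np.exp(mu)); the loc argument only shifts the distribution
# and is not the log-mean. A minimal sketch; mu_check and s_check are
# arbitrary illustrative values, not part of the exercises.
mu_check, s_check = 0.3, 0.2
z_check = lognorm.rvs(s_check, scale=np.exp(mu_check), size=100000)
print(np.mean(np.log(z_check)), np.std(np.log(z_check)))  # should be close to 0.3 and 0.2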
"""# Section 6.5

# Exercise 6.24
"""

ximu = 0.05
tau = 0.5
T = 1
J = 100
m = 10000

from numpy.random import poisson

# Mixed Poisson: in each scenario a common lognormal factor mu drives the
# claim intensity of all J policies.
eps = np.random.normal(size=m)
mu = ximu*np.exp(-0.5*tau**2 + tau*eps)
N = np.zeros((J, m))
for i in range(m):
    N[:, i] = poisson(mu[i]*T, size=J)
Ncal = np.sum(N, axis=0)
print(np.mean(Ncal))
print(np.std(Ncal))

# Larger tau: more uncertainty in the common factor, larger standard deviation.
tau = 1.0
mu = ximu*np.exp(-0.5*tau**2 + tau*eps)
N = np.zeros((J, m))
for i in range(m):
    N[:, i] = poisson(mu[i]*T, size=J)
Ncal = np.sum(N, axis=0)
print(np.mean(Ncal))
print(np.std(Ncal))

"""# Exercise 6.25"""

xiz = 1
sigma = 0.5
alpha = 20
n = 100
m = 10000

from scipy.stats import gamma, lognorm

# A common gamma factor theta scales n lognormal claims with unit conditional
# mean; scipy's lognorm takes the log-mean through scale=exp(-sigma^2/2), not loc.
theta = xiz*gamma.rvs(alpha, scale=1/alpha, size=m)
Z = np.zeros((n, m))
for i in range(m):
    Z[:, i] = theta[i]*lognorm.rvs(sigma, size=n, scale=np.exp(-0.5*sigma**2))
S = np.sum(Z, axis=0)
print(np.mean(S))
print(np.std(S))

# Smaller alpha: heavier-tailed common factor.
alpha = 2
theta = xiz*gamma.rvs(alpha, scale=1/alpha, size=m)
Z = np.zeros((n, m))
for i in range(m):
    Z[:, i] = theta[i]*lognorm.rvs(sigma, size=n, scale=np.exp(-0.5*sigma**2))
S = np.sum(Z, axis=0)
print(np.mean(S))
print(np.std(S))

"""# Exercise 6.26"""

import matplotlib.pyplot as plt

p = [0.8, 0.6]
K = 25
C0 = 0
m = 100000

# Two-state Markov chain: p[c] is the probability of staying in state c.
# Row 0 keeps the initial state; rows 1..K hold the simulated transitions.
C = np.ones((K+1, m)) * C0
for i in range(m):
    for k in range(1, K+1):
        I = np.random.uniform() > p[int(C[k-1, i])]
        C[k, i] = C[k-1, i] + I*(1 - 2*C[k-1, i])
p2k = np.mean(C, axis=1)
plt.plot(np.arange(K+1), p2k)

# Same chain started in state 1.
C0 = 1
C = np.ones((K+1, m)) * C0
for i in range(m):
    for k in range(1, K+1):
        I = np.random.uniform() > p[int(C[k-1, i])]
        C[k, i] = C[k-1, i] + I*(1 - 2*C[k-1, i])
p2k = np.mean(C, axis=1)
plt.plot(np.arange(K+1), p2k)

"""# Exercise 6.27"""

xi = [0.03, 0.06]
p = [0.9, 0.9]
sigma = [0.01, 0.01]
c0 = 0
K = 25
m = 10

# Regime-switching returns: the Markov chain C selects the lognormal
# parameters of the one-period return in each period.
C = np.ones((K+1, m)) * c0
for i in range(m):
    for k in range(1, K+1):
        I = np.random.uniform() > p[int(C[k-1, i])]
        C[k, i] = C[k-1, i] + I*(1 - 2*C[k-1, i])
R = np.zeros((K+1, m))
for i in range(m):
    for k in range(K+1):
        R[k, i] = np.random.lognormal(xi[int(C[k, i])], sigma[int(C[k, i])]) - 1
plt.plot(np.arange(K+1), R)

# Larger volatilities in both regimes.
sigma = [0.12, 0.24]
C = np.ones((K+1, m)) * c0
for i in range(m):
    for k in range(1, K+1):
        I = np.random.uniform() > p[int(C[k-1, i])]
        C[k, i] = C[k-1, i] + I*(1 - 2*C[k-1, i])
R = np.zeros((K+1, m))
for i in range(m):
    for k in range(K+1):
        R[k, i] = np.random.lognormal(xi[int(C[k, i])], sigma[int(C[k, i])]) - 1
plt.plot(np.arange(K+1), R)

"""# Exercise 6.28"""

xi = [0.03, 0.06]
p = [0.9, 0.9]
sigma = [0.01, 0.01]
c0 = 0
K = [2, 5, 25]
m = 100000

C = np.ones((K[-1]+1, m)) * c0
for i in range(m):
    for k in range(1, K[-1]+1):
        I = np.random.uniform() > p[int(C[k-1, i])]
        C[k, i] = C[k-1, i] + I*(1 - 2*C[k-1, i])
R = np.zeros((K[-1]+1, m))
for i in range(m):
    for k in range(K[-1]+1):
        R[k, i] = np.random.lognormal(xi[int(C[k, i])], sigma[int(C[k, i])]) - 1

from scipy.stats import gaussian_kde

# Kernel density estimates of the one-period return at horizons K = 2, 5, 25.
density0 = gaussian_kde(R[K[0], :])
x = np.linspace(-0.05, 0.15, 200)
y0 = density0(x)
plt.plot(x, y0)

density1 = gaussian_kde(R[K[1], :])
x = np.linspace(-0.05, 0.15, 200)
y1 = density1(x)
plt.plot(x, y1)

density2 = gaussian_kde(R[K[2], :])
x = np.linspace(-0.05, 0.15, 200)
y2 = density2(x)
plt.plot(x, y2)

# Repeat with larger volatilities in both regimes.
sigma = [0.12, 0.24]
C = np.ones((K[-1]+1, m)) * c0
for i in range(m):
    for k in range(1, K[-1]+1):
        I = np.random.uniform() > p[int(C[k-1, i])]
        C[k, i] = C[k-1, i] + I*(1 - 2*C[k-1, i])
R = np.zeros((K[-1]+1, m))
for i in range(m):
    for k in range(K[-1]+1):
        R[k, i] = np.random.lognormal(xi[int(C[k, i])], sigma[int(C[k, i])]) - 1

density0 = gaussian_kde(R[K[0], :])
x = np.linspace(-1.05, 1.15, 200)
y0 = density0(x)
plt.plot(x, y0)

density1 = gaussian_kde(R[K[1], :])
x = np.linspace(-1.05, 1.15, 200)
y1 = density1(x)
plt.plot(x, y1)

density2 = gaussian_kde(R[K[2], :])
x = np.linspace(-1.05, 1.15, 200)
y2 = density2(x)
plt.plot(x, y2)
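# The simulated regime probabilities of Exercise 6.26 can be compared with the
# exact k-step probabilities of a two-state chain: with stay-probabilities p0
# and p1, P(C_k = 1 | C_0 = c0) = pi1 + (1{c0=1} - pi1)*lam**k, where
# pi1 = (1-p0)/(2-p0-p1) and lam = p0+p1-1. A minimal sketch reusing the
# parameters of Exercise 6.26; p2k_exact is a new name, not from the exercises.
p0, p1 = 0.8, 0.6
pi1 = (1 - p0)/(2 - p0 - p1)
lam = p0 + p1 - 1
kk_check = np.arange(26)
for c0_check in (0, 1):
    p2k_exact = pi1 + (c0_check - pi1)*lam**kk_check
    plt.plot(kk_check, p2k_exact, '--', label=f'exact, C0={c0_check}')
plt.legend()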
"""# Section 6.6

# Exercise 6.30
"""

le = 116
alpha = 0.0003
beta = 0.1

# Gompertz-type lifetime distribution function F and one-year death probabilities q.
F = 1 - np.exp(-alpha*(np.exp(beta*np.arange(le+1)) - 1))
plt.plot(np.arange(le+1), F)

ll = np.arange(le)
q = (F[ll+1] - F[ll])/(1 - F[ll])
plt.plot(np.arange(le), q)

"""# Exercise 6.31"""

psi0 = 0.0005
psi1 = 0.0000113
psi2 = 0.124

# One-year probabilities of becoming disabled for ages 20-65.
ll = np.arange(20, 66)
p_ia = 1 - np.exp(-psi0 - psi1*np.exp(psi2*ll))
plt.plot(ll, p_ia)

"""# Exercise 6.32"""

l0 = 25
K = 40
psi0 = 0.0005
psi1 = 0.0000113
psi2 = 0.124
alpha = 0.003
beta = 0.1

# kpda: probability of dying within k years; kpaa: probability of being alive
# and never disabled after k years; kpia: probability of being alive and
# disabled at time k (computed as the remainder).
kk = np.arange(K+1)
kpda = 1 - np.exp(-alpha*np.exp(beta*l0)*(np.exp(beta*kk) - 1))
pia = 1 - np.exp(-psi0 - psi1*np.exp(psi2*np.arange(l0, l0+K)))
kpaa = (1 - kpda)*np.insert(np.cumprod(1 - pia), 0, 1.0)
kpia = 1 - kpda - kpaa
plt.plot(kk, kpda, label='kpda')
plt.plot(kk, kpaa, label='kpaa')
plt.plot(kk, kpia, label='kpia')
plt.legend()

"""# Exercise 6.33"""

Ncal0 = 1000000
l0 = 25
K = 40
s = 1

# Expected yearly disability payout for a portfolio of Ncal0 insured, each
# receiving the amount s while disabled.
X = kpia * Ncal0 * s
plt.plot(np.arange(K+1), X)

# Probability of being disabled over a 10-year horizon for entry ages 25, 40 and 55.
K = 10
l_0 = [25, 40, 55]
kk = np.arange(K+1)

kpda = 1 - np.exp(-alpha*np.exp(beta*l_0[0])*(np.exp(beta*kk) - 1))
pia = 1 - np.exp(-psi0 - psi1*np.exp(psi2*np.arange(l_0[0], l_0[0]+K)))
kpaa = (1 - kpda)*np.insert(np.cumprod(1 - pia), 0, 1.0)
kpia0 = 1 - kpda - kpaa

kpda = 1 - np.exp(-alpha*np.exp(beta*l_0[1])*(np.exp(beta*kk) - 1))
pia = 1 - np.exp(-psi0 - psi1*np.exp(psi2*np.arange(l_0[1], l_0[1]+K)))
kpaa = (1 - kpda)*np.insert(np.cumprod(1 - pia), 0, 1.0)
kpia1 = 1 - kpda - kpaa

kpda = 1 - np.exp(-alpha*np.exp(beta*l_0[2])*(np.exp(beta*kk) - 1))
pia = 1 - np.exp(-psi0 - psi1*np.exp(psi2*np.arange(l_0[2], l_0[2]+K)))
kpaa = (1 - kpda)*np.insert(np.cumprod(1 - pia), 0, 1.0)
kpia2 = 1 - kpda - kpaa

plt.plot(kk, kpia0, label='l_0=25')
plt.plot(kk, kpia1, label='l_0=40')
plt.plot(kk, kpia2, label='l_0=55')
plt.legend()
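# Consistency check between Exercises 6.30 and 6.32: the k-year death
# probability 1 - exp(-alpha*e^(beta*l0)*(e^(beta*k)-1)) used above should
# coincide with the conditional probability (F(l0+k)-F(l0))/(1-F(l0)) implied
# by the lifetime distribution function of Exercise 6.30, evaluated with the
# same alpha and beta. A minimal sketch; F_check and kpda_check are new names,
# not from the exercises.
ages = np.arange(le+1)
F_check = 1 - np.exp(-alpha*(np.exp(beta*ages) - 1))
kk_check = np.arange(K+1)
kpda_check = (F_check[l0 + kk_check] - F_check[l0])/(1 - F_check[l0])
print(np.max(np.abs(kpda_check - (1 - np.exp(-alpha*np.exp(beta*l0)*(np.exp(beta*kk_check) - 1))))))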