# -*- coding: utf-8 -*-
"""Exercises chapter 7 (Section 3): exercises 7.9, 7.11, 7.12, 7.13 and 7.15.

Originally auto-generated by Colaboratory from
https://colab.research.google.com/drive/1haqoG-VOF4UNXBeFb8iVvFUSGz8nb8NP

Maximum-likelihood fitting of Pareto, Weibull and Student-t models to
simulated data and to the Belgian-fire / New-York-daily data sets.
matplotlib is imported lazily inside the plotting exercises so the
likelihood helpers can be imported without a display backend, and all
execution is guarded by ``__main__`` so importing this module is side-effect
free.
"""
import numpy as np
import pandas as pd
from scipy.optimize import minimize, minimize_scalar
from scipy.stats import t, weibull_min

# ---------------------------------------------------------------------------
# Exercise 7.9 -- Pareto model fitted by profile likelihood
# ---------------------------------------------------------------------------


def llminuspareto(beta, z):
    """Profiled negative mean log-likelihood of the Pareto model.

    The density is f(z) = (alpha/beta) * (1 + z/beta)**(-(1 + alpha)).
    For a fixed scale ``beta`` the ML shape has the closed form
    alpha(beta) = 1 / mean(log(1 + z/beta)); substituting it back leaves a
    one-dimensional criterion in ``beta`` only.

    Parameters
    ----------
    beta : float
        Scale parameter; must be positive.
    z : numpy.ndarray
        Sample of non-negative observations.

    Returns
    -------
    float
        Negative mean log-likelihood with alpha profiled out.
    """
    alphabeta = 1 / np.mean(np.log(1 + z / beta))  # ML shape given beta
    return -np.log(alphabeta / beta) + (1 + 1 / alphabeta)


def _fit_pareto(z):
    """Fit the Pareto model to ``z``; return (alphahat, betahat).

    ``args`` must be a tuple: the notebook original passed ``args=(z)``,
    which is just ``z`` and only works because recent scipy versions wrap
    non-tuple ``args`` themselves.
    """
    res = minimize_scalar(llminuspareto, args=(z,))
    betahat = res.x
    alphahat = 1 / np.mean(np.log(1 + z / betahat))
    return alphahat, betahat


def exercise_7_9(mb=10, alpha=4, beta=1, sizes=(10000, 1000, 100)):
    """Simulate Pareto samples and print the fitted (alphahat, betahat).

    For each sample size in ``sizes``, ``mb`` independent samples are drawn
    by inversion -- if U ~ Uniform(0, 1) then beta*(U**(-1/alpha) - 1) is
    Pareto(alpha, beta) -- and the ML fit is printed, showing how the
    estimates degrade as n shrinks.  (The original script repeated this
    loop verbatim for each n.)
    """
    for n in sizes:
        for _ in range(mb):
            z = beta * (np.power(np.random.uniform(size=n), -1 / alpha) - 1)
            alphahat, betahat = _fit_pareto(z)
            print(alphahat, betahat)


# ---------------------------------------------------------------------------
# Exercises 7.11 / 7.12 -- Weibull model fitted by profile likelihood
# ---------------------------------------------------------------------------


def llminusweibull(alpha, z):
    """Profiled negative mean log-likelihood of the Weibull model.

    For a fixed shape ``alpha`` the ML scale has the closed form
    beta(alpha) = mean(z**alpha)**(1/alpha); substituting it back leaves a
    one-dimensional criterion in ``alpha`` only.  (The original script
    defined this function twice, byte-identically.)

    Parameters
    ----------
    alpha : float
        Shape parameter; must be positive.
    z : array-like
        Sample of positive observations.

    Returns
    -------
    float
        Negative mean log-likelihood with the scale profiled out.
    """
    betaalpha = np.power(np.mean(np.power(z, alpha)), 1 / alpha)
    return (-np.log(alpha) + alpha * np.log(betaalpha)
            - (alpha - 1) * np.mean(np.log(z)) + 1)


def _fit_weibull(z):
    """Fit the Weibull model to ``z`` by profile ML; return (alphahat, betahat)."""
    res = minimize_scalar(llminusweibull, args=(z,),
                          bounds=(0.001, 50), method='bounded')
    alphahat = res.x
    betahat = np.power(np.mean(np.power(z, alphahat)), 1 / alphahat)
    return alphahat, betahat


def exercise_7_11(alpha=0.6, beta=1, n=500):
    """Fit a simulated Weibull sample and plot the profile criterion."""
    z = weibull_min.rvs(alpha, scale=beta, size=n) / beta
    alphahat, betahat = _fit_weibull(z)
    print(alphahat, betahat)
    # Trace the criterion on a grid to visualise the minimum.
    a = np.arange(0.1, 2.1, (2.1 - 0.1) / 100)
    lmin = np.array([llminusweibull(ai, z) for ai in a])
    import matplotlib.pyplot as plt
    plt.plot(a, lmin)
    plt.vlines(x=alphahat, ymin=1, ymax=3.5, colors='red')   # estimate
    plt.vlines(x=alpha, ymin=1, ymax=3.5, colors='green')    # true value


def exercise_7_12(url="/studier/emner/matnat/math/STK3505/h20/belgianfire.txt"):
    """Fit the Weibull model to the Belgian fire data and QQ-plot the fit."""
    # Raw string for the regex separator: a non-raw '\\s+' is a deprecated
    # escape sequence in recent Pythons.  The single data column is taken
    # up front -- the original computed betahat from the whole DataFrame,
    # which yields a Series rather than a scalar.
    z = pd.read_csv(url, header=None, sep=r'\s+')[0]
    alphahat, betahat = _fit_weibull(z)
    print(alphahat, betahat)
    a = np.arange(0.1, 2.1, (2.1 - 0.1) / 100)
    lmin = np.array([llminusweibull(ai, z) for ai in a])
    import matplotlib.pyplot as plt
    plt.plot(a, lmin)
    plt.vlines(x=alphahat, ymin=4.0, ymax=6.0)
    # QQ plot: fitted Weibull quantiles against the sorted data.
    n = len(z)
    u = (np.arange(n) + 0.5) / n
    q = weibull_min.ppf(u, alphahat, scale=betahat)
    plt.scatter(np.sort(z), q)


# ---------------------------------------------------------------------------
# Exercise 7.13 -- columnwise moment statistics of the New York daily data
# ---------------------------------------------------------------------------


def exercise_7_13(url="/studier/emner/matnat/math/STK3505/h20/newyorkdaily.txt"):
    """Print columnwise mean, standard deviation and excess kurtosis."""
    x = np.array(pd.read_csv(url, header=None, sep=r'\s+'))
    xiHat = np.mean(x, axis=0)
    sigmaHat = np.std(x, axis=0)
    # Excess kurtosis: fourth central moment over sigma**4, minus 3.
    kurtHat = np.mean((x - xiHat) ** 4, axis=0) / sigmaHat ** 4 - 3
    print(xiHat)
    print(sigmaHat)
    print(kurtHat)


# ---------------------------------------------------------------------------
# Exercise 7.15 -- location-scale Student-t fits to the New York daily data
# ---------------------------------------------------------------------------


def llminust(param, x):
    """Negative mean log-likelihood of a location-scale Student-t model.

    ``param`` is (xi, xisigma, alpha): location, scale, and half the
    degrees of freedom (the t distribution has 2*alpha df).
    """
    xi, xisigma, alpha = param
    return np.log(xisigma) - np.mean(np.log(t.pdf((x - xi) / xisigma, 2 * alpha)))


def exercise_7_15(url="/studier/emner/matnat/math/STK3505/h20/newyorkdaily.txt"):
    """Fit the t model to each of the four series and compare sd estimates.

    Prints the model-implied standard deviations
    sigma * sqrt(alpha / (alpha - 1)) -- the sd of a t with 2*alpha df
    scaled by sigma -- next to the empirical ones.
    """
    xMat = np.array(pd.read_csv(url, header=None, sep=r'\s+'))
    thetaMat = np.zeros((3, 4))
    for i in range(4):
        col = xMat[:, i]
        x0 = [np.mean(col), np.std(col), 10]  # moment-based start values
        res = minimize(llminust, x0=x0, method='SLSQP', args=(col,))
        thetaMat[:, i] = res.x
    print(thetaMat[1, :] * np.sqrt(thetaMat[2, :] / (thetaMat[2, :] - 1)))
    print(np.std(xMat, axis=0))


if __name__ == "__main__":
    exercise_7_9()
    exercise_7_11()
    exercise_7_12()
    exercise_7_13()
    exercise_7_15()