#!/usr/bin/env python3
"""BayesLikelihood.py: tossing a coin with a Bayesian prior.

Compares the inference of the head-probability p of a coin, after N
tosses with k heads, under three different priors:

  * a flat (uninformative) prior,
  * a Gaussian prior centred on a fair coin (p = 0.5),
  * a bimodal "manipulated coin" prior peaked at p = 0 and p = 1.

Both the Bayesian likelihood L(p | N, k) * prior(p) and the
corresponding Delta(-ln L) curves are shown on a 4x4 grid of subplots
for an increasing number of tosses.

.. author:: Guenter Quast
"""

import numpy as np
import scipy.special as sp


# ---- probability density distributions --------------------------------

def fUniform(x, a, b):
    """Uniform pdf on [a, b], evaluated element-wise for (array) x."""
    return np.ones_like(np.asarray(x, dtype=float)) / (b - a)


def fBinomial(x, p, n):
    """Binomial probability of k = round(x) successes in n trials,
    each with success probability p."""
    k = np.round(x)
    return sp.binom(n, k) * p**k * (1. - p)**(n - k)


def fGauss(x, mu=0., sigma=1.):
    """Gaussian pdf with mean mu and width sigma."""
    return np.exp(-(x - mu)**2 / 2. / sigma**2) / np.sqrt(2. * np.pi) / sigma


# ---- the three priors to be compared ----------------------------------

def fPrior1(x):
    """Flat (uninformative) prior on [0, 1]."""
    return fUniform(x, 0., 1.)


def fPrior2(x):
    """Gaussian prior centred on a fair coin (p = 0.5)."""
    return fGauss(x, mu=0.5, sigma=0.1)


def fPrior3(x):
    """Bimodal 'manipulated coin' prior, peaked at p = 0 and p = 1."""
    return fGauss(x, mu=0., sigma=0.1) + fGauss(x, mu=1., sigma=0.1)


# ---- coin tossing and (log-)likelihood --------------------------------

def TossCoin(p, N=None):
    """Toss a coin with head probability p.

    Returns a single bool if N is None, otherwise an int array of N
    results (1 = head, 0 = tail).

    NOTE(review): the tail of this function was garbled in the original
    source; reconstructed from the call site ``TossCoin(0.25, 500)``
    followed by ``np.cumsum`` on the result -- confirm against the
    original script.
    """
    if N is None:
        return np.random.rand() < p
    return np.asarray(np.random.rand(N) < p, dtype=int)


def Likelihood(p, f, x, *args, **kwargs):
    """Likelihood for lists of parameters p_i and observations x,
    for a pdf f(x, p, ...):

        L(p_i) = prod_j f(x_j, p_i, ...)

    NOTE(review): the ``def`` line of this function was missing in the
    original source; the signature was reconstructed from the parallel
    ``negLogL`` and the call ``Likelihood(p, fBinomial, k, N)``.
    """
    L = np.zeros(len(p))
    for i, p_i in enumerate(p):
        L[i] = np.prod(f(x, p_i, *args, **kwargs))
    return L


def negLogL(p, f, x, *args, **kwargs):
    """Negative log-likelihood for lists of parameters p_i and
    observations x, for a pdf f(x, p, ...):

        nlL(p_i) = sum_j -ln f(x_j, p_i, ...)
    """
    nlL = np.zeros(len(p))
    for i, p_i in enumerate(p):
        nlL[i] = np.sum(-np.log(f(x, p_i, *args, **kwargs)))
    return nlL


# ---- main part --------------------------------------------------------

def main():
    """Generate the toss data and produce all figures."""
    # matplotlib is imported lazily so that the pdf/likelihood helpers
    # above remain importable without a plotting back-end installed
    import matplotlib.pyplot as plt

    # graphical display of the three priors
    plt.figure('Priors', figsize=(7.5, 5.))
    xplt = np.linspace(0., 1., 133)
    plt.plot(xplt, fPrior1(xplt), label="Uniform")
    plt.plot(xplt, fPrior2(xplt), label="Gauss ok")
    plt.plot(xplt, fPrior3(xplt), label="Gauss manipulated")
    plt.xlabel('x')
    plt.ylabel('fPrior(x)')
    plt.legend(loc="best")

    # generate the data: 500 tosses of an unfair coin (p = 0.25)
    data = TossCoin(0.25, 500)
    # cumulative number of heads after each toss
    nHeads = np.cumsum(data)
    # numbers of tosses to be shown (one per subplot, 16 entries)
    Nt = [1, 2, 3, 4, 5, 8, 16, 32, 50, 150, 200, 250, 300, 350, 400, 500]

    npl = 4                       # subplot grid is npl x npl
    pmin, pmax = 0.05, 0.95
    nppoints = 133
    p = np.linspace(pmin, pmax, nppoints, endpoint=True)

    # --- Bayesian likelihoods L(p; N, k) * prior(p) ---
    _, axarr = plt.subplots(npl, npl, figsize=(3.5 * npl, 3.6 * npl))
    for idx, ax in enumerate(axarr.flat):
        N = Nt[idx]
        k = nHeads[N - 1]
        L = Likelihood(p, fBinomial, k, N)
        ax.plot(p, L * fPrior1(p), '-', color='steelblue', linewidth=2)
        ax.plot(p, L * fPrior2(p), color='magenta', linewidth=1,
                linestyle='dashed')
        ax.plot(p, L * fPrior3(p), color='red', linewidth=1,
                linestyle="dotted")
        ax.text(0.25, 0.9, 'N= %i, k= %i' % (N, k), color='darkblue',
                transform=ax.transAxes, size='medium')
        # sparse axis labels: only on the first and the last subplot
        if idx == npl * npl - 1:
            ax.set_xlabel(r'$p$', size='x-large')
        if idx == 0:
            ax.set_ylabel(r'${\cal{L}}(p\,|\, N, k)$', size='large')

    # --- same information as Delta(-ln L), including the priors ---
    _, axarr2 = plt.subplots(npl, npl, figsize=(3.5 * npl, 3.6 * npl))
    for idx, axl in enumerate(axarr2.flat):
        N = Nt[idx]
        k = nHeads[N - 1]
        nlL = negLogL(p, fBinomial, k, N)
        for prior, style in (
                (fPrior1, dict(color='steelblue', linewidth=2)),
                (fPrior2, dict(color='magenta', linewidth=1,
                               linestyle='dashed')),
                (fPrior3, dict(color='red', linewidth=1,
                               linestyle='dotted'))):
            # -ln(L * prior), shifted so that the minimum is at zero
            nlL_B = nlL - np.log(prior(p))
            nlL_B -= min(nlL_B)
            axl.plot(p, nlL_B, **style)
        axl.text(0.25, 0.9, 'N= %i, k= %i' % (N, k), color='darkblue',
                 transform=axl.transAxes, size='medium')
        # only the region around the minimum is interesting
        axl.set_ylim(0., 5.)
        # show (sparse) axis labels
        if idx == npl * npl - 1:
            axl.set_xlabel(r'$p$', size='x-large')
        if idx == 0:
            axl.set_ylabel(r'$\Delta\, -{\rm ln}{\cal{L}}(p\,|\, N, k)$',
                           size='large')

    plt.show()


if __name__ == "__main__":
    main()