# app.py — Bayesian coin-toss posterior visualization (commit d6fb397)
import numpy as np
import streamlit as st
import scipy.stats
import matplotlib.pyplot as plt
from matplotlib import rc
# plt.style.use('fivethirtyeight')
# ---- Interactive controls -------------------------------------------------
st.subheader("Bayesian Coin Toss")
st_col = st.columns(1)[0]  # single column that will later hold the figure
# Observed data: N coin tosses with h heads (h's upper bound tracks N).
# NOTE(review): min_value=2 for N_heads means h=0 or h=1 can never be
# explored — confirm this restriction is intentional.
N = st.slider('N_samples', min_value=2, max_value=20, value=5, step=1)
h = st.slider('N_heads', min_value=2, max_value=N, value=4, step=1)
# Hyperparameters of the Beta(alpha, beta) prior over p(head).
alpha = st.slider('Alpha', min_value=0.5, max_value=5.0, value=2.0, step=0.1)
beta = st.slider('Beta', min_value=0.5, max_value=5.0, value=2.0, step=0.1)
# Evaluation grid over theta = p(head); endpoints 0 and 1 are excluded so the
# Beta pdf stays finite when alpha or beta < 1.
N_theta = 100
theta = np.linspace(0.01,0.99,N_theta)
#rc('font', size=20)
# rc('text', usetex=True)
# One figure with two y-axes: likelihood on the left (ax), prior/posterior
# densities on the right (axs).
fig, ax = plt.subplots(figsize=(10,4))
axs = ax.twinx()
def Bernoulli(theta, N, h):
    """Unnormalized binomial likelihood of observing h heads in N tosses.

    Parameters
    ----------
    theta : float or np.ndarray
        Probability of heads, p(head), in [0, 1].
    N : int
        Total number of tosses.
    h : int
        Number of observed heads (0 <= h <= N).

    Returns
    -------
    float or np.ndarray
        theta**h * (1 - theta)**(N - h). The binomial coefficient C(N, h)
        is deliberately omitted: only the shape over theta matters when
        plotting the likelihood curve.
    """
    # NOTE: the original body had its indentation stripped (syntax error);
    # restored here with the computation unchanged.
    return (theta ** h) * ((1 - theta) ** (N - h))
# Likelihood of the observed data evaluated on the theta grid.
Likelihood = [Bernoulli(t,N,h) for t in theta]
ax.plot(theta, Likelihood, label='Likelihood',color='b');
# Beta(alpha, beta) prior pdf, drawn on the twin (right-hand) axis.
axs.plot(theta, scipy.stats.beta.pdf(theta, alpha,beta), label='Prior',color='k');
ax.set_xlabel('p(head)');
# ax.vlines(h/N, *ax.get_ylim(), linestyle='--',label='MLE', color='b')
# axs.text(h/N,2,'MLE', color='b')
# Beta-Binomial conjugacy: the posterior is Beta(h + alpha, N - h + beta).
axs.plot(theta, [scipy.stats.beta.pdf(t, h+alpha, N-h+beta) for t in theta], color='r')
# Curves are labelled with in-plot text (colored to match) instead of a legend.
ax.text(theta[N_theta//4]+0.05, Likelihood[N_theta//4], 'Likelihood', color='b')
axs.text(theta[3*N_theta//4], scipy.stats.beta.pdf(theta, alpha,beta)[3*N_theta//4],'Prior')
# axs.text(alpha/(alpha+beta)-0.1,1,'Prior mean')
axs.text(theta[N_theta//2]-0.05,scipy.stats.beta.pdf(theta[N_theta//2], h+alpha, N-h+beta),'Posterior',color='r')
# axs.text((h+alpha)/(N+alpha+beta)-0.1,3,'Post. Mean',color='r')
# ax.vlines(alpha/(alpha+beta), *ax.get_ylim(), linestyle='--',label='Prior mean',color='k')
# ax.vlines((h+alpha)/(N+alpha+beta), *ax.get_ylim(), linestyle='--',label='Post. Mean',color='r')
# ax.set_title(f"n_samples={int(N)}, n_heads={int(h)}");
# Color the tick marks and axis labels to match the curves on each axis
# (blue = likelihood, red = prior/posterior densities).
ax.tick_params(axis='y', colors='b')
axs.tick_params(axis='y', colors='r')
ax.set_ylabel('Likelihood',color='b')
# rotation/labelpad keep the right-hand label readable outside the tick labels.
axs.set_ylabel('Prior/Posterior', color='r', rotation=270, labelpad=30)
# Remove the top spines for a cleaner look.
ax.spines['top'].set_visible(False)
axs.spines['top'].set_visible(False)
# Render the finished figure inside the single Streamlit column.
# NOTE: the body of the `with` block had its indentation stripped (syntax
# error); restored here with the call unchanged.
with st_col:
    st.pyplot(fig)
# CSS injected via st.markdown to hide Streamlit's default hamburger menu
# and footer (hence unsafe_allow_html=True).
hide_streamlit_style = """
<style>
#MainMenu {visibility: hidden;}
footer {visibility: hidden;}
</style>
"""
st.markdown(hide_streamlit_style, unsafe_allow_html=True)
# Explanatory notes rendered below the plot.
st.markdown("""
The above visualization shows the joint effect of data and prior on the posterior. There are some interesting observations here:
* When prior is $Beta(1, 1)$, it becomes Uniform prior and thus **uninformative**. In this case, posterior matches with likelihood.
* With an increase in the number of samples, the posterior gets closer to the likelihood. Thus, when the number of samples is less, prior plays an important role.
""")