We will use three of the time series provided in the JHU daily data, namely the confirmed, the recovered, and the death cases.
Let's load some libraries:
import matplotlib.pyplot as plt
import plotly.express as px
import plotly.graph_objects as go
import numpy as np
from covid.plotting import plot_country_cases, get_world_map, plot_country_pace
from covid.database import WorldCovidMatcher, CountryCollection
import pandas as pd
import os
Let's first have a look at the data for the infected cases, which are labelled "confirmed cases" in the JHU terminology.
plot_country_cases(["Netherlands", "Brazil", "Italy", "Germany", "France",
"Spain", "US", "Australia","Portugal","Switzerland",
"Belgium","Netherlands"],
case_type="confirmed cases", cumulative=False, use_plotly=True)
Let's connect to the database and create the matchers for the confirmed, recovered, and death cases:
cm_c = WorldCovidMatcher('confirmed')
cm_c.build_database()
cm_r = WorldCovidMatcher('recovered')
cm_r.build_database()
cm_d = WorldCovidMatcher('deaths')
cm_d.build_database()
We pick a judicious country list, after discussion with colleagues in the I11 meeting room, and extract the confirmed, recovered, and death series (all daily, i.e. non-cumulative, and normalized per 100,000 inhabitants).
country_list=['Italy','Austria','France','Belgium','Spain']
country_list2=["Austria", "Italy", "Germany", "France",
"Spain", "Denmark","Greece",
"Belgium"]
c = cm_c.get_cases(CountryCollection(country_list2), normalized=True, cumulative=False)
r = cm_r.get_cases(CountryCollection(country_list2), normalized=True, cumulative=False)
d = cm_d.get_cases(CountryCollection(country_list2), normalized=True, cumulative=False)
# Reset the first sample of each daily series (likely an artifact of converting
# the cumulative JHU record into daily counts).
for country in country_list2:
    c[country].values[0] = 0
    d[country].values[0] = 0
    r[country].values[0] = 0
We will first try to obtain an estimate of $\beta-\alpha$ in the growth period. $$d_t I=\beta SI-\alpha I$$ When $S\simeq 1$, we have $I=I_0e^{(\beta-\alpha)t}$, and this is even more clearly visible on $\int_0^t I\,dt'=\frac{I_0}{\beta-\alpha}\left(e^{(\beta-\alpha)t}-1\right)$. We therefore look for an exponential behavior in the cumulative confirmed cases of the JHU data. Here is a selection of countries with similar behavior. The time origin is chosen at a prevalence of 0.1 per 100,000.
This behavior also has to be reproduced by the SBIR model.
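On a log scale this exponential regime appears as a straight line whose slope can be read off as a doubling time, which gives the rate directly:
$$d_t I\simeq(\beta-\alpha)\,I \;\Rightarrow\; I(t)=I_0\,e^{(\beta-\alpha)t},\qquad \beta-\alpha=\frac{\ln 2}{t_{\mathrm{double}}}.$$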
fig = go.Figure()
icum_sum = 0  # running sum over the selected countries
for country in country_list2:
    # cumulated confirmed minus recovered and deaths (per 100,000)
    Icum = np.cumsum(c[country].values - r[country].values - d[country].values)
    offset_date = c[country][Icum >= 0.1].index[0]
    cases = c[country].copy()
    cases.index -= offset_date
    # accumulate over a common window of days around the outbreak start
    icum_sum += Icum[(cases.index.days >= -10) & (cases.index.days < 40)]
    fig.add_trace(go.Scatter(x=cases.index.days, y=Icum, mode="lines",
                             name=country))
days_doubling = 3.1
fig.add_trace(go.Scatter(x=np.arange(-10, 40), y=icum_sum, mode="lines",
                         name='sum'))
fig.add_trace(go.Scatter(x=[0, 20], y=[20, 20*np.exp2(20/days_doubling)], mode="lines",
                         name='doubling every %0.1f days' % days_doubling))
fig.update_layout(title='Rough estimate of 𝛽-α=%0.3f' % (np.log(2)/days_doubling),
xaxis_title='Days',
yaxis_title='∫𝐼𝑑t (per 100000)')
beta_min_alpha=np.log(2)/days_doubling
fig.update_xaxes(range=(-20,100))
fig.update_yaxes(type="log")
fig.show()
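We now estimate the decay rate $\lambda$. In the decay phase the daily confirmed cases fall roughly as $e^{-\lambda t}$, so the cases still to come, $\int_t^{\infty} I\,dt'=I(t)/\lambda$, decay at the same rate. We therefore fit the remaining cumulative confirmed cases (count at the end of the record minus the cumulative count at time $t$) and read $\lambda$ off their halving time, $\lambda=\ln 2/t_{\mathrm{half}}$.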
fig = go.Figure()
rem_sum = 0  # running sum over the selected countries of the remaining cumulative cases
for country in country_list2:
    # Icum=np.cumsum(np.cumsum(c[country].values-r[country].values-d[country].values))
    Icum = np.cumsum(c[country].values)
    offset_date = c[country][Icum >= 0.1].index[0]
    cases = c[country].copy()
    cases.index -= offset_date
    rem_sum += Icum[-1] - Icum
    fig.add_trace(go.Scatter(x=cases.index.days, y=Icum[-1]-Icum, mode="lines", legendgroup="group2", name=country))
fig.add_trace(go.Scatter(x=cases.index.days, y=rem_sum, mode="lines", legendgroup="group2", name='Sum'))
days_doubling = 14.0
fig.add_trace(go.Scatter(x=[20, 100], y=[3000, 3000*np.exp2(-80/days_doubling)], mode="lines", legendgroup="group2",
                         name='halved every %0.1f days' % days_doubling))
fig.update_layout(title='Decay estimate of 𝜆=%0.3f' % (np.log(2)/days_doubling),
                  xaxis_title='Days',
                  yaxis_title='∫𝑡→∞ P 𝑑𝑡 (per 100000)')
fig.update_yaxes(type="log")
fig.show()
We now use these two fits to obtain $\lambda$ and $\beta-\alpha$ for each country, and use them as inputs for the SBIR model. The only remaining parameter is the diffusion term, which is adjusted to match the peak value of confirmed cases.
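In the loop below, each rate is obtained from a linear fit of $\log$(cases) against time over a fixed window of days, with a small offset added before taking the logarithm to avoid $\log(0)$. A minimal sketch of that step, using a hypothetical helper (not part of the covid package), could look like:

def fit_exponential_rate(series, day_min, day_max, eps=1e-3):
    # Linear fit of log(eps + y) versus day number over the window (day_min, day_max];
    # the slope is the exponential rate and the intercept fixes the prefactor.
    mask = (series.index.days > day_min) & (series.index.days <= day_max)
    t = np.asarray(series.index.days[mask])
    slope, intercept = np.polyfit(t, np.log(eps + series[mask].values), 1)
    return slope, intercept

# For instance, a growth rate over the first 25 days could be estimated as
# rate, _ = fit_exponential_rate(np.cumsum(cases), 0, 25)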
for country in ['Italy','Spain','Germany','France','Denmark']:
fig = go.Figure()
cases=c[country].copy()
offset_date = cases[cases.values >= 0.1].index[0]
cases.index-=offset_date
recovered=r[country].copy()
recovered.index-=offset_date
deaths=d[country].copy()
deaths.index-=offset_date
p1=np.polyfit(np.arange(0,25),np.log(np.cumsum(1e-3+cases[(cases.index.days>0) & (cases.index.days<26)])),1)
beta_minus_alpha=p1[0]
    cases2 = np.cumsum(c[country].values)
    cases2 = cases2[-1] - cases2  # remaining cumulative confirmed cases
    # first estimate of lambda from the daily cases (superseded by the fit below)
    p2 = np.polyfit(np.arange(40, 80), np.log(cases[(cases.index.days > 40) & (cases.index.days < 81)]), 1)
    lambd = p2[0]
    # final estimate of lambda from the remaining cumulative cases
    p2 = np.polyfit(np.arange(25, 60), np.log(cases2[(cases.index.days > 25) & (cases.index.days < 61)]), 1)
    lambd = p2[0]
fig.add_trace(go.Scatter(x=cases.index.days, y=cases, mode="markers", legendgroup="group2",name="confirmed"))
fig.add_trace(go.Scatter(x=np.arange(0,40), y=0.1*np.exp(beta_minus_alpha*np.arange(0,40)), mode="lines",
name='𝛽-α=%0.2f' % beta_minus_alpha))
fig.add_trace(go.Scatter(x=np.arange(0,100), y=np.exp(p2[1])*np.abs(p2[0])*np.exp(lambd*(np.arange(0,100))), mode="lines",
name='𝜆=%0.2f' % lambd))
t=np.linspace(0,100,1000)
t0=t[np.argmin(np.abs(0.1*np.exp(beta_minus_alpha*t)-np.exp(p2[1])*np.abs(p2[0])*np.exp(lambd*t)))]
#fig.add_trace(go.Scatter(x=[t0], y=[np.exp(p2[1])*np.abs(p2[0])*np.exp(lambd*t0)]))
    try:
        # overlay the SBIR model output for this country, if available (whitespace-separated columns)
        df = pd.read_csv(os.path.join('data', '%s.fort' % country), delimiter=r'\s+').transpose()
        fig.add_trace(go.Scatter(x=df.values[0], y=df.values[5]*1e5, name='SBIR'))
    except FileNotFoundError:
        pass
fig.update_yaxes(type="log")
fig.update_layout(title='%s, t=0 corresponds to %s' % (country,offset_date.strftime('%Y %m %d')),
yaxis_title='Daily confirmed cases')
fig.show()
cases=c[country].copy()
offset_date = cases[cases.values >= 0.1].index[0]
offset_date
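The same comparison can be made on the cumulative confirmed cases: the SBIR daily output is integrated in time (with cumtrapz) before being overlaid.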
from scipy.integrate import cumtrapz
for country in ['Italy','Spain','Germany','France','Denmark']:
fig = go.Figure()
cases=c[country].copy()
offset_date = cases[cases >= 0.1].index[0]
cases.index-=offset_date
recovered=r[country].copy()
recovered.index-=offset_date
deaths=d[country].copy()
deaths.index-=offset_date
p1=np.polyfit(np.arange(0,25),np.log(np.cumsum(1e-3+cases[(cases.index.days>0) & (cases.index.days<26)])),1)
beta_minus_alpha=p1[0]
    cases2 = np.cumsum(c[country].values)
    cases2 = cases2[-1] - cases2  # remaining cumulative confirmed cases
    # first estimate of lambda from the daily cases (superseded by the fit below)
    p2 = np.polyfit(np.arange(40, 80), np.log(cases[(cases.index.days > 40) & (cases.index.days < 81)]), 1)
    lambd = p2[0]
    # final estimate of lambda from the remaining cumulative cases
    p2 = np.polyfit(np.arange(25, 60), np.log(cases2[(cases.index.days > 25) & (cases.index.days < 61)]), 1)
    lambd = p2[0]
fig.add_trace(go.Scatter(x=cases.index.days[cases.index.days>0], y=np.cumsum(cases[cases.index.days>0]), mode="markers", legendgroup="group2",name="confirmed"))
# fig.add_trace(go.Scatter(x=np.arange(0,40), y=0.1*np.exp(beta_minus_alpha*np.arange(0,40)), mode="lines",
# name='𝛽-α=%0.2f' % beta_minus_alpha))
# fig.add_trace(go.Scatter(x=np.arange(0,100), y=np.exp(p2[1])*np.abs(p2[0])*np.exp(lambd*(np.arange(0,100))), mode="lines",
# name='𝜆=%0.2f' % lambd))
t=np.linspace(0,100,1000)
t0=t[np.argmin(np.abs(0.1*np.exp(beta_minus_alpha*t)-np.exp(p2[1])*np.abs(p2[0])*np.exp(lambd*t)))]
#fig.add_trace(go.Scatter(x=[t0], y=[np.exp(p2[1])*np.abs(p2[0])*np.exp(lambd*t0)]))
    try:
        # overlay the SBIR output, integrated in time to compare with the cumulative confirmed cases;
        # initial=0 keeps the integrated array the same length as the time axis
        df = pd.read_csv(os.path.join('data', '%s.fort' % country), delimiter=r'\s+').transpose()
        fig.add_trace(go.Scatter(x=df.values[0], y=cumtrapz(df.values[5]*1e5, df.values[0], initial=0), name='SBIR'))
    except FileNotFoundError:
        pass
fig.update_yaxes(type="log")
fig.update_layout(title='%s, t=0 corresponds to %s' % (country,offset_date.strftime('%Y %m %d')),
yaxis_title='Cumulative confirmed cases')
fig.show()
for country in country_list2:
fig = go.Figure()
    # use copies, as in the previous cells, before shifting the index
    cases = c[country].copy()
    offset_date = cases[cases.values >= 0.1].index[0]
    cases.index -= offset_date
    recovered = r[country].copy()
    recovered.index -= offset_date
    deaths = d[country].copy()
    deaths.index -= offset_date
p1=np.polyfit(np.arange(0,20),np.log(np.cumsum(cases[(cases.index.days>0) & (cases.index.days<21)])),1)
beta_minus_alpha=p1[0]
p2=np.polyfit(np.arange(40,80),np.log((cases[(cases.index.days>40) & (cases.index.days<81)])),1)
lambd=p2[0]
print('𝛽-α=%0.2f' % beta_minus_alpha)
print('𝜆=%0.2f' % lambd)
fig.add_trace(go.Scatter(x=cases.index.days, y=cases, mode="lines", legendgroup="group2",name="confirmed"))
fig.add_trace(go.Scatter(x=np.arange(0,40), y=0.1*np.exp(beta_minus_alpha*np.arange(0,40)), mode="lines"))
fig.add_trace(go.Scatter(x=np.arange(20,100), y=np.exp(p2[1])*np.exp(lambd*(np.arange(20,100))), mode="lines"))
t=np.linspace(0,100,1000)
t0=t[np.argmin(np.abs(0.1*np.exp(beta_minus_alpha*t)-np.exp(p2[1])*np.exp(lambd*t)))]
fig.add_trace(go.Scatter(x=[t0], y=[np.exp(p2[1])*np.exp(lambd*t0)]))
fig.update_yaxes(type="log")
fig.update_layout(title=country)
fig.show()
fig = go.Figure()
fig.add_trace(go.Scatter(x=cases.index.days, y=cases, mode="lines", legendgroup="group2",name="I"))
beta_minus_alpha=np.polyfit(np.arange(0,20),np.log(np.cumsum(cases[(cases.index.days>0) & (cases.index.days<21)])),1)[0]
lambd=np.polyfit(np.arange(40,80),np.log((cases[(cases.index.days>40) & (cases.index.days<81)])),1)[0]
print('𝛽-α=%0.2f' % beta_minus_alpha)
print('𝜆=%0.2f' % lambd)
%%javascript
//hack to fix export
require.config({
paths: {
d3: 'https://cdnjs.cloudflare.com/ajax/libs/d3/5.9.2/d3',
jquery: 'https://code.jquery.com/jquery-3.4.1.min',
plotly: 'https://cdn.plot.ly/plotly-latest.min'
},
shim: {
plotly: {
deps: ['d3', 'jquery'],
exports: 'plotly'
}
}
});