Skip to content

Commit

Permalink
upd1
Browse files Browse the repository at this point in the history
  • Loading branch information
cefect committed Mar 21, 2020
1 parent a12c249 commit 7f22f6b
Show file tree
Hide file tree
Showing 11 changed files with 582 additions and 1 deletion.
17 changes: 17 additions & 0 deletions .project
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>COVID_01</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.python.pydev.PyDevBuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.python.pydev.pythonNature</nature>
</natures>
</projectDescription>
8 changes: 8 additions & 0 deletions .pydevproject
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?eclipse-pydev version="1.0"?><pydev_project>
<pydev_pathproperty name="org.python.pydev.PROJECT_SOURCE_PATH">
<path>/${PROJECT_DIR_NAME}</path>
</pydev_pathproperty>
<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python interpreter</pydev_property>
<pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">miniconda3</pydev_property>
</pydev_project>
6 changes: 5 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
@@ -1 +1,5 @@
# COVID_01
# COVID_01

## Installation
1) git clone https://github.com/HopkinsIDD/COVIDScenarioPipeline
2) git submodule init
Binary file added data/.DS_Store
Binary file not shown.
30 changes: 30 additions & 0 deletions data/utah/UT_COVID19_Data_03202020.csv
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
Date,GEOID,New Cases,County
03-06-2020,49001,1,Davis
03-10-2020,49043,1,Summit
03-11-2020,49057,1,Weber
03-12-2020,49035,2,Salt Lake
03-13-2020,49035,1,Salt Lake
03-14-2020,49001,1,Davis
03-14-2020,49035,11,Salt Lake
03-14-2020,49043,1,Summit
03-15-2020,49001,1,Davis
03-15-2020,49053,1,Washington
03-17-2020,49003,1,Box Elder
03-17-2020,49001,1,Davis
03-17-2020,49035,6,Salt Lake
03-17-2020,49043,6,Summit
03-17-2020,49045,1,Tooele
03-17-2020,49051,2,Wasatch
03-17-2020,49057,3,Weber
03-18-2020,49001,2,Davis
03-18-2020,49035,2,Salt Lake
03-18-2020,49043,7,Summit
03-18-2020,49049,1,Utah
03-20-2020,49005,2,Cache
03-20-2020,49001,6,Davis
03-20-2020,49035,22,Salt Lake
03-20-2020,49043,13,Summit
03-20-2020,49045,1,Tooele
03-20-2020,49049,1,Utah
03-20-2020,49051,2,Wasatch
03-20-2020,49057,2,Weber
30 changes: 30 additions & 0 deletions data/utah/geodata.csv
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
id,geoid,pop2010,stateUSPS
0,49001.0,6629.0,UT
1,49003.0,49975.0,UT
2,49005.0,112656.0,UT
3,49007.0,21403.0,UT
4,49009.0,1059.0,UT
5,49011.0,306479.0,UT
6,49013.0,18607.0,UT
7,49015.0,10976.0,UT
8,49017.0,5172.0,UT
9,49019.0,9225.0,UT
10,49021.0,46163.0,UT
11,49023.0,10246.0,UT
12,49025.0,7125.0,UT
13,49027.0,12503.0,UT
14,49029.0,9469.0,UT
15,49031.0,1556.0,UT
16,49033.0,2264.0,UT
17,49035.0,1029655.0,UT
18,49037.0,14746.0,UT
19,49039.0,27822.0,UT
20,49041.0,20802.0,UT
21,49043.0,36324.0,UT
22,49045.0,58218.0,UT
23,49047.0,32588.0,UT
24,49049.0,516564.0,UT
25,49051.0,23530.0,UT
26,49053.0,138115.0,UT
27,49055.0,2778.0,UT
28,49057.0,231236.0,UT
29 changes: 29 additions & 0 deletions data/utah/mobility.txt

Large diffs are not rendered by default.

17 changes: 17 additions & 0 deletions main.bat
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
:: Put the local Miniconda install and its DLL/library folders on PATH so
:: the python.exe invoked below can resolve its native dependencies and the
:: embedded R runtime (Lib\R) used via rpy2.
PATH C:\LS\06_SOFT\conda\miniconda3
PATH %PATH%;C:\LS\06_SOFT\conda\miniconda3\DLLs
PATH %PATH%;C:\LS\06_SOFT\conda\miniconda3\lib
PATH %PATH%;C:\LS\06_SOFT\conda\miniconda3\site-packages
PATH %PATH%;C:\LS\06_SOFT\conda\miniconda3\site-packages\win32
PATH %PATH%;C:\LS\06_SOFT\conda\miniconda3\site-packages\win32\lib
PATH %PATH%;C:\LS\06_SOFT\conda\miniconda3\site-packages\Pythonwin
PATH %PATH%;C:\LS\06_SOFT\conda\miniconda3\Library
PATH %PATH%;C:\LS\06_SOFT\conda\miniconda3\Library\bin
PATH %PATH%;C:\LS\06_SOFT\conda\miniconda3\Lib\R
PATH %PATH%;C:\LS\06_SOFT\conda\miniconda3\Lib\R\library
:: Echo the final PATH for debugging.
ECHO %PATH%

:: Run the model driver with positional args: <nsim> <scenario> <nprocesses>.
:: NOTE(review): this invokes main_UT.py, which is not among the files added
:: in this change (main_319PM.py is) -- confirm the script name.
C:\LS\06_SOFT\conda\miniconda3\python.exe main_UT.py 2 NoNPI 5

pause

118 changes: 118 additions & 0 deletions main_319PM.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,118 @@
import numpy as np
import pandas as pd
import datetime, time, multiprocessing, itertools, sys
import matplotlib.pyplot as plt


#set the R_USER variable
import os
os.environ["R_USER"] = "myName"
os.environ["R_HOME"] = r"C:\Program Files\R\R-3.6.3" #point to your R install

#setup the rinterface
import rpy2.rinterface as rinterface
rinterface.initr()

#===============================================================================
# from rpy2.robjects.packages import importr
# utils = importr('utils')
# utils.install_packages('dplyr')
#===============================================================================



#===============================================================================
# import custom modules
#===============================================================================

from seir_fix01 import seir
from COVIDScenarioPipeline.SEIR import setup
#from COVIDScenarioPipeline.SEIR import results

class WestCoastSpatialSetup():
    """Spatial setup holding county-scale geography and mobility data.

    NOTE(review): despite the class name, this loads the 'utah' dataset
    (see ``setup_name``) -- presumably the name is historical; confirm.

    Attributes:
        setup_name: dataset identifier used to build the data folder path.
        folder: relative folder containing geodata.csv and mobility.txt.
        data: per-county geodata table (id, geoid, pop2010, stateUSPS).
        mobility: county-to-county mobility matrix from mobility.txt.
        popnodes: per-county 2010 population as a numpy array.
        nnodes: number of counties (rows in the geodata table).
    """
    def __init__(self):
        self.setup_name = 'utah'
        self.folder = f'data/{self.setup_name}/'

        geodata = pd.read_csv(f'{self.folder}geodata.csv')
        self.data = geodata
        self.mobility = np.loadtxt(f'{self.folder}mobility.txt')
        self.popnodes = geodata['pop2010'].to_numpy()
        self.nnodes = len(geodata)

if __name__ == '__main__': # For windows thread

    #===========================================================================
    # run parameters (hard-coded test values; positions mirror the
    # command-line arguments passed by main.bat: <nsim> <scenario> <nproc>)
    #===========================================================================
    pars = {
        1: 2,        # number of simulations
        2: 'NoNPI',  # NPI scenario name
        3: 5,        # number of worker processes
    }

    # Map each scenario name to its NPI R script.
    npi_scripts = {
        'NoNPI': 'COVIDScenarioPipeline/data/NPI_Scenario1_None.R',
        'SC': 'COVIDScenarioPipeline/data/NPI_Scenario2_School_Closure.R',
        'BI1918': 'COVIDScenarioPipeline/data/NPI_Scenario3_Bootsma_1918Influenza.R',
        'KansasCity': 'COVIDScenarioPipeline/data/NPI_Scenario4_KansasCity.R',
        'Wuhan': 'COVIDScenarioPipeline/data/NPI_Scenario5_Wuhan.R',
    }

    #===========================================================================
    # build the simulation setup and run
    #===========================================================================
    s = setup.Setup(setup_name = 'mid_utah_'+pars[2],
                    spatial_setup = WestCoastSpatialSetup(),
                    nsim = int(pars[1]),
                    ti = datetime.date(2020, 3, 6),
                    tf = datetime.date(2020, 10, 1),
                    interactive = False,
                    write_csv = True,
                    dt = 1/4)

    # Fail loudly on an unknown scenario instead of leaving script_npi
    # unset (the original if-chain silently skipped unrecognized names).
    try:
        s.script_npi = npi_scripts[pars[2]]
    except KeyError:
        raise ValueError(f"unknown NPI scenario: {pars[2]!r}; "
                         f"expected one of {sorted(npi_scripts)}") from None

    tic = time.time()

    # NOTE(review): 'seir' here comes from 'from seir_fix01 import seir' at
    # the top of the file, but seir_fix01 does not define a name 'seir' --
    # this likely should be 'import seir_fix01 as seir'; confirm.
    # Do not rebind the module name with the result (the original did
    # 'seir = seir.run_parallel(...)', shadowing the module).
    results = seir.run_parallel(s, int(pars[3]))
    print(f">>> Runs done in {time.time()-tic} seconds...")
164 changes: 164 additions & 0 deletions seir_fix01.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,164 @@
import time, itertools, multiprocessing
from numba import jit, jitclass, int64, float64
import numpy as np
from rpy2 import robjects
from rpy2.robjects import pandas2ri
pandas2ri.activate()
import pandas as pd
import warnings, random
from rpy2.rinterface import RRuntimeWarning
warnings.filterwarnings("ignore", category=RRuntimeWarning)
import rpy2.robjects as ro
from rpy2.robjects.conversion import localconverter
r_source = robjects.r['source']
r_assign = robjects.r['assign']
r_options = robjects.r['options']
r_options(warn=-1)
from rpy2.rinterface_lib.callbacks import logger as rpy2_logger
import logging, scipy
from COVIDScenarioPipeline.SEIR import setup
rpy2_logger.setLevel(logging.ERROR)
import uuid

# Number of model compartments; the CSV output additionally appends a
# derived daily-incidence row ("diffI") at index ncomp.
ncomp = 7
# Integer indices for the compartments: Susceptible, Exposed, three
# infectious stages (I1-I3), Recovered, and cumulative infections.
S, E, I1, I2, I3, R, cumI = np.arange(ncomp)


def _load_importation(s):
    """Build the (ndays, nnodes) daily case-importation array for the run.

    Reads the observed Utah case CSV from the spatial setup's data folder,
    pivots it to dates x GEOID, adds zero columns for counties with no
    reported cases, sorts columns by GEOID to match the geodata ordering,
    and reindexes onto the full simulation date range [s.ti, s.tf]
    (missing days filled with 0).
    """
    # Use the spatial setup's folder (same value the R scripts receive),
    # instead of duplicating the hard-coded 'data/utah/' path.
    importation = pd.read_csv(f'{s.spatset.folder}UT_COVID19_Data_03202020.csv')
    importation.drop('County', axis=1, inplace=True)
    importation = importation.pivot(index='Date', columns='GEOID', values='New Cases')
    importation = importation.fillna(value=0)
    importation.columns = pd.to_numeric(importation.columns)
    # Ensure every county in the setup has a column, even with zero cases.
    for col in s.spatset.data['geoid']:
        if col not in importation.columns:
            importation[col] = 0
    importation = importation.reindex(sorted(importation.columns), axis=1)
    idx = pd.date_range(s.ti, s.tf)
    importation.index = pd.to_datetime(importation.index)
    importation = importation.reindex(idx, fill_value=0)
    return importation.to_numpy()


def _save_states_csv(s, states):
    """Tidy the (ncomp, nnodes, nsteps) state array and write one CSV.

    Keeps one sub-step per day, appends a derived daily-incidence row
    ("diffI", the day-over-day difference of cumI), labels compartments,
    and writes a date-indexed CSV named with a fresh UUID.
    """
    a = states.copy()[:, :, ::int(1/s.dt)]      # one sample per day
    a = np.moveaxis(a, 1, 2)
    a = np.moveaxis(a, 0, 1)                    # -> (day, comp, node)
    b = np.diff(a, axis=0)
    difI = np.zeros((s.t_span+1, s.nnodes))
    difI[1:, :] = b[:, cumI, :]                 # daily incidence from cumI
    na = np.zeros((s.t_span+1, ncomp+1, s.nnodes))
    na[:, :-1, :] = a
    na[:, -1, :] = difI
    m, n, r = na.shape
    out_arr = np.column_stack((np.tile(np.arange(n), m), na.reshape(n*m, -1)))
    out_df = pd.DataFrame(out_arr,
                          columns=['comp'] + list(s.spatset.data['geoid'].astype(int)),
                          index=pd.date_range(s.ti, s.tf, freq='D').repeat(ncomp+1))
    # Replace numeric compartment indices with their labels in one pass
    # (the original chained eight inplace replace() calls).
    out_df['comp'] = out_df['comp'].replace(
        {S: 'S', E: 'E', I1: 'I1', I2: 'I2', I3: 'I3', R: 'R',
         cumI: 'cumI', ncomp: 'diffI'})
    # index=True (the original passed the string 'time', which only worked
    # because any non-empty string is truthy); index_label names the column.
    out_df.to_csv(f"{s.datadir}{s.timestamp}_{s.setup_name}_{str(uuid.uuid4())}.csv",
                  index=True, index_label='time')


def onerun_SEIR(s, uid):
    """Run one stochastic SEIR simulation and optionally write its CSV.

    Sources the NPI scenario script in the embedded R session, loads the
    importation data, integrates the model with the numba-compiled core,
    and (if s.write_csv) saves the tidied output. Returns 1 on completion.
    """
    scipy.random.seed()  # re-seed so parallel worker processes differ
    r_assign('ti_str', str(s.ti))
    r_assign('tf_str', str(s.tf))
    r_assign('foldername', s.spatset.folder)
    r_source(s.script_npi)            # defines NPI in the R session
    npi = robjects.r['NPI'].T

    importation = _load_importation(s)

    y0 = np.zeros((ncomp, s.nnodes))
    y0[S, :] = s.popnodes             # everyone starts susceptible

    states = steps_SEIR_nb(setup.parameters_quick_draw(s, npi),
                           y0,
                           uid,
                           s.dt,
                           s.t_inter,
                           s.nnodes,
                           s.popnodes,
                           s.mobility,
                           s.dynfilter,
                           importation)

    # Tidy up data for R, to save it (the original also contained a dead
    # no-op statement, str(uuid.uuid4())[:2], removed here):
    if s.write_csv:
        _save_states_csv(s, states)

    return 1

def run_parallel(s, processes=multiprocessing.cpu_count()): # set to 16 when running on server
    """Run s.nsim independent simulations across a pool of worker processes.

    Each worker calls onerun_SEIR(s, uid) with a distinct uid in
    [0, s.nsim); returns the list of per-run results.
    """
    start = time.time()
    sim_ids = np.arange(s.nsim)

    with multiprocessing.Pool(processes=processes) as pool:
        results = pool.starmap(onerun_SEIR,
                               zip(itertools.repeat(s), sim_ids))

    print(f">>> {s.nsim} Simulations done in {time.time()-start} seconds...")
    return results



#@jit(float64[:,:,:](float64[:,:], float64[:], int64), nopython=True)
@jit(nopython=True)
def steps_SEIR_nb(p_vec, y0, uid, dt, t_inter, nnodes, popnodes,
                  mobility, dynfilter, importation):
    """
    Numba-compiled core: advance the stochastic spatial SEIR model through
    every sub-step in t_inter, recording the state after each step.

    Written with explicit loops because they are expanded by the numba
    compiler (hence not a problem), and only a limited set of functions is
    allowed in nopython mode, which is needed for speed.

    p_vec: parameter draws; p_vec[0][it] is used as the per-step exposure
        rate, p_vec[1][0][0] the E->I1 rate, p_vec[2][0][0] the stage
        progression/recovery rate (shapes inferred from the indexing here
        -- confirm against setup.parameters_quick_draw).
    y0: (ncomp, nnodes) initial state.
    uid: simulation id (currently unused; the seeding line is commented out).
    dt: sub-step length in days.
    t_inter: sequence of simulation times, one per sub-step.
    nnodes / popnodes: node count and per-node populations.
    mobility: (nnodes, nnodes) movement-rate matrix indexed [origin, dest].
    dynfilter: dynamic-filter bounds (the filtering check is commented out).
    importation: per-day imported cases, indexed importation[day][node].

    Returns an (ncomp, nnodes, len(t_inter)) array of post-step states.
    """
    #np.random.seed(uid)
    t = 0

    y = np.copy(y0)
    states = np.zeros((ncomp, nnodes, len(t_inter)))

    # Scratch buffers reused across iterations to avoid reallocation.
    mv = np.empty(ncomp-1)
    exposeCases = np.empty(nnodes)
    incidentCases = np.empty(nnodes)
    incident2Cases = np.empty(nnodes)
    incident3Cases = np.empty(nnodes)
    recoveredCases = np.empty(nnodes)

    # Per-sub-step transition probabilities (constant over the run):
    # E->I1, and the I1->I2->I3->R stage progressions.
    p_infect = 1 - np.exp(-dt*p_vec[1][0][0])
    p_recover = 1 - np.exp(-dt*p_vec[2][0][0])

    for it, t in enumerate(t_inter):
        # Once per simulated day, seed that day's imported cases into E.
        if (it%int(1/dt)==0):
            y[E] = y[E] + importation[int(t)]
        # Stochastic mobility: for each origin/destination pair, draw a
        # binomial number of movers from every compartment except cumI.
        for ori in range(nnodes):
            for dest in range(nnodes):
                for c in range(ncomp-1):
                    mv[c] = np.random.binomial(y[c,ori], 1 - np.exp(-dt*mobility[ori, dest]/popnodes[ori]))
                y[:-1,dest] += mv
                y[:-1,ori] -= mv


        # Per-node exposure probability this step, driven by local
        # prevalence across all three infectious stages.
        p_expose = 1 - np.exp(-dt*p_vec[0][it]*(y[I1]+y[I2]+y[I3])/popnodes) # vector

        # Draw the stochastic transitions for every node.
        for i in range(nnodes):
            exposeCases[i] = np.random.binomial(y[S][i], p_expose[i])
            incidentCases[i] = np.random.binomial(y[E][i], p_infect)
            incident2Cases[i] = np.random.binomial(y[I1][i], p_recover)
            incident3Cases[i] = np.random.binomial(y[I2][i], p_recover)
            recoveredCases[i] = np.random.binomial(y[I3][i], p_recover)

        # Apply the net flows between compartments.
        y[S] += -exposeCases
        y[E] += exposeCases - incidentCases
        y[I1] += incidentCases - incident2Cases
        y[I2] += incident2Cases - incident3Cases
        y[I3] += incident3Cases - recoveredCases
        y[R] += recoveredCases
        y[cumI] += incidentCases
        states[:,:,it] = y
        # NOTE(review): importations are credited to cumI *after* the state
        # is recorded above, so the stored cumI lags imported cases by one
        # sub-step -- confirm this is intended.
        if (it%int(1/dt)==0):
            y[cumI] += importation[int(t)]
        #if (it%(1/dt) == 0 and (y[cumI] <= dynfilter[int(it%(1/dt))]).any()):
        # return -np.ones((ncomp, nnodes, len(t_inter)))

    return states

Loading

0 comments on commit 7f22f6b

Please sign in to comment.