Commit 0fed682b authored by Tor-Einar Skog

Convert string templating to f-strings

parent 8e4e9322
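
The change is mechanical: every printf-style "... %s ..." % (args) template in the script is rewritten as an f-string with the same interpolated values. A minimal before/after sketch of the pattern (the path value is illustrative, not taken from the script):

    tmpfile_path = "/tmp/"  # illustrative value
    # before: printf-style interpolation
    cmd_old = "rm %s*" % tmpfile_path
    # after: equivalent f-string
    cmd_new = f"rm {tmpfile_path}*"
    assert cmd_old == cmd_new  # both evaluate to "rm /tmp/*"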
@@ -25,37 +25,39 @@ local_timezone = pytz.timezone("Europe/Oslo")
#"""
# Empty the tmpfile folder
subprocess.run("rm %s*" % tmpfile_path, shell=True)
subprocess.run(f"rm {tmpfile_path}*", shell=True)
weatherdata_files = glob.glob("%sweather-*.nc" % infile_path)
weatherdata_files = glob.glob(f"{infile_path}weather-*.nc")
# Iterate the set of hourly weather data files
for file in weatherdata_files:
file_date = file[file.index("weather")+8:file.index("weather")+18]
# Assuming we're in the summer
wh_sum_date = datetime.fromisoformat("%sT00:00:00%s" % (file_date, utc_offset))
wh_sum_date = datetime.fromisoformat(f"{file_date}T00:00:00{utc_offset}")
#print(wh_sum_date)
#print('cdo -setdate,%s -settime,22:00:00 -chname,WH,WH_SUM -timsum -selname,WH -aexpr,"WH = RR > 0.2 || UM > 88.0 ? 1 : 0;" %s %swh_%s.nc' % (file_date, file, file_path, file_date))
# Produce daily files with WH_SUM, which is the number of "Wet hours" (WH) for a given day
# WH is defined as RR > 0.2 || UM > 88.0
+wh_sum_date_utc = wh_sum_date.astimezone(timezone.utc).strftime("%Y-%m-%d")
+wh_sum_hour_utc = wh_sum_date.astimezone(timezone.utc).strftime("%H")
subprocess.run(
-'cdo -setdate,%s -settime,%s:00:00 -chname,WH,WH_DAYSUM -timsum -selname,WH -aexpr,"WH = RR > 0.2 || UM > 88.0 ? 1 : 0;" %s %swh_%s.nc' % (wh_sum_date.astimezone(timezone.utc).strftime("%Y-%m-%d"), wh_sum_date.astimezone(timezone.utc).strftime("%H"), file, tmpfile_path, file_date),
+f'cdo -setdate,{wh_sum_date_utc} -settime,{wh_sum_hour_utc}:00:00 -chname,WH,WH_DAYSUM -timsum -selname,WH -aexpr,"WH = RR > 0.2 || UM > 88.0 ? 1 : 0;" {file} {tmpfile_path}wh_{file_date}.nc',
shell=True
)
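
Note: the -aexpr/-timsum chain above marks each hour as wet and then counts wet hours per day. The same rule written out in plain Python, on hypothetical hourly values (not data from the script):

    # hypothetical hourly precipitation (mm) and relative humidity (%)
    rr = [0.0, 0.3, 0.5, 0.0]
    um = [70.0, 85.0, 92.0, 89.0]
    # an hour is "wet" if RR > 0.2 or UM > 88.0
    wh = [1 if r > 0.2 or u > 88.0 else 0 for r, u in zip(rr, um)]
    wh_daysum = sum(wh)  # corresponds to WH_DAYSUM for that day -> 3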
# Concatenate daily files > one file with daily values
-subprocess.run('cdo -O mergetime %swh_*.nc %swh_daysum.nc' % (tmpfile_path, tmpfile_path), shell=True)
+subprocess.run(f'cdo -O mergetime {tmpfile_path}wh_*.nc {tmpfile_path}wh_daysum.nc', shell=True)
# Add sum of WH_DAYSUM[yesterday] + WH_DAYSUM[today] + WH_DAYSUM[tomorrow] into WHS[today]
# timselsum skips every 3 steps when summing 3 timesteps, so we must
# create three different files and then merge them
-subprocess.run('cdo timselsum,3,0 %swh_daysum.nc %swh_3daysum_tmp_0.nc' % (tmpfile_path, tmpfile_path), shell=True)
-subprocess.run('cdo timselsum,3,1 %swh_daysum.nc %swh_3daysum_tmp_1.nc' % (tmpfile_path, tmpfile_path), shell=True)
-subprocess.run('cdo timselsum,3,2 %swh_daysum.nc %swh_3daysum_tmp_2.nc' % (tmpfile_path, tmpfile_path), shell=True)
+subprocess.run(f'cdo timselsum,3,0 {tmpfile_path}wh_daysum.nc {tmpfile_path}wh_3daysum_tmp_0.nc', shell=True)
+subprocess.run(f'cdo timselsum,3,1 {tmpfile_path}wh_daysum.nc {tmpfile_path}wh_3daysum_tmp_1.nc', shell=True)
+subprocess.run(f'cdo timselsum,3,2 {tmpfile_path}wh_daysum.nc {tmpfile_path}wh_3daysum_tmp_2.nc', shell=True)
-subprocess.run('cdo -chname,WH_DAYSUM,WHS -mergetime %swh_3daysum_tmp_*.nc %swh_3daysum_tmp_merged.nc' % (tmpfile_path, tmpfile_path), shell=True)
+subprocess.run(f'cdo -chname,WH_DAYSUM,WHS -mergetime {tmpfile_path}wh_3daysum_tmp_*.nc {tmpfile_path}wh_3daysum_tmp_merged.nc', shell=True)
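
Note: timselsum,3,<offset> sums non-overlapping groups of three consecutive timesteps starting at the given offset, so the three offset runs together cover every possible 3-day window once, and mergetime interleaves them back into a rolling sum. A rough Python analogue of that grouping, on a hypothetical list of daily sums:

    daysum = [2, 5, 1, 0, 3, 4, 6]  # hypothetical WH_DAYSUM values, one per day

    def grouped_sums(values, nsets=3, offset=0):
        # sum consecutive, non-overlapping groups of nsets values, starting at offset
        return [sum(values[i:i + nsets]) for i in range(offset, len(values), nsets)]

    runs = [grouped_sums(daysum, offset=off) for off in range(3)]
    # runs -> [[8, 7, 6], [6, 13], [4, 10]]; the short trailing groups are the
    # incomplete windows the script discards in the next step.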
# the last timesteps are most likely wrong, due to lack of "tomorrows" when performing timselsum
# To remove the last ones:
@@ -77,7 +79,7 @@ subprocess.run('cdo -chname,WH_DAYSUM,WHS -mergetime %swh_3daysum_tmp_*.nc %swh_
# Timesteps with [1] - [0] != 172800 should be discarded
# Using netCDF4 to accomplish this
-wh_3daysum = nc.Dataset('%swh_3daysum_tmp_merged.nc' % tmpfile_path, 'r')
+wh_3daysum = nc.Dataset(f'{tmpfile_path}wh_3daysum_tmp_merged.nc', 'r')
time_bnds = wh_3daysum.variables["time_bnds"][:]
# Assuming that wrong time bounds only exist at the end of the time series, this works
number_of_timesteps_to_remove = 0
@@ -86,7 +88,7 @@ for time_bnd in time_bnds:
number_of_timesteps_to_remove = number_of_timesteps_to_remove + 1
wh_3daysum.close()
number_of_timesteps_to_keep = len(time_bnds) - number_of_timesteps_to_remove
-subprocess.run('cdo -seltimestep,1/%s %swh_3daysum_tmp_merged.nc %swh_3daysum.nc' % (number_of_timesteps_to_keep, tmpfile_path, tmpfile_path), shell=True)
+subprocess.run(f'cdo -seltimestep,1/{number_of_timesteps_to_keep} {tmpfile_path}wh_3daysum_tmp_merged.nc {tmpfile_path}wh_3daysum.nc', shell=True)
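
Note: the counting loop is partly collapsed in this view; the check it performs is the one described in the comments above, not reproduced verbatim here. A self-contained sketch of that logic, with hypothetical time bounds in seconds:

    # hypothetical time_bnds rows (start, end) in seconds; the last window is short
    time_bnds = [(0, 172800), (86400, 259200), (172800, 259200)]
    number_of_timesteps_to_remove = 0
    for time_bnd in time_bnds:
        # a complete 3-day window spans 172800 s; shorter ones lack "tomorrow" data
        if time_bnd[1] - time_bnd[0] != 172800:
            number_of_timesteps_to_remove += 1
    number_of_timesteps_to_keep = len(time_bnds) - number_of_timesteps_to_remove  # -> 2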
#"""
# Classifying warning status for the WHS model
@@ -94,11 +96,11 @@ subprocess.run('cdo -seltimestep,1/%s %swh_3daysum_tmp_merged.nc %swh_3daysum.nc
# 20 <= WHS < 40 --> Orange
# 40 <= WHS --> Red
subprocess.run('cdo -aexpr,"WARNING_STATUS = WHS < 20 ? 2 : -1; WARNING_STATUS = WHS < 40 && WARNING_STATUS == -1 ? 3 : WARNING_STATUS; WARNING_STATUS = WHS >= 40 ? 4 : WARNING_STATUS" %swh_3daysum.nc %sresult.nc' % (tmpfile_path, tmpfile_path), shell=True)
subprocess.run(f'cdo -aexpr,"WARNING_STATUS = WHS < 20 ? 2 : -1; WARNING_STATUS = WHS < 40 && WARNING_STATUS == -1 ? 3 : WARNING_STATUS; WARNING_STATUS = WHS >= 40 ? 4 : WARNING_STATUS" {tmpfile_path}wh_3daysum.nc {tmpfile_path}result.nc', shell=True)
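
Note: the chained aexpr assignments are just a three-way threshold. The equivalent classification in plain Python:

    def warning_status(whs):
        # WHS < 20 -> 2, 20 <= WHS < 40 -> 3 (orange), WHS >= 40 -> 4 (red)
        if whs < 20:
            return 2
        if whs < 40:
            return 3
        return 4

    warning_status(25)  # -> 3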
# Split the combined file into daily .nc files again, with YYYY-MM-DD in the filename. Convert to corresponding GeoTIFF files
-wh_3daysum = nc.Dataset('%swh_3daysum.nc' % tmpfile_path, 'r')
+wh_3daysum = nc.Dataset(f'{tmpfile_path}wh_3daysum.nc', 'r')
timesteps = wh_3daysum.variables["time"][:]
timestep_index = 1
timestep_dates = [] # Used in the mapfile template
@@ -107,10 +109,10 @@ for timestep in timesteps:
file_date = timestep_date.astimezone(local_timezone).strftime("%Y-%m-%d")
timestep_dates.append(file_date)
# Create NetCDF result file
-subprocess.run('cdo -seltimestep,%s/%s %sresult.nc %sresult_%s.nc' % (timestep_index, timestep_index, tmpfile_path, tmpfile_path, file_date), shell=True)
+subprocess.run(f'cdo -seltimestep,{timestep_index}/{timestep_index} {tmpfile_path}result.nc {tmpfile_path}result_{file_date}.nc', shell=True)
# Convert to GeoTIFF
# We only need WHS and WARNING_STATUS
-# Merge the WARNING_STATUS and WHS GeoTIFF files into one file with two bands.
+# Merge the WARNING_STATUS and WHS variables into one GeoTIFF file with two bands.
# The WARNING_STATUS should always be band #1
subprocess.run(f'gdal_merge.py -separate -o {outfile_path}result_{file_date}.tif NETCDF:"{tmpfile_path}result_{file_date}.nc":WARNING_STATUS NETCDF:"{tmpfile_path}result_{file_date}.nc":WHS', shell=True)
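
Since downstream consumers depend on the band order, a quick sanity check of one output file with the GDAL Python bindings (assuming they are installed; variable names as in the script) could look like this:

    from osgeo import gdal

    ds = gdal.Open(f"{outfile_path}result_{file_date}.tif")
    warning_status = ds.GetRasterBand(1).ReadAsArray()  # band 1: WARNING_STATUS
    whs = ds.GetRasterBand(2).ReadAsArray()             # band 2: WHS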