#!/usr/bin/python3
import os
import subprocess
import glob
from dotenv import load_dotenv
from datetime import datetime, timedelta
from jinja2 import Environment, FileSystemLoader
import logging
import pytz
import configparser
import netCDF4 as nc
import numpy as np

logging.basicConfig(level=logging.INFO)
@@ -20,59 +21,80 @@ config = configparser.ConfigParser()
config.read("NAERSTADMO.cfg")
infile_path = os.getenv("WEATHER_DATA_DIR")
outfile_path = os.getenv("DATA_DIR")
outtmp_path = "out/"
tmpfile_path = "tmp/"
bg_filename = f"{tmpfile_path}background_data.nc"
tmp_filename = f"{tmpfile_path}background_data_tmp.nc"
prepareWHS = f"{tmpfile_path}prepare_WHS.nc"
utc_offset = "+02:00"
local_timezone = pytz.timezone("Europe/Oslo")
filename = f"{tmpfile_path}weather_data.nc"


def create_dataset():
    # Find the latest file from the previous run to create a start date
    last_final_date = None
    list_of_files = glob.glob(
        f"{outtmp_path}final_2[0-9][0-9][0-9]-[01][0-9]-[0123][0-9].nc", recursive=True
    )
    if list_of_files:
        latest_file = max(list_of_files, key=os.path.getctime)
        file_name = os.path.basename(latest_file)
        file_date = file_name[
            file_name.index("final_") + 6 : file_name.index("final_") + 16
        ]
        last_final_date = file_date

    if last_final_date is None:
        start_date = datetime.strptime(os.getenv("START_DATE"), "%Y-%m-%d")
    else:
        start_date = datetime.strptime(last_final_date, "%Y-%m-%d") - timedelta(days=4)
        print(
            f"Last date of final calculations is {last_final_date}. Start date = {start_date}"
        )
    # Find the set of data to merge and use as input file based on start date
    list_weatherdata_files = glob.glob(
        f"{infile_path}/met_1_0km_nordic-2[0-9][0-9][0-9]-[01][0-9]-[0123][0-9].nc"
    )
    for file in list_weatherdata_files:
        file_name = os.path.basename(file)
        file_date = file_name[
            file_name.index("nordic-") + 7 : file_name.index("nordic-") + 17
        ]
        end_date = start_date + timedelta(days=5)
        if file_date >= start_date.strftime(
            "%Y-%m-%d"
        ) and file_date <= end_date.strftime("%Y-%m-%d"):
            if not os.path.exists(f"{tmpfile_path}weather_data.nc"):
                subprocess.run(f"cp {file} {tmpfile_path}weather_data.nc", shell=True)
            else:
                subprocess.run(
                    f"mv {tmpfile_path}weather_data.nc {tmpfile_path}tmpfile.nc",
                    shell=True,
                )
                subprocess.run(
                    f"cdo -f nc2 mergetime {file} {tmpfile_path}tmpfile.nc {tmpfile_path}weather_data.nc",
                    shell=True,
                )
                subprocess.run(f"rm {tmpfile_path}tmpfile.nc", shell=True)
    # Ensure that the model is not run if weather data is not available
    if not os.path.exists(f"{tmpfile_path}weather_data.nc"):
        print(f"{tmpfile_path}weather_data.nc does not exist. Exit.")
        return

    subprocess.run(f"rm {outtmp_path}final_*", shell=True)

    create_saturation()
    create_pressure()
@@ -84,9 +106,8 @@ def create_dataset():

def create_warning_status(start_date):
    final_files = glob.glob(f"{outtmp_path}/final_*.nc")

    # Classifying warning status for the WHS model
    # 0 == RISK --> Grey
    # 0 < RISK < 1 --> Yellow
@@ -94,45 +115,77 @@ def create_warning_status(start_date):
    # 2.5 <= RISK --> Red
    for file in final_files:
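        # -timmax reduces each daily final file to the maximum over its timesteps;
        # the aexpr below classifies that maximum RISK into WARNING_STATUS codes (0/2/3/4)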
        subprocess.run(
            f'cdo -aexpr,"WARNING_STATUS = RISK < 0 ? 0 : -1; WARNING_STATUS = RISK <= 1 && WARNING_STATUS == -1 ? 2 : WARNING_STATUS; WARNING_STATUS = RISK <= 2.5 && WARNING_STATUS == -1 ? 3 : WARNING_STATUS;WARNING_STATUS = RISK >= 2.5 && WARNING_STATUS == -1 ? 4 : WARNING_STATUS" -timmax {file} {tmpfile_path}result_unmasked.nc',
            shell=True,
        )
        file_name = os.path.basename(file)
        file_date = file_name[
            file_name.index("final_") + 6 : file_name.index("final_") + 16
        ]

        # Mask results using a CSV file with polygons
        # Env variable MASK_FILE must be set
        if os.getenv("MASK_FILE") is not None:
            mask_file = os.getenv("MASK_FILE")
            print(f"Applying mask file {mask_file} to result.nc")
            subprocess.run(
                f"cdo -maskregion,{mask_file} {tmpfile_path}result_unmasked.nc {tmpfile_path}result_{file_date}.nc",
                shell=True,
            )
        else:
            os.rename(
                f"{tmpfile_path}result_unmasked.nc",
                f"{tmpfile_path}result_{file_date}.nc",
            )

        if file_date > start_date.strftime("%Y-%m-%d"):
            with open("/dev/null", "w") as devnull:
                subprocess.run(
                    f'gdal_translate -ot Int16 -of GTiff NETCDF:"{tmpfile_path}result_{file_date}.nc":WARNING_STATUS {tmpfile_path}result_WARNING_STATUS_{file_date}.tif',
                    shell=True,
                )
                subprocess.run(
                    f'gdal_translate -ot Float32 -of GTiff NETCDF:"{tmpfile_path}result_{file_date}.nc":RISK {tmpfile_path}RISK_result_{file_date}.tif',
                    shell=True,
                )
                subprocess.run(
                    f'gdal_translate -ot Float32 -of GTiff NETCDF:"{tmpfile_path}result_{file_date}.nc":IR {tmpfile_path}IR_result_{file_date}.tif',
                    shell=True,
                )

                # Need to reproject the files to ensure we have the projection given in the generated mapfile. We always use EPSG:4326 for this
                subprocess.run(
                    f"gdalwarp -t_srs EPSG:4326 {tmpfile_path}result_WARNING_STATUS_{file_date}.tif {outfile_path}result_WARNING_STATUS_{file_date}.tif",
                    shell=True,
                    stdout=devnull,
                )
                subprocess.run(
                    f"gdalwarp -t_srs EPSG:4326 {tmpfile_path}RISK_result_{file_date}.tif {outfile_path}RISK_result_{file_date}.tif",
                    shell=True,
                    stdout=devnull,
                )
                subprocess.run(
                    f"gdalwarp -t_srs EPSG:4326 {tmpfile_path}IR_result_{file_date}.tif {outfile_path}IR_result_{file_date}.tif",
                    shell=True,
                    stdout=devnull,
                )
    # Count tif-files to add to mapfile
    risk_files = glob.glob(f"{outfile_path}/RISK_*.tif")
    timestep_dates = []  # Used in the mapfile template
    for file in risk_files:
        file_name = os.path.basename(file)
        file_date = file_name[
            file_name.index("RISK_result_") + 12 : file_name.index("RISK_result_") + 22
        ]
        timestep_dates.append(file_date)

    # Generate mapfile
    # Building data sets for language specific legends
    languages = []
    language_codes = config["i18n"]["languages"].split(",")
    for language_code in language_codes:
        language = {"language_code": language_code}
        if ("i18n.%s" % language_code) in config:
@@ -141,213 +194,312 @@ def create_warning_status(start_date):
        languages.append(language)

    # The paths should be set in a .env file
    env = Environment(loader=FileSystemLoader("."))
    template = env.get_template("mapfile/template.j2")
    output = template.render(
        {
            "timestep_dates": timestep_dates,
            "mapserver_data_dir": os.getenv("MAPSERVER_DATA_DIR"),
            "mapserver_mapfile_dir": os.getenv("MAPSERVER_MAPFILE_DIR"),
            "mapserver_log_file": os.getenv("MAPSERVER_LOG_FILE"),
            "mapserver_image_path": os.getenv("MAPSERVER_IMAGE_PATH"),
            "mapserver_extent": os.getenv("MAPSERVER_EXTENT"),
            "languages": languages,
            "language_codes": language_codes,
        }
    )
    mapfile_outdir = os.getenv("MAPFILE_DIR")
    with open(f"{mapfile_outdir}/NAERSTADMO.map", "w") as f:
        f.write(output)

    subprocess.run(f"rm {tmpfile_path}*", shell=True)

def create_matrix():
    dataset = nc.Dataset(f"{tmpfile_path}background_data.nc", "r")
    data = dataset.variables["time"][:]
    ntimesteps = np.size(data)
    for time in range(
        ntimesteps - 5
    ):  # Need to cut the last day before the end due to WH calculations
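        # Each hourly step depends on the previous hour's state: selected variables are
        # renamed to *LastHour and shifted forward one hour into LastHour.nc further below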
        if time == 0:
            # Some values require a 'LastHourValue'. For the first hour of the dataset, these are set to 0
            subprocess.run(
                f'cdo -seltimestep,1 -expr,"WVDLastHour=((TM >= 0)?0:0);Q0LastHour=((TM >= 0)?0:0);TSSHLastHour=((TM >= 0)?0:0);VASLastHour=((TM >= 0)?0:0);VRSLastHour=((TM >= 0)?0:0);WHSLastHour=((TM >= 0)?0:0);WDLastHour=((TM >= 0)?0:0);" -shifttime,1hour {tmpfile_path}background_data.nc {tmpfile_path}LastHour.nc',
                shell=True,
            )
        if time >= 1:
            # Create file with only one hour, add last hour values
            create_HH1_HH2(time)
            # subprocess.run(f'cdo -seltimestep,{str(time)} background_data.nc bg_tmp.nc', shell=True)  # this works, but shifts WD;WH by 1
            subprocess.run(
                f"cdo -seltimestep,{str(time+1)} {tmpfile_path}background_data.nc {tmpfile_path}bg_tmp.nc",
                shell=True,
            )
            subprocess.run(
                f"cdo -O -merge {tmpfile_path}bg_tmp.nc {tmpfile_path}LastHour.nc {tmpfile_path}hh1_hh2.nc {tmpfile_path}this_hour.nc",
                shell=True,
            )
            subprocess.run(f"rm {tmpfile_path}bg_tmp.nc", shell=True)
            # Create TSSH, VAS, VRS
            create_TSSH_VAS(time)
            create_VRS(time)
            subprocess.run(
                f"cdo -selname,TM,UM,Q0,RR,FF,BT,TSSH,VAS,SPH,WVD {tmpfile_path}this_hour.nc {tmpfile_path}background_tmp.nc",
                shell=True,
            )
            subprocess.run(
                f"cdo -merge {tmpfile_path}background_tmp.nc {tmpfile_path}VRS.nc {tmpfile_path}background_data_{str(time)}.nc",
                shell=True,
            )

            # Create WHS
            create_WHS_WH(time)

            file = nc.Dataset(f"{tmpfile_path}bg_final_{str(time)}.nc", "r")
            timestep = file.variables["time"][:]
            date = (
                datetime.fromtimestamp(timestep[0])
                .astimezone(local_timezone)
                .strftime("%Y-%m-%d")
            )
            hour = (
                datetime.fromtimestamp(timestep[0])
                .astimezone(local_timezone)
                .strftime("%H")
            )
            subprocess.run(
                f'cdo -expr,"TSWH=(WHS==0)?0:sumWHTM;IR=(TSWH>=40?1:0);RISK=(IR==1?(TSWH/40)*VRS:0);VRS=(VRS==0?0:VRS);" {tmpfile_path}bg_final_{str(time)}.nc {tmpfile_path}final_{str(date)}_{str(hour)}.nc',
                shell=True,
            )
            if str(hour) == "00" or time == 1:
                subprocess.run(
                    f"cp {tmpfile_path}final_{str(date)}_{str(hour)}.nc {outtmp_path}final_{str(date)}.nc",
                    shell=True,
                )
            else:
                subprocess.run(
                    f"cp {outtmp_path}final_{str(date)}.nc {tmpfile_path}tmp_{str(date)}.nc",
                    shell=True,
                )
                subprocess.run(
                    f"cdo -O -mergetime {tmpfile_path}tmp_{str(date)}.nc {tmpfile_path}final_{str(date)}_{str(hour)}.nc {outtmp_path}final_{str(date)}.nc",
                    shell=True,
                )
                subprocess.run(
                    f"rm {tmpfile_path}tmp_{str(date)}.nc {tmpfile_path}final_{str(date)}_{str(hour)}.nc",
                    shell=True,
                )

            # Create last hour values
            subprocess.run(f"rm {tmpfile_path}LastHour.nc", shell=True)
            subprocess.run(
                f"cdo -selname,WHS,TSSH,VAS,VRS,WD,Q0,WVD {tmpfile_path}bg_final_{str(time)}.nc {tmpfile_path}LastHour_tmp.nc",
                shell=True,
            )
            subprocess.run(
                f"cdo -chname,WD,WDLastHour -chname,WHS,WHSLastHour -chname,TSSH,TSSHLastHour -chname,VAS,VASLastHour -chname,VRS,VRSLastHour -chname,WVD,WVDLastHour -chname,Q0,Q0LastHour -shifttime,1hour {tmpfile_path}LastHour_tmp.nc {tmpfile_path}LastHour.nc",
                shell=True,
            )
            subprocess.run(f"rm {tmpfile_path}bg_final_{str(time)}.nc", shell=True)

def create_WHS_WH(time):
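    # For each hour, the five following timesteps (j = 0..4) of prepare_WHS.nc are used to
    # derive the wetness indicators WHC, WHS, WD and WH, with BT also taken one and two
    # timesteps further ahead (BT1, BT2)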
    for j in range(5):
        subprocess.run(
            f"cdo -O -chname,WVD,WVDLastHour -selname,WVD -seltimestep,{str(time+j+1)} {tmpfile_path}prepare_WHS.nc {tmpfile_path}WVD_LastHourtmp.nc",
            shell=True,
        )
        subprocess.run(
            f"cdo -O -shifttime,1hour {tmpfile_path}WVD_LastHourtmp.nc {tmpfile_path}WVD_LastHour.nc",
            shell=True,
        )
        subprocess.run(
            f"cdo -O -merge -seltimestep,{str(time+j+1)} {tmpfile_path}prepare_WHS.nc {tmpfile_path}WVD_LastHour.nc {tmpfile_path}WHStmp.nc",
            shell=True,
        )
        os.remove(f"{tmpfile_path}WVD_LastHour.nc")
        subprocess.run(
            f"cdo -O -chname,BT,BT1 -selname,BT -seltimestep,{str(time+j+2)} {tmpfile_path}prepare_WHS.nc {tmpfile_path}BT1.nc",
            shell=True,
        )
        subprocess.run(
            f"cdo -O -chname,BT,BT2 -selname,BT -seltimestep,{str(time+j+3)} {tmpfile_path}prepare_WHS.nc {tmpfile_path}BT2.nc",
            shell=True,
        )
        subprocess.run(
            f"cdo -O -merge {tmpfile_path}WHStmp.nc {tmpfile_path}BT1.nc {tmpfile_path}BT2.nc {tmpfile_path}WHS_background.nc",
            shell=True,
        )
        os.remove(f"{tmpfile_path}WHStmp.nc")
        # TODO WHS is not equal to 0 or 1 for all cases
        subprocess.run(
            f'cdo -aexpr,"WHC=((WVD < 340) || (RR > 0.1)) ? 1 : 0; WHS=(((WVDLastHour + WVD) < 180) || (RR > 0.1) || (BT > 42) || ((BT+BT1+BT2)>120)) ? 1 : 0;" {tmpfile_path}WHS_background.nc {tmpfile_path}WHS_{str(time+j)}.nc',
            shell=True,
        )
        os.remove(f"{tmpfile_path}WHS_background.nc")
        if j == 0:
            subprocess.run(
                f"cdo -O -selname,WDLastHour {tmpfile_path}LastHour.nc {tmpfile_path}LastHourWD.nc",
                shell=True,
            )
        subprocess.run(
            f"cdo -O -merge {tmpfile_path}LastHourWD.nc {tmpfile_path}WHS_{str(time+j)}.nc {tmpfile_path}WHS_final.nc",
            shell=True,
        )
        subprocess.run(
            f'cdo -aexpr,"WD=(WHC*(WDLastHour+WHS))" {tmpfile_path}WHS_final.nc {tmpfile_path}WD_{str(time+j)}.nc',
            shell=True,
        )
        subprocess.run(
            f'cdo -aexpr,"WH=(WD > 0 ? 1 : 0)" {tmpfile_path}WD_{str(time+j)}.nc {tmpfile_path}WH_{str(time+j)}.nc',
            shell=True,
        )
        if j == 0:
            subprocess.run(
                f"cdo -selname,WHS,WD,WH {tmpfile_path}WH_{str(time+j)}.nc {tmpfile_path}WHS.nc",
                shell=True,
            )
        subprocess.run(
            f"cdo -O chname,WD,WDLastHour -selname,WD -shifttime,1hour {tmpfile_path}WD_{str(time+j)}.nc {tmpfile_path}LastHourWD.nc",
            shell=True,
        )
        if j == 4:
            first_file = nc.Dataset(
                f"{tmpfile_path}background_data_{str(time)}.nc", "r"
            )
            time_var = first_file.variables["time"]
            dtime = nc.num2date(time_var[:], time_var.units)
            date = dtime[0].isoformat().split("T")
            subprocess.run(
                f"cdo -O mergetime {tmpfile_path}WH_*.nc {tmpfile_path}WH{str(time)}.nc",
                shell=True,
            )
            # Need to sum down to a single value
            subprocess.run(
                f'cdo -setdate,{date[0]} -settime,{date[1]} -timsum -expr,"sumWHTM = (WH == 1) ? (TM) : 0" {tmpfile_path}WH{str(time)}.nc {tmpfile_path}WHTM.nc',
                shell=True,
            )
            subprocess.run(
                f"cdo -merge {tmpfile_path}background_data_{str(time)}.nc {tmpfile_path}WHTM.nc {tmpfile_path}WHS.nc {tmpfile_path}bg_final_{str(time)}.nc",
                shell=True,
            )
            subprocess.run(
                f"rm {tmpfile_path}background_data_{str(time)}.nc", shell=True
            )
            subprocess.run(f"rm {tmpfile_path}W*.nc", shell=True)

def create_VRS(time):
subprocess.run(f'cdo -aexpr,"RTA=(((Q0-Q0LastHour)>7)?1:0)+((WVD-WVDLastHour)>=15?1:0);IRTA=(1-(BT/80));SFRS=((1-(Q0-270)/540)/1.5);" {tmpfile_path}this_hour.nc {tmpfile_path}this_hr.nc', shell=True)
subprocess.run(f'cdo -expr,"VRS=(((VAS*RTA*IRTA*SFRS + (0.95*VRSLastHour*SFRS))/(1+(0.1*RR)))/(1+(0.1*WHSLastHour*VRSLastHour)))" {tmpfile_path}this_hr.nc {tmpfile_path}VRS.nc', shell=True)
subprocess.run(f'rm {tmpfile_path}this_hr.nc', shell=True)
subprocess.run(
f'cdo -aexpr,"RTA=(((Q0-Q0LastHour)>7)?1:0)+((WVD-WVDLastHour)>=15?1:0);IRTA=(1-(BT/80));SFRS=((1-(Q0-270)/540)/1.5);" {tmpfile_path}this_hour.nc {tmpfile_path}this_hr.nc',
shell=True,
)
subprocess.run(
f'cdo -expr,"VRS=(((VAS*RTA*IRTA*SFRS + (0.95*VRSLastHour*SFRS))/(1+(0.1*RR)))/(1+(0.1*WHSLastHour*VRSLastHour)))" {tmpfile_path}this_hr.nc {tmpfile_path}VRS.nc',
shell=True,
)
subprocess.run(f"rm {tmpfile_path}this_hr.nc", shell=True)
def create_TSSH_VAS(time):
    subprocess.run(
        f'cdo -O -aexpr,"TSSH=((HH1==1)?HH2*(TSSHLastHour+TM):HH2*0.75*TSSHLastHour);SPH=((TSSH>87)?1:0);VAS=((VASLastHour*0.95*(1-((WVD-220)/6000))+SPH)/(1+0.3*RR))" {tmpfile_path}this_hour.nc {tmpfile_path}this_hour_tmp.nc',
        shell=True,
    )
    os.rename(src="tmp/this_hour_tmp.nc", dst="tmp/this_hour.nc")

def create_HH1_HH2(time):
    # Computed on an hourly basis
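    # HH1 and HH2 are 0/1 high-humidity flags: set to 1 when the water vapour deficit WVD
    # is at or below the thresholds used below (220 and 520, respectively)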
    subprocess.run(
        f'cdo -O -expr,"HH1=(WVD <= 220) ? 1 : 0; HH2=(WVD <= 520) ? 1 : 0" -seltimestep,{str(time+1)} {tmpfile_path}background_data.nc {tmpfile_path}hh1_hh2.nc',
        shell=True,
    )

def create_saturation():
    # This is fixed for all hours and should be available in the background data
    expr = "aexpr,SP=(0.61078*exp(17.269*TM/(TM+237.3)))"
    cdo_command = [
        "cdo",
        expr,
        filename,
        bg_filename,
    ]
    run_cdo(cdo_command=cdo_command)

def create_pressure():
    # This is fixed for all hours and should be available in the background data
    expr = "aexpr,PP=UM*SP/100"
    cdo_command = [
        "cdo",
        expr,
        bg_filename,
        tmp_filename,
    ]
    run_cdo(cdo_command=cdo_command)
    os.rename(src=tmp_filename, dst=bg_filename)

def create_wvd():
    # This is fixed for all hours and should be available in the background data
    expr = "aexpr,WVD=(SP-PP)*1000"
    cdo_command = [
        "cdo",
        expr,
        bg_filename,
        tmp_filename,
    ]
    run_cdo(cdo_command=cdo_command)
    os.rename(src=tmp_filename, dst=bg_filename)

def create_BT():
    # BT is not available in the dataset and needs to be calculated and added to background_data.nc
    expr = "aexpr,BT=((RR > 0 || (((100-UM)/100)*6.112*exp(17.67*TM/(TM+243.5))) < 2)) ? 60 : 0"
    cdo_command = [
        "cdo",
        expr,
        bg_filename,
        tmp_filename,
    ]
    run_cdo(cdo_command=cdo_command)
    os.rename(src=tmp_filename, dst=bg_filename)

def prepare_WHS():
#system("cdo selname,RR,BT,WVD background_data.nc prepareWHS.nc")
my_variables = ['TM', 'RR', 'BT', 'WVD']
variable_list=','.join(my_variables)
# system("cdo selname,RR,BT,WVD background_data.nc prepareWHS.nc")
my_variables = ["TM", "RR", "BT", "WVD"]
variable_list = ",".join(my_variables)
    cdo_command = [
        "cdo",
        "selname," + variable_list,
        bg_filename,
        prepareWHS,
    ]
    run_cdo(cdo_command=cdo_command)

def run_cdo(cdo_command):
    try:
        print(cdo_command)
@@ -357,4 +509,5 @@ def run_cdo(cdo_command):
        logging.error(f"CDO command {cdo_command[1]} failed: {e}")
        quit()

create_dataset()
# Use this as an example to create your own .env file
# Where your application resides
HOME_DIR=/home/foo/model/
# Path to the weather data
WEATHER_DATA_DIR=in/
# The model's start date
START_DATE=2024-05-15
# Path to optional CSV file with polygons for masking result.
MASK_FILE=Norge_landomrader.csv
# Path to the output (GeoTIFF) files as seen from the running model code
DATA_DIR=out/
# Path to the generated mapfile as seen from the running model code
MAPFILE_DIR=mapfile/
# The path to the output (GeoTIFF files) as seen from Mapserver
MAPSERVER_DATA_DIR=/foo/mapserver/data/NAERSTADMO/
# Where your generated MAPFILE and query templates should be placed
MAPSERVER_MAPFILE_DIR=/foo/mapserver/wms/NAERSTADMO/
# Where mapserver logs for this WMS are written
MAPSERVER_LOG_FILE=/foo/mapserver/log/NAERSTADMO.log
# Path to the temporary directory for writing temporary files and images. Must be writable by the user the web server is running as
MAPSERVER_IMAGE_PATH=/foo/mapserver/tmp/
# The value of the EXTENT parameter in Mapserver's mapfile. Units are DD (Decimal degrees)
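# (Order: minx miny maxx maxy)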
MAPSERVER_EXTENT="-1.5831861262936526 52.4465003983706595 39.2608060398730458 71.7683216082912736"