Grid NAERSTADMO: commit ecfdcf1e

Merge branch 'main' into release

Authored 3 months ago by Lene Wasskog
Parents: fd56b496, 339121e5
No related tags or merge requests reference this commit.

4 changed files with 111 additions and 49 deletions:

    NAERSTADMO.py      +63 −29
    README.md          +12 −7
    env-sample          +8 −4
    run_NAERSTADMO.sh  +28 −9
NAERSTADMO.py (+63 −29)
@@ -20,6 +20,7 @@
 # Author: Brita Linnestad <brita.linnestad@nibio.no>

 import os
+import sys
 import subprocess, glob
 from dotenv import load_dotenv
 from datetime import datetime, timedelta
@@ -27,38 +28,64 @@ from jinja2 import Environment, FileSystemLoader
 import logging
 import pytz
+import shutil
 import configparser
 import netCDF4 as nc
 import numpy as np

-logging.basicConfig(level=logging.INFO)
 load_dotenv()

+DEBUG = (
+    False
+    if os.getenv("DEBUG") is None or os.getenv("DEBUG").lower() == "false"
+    else True
+)
+
+logging.basicConfig(
+    level=logging.DEBUG if DEBUG else logging.INFO,
+    format="%(asctime)s - %(levelname).4s - (%(filename)s:%(lineno)d) - %(message)s",
+)
+
 # Get language stuff
 config = configparser.ConfigParser()
 config.read("NAERSTADMO.cfg")

+local_timezone = pytz.timezone(os.getenv("LOCAL_TIMEZONE"))
+today = datetime.now(local_timezone)
+if len(sys.argv) > 1:
+    year = int(sys.argv[1])
+else:
+    year = today.year
+
+# Don't run if before start_date
+recurring_start_date = os.getenv("RECURRING_START_DATE")
+model_start_date = datetime.strptime(f"{year}-{recurring_start_date}", "%Y-%m-%d")
+if datetime.now() <= model_start_date:
+    logging.error(f"Today is before the configured start date of {model_start_date}. Exiting.")
+    exit(0)
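The season gate added above combines an optional command-line year with the month-day stored in `RECURRING_START_DATE`. A condensed, standalone sketch of that logic, simplified for illustration (the script itself resolves "today" in the configured `LOCAL_TIMEZONE`):

```python
import sys
from datetime import datetime

# Optional first argument selects the season; default is the current year.
year = int(sys.argv[1]) if len(sys.argv) > 1 else datetime.now().year

recurring_start_date = "05-15"  # normally read from the RECURRING_START_DATE env variable
model_start_date = datetime.strptime(f"{year}-{recurring_start_date}", "%Y-%m-%d")

if datetime.now() <= model_start_date:
    raise SystemExit(f"Today is before the configured start date of {model_start_date}. Exiting.")
```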
-infile_path = os.getenv("WEATHER_DATA_DIR")
-outfile_path = os.getenv("DATA_DIR")
-outtmp_path = "out/"
-tmpfile_path = "tmp/"
+home_dir = os.getenv('HOME_DIR')
+infile_path = f"{os.getenv('WEATHER_DATA_DIR')}{year}/"
+outtmp_path = f"out/{year}/"
+os.makedirs(outtmp_path, exist_ok=True)
+tmpfile_path = f"tmp/{year}/"
+os.makedirs(tmpfile_path, exist_ok=True)
+outfile_path = f"{os.getenv('DATA_DIR')}{year}/"
+os.makedirs(outfile_path, exist_ok=True)
+mapfile_outdir = f"{os.getenv('MAPFILE_DIR')}{year}/"
+os.makedirs(mapfile_outdir, exist_ok=True)

 bg_filename = f"{tmpfile_path}background_data.nc"
 tmp_filename = f"{tmpfile_path}background_data_tmp.nc"
 prepareWHS = f"{tmpfile_path}prepare_WHS.nc"
-utc_offset = "+02:00"
-local_timezone = pytz.timezone("Europe/Oslo")
 filename = f"{tmpfile_path}weather_data.nc"


 def create_dataset():
     # Find the latest file from previous run to create a start date
     last_final_date = None
     list_of_files = glob.glob(
-        f"{outtmp_path}final_2[0-9][0-9][0-9]-[01][0-9]-[0123][0-9].nc", recursive=True
+        f"{outtmp_path}final_{year}-[01][0-9]-[0123][0-9].nc", recursive=True
     )
     if list_of_files:
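Both glob patterns are now pinned to the selected year instead of matching any 2xxx date, so a rerun for an earlier season only touches that year's files. A small illustration of the resulting pattern (directory layout follows the new out/<year>/ convention; the file names are examples, not repository data):

```python
import glob

year = 2024
outtmp_path = f"out/{year}/"

# Matches out/2024/final_2024-06-01.nc but not a leftover out/2024/final_2023-06-01.nc
pattern = f"{outtmp_path}final_{year}-[01][0-9]-[0123][0-9].nc"
list_of_files = glob.glob(pattern, recursive=True)
```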
@@ -70,16 +97,16 @@ def create_dataset():
             if last_final_date is None or last_final_date < file_date:
                 last_final_date = file_date

-    start_date = datetime.strptime(os.getenv("START_DATE"), "%Y-%m-%d")
+    start_date = model_start_date
     if last_final_date is not None:
         start_date = datetime.strptime(last_final_date, "%Y-%m-%d") - timedelta(days=4)
-        print(
+        logging.info(
             f"Last date of final calculations is {last_final_date}. Start date = {start_date}"
         )

     # Find the set of data to merge and use as input file based on start date
     list_weatherdata_files = glob.glob(
-        f"{infile_path}/met_1_0km_nordic-2[0-9][0-9][0-9]-[01][0-9]-[0123][0-9].nc"
+        f"{infile_path}/met_1_0km_nordic-{year}-[01][0-9]-[0123][0-9].nc"
     )
     for file in list_weatherdata_files:
@@ -87,14 +114,14 @@ def create_dataset():
         file_date = file_name[
             file_name.index("nordic-") + 7 : file_name.index("nordic-") + 17
         ]
         end_date = None
         end_date = start_date + timedelta(days=5)
         if file_date >= start_date.strftime(
             "%Y-%m-%d"
         ) and file_date <= end_date.strftime("%Y-%m-%d"):
-            if os.path.exists(f"{tmpfile_path}weather_data.nc") != True:
+            logging.info(f"Work on date {file_date}")
+            if not os.path.exists(f"{tmpfile_path}weather_data.nc"):
                 subprocess.run(f"cp {file} {tmpfile_path}weather_data.nc", shell=True)
             else:
                 subprocess.run(
@@ -109,7 +136,7 @@ def create_dataset():
     # Ensure that model is not run if weather data is not available
     if not os.path.exists(f"{tmpfile_path}weather_data.nc"):
-        print(f"{tmpfile_path}weather_data.nc does not exist. Exit.")
+        logging.error(f"{tmpfile_path}weather_data.nc does not exist. Exit.")
         return

     subprocess.run(f"rm {outtmp_path}final_*", shell=True)
@@ -146,7 +173,7 @@ def create_warning_status(start_date):
     # Env variable MASK_FILE must be set
     if os.getenv("MASK_FILE") is not None:
         mask_file = os.getenv("MASK_FILE")
-        print(f"Applying mask file {mask_file} to result.nc")
+        logging.info(f"Applying mask file {mask_file} to result.nc")
         subprocess.run(
             f"cdo -maskregion,{mask_file} {tmpfile_path}result_unmasked.nc {tmpfile_path}result_{file_date}.nc",
             shell=True,
@@ -226,9 +253,15 @@ def create_warning_status(start_date):
             "language_codes": language_codes,
         }
     )

-    mapfile_outdir = os.getenv("MAPFILE_DIR")
-    with open(f"{mapfile_outdir}NAERSTADMO.map", "w") as f:
+    with open(f"{mapfile_outdir}/NAERSTADMO.map", "w") as f:
         f.write(output)

+    query_template = os.path.join(home_dir, "mapfile/query_template.xml")
+    query_template_IR = os.path.join(home_dir, "mapfile/query_template_IR.xml")
+    query_template_RISK = os.path.join(home_dir, "mapfile/query_template_RISK.xml")
+    shutil.copy(query_template, mapfile_outdir)
+    shutil.copy(query_template_IR, mapfile_outdir)
+    shutil.copy(query_template_RISK, mapfile_outdir)
+
     subprocess.run(f"rm {tmpfile_path}*", shell=True)
@@ -329,6 +362,8 @@ def create_matrix():

 def create_WHS_WH(time):
+    logging.info(f"Create WHS_WH for {time}")
     for j in range(5):
         subprocess.run(
             f"cdo -O -chname,WVD,WVDLastHour -selname,WVD -seltimestep,{str(time + j + 1)} {tmpfile_path}prepare_WHS.nc {tmpfile_path}WVD_LastHourtmp.nc",
@@ -426,6 +461,7 @@ def create_WHS_WH(time):

 def create_VRS(time):
+    logging.info("Create VRS")
     subprocess.run(
         f'cdo -aexpr,"RTA=(((Q0-Q0LastHour)>7)?1:0)+((WVD-WVDLastHour)>=15?1:0);IRTA=(1-(BT/80));SFRS=((1-(Q0-270)/540)/1.5);" {tmpfile_path}this_hour.nc {tmpfile_path}this_hr.nc',
         shell=True,
@@ -438,11 +474,12 @@ def create_VRS(time):

 def create_TSSH_VAS(time):
+    logging.info("Create TSSH_VAS")
     subprocess.run(
         f'cdo -O -aexpr,"TSSH=((HH1==1)?HH2*(TSSHLastHour+TM):HH2*0.75*TSSHLastHour);SPH=((TSSH>87)?1:0);VAS=((VASLastHour*0.95*(1-((WVD-220)/6000))+SPH)/(1+0.3*RR))" {tmpfile_path}this_hour.nc {tmpfile_path}this_hour_tmp.nc',
         shell=True,
     )
-    os.rename(src="tmp/this_hour_tmp.nc", dst="tmp/this_hour.nc")
+    os.rename(src=f"{tmpfile_path}this_hour_tmp.nc", dst=f"{tmpfile_path}this_hour.nc")


 def create_HH1_HH2(time):
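For readers less used to cdo's `-aexpr` ternaries, the TSSH/SPH/VAS update above corresponds to element-wise array operations. A rough NumPy restatement of the same expression, for illustration only (the variable names mirror the cdo expression; this function is not part of the repository):

```python
import numpy as np

def tssh_vas_step(HH1, HH2, TM, TSSHLastHour, VASLastHour, WVD, RR):
    # TSSH = (HH1==1) ? HH2*(TSSHLastHour+TM) : HH2*0.75*TSSHLastHour
    TSSH = np.where(HH1 == 1, HH2 * (TSSHLastHour + TM), HH2 * 0.75 * TSSHLastHour)
    # SPH = (TSSH > 87) ? 1 : 0
    SPH = np.where(TSSH > 87, 1.0, 0.0)
    # VAS = (VASLastHour*0.95*(1-((WVD-220)/6000)) + SPH) / (1 + 0.3*RR)
    VAS = (VASLastHour * 0.95 * (1 - (WVD - 220) / 6000) + SPH) / (1 + 0.3 * RR)
    return TSSH, SPH, VAS
```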
@@ -454,6 +491,7 @@ def create_HH1_HH2(time):

 def create_saturation():
+    logging.info("Create saturation")
     # This is fixed for all hours and should be available in the background data
     expr = "aexpr,SP=(0.61078*exp(17.269*TM/(TM+237.3)))"
     cdo_command = [
@@ -466,6 +504,7 @@ def create_saturation():

 def create_pressure():
+    logging.info("Create pressure")
     # This is fixed for all hours and should be available in the background data
     expr = "aexpr,PP=UM*SP/100"
     cdo_command = [
@@ -479,6 +518,7 @@ def create_pressure():

 def create_wvd():
+    logging.info("Create wvd")
     # This is fixed for all hours and should be available in the background data
     expr = "aexpr,WVD=(SP-PP)*1000"
     cdo_command = [
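Taken together, the expressions in create_saturation, create_pressure and create_wvd compute a Magnus-type saturation vapour pressure from temperature, the actual vapour pressure from relative humidity, and their difference as the water vapour deficit. A minimal NumPy sketch of the same chain; the unit comments are the editor's reading, not stated in the source:

```python
import numpy as np

def water_vapour_deficit(TM, UM):
    """TM: air temperature, UM: relative humidity in percent."""
    SP = 0.61078 * np.exp(17.269 * TM / (TM + 237.3))  # saturation vapour pressure, presumably kPa
    PP = UM * SP / 100                                  # actual vapour pressure
    WVD = (SP - PP) * 1000                              # water vapour deficit (scaled by 1000)
    return WVD
```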
@@ -493,7 +533,7 @@ def create_wvd():

 def create_BT():
     # BT is not available in the dataset and need to be calculted and added to background_data.nc
+    logging.info("Create BT")
     expr = "aexpr,BT=((RR > 0 || (((100-UM)/100)*6.112*exp(17.67*TM/(TM+243.5))) < 2)) ? 60 : 0"
     cdo_command = [
         "cdo",
@@ -507,6 +547,7 @@ def create_BT():

 def prepare_WHS():
     # system("cdo selname,RR,BT,WVD background_data.nc prepareWHS.nc")
+    logging.info("Prepare WHS")
     my_variables = ["TM", "RR", "BT", "WVD"]
     variable_list = ",".join(my_variables)
     cdo_command = [
@@ -520,18 +561,11 @@ def prepare_WHS():

 def run_cdo(cdo_command):
     try:
-        print(cdo_command)
         subprocess.run(cdo_command, check=True)
         logging.info(f"CDO command {cdo_command[1]} executed successfully.")
     except subprocess.CalledProcessError as e:
         logging.error(f"CDO command {cdo_command[1]} failed:", e)
         quit()


-# Don't run if before start_date
-start_date = datetime.strptime(os.getenv("START_DATE"), "%Y-%m-%d")
-if datetime.now() <= start_date:
-    print(f"Today is before the configured start date of {start_date}. Exiting.")
-    exit(0)
-
 # Run model
 create_dataset()
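run_cdo takes the list form used by subprocess.run and logs the second list element, i.e. the cdo operator, on success or failure. A hypothetical call, assuming run_cdo from NAERSTADMO.py is in scope and with placeholder file names, might look like:

```python
# Placeholder paths; selname extracts the TM and RR variables into a new NetCDF file.
run_cdo(["cdo", "selname,TM,RR", "tmp/2024/background_data.nc", "tmp/2024/TM_RR.nc"])
```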
README.md (+12 −7)
@@ -24,12 +24,14 @@ The model assumes weather data files named `met_1_0km_nordic-[YYYY-MM-DD].nc` wi
 It is required that you have set the following environment variables:

 ```bash
-# Path to this code (HOME_DIR + NAERSTADMO)
-HOME_DIR=/foobar/gridmodels/
+# Path to this code
+HOME_DIR=/foobar/gridmodels/NAERSTADMO/
 # Path to the weather data files. Expecting hourly values in files named met_1_0km_nordic-[YYYY-MM-DD].nc
 WEATHER_DATA_DIR=/foobar/met_1_0km_nordic/2024/
-# Start date for the model
-START_DATE=2024-05-15
+# Start date for the model (MM-DD)
+RECURRING_START_DATE=05-15
+# Local time zone
+LOCAL_TIMEZONE=Europe/Oslo
 # Use this file to crop the output of the grid
 MASK_FILE=Norge_landomrader.csv
 # Where the GeoTIFF files will be output
@@ -46,21 +48,24 @@ MAPSERVER_LOG_FILE=/foobar2/mapserver/log/NAERSTADMO.log
 MAPSERVER_IMAGE_PATH=/foobar2/mapserver/tmp/
 # Extent of map (written to mapfile)
 MAPSERVER_EXTENT="-1.5831861262936526 52.4465003983706595 39.2608060398730458 71.7683216082912736"
+# Whether or not to debug log. Default value is False
+DEBUG=False
 ```

 ...this is the contents of the `env-sample` file

 ```bash
-$ ./run_PSILARTEMP.sh
+$ ./run_NAERSTADMO.sh
 ```

 This creates a Python virtualenv, installs all the Python dependencies, runs the model and stores output in a log file.

-Alternatively, primarily for development purposes, you can run the Python script PSILARTEMP directly:
+The script can be run for a specific year like this:

 ```bash
-$ ./PSILARTEMP.py
+$ ./run_NAERSTADMO.sh 2024
 ```

 #### Viewing the result of the model

 **TODO** Add more details
env-sample (+8 −4)
 # Use this example to create your own .env file

-# Path to this code (HOME_DIR + NAERSTADMO)
-HOME_DIR=/foobar/gridmodels/
+# Path to this code
+HOME_DIR=/foobar/gridmodels/NAERSTADMO/
 # Path to the weather data files. Expecting hourly values in files named met_1_0km_nordic-[YYYY-MM-DD].nc
 WEATHER_DATA_DIR=/foobar/met_1_0km_nordic/2024/
-# Start date for the model
-START_DATE=2024-05-15
+# Start date for the model (MM-DD)
+RECURRING_START_DATE=05-15
+# Local time zone
+LOCAL_TIMEZONE=Europe/Oslo
 # Use this file to crop the output of the grid
 MASK_FILE=Norge_landomrader.csv
 # Where the GeoTIFF files will be output
@@ -21,3 +23,5 @@ MAPSERVER_LOG_FILE=/foobar2/mapserver/log/NAERSTADMO.log
 MAPSERVER_IMAGE_PATH=/foobar2/mapserver/tmp/
 # Extent of map (written to mapfile)
 MAPSERVER_EXTENT="-1.5831861262936526 52.4465003983706595 39.2608060398730458 71.7683216082912736"
+# Whether or not to debug log. Default value is False
+DEBUG=False
\ No newline at end of file
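NAERSTADMO.py reads this file through python-dotenv, so a `.env` created from this sample is picked up automatically at start-up. A minimal sketch of how a few of the values are consumed, simplified relative to the actual script:

```python
import os
from dotenv import load_dotenv

load_dotenv()  # reads .env from the working directory

home_dir = os.getenv("HOME_DIR")                          # e.g. /foobar/gridmodels/NAERSTADMO/
recurring_start_date = os.getenv("RECURRING_START_DATE")  # "MM-DD", e.g. 05-15
debug = (os.getenv("DEBUG") or "False").lower() != "false"
```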
run_NAERSTADMO.sh (+28 −9)
@@ -18,9 +18,25 @@
 # Configures environment and logging before running the model
 # @author: Tor-Einar Skog <tor-einar.skog@nibio.no>

+validate_year() {
+    if [[ $1 =~ ^[0-9]{4}$ ]]; then
+        return 0
+    else
+        return 1
+    fi
+}
+
+# Check if the year parameter is passed and validate it
+if [ -n "$1" ]; then
+    if validate_year "$1"; then
+        year=$1
+    else
+        echo "Invalid year: $1. Please provide a valid 4-digit year."
+        exit 1
+    fi
+fi
+
-# First: Test that we have CDO and GDAL installed
+# Test that we have CDO and GDAL installed
 if ! command -v cdo &> /dev/null
 then
     echo "ERROR: CDO could not be found. Exiting."
@@ -44,11 +60,10 @@ then
 fi

 # Paths to scripts and requirements
-APP_PATH=${HOME_DIR}NAERSTADMO/
-LOG_FILE=${APP_PATH}log/NAERSTADMO.log
-REQUIREMENTS=${APP_PATH}requirements.txt
-cd $APP_PATH
+LOG_FILE=${HOME_DIR}log/NAERSTADMO.log
+REQUIREMENTS=${HOME_DIR}requirements.txt
+cd $HOME_DIR

 # Create and activate the virtual environment
 python3 -m venv .venv
@@ -56,10 +71,14 @@ python3 -m venv .venv
 python3 -m pip install -q --upgrade pip
 pip install -q -r $REQUIREMENTS

-# Run the model
-echo "==== `date`: Running model" &>> "$LOG_FILE"
-python3 $APP_PATH/NAERSTADMO.py &>> "$LOG_FILE"
-echo "==== `date`: DONE running model" &>> "$LOG_FILE"
+if [ -z "${year}" ]; then
+    echo "==== `date`: Run model for current year" >> "$LOG_FILE" 2>&1
+    python3 ${HOME_DIR}NAERSTADMO.py >> "$LOG_FILE" 2>&1
+else
+    echo "==== `date`: Run model for $year" >> "$LOG_FILE" 2>&1
+    python3 ${HOME_DIR}NAERSTADMO.py "$year" >> "$LOG_FILE" 2>&1
+fi
+echo "==== `date`: DONE running model" >> "$LOG_FILE" 2>&1

 # Deactivate the virtual environment
 deactivate