From 66ac9d8d9f85a902fc5b13e90c13352511a7fa46 Mon Sep 17 00:00:00 2001
From: lewa <lene.wasskog@nibio.no>
Date: Thu, 18 Apr 2024 10:25:19 +0200
Subject: [PATCH] feat: First version of ADASMELIAE, with pipeline config

---
 ADASMELIAE.cfg                                |   8 +-
 ADASMELIAE.py                                 | 434 +++++++++---------
 README.md                                     |  61 +--
 __pycache__/ADASMELIAE.cpython-311.pyc        | Bin 0 -> 12913 bytes
 ...st_ADASMELIAE.cpython-311-pytest-7.4.0.pyc | Bin 0 -> 1340 bytes
 ...st_ADASMELIAE.cpython-311-pytest-8.1.1.pyc | Bin 0 -> 4207 bytes
 env-sample                                    |   4 +-
 mapfile/query_template.xml                    |   2 +-
 ...WHS.xml => query_template_temperature.xml} |   2 +-
 mapfile/template.j2                           | 104 ++---
 requirements.txt                              |   3 +-
 test_ADASMELIAE.py                            |  43 ++
 12 files changed, 341 insertions(+), 320 deletions(-)
 create mode 100644 __pycache__/ADASMELIAE.cpython-311.pyc
 create mode 100644 __pycache__/test_ADASMELIAE.cpython-311-pytest-7.4.0.pyc
 create mode 100644 __pycache__/test_ADASMELIAE.cpython-311-pytest-8.1.1.pyc
 rename mapfile/{query_template_WHS.xml => query_template_temperature.xml} (74%)
 create mode 100644 test_ADASMELIAE.py

diff --git a/ADASMELIAE.cfg b/ADASMELIAE.cfg
index 0d79cce..7c35058 100644
--- a/ADASMELIAE.cfg
+++ b/ADASMELIAE.cfg
@@ -3,15 +3,11 @@
 languages=en,nb 
 
 [i18n.en]
-no_risk = No infection risk
 low_risk = Low infection risk
-medium_risk = Medium infection risk
 high_risk = High infection risk
-whs = Wet Hour Sum (0-72)
+temperature = Temperature
 
 [i18n.nb]
-no_risk = Ingen infeksjonsrisiko
 low_risk = Lav infeksjonsrisiko
-medium_risk = Middels infeksjonsrisiko
 high_risk = Høy infeksjonsrisiko
-whs = Sum fuktige timer (0-72)
\ No newline at end of file
+temperature = Temperatur
\ No newline at end of file
diff --git a/ADASMELIAE.py b/ADASMELIAE.py
index 01a1c50..b7796da 100755
--- a/ADASMELIAE.py
+++ b/ADASMELIAE.py
@@ -24,7 +24,7 @@
 
 
 import os
-import sys
+import time
 import subprocess
 import glob
 from dotenv import load_dotenv
@@ -35,231 +35,243 @@ import pytz
 import netCDF4 as nc
 import configparser
 
-# Paths config
-# Create a .env file from dotenv-sample
 load_dotenv()
+
+DEBUG = os.getenv("DEBUG", "false").lower() != "false"
+
+model_id = os.getenv("MODEL_ID")
+filename_pattern = os.getenv("WEATHER_DATA_FILENAME_PATTERN")
+filename_dateformat = os.getenv("WEATHER_DATA_FILENAME_DATEFORMAT")
+
+# TODO Should start before the first European crops reach growth stage 51 and end when the northernmost regions have passed stage 59
+current_year = datetime.now().year
+local_timezone = pytz.timezone(os.getenv("LOCAL_TIMEZONE"))
+
+MODEL_START_DATE = datetime.strptime(
+    f"{current_year}-{os.getenv('START_DATE')}", "%Y-%m-%d"
+)
+MODEL_END_DATE = datetime.strptime(
+    f"{current_year}-{os.getenv('END_DATE')}", "%Y-%m-%d"
+)
+
+weather_data_dir = os.getenv("WEATHER_DATA_DIR")
+tmp_dir = os.getenv("TMP_DIR")
+data_dir = os.getenv("DATA_DIR")
+
 # Get language stuff
 config = configparser.ConfigParser()
 config.read("ADASMELIAE.cfg")
 
-DEBUG = False if os.getenv("DEBUG") is None or os.getenv("DEBUG").lower() == "false" else True
-
 logging.basicConfig(
     level=logging.DEBUG if DEBUG else logging.INFO,
     format="%(asctime)s - %(levelname).4s - (%(filename)s:%(lineno)d) - %(message)s",
 )
 
-# Path to weather data
-model_id = os.getenv("MODEL_ID")
-infile_path = os.getenv("WEATHER_DATA_DIR")
-# Used for iterating the weather data files
-filename_pattern = os.getenv("FILENAME_PATTERN")
-# Date format of weather data filenames
-filename_dateformat = os.getenv("FILENAME_DATEFORMAT")
-# Path to store generated GeoTIFF files
-outfile_path = os.getenv("DATA_DIR")
-# Where to store intermediary calculations
-tmpfile_path = "tmp/"
-# Names of weather parameters in NetCDF files
-RR = os.getenv("RR")
-UM = os.getenv("UM")
-local_timezone = pytz.timezone(os.getenv("LOCAL_TIMEZONE"))
+TM = "air_temperature_2m"
+TM_MAX = "TM_MAX"
+THRESHOLD = 15
+
 
-# Iterate the set of hourly weather data files
-# 1. When's the latest wh_[DATE].nc file? - set earliest weather data file: start_date = [DATE]-2 days
-# --> If no wh_DATE.nc file exists, set start_date = None
-start_date = None
-last_wh_date = None
-
-logging.info("Start running model")
-logging.info(f"Read files from path pattern: {tmpfile_path}wh_2[0-9][0-9][0-9]-[01][0-9]-[0123][0-9].nc")
-for wh_file in glob.glob(f"{tmpfile_path}wh_2[0-9][0-9][0-9]-[01][0-9]-[0123][0-9].nc"):
-    current_wh_file_date = local_timezone.localize(datetime.strptime(f"{wh_file[7:17]}", "%Y-%m-%d"))
-    if last_wh_date is None or last_wh_date < current_wh_file_date:
-        last_wh_date = current_wh_file_date
-if last_wh_date is not None:
-    start_date = last_wh_date - timedelta(days=2)
-    logging.info(f"Last date of WH calculations is {last_wh_date}. Start date = {start_date}")
-
-
-weatherdata_files = glob.glob(f"{infile_path}{filename_pattern}")
-logging.debug(f"{infile_path}{filename_pattern}")
-logging.debug("What are the weatherdata files?")
-logging.debug(weatherdata_files)
-for file_path in sorted(weatherdata_files):
-    # TODO: When filename/pattern is configurable: make the string search adaptable
-    file_name = os.path.basename(file_path)
-    # Skip if we don't have a complete date, which could indicate that we are looking at a yearly file of daily data
+# Helper for running shell commands with subprocess.run. Logs output and exits on failure.
+def run_command(command, shell=True, stdout=subprocess.PIPE):
+    logging.debug(f"{command}")
     try:
-        wh_sum_date = local_timezone.localize(datetime.strptime(file_name, filename_dateformat))
+        result = subprocess.run(
+            command,
+            shell=shell,
+            stdout=stdout,
+            stderr=subprocess.PIPE,
+            text=True,
+            check=True,
+        )
+        if result.stdout:
+            result_lines = result.stdout.splitlines()
+            for line in result_lines:
+                logging.debug(line.strip())
+            return result_lines
+        if result.stderr:
+            for line in result.stderr.splitlines():
+                logging.error(line.strip())
+    except subprocess.CalledProcessError as e:
+        logging.error(f"Command failed: '{command}'")
+        logging.error(f"{e}")
+        quit()
+
+
+# Iterate the set of previously calculated result files. Find the latest result date,
+# and return that date + 1 day. If no result files exist, return the given default date.
+def find_start_date(default_date):
+    result_file_names = os.listdir(data_dir)
+    result_file_names = [
+        file for file in result_file_names if file.startswith("result_2")
+    ]
+    try:
+        dates = [
+            datetime.strptime(file.split("_")[1].split(".")[0], "%Y-%m-%d")
+            for file in result_file_names
+        ]
+        logging.debug(f"Found results for the following dates: {dates}")
     except ValueError as e:
-        logging.info(e)
-        continue
-
-    # Only process files from the three last days (if this is not a work from scratch)
-    if start_date is not None and wh_sum_date < start_date:
-        continue
-
-    # Check that the file has at least 23 timesteps
-    with nc.Dataset(file_path, 'r') as weatherdata_file:
-        file_timesteps = len(weatherdata_file.variables["time"])
-        if file_timesteps < 23:
-            logging.info(f"{file_path} has {file_timesteps} timesteps. Skipping it.")
+        logging.error(f"Error parsing dates: {e}")
+        return MODEL_START_DATE
+    latest_result_date = max(dates, default=None)
+    return (
+        latest_result_date + timedelta(days=1) if latest_result_date else default_date
+    )
+
+
+def find_end_date(default_date):
+    today = datetime.now()
+    return today if today < default_date else default_date
+
+
+if __name__ == "__main__":
+    logging.info(f"Start model {model_id}")
+    start_time = time.time()
+
+    start_date = find_start_date(MODEL_START_DATE)
+    end_date = find_end_date(MODEL_END_DATE)
+    logging.info(
+        f"Start running model for start date {start_date.date()} and end date {end_date.date()}"
+    )
+
+    weather_data_file_pattern = f"{weather_data_dir}{filename_pattern}"
+    weather_data_files = glob.glob(weather_data_file_pattern)
+    logging.debug(f"Find weather data files: {weather_data_file_pattern}")
+    logging.debug(weather_data_files)
+
+    timestep_dates = []
+    for weather_data_file_path in sorted(weather_data_files):
+        weather_data_file_name = os.path.basename(weather_data_file_path)
+
+        # Skip if we don't have a valid date
+        try:
+            file_date = datetime.strptime(weather_data_file_name, filename_dateformat)
+        except ValueError as e:
+            logging.info(f"{weather_data_file_name} - Skip file due to invalid date")
+            logging.debug(e)
             continue
 
-    # Produce daily files with WH_SUM, which is the number of "Wet hours" (WH) for a given day
-    # WH is defined as RR > 0.2 || UM > 88.0
-    wh_sum_date_str = wh_sum_date.strftime("%Y-%m-%d")
-    wh_sum_hour_str = wh_sum_date.strftime("%H") 
-    subprocess.run(
-        f'cdo -s -O -setdate,{wh_sum_date_str} -settime,{wh_sum_hour_str}:00:00 -chname,WH,WH_DAYSUM -timsum -selname,WH -aexpr,"WH = {RR} > 0.2 || {UM} > 88.0 ? 1 : 0;" {file_path} {tmpfile_path}wh_{wh_sum_date_str}.nc',
-        shell=True
-        )
+        # Set up file names based on date YYYY-MM-DD
+        date_YYYY_MM_DD = file_date.strftime("%Y-%m-%d")
+        max_temp_file_path = f"{tmp_dir}max_temp_{date_YYYY_MM_DD}.nc"
+        result_unmasked_path = f"{tmp_dir}result_unmasked_{date_YYYY_MM_DD}.nc"
+        result_path = f"{tmp_dir}result_{date_YYYY_MM_DD}.nc"
+
+        # Only process files from within valid interval
+        if file_date < start_date or file_date > end_date:
+            logging.debug(
+                f"{weather_data_file_name} - Skip file with date outside calculation period"
+            )
+            continue
 
-# Concatenate daily files > one file with daily values
-# Additive calculation - this file can be stored between calculations
-subprocess.run(f'cdo -s -O mergetime {tmpfile_path}wh_2[0-9][0-9][0-9]-[01][0-9]-[0123][0-9].nc {tmpfile_path}wh_daysum.nc', shell=True)
-
-# Check timesteps - that we have no missing data
-wh_daysum = nc.Dataset(f'{tmpfile_path}wh_daysum.nc', 'r')
-timesteps = wh_daysum.variables["time"][:]
-previous_timestep = None
-for timestep in timesteps:
-    if previous_timestep is not None:
-        if timestep - previous_timestep != 86400.0:
-            timestep_str = datetime.fromtimestamp(timestep).astimezone(local_timezone).strftime("%Y-%m-%d")
-            previous_timestep_str = datetime.fromtimestamp(previous_timestep).astimezone(local_timezone).strftime("%Y-%m-%d")
-            logging.error(f"Missing weather data between {previous_timestep_str} and {timestep_str}. Exiting.", file=sys.stderr)
-            exit(1)
-    previous_timestep = timestep
-wh_daysum.close()
-
-# From here, no additive calculation; we calculate from the beginning of the w_daysum.nc file every time. 
-
-# Add sum of WH_DAYSUM[yesterday] + WH_DAYSUM[today] + WH_DAYSUM[tomorrow] into WHS[today]
-# timselsum skips every 3 steps when summing 3 timestemps, so we must
-# create three different files and then merge them
-subprocess.run(f'cdo -s timselsum,3,0 {tmpfile_path}wh_daysum.nc {tmpfile_path}wh_3daysum_tmp_0.nc', shell=True)
-subprocess.run(f'cdo -s timselsum,3,1 {tmpfile_path}wh_daysum.nc {tmpfile_path}wh_3daysum_tmp_1.nc', shell=True)
-subprocess.run(f'cdo -s timselsum,3,2 {tmpfile_path}wh_daysum.nc {tmpfile_path}wh_3daysum_tmp_2.nc', shell=True)
-
-subprocess.run(f'cdo -s -chname,WH_DAYSUM,WHS -mergetime {tmpfile_path}wh_3daysum_tmp_*.nc {tmpfile_path}wh_3daysum_tmp_merged.nc', shell=True)
-
-# the last timesteps are most likely wrong, due to lack of "tomorrows" when performing timselsum
-# To remove the last ones:
-#
-# The variable time_bnds (which has two dimensions: time and bnds. bnds is size 2) contains the 
-# time (in seconds) between the first and last timesteps used for summing up. e.g like this:
-# $ ncdump -v time_bnds tmp/wh_3daysum.nc
-# time_bnds =
-# 1696111200, 1696284000,
-# 1696197600, 1696370400,
-# 1696284000, 1696456800,
-# 1696370400, 1696543200,
-# 1696456800, 1696629600,
-# 1696543200, 1696716000,
-# 1696629600, 1696716000,
-# 1696716000, 1696716000;
-#}
-# The difference [1] - [0] should be 172800 seconds = 48 hours
-# Timesteps with [1] - [0] != 172800 should be discarded
-# Using netCDF4 to accomplish this
-
-wh_3daysum = nc.Dataset(f'{tmpfile_path}wh_3daysum_tmp_merged.nc', 'r')
-time_bnds = wh_3daysum.variables["time_bnds"][:]
-# Assuming that wrong time bounds only exist at the end of the time series, this works
-number_of_timesteps_to_remove = 0
-for time_bnd in time_bnds:
-    if time_bnd[1]-time_bnd[0] != 172800.0:
-        number_of_timesteps_to_remove = number_of_timesteps_to_remove + 1
-wh_3daysum.close()
-number_of_timesteps_to_keep = len(time_bnds) - number_of_timesteps_to_remove
-subprocess.run(f'cdo -s -seltimestep,1/{number_of_timesteps_to_keep} {tmpfile_path}wh_3daysum_tmp_merged.nc {tmpfile_path}wh_3daysum.nc', shell=True)
-
-
-# Classifying warning status for the WHS model
-# 0  == WHS            --> Grey
-# 0  <  WHS <  20      --> Yellow
-# 20 <= WHS < 40       --> Orange
-# 40 <= WHS            --> Red
-subprocess.run(f'cdo -s -aexpr,"WARNING_STATUS = WHS <= 0 ? 0 : -1; WARNING_STATUS = WHS < 20 && WARNING_STATUS == -1 ? 2 : WARNING_STATUS; WARNING_STATUS = WHS < 40 && WARNING_STATUS == -1 ? 3 : WARNING_STATUS; WARNING_STATUS = WHS >= 40 && WARNING_STATUS == -1 ? 4 : WARNING_STATUS" {tmpfile_path}wh_3daysum.nc {tmpfile_path}result_unmasked.nc', shell=True)
-
-# Mask results using a CSV file with polygons
-# Env variable MASK_FILE must be set
-if os.getenv("MASK_FILE") is not None:
-    mask_file = os.getenv("MASK_FILE")
-    logging.info(f"Applying mask file {mask_file} to result.nc")
-    subprocess.run(f'cdo -maskregion,{mask_file} {tmpfile_path}result_unmasked.nc {tmpfile_path}result.nc', shell=True)
-else:
-    os.rename(f"{tmpfile_path}result_unmasked.nc", f"{tmpfile_path}result.nc")
-#"""
-
-# Split the combined file into daily .nc files again, with YYYY-MM-DD in the filename. Convert to corresponding GeoTIFF files
-# Variables that needs discrete classification, must be integers in order for mapserver to work properly (Don't ask!)
-# Since we need WARNING_STATUS to discretely classified, we need to create a separate GeoTIFF file for it
-wh_3daysum = nc.Dataset(f'{tmpfile_path}wh_3daysum.nc', 'r')
-timesteps = wh_3daysum.variables["time"][:]
-timestep_index = 1
-timestep_dates = [] # Used in the mapfile template
-# TODO: We are (?) assuming all timesteps are equal. This might NOT be the case if there are holes in the result data
-for timestep in timesteps:
-    timestep_date = datetime.fromtimestamp(timestep).astimezone(local_timezone)
-    file_date = timestep_date.astimezone(local_timezone).strftime("%Y-%m-%d")
-    timestep_dates.append(file_date)
-    # If start_date is set, that means that we should only generate result files from that date on 
-    # -- IF GeoTIFF fiels already exists before that.
-    if start_date is not None and timestep_date < start_date:
-        if os.path.isfile(f'{outfile_path}result_WARNING_STATUS_{file_date}.tif') and os.path.isfile(f'{outfile_path}result_{file_date}.tif'):
+        # Check that the file has at least 23 timesteps
+        with nc.Dataset(weather_data_file_path, "r") as weatherdata_file:
+            timestep_count = len(weatherdata_file.variables["time"])
+            if timestep_count < 23:
+                logging.info(
+                    f"{weather_data_file_name} - Skip file with {timestep_count} timesteps"
+                )
+                continue
+
+        # Skip if result files for date already exist
+        if os.path.isfile(
+            f"{data_dir}result_WARNING_STATUS_{date_YYYY_MM_DD}.tif"
+        ) and os.path.isfile(f"{data_dir}result_{date_YYYY_MM_DD}.tif"):
+            logging.info(f"Result files for {date_YYYY_MM_DD} already exist, skip")
             continue
 
-    # Create NetCDF result file
-    subprocess.run(f'cdo -s -seltimestep,{timestep_index}/{timestep_index} {tmpfile_path}result.nc {tmpfile_path}result_{file_date}.nc', shell=True)
-    # Convert to GeoTIFF
-    # We only need WHS and WARNING_STATUS
-    # Merge the WARNING_STATUS and WHS variables into one GeoTIFF file with two bands. 
-    # The WARNING_STATUS should always be band #1
-    # We must delete the GeoTIFF file before merging
-    subprocess.run(f'rm {outfile_path}result_*{file_date}*.tif', shell=True)
-    with open("/dev/null", "w") as devnull:
-        subprocess.run(f'gdal_translate -ot Int16 -of GTiff  NETCDF:"{tmpfile_path}result_{file_date}.nc":WARNING_STATUS {tmpfile_path}result_WARNING_STATUS_{file_date}_lcc.tif', shell=True, stdout=devnull)
-        subprocess.run(f'gdal_translate -ot Float32 -of GTiff  NETCDF:"{tmpfile_path}result_{file_date}.nc":WHS {tmpfile_path}result_{file_date}_lcc.tif', shell=True, stdout=devnull)
-        # Need to reproject the files, to ensure we have the projection given in the generted mapfile. We always use EPSG:4326 for this
-        subprocess.run(f'gdalwarp -t_srs EPSG:4326 {tmpfile_path}result_WARNING_STATUS_{file_date}_lcc.tif {outfile_path}result_WARNING_STATUS_{file_date}.tif', shell=True, stdout=devnull)
-        subprocess.run(f'gdalwarp -t_srs EPSG:4326 {tmpfile_path}result_{file_date}_lcc.tif {outfile_path}result_{file_date}.tif', shell=True, stdout=devnull)
-
-    timestep_index = timestep_index + 1
-
-# Generate mapfile
-# Building data sets for language specific legends
-languages = []
-language_codes = config["i18n"]["languages"].split(",");
-for language_code in language_codes:
-    language = {"language_code": language_code}
-    if ("i18n.%s" % language_code) in config:
-        for keyword in config["i18n.%s" % language_code]:
-            language[keyword] = config["i18n.%s" % language_code][keyword]
-        languages.append(language)
-
-# The paths should be set in a .env file
-env = Environment(loader=FileSystemLoader('.'))
-template = env.get_template("mapfile/template.j2")
-output = template.render({
-     "model_id":model_id,
-     "timestep_dates": timestep_dates,
-     "mapserver_data_dir": os.getenv("MAPSERVER_DATA_DIR"),
-     "mapserver_mapfile_dir": os.getenv("MAPSERVER_MAPFILE_DIR"),
-     "mapserver_log_file": os.getenv("MAPSERVER_LOG_FILE"),
-     "mapserver_image_path": os.getenv("MAPSERVER_IMAGE_PATH"),
-     "mapserver_extent": os.getenv("MAPSERVER_EXTENT"),
-     "languages": languages,
-     "language_codes": language_codes
-})
-mapfile_outdir = os.getenv("MAPFILE_DIR")
-with open(f"{mapfile_outdir}/{model_id}.map", 'w') as f:
-    f.write(output)
-
-
-# Remove all temporary/intermediary files
-subprocess.run(f"rm {tmpfile_path}wh_3daysum*.nc", shell=True)
-subprocess.run(f"rm {tmpfile_path}result*.nc", shell=True)
-subprocess.run(f"rm {tmpfile_path}result*.tif", shell=True)
+        # Produce daily files with MAX_TEMP, which is the highest recorded temperature within the given day
+        run_command(
+            command=f"cdo -s -O -L -settime,00:00:00 -setdate,{date_YYYY_MM_DD} -chname,{TM},{TM_MAX} -timmax -selvar,{TM} {weather_data_file_path} {max_temp_file_path}"
+        )
+
+        # Classifying warning status for the model
+        # temperature < 15     --> 2 (Yellow)
+        # temperature >= 15    --> 4 (Red)
+        run_command(
+            command=f'cdo -s -aexpr,"WARNING_STATUS = {TM_MAX} < {THRESHOLD} ? 2 : 4" {max_temp_file_path} {result_unmasked_path}',
+        )
+
+        # Mask results using a CSV file with polygons if env variable MASK_FILE is set; otherwise use the unmasked file as-is.
+        if os.getenv("MASK_FILE") is not None:
+            mask_file = os.getenv("MASK_FILE")
+            logging.info(f"Applying mask file {mask_file} to result file")
+            run_command(
+                command=f"cdo -maskregion,{mask_file} {result_unmasked_path} {result_path}",
+            )
+        else:
+            os.rename(result_unmasked_path, result_path)
+
+        # Convert to GeoTIFF
+        # We only need TM_MAX and WARNING_STATUS, written as two separate single-band GeoTIFF files.
+        # WARNING_STATUS needs discrete classification, so it must be kept as an integer band for mapserver.
+
+        # TODO: The previous model deleted any existing GeoTIFF files for the date before regenerating them;
+        # clarify whether that is still needed here:
+        # run_command(command=f"rm {data_dir}result_*{date_YYYY_MM_DD}*.tif")
+
+        timestep_dates.append(date_YYYY_MM_DD)
+
+        with open("/dev/null", "w") as devnull:
+            run_command(
+                command=f'gdal_translate -ot Int16 -of GTiff  NETCDF:"{tmp_dir}result_{date_YYYY_MM_DD}.nc":WARNING_STATUS {tmp_dir}result_WARNING_STATUS_{date_YYYY_MM_DD}_lcc.tif',
+                stdout=devnull,
+            )
+            run_command(
+                command=f'gdal_translate -ot Float32 -of GTiff  NETCDF:"{tmp_dir}result_{date_YYYY_MM_DD}.nc":{TM_MAX} {tmp_dir}result_{date_YYYY_MM_DD}_lcc.tif',
+                stdout=devnull,
+            )
+            # Need to reproject the files, to ensure we have the projection given in the generated mapfile. We always use EPSG:4326 for this
+            run_command(
+                command=f"gdalwarp -t_srs EPSG:4326 {tmp_dir}result_WARNING_STATUS_{date_YYYY_MM_DD}_lcc.tif {data_dir}result_WARNING_STATUS_{date_YYYY_MM_DD}.tif",
+                stdout=devnull,
+            )
+            run_command(
+                command=f"gdalwarp -t_srs EPSG:4326 {tmp_dir}result_{date_YYYY_MM_DD}_lcc.tif {data_dir}result_{date_YYYY_MM_DD}.tif",
+                stdout=devnull,
+            )
+
+    # Generate mapfile
+    # Building data sets for language specific legends
+    languages = []
+    language_codes = config["i18n"]["languages"].split(",")
+    for language_code in language_codes:
+        language = {"language_code": language_code}
+        if ("i18n.%s" % language_code) in config:
+            for keyword in config["i18n.%s" % language_code]:
+                language[keyword] = config["i18n.%s" % language_code][keyword]
+            languages.append(language)
+
+    # The paths should be set in a .env file
+    env = Environment(loader=FileSystemLoader("."))
+    template = env.get_template("mapfile/template.j2")
+    output = template.render(
+        {
+            "model_id": model_id,
+            "timestep_dates": timestep_dates,
+            "mapserver_data_dir": os.getenv("MAPSERVER_DATA_DIR"),
+            "mapserver_mapfile_dir": os.getenv("MAPSERVER_MAPFILE_DIR"),
+            "mapserver_log_file": os.getenv("MAPSERVER_LOG_FILE"),
+            "mapserver_image_path": os.getenv("MAPSERVER_IMAGE_PATH"),
+            "mapserver_extent": os.getenv("MAPSERVER_EXTENT"),
+            "languages": languages,
+            "language_codes": language_codes,
+        }
+    )
+    mapfile_outdir = os.getenv("MAPFILE_DIR")
+    with open(f"{mapfile_outdir}/{model_id}.map", "w") as f:
+        f.write(output)
+
+    # Remove all temporary/intermediary files (cleanup currently disabled)
+    # run_command(command=f"rm {tmp_dir}result*.nc")
+    # run_command(command=f"rm {tmp_dir}result*.tif")
+
+    end_time = time.time()
+    logging.info(
+        f"End model {model_id} - execution time: {end_time - start_time:.2f} seconds"
+    )
diff --git a/README.md b/README.md
index b127608..5642dbd 100644
--- a/README.md
+++ b/README.md
@@ -6,18 +6,21 @@ This model is based on the work of Ferguson et al., (2015) to predict migration
 Input data for opprinnelig modell:
 
 DSSInput_Parameters
+
 - weatherData weatherData (required)
 - int growthStage (required)
 - optionalData optionalData
 
 weatherData
-- string timeStart 
-- string timeEnd 
-- int Interval 
-- List<int> weatherParameters 
+
+- string timeStart
+- string timeEnd
+- int Interval
+- List<int> weatherParameters
 - List<locationWeatherData> LocationWeatherData z
 
 locationWeatherData
+
 - double longitude
 - double latitude
 - double altitude
@@ -28,36 +31,39 @@ locationWeatherData
 - int length
 
 optionalData
+
 - double temp_threshold
-- DateTime? startDate 
+- DateTime? startDate
 - DateTime? endDate
 
 Se MELIAEController.cs for oppstart av applikasjonen
 Se MELIAE_DSS.cs for kjøring av modellen
 
-
-
-
-
 ## Technical description
+
 The model has been implemented by [Tor-Einar Skog](https://nibio.no/en/employees/tor-einar-skog), [NIBIO](https://nibio.no/en). It is designed to fit into the gridded pest prediction models of [VIPS](https://nibio.no/en/services/vips).
 
 ### Software requirements
+
 The model can only be run on Linux, as some of the tools mentioned below are only available on Linux. The development and testing of the model has been done using [Ubuntu Linux 22.04LTS](https://ubuntu.com/).
 
 #### CDO and GDAL
+
 The heavy lifting in this model is done by the tools [CDO](https://code.mpimet.mpg.de/projects/cdo) and [GDAL](https://gdal.org/). These tools need to be installed and available. CDO is only available on Linux.
 
 #### Python requirements
+
 The Python requirements are specified in `requirements.txt` file, and are included in the virtualenv created by the `run_ADASMELIAE.sh` (see below).
 
 ### Input data requirements
+
 The model (as per 2023-10-25) assumes that weather data files named `met_1_0km_nordic-[YYYY-MM-DD].nc` are available in the `in/` folder. The files must contain hourly timesteps with the following weather parameters:
 
-* RR (Rainfall in mm)
-* UM (Relative humidity in %)
+- RR (Rainfall in mm)
+- UM (Relative humidity in %)
 
 ### Running the model
+
 It is required that you have set the following environment variables:
 
 ```bash
@@ -68,9 +74,9 @@ HOME_DIR=/home/foo/2023_vips_in_space/
 # Path to the weather data
 WEATHER_DATA_DIR=in/
 # Used for iterating the weather data files
-FILENAME_PATTERN="met_1_0km_nordic-*.nc"
+WEATHER_DATA_FILENAME_PATTERN="met_1_0km_nordic-*.nc"
 # Used to extract date info from the filename
-FILENAME_DATEFORMAT="met_1_0km_nordic-%Y-%m-%d.nc"
+WEATHER_DATA_FILENAME_DATEFORMAT="met_1_0km_nordic-%Y-%m-%d.nc"
 # Names of weather parameters in NetCDF files
 # Hourly precipitation
 RR="RR"
@@ -78,7 +84,7 @@ RR="RR"
 UM="UM"
 # Timezone for weather data/daily aggregations
 LOCAL_TIMEZONE="Europe/Oslo"
-# Path to optional CSV file with polygons for masking result. 
+# Path to optional CSV file with polygons for masking result.
 MASK_FILE=Norge_landomrader.csv
 # Path to the output (GeoTIFF) files as seen from the running model code
 DATA_DIR=out/
@@ -101,9 +107,11 @@ MAPSERVER_EXTENT="-1.5831861262936526 52.4465003983706595 39.2608060398730458 71
 ```bash
 $ ./run_ADASMELIAE.sh
 ```
-This creates a Python virtualenv, installs all the Python dependencies, runs the model and stores output in a log file. 
+
+This creates a Python virtualenv, installs all the Python dependencies, runs the model and stores output in a log file.
 
 Alternatively, primarily for development purposes, you can run the Python script ADASMELIAE directly:
+
 ```bash
 $ ./ADASMELIAE.py
 ```
@@ -114,28 +122,23 @@ All intermediary files are stored in the `tmp/` folder, and they are all deleted
 
 The model outputs GeoTIFF files, two per day in the season/period of calculation:
 
-* `result_WARNING_STATUS_[YYYY-MM-DD].tif`, wich indicates infection risk of Septoria. 
-  * 0 = No infection risk (grey)
-  * 2 = Low infection risk (yellow)
-  * 3 = Medium infection risk (orange)
-  * 4 = High risk (red)
-* `result_[YYYY-MM-DD].tif`, which contains the wet hour sum (WHS) - which is the sum of wet hours for "yesterday", "today" and "tomorrow", relative to the current file date.
+- `result_WARNING_STATUS_[YYYY-MM-DD].tif`, which indicates infection risk of Septoria.
+  - 0 = No infection risk (grey)
+  - 2 = Low infection risk (yellow)
+  - 3 = Medium infection risk (orange)
+  - 4 = High risk (red)
+- `result_[YYYY-MM-DD].tif`, which contains the wet hour sum (WHS) - which is the sum of wet hours for "yesterday", "today" and "tomorrow", relative to the current file date.
 
-A [Jinja2](https://pypi.org/project/Jinja2/) template mapfile (for [Mapserver](https://mapserver.org/)) with separate layers (WARNING_STATUS and WHS) for each date is found in the `mapfile/` folder. 
+A [Jinja2](https://pypi.org/project/Jinja2/) template mapfile (for [Mapserver](https://mapserver.org/)) with separate layers (WARNING_STATUS and WHS) for each date is found in the `mapfile/` folder.
 
 Examples of the two layers are shown below
 
 ![WARNING_STATUS example. Showing Norway and surrounding area](./WARNING_STATUS_layer_example.png)
 
-*WARNING_STATUS example. Showing Norway*
+_WARNING_STATUS example. Showing Norway_
 
 ![WHS (Wet Hour Sum) example. Showing Norway and surrounding area](./WHS_layer_example.png)
 
-*WHS (Wet Hour Sum) example. Showing Norway*
+_WHS (Wet Hour Sum) example. Showing Norway_
 
 ## Notes to self
-
-
-
-
-
diff --git a/__pycache__/ADASMELIAE.cpython-311.pyc b/__pycache__/ADASMELIAE.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a11ab49a536711742aa3858ea4f6026bde8b04e6
GIT binary patch

diff --git a/__pycache__/test_ADASMELIAE.cpython-311-pytest-7.4.0.pyc b/__pycache__/test_ADASMELIAE.cpython-311-pytest-7.4.0.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ef19f4de78e98a2a5fa741921e5e4e5e0e5dc8b7
GIT binary patch

diff --git a/__pycache__/test_ADASMELIAE.cpython-311-pytest-8.1.1.pyc b/__pycache__/test_ADASMELIAE.cpython-311-pytest-8.1.1.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..cc33e711f6b19db54ef736bcfbdea5dda2b9a28c
GIT binary patch

diff --git a/env-sample b/env-sample
index 9a1d67f..071dd79 100644
--- a/env-sample
+++ b/env-sample
@@ -7,9 +7,9 @@ HOME_DIR=/home/foo/2023_vips_in_space/ADASMELIAE/
 # Path to the weather data
 WEATHER_DATA_DIR=in/
 # Used for iterating the weather data files
-FILENAME_PATTERN="met_1_0km_nordic-*.nc"
+WEATHER_DATA_FILENAME_PATTERN="met_1_0km_nordic-*.nc"
 # Used to extract date info from the filename
-FILENAME_DATEFORMAT="met_1_0km_nordic-%Y-%m-%d.nc"
+WEATHER_DATA_FILENAME_DATEFORMAT="met_1_0km_nordic-%Y-%m-%d.nc"
 # Names of weather parameters in NetCDF files
 # Hourly precipitation
 RR="RR"
diff --git a/mapfile/query_template.xml b/mapfile/query_template.xml
index 0666fe2..bbc03e2 100644
--- a/mapfile/query_template.xml
+++ b/mapfile/query_template.xml
@@ -1,7 +1,7 @@
 <!--mapserver template-->
 <?xml version="1.0" encoding="UTF-8"?>
 <vipsResult>
-    <modelName value="Referansefuktmodell"/>
+    <modelName value="Pollen beetle migration model"/>
     <modelId value="ADASMELIAE"/>
     <warningStatus value="[value_0]"/>
 </vipsResult>
\ No newline at end of file
diff --git a/mapfile/query_template_WHS.xml b/mapfile/query_template_temperature.xml
similarity index 74%
rename from mapfile/query_template_WHS.xml
rename to mapfile/query_template_temperature.xml
index eef0e21..e4144fd 100644
--- a/mapfile/query_template_WHS.xml
+++ b/mapfile/query_template_temperature.xml
@@ -3,5 +3,5 @@
 <vipsResult>
     <modelName value="Pollen beetle migration model"/>
     <modelId value="ADASMELIAE"/>
-    <parameter name="WHS" value="[value_0]"/>
+    <parameter name="air_temperature_2m" value="[value_0]"/>
 </vipsResult>
\ No newline at end of file
diff --git a/mapfile/template.j2 b/mapfile/template.j2
index 6f35ea1..b4d81e6 100644
--- a/mapfile/template.j2
+++ b/mapfile/template.j2
@@ -35,54 +35,44 @@ WEB
     # List of standard metadata: https://mapserver.org/ogc/wms_server.html#web-object-metadata
     # i18n support: https://mapserver.org/ogc/inspire.html#inspire-multi-language-support  
     METADATA
-            "wms_keywordlist" "VIPS model Septoria Reference Humidity Model (ADASMELIAE)"
+            "wms_keywordlist" "Pollen Beetle Migration Model (ADASMELIAE)"
             {% if languages %}
             "wms_inspire_capabilities" "embed"
             "wms_languages" "{{ language_codes|join(",")}}" # The first is the default
             {% endif %}
             "wms_abstract"  "<div id='preamble'>
-              <p>The reference humidity model was developed as a supplement to
-              the Humidity model. In this model 20 consecutive hours are
-              required to fulfil a risk period. One constraint in this
-              method is that you can have 19 consecutive risk hours  or,
-              fx 14 hours with risk then one hour below the Rh threshold
-              and then maybe 14 hours again with risk hours. In one of
-              these situations, the model will indicate a risk. In the
-              reference model the definition of Humid hours was introduced.
-              The Rh threshold was avoided as humid hours do not need to be
-              consecutive. The running sum of humid hours across three days
-              indicate that the Septoria risk is higher than if you have
-              three days with humid conditions than two or one. The operation
-              of the model should include weather forecast data and should
-              run 6 days ahead from current day if you include a 7-day weather
-              forecast (60 hours from national met office and until 7 days from ECMWF)</p>
+              <p>Pollen beetle (Meligethes spp.) adults are approximately 2.5 mm,
+      metallic greenish-black. Females bite oilseed rape buds and lay their eggs
+      inside. Adults and larvae attack buds and flowers, resulting in withered
+      buds and reduced pod set. In oilseed rape, adult and larval feeding can
+      lead to bud abortion and reduced pod set. However, damage rarely results
+      in reduced yields for winter crops. Spring crops are more vulnerable, as
+      the susceptible green/yellow bud stage often coincides with beetle
+      migration. </p>
             </div>
             <div id='body'>
               <p>
-              The model was tested against the Humidity model in a Danish
-              Septoria project funded by the GUDP. The threshold of 40 was
-              defined as high risk as this coincided with periods when the
-              humidity model recommended to apply fungicide (if not already protected).
-              The humidity model includes a decision model about when to spray,
-              protection periods ect. The reference model was used to quality
-              control the recommendations in a way that, if the reference humidity
-              hours were higher than 40 (no thresholds) then the user should
-              check the raw data for calculation of the Humidity model (threshold,
-              20 consecutive hours). If 2-3 periods of 17, 18, or 19 consecutive
-              hours appear, then one can consider to protect the crop based on the
-              reference model alone.</p>
-              <p>The Humidity model is considered as a DSS with several components.
-              The reference humidity model is considered as a weather based submodel
-              for the risk of Septoria, Easy to map and calculate based on weather data alone.</p>
+              Oilseed rape is only vulnerable if large numbers of pollen
+      beetles migrate into the crop during the green bud stage. This DSS predicts
+      migration into crops based on air temperature, and so can be used to
+      evaluate the risk to the crop. Daily maximum air temperature is used to predict
+      migration risk. The default threshold of 15 degrees Celsius is used, as that is
+      the temperature above which, according to UK guidance, pollen beetles will fly.
+            </p>
+              <p>This DSS was adapted from work carried out in the UK, and is
+      considered applicable to, but not yet validated in, Belgium, Luxembourg,
+      the Netherlands, France, Germany, the Republic of Ireland, and Denmark. It should only be
+      used during oilseed rape growth stages 51-59. This model is a simplification of a more
+      detailed model described in Ferguson et al. (2015) Pest Management Science 72, 609-617.
+      <a href="https://doi.org/10.1002/ps.4069">https://doi.org/10.1002/ps.4069</a></p>
               
               <h3>Explanation of parameters</h3>
               <ul>
-              <li>WHS = <span itemprop=\"WHS\">Wet hour sum</span></li>
+              <li>TM_MAX = <span itemprop=\"TM_MAX\">Maximum Air Temperature at 2m</span></li>
               </ul>
             </div>
             "
         "wms_enable_request"  "*"
-        "wms_title"     "Septoria Reference Humidity Model"
+        "wms_title"     "Pollen Beetle Migration Model"
         "wms_getfeatureinfo_formatlist" "text/plain,text/html,text/xml"
         "wms_accessconstraints" "none"
         "wms_addresstype" ""
@@ -124,27 +114,17 @@ LAYER
     
     STATUS ON
     METADATA
-      "wms_title"     "Reference humidity model {{ timestep_date }}"  
+      "wms_title"     "Pollen Beetle Migration Model {{ timestep_date }}"  
       {% for language in languages %}
       "wms_abstract.{{language.language_code}}" "
       {
           \"isWarningStatus\": true,
           \"legendItems\": [
-          {
-            \"classification\": 0,
-            \"legendLabel\": \"{{ language.no_risk }}\",
-            \"legendIconCSS\": \"width: 25px; background-color: #707070;\"
-          },
           {
             \"classification\": 2,
             \"legendLabel\": \"{{ language.low_risk }}\",
             \"legendIconCSS\": \"width: 25px; background-color: #FFCC00;\"
           },
-          {
-            \"classification\": 3,
-            \"legendLabel\": \"{{ language.medium_risk }}\",
-            \"legendIconCSS\": \"width: 25px; background-color: #FFCC99;\"
-          },
           {
             \"classification\": 4,
             \"legendLabel\": \"{{ language.high_risk }}\",
@@ -159,30 +139,16 @@ LAYER
   
     # class using simple string comparison, equivalent to ([pixel] = 0)
   
-    CLASS
-      NAME "No infection risk"
-      EXPRESSION ([pixel] >= 0 AND [pixel] < 2) 
-      STYLE
-          COLOR 112 112 112
-      END
-    END
     CLASS
       NAME "Low infection risk"
-      EXPRESSION ([pixel] >= 2 AND [pixel] < 3) 
+      EXPRESSION ([pixel] < 3) 
       STYLE
-          COLOR 255 204 0
-      END
-    END
-    CLASS
-      NAME "Medium infection risk"
-      EXPRESSION ([pixel] >= 3 AND [pixel] < 4) 
-      STYLE
-          COLOR 255 153 0
+          COLOR 255 204 0
       END
     END
     CLASS
       NAME "High infection risk"
-      EXPRESSION ([pixel] >= 4) 
+      EXPRESSION ([pixel] >= 3) 
       STYLE
           COLOR  255 0 0
       END
@@ -190,25 +156,25 @@ LAYER
 END # Layer
   
 LAYER
-     NAME "{{model_id}}.WHS.{{ timestep_date }}"
+     NAME "{{model_id}}.temperature.{{ timestep_date }}"
      DATA "{{mapserver_data_dir}}result_{{ timestep_date }}.tif"
-     TEMPLATE "{{mapserver_mapfile_dir}}query_template_WHS.xml" TOLERANCE 1 TOLERANCEUNITS PIXELS
+     TEMPLATE "{{mapserver_mapfile_dir}}query_template_temperature.xml" TOLERANCE 1 TOLERANCEUNITS PIXELS
      TYPE RASTER
-     #PROCESSING "BANDS=1" # WHS band on top (others invisible, but band values are available in the query template)
+     #PROCESSING "BANDS=1" # Temperature band on top (others invisible, but band values are available in the query template)
      #PROCESSING "SCALE=AUTO"
      #PROCESSING "NODATA=-1"
    
    
      STATUS ON
      METADATA
-       "wms_title"     "Reference humidity model WHS {{ timestep_date }}"
+       "wms_title"     "Pollen Beetle Migration Model temperature {{ timestep_date }}"
        {% for language in languages %}
       "wms_abstract.{{language.language_code}}" "
       {
           \"isWarningStatus\": false,
           \"legendItems\": [
           {
-            \"legendLabel\": \"{{ language.whs }}\",
+            \"legendLabel\": \"{{ language.temperature }}\",
             \"legendIconCSS\": \"width: 25px; background: linear-gradient(to right, #FFFF00, #0000FF);\"
           }
         ]
@@ -218,10 +184,10 @@ LAYER
      END
     CLASSITEM "[pixel]"
     CLASS
-      NAME "Wet hour sum (yesterday + today + tomorrow) [0-72]"
-      EXPRESSION ([pixel] >= 0 AND [pixel] <= 72)
+      NAME "Temperature range"
+      EXPRESSION ([pixel] >= -40 AND [pixel] <= 40)
       STYLE
-        DATARANGE 0 72
+        DATARANGE -40 40
         COLORRANGE 255 255 0 0 0 255
       END
     END
diff --git a/requirements.txt b/requirements.txt
index 222acb6..5697770 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,4 +1,5 @@
 Jinja2
 netCDF4
 pytz
-python-dotenv
\ No newline at end of file
+python-dotenv
+pytest
\ No newline at end of file
diff --git a/test_ADASMELIAE.py b/test_ADASMELIAE.py
new file mode 100644
index 0000000..3b4c066
--- /dev/null
+++ b/test_ADASMELIAE.py
@@ -0,0 +1,43 @@
+import pytest
+from datetime import datetime
+from ADASMELIAE import find_start_date
+
+
+@pytest.fixture
+def example_result_files():
+    return [
+        "result_2023-04-15.nc",
+        "result_2023-04-16.nc",
+        "result_2023-04-17.nc",
+        "result_WARNING_STATUS_2023-04-15.nc",
+        "result_WARNING_STATUS_2023-04-16.nc",
+        "result_WARNING_STATUS_2023-04-17.nc",
+        "result_WARNING_STATUS_2023-04-18.nc",
+    ]
+
+
+def test_find_start_date_with_previous_results(example_result_files, monkeypatch):
+    MODEL_START_DATE = datetime(2023, 3, 1)
+    monkeypatch.setenv("DATA_DIR", "out")
+
+    # Mock os.listdir to return the example result files
+    with monkeypatch.context() as m:
+        m.setattr("os.listdir", lambda _: example_result_files)
+        start_date = find_start_date(MODEL_START_DATE)
+
+    # Assert the expected start date
+    expected_start_date = datetime(2023, 4, 18)
+    assert start_date == expected_start_date
+
+
+def test_find_start_date_without_previous_results(monkeypatch):
+    MODEL_START_DATE = datetime(2023, 3, 1)
+    monkeypatch.setenv("DATA_DIR", "out")
+
+    # Mock os.listdir to return an empty list
+    with monkeypatch.context() as m:
+        m.setattr("os.listdir", lambda _: [])
+        start_date = find_start_date(MODEL_START_DATE)
+
+    # Assert the expected start date
+    assert start_date == MODEL_START_DATE
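
The new tests monkeypatch os.listdir, so they run without any result files on disk. With pytest now listed in requirements.txt, they can presumably be run from the repository root with `python -m pytest test_ADASMELIAE.py`; note that ADASMELIAE.py reads its configuration (for example LOCAL_TIMEZONE, START_DATE and END_DATE) at import time, so a populated .env file is assumed to be in place.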
-- 
GitLab