diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 0000000000000000000000000000000000000000..515b24ada4fc00440dbad4fb62b7e237b7a5f714
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,2 @@
+.git
+build.sh
diff --git a/.gitignore b/.gitignore
index 9e04dff31ea8dfaf5a76834450130c223cef1001..e68d29e40f5a8737dab6da3a3c77296237c63b38 100644
--- a/.gitignore
+++ b/.gitignore
@@ -15,4 +15,5 @@ wandb/
 sample_data/segmented_point_clouds/*
 *.dat
 *.json
-*.png
\ No newline at end of file
+*.png
+*.txt
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..23a4071edaa554ab608117eefc3f3c13f8b3dab5
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,51 @@
+FROM nvidia/cuda:11.2.1-cudnn8-runtime-ubuntu20.04
+
+# install conda
+ARG UBUNTU_VER=20.04
+ARG CONDA_VER=latest
+ARG OS_TYPE=x86_64
+
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    curl \
+    sudo \
+    libglib2.0-0 \
+    libsm6 \
+    libxext6 \
+    libxrender-dev \
+    libsndfile1 \
+    libtiff5 \
+    && rm -rf /var/lib/apt/lists/* 
+
+
+RUN curl -LO "https://repo.anaconda.com/miniconda/Miniconda3-${CONDA_VER}-Linux-${OS_TYPE}.sh" && \
+    bash Miniconda3-${CONDA_VER}-Linux-${OS_TYPE}.sh -p /miniconda -b && \
+    rm Miniconda3-${CONDA_VER}-Linux-${OS_TYPE}.sh
+
+RUN /miniconda/bin/conda update conda 
+
+RUN /miniconda/bin/conda init bash
+RUN /miniconda/bin/conda create --name pdal-env python=3.8.13
+
+SHELL ["/miniconda/bin/conda", "run", "-n", "pdal-env", "/bin/bash", "-c"]
+
+RUN echo "conda activate pdal-env" >> ~/.bashrc
+
+RUN conda install -c conda-forge pdal==2.4.3 python-pdal==3.1.2
+
+RUN pip install parse oci ocifs
+
+COPY requirements.txt app/requirements.txt
+RUN pip install --no-cache -r app/requirements.txt
+
+COPY . /app
+
+WORKDIR /app
+
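+# see run.sh for an example docker run invocation that bind-mounts a config file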
+ENTRYPOINT ["/miniconda/bin/conda", "run", "-n", "pdal-env", "python", "/app/run_oracle_wrapper.py"]
+
+
+# CMD ["--help" ]
+
+
diff --git a/bash_helper_scripts/are_files_identical.sh b/bash_helper_scripts/are_files_identical.sh
new file mode 100755
index 0000000000000000000000000000000000000000..33fa992726eaee87bf84b37ef905d41fb2331728
--- /dev/null
+++ b/bash_helper_scripts/are_files_identical.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+./bash_helper_scripts/get_austrian_sample_instance_p2.sh
+pdal translate maciek/p2_instance.las maciek/p2_instance.ply
+python fsct/run.py --model /home/nibio/mutable-outside-world/code/gitlab_fsct/instance_segmentation_classic/fsct/model/model.pth --point-cloud maciek/p2_instance.ply --batch_size 10 --odir maciek/ --verbose --keep-npy
+
+python helpers/compare_files_in_folders.py --folder1 maciek/p2_instance.tmp --folder2 old_maciek/p2_instance.tmp --verbose
\ No newline at end of file
diff --git a/run_all_command_line.sh b/bash_helper_scripts/run_all_command_line.sh
similarity index 100%
rename from run_all_command_line.sh
rename to bash_helper_scripts/run_all_command_line.sh
diff --git a/run_all_fine_grained.sh b/bash_helper_scripts/run_all_fine_grained.sh
similarity index 99%
rename from run_all_fine_grained.sh
rename to bash_helper_scripts/run_all_fine_grained.sh
index 446ab410a3dcf66a346dc9702762cc99116560c1..76b095e8df5d30fd71e9ffdd6f5b6279a7ccf8bf 100755
--- a/run_all_fine_grained.sh
+++ b/bash_helper_scripts/run_all_fine_grained.sh
@@ -111,7 +111,7 @@ for d in $data_folder/segmented_point_clouds/tiled/*/; do
     for f in $d/*.ply; do
         echo "Processing $f file..."
         python fsct/run.py \
-        --model /home/nibio/mutable-outside-world/code/instance_segmentation_classic/fsct/model/model.pth \
+        --model /home/nibio/mutable-outside-world/code/gitlab_fsct/instance_segmentation_classic/fsct/model/model.pth \
         --point-cloud $f \
         --batch_size 5 \
         --odir $d \
diff --git a/build.sh b/build.sh
new file mode 100755
index 0000000000000000000000000000000000000000..21f3b5c7b14fb257695c7a1d5ac45eeaa41c0269
--- /dev/null
+++ b/build.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+docker build -t nibio/pc-geoslam-oracle:latest .
diff --git a/config/config.yaml b/config/config.yaml
index 3175bce6aaa88b80e36a3855367c858e2814ad76..22812fdeb7b4caa6c725456964d0d03d5a599aa7 100644
--- a/config/config.yaml
+++ b/config/config.yaml
@@ -1,3 +1,27 @@
-files_formats:
+general:
+  input_folder: './maciek'
+  output_folder: './maciek_results'
+  clean_output_folder: true
+  run_semantic_segmentation: true
+  run_instance_segmentation: true
+label_formats:
   label_for_instances_in_gt: 'treeID'
-  label_for_instances_in_predicted:  
+  label_for_instances_in_predicted: 'instance_nr'
+semantic_segmentation_params:
+  semantic_segmentation_script: './run_bash_scripts/sem_seg_sean.sh'
+  checkpoint_model_path: './fsct/model/model.pth'
+  batch_size: 5 # batch size for inference
+  tile_size: 10 # tile size in meters
+  min_density: 100 # minimum density of points in a tile (used for removing small tiles)
+  remove_small_tiles: 1 # 1: remove small tiles, 0: keep small tiles
+instance_segmentation_params:
+  instance_segmentation_script: './run_bash_scripts/tls.sh' 
+  n_tiles: 3
+  slice_thickness: 0.5
+  find_stems_height: 1.5
+  find_stems_thickness: 0.5
+  graph_maximum_cumulative_gap: 3
+  add_leaves_voxel_length: 0.5
+  find_stems_min_points: 50
+  graph_edge_length: 1.0
+  add_leaves_edge_length: 1.0
\ No newline at end of file
diff --git a/helpers/run_command_bash.py b/helpers/run_command_bash.py
new file mode 100644
index 0000000000000000000000000000000000000000..2bb3cc93dc8f5d2e738810d2561230fa30489076
--- /dev/null
+++ b/helpers/run_command_bash.py
@@ -0,0 +1,15 @@
+import subprocess
+
+
+class RunCommandBash:
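+    """Thin callable wrapper around subprocess.run used to launch the pipeline's bash scripts."""
+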
+    def __init__(self, cmd, args):
+        self.cmd = cmd
+        self.args = args
+
+    def __call__(self):
+        print("Running command: " + self.cmd + " " + " ".join(self.args))
+        subprocess.run([self.cmd, *self.args])
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
index 5eee61352f1e6c101f7c30e1d72d92191974885f..9e761f979fd0727e61cbb3784f62c165ce705528 100644
Binary files a/requirements.txt and b/requirements.txt differ
diff --git a/run.py b/run.py
new file mode 100644
index 0000000000000000000000000000000000000000..41811f063d67425878f10c05503f1ead8e165657
--- /dev/null
+++ b/run.py
@@ -0,0 +1,131 @@
+# This script is used to run the application in a production environment
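+# typical usage: python run.py --path_to_config_file ./config/config.yaml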
+import argparse
+import os
+import yaml
+import logging
+
+# local imports
+from helpers.run_command_bash import RunCommandBash
+
+# define logger
+logger = logging.getLogger(__name__)
+logging.basicConfig(level=logging.INFO)
+
+
+def main(path_to_config_file):
+    # load the config file
+    with open(path_to_config_file, "r") as file:
+        config = yaml.load(file, Loader=yaml.FullLoader)
+
+    # check if the output folder exists and if not create it
+    if not os.path.exists(config["general"]["output_folder"]):
+        os.mkdir(config["general"]["output_folder"])
+
+    ### semantic segmentation section ###
+    if config["general"]["run_semantic_segmentation"]:
+        logger.info("Running semantic segmentation")
+        sem_seg_command = config["semantic_segmentation_params"]["semantic_segmentation_script"]
+
+        # print the semantic segmentation parameters
+        for key, value in config["semantic_segmentation_params"].items():
+            logger.info(key + ": " + str(value))
+
+        # read all the parameters from the config file for the semantic segmentation
+        sem_seg_args = []
+
+        sem_seg_args.extend([
+            "-d", str(config["general"]["input_folder"]),
+            "-c", str(config["semantic_segmentation_params"]["checkpoint_model_path"]),
+            "-b", str(config["semantic_segmentation_params"]["batch_size"]),
+            "-t", str(config["semantic_segmentation_params"]["tile_size"]),
+            "-m", str(config["semantic_segmentation_params"]["min_density"]),
+            "-z", str(config["semantic_segmentation_params"]["remove_small_tiles"])
+            ])
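+        # NB: these short flags must match the getopts string "d:c:b:t:m:z:"
+        # in run_bash_scripts/sem_seg_sean.sh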
+
+        # run the command with the arguments
+        logger.info("Running the semantic segmentation script with the arguments above")
+        RunCommandBash(sem_seg_command, sem_seg_args)()
+
+    ### instance segmentation section ###
+    if config["general"]["run_instance_segmentation"]:
+        logger.info("Running instance segmentation")
+        ins_seg_command = config["instance_segmentation_params"]["instance_segmentation_script"]
+
+        # print the instance segmentation parameters
+        for key, value in config["instance_segmentation_params"].items():
+            logger.info(key + ": " + str(value))
+
+        # read all the parameters from the config file for the instance segmentation
+        ins_seg_args = []
+
+        ins_seg_args.extend([
+            "-d", str(config["general"]["input_folder"]),
+            "-n", str(config["instance_segmentation_params"]["n_tiles"]),
+            "-s", str(config["instance_segmentation_params"]["slice_thickness"]),
+            "-h", str(config["instance_segmentation_params"]["find_stems_height"]),
+            "-t", str(config["instance_segmentation_params"]["find_stems_thickness"]),
+            "-g", str(config["instance_segmentation_params"]["graph_maximum_cumulative_gap"]),
+            "-l", str(config["instance_segmentation_params"]["add_leaves_voxel_length"]),
+            "-m", str(config["instance_segmentation_params"]["find_stems_min_points"]),
+            "-o", str(config["instance_segmentation_params"]["graph_edge_length"]),
+            "-p", str(config["instance_segmentation_params"]["add_leaves_edge_length"])
+            ])
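+        # NB: these short flags must match the getopts string "d:n:s:h:t:g:l:m:o:p:"
+        # in run_bash_scripts/tls.sh (-h is find_stems_height here, not help)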
+
+        # run the command with the arguments
+        logger.info("Running the instance segmentation script with the arguments above")
+        RunCommandBash(ins_seg_command, ins_seg_args)()
+
+    # do cleaning up folders
+    if config["general"]["clean_output_folder"]:
+        logger.info("Cleaning up the output folder")
+        os.system("rm -rf {}".format(config["general"]["output_folder"]))
+        os.mkdir(config["general"]["output_folder"])
+    else:
+        # check if the output folder is empty
+        if len(os.listdir(config["general"]["output_folder"])) != 0:
+            logger.error("The output folder is not empty. Please clean it up or set the 'clean_output_folder' parameter to True")
+            exit(1)
+
+    ### if only semantic segmentation is run transfer data to the output folder
+    if config["general"]["run_sematic_segmentation"] and not config["general"]["run_instance_segmentation"]:
+        logger.info("Transfering data to the output folder for semantic segmentation")
+        source_dir = os.path.join(config["general"]["input_folder"], "segmented_point_clouds") 
+
+        # take paths of all the files in the source_dir which end with .segmented.ply
+        files = [os.path.join(source_dir, file) for file in os.listdir(source_dir) if file.endswith(".segmented.ply")]
+    
+        # convert files in the source_dir to .las using pdal
+        for input_file in files:
+            # get directory of the file
+            dir_name = os.path.dirname(input_file)
+            # get file name
+            file_name = os.path.basename(input_file).split(".")[0]
+            # create a new file name with '.segmented.las' at the end
+            output_file_name = os.path.join(dir_name, file_name + ".segmented.las")
+
+            # convert the file to las, keeping the extra per-point dimensions
+            os.system("pdal translate {} {} --writers.las.dataformat_id=3 --writers.las.extra_dims=all".format(input_file, output_file_name))
+        
+        # copy the converted files to the output folder
+        las_segmented_files = [os.path.join(source_dir, file) for file in os.listdir(source_dir) if file.endswith(".segmented.las")]
+        for file in las_segmented_files:
+            os.system("cp {} {}".format(file, config["general"]["output_folder"]))
+
+    ### if both semantic and instance segmentation are run transfer data to the output folder
+    if config["general"]["run_sematic_segmentation"] and config["general"]["run_instance_segmentation"]:
+        source_folder = os.path.join(config["general"]["input_folder"], "results")
+        # copy all the files and folders from the source folder to the output folder
+        os.system("cp -r {} {}".format(source_folder + '/*', config["general"]["output_folder"]))
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser('Run the application in a production environment.')
+    parser.add_argument("--path_to_config_file", type=str, default="./config/config.yaml")
+    args = parser.parse_args()
+
+    # run the main function
+    main(args.path_to_config_file)
\ No newline at end of file
diff --git a/run.sh b/run.sh
new file mode 100755
index 0000000000000000000000000000000000000000..90fc3c2294a0670e9d86efe69ffde6563021e86a
--- /dev/null
+++ b/run.sh
@@ -0,0 +1,2 @@
+#!/bin/bash
+docker run --mount type=bind,src='/home/opc/git_repos/instance_segmentation_classic/config/config.yaml',dst='/app/current_config.yaml' --rm nibio/pc-geoslam-oracle:latest --path_to_config_file /app/current_config.yaml
diff --git a/run_bash_scripts/sem_seg.sh b/run_bash_scripts/sem_seg.sh
new file mode 100755
index 0000000000000000000000000000000000000000..e7727fe08abfb0df7bc3be1cb9e3ed9d533e1897
--- /dev/null
+++ b/run_bash_scripts/sem_seg.sh
@@ -0,0 +1,174 @@
+#!/bin/bash
+
+############################ parameters #################################################
+# General parameters
+CLEAR_INPUT_FOLDER=1  # 1: clear input folder, 0: not clear input folder
+CONDA_ENV="pdal-env-1" # conda environment for running the pipeline
+
+# Tiling parameters
+data_folder="" # path to the folder containing the data
+
+############################# end of parameters declaration ############################
+
+# extract the data folder as a command line argument
+while getopts "d:" opt; do
+  case $opt in
+    d) data_folder="$OPTARG"
+    ;;
+    \?) echo "Invalid option -$OPTARG" >&2
+    ;;
+  esac
+done
+
+# print the letters to choose from in getopts
+echo "      The list of letters for the parameters:"
+echo "d: data_folder"
+
+# print values of the parameters 
+echo "      The values of the parameters:"
+echo "data_folder: $data_folder"
+
+# Do the environment setup
+# check if PYTHONPATH is set to the current directory
+if [ -z "$PYTHONPATH" ]; then
+    echo "PYTHONPATH is not set. Setting it to the current directory"
+    export PYTHONPATH=$PWD
+else
+    echo "PYTHONPATH is set to '$PYTHONPATH'"
+fi
+
+# conda activate pdal-env-1
+
+# check if activated conda environment is the same as the one specified in the parameters
+if [ "$CONDA_DEFAULT_ENV" != "$CONDA_ENV" ]; then
+    echo "The activated conda environment is not the same as the one specified in the parameters."
+    echo "Please activate the correct conda environment and run the script again."
+    exit 1
+fi
+
+# if no input folder is provided, print a message and exit
+if [ -z "$data_folder" ]
+then
+    echo " "
+    echo "No input folder provided, please provide the input folder as a command line argument"
+    exit 1
+fi
+
+# clear input folder if CLEAR_INPUT_FOLDER is set to 1
+if [ $CLEAR_INPUT_FOLDER -eq 1 ]
+then
+    # delete all the files and folders except the ply, las and laz files in the input folder
+    echo "Clearing input folder"
+    find $data_folder/ -type f ! -name '*.ply' ! -name '*.las' ! -name '*.laz' -delete # delete all the files except the ply and las files
+    find $data_folder/* -type d -exec rm -rf {} + # delete all the folders in the input folder
+    echo "Removed all the files and folders except the ply and las files in the input folder"
+fi
+
+# check if there are las and laz files in the input folder
+count_las=`ls -1 $data_folder/*.las 2>/dev/null | wc -l`
+count_laz=`ls -1 $data_folder/*.laz 2>/dev/null | wc -l`
+
+count=$(($count_las + $count_laz))
+
+if [ $count != 0 ]; then
+    echo "$count las files found in the input folder good to go with!"
+else
+    echo "No las or laz files found in the input folder."
+    echo "All files in the input folder should have *.las or *.laz extension."
+    exit 1
+fi
+
+# do the conversion from laz to las if there are laz files in place (this is needed for metrics calculation)
+python nibio_preprocessing/convert_files_in_folder.py --input_folder $data_folder --output_folder $data_folder --out_file_type las --in_place --verbose
+
+# do the conversion to ply
+python nibio_preprocessing/convert_files_in_folder.py --input_folder $data_folder --output_folder $data_folder --out_file_type ply --verbose
+
+# clear input folder if CLEAR_INPUT_FOLDER is set to 1
+if [ $CLEAR_INPUT_FOLDER -eq 1 ]
+then
+    # delete all the files and folders except the ply and las files in the input folder
+    echo "Clearing input folder"
+    find $data_folder/ -type f ! -name '*.ply' ! -name '*.las' -delete # delete all the files except the ply and las files
+    find $data_folder/* -type d -exec rm -rf {} + # delete all the folders in the input folder
+    echo "Removed all the files and folders except the ply and las files in the input folder"
+fi
+
+# move the output of the first step to the input folder of the second step
+mkdir -p $data_folder/segmented_point_clouds
+
+# move all .ply files from the input folder to the segmented_point_clouds folder
+find $data_folder/ -type f -name '*.ply' -exec mv {} $data_folder/segmented_point_clouds/ \;
+
+# do the tiling and tile index generation
+echo "Tiling and tile index generation"
+python nibio_preprocessing/tiling.py \
+-i $data_folder/segmented_point_clouds/ \
+-o $data_folder/segmented_point_clouds/tiled \
+--tile_size 10
+
+# remove small tiles using nibio_preprocessing/remove_small_tiles.py
+for d in $data_folder/segmented_point_clouds/tiled/*; do
+    echo "Removing small tiles from $d"
+    python nibio_preprocessing/remove_small_tiles.py \
+    --dir $d \
+    --tile_index $d/tile_index.dat \
+    --min_density 75 \
+    --verbose
+done
+
+# iterate over all the directories in the tiled folder
+for d in $data_folder/segmented_point_clouds/tiled/*/; do
+    for f in $d/*.ply; do
+        echo "Processing $f file..."
+        python fsct/run.py \
+        --model /home/nibio/mutable-outside-world/code/gitlab_fsct/instance_segmentation_classic/fsct/model/model.pth \
+        --point-cloud $f \
+        --batch_size 10 \
+        --odir $d \
+        --verbose
+        # --tile-index $d/tile_index.dat \
+        # --buffer 2
+    done
+done
+
+# remove all the files in the tiled subfolders except the *segmented.ply and tile_index.dat files
+find $data_folder/segmented_point_clouds/tiled/*/ -type f ! -name '*segmented.ply' ! -name 'tile_index.dat' -delete # delete all the files except the segmented.ply files
+# delete all the folders in the tiled subfolders
+find $data_folder/segmented_point_clouds/tiled/*/* -type d -exec rm -rf {} +
+
+# merge the segmented point clouds
+echo "Merging the segmented point clouds"
+# iterate over all the directories in the tiled folder
+for d in $data_folder/segmented_point_clouds/tiled/*/; do
+    # get a base name of the directory
+    base_name=$(basename $d)
+    # create a name for the merged file
+    merged_file_name=$data_folder/segmented_point_clouds/$base_name.segmented.ply
+    python nibio_preprocessing/merging_and_labeling.py \
+    --data_folder $d \
+    --output_file $merged_file_name \
+    --only_merging
+done
+
+# rename all the segmented.ply files to .ply in the tiled subfolders
+for file in $data_folder/segmented_point_clouds/tiled/*/*; do
+    # skip if the file is not a ply file
+    if [[ $file != *.ply ]]; then
+        continue
+    fi
+    mv -- "$file" "${file%.segmented.ply}.ply"
+done
+
+# rename all the folders in the tiled folder to have a .segmented suffix
+for d in $data_folder/segmented_point_clouds/tiled/*; do
+    echo "Renaming $d to $d.segmented"
+    # append the .segmented suffix to the directory name
+    mv $d{,.segmented}
+done
+
+
+# create folder for the output of the second step
+
+mkdir -p $data_folder/instance_segmented_point_clouds
diff --git a/run_bash_scripts/sem_seg_sean.sh b/run_bash_scripts/sem_seg_sean.sh
new file mode 100755
index 0000000000000000000000000000000000000000..37327d22e00e3e92e4e4d7ab97ae25c8db55a01c
--- /dev/null
+++ b/run_bash_scripts/sem_seg_sean.sh
@@ -0,0 +1,210 @@
+#!/bin/bash
+
+############################ parameters #################################################
+# General parameters
+CLEAR_INPUT_FOLDER=1  # 1: clear input folder, 0: not clear input folder
+CONDA_ENV="pdal-env" # conda environment for running the pipeline
+
+# Parameters for the semantic segmentation
+data_folder="" # path to the folder containing the data
+checkpoint_model_path="./fsct/model/model.pth"
+batch_size=5 # batch size for the inference
+tile_size=10 # tile size in meters
+min_density=75 # minimum density of points in a tile (used for removing small tiles)
+remove_small_tiles=0 # 1: remove small tiles, 0: keep small tiles
+
+############################# end of parameters declaration ############################
+
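+# example invocation (hypothetical paths; flags match the getopts string below):
+#   ./run_bash_scripts/sem_seg_sean.sh -d ./maciek -c ./fsct/model/model.pth -b 5 -t 10 -m 75 -z 1
+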
+# extract the script parameters as command line arguments with the same default values
+# (the -z flag toggles removal of small tiles)
+
+while getopts "d:c:b:t:m:z:" opt; do
+  case $opt in
+    d) data_folder="$OPTARG"
+    ;;
+    c) checkpoint_model_path="$OPTARG"
+    ;;
+    b) batch_size="$OPTARG"
+    ;;
+    t) tile_size="$OPTARG"
+    ;;
+    m) min_density="$OPTARG"
+    ;;
+    z) remove_small_tiles="$OPTARG"
+    ;;
+    \?) echo "Invalid option -$OPTARG" >&2
+    ;;
+  esac
+done
+
+# print the letters to choose from in getopts
+echo "      The list of letters for the parameters:"
+echo "d: data_folder"
+echo "c: checkpoint_model_path"
+echo "b: batch_size"
+echo "t: tile_size"
+echo "m: min_density"
+echo "z: remove_small_tiles"
+
+# print values of the parameters
+echo "      The values of the parameters:"
+echo "data_folder: $data_folder"
+echo "checkpoint_model_path: $checkpoint_model_path"
+echo "batch_size: $batch_size"
+echo "tile_size: $tile_size"
+echo "min_density: $min_density"
+echo "remove_small_tiles: $remove_small_tiles"
+
+# Do the environment setup
+# check if PYTHONPATH is set to the current directory
+if [ -z "$PYTHONPATH" ]; then
+    echo "PYTHONPATH is not set. Setting it to the current directory"
+    export PYTHONPATH=$PWD
+else
+    echo "PYTHONPATH is set to '$PYTHONPATH'"
+fi
+
+# conda activate pdal-env
+
+# check if activated conda environment is the same as the one specified in the parameters
+if [ "$CONDA_DEFAULT_ENV" != "$CONDA_ENV" ]; then
+    echo "The activated conda environment is not the same as the one specified in the parameters."
+    echo "Please activate the correct conda environment and run the script again."
+    exit 1
+fi
+
+# if no input folder is provided, print a message and exit
+if [ -z "$data_folder" ]
+then
+    echo " "
+    echo "No input folder provided, please provide the input folder as a command line argument"
+    exit 1
+fi
+
+# clear input folder if CLEAR_INPUT_FOLDER is set to 1
+if [ $CLEAR_INPUT_FOLDER -eq 1 ]
+then
+    # delete all the files and folders except the ply, las and laz files in the input folder
+    echo "Clearing input folder"
+    find $data_folder/ -type f ! -name '*.ply' ! -name '*.las' ! -name '*.laz' -delete # delete all the files except the ply and las files
+    find $data_folder/* -type d -exec rm -rf {} + # delete all the folders in the input folder
+    echo "Removed all the files and folders except the ply and las files in the input folder"
+fi
+
+# check if there are las and laz files in the input folder
+count_las=`ls -1 $data_folder/*.las 2>/dev/null | wc -l`
+count_laz=`ls -1 $data_folder/*.laz 2>/dev/null | wc -l`
+
+count=$(($count_las + $count_laz))
+
+if [ $count != 0 ]; then
+    echo "$count las files found in the input folder good to go with!"
+else
+    echo "No las or laz files found in the input folder."
+    echo "All files in the input folder should have *.las or *.laz extension."
+    exit 1
+fi
+
+# do the conversion from laz to las if there are laz files in place (this is needed for metrics calculation)
+python nibio_preprocessing/convert_files_in_folder.py --input_folder $data_folder --output_folder $data_folder --out_file_type las --in_place --verbose
+
+# do the conversion to ply
+python nibio_preprocessing/convert_files_in_folder.py --input_folder $data_folder --output_folder $data_folder --out_file_type ply --verbose
+
+# clear input folder if CLEAR_INPUT_FOLDER is set to 1
+if [ $CLEAR_INPUT_FOLDER -eq 1 ]
+then
+    # delete all the files and folders except the ply and las files in the input folder
+    echo "Clearing input folder"
+    find $data_folder/ -type f ! -name '*.ply' ! -name '*.las' -delete # delete all the files except the ply and las files
+    find $data_folder/* -type d -exec rm -rf {} + # delete all the folders in the input folder
+    echo "Removed all the files and folders except the ply and las files in the input folder"
+fi
+
+# move the output of the first step to the input folder of the second step
+mkdir -p $data_folder/segmented_point_clouds
+
+# move all .ply files from the input folder to the segmented_point_clouds folder
+find $data_folder/ -type f -name '*.ply' -exec mv {} $data_folder/segmented_point_clouds/ \;
+
+# do the tiling and tile index generation
+echo "Tiling and tile index generation"
+python nibio_preprocessing/tiling.py \
+-i $data_folder/segmented_point_clouds/ \
+-o $data_folder/segmented_point_clouds/tiled \
+--tile_size $tile_size
+
+# remove small tiles using nibio_preprocessing/remove_small_tiles.py
+
+# only remove small tiles when the remove_small_tiles parameter is set to 1
+if  [ $remove_small_tiles -eq 1 ]
+then
+    # iterate over all the directories in the tiled folder
+    for d in $data_folder/segmented_point_clouds/tiled/*; do
+        echo "Removing small tiles from $d"
+        python nibio_preprocessing/remove_small_tiles.py \
+        --dir $d \
+        --tile_index $d/tile_index.dat \
+        --min_density $min_density \
+        --verbose
+    done
+fi
+
+# iterate over all the directories in the tiled folder
+for d in $data_folder/segmented_point_clouds/tiled/*/; do
+    for f in $d/*.ply; do
+        echo "Processing $f file..."
+        python sean_sem_seg/run_single_file.py \
+        --model $checkpoint_model_path \
+        --point-cloud $f \
+        --batch_size $batch_size \
+        --odir $d \
+        --verbose
+        # --tile-index $d/tile_index.dat \
+        # --buffer 2
+    done
+done
+
+# remove all the files in the tiled subfolders except the *segmented.ply and tile_index.dat files
+find $data_folder/segmented_point_clouds/tiled/*/ -type f ! -name '*segmented.ply' ! -name 'tile_index.dat' -delete # delete all the files except the segmented.ply files
+# delete all the folders in the tiled subfolders
+find $data_folder/segmented_point_clouds/tiled/*/* -type d -exec rm -rf {} +
+
+# merge the segmented point clouds
+echo "Merging the segmented point clouds"
+# iterate over all the directories in the tiled folder
+for d in $data_folder/segmented_point_clouds/tiled/*/; do
+    # get a base name of the directory
+    base_name=$(basename $d)
+    # create a name for the merged file
+    merged_file_name=$data_folder/segmented_point_clouds/$base_name.segmented.ply
+    python nibio_preprocessing/merging_and_labeling.py \
+    --data_folder $d \
+    --output_file $merged_file_name \
+    --only_merging
+done
+
+# rename all the segmented.ply files to .ply in the tiled subfolders
+for file in $data_folder/segmented_point_clouds/tiled/*/*; do
+    # skip if the file is not a ply file
+    if [[ $file != *.ply ]]; then
+        continue
+    fi
+    mv -- "$file" "${file%.segmented.ply}.ply"
+done
+
+# rename all the folders in the tiled folder to have a .segmented suffix
+for d in $data_folder/segmented_point_clouds/tiled/*; do
+    echo "Renaming $d to $d.segmented"
+    # append the .segmented suffix to the directory name
+    mv $d{,.segmented}
+done
+
+# create folder for the output of the second step
+
+mkdir -p $data_folder/instance_segmented_point_clouds
+
+echo "Semantic segmentation done."
diff --git a/run_bash_scripts/tls.sh b/run_bash_scripts/tls.sh
new file mode 100755
index 0000000000000000000000000000000000000000..2e3a19a031dc8cc54051261c80875524b2045e3d
--- /dev/null
+++ b/run_bash_scripts/tls.sh
@@ -0,0 +1,226 @@
+#!/bin/bash
+
+############################ parameters #################################################
+# General parameters
+CLEAR_INPUT_FOLDER=1  # 1: clear input folder, 0: not clear input folder
+CONDA_ENV="pdal-env" # conda environment for running the pipeline
+
+# Tiling parameters
+data_folder="" # path to the folder containing the data
+N_TILES=3
+SLICE_THICKNESS=0.5
+FIND_STEMS_HEIGHT=1.5
+FIND_STEMS_THICKNESS=0.5
+GRAPH_MAXIMUM_CUMULATIVE_GAP=3
+ADD_LEAVES_VOXEL_LENGTH=0.5
+FIND_STEMS_MIN_POINTS=50
+GRAPH_EDGE_LENGTH=1.0
+ADD_LEAVES_EDGE_LENGTH=1.0
+
+############################# end of parameters declaration ############################
+
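+# example invocation (hypothetical path; flags match the getopts string below):
+#   ./run_bash_scripts/tls.sh -d ./maciek -n 3 -s 0.5 -h 1.5 -t 0.5 -g 3 -l 0.5 -m 50 -o 1.0 -p 1.0
+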
+# extract the script parameters as command line arguments with the same default values
+while getopts "d:n:s:h:t:g:l:m:o:p:" opt; do
+  case $opt in
+    d) data_folder="$OPTARG"
+    ;;
+    n) N_TILES="$OPTARG"
+    ;;
+    s) SLICE_THICKNESS="$OPTARG"
+    ;;
+    h) FIND_STEMS_HEIGHT="$OPTARG"
+    ;;
+    t) FIND_STEMS_THICKNESS="$OPTARG"
+    ;;
+    g) GRAPH_MAXIMUM_CUMULATIVE_GAP="$OPTARG"
+    ;;
+    l) ADD_LEAVES_VOXEL_LENGTH="$OPTARG"
+    ;;
+    m) FIND_STEMS_MIN_POINTS="$OPTARG"
+    ;;
+    o) GRAPH_EDGE_LENGTH="$OPTARG"
+    ;;
+    p) ADD_LEAVES_EDGE_LENGTH="$OPTARG"
+    ;;
+    \?) echo "Invalid option -$OPTARG" >&2
+    ;;
+  esac
+done
+
+# print the letters to choose from in getopts
+echo "      The list of letters for the parameters:"
+echo "d: data_folder"
+echo "n: N_TILES"
+echo "s: SLICE_THICKNESS"
+echo "h: FIND_STEMS_HEIGHT"
+echo "t: FIND_STEMS_THICKNESS"
+echo "g: GRAPH_MAXIMUM_CUMULATIVE_GAP"
+echo "l: ADD_LEAVES_VOXEL_LENGTH"
+echo "m: FIND_STEMS_MIN_POINTS"
+echo "o: GRAPH_EDGE_LENGTH"
+echo "p: ADD_LEAVES_EDGE_LENGTH"
+
+echo " "
+# print values of the parameters 
+echo "      The values of the parameters:"
+echo "data_folder: $data_folder"
+echo "N_TILES: $N_TILES"
+echo "SLICE_THICKNESS: $SLICE_THICKNESS"
+echo "FIND_STEMS_HEIGHT: $FIND_STEMS_HEIGHT"
+echo "FIND_STEMS_THICKNESS: $FIND_STEMS_THICKNESS"
+echo "GRAPH_MAXIMUM_CUMULATIVE_GAP: $GRAPH_MAXIMUM_CUMULATIVE_GAP"
+echo "ADD_LEAVES_VOXEL_LENGTH: $ADD_LEAVES_VOXEL_LENGTH"
+echo "FIND_STEMS_MIN_POINTS: $FIND_STEMS_MIN_POINTS"
+echo "GRAPH_EDGE_LENGTH: $GRAPH_EDGE_LENGTH"
+echo "ADD_LEAVES_EDGE_LENGTH: $ADD_LEAVES_EDGE_LENGTH"
+
+
+# Do the environment setup
+# check if PYTHONPATH is set to the current directory
+if [ -z "$PYTHONPATH" ]; then
+    echo "PYTHONPATH is not set. Setting it to the current directory"
+    export PYTHONPATH=$PWD
+else
+    echo "PYTHONPATH is set to '$PYTHONPATH'"
+fi
+
+# conda activate pdal-env
+
+# check if activated conda environment is the same as the one specified in the parameters
+if [ "$CONDA_DEFAULT_ENV" != "$CONDA_ENV" ]; then
+    echo "The activated conda environment is not the same as the one specified in the parameters."
+    echo "Please activate the correct conda environment and run the script again."
+    exit 1
+fi
+
+# if no input folder is provided, print a message and exit
+if [ -z "$data_folder" ]
+then
+    echo " "
+    echo "No input folder provided, please provide the input folder as a command line argument"
+    exit 1
+fi
+
+# Do the instance segmentation, iterating over all the segmented point clouds
+for segmented_point_cloud in $data_folder/segmented_point_clouds/*.segmented.ply; do
+    # get the name of the segmented point cloud
+    segmented_point_cloud_name=$(basename $segmented_point_cloud)
+    # get the name of the segmented point cloud without the extension
+    segmented_point_cloud_name_no_ext="${segmented_point_cloud_name%.*}"
+    # create a directory for the instance segmented point clouds
+    mkdir -p $data_folder/instance_segmented_point_clouds/$segmented_point_cloud_name_no_ext
+    # iterate over all the tiles of the segmented point cloud
+    for tile in $data_folder/segmented_point_clouds/tiled/$segmented_point_cloud_name_no_ext/*.ply; do
+        # get the name of the tile
+        tile_name=$(basename $tile)
+        # get the name of the tile without the extension
+        tile_name_no_ext="${tile_name%.*}"
+        echo "Processing $tile"
+        # show the output folder
+        echo "Output folder: $data_folder/instance_segmented_point_clouds/$segmented_point_cloud_name_no_ext/$tile_name_no_ext"
+        python3 fsct/points2trees.py \
+        -t $tile \
+        --tindex $data_folder/segmented_point_clouds/tiled/$segmented_point_cloud_name_no_ext/tile_index.dat \
+        -o $data_folder/instance_segmented_point_clouds/$segmented_point_cloud_name_no_ext/$tile_name_no_ext \
+        --n-tiles $N_TILES \
+        --slice-thickness $SLICE_THICKNESS \
+        --find-stems-height $FIND_STEMS_HEIGHT \
+        --find-stems-thickness $FIND_STEMS_THICKNESS \
+        --pandarallel --verbose \
+        --add-leaves \
+        --add-leaves-voxel-length $ADD_LEAVES_VOXEL_LENGTH \
+        --graph-maximum-cumulative-gap $GRAPH_MAXIMUM_CUMULATIVE_GAP \
+        --save-diameter-class \
+        --ignore-missing-tiles \
+        --find-stems-min-points $FIND_STEMS_MIN_POINTS \
+        --graph-edge-length $GRAPH_EDGE_LENGTH \
+        --add-leaves-edge-length $ADD_LEAVES_EDGE_LENGTH 
+    done
+done
+
+# do merging of the instance segmented point clouds
+for instance_segmented_point_cloud in $data_folder/instance_segmented_point_clouds/*; do
+    python nibio_preprocessing/merging_and_labeling.py \
+    --data_folder $instance_segmented_point_cloud \
+    --output_file $instance_segmented_point_cloud/output_instance_segmented.ply
+done
+
+# create the results folder
+mkdir -p $data_folder/results
+
+# create the input data folder
+mkdir -p $data_folder/results/input_data
+
+# move input data (ply and las) to the input data folder
+find $data_folder/ -maxdepth 1 -type f -name '*.ply' -exec mv {} $data_folder/results/input_data/ \;
+find $data_folder/ -maxdepth 1 -type f -name '*.las' -exec mv {} $data_folder/results/input_data/ \;
+
+# create the segmented point clouds folder
+mkdir -p $data_folder/results/segmented_point_clouds
+
+# move segmented point clouds to the segmented point clouds folder
+find $data_folder/segmented_point_clouds/ -maxdepth 1 -type f -name '*segmented.ply' -exec mv {} $data_folder/results/segmented_point_clouds/ \;
+
+# create the instance segmented point clouds folder
+mkdir -p $data_folder/results/instance_segmented_point_clouds
+
+# iterate over all the instance segmented point clouds
+# move instance segmented point clouds to the instance segmented point clouds folder and rename them
+for instance_segmented_point_cloud in $data_folder/instance_segmented_point_clouds/*; do
+    # get the name of the instance segmented point cloud
+    instance_segmented_point_cloud_name=$(basename $instance_segmented_point_cloud)
+    # get the name of the instance segmented point cloud without the extension and add the suffix instance_segmented
+    instance_segmented_point_cloud_name_no_ext="${instance_segmented_point_cloud_name%.*}.instance_segmented"
+    # move the instance segmented point cloud to the instance segmented point clouds folder
+    find $instance_segmented_point_cloud/ -maxdepth 1 -type f -name '*.ply' -exec mv {} $data_folder/results/instance_segmented_point_clouds/$instance_segmented_point_cloud_name_no_ext.ply \;
+    # map the instance segmented point cloud to las file
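+    # (LAS point format 3 stores GPS time and RGB; extra_dims=all carries the
+    # extra per-point dimensions, e.g. the instance labels, into the las file)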
+    pdal translate \
+    $data_folder/results/instance_segmented_point_clouds/$instance_segmented_point_cloud_name_no_ext.ply \
+    $data_folder/results/instance_segmented_point_clouds/$instance_segmented_point_cloud_name_no_ext.las \
+    --writers.las.dataformat_id=3 \
+    --writers.las.extra_dims=all
+done
+
+# convert the segmented point clouds to *.segmented.las files
+for segmented_point_cloud_in_ply in $data_folder/results/segmented_point_clouds/*; do
+    # get the prefix of the point clouds
+    SEGMENTED_POINT_CLOUDS_PREFIX="segmented."
+    # get the ending of the point clouds
+    SEGMENTED_POINT_CLOUDS_EXTENSION="ply"
+    # get the name of the ply point cloud
+    segmented_point_cloud_in_ply_name=$(basename $segmented_point_cloud_in_ply)
+    # get the name of the las file without the leading prefix and the .ply extension
+    segmented_point_cloud_in_las_name_no_prefix_no_extension=${segmented_point_cloud_in_ply_name#$SEGMENTED_POINT_CLOUDS_PREFIX}
+    segmented_point_cloud_in_las_name_no_extension=${segmented_point_cloud_in_las_name_no_prefix_no_extension%.$SEGMENTED_POINT_CLOUDS_EXTENSION}
+    # convert it to las and move it to the segmented point clouds folder
+    pdal translate \
+    $segmented_point_cloud_in_ply \
+    $data_folder/results/segmented_point_clouds/$segmented_point_cloud_in_las_name_no_extension.las \
+    --writers.las.dataformat_id=3 \
+    --writers.las.extra_dims=all
+done
+
+# create the instance segmented point clouds with ground folder
+mkdir -p $data_folder/results/instance_segmented_point_clouds_with_ground
+
+# add the ground points back to the instance segmented point clouds
+python nibio_preprocessing/add_ground_to_inst_seg_folders.py \
+--sem_seg_folder $data_folder/results/segmented_point_clouds/ \
+--inst_seg_folder $data_folder/results/instance_segmented_point_clouds/ \
+--output_folder $data_folder/results/instance_segmented_point_clouds_with_ground \
+--verbose
+
+echo " "
+echo "Done"
+# print path to the results folder and the subfolders
+echo "Results can be found here: $data_folder/results"
+echo "Results containing the input point clouds can be found here:  $data_folder/results/input_data"
+echo "Results containing the segmented point clouds can be found here:  $data_folder/results/segmented_point_clouds"
+echo "Results containing the instance segmented point clouds can be found here:  $data_folder/results/instance_segmented_point_clouds"
+echo "Results containing the instance segmented point clouds with ground can be found here:  $data_folder/results/instance_segmented_point_clouds_with_ground"
+
diff --git a/run_oracle_wrapper.py b/run_oracle_wrapper.py
new file mode 100644
index 0000000000000000000000000000000000000000..550aabffe533773bb8b652440355cebd7bb4c729
--- /dev/null
+++ b/run_oracle_wrapper.py
@@ -0,0 +1,141 @@
+# This is the file to be run on the Oracle cloud
+
+import argparse
+import os
+import io
+import json
+import shutil
+import yaml
+from urllib.parse import urlparse
+from oci.config import validate_config
+from oci.object_storage import ObjectStorageClient
+
+
+def run_oracle_wrapper(path_to_config_file):
+    # read the JSON config file with the Oracle credentials
+    with open('login_oracle_config.json') as f:
+        config = json.load(f)
+
+    # validate the config file
+    validate_config(config)
+
+    # create the client
+    client = ObjectStorageClient(config)
+
+    # read the object storage locations from environment variables; .get() returns
+    # None when a variable is unset, so the default-location branches below are reachable
+    input_location = os.environ.get('OBJ_INPUT_LOCATION')
+    output_location = os.environ.get('OBJ_OUTPUT_LOCATION')
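+    # both locations are expected in a bucket@namespace/path form (e.g. the
+    # oci://<bucket>@<namespace>/<prefix> convention used by ocifs), which is
+    # what the urlparse-based splitting below assumes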
+
+    # doing for the input
+    if input_location is not None:
+        print('Taking the input from the location ' + input_location)
+        parsed_url = urlparse(input_location)
+        input_folder_in_bucket = parsed_url.path[1:]
+        input_bucket_name = parsed_url.netloc.split('@')[0]
+        input_namespace = parsed_url.netloc.split('@')[1]
+
+    else:
+        print('Taking the input from the default location')
+        # get the namespace of the tenancy
+        input_namespace = client.get_namespace().data
+        # get the bucket name
+        input_bucket_name = 'bucket_lidar_data'
+        # folder name inside the bucket
+        input_folder_in_bucket = 'geoslam'
+
+    # doing for the output
+    if output_location is not None:
+        print('Saving the output to the location ' + output_location)
+        parsed_url = urlparse(output_location)
+        output_folder_in_bucket = parsed_url.path[1:]
+        output_bucket_name = parsed_url.netloc.split('@')[0]
+        output_namespace = parsed_url.netloc.split('@')[1]
+
+    else:
+        print('Saving the output to the default location')
+        # get the namespace of the tenancy
+        output_namespace = client.get_namespace().data
+        # get the bucket name
+        output_bucket_name = 'bucket_lidar_data'
+        # folder name inside the bucket
+        output_folder_in_bucket = 'output'
+
+    # read the config file from config folder
+    with open(path_to_config_file) as f:
+        config_flow_params = yaml.load(f, Loader=yaml.FullLoader)
+
+    # copy all files from the bucket to the input folder
+    # get the list of objects in the bucket
+    objects = client.list_objects(input_namespace, input_bucket_name).data.objects
+
+    # create the input folder if it does not exist
+    if not os.path.exists(config_flow_params['general']['input_folder']):
+        os.mkdir(config_flow_params['general']['input_folder'])
+
+    # download the files from the bucket to the input folder
+    for item in objects:
+        if item.name.split('/')[0] == input_folder_in_bucket:
+            if not (item.name.split('/')[1] == ''):
+                object_name = item.name.split('/')[1]
+
+                print('Downloading the file ' + object_name + ' from the bucket ' + input_bucket_name)
+                path_to_object = os.path.join(input_folder_in_bucket, object_name)
+                # get the object
+                file = client.get_object(input_namespace, input_bucket_name, path_to_object)
+
+                # write the object to a file
+                with open(object_name, 'wb') as f:
+                    for chunk in file.data.raw.stream(1024 * 1024, decode_content=False):
+                        f.write(chunk)
+
+                # check if the file already exists in the input folder and delete it if it does
+                if os.path.exists(config_flow_params['general']['input_folder'] + '/' + object_name):
+                    os.remove(config_flow_params['general']['input_folder'] + '/' + object_name)
+                # move the file to the input folder and overwrite if it already exists
+                shutil.move(object_name, config_flow_params['general']['input_folder'])
+
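+    # local import of the pipeline entry point defined in run.py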
+    from run import main
+
+    # run the main function
+    main(path_to_config_file)
+
+    # if instance segmentation was run, take its output subfolder
+    if config_flow_params['general']['run_instance_segmentation']:
+        path_to_the_output_folder = os.path.join(config_flow_params['general']['output_folder'], 'instance_segmented_point_clouds')
+    else:
+        path_to_the_output_folder = config_flow_params['general']['output_folder']
+
+    # get list of files in the output folder
+    list_of_files = os.listdir(path_to_the_output_folder)
+
+    # save files to the output bucket 'bucket_lidar_data' in the subfolder 'output'
+    for file in list_of_files:
+        # get the full path of the file
+        path_to_file = path_to_the_output_folder + '/' + file
+
+        # get the file name
+        file_name = file
+
+        # upload the file to the bucket
+        client.put_object(
+            output_namespace, 
+            output_bucket_name, 
+            os.path.join(output_folder_in_bucket, file_name), 
+            io.open(path_to_file, 'rb')
+            )
+
+if __name__ == '__main__':
+    # use argparse to get the path to the config file
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--path_to_config_file", type=str, default="./config/config.yaml")
+    args = parser.parse_args()
+
+    # run the main function
+    print('Running the main function in run_oracle_wrapper.py')
+    run_oracle_wrapper(args.path_to_config_file)
+
diff --git a/sean_sem_seg/run_single_file.py b/sean_sem_seg/run_single_file.py
index 9db78d050e09e2de627928ae878ec2fc7a98a1da..7961f1acfe3ba5196306b68b7b72d3814d77223e 100644
--- a/sean_sem_seg/run_single_file.py
+++ b/sean_sem_seg/run_single_file.py
@@ -98,7 +98,9 @@ if __name__ == "__main__":
 
     # copy the output "segmented_cleaned.las" to the output directory
     
-    results_dir_name = args.point_cloud.split('.')[0] + '_FSCT_output'
+    dir_core_name = os.path.dirname(args.point_cloud)
+    file_name = os.path.basename(args.point_cloud).split('.')[0]
+    results_dir_name = os.path.join(dir_core_name, file_name + '_FSCT_output')
 
     print("Copying results to output directory.")
     shutil.copy(os.path.join(results_dir_name, "segmented_cleaned.las"), args.odir)
diff --git a/test_run.sh b/test_run.sh
deleted file mode 100755
index 7a04086726d71cce707a69102818c0db8940e623..0000000000000000000000000000000000000000
--- a/test_run.sh
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/bin/bash
-
-# run run_all_command_lines.sh with the following arguments:
-
-data_folder="/home/nibio/mutable-outside-world/code/gitlab_fsct/instance_segmentation_classic/sample_playground" 
-N_TILES=3
-
-SLICE_THICKNESS=0.382368454442735
-FIND_STEMS_HEIGHT=1.8948172056774
-FIND_STEMS_THICKNESS=0.9980435744231868
-GRAPH_MAXIMUM_CUMULATIVE_GAP=13.841583930676254
-ADD_LEAVES_VOXEL_LENGTH=0.19332721135500391
-FIND_STEMS_MIN_POINTS=495
-GRAPH_EDGE_LENGTH=0.5652008887940575
-ADD_LEAVES_EDGE_LENGTH=0.5622733957401558
-
-# get test data from the following link:
-bash ./bash_helper_scripts/get_terrestial_sem_seg_test.sh
-
-# run run_all_command_lines.sh with the following arguments:
-./run_all_command_line.sh -d $data_folder \
--n $N_TILES \
--s $SLICE_THICKNESS \
--h $FIND_STEMS_HEIGHT \
--t $FIND_STEMS_THICKNESS \
--g $GRAPH_MAXIMUM_CUMULATIVE_GAP \
--l $ADD_LEAVES_VOXEL_LENGTH \
--m $FIND_STEMS_MIN_POINTS \
--o $GRAPH_EDGE_LENGTH \
--p $ADD_LEAVES_EDGE_LENGTH
\ No newline at end of file