diff --git a/nibio_preprocessing/merging_and_labeling.py b/nibio_preprocessing/merging_and_labeling.py
index 498e479e433e6613a5dc6fa0fca3241ee92321ae..f452ac3c187b84a924a645b8e8631f4ea4812039 100644
--- a/nibio_preprocessing/merging_and_labeling.py
+++ b/nibio_preprocessing/merging_and_labeling.py
@@ -55,7 +55,7 @@ def remove_leafoff_files(data_folder):
                     os.remove(os.path.join(root, file))
 
 
-def merge_ply_files(data_folder, output_file):
+def merge_ply_files(data_folder, output_file='output_instance_segmented.ply'):
     """
         data_folder: the folder where the ply files are stored
     """
@@ -71,13 +71,21 @@ def merge_ply_files(data_folder, output_file):
 
     data["pipeline"].append({"type": "filters.merge", "inputs": tags})
 
-    data["pipeline"].append({"type":"writers.ply", "filename":os.path.join(data_folder, output_file)})
+    # the default output name is written into the data folder; a custom name is used as the full target path
+    if output_file == 'output_instance_segmented.ply':
+        output_path = os.path.join(data_folder, output_file)
+    else:
+        output_path = output_file
+    data["pipeline"].append({"type": "writers.ply", "filename": output_path})
 
-
-    print("Merging was done for {} number of files".format(len(tags)))
+    # report where the merged file is written and how many files were merged
+    logging.info("The merged file is saved in: {}".format(output_path))
+    logging.info("Merging was done for {} files".format(len(tags)))
 
     pipeline = pdal.Pipeline(json.dumps(data))
     pipeline.execute()
+
+    logging.info("Merging done")
 
 
 def main(data_folder, output_file="output_instance_segmented.ply"):
@@ -109,7 +117,16 @@ if __name__ == '__main__':
     parser = argparse.ArgumentParser(description='Label the instances in the ply files.')
     parser.add_argument('--data_folder', type=str, help='The folder where the ply files are stored')
     parser.add_argument('--output_file', help='The output file name.', default="output_instance_segmented.ply")
+    parser.add_argument('--only_merging', action='store_true', help='Only merge the ply files, skip the instance labeling step')
+
     args = parser.parse_args()
-    main(args.data_folder)
+
+    if args.only_merging:
+        print("Only merging the ply files")
+        merge_ply_files(args.data_folder, args.output_file)
+    else:
+        print("Labeling instances in ply files and merging them afterwards")
+        main(args.data_folder, args.output_file)
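+    # usage sketch (paths are hypothetical):
+    #   python nibio_preprocessing/merging_and_labeling.py \
+    #       --data_folder /path/to/tiled_plys \
+    #       --output_file /path/to/merged.ply \
+    #       --only_merging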
+
 
 
diff --git a/run_all.sh b/run_all.sh
index 815c468ed2a5c36405d71438d5328bb1c1c32d9d..a10d158c424849290dabfa656dc0519ca2bfa854 100755
--- a/run_all.sh
+++ b/run_all.sh
@@ -138,7 +138,9 @@ done
 
 # do merging of the instance segmented point clouds
 for instance_segmented_point_cloud in $data_folder/instance_segmented_point_clouds/*; do
-    python nibio_preprocessing/merging_and_labeling.py --data_folder $instance_segmented_point_cloud 
+    python nibio_preprocessing/merging_and_labeling.py \
+    --data_folder $instance_segmented_point_cloud \
+    --output_file $instance_segmented_point_cloud/output_instance_segmented.ply
 done
 
 # # create the results folder
diff --git a/run_all_fine_grained.sh b/run_all_fine_grained.sh
index a96e7f0044c65f1d9495fc624a662c67e57bd7e9..235d90091896b5ed5ce1f020236ce2bb8ddc162c 100755
--- a/run_all_fine_grained.sh
+++ b/run_all_fine_grained.sh
@@ -120,26 +120,126 @@ for d in $data_folder/segmented_point_clouds/tiled/*/; do
     done
 done 
 
+# remove all files in the tiled subfolders except the *segmented.ply and tile_index.dat files
+find $data_folder/segmented_point_clouds/tiled/*/ -type f ! -name '*segmented.ply' ! -name 'tile_index.dat' -delete
+# delete all remaining subdirectories in the tiled subfolders
+find $data_folder/segmented_point_clouds/tiled/*/* -type d -exec rm -rf {} +
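+# after this cleanup each tiled subfolder should contain only the per-tile *.segmented.ply files
+# and its tile_index.dat (the index is used later by points2trees.py via --tindex)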
 
-# # # iterate over all files in the input folder and do sematic segmentation
-# echo  "Starting semantic segmentation"
-# for file in $data_folder/*.ply; do
-#     # python fsct/run.py --point-cloud $file --batch_size 5 --odir $data_folder --model ./fsct/model/model.pth
-#     python fsct/run.py --point-cloud $file --batch_size 5 --odir $data_folder --verbose
-# done
+# merge the segmented point clouds, one merged file per tiled subfolder
+echo "Merging the segmented point clouds"
+# iterate over all the directories in the tiled folder
+for d in $data_folder/segmented_point_clouds/tiled/*/; do
+    # get a base name of the directory
+    base_name=$(basename $d)
+    # create a name for the merged file
+    merged_file_name=$data_folder/segmented_point_clouds/$base_name.segmented.ply
+    python nibio_preprocessing/merging_and_labeling.py \
+    --data_folder $d \
+    --output_file $merged_file_name \
+    --only_merging
+done
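+# e.g. (hypothetical plot name) tiled/plot_01/ is merged into segmented_point_clouds/plot_01.segmented.ply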
+
+# rename all the .segmented.ply files to .ply in the tiled subfolders
+for file in $data_folder/segmented_point_clouds/tiled/*/*; do
+    # skip anything that is not a segmented ply file
+    if [[ $file != *.segmented.ply ]]; then
+        continue
+    fi
+    mv -- "$file" "${file%.segmented.ply}.ply"
+done
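+# e.g. (hypothetical tile name) tiled/plot_01/000.segmented.ply becomes tiled/plot_01/000.ply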
 
-# # move the output of the first step to the input folder of the second step
-# mkdir -p $data_folder/segmented_point_clouds
+# rename all the folders in the tiled folder by appending a .segmented suffix
+for d in $data_folder/segmented_point_clouds/tiled/*; do
+    echo "Renaming $d to $d.segmented"
+    mv $d{,.segmented}
+done
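+# e.g. (hypothetical name) tiled/plot_01 becomes tiled/plot_01.segmented, so the folder name matches
+# the <name>.segmented base name that the instance segmentation loop below derives from the merged
+# <name>.segmented.ply files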
 
-# # move all .segmented.ply files to the segmented_point_clouds folder if they are in the input folder
-# find $data_folder/ -type f -name '*.segmented.ply' -exec mv {} $data_folder/segmented_point_clouds/ \;
 
-# # do the tiling and tile index generation
-# echo "Tiling and tile index generation"
-# python nibio_preprocessing/tiling.py -i $data_folder/segmented_point_clouds/ -o $data_folder/segmented_point_clouds/tiled
+# create folder for the output of the second step
+mkdir -p $data_folder/instance_segmented_point_clouds
+
+# do instance segmentation: iterate over all the segmented point clouds
+for segmented_point_cloud in $data_folder/segmented_point_clouds/*.segmented.ply; do
+    # get the name of the segmented point cloud
+    segmented_point_cloud_name=$(basename $segmented_point_cloud)
+    # get the name of the segmented point cloud without the extension
+    segmented_point_cloud_name_no_ext="${segmented_point_cloud_name%.*}"
+    # create a directory for the instance segmented point clouds
+    mkdir -p $data_folder/instance_segmented_point_clouds/$segmented_point_cloud_name_no_ext
+    # iterate over all the tiles of the segmented point cloud
+    for tile in $data_folder/segmented_point_clouds/tiled/$segmented_point_cloud_name_no_ext/*.ply; do
+        # get the name of the tile
+        tile_name=$(basename $tile)
+        # get the name of the tile without the extension
+        tile_name_no_ext="${tile_name%.*}"
+        echo "Processing $tile"
+        # show the output folder
+        echo "Output folder: $data_folder/instance_segmented_point_clouds/$segmented_point_cloud_name_no_ext/$tile_name_no_ext"
+        python3 fsct/points2trees.py \
+        -t $tile \
+        --tindex $data_folder/segmented_point_clouds/tiled/$segmented_point_cloud_name_no_ext/tile_index.dat \
+        -o $data_folder/instance_segmented_point_clouds/$segmented_point_cloud_name_no_ext/$tile_name_no_ext \
+        --n-tiles $N_TILES \
+        --slice-thickness $SLICE_THICKNESS \
+        --find-stems-height $FIND_STEMS_HEIGHT \
+        --find-stems-thickness $FIND_STEMS_THICKNESS \
+        --pandarallel --verbose \
+        --add-leaves \
+        --add-leaves-voxel-length $ADD_LEAVES_VOXEL_LENGTH \
+        --graph-maximum-cumulative-gap $GRAPH_MAXIMUM_CUMULATIVE_GAP \
+        --save-diameter-class \
+        --ignore-missing-tiles \
+        --find-stems-min-points $FIND_STEMS_MIN_POINTS
+    done
+done
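+# the per-tile outputs land under instance_segmented_point_clouds/<name>.segmented/ (one -o path per
+# tile); the merging step below combines everything in each of these folders into a single ply file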
 
-# # create folder for the output of the second step
+# do merging of the instance segmented point clouds
+for instance_segmented_point_cloud in $data_folder/instance_segmented_point_clouds/*; do
+    python nibio_preprocessing/merging_and_labeling.py \
+    --data_folder $instance_segmented_point_cloud \
+    --output_file $instance_segmented_point_cloud/output_instance_segmented.ply
+done
 
-# mkdir -p $data_folder/instance_segmented_point_clouds
+# create the results folder
+mkdir -p $data_folder/results
+
+# create the input data folder
+mkdir -p $data_folder/results/input_data
+
+# move input data (ply and las) to the input data folder
+find $data_folder/ -maxdepth 1 -type f -name '*.ply' -exec mv {} $data_folder/results/input_data/ \;
+find $data_folder/ -maxdepth 1 -type f -name '*.las' -exec mv {} $data_folder/results/input_data/ \;
+
+# create the segmented point clouds folder
+mkdir -p $data_folder/results/segmented_point_clouds
+
+# move segmented point clouds to the segmented point clouds folder
+find $data_folder/segmented_point_clouds/ -maxdepth 1 -type f -name '*segmented.ply' -exec mv {} $data_folder/results/segmented_point_clouds/ \;
+
+# create the instance segmented point clouds folder
+mkdir -p $data_folder/results/instance_segmented_point_clouds
+
+# move each instance segmented point cloud into the results folder, renamed after its source cloud
+for instance_segmented_point_cloud in $data_folder/instance_segmented_point_clouds/*; do
+    # get the name of the instance segmented point cloud
+    instance_segmented_point_cloud_name=$(basename $instance_segmented_point_cloud)
+    # get the name of the instance segmented point cloud without the extension
+    instance_segmented_point_cloud_name_no_ext="${instance_segmented_point_cloud_name%.*}"
+    # move the instance segmented point cloud to the instance segmented point clouds folder
+    find $instance_segmented_point_cloud/ -maxdepth 1 -type f -name '*.ply' -exec mv {} $data_folder/results/instance_segmented_point_clouds/$instance_segmented_point_cloud_name_no_ext.ply \;
+    # convert the instance segmented point cloud to a las file
+    pdal translate \
+    $data_folder/results/instance_segmented_point_clouds/$instance_segmented_point_cloud_name_no_ext.ply \
+    $data_folder/results/instance_segmented_point_clouds/$instance_segmented_point_cloud_name_no_ext.las \
+    --writers.las.dataformat_id=3 \
+    --writers.las.scale_x=0.01 \
+    --writers.las.scale_y=0.01 \
+    --writers.las.scale_z=0.01 \
+    --writers.las.extra_dims=all
+done
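+# notes on the pdal translate options: dataformat_id=3 selects a las point format with GPS time and
+# RGB, the 0.01 scales store coordinates at centimetre resolution, and extra_dims=all forwards the
+# non-standard ply dimensions (presumably the instance labels) into the las file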
 
-# echo "done  with the first step"
\ No newline at end of file
+echo "Done"
+echo "Results are in $data_folder/results"
\ No newline at end of file