diff --git a/nibio_preprocessing/density_filtering.py b/nibio_preprocessing/density_filtering.py
new file mode 100644
index 0000000000000000000000000000000000000000..f2950ebfbd91ac73a793d291a6f8ba905f796090
--- /dev/null
+++ b/nibio_preprocessing/density_filtering.py
@@ -0,0 +1,109 @@
+import geopandas as gpd
+import pdal
+import json
+import subprocess
+import glob
+import os
+from pathlib import Path
+
+# filter the density of the point cloud using pdal
+# it does it only for a single file
+
class DensityFiltering:
    """Density-filter a single point cloud file with PDAL.

    Workflow (see `process`): run `pdal density` to get a per-cell boundary
    shapefile, keep only cells with more than `count_threshold` points,
    dissolve them into one buffered polygon, then clip the original cloud
    to that polygon via PDAL's overlay filter.
    """

    def __init__(self, path_data, min_density, count_threshold, buffer_size, verbose=False):
        """
        Args:
            path_data: path to the input point cloud (.las) file.
            min_density: threshold passed to `pdal density --threshold`.
            count_threshold: minimum per-cell point COUNT kept when filtering
                the density boundary shapefile.
            buffer_size: buffer distance applied to the dissolved boundary
                (in the shapefile's CRS units — TODO confirm with callers).
            verbose: print progress messages when True.
        """
        self.path_data = Path(path_data)
        # Output directory: the input path with its extension stripped.
        # splitext (rather than split('.')[0]) keeps filenames that contain
        # extra dots intact, e.g. "plot.1.las" -> "plot.1".
        self.path_data_out = os.path.join(
            os.path.dirname(str(path_data)),
            os.path.splitext(os.path.basename(str(path_data)))[0],
        )
        self.min_density = min_density
        self.count_threshold = count_threshold
        self.buffer_size = buffer_size
        self.verbose = verbose
        # Path to the boundary .shp produced by compute_density();
        # None until compute_density() has run.
        self.mls_boundary_path = None

    def compute_density(self):
        """Run `pdal density` and record the resulting boundary shapefile path.

        Raises:
            Exception: if the input file is missing, the pdal command fails,
                or no shapefile was produced.
        """
        if not os.path.exists(self.path_data):
            raise Exception(f"File {self.path_data} does not exist.")
        # Argument list with shell=False: immune to spaces/metacharacters in
        # the input path (the shell-string form was injection-prone).
        cmd_density = [
            "pdal", "density",
            str(self.path_data),
            self.path_data_out,
            "--threshold", str(self.min_density),
        ]
        return_code = subprocess.call(cmd_density)
        if return_code != 0:
            raise Exception(f"'pdal density' failed with exit code {return_code}.")
        shapefiles = glob.glob(os.path.join(self.path_data_out, '*.shp'))
        if not shapefiles:
            # Previously this was an unguarded [0] -> opaque IndexError.
            raise Exception(f"No boundary shapefile found in {self.path_data_out}.")
        self.mls_boundary_path = shapefiles[0]

    def filter_and_dissolve_boundary_shapefile(self):
        """Filter low-count cells, dissolve the rest, and rewrite the shapefile.

        Returns:
            The dissolved GeoDataFrame (with CLASS=30), or None when no cell
            exceeds `count_threshold`.
        """
        if self.mls_boundary_path is None or not os.path.exists(self.mls_boundary_path):
            raise Exception(f"File {self.mls_boundary_path} does not exist.")

        gdf = gpd.read_file(self.mls_boundary_path)
        filtered = gdf[gdf['COUNT'] > self.count_threshold]
        if filtered.empty:
            print("The filtered shapefile is empty. No clipping will be done.")
            return None
        dissolved = filtered.buffer(self.buffer_size).unary_union
        # Carry over the source CRS: writing the shapefile without a CRS
        # would desynchronize it from the point cloud in filters.overlay.
        dissolved_gdf = gpd.GeoDataFrame(geometry=[dissolved], crs=gdf.crs)
        # CLASS=30 is the marker value filters.overlay writes into
        # Classification, matched by filters.range below.
        dissolved_gdf['CLASS'] = 30
        dissolved_gdf.to_file(self.mls_boundary_path)
        return dissolved_gdf

    def clip_point_cloud(self):
        """Clip the input cloud to the dissolved boundary via a PDAL pipeline."""
        mls_path_clipped = os.path.join(self.path_data_out, "_clipped.las")

        pipeline_clipping_MLS = [
            str(self.path_data),
            {
                # Stamp Classification with CLASS (30) for points inside
                # the boundary polygon.
                "type": "filters.overlay",
                "dimension": "Classification",
                "datasource": self.mls_boundary_path,
                "column": "CLASS"
            },
            {
                # Keep only the stamped points.
                "type": "filters.range",
                "limits": "Classification[30:30]"
            },
            {
                # Writer stage: PDAL infers writers.las from the extension.
                "filename": mls_path_clipped
            }
        ]

        pipeline = pdal.Pipeline(json.dumps(pipeline_clipping_MLS))
        pipeline.execute()

    def process(self):
        """Run the full pipeline for this file.

        Returns:
            Path of the clipped .las file, or None if nothing passed the
            density filter (no clipping performed).
        """
        self.compute_density()
        if self.mls_boundary_path is None:
            raise Exception("self.mls_boundary_path is None. Check if self.compute_density() was called.")
        clipped_path = os.path.join(self.path_data_out, '_clipped.las')
        result = self.filter_and_dissolve_boundary_shapefile()
        if result is not None:
            self.clip_point_cloud()
            if self.verbose:
                print(f"File {self.path_data} has been processed.")
                print(f"Clipped file is in {clipped_path}")
            return clipped_path
        else:
            if self.verbose:
                print(f"File {self.path_data} has not been processed.")
                print(f"No clipping was done because the filtered shapefile is empty. This means that the number of points in the file is less than {self.count_threshold}.")
            return None
+     
if __name__ == '__main__':
    # Command-line entry point: density-filter a single point cloud file.
    import argparse

    cli = argparse.ArgumentParser()
    for flag, value_type in (
        ('--path_data', str),
        ('--min_density', int),
        ('--count_threshold', int),
        ('--buffer_size', float),
    ):
        cli.add_argument(flag, type=value_type, required=True)
    cli.add_argument('--verbose', action='store_true')
    opts = cli.parse_args()

    DensityFiltering(
        opts.path_data,
        opts.min_density,
        opts.count_threshold,
        opts.buffer_size,
        opts.verbose,
    ).process()

    # Example invocation:
    # python density_filtering.py \
    #   --path_data /home/nibio/mutable-outside-world/code/gitlab_fsct/instance_segmentation_classic/new_density_data/first.las \
    #   --min_density 1 \
    #   --count_threshold 15000 --buffer_size 0.01
\ No newline at end of file
diff --git a/nibio_preprocessing/density_filtering_in_folders.py b/nibio_preprocessing/density_filtering_in_folders.py
new file mode 100644
index 0000000000000000000000000000000000000000..a4506cf7bbc241234b2a750c69ab4f1c3d11866d
--- /dev/null
+++ b/nibio_preprocessing/density_filtering_in_folders.py
@@ -0,0 +1,56 @@
+import glob
+import os
+import argparse
+import shutil
+from nibio_preprocessing.density_filtering import DensityFiltering
+
class DensityFilteringInFolders:
    """Apply DensityFiltering to every .las file in a folder, in place.

    Each successfully filtered cloud replaces its original file.
    """

    def __init__(self, folder, min_density, count_threshold, buffer_size, verbose=False):
        """
        Args:
            folder: directory containing the .las files to filter.
            min_density: forwarded to DensityFiltering.
            count_threshold: forwarded to DensityFiltering.
            buffer_size: forwarded to DensityFiltering.
            verbose: forwarded to DensityFiltering.
        """
        self.input_folder = folder
        self.min_density = min_density
        self.count_threshold = count_threshold
        self.buffer_size = buffer_size
        self.verbose = verbose

    def filter_density_in_folder(self):
        """Run DensityFiltering on every .las file in the input folder."""
        files = glob.glob(os.path.join(self.input_folder, '*.las'))
        for file in files:
            print(f"Processing file {file}")
            density_filtering = DensityFiltering(
                file, self.min_density, self.count_threshold, self.buffer_size, self.verbose)
            # process() returns the clipped file's path, or None when the
            # density filter left nothing to clip.
            result = density_filtering.process()

            if result is not None:
                # Replace the original file with the clipped result.
                # Using the returned path (and the glob path as destination)
                # avoids re-deriving paths, which previously double-joined
                # the input folder and pointed at a nonexistent subdirectory.
                shutil.move(result, file)
+            
+if __name__ == "__main__":
+    # parse the arguments
+    parser = argparse.ArgumentParser(description='Filter the density of the point cloud in a folder.')
+    parser.add_argument('--input_folder', type=str, required=True, help='The folder containing the point clouds to be filtered.')
+    parser.add_argument('--min_density', type=int, required=True, help='The minimum density of the point cloud.')
+    parser.add_argument('--count_threshold', type=int, required=True, help='The minimum number of points in the filtered point cloud.')
+    parser.add_argument('--buffer_size', type=float, required=True, help='The buffer size for the boundary shapefile.')
+    parser.add_argument('--verbose', action='store_true')
+    args = parser.parse_args()
+
+    # run the density filtering
+    density_filtering_in_folders = DensityFilteringInFolders(
+        args.input_folder, 
+        args.min_density, 
+        args.count_threshold, 
+        args.buffer_size, 
+        args.verbose
+        )
+    
+    density_filtering_in_folders.filter_density_in_folder()
+
+    # a simple way to test the code
+    # python density_filtering_in_folders.py --input_folder /home/nibio/mutable-outside-world/code/gitlab_fsct/instance_segmentation_classic/check_maciek/ --min_density 1 --count_threshold 15000 --buffer_size 0.01 --verbose 
+        
+
+
+