diff --git a/nibio_preprocessing/density_filtering.py b/nibio_preprocessing/density_filtering.py
index d837bb3cd4954982f9da97a85c5af46572681117..0df4d244102b457661c3b3d46f5515cc6b1c3c6a 100644
--- a/nibio_preprocessing/density_filtering.py
+++ b/nibio_preprocessing/density_filtering.py
@@ -6,6 +6,16 @@ import glob
 import os
 from pathlib import Path
 
+
+"""
+The pdal density command computes the point density across the point cloud.
+Regions whose density falls below the specified min_density threshold are
+flagged. The script then generates a density map and stores it as a shapefile (.shp) on disk.
+
+The density map is later used to filter the point cloud, essentially
+removing regions whose density is below the specified min_density threshold.
+"""
+
 class DensityFiltering:
     def __init__(self, path_data, min_density=1, count_threshold=15000, buffer_size=0.01, verbose=False):
         self.path_data = Path(path_data)
diff --git a/nibio_preprocessing/shrink_point_clound.py b/nibio_preprocessing/shrink_point_clound.py
index 4dd77e61060f38779eca283c77ba7ea7d66a4523..83758ab9274a5bb789b4e73b7eef94d233339d75 100644
--- a/nibio_preprocessing/shrink_point_clound.py
+++ b/nibio_preprocessing/shrink_point_clound.py
@@ -6,6 +6,12 @@ import laspy # requires laspy==2.1.2 (check if doesn't work)
 import numpy as np
 from tqdm import tqdm
 
+"""
+The script removes points that are close to the boundaries of the point cloud
+in each axis (x, y, and optionally z). Specifically, it filters points based on
+a threshold calculated from the minimum and maximum coordinates along each axis.
+"""
+
 
 class ShrinkPointCloud():
     def __init__(self, input_folder, output_folder, treshold=0.9, verbose=False):
diff --git a/nibio_preprocessing/sparsify_las_based_sq_m.py b/nibio_preprocessing/sparsify_las_based_sq_m.py
new file mode 100644
index 0000000000000000000000000000000000000000..86fa4dc84f6f8440c28c4c02ad39d97c1776dc61
--- /dev/null
+++ b/nibio_preprocessing/sparsify_las_based_sq_m.py
@@ -0,0 +1,81 @@
+import argparse
+import logging
+import os
+import random
+
+import laspy
+import numpy as np
+from scipy.spatial import ConvexHull
+
+
+class SparsifyPointCloudToDensity:
+    """Randomly thin a .las point cloud down to a target point density.
+
+    Density (points per square meter) is estimated from the area of the
+    2D convex hull of the points projected onto the XY plane.
+    """
+
+    def __init__(self, input_file, output_folder=None, target_density=10, verbose=False):
+        self.input_file = input_file
+        # Fall back to the input file's directory only when no folder is given.
+        # (Previously self.output_folder was left unset when a folder WAS given,
+        # which crashed process() with an AttributeError.)
+        self.output_folder = output_folder if output_folder is not None else os.path.dirname(input_file)
+        self.target_density = target_density
+        self.verbose = verbose
+        if self.verbose:
+            logging.info(f"Initialized with input file: {self.input_file}, target density: {self.target_density}")
+
+    def calculate_density_convex_hull(self, las):
+        """Return points per square meter over the XY convex hull of `las`."""
+        points_2D = np.vstack((las.x, las.y)).transpose()
+        hull = ConvexHull(points_2D)
+        # For a 2D hull, scipy's `volume` attribute is the enclosed area
+        # (`area` would be the perimeter).
+        area = hull.volume
+        return len(points_2D) / area
+
+    def sparsify(self, point_cloud):
+        """Randomly subsample `point_cloud` to roughly target_density pts/m^2."""
+        density = self.calculate_density_convex_hull(point_cloud)
+        if self.verbose:
+            logging.info(f"Point cloud density: {density} points per square meter.")
+        total = len(point_cloud.x)
+        # Clamp so we never ask random.sample for more points than exist
+        # (i.e. the cloud is already sparser than the target density).
+        keep_count = min(total, int(total * (self.target_density / density)))
+        sampled_indices = random.sample(range(total), keep_count)
+        filtered_point_cloud = point_cloud.points[sampled_indices]
+        if self.verbose:
+            logging.info(f"Reduced point cloud size from {total} to {len(filtered_point_cloud)} points.")
+            logging.info(f"Reduced point cloud by {(1 - len(filtered_point_cloud) / total) * 100}%.")
+            density = round(self.calculate_density_convex_hull(filtered_point_cloud), 2)
+            logging.info(f"Filtered point cloud density: {density} points per square meter.")
+        return filtered_point_cloud
+
+    def process(self):
+        """Read the input file, sparsify it, and write the result to disk."""
+        if not os.path.exists(self.output_folder):
+            os.makedirs(self.output_folder)
+        inFile = laspy.read(self.input_file)
+        filtered_points = self.sparsify(inFile)
+        if self.verbose:
+            logging.info("Creating output laspy object")
+        outFile = laspy.create(point_format=inFile.point_format, file_version=inFile.header.version)
+        outFile.header = inFile.header
+        outFile.points = filtered_points
+        # Same base name as the input, suffixed with _sparse and the requested density.
+        output_file_path = os.path.join(self.output_folder, os.path.basename(self.input_file).replace(".las", f"_sparse_{self.target_density}.las"))
+        outFile.write(output_file_path)
+
+
+if __name__ == "__main__":
+    logging.basicConfig(level=logging.INFO)
+    parser = argparse.ArgumentParser()
+    parser.add_argument("-i", "--input_file", help="The .las file to sparsify.")
+    parser.add_argument("--output_folder", default=None, help="The folder where the sparse point cloud will be saved.")
+    parser.add_argument("-d", "--target_density", help="Target density of points per square meter.", default=10, type=int)
+    parser.add_argument("-v", "--verbose", help="Enable verbose logging", action="store_true")
+    args = parser.parse_args()
+
+    sparsifier = SparsifyPointCloudToDensity(args.input_file, args.output_folder, args.target_density, args.verbose)
+    sparsifier.process()
diff --git a/nibio_preprocessing/sparsify_las_file.py b/nibio_preprocessing/sparsify_las_file.py
new file mode 100644
index 0000000000000000000000000000000000000000..cc31b7dbe7734cd758ba9897ff59a42d8c52ca7c
--- /dev/null
+++ b/nibio_preprocessing/sparsify_las_file.py
@@ -0,0 +1,73 @@
+import argparse
+import os
+import random
+
+import laspy
+
+
+class SparsifyPointCloud:
+    """Randomly keep a fixed percentage of the points in a .las file."""
+
+    def __init__(self, input_file, output_folder=None, keep_percentage=50.0):
+        self.input_file = input_file
+        # Fall back to the input file's directory only when no folder is given.
+        # (Previously self.output_folder was left unset when a folder WAS given,
+        # which crashed process() with an AttributeError.)
+        self.output_folder = output_folder if output_folder is not None else os.path.dirname(input_file)
+        self.keep_percentage = keep_percentage
+
+    def sparsify(self, point_cloud):
+        """Return a random subset holding ~keep_percentage% of the points."""
+        original_count = len(point_cloud.points)
+        if original_count == 0:
+            print("Point cloud is empty; nothing to sparsify.")
+            return point_cloud.points
+        # Clamp to [0, original_count] so out-of-range percentages
+        # cannot make random.sample raise ValueError.
+        keep_count = min(original_count, max(0, int(original_count * (self.keep_percentage / 100.0))))
+
+        # Randomly sample points
+        sampled_indices = random.sample(range(original_count), keep_count)
+        sparse_point_cloud = point_cloud.points[sampled_indices]
+
+        print(f"Reduced point cloud from {original_count} to {len(sparse_point_cloud)} points.")
+        print(f"Reduced point cloud by {(1 - len(sparse_point_cloud) / original_count) * 100}%.")
+
+        return sparse_point_cloud
+
+    def process(self):
+        """Read the input .las file, sparsify it, and write the result."""
+        # Create output folder if it doesn't exist
+        if not os.path.exists(self.output_folder):
+            os.makedirs(self.output_folder)
+
+        # Read the input .las file
+        inFile = laspy.read(self.input_file)
+
+        # Sparsify the point cloud
+        sparse_point_cloud = self.sparsify(inFile)
+
+        # Create an empty las file and copy the header from the original las file
+        outFile = laspy.create(point_format=inFile.header.point_format, file_version=inFile.header.version)
+        outFile.header = inFile.header
+
+        # Update point count in the header
+        outFile.header.point_count = len(sparse_point_cloud)
+
+        # Assign the points to the output file
+        outFile.points = sparse_point_cloud
+
+        # Write the sparse point cloud to a new .las file
+        output_file_path = os.path.join(self.output_folder, os.path.basename(self.input_file).replace(".las", "_sparse.las"))
+        outFile.write(output_file_path)
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser()
+    parser.add_argument("-i", "--input_file", help="The .las file to sparsify.")
+    parser.add_argument("--output_folder", default=None, help="The folder where the sparse point cloud will be saved.")
+    parser.add_argument("-d", "--keep_percentage", help="The percentage of points to keep.", default=50.0, type=float)
+
+    args = parser.parse_args()
+
+    sparsifier = SparsifyPointCloud(args.input_file, args.output_folder, args.keep_percentage)
+    sparsifier.process()