diff --git a/nibio_postprocessing/las_to_pandas.py b/nibio_postprocessing/las_to_pandas.py
new file mode 100644
index 0000000000000000000000000000000000000000..05ca58a38f87e7a518986e0183203269f7a9c63e
--- /dev/null
+++ b/nibio_postprocessing/las_to_pandas.py
@@ -0,0 +1,33 @@
+import numpy as np
+import pandas as pd
+import laspy
+
+def las_to_pandas(las_file_path, csv_file_path):
+    """
+    Reads a LAS file and converts it to a pandas dataframe, then saves it to a CSV file.
+
+    Args:
+        las_file_path (str): The path to the LAS file to be read.
+        csv_file_path (str): The path to the CSV file to be saved.
+
+    Returns:
+        None
+    """
+    file_content = laspy.read(las_file_path)
+
+    # Put x, y, z into a numpy array
+    basic_points = np.vstack((file_content.x, file_content.y, file_content.z)).T
+
+    # Fetch any extra dimensions
+    gt_extra_dimensions = list(file_content.point_format.extra_dimension_names)
+    extra_points = np.vstack([getattr(file_content, dim) for dim in gt_extra_dimensions]).T
+
+    # Combine basic and extra dimensions
+    all_points = np.hstack((basic_points, extra_points))
+
+    # Create dataframe
+    all_columns = ['x', 'y', 'z'] + gt_extra_dimensions
+    points_df = pd.DataFrame(all_points, columns=all_columns)
+
+    # Save pandas dataframe to csv
+    points_df.to_csv(csv_file_path, index=False, header=True, sep=',')
diff --git a/nibio_postprocessing/pandas_to_las.py b/nibio_postprocessing/pandas_to_las.py
new file mode 100644
index 0000000000000000000000000000000000000000..01d6077d0ca476300c5cf40f712e84fc67da8e6d
--- /dev/null
+++ b/nibio_postprocessing/pandas_to_las.py
@@ -0,0 +1,69 @@
+import laspy
+import pandas as pd
+import numpy as np
+
+
+def pandas_to_las(csv, las_file_path, csv_file_provided=True, verbose=False):
+    """
+    Convert a pandas DataFrame to a .las file.
+
+    Parameters
+    ----------
+    csv : pandas DataFrame or str
+        The DataFrame to be converted to a .las file.
+        If the csv_file_provided argument is True,
+        the csv argument is treated as the path to a .csv file instead.
+    las_file_path : str
+        The path to the .las file to be created.
+    csv_file_provided : bool, optional
+        If True, the csv argument is read from disk as a .csv file;
+        if False, it is used directly as a DataFrame.
+        The default is True.
+ """ + # Check if the csv_file_provided argument is provided + + if csv_file_provided: + df = pd.read_csv(csv) + else: + df = csv + + # Check if the DataFrame has the required columns + required_columns = ['x', 'y', 'z'] + for col in required_columns: + if col not in df.columns: + raise ValueError(f'Column {col} not found in {csv}') + + # Create a new .las file + las_header = laspy.LasHeader() + + # read all the colum names from the csv file + csv_columns = list(df.columns) + + if verbose: + print('csv_columns: {}'.format(csv_columns)) + + # get extra dimensions from target las file + gt_extra_dimensions = list(set(csv_columns) - set(required_columns)) + + # add extra dimensions to new las file + for item in gt_extra_dimensions: + las_header.add_extra_dim(laspy.ExtraBytesParams(name=item, type=np.int32)) + + outfile = laspy.LasData(las_header) + + # Assign coordinates + for col in required_columns: + outfile[col] = df[col].values + + # Assign extra dimensions + for col in gt_extra_dimensions: + outfile[col] = df[col].values + + # Write the file + outfile.write(las_file_path) + +# Test the function +# CSV_FILE = '/home/nibio/mutable-outside-world/code/gitlab_fsct/instance_segmentation_classic/maciek/first_cc.csv' +# NEW_LAS_FILE = '/home/nibio/mutable-outside-world/code/gitlab_fsct/instance_segmentation_classic/maciek/new_first.las' + +# pandas_to_las(CSV_FILE, NEW_LAS_FILE) diff --git a/nibio_preprocessing/distance_filtering_dem_based.py b/nibio_preprocessing/distance_filtering_dem_based.py index 224300bc00699a782dc246079e0fb367b6fefc68..38f829688799800adab8e5b0c7cd1da18d7d89bc 100644 --- a/nibio_preprocessing/distance_filtering_dem_based.py +++ b/nibio_preprocessing/distance_filtering_dem_based.py @@ -4,23 +4,25 @@ import argparse import laspy import numpy as np import pandas as pd - from scipy.interpolate import griddata from scipy.spatial import KDTree +from nibio_postprocessing import pandas_to_las + class DistanceFilteringDemBased(object): # MLS data : 1 - ground, 2 - vegetation, 3 - CWD, 4 - trunk GROUND_CLASS = 1 TARGET_CLASS = 2 - def __init__(self, las_file, distance, verbose=False): + def __init__(self, distance, input_las_file_path, output_las_file_path, verbose=False): # compute distance filtering based on DEM # remove points which are smaller than distance from the DEM # low vegetation - 0.01 m - self.point_cloud_file = las_file self.distance = distance + self.point_cloud_file = input_las_file_path + self.output_las_file_path = output_las_file_path self.verbose = verbose def read_las_and_put_to_pandas(self, las_file): @@ -109,45 +111,56 @@ class DistanceFilteringDemBased(object): return points_df - def save_las_file(self): - # save las file - pass + def save_las_file(self, points_df): + # save pandas dataframe to csv + pandas_to_las.pandas_to_las(points_df, self.output_las_file_path, csv_file_provided=False, verbose=self.verbose) def run(self): + if self.verbose: + print('distance: {}'.format(self.distance)) + print('point_cloud_file: {}'.format(self.point_cloud_file)) + print('output_las_file_path: {}'.format(self.output_las_file_path)) points_df, ground_points, target_points = self.read_las_and_put_to_pandas(las_file=self.point_cloud_file) - dem = self.compute_dem_for_ground(ground_points=ground_points) - # save pandas dataframe to csv - dem.to_csv('maciek/dem_from_class.csv', index=False, header=True, sep=',') + dem = self.compute_dem_for_ground(ground_points=ground_points) target_points_with_distances = self.compute_distance_between_dem_and_target(dem=dem.values, 
                                                                                     target_points=target_points)
-        # save pandas dataframe to csv
-        target_points_with_distances.to_csv('maciek/target_points_with_distances.csv', index=False, header=True, sep=',')
         filtered_points = self.filter_points(target_points_with_distances=target_points_with_distances)
-        # save pandas dataframe to csv
-        filtered_points.to_csv('maciek/filtered_points.csv', index=False, header=True, sep=',')
         points_df = self.update_las_file(points_df=points_df, target_points_with_distances=filtered_points)
-        # save pandas dataframe to csv
-        points_df.to_csv('maciek/updated_points.csv', index=False, header=True, sep=',')
+        self.save_las_file(points_df=points_df)
+
+    def __call__(self):
+        self.run()
 
 
 if __name__ == '__main__':
     # parse the arguments
     parser = argparse.ArgumentParser(description='Distance filtering based on DEM')
+    parser.add_argument('-d', '--distance', help='Distance in meters e.g. 0.5', default=0.5, required=False, type=float)
     parser.add_argument('-i', '--input', help='Input file', required=True)
-    parser.add_argument('-d', '--distance', help='Distance', default=0.01, required=False, type=float)
-    parser.add_argument('-v', '--verbose', help='Verbose', required=False)
+    parser.add_argument('-o', '--output', help='Output file', required=True)
+    parser.add_argument('-v', '--verbose', action='store_true', help="Print information about the process")
+
     args = vars(parser.parse_args())
 
     # get the arguments
-    LAS_FILE = args['input']
     DISTANCE = args['distance']
+    INPUT_LAS_FILE = args['input']
+    OUTPUT_LAS_FILE = args['output']
     VERBOSE = args['verbose']
 
-    # run the script
-    DistanceFilteringDemBased(LAS_FILE, DISTANCE, VERBOSE).run()
+    # run the distance filtering
+    DistanceFilteringDemBased(
+        distance=DISTANCE,
+        input_las_file_path=INPUT_LAS_FILE,
+        output_las_file_path=OUTPUT_LAS_FILE,
+        verbose=VERBOSE
+    )()
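
Usage sketch, assuming nibio_postprocessing is importable as a package and the script is run from the repository root; all file paths below are hypothetical placeholders.

    # Command line: drop points closer than 0.5 m to the DEM and write a new LAS file
    #   python nibio_preprocessing/distance_filtering_dem_based.py -i plot.las -o plot_filtered.las -d 0.5 -v

    # Python: round-trip a point cloud through the new helper modules
    from nibio_postprocessing.las_to_pandas import las_to_pandas
    from nibio_postprocessing.pandas_to_las import pandas_to_las

    las_to_pandas('plot.las', 'plot.csv')        # LAS -> CSV (x, y, z plus any extra dimensions)
    pandas_to_las('plot.csv', 'plot_copy.las')   # CSV -> LAS (csv_file_provided defaults to True)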