Commit df38aa13 authored by Maciej Wielgosz
Removing low vegetation, along with mapping from LAS to CSV and vice versa, is implemented.

parent 0f2311a6
import numpy as np
import pandas as pd
import laspy


def las_to_pandas(las_file_path, csv_file_path):
    """
    Reads a LAS file and converts it to a pandas dataframe, then saves it to a CSV file.

    Args:
        las_file_path (str): The path to the LAS file to be read.
        csv_file_path (str): The path to the CSV file to be saved.

    Returns:
        None
    """
    file_content = laspy.read(las_file_path)

    # Put x, y, z into a numpy array
    basic_points = np.vstack((file_content.x, file_content.y, file_content.z)).T

    # Fetch any extra dimensions (e.g. label)
    gt_extra_dimensions = list(file_content.point_format.extra_dimension_names)
    extra_points = np.vstack([getattr(file_content, dim) for dim in gt_extra_dimensions]).T

    # Combine basic and extra dimensions
    all_points = np.hstack((basic_points, extra_points))

    # Create dataframe
    all_columns = ['x', 'y', 'z'] + gt_extra_dimensions
    points_df = pd.DataFrame(all_points, columns=all_columns)

    # Save pandas dataframe to csv
    points_df.to_csv(csv_file_path, index=False, header=True, sep=',')
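# Example usage (a minimal sketch; the file names below are hypothetical and only
# illustrate the expected call pattern, they do not exist in this repository):
# las_to_pandas('plot_01.las', 'plot_01.csv')
# The resulting CSV contains x, y, z plus any extra dimensions of the LAS file as columns.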
import laspy
import pandas as pd
import numpy as np
def pandas_to_las(csv, las_file_path, csv_file_provided=True, verbose=False):
    """
    Convert a pandas DataFrame (or a CSV file) to a .las file.

    Parameters
    ----------
    csv : pandas.DataFrame or str
        The DataFrame to be converted to a .las file. If csv_file_provided
        is True, this argument is instead treated as the path to a .csv file.
    las_file_path : str
        The path to the .las file to be created.
    csv_file_provided : bool, optional
        If True (the default), csv is read as a .csv file path; if False,
        csv is used directly as a DataFrame.
    verbose : bool, optional
        If True, print the column names found in the input. The default is False.
    """
    # Load the data either from a csv file or directly from the DataFrame
    if csv_file_provided:
        df = pd.read_csv(csv)
    else:
        df = csv

    # Check if the DataFrame has the required columns
    required_columns = ['x', 'y', 'z']
    for col in required_columns:
        if col not in df.columns:
            raise ValueError(f'Column {col} not found in {csv}')

    # Create a new .las file header
    las_header = laspy.LasHeader()

    # Read all the column names from the input
    csv_columns = list(df.columns)
    if verbose:
        print('csv_columns: {}'.format(csv_columns))

    # Get extra dimensions beyond x, y, z
    gt_extra_dimensions = list(set(csv_columns) - set(required_columns))

    # Add extra dimensions to the new las file (stored as 32-bit integers)
    for item in gt_extra_dimensions:
        las_header.add_extra_dim(laspy.ExtraBytesParams(name=item, type=np.int32))

    outfile = laspy.LasData(las_header)

    # Assign coordinates
    for col in required_columns:
        outfile[col] = df[col].values

    # Assign extra dimensions
    for col in gt_extra_dimensions:
        outfile[col] = df[col].values

    # Write the file
    outfile.write(las_file_path)
# Test the function
# CSV_FILE = '/home/nibio/mutable-outside-world/code/gitlab_fsct/instance_segmentation_classic/maciek/first_cc.csv'
# NEW_LAS_FILE = '/home/nibio/mutable-outside-world/code/gitlab_fsct/instance_segmentation_classic/maciek/new_first.las'
# pandas_to_las(CSV_FILE, NEW_LAS_FILE)
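# A minimal sketch of the in-memory path, assuming a DataFrame is already available;
# the column values below are made up for illustration:
# df = pd.DataFrame({'x': [0.0], 'y': [0.0], 'z': [0.0], 'label': [2]})
# pandas_to_las(df, NEW_LAS_FILE, csv_file_provided=False, verbose=True)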
@@ -4,23 +4,25 @@ import argparse
 import laspy
 import numpy as np
 import pandas as pd
 from scipy.interpolate import griddata
 from scipy.spatial import KDTree
+from nibio_postprocessing import pandas_to_las

 class DistanceFilteringDemBased(object):
     # MLS data : 1 - ground, 2 - vegetation, 3 - CWD, 4 - trunk
     GROUND_CLASS = 1
     TARGET_CLASS = 2

-    def __init__(self, las_file, distance, verbose=False):
+    def __init__(self, distance, input_las_file_path, output_las_file_path, verbose=False):
         # compute distance filtering based on DEM
         # remove points which are smaller than distance from the DEM
         # low vegetation - 0.01 m
-        self.point_cloud_file = las_file
         self.distance = distance
+        self.point_cloud_file = input_las_file_path
+        self.output_las_file_path = output_las_file_path
         self.verbose = verbose

     def read_las_and_put_to_pandas(self, las_file):
@@ -109,45 +111,56 @@ class DistanceFilteringDemBased(object):
         return points_df

-    def save_las_file(self):
-        # save las file
-        pass
+    def save_las_file(self, points_df):
+        # save pandas dataframe to csv
+        pandas_to_las.pandas_to_las(points_df, self.output_las_file_path, csv_file_provided=False, verbose=self.verbose)

     def run(self):
+        if self.verbose:
+            print('distance: {}'.format(self.distance))
+            print('point_cloud_file: {}'.format(self.point_cloud_file))
+            print('output_las_file_path: {}'.format(self.output_las_file_path))
+
         points_df, ground_points, target_points = self.read_las_and_put_to_pandas(las_file=self.point_cloud_file)

         dem = self.compute_dem_for_ground(ground_points=ground_points)
-        # save pandas dataframe to csv
-        dem.to_csv('maciek/dem_from_class.csv', index=False, header=True, sep=',')

         target_points_with_distances = self.compute_distance_between_dem_and_target(dem=dem.values, target_points=target_points)
-        # save pandas dataframe to csv
-        target_points_with_distances.to_csv('maciek/target_points_with_distances.csv', index=False, header=True, sep=',')

         filtered_points = self.filter_points(target_points_with_distances=target_points_with_distances)
-        # save pandas dataframe to csv
-        filtered_points.to_csv('maciek/filtered_points.csv', index=False, header=True, sep=',')

         points_df = self.update_las_file(points_df=points_df, target_points_with_distances=filtered_points)
-        # save pandas dataframe to csv
-        points_df.to_csv('maciek/updated_points.csv', index=False, header=True, sep=',')
+
+        self.save_las_file(points_df=points_df)
+
+    def __call__(self):
+        self.run()

 if __name__ == '__main__':
     # parse the arguments
     parser = argparse.ArgumentParser(description='Distance filtering based on DEM')
+    parser.add_argument('-d', '--distance', help='Distance in meters e.g. 0.5', default=0.5, required=False, type=float)
     parser.add_argument('-i', '--input', help='Input file', required=True)
-    parser.add_argument('-d', '--distance', help='Distance', default=0.01, required=False, type=float)
-    parser.add_argument('-v', '--verbose', help='Verbose', required=False)
+    parser.add_argument('-o', '--output', help='Output file', required=True)
+    parser.add_argument('-v', '--verbose', action='store_true', help="Print information about the process")
     args = vars(parser.parse_args())
     # get the arguments
-    LAS_FILE = args['input']
     DISTANCE = args['distance']
+    INPUT_LAS_FILE = args['input']
+    OUTPUT_LAS_FILE = args['output']
     VERBOSE = args['verbose']
-    # run the script
-    DistanceFilteringDemBased(LAS_FILE, DISTANCE, VERBOSE).run()
+    # run the distance filtering
+    DistanceFilteringDemBased(
+        distance=DISTANCE,
+        input_las_file_path=INPUT_LAS_FILE,
+        output_las_file_path=OUTPUT_LAS_FILE,
+        verbose=VERBOSE
+    )()
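After this change, the filter can be driven from the command line (for example: python distance_filtering_dem_based.py -i input.las -o output.las -d 0.5 -v) or used programmatically. Below is a minimal sketch, assuming the class lives in a module named distance_filtering_dem_based (the file name is not shown in the diff) and using placeholder paths:

from distance_filtering_dem_based import DistanceFilteringDemBased

# Remove vegetation points that lie closer than 0.5 m to the ground DEM and
# write the updated point cloud to a new LAS file (paths are placeholders).
DistanceFilteringDemBased(
    distance=0.5,
    input_las_file_path='input_plot.las',
    output_las_file_path='filtered_plot.las',
    verbose=True
)()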