Commit 49379a4b authored by Maciej Wielgosz

update with mapper from las to text

parent 3b922e5a
@@ -23,3 +23,9 @@ data/
 *.pth
 # add wandb files
 wandb/
+*.tar.gz
+lightning_logs
+cifar-10-batches-py
+*.ckpt
+*.gz
+MNIST
\ No newline at end of file
@@ -62,7 +62,13 @@ class ModelNetDataLoader(Dataset):
 class PartNormalDataset(Dataset):
-    def __init__(self, root='./data/shapenetcore_partanno_segmentation_benchmark_v0_normal', npoints=2500, split='train', class_choice=None, normal_channel=False):
+    def __init__(self,
+                 root='./data/shapenetcore_partanno_segmentation_benchmark_v0_normal',
+                 npoints=2500,
+                 split='train',
+                 class_choice=None,
+                 normal_channel=False
+                 ):
         self.npoints = npoints
         self.root = root
         self.catfile = os.path.join(self.root, 'synsetoffset2category.txt')
...
import glob
import os
import laspy
import numpy as np
import pandas as pd
from joblib import Parallel, delayed


class Las2TextMapper:
    """Mapper class for las2text.py: converts las point clouds to whitespace-separated text files."""

    def __init__(self, data_dir, save_dir, verbose=False):
        self.data_dir = data_dir
        self.save_dir = save_dir

        # create the save_dir if it does not exist yet
        if not os.path.exists(self.save_dir):
            os.makedirs(self.save_dir)

        self.verbose = verbose

    def read_single_las(self, filepath):
        """Read a single las file.

        Args:
            filepath: path to the las file

        Returns:
            points: pandas DataFrame with columns x, y, z, red, green, blue, label, treeID
        """
        las = laspy.read(filepath)

        # get x, y, z
        points = np.vstack((las.x, las.y, las.z)).transpose()

        # get intensity
        # points = np.hstack((points, las.intensity[..., None]))

        # get rgb
        points = np.hstack((points, las.red[..., None]))
        points = np.hstack((points, las.green[..., None]))
        points = np.hstack((points, las.blue[..., None]))

        # get label
        points = np.hstack((points, las.label[..., None]))

        # get treeID
        points = np.hstack((points, las.treeID[..., None]))

        # put everything together into a pandas dataframe
        points = pd.DataFrame(points, columns=['x', 'y', 'z', 'red', 'green', 'blue', 'label', 'treeID'])

        return points

    def process_single_file(self, filepath):
        """Read a single las file and save it as a text file in self.save_dir.

        Args:
            filepath: path to the las file
        """
        # read the las file
        points = self.read_single_las(filepath)

        # build the output path from the input file name
        filename = os.path.splitext(os.path.basename(filepath))[0]
        out_path = os.path.join(self.save_dir, filename + ".txt")

        # save the points as a whitespace-separated text file without header or index
        points.to_csv(out_path, sep=' ', index=False, header=False)

    def process_folder(self):
        """Process all las files in self.data_dir in parallel, writing one text file per las file."""
        # collect all las files in data_dir using glob
        list_of_files = glob.glob(os.path.join(self.data_dir, "*.las"), recursive=False)

        # convert the files in parallel
        Parallel(n_jobs=8)(delayed(self.process_single_file)(filepath) for filepath in list_of_files)

        if self.verbose:
            print("Done processing the folder")


if __name__ == "__main__":
    # use argparse to get the data_dir and save_dir
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('--data_dir', type=str, default='data/stpls3d/val', help='path to the data directory')
    parser.add_argument('--save_dir', type=str, default='data/stpls3d/val', help='path to the save directory')
    # get verbose
    parser.add_argument('--verbose', action='store_true', help='verbose')
    args = parser.parse_args()

    # create the mapper
    mapper = Las2TextMapper(args.data_dir, args.save_dir, args.verbose)

    # process the folder
    mapper.process_folder()
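
For reference, a minimal usage sketch (not part of this commit), assuming the new file is saved as las2text.py as its class docstring suggests; the save_dir value below is an illustrative placeholder rather than a path defined in the repository:

    # command-line invocation, mirroring the argparse flags defined above:
    #   python las2text.py --data_dir data/stpls3d/val --save_dir data/stpls3d/val --verbose
    #
    # or driven from another Python script:
    from las2text import Las2TextMapper

    # converts every *.las file in data_dir into a matching .txt file in save_dir
    mapper = Las2TextMapper(data_dir="data/stpls3d/val", save_dir="data/stpls3d/val_txt", verbose=True)
    mapper.process_folder()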