diff --git a/config/config.yaml b/config/config.yaml
index 4166035e6aaac50b6c1a87bfe807814ce12adc3f..fc9e44d3de6f0776dd5450698dc6d5626183b2a3 100644
--- a/config/config.yaml
+++ b/config/config.yaml
@@ -9,6 +9,7 @@ label_formats:
   label_for_instances_in_predicted: 'instance_nr'
 semantic_segmentation_params:
   sematic_segmentation_script: './run_bash_scripts/sem_seg_sean.sh'
+  checkpoint_model_path: './fsct/model/model.pth'
   tile_size: 10 # tile size in meters
   min_density: 100 # minimum density of points in a tile(used for removing small tiles)
   remove_small_tiles: 1 # 1: remove small tiles, 0: not remove small tiles
diff --git a/run.py b/run.py
index 4d94222297bf3fd8fcab5f00d0f82a88edfda72c..a008ea3a94ff345e12e3dff0faf1a96aa400f541 100644
--- a/run.py
+++ b/run.py
@@ -35,6 +35,7 @@ def main(path_to_config_file):
 
     sem_seg_args.extend([
         "-d", str(config["general"]["input_folder"]),
+        "-c", str(config["semantic_segmentation_params"]["checkpoint_model_path"]),
         "-t", str(config["semantic_segmentation_params"]["tile_size"]),
         "-m", str(config["semantic_segmentation_params"]["min_density"]),
         "-z", str(config["semantic_segmentation_params"]["remove_small_tiles"])
diff --git a/run_bash_scripts/sem_seg_sean.sh b/run_bash_scripts/sem_seg_sean.sh
index 67295d46d98cc49dfa6fc444bc2c0cc058e381e4..51c33952731db9f622605b4d74c91d0cd80e5f6d 100755
--- a/run_bash_scripts/sem_seg_sean.sh
+++ b/run_bash_scripts/sem_seg_sean.sh
@@ -7,6 +7,7 @@ CONDA_ENV="pdal-env-1" # conda environment for running the pipeline
 
 # Parameters for the semetnic segmentation
 data_folder="" # path to the folder containing the data
+checkpoint_model_path="./fsct/model/model.pth"
 tile_size=10 # tile size in meters
 min_density=75 # minimum density of points in a tile(used for removing small tiles)
 remove_small_tiles=0 # 1: remove small tiles, 0: not remove small tiles
@@ -16,10 +17,12 @@ remove_small_tiles=0 # 1: remove small tiles, 0: not remove small tiles
 
 # extract tiling parameters as command line arguments with the same default values
 # add remove_small_tiles parameter
-while getopts "d:t:m:z:" opt; do
+while getopts "d:c:t:m:z:" opt; do
   case $opt in
   d) data_folder="$OPTARG"
   ;;
+  c) checkpoint_model_path="$OPTARG"
+  ;;
   t) tile_size="$OPTARG"
   ;;
   m) min_density="$OPTARG"
@@ -140,7 +143,7 @@ for d in $data_folder/segmented_point_clouds/tiled/*/; do
     for f in $d/*.ply; do
        echo "Processing $f file..."
        python sean_sem_seg/run_single_file.py \
-           --model /home/nibio/mutable-outside-world/code/gitlab_fsct/instance_segmentation_classic/fsct/model/model.pth \
+           --model $checkpoint_model_path \
            --point-cloud $f \
            --batch_size 10 \
            --odir $d \
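
The change threads a configurable checkpoint path (new -c flag) from config.yaml through run.py into sem_seg_sean.sh, replacing the hard-coded model path. A minimal usage sketch, assuming only the flags defined above; the data path is illustrative, not taken from the repository:

    # override the default checkpoint when invoking the segmentation script directly
    bash run_bash_scripts/sem_seg_sean.sh -d /path/to/data -c ./fsct/model/model.pth -t 10 -m 100 -z 1

When driven through run.py, the same value is picked up from semantic_segmentation_params.checkpoint_model_path in config/config.yaml and passed along as -c.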