
Commit

Merge pull request #2 from philwilkes/voxel
Voxel - minor conflicts resolved.
mattbv authored Jan 12, 2018
2 parents 3fccc5a + b47b727 commit fdadf86
Showing 2 changed files with 43 additions and 18 deletions.
55 changes: 38 additions & 17 deletions tlseparation/classification/path_detection.py
@@ -25,15 +25,17 @@
__email__ = "[email protected]"
__status__ = "Development"


import datetime
import numpy as np
import datetime
import pandas as pd
import struct
from sklearn.neighbors import NearestNeighbors
from ..utility.shortpath import (array_to_graph, extract_path_info)


def detect_main_pathways(point_cloud, k_retrace, knn, nbrs_threshold,
verbose=False):

def detect_main_pathways(point_cloud, k_retrace, knn, nbrs_threshold, voxel=.1, verbose=False):


"""
Detects the main pathways of an unordered 3D point cloud. Set as true
@@ -80,19 +82,38 @@ def detect_main_pathways(point_cloud, k_retrace, knn, nbrs_threshold,
assert point_cloud.shape[1] == 3, "point_cloud must be a 3D point cloud.\
Make sure it has the shape n_points x 3 (x, y, z)."

# voxelise the data
point_cloud_v = pd.DataFrame(point_cloud, columns=['x', 'y', 'z'])
point_cloud_v.loc[:, 'xx'] = (point_cloud_v.x // voxel) * voxel
point_cloud_v.loc[:, 'yy'] = (point_cloud_v.y // voxel) * voxel
point_cloud_v.loc[:, 'zz'] = (point_cloud_v.z // voxel) * voxel

point_cloud_v.loc[:, 'xxb'] = point_cloud_v.xx.apply(lambda x: struct.pack('f', x ))
point_cloud_v.loc[:, 'yyb'] = point_cloud_v.yy.apply(lambda x: struct.pack('f', x ))
point_cloud_v.loc[:, 'zzb'] = point_cloud_v.zz.apply(lambda x: struct.pack('f', x ))
point_cloud_v.loc[:, 'I'] = point_cloud_v.xxb + point_cloud_v.yyb + point_cloud_v.zzb

point_cloud_w = point_cloud_v.groupby(['xx', 'yy', 'zz']).size().reset_index()
point_cloud_w.loc[:, 'xxb'] = point_cloud_w.xx.apply(lambda x: struct.pack('f', x ))
point_cloud_w.loc[:, 'yyb'] = point_cloud_w.yy.apply(lambda x: struct.pack('f', x ))
point_cloud_w.loc[:, 'zzb'] = point_cloud_w.zz.apply(lambda x: struct.pack('f', x ))
point_cloud_w.loc[:, 'I'] = point_cloud_w.xxb + point_cloud_w.yyb + point_cloud_w.zzb

# Getting root index (base_id) from point cloud.
base_id = np.argmin(point_cloud[:, 2])
base_id = point_cloud_w.zz.idxmin()

# Generating graph from point cloud and extracting shortest path
# information.

if verbose:
print(str(datetime.datetime.now()) + ' | >>> generating graph from \
point cloud and extracting shortest path information')
G = array_to_graph(point_cloud, base_id, 3, 100, 0.05, 0.02)
G = array_to_graph(point_cloud_w[['xx', 'yy', 'zz']], base_id, 3, 100, 0.05, 0.02)

nodes_ids, D, path_list = extract_path_info(G, base_id,
return_path=True)
# Obtaining node coordinates from shortest path information.
nodes = point_cloud[nodes_ids]
nodes = point_cloud_w.loc[nodes_ids]
# Converting list of shortest path distances to array.
D = np.asarray(D)
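
Note on the hunk above: the pathway detection now runs on a voxelized copy of the cloud rather than on every raw point, which shrinks the graph before the shortest-path step. A minimal standalone sketch of that voxel-grid downsampling, assuming a plain n_points x 3 numpy array; the names cloud, voxel_size, points_v and voxels are illustrative, not taken from the repository:

import numpy as np
import pandas as pd

def voxelize(cloud, voxel_size=0.1):
    # Snap each point to the lower corner of the voxel it falls into,
    # mirroring the floor-division used in the diff.
    points_v = pd.DataFrame(cloud, columns=['x', 'y', 'z'])
    points_v['xx'] = (points_v.x // voxel_size) * voxel_size
    points_v['yy'] = (points_v.y // voxel_size) * voxel_size
    points_v['zz'] = (points_v.z // voxel_size) * voxel_size
    # One row per occupied voxel; the extra column holds the point count.
    voxels = points_v.groupby(['xx', 'yy', 'zz']).size().reset_index()
    return points_v, voxels

# Usage on a toy cloud.
cloud = np.random.rand(10000, 3)
points_v, voxels = voxelize(cloud, voxel_size=0.1)

The graph is then built over voxels[['xx', 'yy', 'zz']] only, and the root voxel is the one with the smallest zz.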

@@ -111,8 +132,8 @@ def detect_main_pathways(point_cloud, k_retrace, knn, nbrs_threshold,

# Generating array of all indices from 'arr' and all indices to process
# 'idx'.
idx_base = np.arange(point_cloud.shape[0], dtype=int)
idx = np.arange(point_cloud.shape[0], dtype=int)
idx_base = np.arange(point_cloud_w.shape[0], dtype=int)
idx = np.arange(point_cloud_w.shape[0], dtype=int)

# Initializing NearestNeighbors search and searching for all 'knn'
# neighboring points around each point in 'arr'.
@@ -121,8 +142,8 @@ def detect_main_pathways(point_cloud, k_retrace, knn, nbrs_threshold,
NearestNeighbors search and searching for all knn neighboring points \
around each point in arr')
nbrs = NearestNeighbors(n_neighbors=knn, metric='euclidean',
leaf_size=15, n_jobs=-1).fit(point_cloud)
distances, indices = nbrs.kneighbors(point_cloud)
leaf_size=15, n_jobs=-1).fit(point_cloud_w[['xx', 'yy', 'zz']])
distances, indices = nbrs.kneighbors(point_cloud_w[['xx', 'yy', 'zz']])
indices = indices.astype(int)

# Initializing variables for current ids being processed (current_idx)
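
The two hunks above only swap the raw cloud for the voxelized one in the index arrays and in the neighbour search. A hedged sketch of that search step on its own, using the same scikit-learn calls with an illustrative voxel_centers array standing in for point_cloud_w[['xx', 'yy', 'zz']]:

import numpy as np
from sklearn.neighbors import NearestNeighbors

voxel_centers = np.random.rand(500, 3)  # stand-in for the voxelized cloud
knn = 20

nbrs = NearestNeighbors(n_neighbors=knn, metric='euclidean',
                        leaf_size=15, n_jobs=-1).fit(voxel_centers)
# Both outputs have shape (n_voxels, knn); row i lists the knn nearest
# voxel centres to voxel i, with voxel i itself in the first column.
distances, indices = nbrs.kneighbors(voxel_centers)
indices = indices.astype(int)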
@@ -251,14 +272,14 @@ def detect_main_pathways(point_cloud, k_retrace, knn, nbrs_threshold,
processed_idx = np.unique(processed_idx).astype(int)

# Generating list of remaining points to process.
idx = idx_base[np.in1d(idx_base, processed_idx, invert=True)]

# Generating final path mask and setting processed indices as True.
path_mask = np.zeros(point_cloud.shape[0], dtype=bool)
path_mask = np.zeros(point_cloud_w.shape[0], dtype=bool)
path_mask[processed_idx] = True

# identifying points in stem voxels and setting them to True
path_mask_all = np.zeros(point_cloud_v.shape[0], dtype=bool)
path_mask_all[point_cloud_v[point_cloud_v.I.isin(point_cloud_w.loc[path_mask].I)].index] = True

return path_mask

return path_mask_all

def get_base(point_cloud, base_height):

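In the final hunk the function no longer returns the voxel-level mask: each voxel corner is serialized into a byte-string key ('I') with struct.pack, and every raw point whose key matches a flagged voxel is set to True, so the returned mask again covers the full-resolution cloud. A self-contained sketch of that back-mapping on toy data; pack_key and the random path_mask are illustrative stand-ins:

import struct
import numpy as np
import pandas as pd

def pack_key(df):
    # Concatenate the packed float bytes of the voxel corner,
    # matching the 'I' column built in the diff.
    return (df.xx.apply(lambda v: struct.pack('f', v))
            + df.yy.apply(lambda v: struct.pack('f', v))
            + df.zz.apply(lambda v: struct.pack('f', v)))

# Toy data: per-point voxel corners plus the table of occupied voxels.
points_v = pd.DataFrame(np.random.rand(1000, 3), columns=['x', 'y', 'z'])
for c in ('x', 'y', 'z'):
    points_v[c * 2] = (points_v[c] // 0.1) * 0.1
voxels = points_v.groupby(['xx', 'yy', 'zz']).size().reset_index()

points_v['I'] = pack_key(points_v)
voxels['I'] = pack_key(voxels)

# path_mask flags voxels on the detected paths; a random stand-in here.
path_mask = np.random.rand(voxels.shape[0]) > 0.5

# Propagate the voxel-level mask to every raw point in a flagged voxel.
path_mask_all = np.zeros(points_v.shape[0], dtype=bool)
path_mask_all[points_v[points_v.I.isin(voxels.loc[path_mask, 'I'])].index] = True
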
6 changes: 5 additions & 1 deletion tlseparation/scripts/automated_separation.py
@@ -101,10 +101,14 @@ def large_tree_1(arr, class_file=[], cont_filt=True,
# Masking points most likely to be part of the trunk and larger branches.
if verbose:
print(str(datetime.datetime.now()) + ' | masking points most likely \
to be part of the trunk and larger branches')
to be part of the trunk and larger branches')
try:
trunk_mask = detect_main_pathways(arr, 80, 20, .15, voxel=.1, verbose=verbose)

try:
trunk_mask = detect_main_pathways(arr, 80, 100, nndist,
verbose=verbose)

trunk_ids = np.where(trunk_mask)[0]
not_trunk_ids = np.where(~trunk_mask)[0].astype(int)
except:
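
For reference, a hedged usage sketch of the updated call inside large_tree_1 above; the import path is assumed from the file layout and the input file name is a placeholder:

import numpy as np
from tlseparation.classification.path_detection import detect_main_pathways

# arr is an n_points x 3 array of x, y, z coordinates.
arr = np.loadtxt('tree_cloud.txt')  # placeholder input

# Same arguments as the new call in large_tree_1:
# k_retrace=80, knn=20, nbrs_threshold=.15, voxel=.1.
trunk_mask = detect_main_pathways(arr, 80, 20, .15, voxel=.1, verbose=True)

trunk_ids = np.where(trunk_mask)[0]
not_trunk_ids = np.where(~trunk_mask)[0].astype(int)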
