55 changes: 48 additions & 7 deletions trackpy/linking.py
@@ -27,6 +27,9 @@ def __init__(self, points):
self.points = copy(points)
self.rebuild()

def __len__(self):
return len(self.points)

def add_point(self, pt):
self.points.append(pt)
self._clean = False
@@ -49,8 +52,9 @@ def rebuild(self, coord_map=None):
coord_map = functools.partial(map, lambda x: x.pos)
coords = np.asarray(list(coord_map(self.points)))
if len(self.points) == 0:
raise ValueError('Frame (aka level) contains zero points')
self._kdtree = cKDTree(coords, 15)
self._kdtree = None
else:
self._kdtree = cKDTree(coords, 15)
# This could be tuned
self._clean = True
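
The hunk above makes rebuild() store None instead of raising when a frame contains no points. A minimal standalone sketch of that pattern (illustrative only, not trackpy's internal API): skip building the cKDTree for an empty frame and have callers treat a missing tree as having no candidates.

import numpy as np
from scipy.spatial import cKDTree

def build_tree(coords, leafsize=15):
    """Build a cKDTree, or return None for an empty frame."""
    coords = np.asarray(coords)
    if len(coords) == 0:
        return None  # empty frame: no tree, rather than raising
    return cKDTree(coords, leafsize)

def query_candidates(tree, point, search_range):
    """Return indices of candidate neighbors, or [] when there is no tree."""
    if tree is None:
        return []  # nothing to link against in an empty frame
    return tree.query_ball_point(point, search_range)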

@@ -100,6 +104,7 @@ def __init__(self, dims, box_size):
self.cached_rrange = None
self.strides = np.cumprod(
np.concatenate(([1], self.hash_dims[1:])))[::-1]
self._len = 0

def get_region(self, point, rrange):
'''
@@ -171,6 +176,10 @@ def add_point(self, point):
raise Hash_table.Out_of_hash_excpt("cord out of range")
indx = int(sum(cord * self.strides))
self.hash_table[indx].append(point)
self._len += 1

def __len__(self):
return self._len


class TrackUnstored(object):
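
The __len__ methods added in this file (to the kd-tree point container above and to Hash_table here) appear to exist so that the len(prev_hash) guard added to link() further down works with either backend. A toy sketch of the running-counter pattern, assuming points are only ever added and never removed:

class CountingHash(object):
    """Toy stand-in for illustration only, not trackpy's Hash_table."""
    def __init__(self):
        self.buckets = {}
        self._len = 0

    def add_point(self, key, point):
        self.buckets.setdefault(key, []).append(point)
        self._len += 1  # O(1) length lookup instead of summing bucket sizes

    def __len__(self):
        return self._len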
@@ -563,9 +572,7 @@ def link_df(features, search_range, memory=0,
if retain_index:
orig_index = features.index.copy() # Save it; restore it at the end.
features.reset_index(inplace=True, drop=True)
levels = (_build_level(frame, pos_columns, t_column,
diagnostics=diagnostics) for frame_no, frame
in features.groupby(t_column))
levels = _gen_levels_df(features, pos_columns, t_column, diagnostics)
labeled_levels = link_iter(
levels, search_range, memory=memory, predictor=predictor,
adaptive_stop=adaptive_stop, adaptive_step=adaptive_step,
@@ -580,6 +587,8 @@
# Do the tracking, and update the DataFrame after each iteration.
features['particle'] = np.nan # placeholder
for level in labeled_levels:
if len(level) == 0:
continue
index = [x.id for x in level]
labels = pd.Series([x.track.id for x in level], index)
frame_no = next(iter(level)).t # uses an arbitrary element from the set
@@ -773,6 +782,37 @@ def _build_level(frame, pos_columns, t_column, diagnostics=False):
frame[pos_columns].values, frame.index))


def _gen_levels_df(df, pos_columns, t_column, diagnostics=False):
"""Return a generator of PointND objects for a DataFrame of points.

The DataFrame is assumed to contain integer frame numbers. For each
missing frame number, an empty level (an empty list) is yielded.

Parameters
----------
df : DataFrame
Unlinked points data for all frames.
pos_columns : list
Names of position columns in "df"
t_column : string
Name of time column in "df"
diagnostics : boolean, optional
Whether resulting point objects should collect diagnostic information.
"""
grouped = iter(df.groupby(t_column))
cur_frame, frame = next(grouped)
cur_frame += 1.5  # advance by 1.5 to tolerate near-integer frame numbers (e.g. 1.000001)
yield _build_level(frame, pos_columns, t_column, diagnostics)

for frame_no, frame in grouped:
while cur_frame < frame_no:
cur_frame += 1
yield []

cur_frame += 1
yield _build_level(frame, pos_columns, t_column, diagnostics)


def _add_diagnostic_columns(features, level):
"""Copy the diagnostic information stored in each particle to the
corresponding columns in 'features'. Create columns as needed."""
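
A hypothetical usage sketch of the new _gen_levels_df generator (the printed lengths are what the code above should produce, not captured output): for frames 0, 1 and 3 it yields one level per existing frame plus an empty level for the missing frame 2.

import numpy as np
import pandas as pd
from trackpy.linking import _gen_levels_df  # added by this PR

f = pd.DataFrame({'x': np.arange(3, dtype=float),
                  'y': np.ones(3),
                  'frame': [0, 1, 3]})  # frame 2 is missing
for level in _gen_levels_df(f, ['x', 'y'], 'frame'):
    print(len(level))  # expected: 1, 1, 0, 1 (the 0 is the empty level for frame 2)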
@@ -1009,8 +1049,9 @@ def link(self, levels):
p.forward_cands = []

# Sort out what can go to what.
assign_candidates(cur_level, prev_hash, self.search_range,
self.neighbor_strategy)
if len(cur_level) > 0 and len(prev_hash) > 0:
Reviewer comment: Does assign_candidates not deal with empty levels gracefully on its own?

assign_candidates(cur_level, prev_hash, self.search_range,
self.neighbor_strategy)

# sort the candidate lists by distance
for p in cur_set:
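One possible answer to the reviewer's question above, sketched here as an assumption rather than as the PR's actual change, would be to short-circuit inside assign_candidates itself so that callers would not need the length guard:

def assign_candidates(cur_level, prev_hash, search_range, neighbor_strategy):
    # Hypothetical early return (not part of this PR): with an empty current
    # level or an empty previous hash there is nothing to pair, so leave all
    # candidate lists untouched.
    if len(cur_level) == 0 or len(prev_hash) == 0:
        return
    ...  # existing neighbor-search logic would continue unchanged
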
13 changes: 8 additions & 5 deletions trackpy/tests/test_link.py
@@ -209,19 +209,22 @@ def test_start_at_frame_other_than_zero(self):
assert_frame_equal(actual, expected)

def test_blank_frame_no_memory(self):
# One 1D stepper
N = 5
f = DataFrame({'x': np.arange(N), 'y': np.ones(N),
'frame': [0, 1, 2, 4, 5]})
expected = f.copy()
expected['particle'] = np.zeros(N)

# Using link_df, the particle will be given a new ID after the gap.
expected['particle'] = np.array([0, 0, 0, 1, 1], dtype=np.float64)
actual = self.link_df(f, 5)
assert_frame_equal(actual, expected)

# link_df_iter will (in this test suite) iterate over only the frames
# present in the dataframe, so the gap will be ignored.
expected['particle'] = 0.0
actual = self.link_df_iter(f, 5, hash_size=(10, 10))
assert_frame_equal(actual, expected)
# This doesn't error, but we might wish it would
# give the particle a new ID after the gap. It just
# ignores the missing frame.


def test_real_data_that_causes_duplicate_bug(self):
filename = 'reproduce_duplicate_track_assignment.df'
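
The test above documents the behavioral split: with memory=0 and a missing frame, link_df starts a new track after the gap, while link_df_iter (fed only the frames present in the DataFrame) keeps a single track. A hedged reproduction of the link_df case, where the expected particle ids come from the test rather than from captured output:

import numpy as np
import pandas as pd
from trackpy.linking import link_df

f = pd.DataFrame({'x': np.arange(5, dtype=float),
                  'y': np.ones(5),
                  'frame': [0, 1, 2, 4, 5]})  # frame 3 is missing
linked = link_df(f, search_range=5, memory=0)
# Expected 'particle' column per the test: [0, 0, 0, 1, 1], i.e. a new track after the gap.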