Replace object tracker with SORT

Source: https://github.com/abewley/sort
Commit: c1b808481200d7997413c6e831bc59c8b08b61b8

SORT has better accuracy when measured in my environment. It
uses a Kalman filter to correct "bad" detections and is able
to recover from lost tracks to some extent. Deep SORT is
also an alternative but adds more complexity. This was key
to getting a room tracker working well without constantly
losing track of objects.

Input configuration is exposed in the user config so users
can calibrate it to their needs.
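
A minimal sketch (illustration only, not part of the commit) of how the
reworked tracker is constructed from these values and fed per frame,
assuming ObjectTracker lives in frigate/objects.py and detections are
(label, score, box, area, region) tuples:

from frigate.objects import ObjectTracker  # module path assumed

tracker = ObjectTracker(min_hits=1, max_age=5, iou_threshold=0.2)

# One frame with a single detection tuple: (label, score, box, area, region)
detections = [('person', 0.9, (120, 60, 220, 360), 30000, (0, 0, 640, 480))]
tracker.match_and_update(1596567811.0, detections)

# tracker.tracked_objects now holds the Kalman-corrected box keyed by the
# SORT track id; calling it with no detections still ages the tracks so
# lost objects are dropped once max_age missed frames have passed.
tracker.match_and_update(1596567811.2, [])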

Removed the main function and the plotting dependencies from
sort.py so that unnecessary dependencies are not added to
frigate.
Alexander Ernfridsson 2020-08-04 21:03:31 +02:00 committed by Alexander
parent 3a1f1c946b
commit d746d84bd4
6 changed files with 332 additions and 140 deletions


@@ -25,6 +25,8 @@ RUN apt -qq update && apt -qq install --no-install-recommends -y \
imutils \
scipy \
psutil \
# SORT deps
filterpy scikit-image lap \
&& python3.7 -m pip install -U \
Flask \
paho-mqtt \


@@ -149,3 +149,19 @@ cameras:
min_area: 5000
max_area: 100000
threshold: 0.5
################
## Tracker configuration
##
## If you have problems keeping track of objects, try changing these constants.
## See SORT project for details:
## https://github.com/abewley/sort/blob/master/sort.py
##
## min_hits: Minimum number of hits to start tracking an object.
## max_age: Number of missed frames before discarding track.
## iou_threshold: Intersection over union threshold.
################
tracker:
min_hits: 1
max_age: 5
iou_threshold: 0.2
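
As a rough guide for tuning iou_threshold, the snippet below (illustration
only, using the iou_batch helper from the vendored sort.py further down)
computes the overlap between a predicted track box and a detection shifted
by half a box width; at roughly 0.33 it would still match the default
threshold of 0.2:

import numpy as np
from frigate.sort import iou_batch  # helper added in this commit

track_box = np.array([[100, 100, 200, 200]])  # predicted position of the track
detection = np.array([[150, 100, 250, 200]])  # detection shifted half a box width
print(iou_batch(detection, track_box))        # [[0.333...]] -> above iou_threshold 0.2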


@@ -22,10 +22,6 @@ COLOR_MAP = {}
for key, val in LABELS.items():
COLOR_MAP[val] = tuple(int(round(255 * c)) for c in cmap(key)[:3])
def filter_false_positives(event):
if len(event['history']) < 2:
return True
return False
class TrackedObjectProcessor(threading.Thread):
def __init__(self, config, client, topic_prefix, tracked_objects_queue, event_queue):
@@ -70,8 +66,6 @@ class TrackedObjectProcessor(threading.Thread):
updated_ids = list(set(current_ids).intersection(previous_ids))
for id in new_ids:
# only register the object here if we are sure it isnt a false positive
if not filter_false_positives(current_tracked_objects[id]):
tracked_objects[id] = current_tracked_objects[id]
# publish events to mqtt
self.client.publish(f"{self.topic_prefix}/{camera}/events/start", json.dumps(tracked_objects[id]), retain=False)


@@ -1,26 +1,15 @@
import time
import datetime
import threading
import cv2
import itertools
import copy
import numpy as np
import random
import string
import multiprocessing as mp
from collections import defaultdict
from scipy.spatial import distance as dist
from frigate.util import draw_box_with_label, calculate_region
from frigate.sort import Sort
class ObjectTracker():
def __init__(self, max_disappeared):
def __init__(self, min_hits, max_age, iou_threshold):
self.tracked_objects = {}
self.disappeared = {}
self.max_disappeared = max_disappeared
self.max_age = max_age
self.mot_tracker = Sort(min_hits=min_hits, max_age=self.max_age, iou_threshold=iou_threshold)
def register(self, index, obj):
rand_id = ''.join(random.choices(string.ascii_lowercase + string.digits, k=6))
id = f"{obj['frame_time']}-{rand_id}"
def register(self, id, obj):
obj['id'] = id
obj['start_time'] = obj['frame_time']
obj['top_score'] = obj['score']
@@ -56,111 +45,47 @@ class ObjectTracker():
obj['history'] = [entry]
def match_and_update(self, frame_time, new_objects):
# group by name
new_object_groups = defaultdict(lambda: [])
for obj in new_objects:
new_object_groups[obj[0]].append({
'label': obj[0],
'score': obj[1],
'box': obj[2],
'area': obj[3],
'region': obj[4],
'frame_time': frame_time
})
a = []
for i, obj in enumerate(new_objects):
a.append(list(obj[2]) + [obj[1], i])
# update any tracked objects with labels that are not
# seen in the current objects and deregister if needed
new_tracked_ids = {}
if len(a) == 0:
self.mot_tracker.update()
else:
new_tracked = self.mot_tracker.update(np.array(a))
for t in new_tracked:
# Convert from numpy float array to list of ints.
int_arr = t.astype(np.int).tolist()
new_tracked_ids[str(int_arr[-2])] = int_arr
# Remove lost tracks
for obj in list(self.tracked_objects.values()):
if not obj['label'] in new_object_groups:
if self.disappeared[obj['id']] >= self.max_disappeared:
if obj['id'] not in new_tracked_ids:
# SORT will not return "missed" objects even though it still tracks them.
# Therefore, we need to take max_age into account here as well.
if self.disappeared[obj['id']] > self.max_age:
self.deregister(obj['id'])
else:
self.disappeared[obj['id']] += 1
if len(new_objects) == 0:
return
# track objects for each label type
for label, group in new_object_groups.items():
current_objects = [o for o in self.tracked_objects.values() if o['label'] == label]
current_ids = [o['id'] for o in current_objects]
current_centroids = np.array([o['centroid'] for o in current_objects])
# compute centroids of new objects
for obj in group:
# Add new tracks and update existing ones
for tracker_id, track in new_tracked_ids.items():
new_object_index = int(track[-1])
enhanced_box = track[0:4]
new_obj = new_objects[new_object_index]
obj = {
'label': new_obj[0],
'score': new_obj[1],
'box': enhanced_box,
'area': new_obj[3],
'region': new_obj[4],
'frame_time': frame_time
}
centroid_x = int((obj['box'][0]+obj['box'][2]) / 2.0)
centroid_y = int((obj['box'][1]+obj['box'][3]) / 2.0)
obj['centroid'] = (centroid_x, centroid_y)
if len(current_objects) == 0:
for index, obj in enumerate(group):
self.register(index, obj)
return
new_centroids = np.array([o['centroid'] for o in group])
# compute the distance between each pair of tracked
# centroids and new centroids, respectively -- our
# goal will be to match each new centroid to an existing
# object centroid
D = dist.cdist(current_centroids, new_centroids)
# in order to perform this matching we must (1) find the
# smallest value in each row and then (2) sort the row
# indexes based on their minimum values so that the row
# with the smallest value is at the *front* of the index
# list
rows = D.min(axis=1).argsort()
# next, we perform a similar process on the columns by
# finding the smallest value in each column and then
# sorting using the previously computed row index list
cols = D.argmin(axis=1)[rows]
# in order to determine if we need to update, register,
# or deregister an object we need to keep track of which
# of the rows and column indexes we have already examined
usedRows = set()
usedCols = set()
# loop over the combination of the (row, column) index
# tuples
for (row, col) in zip(rows, cols):
# if we have already examined either the row or
# column value before, ignore it
if row in usedRows or col in usedCols:
continue
# otherwise, grab the object ID for the current row,
# set its new centroid, and reset the disappeared
# counter
objectID = current_ids[row]
self.update(objectID, group[col])
# indicate that we have examined each of the row and
# column indexes, respectively
usedRows.add(row)
usedCols.add(col)
# compute the column index we have NOT yet examined
unusedRows = set(range(0, D.shape[0])).difference(usedRows)
unusedCols = set(range(0, D.shape[1])).difference(usedCols)
# in the event that the number of object centroids is
# equal or greater than the number of input centroids
# we need to check and see if some of these objects have
# potentially disappeared
if D.shape[0] >= D.shape[1]:
for row in unusedRows:
id = current_ids[row]
if self.disappeared[id] >= self.max_disappeared:
self.deregister(id)
if tracker_id not in self.tracked_objects:
self.register(tracker_id, obj)
else:
self.disappeared[id] += 1
# if the number of input centroids is greater
# than the number of existing object centroids we need to
# register each new input centroid as a trackable object
else:
for col in unusedCols:
self.register(col, group[col])
self.update(tracker_id, obj)
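
For reference, a small sketch (illustration only, not part of the diff) of how
match_and_update above flattens frigate's detection tuples into the
[x1,y1,x2,y2,score,index] rows that SORT expects, and how the index comes back:

# Detection tuples as produced elsewhere in frigate: (label, score, box, area, region)
detections = [('person', 0.87, (120, 60, 220, 360), 30000, (0, 0, 640, 480))]

rows = [list(obj[2]) + [obj[1], i] for i, obj in enumerate(detections)]
# rows == [[120, 60, 220, 360, 0.87, 0]] -> [x1, y1, x2, y2, score, detection_index]
# Sort.update() returns that index as the last column, which is how the
# Kalman-corrected box is matched back to the original label, area and region.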

frigate/sort.py Normal file

@@ -0,0 +1,252 @@
"""
SORT: A Simple, Online and Realtime Tracker
Copyright (C) 2016-2020 Alex Bewley alex@bewley.ai
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from __future__ import print_function
import os
import numpy as np
from skimage import io
import glob
import time
import argparse
from filterpy.kalman import KalmanFilter
np.random.seed(0)
def linear_assignment(cost_matrix):
try:
import lap
_, x, y = lap.lapjv(cost_matrix, extend_cost=True)
return np.array([[y[i],i] for i in x if i >= 0]) #
except ImportError:
from scipy.optimize import linear_sum_assignment
x, y = linear_sum_assignment(cost_matrix)
return np.array(list(zip(x, y)))
def iou_batch(bb_test, bb_gt):
"""
From SORT: Computes IoU between two bboxes in the form [x1,y1,x2,y2]
"""
bb_gt = np.expand_dims(bb_gt, 0)
bb_test = np.expand_dims(bb_test, 1)
xx1 = np.maximum(bb_test[..., 0], bb_gt[..., 0])
yy1 = np.maximum(bb_test[..., 1], bb_gt[..., 1])
xx2 = np.minimum(bb_test[..., 2], bb_gt[..., 2])
yy2 = np.minimum(bb_test[..., 3], bb_gt[..., 3])
w = np.maximum(0., xx2 - xx1)
h = np.maximum(0., yy2 - yy1)
wh = w * h
o = wh / ((bb_test[..., 2] - bb_test[..., 0]) * (bb_test[..., 3] - bb_test[..., 1])
+ (bb_gt[..., 2] - bb_gt[..., 0]) * (bb_gt[..., 3] - bb_gt[..., 1]) - wh)
return(o)
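# Example (illustration): a detection identical to a tracker box gives IoU 1.0,
# while a 10x10 box offset by (5, 5) overlaps 25 of 175 area units:
#   iou_batch(np.array([[0, 0, 10, 10]]), np.array([[0, 0, 10, 10], [5, 5, 15, 15]]))
#   -> array([[1.0, 0.142857...]])  # shape: (num detections, num trackers)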
def convert_bbox_to_z(bbox):
"""
Takes a bounding box in the form [x1,y1,x2,y2] and returns z in the form
[x,y,s,r] where x,y is the centre of the box and s is the scale/area and r is
the aspect ratio
"""
w = bbox[2] - bbox[0]
h = bbox[3] - bbox[1]
x = bbox[0] + w/2.
y = bbox[1] + h/2.
s = w * h #scale is just area
r = w / float(h)
return np.array([x, y, s, r]).reshape((4, 1))
def convert_x_to_bbox(x,score=None):
"""
Takes a bounding box in the centre form [x,y,s,r] and returns it in the form
[x1,y1,x2,y2] where x1,y1 is the top left and x2,y2 is the bottom right
"""
w = np.sqrt(x[2] * x[3])
h = x[2] / w
if(score==None):
return np.array([x[0]-w/2.,x[1]-h/2.,x[0]+w/2.,x[1]+h/2.]).reshape((1,4))
else:
return np.array([x[0]-w/2.,x[1]-h/2.,x[0]+w/2.,x[1]+h/2.,score]).reshape((1,5))
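# Round-trip example (illustration): a 100x50 box with top-left corner (100, 100):
#   convert_bbox_to_z([100, 100, 200, 150]) -> [x=150, y=125, s=5000, r=2.0]
#   convert_x_to_bbox([150, 125, 5000, 2.0]) -> [[100., 100., 200., 150.]]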
class KalmanBoxTracker(object):
"""
This class represents the internal state of individual tracked objects observed as bbox.
"""
count = 0
def __init__(self,bbox):
"""
Initialises a tracker using initial bounding box.
"""
#define constant velocity model
self.kf = KalmanFilter(dim_x=7, dim_z=4)
self.kf.F = np.array([[1,0,0,0,1,0,0],[0,1,0,0,0,1,0],[0,0,1,0,0,0,1],[0,0,0,1,0,0,0], [0,0,0,0,1,0,0],[0,0,0,0,0,1,0],[0,0,0,0,0,0,1]])
self.kf.H = np.array([[1,0,0,0,0,0,0],[0,1,0,0,0,0,0],[0,0,1,0,0,0,0],[0,0,0,1,0,0,0]])
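# State vector x = [u, v, s, r, u', v', s']: box centre (u, v), scale/area s and
# aspect ratio r, plus velocities for u, v and s (the aspect ratio is assumed
# constant). F applies the constant-velocity step on each predict(); H observes
# only the first four components [u, v, s, r].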
self.kf.R[2:,2:] *= 10.
self.kf.P[4:,4:] *= 1000. #give high uncertainty to the unobservable initial velocities
self.kf.P *= 10.
self.kf.Q[-1,-1] *= 0.01
self.kf.Q[4:,4:] *= 0.01
self.kf.x[:4] = convert_bbox_to_z(bbox)
self.time_since_update = 0
self.id = KalmanBoxTracker.count
KalmanBoxTracker.count += 1
self.history = []
self.hits = 0
self.hit_streak = 0
self.age = 0
self.original_id = bbox[5]
def update(self,bbox):
"""
Updates the state vector with observed bbox.
"""
self.time_since_update = 0
self.history = []
self.hits += 1
self.hit_streak += 1
self.kf.update(convert_bbox_to_z(bbox))
self.original_id = bbox[5]
def predict(self):
"""
Advances the state vector and returns the predicted bounding box estimate.
"""
if((self.kf.x[6]+self.kf.x[2])<=0):
self.kf.x[6] *= 0.0
self.kf.predict()
self.age += 1
if(self.time_since_update>0):
self.hit_streak = 0
self.time_since_update += 1
self.history.append(convert_x_to_bbox(self.kf.x))
return self.history[-1]
def get_state(self):
"""
Returns the current bounding box estimate.
"""
return convert_x_to_bbox(self.kf.x)
def associate_detections_to_trackers(detections,trackers,iou_threshold = 0.3):
"""
Assigns detections to tracked object (both represented as bounding boxes)
Returns 3 lists of matches, unmatched_detections and unmatched_trackers
"""
if(len(trackers)==0):
return np.empty((0,2),dtype=int), np.arange(len(detections)), np.empty((0,5),dtype=int)
iou_matrix = iou_batch(detections, trackers)
if min(iou_matrix.shape) > 0:
a = (iou_matrix > iou_threshold).astype(np.int32)
if a.sum(1).max() == 1 and a.sum(0).max() == 1:
matched_indices = np.stack(np.where(a), axis=1)
else:
matched_indices = linear_assignment(-iou_matrix)
else:
matched_indices = np.empty(shape=(0,2))
unmatched_detections = []
for d, det in enumerate(detections):
if(d not in matched_indices[:,0]):
unmatched_detections.append(d)
unmatched_trackers = []
for t, trk in enumerate(trackers):
if(t not in matched_indices[:,1]):
unmatched_trackers.append(t)
#filter out matched with low IOU
matches = []
for m in matched_indices:
if(iou_matrix[m[0], m[1]]<iou_threshold):
unmatched_detections.append(m[0])
unmatched_trackers.append(m[1])
else:
matches.append(m.reshape(1,2))
if(len(matches)==0):
matches = np.empty((0,2),dtype=int)
else:
matches = np.concatenate(matches,axis=0)
return matches, np.array(unmatched_detections), np.array(unmatched_trackers)
class Sort(object):
def __init__(self, max_age=1, min_hits=3, iou_threshold=0.3):
"""
Sets key parameters for SORT
"""
self.max_age = max_age
self.min_hits = min_hits
self.iou_threshold = iou_threshold
self.trackers = []
self.frame_count = 0
def update(self, dets=np.empty((0, 6))):
"""
Params:
dets - a numpy array of detections in the format [[x1,y1,x2,y2,score,index],[x1,y1,x2,y2,score,index],...]
Requires: this method must be called once for each frame even with empty detections (use np.empty((0, 6)) for frames without detections).
Returns a similar array, where the second-to-last column is the track ID and the last column is the original detection index.
NOTE: The number of objects returned may differ from the number of detections provided.
"""
self.frame_count += 1
# get predicted locations from existing trackers.
trks = np.zeros((len(self.trackers), 5))
to_del = []
ret = []
for t, trk in enumerate(trks):
pos = self.trackers[t].predict()[0]
trk[:] = [pos[0], pos[1], pos[2], pos[3], 0]
if np.any(np.isnan(pos)):
to_del.append(t)
trks = np.ma.compress_rows(np.ma.masked_invalid(trks))
for t in reversed(to_del):
self.trackers.pop(t)
matched, unmatched_dets, unmatched_trks = associate_detections_to_trackers(dets,trks, self.iou_threshold)
# update matched trackers with assigned detections
for m in matched:
self.trackers[m[1]].update(dets[m[0], :])
# create and initialise new trackers for unmatched detections
for i in unmatched_dets:
trk = KalmanBoxTracker(dets[i,:])
self.trackers.append(trk)
i = len(self.trackers)
for trk in reversed(self.trackers):
d = trk.get_state()[0]
if (trk.time_since_update < 1) and (trk.hit_streak >= self.min_hits or self.frame_count <= self.min_hits):
ret.append(np.concatenate((d,[trk.id+1], [trk.original_id])).reshape(1,-1)) # +1 as MOT benchmark requires positive
i -= 1
# remove dead tracklet
if(trk.time_since_update > self.max_age):
self.trackers.pop(i)
if(len(ret)>0):
return np.concatenate(ret)
return np.empty((0,5))
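
A short, self-contained sketch (illustration only) of how the Sort class behaves
across frames; the sixth input column is the detection index this fork appends
(kept as original_id above):

import numpy as np
from frigate.sort import Sort

tracker = Sort(max_age=5, min_hits=1, iou_threshold=0.2)

# Frame 1: one detection as [x1, y1, x2, y2, score, detection_index]
print(tracker.update(np.array([[100, 100, 200, 200, 0.9, 0]])))
# Frame 2: the box moved slightly; the same track id comes back in the
# second-to-last column, with the Kalman-smoothed box in the first four.
print(tracker.update(np.array([[110, 100, 210, 200, 0.8, 0]])))
# Frame 3: no detections; update() must still be called so tracks age and
# are dropped once time_since_update exceeds max_age.
print(tracker.update())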


@@ -200,7 +200,10 @@ def track_camera(name, config, global_objects_config, frame_queue, frame_shape,
motion_detector = MotionDetector(frame_shape, mask, resize_factor=6)
object_detector = RemoteObjectDetector(name, '/labelmap.txt', detection_queue)
object_tracker = ObjectTracker(10)
camera_tracker_config = config.get('tracker', {"min_hits": 1, "max_age": 5, "iou_threshold": 0.2})
object_tracker = ObjectTracker(camera_tracker_config["min_hits"],
camera_tracker_config["max_age"],
camera_tracker_config["iou_threshold"])
plasma_client = PlasmaManager()
avg_wait = 0.0