import copy
import datetime
import itertools
import multiprocessing as mp
import random
import string
import threading
import time
from collections import defaultdict

import cv2
import numpy as np
from scipy.spatial import distance as dist

from frigate.config import DetectConfig
from frigate.util import intersection_over_union


class ObjectTracker:
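    """Track detected objects across frames by matching per-label centroids."""
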
    def __init__(self, config: DetectConfig):
        self.tracked_objects = {}
        self.disappeared = {}
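        # number of consecutive unmatched frames before a tracked object is dropped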
        self.max_disappeared = config.max_disappeared

    def register(self, index, obj):
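        # build a unique id from the detection's frame time plus a short random suffix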
        rand_id = "".join(random.choices(string.ascii_lowercase + string.digits, k=6))
        id = f"{obj['frame_time']}-{rand_id}"
        obj["id"] = id
        obj["start_time"] = obj["frame_time"]
        obj["motionless_count"] = 0
        self.tracked_objects[id] = obj
        self.disappeared[id] = 0

    def deregister(self, id):
        del self.tracked_objects[id]
        del self.disappeared[id]

    def update(self, id, new_obj):
        self.disappeared[id] = 0
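        # if the new box overlaps the previous box almost completely (IoU > 0.9),
        # the object has not really moved, so bump its motionless counter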
        if (
            intersection_over_union(self.tracked_objects[id]["box"], new_obj["box"])
            > 0.9
        ):
            self.tracked_objects[id]["motionless_count"] += 1
        else:
            self.tracked_objects[id]["motionless_count"] = 0
        self.tracked_objects[id].update(new_obj)

    def match_and_update(self, frame_time, new_objects):
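        # new_objects is a list of detection tuples:
        # (label, score, box, area, region), all detected at frame_time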
        # group by name
        new_object_groups = defaultdict(lambda: [])
        for obj in new_objects:
            new_object_groups[obj[0]].append(
                {
                    "label": obj[0],
                    "score": obj[1],
                    "box": obj[2],
                    "area": obj[3],
                    "region": obj[4],
                    "frame_time": frame_time,
                }
            )

        # update any tracked objects with labels that are not
        # seen in the current objects and deregister if needed
        for obj in list(self.tracked_objects.values()):
            if obj["label"] not in new_object_groups:
                if self.disappeared[obj["id"]] >= self.max_disappeared:
                    self.deregister(obj["id"])
                else:
                    self.disappeared[obj["id"]] += 1

        if len(new_objects) == 0:
            return

        # track objects for each label type
        for label, group in new_object_groups.items():
            current_objects = [
                o for o in self.tracked_objects.values() if o["label"] == label
            ]
            current_ids = [o["id"] for o in current_objects]
            current_centroids = np.array([o["centroid"] for o in current_objects])

            # compute centroids of new objects
            for obj in group:
                centroid_x = int((obj["box"][0] + obj["box"][2]) / 2.0)
                centroid_y = int((obj["box"][1] + obj["box"][3]) / 2.0)
                obj["centroid"] = (centroid_x, centroid_y)

            if len(current_objects) == 0:
                for index, obj in enumerate(group):
                    self.register(index, obj)
                continue

            new_centroids = np.array([o["centroid"] for o in group])

            # compute the distance between each pair of tracked
            # centroids and new centroids, respectively -- our
            # goal will be to match each current centroid to a new
            # object centroid
            D = dist.cdist(current_centroids, new_centroids)
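            # e.g. with current centroids [(10, 10), (50, 50)] and a single new
            # centroid (12, 11), D is the 2x1 matrix roughly [[2.2], [54.5]],
            # so the first tracked object is the natural match for the detection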

            # in order to perform this matching we must (1) find the smallest
            # value in each row (i.e. the distance from each current object to
            # the closest new object) and then (2) sort the row indexes based
            # on their minimum values so that the row with the smallest
            # distance (the best match) is at the *front* of the index list
            rows = D.min(axis=1).argsort()

            # next, we determine which new object each existing object matched
            # against, and apply the same sorting as was applied previously
            cols = D.argmin(axis=1)[rows]

            # many current objects may register with each new object, so only
            # match the closest ones. unique returns the indices of the first
            # occurrences of each value, and because the rows are sorted by
            # distance, this will be the index of the closest match
            _, index = np.unique(cols, return_index=True)
            rows = rows[index]
            cols = cols[index]
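            # e.g. if cols was [0, 0] (two tracked objects both closest to new
            # object 0), only the first, closer row is kept as the match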

            # loop over the combination of the (row, column) index tuples
            for row, col in zip(rows, cols):
                # grab the object ID for the current row, set its new centroid,
                # and reset the disappeared counter
                objectID = current_ids[row]
                self.update(objectID, group[col])

            # compute the row and column indices we have NOT yet examined
            unusedRows = set(range(D.shape[0])).difference(rows)
            unusedCols = set(range(D.shape[1])).difference(cols)

            # in the event that the number of tracked centroids is
            # equal to or greater than the number of new centroids,
            # we need to check and see if some of these objects have
            # potentially disappeared
            if D.shape[0] >= D.shape[1]:
                for row in unusedRows:
                    id = current_ids[row]

                    if self.disappeared[id] >= self.max_disappeared:
                        self.deregister(id)
                    else:
                        self.disappeared[id] += 1
            # if the number of new centroids is greater than the number
            # of existing tracked centroids, we need to register each new
            # centroid as a trackable object
            else:
                for col in unusedCols:
                    self.register(col, group[col])
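

# Minimal usage sketch (illustrative only): the detection tuple values below and
# the way DetectConfig is constructed are assumptions, not taken from this file.
#
#   tracker = ObjectTracker(DetectConfig(max_disappeared=25))
#   detections = [("person", 0.92, (10, 20, 110, 220), 20000, (0, 0, 300, 300))]
#   tracker.match_and_update(1609459200.0, detections)
#   assert len(tracker.tracked_objects) == 1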