support multiple coral devices (fixes #100)
This commit is contained in:
parent 49fca1b839
commit f946813ccb

benchmark.py (53 lines changed)
@@ -37,9 +37,7 @@ labels = load_labels('/labelmap.txt')
 # print(f"Processed for {duration:.2f} seconds.")
 # print(f"Average frame processing time: {mean(frame_times)*1000:.2f}ms")

-######
-# Separate process runner
-######
 def start(id, num_detections, detection_queue, event):
     object_detector = RemoteObjectDetector(str(id), '/labelmap.txt', detection_queue, event)
     start = datetime.datetime.now().timestamp()
@@ -51,38 +49,45 @@ def start(id, num_detections, detection_queue, event):
         frame_times.append(datetime.datetime.now().timestamp()-start_frame)

     duration = datetime.datetime.now().timestamp()-start
+    object_detector.cleanup()
     print(f"{id} - Processed for {duration:.2f} seconds.")
+    print(f"{id} - FPS: {object_detector.fps.eps():.2f}")
     print(f"{id} - Average frame processing time: {mean(frame_times)*1000:.2f}ms")

-event = mp.Event()
-edgetpu_process = EdgeTPUProcess({'1': event})
+######
+# Separate process runner
+######
+# event = mp.Event()
+# detection_queue = mp.Queue()
+# edgetpu_process = EdgeTPUProcess(detection_queue, {'1': event}, 'usb:0')

-start(1, 1000, edgetpu_process.detection_queue, event)
-print(f"Average raw inference speed: {edgetpu_process.avg_inference_speed.value*1000:.2f}ms")
+# start(1, 1000, edgetpu_process.detection_queue, event)
+# print(f"Average raw inference speed: {edgetpu_process.avg_inference_speed.value*1000:.2f}ms")

 ####
 # Multiple camera processes
 ####
-# camera_processes = []
+camera_processes = []

-# pipes = {}
-# for x in range(0, 10):
-#   pipes[x] = mp.Pipe(duplex=False)
+events = {}
+for x in range(0, 10):
+    events[str(x)] = mp.Event()
+detection_queue = mp.Queue()
+edgetpu_process_1 = EdgeTPUProcess(detection_queue, events, 'usb:0')
+edgetpu_process_2 = EdgeTPUProcess(detection_queue, events, 'usb:1')

-# edgetpu_process = EdgeTPUProcess({str(key): value[1] for (key, value) in pipes.items()})
+for x in range(0, 10):
+    camera_process = mp.Process(target=start, args=(x, 300, detection_queue, events[str(x)]))
+    camera_process.daemon = True
+    camera_processes.append(camera_process)

-# for x in range(0, 10):
-#   camera_process = mp.Process(target=start, args=(x, 100, edgetpu_process.detection_queue, pipes[x][0]))
-#   camera_process.daemon = True
-#   camera_processes.append(camera_process)
+start_time = datetime.datetime.now().timestamp()

-# start = datetime.datetime.now().timestamp()
+for p in camera_processes:
+    p.start()

-# for p in camera_processes:
-#   p.start()
+for p in camera_processes:
+    p.join()

-# for p in camera_processes:
-#   p.join()
+duration = datetime.datetime.now().timestamp()-start_time
+print(f"Total - Processed for {duration:.2f} seconds.")

-# duration = datetime.datetime.now().timestamp()-start
-# print(f"Total - Processed for {duration:.2f} seconds.")
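Note: the updated benchmark exercises the central pattern in this change — every camera process pushes inference requests onto one shared detection queue, and any number of detector processes (one per Coral) pull work from it. Below is a stripped-down, self-contained sketch of that fan-out using only the standard library; the function and variable names are invented for illustration, and results come back on a plain result queue rather than Frigate's shared-memory-plus-event mechanism.

import multiprocessing as mp

def detector_worker(device_name, request_queue, result_queue):
    # Stand-in for an EdgeTPU worker: pull requests from the shared queue,
    # "run inference", and post a result tagged with the requesting camera.
    while True:
        camera_id, frame_id = request_queue.get()
        if camera_id is None:
            break  # poison pill: shut down
        result_queue.put((camera_id, frame_id, f"detections from {device_name}"))

def camera(camera_id, num_frames, request_queue):
    # Stand-in for a camera process: submit frames for detection.
    for frame_id in range(num_frames):
        request_queue.put((camera_id, frame_id))

if __name__ == "__main__":
    requests, results = mp.Queue(), mp.Queue()
    # Two detector processes share one request queue, like two Coral devices.
    detectors = [mp.Process(target=detector_worker, args=(dev, requests, results))
                 for dev in ("usb:0", "usb:1")]
    cameras = [mp.Process(target=camera, args=(i, 100, requests)) for i in range(4)]
    for p in detectors + cameras:
        p.start()
    for p in cameras:
        p.join()
    for _ in range(4 * 100):
        results.get()               # wait until every frame has been processed
    for _ in detectors:
        requests.put((None, None))  # stop the workers
    for p in detectors:
        p.join()

Because the request queue is shared, a second detector simply drains requests faster; the camera side needs no changes, which is why the benchmark above only swaps in two EdgeTPUProcess instances.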
@@ -1,10 +1,13 @@
 web_port: 5000

 ################
-## Tell frigate to look for a specific EdgeTPU device. Useful if you want to run multiple instances of frigate
-## on the same machine with multiple EdgeTPUs. https://coral.ai/docs/edgetpu/multiple-edgetpu/#using-the-tensorflow-lite-python-api
+## List of detectors.
+## Currently supported types: cpu, edgetpu
+## EdgeTPU requires device as defined here: https://coral.ai/docs/edgetpu/multiple-edgetpu/#using-the-tensorflow-lite-python-api
 ################
-tensorflow_device: usb
+detectors:
+  - type: edgetpu
+    device: usb

 mqtt:
   host: mqtt.server.com
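For a machine with two Corals, the new schema would presumably list one detector entry per device, using device strings from the Coral documentation linked above (the updated benchmark uses 'usb:0' and 'usb:1'). The YAML snippet and parsing sketch below are illustrative only, not part of the commit:

import yaml  # PyYAML

example_config = """
detectors:
  - type: edgetpu
    device: usb:0
  - type: edgetpu
    device: usb:1
"""

CONFIG = yaml.safe_load(example_config)
# Same fallback as the new DETECTORS default: a single USB EdgeTPU.
DETECTORS = CONFIG.get('detectors', [{'type': 'edgetpu', 'device': 'usb'}])
print(DETECTORS)
# [{'type': 'edgetpu', 'device': 'usb:0'}, {'type': 'edgetpu', 'device': 'usb:1'}]

That default keeps an existing single-Coral config working even without a detectors section.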
@@ -61,15 +61,15 @@ FFMPEG_DEFAULT_CONFIG = {
 GLOBAL_OBJECT_CONFIG = CONFIG.get('objects', {})

 WEB_PORT = CONFIG.get('web_port', 5000)
-DEBUG = (CONFIG.get('debug', '0') == '1')
-TENSORFLOW_DEVICE = CONFIG.get('tensorflow_device')
+DETECTORS = CONFIG.get('detectors', [{'type': 'edgetpu', 'device': 'usb'}])

 class CameraWatchdog(threading.Thread):
-    def __init__(self, camera_processes, config, tflite_process, tracked_objects_queue, stop_event):
+    def __init__(self, camera_processes, config, detectors, detection_queue, tracked_objects_queue, stop_event):
         threading.Thread.__init__(self)
         self.camera_processes = camera_processes
         self.config = config
-        self.tflite_process = tflite_process
+        self.detectors = detectors
+        self.detection_queue = detection_queue
         self.tracked_objects_queue = tracked_objects_queue
         self.stop_event = stop_event

@@ -85,15 +85,16 @@ class CameraWatchdog(threading.Thread):

             now = datetime.datetime.now().timestamp()

-            # check the detection process
-            detection_start = self.tflite_process.detection_start.value
-            if (detection_start > 0.0 and
-                now - detection_start > 10):
-                print("Detection appears to be stuck. Restarting detection process")
-                self.tflite_process.start_or_restart()
-            elif not self.tflite_process.detect_process.is_alive():
-                print("Detection appears to have stopped. Restarting detection process")
-                self.tflite_process.start_or_restart()
+            # check the detection processes
+            for detector in self.detectors:
+                detection_start = detector.detection_start.value
+                if (detection_start > 0.0 and
+                    now - detection_start > 10):
+                    print("Detection appears to be stuck. Restarting detection process")
+                    detector.start_or_restart()
+                elif not detector.detect_process.is_alive():
+                    print("Detection appears to have stopped. Restarting detection process")
+                    detector.start_or_restart()

             # check the camera processes
             for name, camera_process in self.camera_processes.items():
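The per-detector check keeps the existing heartbeat heuristic: detection_start is written by the detector process before each inference and cleared afterwards, so a non-zero value more than ten seconds old means an inference has hung. A minimal, self-contained sketch of that heuristic (the fake worker and the 12-second wait are purely illustrative):

import datetime
import multiprocessing as mp
import time

def fake_detector(detection_start):
    # Simulate a detector that hangs mid-inference.
    detection_start.value = datetime.datetime.now().timestamp()
    time.sleep(60)  # never finishes, never resets detection_start to 0.0

if __name__ == "__main__":
    detection_start = mp.Value('d', 0.0)
    proc = mp.Process(target=fake_detector, args=(detection_start,), daemon=True)
    proc.start()

    time.sleep(12)
    now = datetime.datetime.now().timestamp()
    start = detection_start.value
    if start > 0.0 and now - start > 10:
        print("Detection appears to be stuck. Restarting detection process")
        # a real watchdog would call detector.start_or_restart() here
    elif not proc.is_alive():
        print("Detection appears to have stopped. Restarting detection process")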
@@ -104,9 +105,9 @@ class CameraWatchdog(threading.Thread):
                     camera_process['detection_fps'].value = 0.0
                     camera_process['read_start'].value = 0.0
                     process = mp.Process(target=track_camera, args=(name, self.config[name], camera_process['frame_queue'],
-                        camera_process['frame_shape'], self.tflite_process.detection_queue, self.tracked_objects_queue,
+                        camera_process['frame_shape'], self.detection_queue, self.tracked_objects_queue,
                         camera_process['process_fps'], camera_process['detection_fps'],
-                        camera_process['read_start'], camera_process['detection_frame'], self.stop_event))
+                        camera_process['read_start'], self.stop_event))
                     process.daemon = True
                     camera_process['process'] = process
                     process.start()
@@ -117,7 +118,7 @@ class CameraWatchdog(threading.Thread):
                     frame_size = frame_shape[0] * frame_shape[1] * frame_shape[2]
                     ffmpeg_process = start_or_restart_ffmpeg(camera_process['ffmpeg_cmd'], frame_size)
                     camera_capture = CameraCapture(name, ffmpeg_process, frame_shape, camera_process['frame_queue'],
-                        camera_process['take_frame'], camera_process['camera_fps'], camera_process['detection_frame'], self.stop_event)
+                        camera_process['take_frame'], camera_process['camera_fps'], self.stop_event)
                     camera_capture.start()
                     camera_process['ffmpeg_process'] = ffmpeg_process
                     camera_process['capture_thread'] = camera_capture
@@ -177,9 +178,15 @@ def main():
     out_events = {}
     for name in CONFIG['cameras'].keys():
         out_events[name] = mp.Event()

-    # Start the shared tflite process
-    tflite_process = EdgeTPUProcess(out_events=out_events, tf_device=TENSORFLOW_DEVICE)
+    detection_queue = mp.Queue()
+
+    detectors = []
+    for detector in DETECTORS:
+        if detector['type'] == 'cpu':
+            detectors.append(EdgeTPUProcess(detection_queue, out_events=out_events, tf_device='cpu'))
+        if detector['type'] == 'edgetpu':
+            detectors.append(EdgeTPUProcess(detection_queue, out_events=out_events, tf_device=detector['device']))

     # create the camera processes
     camera_processes = {}
@@ -233,10 +240,10 @@ def main():
         detection_frame = mp.Value('d', 0.0)

         ffmpeg_process = start_or_restart_ffmpeg(ffmpeg_cmd, frame_size)
-        frame_queue = mp.Queue()
+        frame_queue = mp.Queue(maxsize=2)
         camera_fps = EventsPerSecond()
         camera_fps.start()
-        camera_capture = CameraCapture(name, ffmpeg_process, frame_shape, frame_queue, take_frame, camera_fps, detection_frame, stop_event)
+        camera_capture = CameraCapture(name, ffmpeg_process, frame_shape, frame_queue, take_frame, camera_fps, stop_event)
         camera_capture.start()

         camera_processes[name] = {
@@ -265,7 +272,7 @@ def main():
         }

         camera_process = mp.Process(target=track_camera, args=(name, config, frame_queue, frame_shape,
-            tflite_process.detection_queue, out_events[name], tracked_objects_queue, camera_processes[name]['process_fps'],
+            detection_queue, out_events[name], tracked_objects_queue, camera_processes[name]['process_fps'],
             camera_processes[name]['detection_fps'],
             camera_processes[name]['read_start'], camera_processes[name]['detection_frame'], stop_event))
         camera_process.daemon = True
@@ -282,7 +289,7 @@ def main():
     object_processor = TrackedObjectProcessor(CONFIG['cameras'], client, MQTT_TOPIC_PREFIX, tracked_objects_queue, event_queue, stop_event)
     object_processor.start()

-    camera_watchdog = CameraWatchdog(camera_processes, CONFIG['cameras'], tflite_process, tracked_objects_queue, stop_event)
+    camera_watchdog = CameraWatchdog(camera_processes, CONFIG['cameras'], detectors, detection_queue, tracked_objects_queue, stop_event)
     camera_watchdog.start()

     def receiveSignal(signalNumber, frame):
@@ -293,7 +300,8 @@ def main():
         camera_watchdog.join()
         for camera_process in camera_processes.values():
             camera_process['capture_thread'].join()
-        tflite_process.stop()
+        for detector in detectors:
+            detector.stop()
         sys.exit()

     signal.signal(signal.SIGTERM, receiveSignal)
@@ -350,12 +358,14 @@ def main():
             }
         }

-        stats['coral'] = {
-            'fps': round(total_detection_fps, 2),
-            'inference_speed': round(tflite_process.avg_inference_speed.value*1000, 2),
-            'detection_start': tflite_process.detection_start.value,
-            'pid': tflite_process.detect_process.pid
-        }
+        stats['detectors'] = []
+        for detector in detectors:
+            stats['detectors'].append({
+                'inference_speed': round(detector.avg_inference_speed.value*1000, 2),
+                'detection_start': detector.detection_start.value,
+                'pid': detector.detect_process.pid
+            })
+        stats['detection_fps'] = round(total_detection_fps, 2)

         return jsonify(stats)

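With this change the stats payload reports one entry per detector plus a top-level detection_fps, replacing the single coral block. Roughly the shape sketched below; the numbers are made-up examples:

stats_example = {
    # ... per-camera stats elided ...
    'detectors': [
        {'inference_speed': 9.87, 'detection_start': 0.0, 'pid': 71},   # e.g. usb:0
        {'inference_speed': 10.23, 'detection_start': 0.0, 'pid': 72},  # e.g. usb:1
    ],
    'detection_fps': 45.6,  # total across cameras; previously stats['coral']['fps']
}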
@@ -48,15 +48,12 @@ class LocalObjectDetector(ObjectDetector):
             device_config = {"device": tf_device}

         edge_tpu_delegate = None
-        try:
-            print(f"Attempting to load TPU as {device_config['device']}")
-            edge_tpu_delegate = load_delegate('libedgetpu.so.1.0', device_config)
-            print("TPU found")
-        except ValueError:
+
+        if tf_device != 'cpu':
             try:
-                print(f"Attempting to load TPU as pci:0")
-                edge_tpu_delegate = load_delegate('libedgetpu.so.1.0', {"device": "pci:0"})
-                print("PCIe TPU found")
+                print(f"Attempting to load TPU as {device_config['device']}")
+                edge_tpu_delegate = load_delegate('libedgetpu.so.1.0', device_config)
+                print("TPU found")
             except ValueError:
                 print("No EdgeTPU detected. Falling back to CPU.")

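A hedged sketch of the delegate-loading pattern the new code follows — try the configured device string once and fall back to CPU inference if no EdgeTPU is found. The helper name and model path are placeholders, not Frigate's:

from tflite_runtime.interpreter import Interpreter, load_delegate

def make_interpreter(tf_device, model_path='/path/to/model.tflite'):
    # 'cpu' (or a missing TPU) ends up on a plain CPU interpreter.
    edge_tpu_delegate = None
    if tf_device != 'cpu':
        try:
            print(f"Attempting to load TPU as {tf_device}")
            edge_tpu_delegate = load_delegate('libedgetpu.so.1.0', {"device": tf_device})
            print("TPU found")
        except ValueError:
            print("No EdgeTPU detected. Falling back to CPU.")

    if edge_tpu_delegate is None:
        return Interpreter(model_path=model_path)
    return Interpreter(model_path=model_path, experimental_delegates=[edge_tpu_delegate])

Note that the automatic pci:0 retry is gone, so the configured device string now has to match the hardware, per the Coral device-naming docs referenced in the example config.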
@@ -135,9 +132,9 @@ def run_detector(detection_queue, out_events: Dict[str, mp.Event], avg_speed, st
         avg_speed.value = (avg_speed.value*9 + duration)/10

 class EdgeTPUProcess():
-    def __init__(self, out_events, tf_device=None):
+    def __init__(self, detection_queue, out_events, tf_device=None):
         self.out_events = out_events
-        self.detection_queue = mp.Queue()
+        self.detection_queue = detection_queue
         self.avg_inference_speed = mp.Value('d', 0.01)
         self.detection_start = mp.Value('d', 0.0)
         self.detect_process = None
@@ -192,3 +189,7 @@ class RemoteObjectDetector():
             ))
         self.fps.update()
         return detections
+
+    def cleanup(self):
+        self.shm.unlink()
+        self.out_shm.unlink()
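The new cleanup() lets short-lived clients such as the benchmark exit without leaking the shared-memory segments behind self.shm and self.out_shm. Assuming those are multiprocessing.shared_memory blocks (an assumption, not something this hunk shows; segment names and sizes below are made up), the lifecycle looks roughly like this:

from multiprocessing import shared_memory
import numpy as np

# Create named segments sized for a 300x300 RGB input and a small output buffer.
shm = shared_memory.SharedMemory(name="camera_1", create=True, size=300 * 300 * 3)
out_shm = shared_memory.SharedMemory(name="out-camera_1", create=True, size=20 * 6 * 4)

frame = np.ndarray((300, 300, 3), dtype=np.uint8, buffer=shm.buf)
detections = np.ndarray((20, 6), dtype=np.float32, buffer=out_shm.buf)

# ... producer/consumer work happens here ...

# close() drops this process's mapping; unlink() removes the segment itself,
# which is what cleanup() does for a detector client that is done.
for segment in (shm, out_shm):
    segment.close()
    segment.unlink()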
@@ -117,10 +117,9 @@ def start_or_restart_ffmpeg(ffmpeg_cmd, frame_size, ffmpeg_process=None):

 def capture_frames(ffmpeg_process, camera_name, frame_shape, frame_manager: FrameManager,
     frame_queue, take_frame: int, fps:EventsPerSecond, skipped_fps: EventsPerSecond,
-    stop_event: mp.Event, detection_frame: mp.Value, current_frame: mp.Value):
+    stop_event: mp.Event, current_frame: mp.Value):

     frame_num = 0
-    last_frame = 0
     frame_size = frame_shape[0] * frame_shape[1] * frame_shape[2]
     skipped_fps.start()
     while True:
@@ -147,8 +146,8 @@ def capture_frames(ffmpeg_process, camera_name, frame_shape, frame_manager: Fram
             skipped_fps.update()
             continue

-        # if the detection process is more than 1 second behind, skip this frame
-        if detection_frame.value > 0.0 and (last_frame - detection_frame.value) > 1:
+        # if the queue is full, skip this frame
+        if frame_queue.full():
             skipped_fps.update()
             continue

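Backpressure now lives in the queue itself: with frame_queue bounded at two slots, the capture loop drops frames whenever detection falls behind, instead of comparing frame timestamps against detection_frame. A small self-contained sketch of the pattern (the producer and consumer are made up for illustration):

import multiprocessing as mp
import time

def slow_consumer(frame_queue):
    while True:
        item = frame_queue.get()
        if item is None:
            break
        time.sleep(0.05)  # pretend detection takes 50ms per frame

if __name__ == "__main__":
    frame_queue = mp.Queue(maxsize=2)  # same bound the commit introduces
    consumer = mp.Process(target=slow_consumer, args=(frame_queue,))
    consumer.start()

    skipped = 0
    for frame in range(100):   # produce frames faster than they are consumed
        if frame_queue.full():
            skipped += 1       # skip the frame instead of queueing a backlog
            continue
        frame_queue.put(frame)
        time.sleep(0.01)

    frame_queue.put(None)
    consumer.join()
    print(f"skipped {skipped} of 100 frames")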
@@ -159,10 +158,9 @@ def capture_frames(ffmpeg_process, camera_name, frame_shape, frame_manager: Fram

         # add to the queue
         frame_queue.put(current_frame.value)
-        last_frame = current_frame.value

 class CameraCapture(threading.Thread):
-    def __init__(self, name, ffmpeg_process, frame_shape, frame_queue, take_frame, fps, detection_frame, stop_event):
+    def __init__(self, name, ffmpeg_process, frame_shape, frame_queue, take_frame, fps, stop_event):
         threading.Thread.__init__(self)
         self.name = name
         self.frame_shape = frame_shape
@@ -175,13 +173,12 @@ class CameraCapture(threading.Thread):
         self.ffmpeg_process = ffmpeg_process
         self.current_frame = mp.Value('d', 0.0)
         self.last_frame = 0
-        self.detection_frame = detection_frame
         self.stop_event = stop_event

     def run(self):
         self.skipped_fps.start()
         capture_frames(self.ffmpeg_process, self.name, self.frame_shape, self.frame_manager, self.frame_queue, self.take_frame,
-            self.fps, self.skipped_fps, self.stop_event, self.detection_frame, self.current_frame)
+            self.fps, self.skipped_fps, self.stop_event, self.current_frame)

 def track_camera(name, config, frame_queue, frame_shape, detection_queue, result_connection, detected_objects_queue, fps, detection_fps, read_start, detection_frame, stop_event):
     print(f"Starting process for {name}: {os.getpid()}")