Use JSON instead of pickle for serialization (#12590)
commit d28ad0f0c8
parent 47aecff567
@@ -21,7 +21,7 @@ class ConfigPublisher:
     def publish(self, topic: str, payload: any) -> None:
         """There is no communication back to the processes."""
         self.socket.send_string(topic, flags=zmq.SNDMORE)
-        self.socket.send_pyobj(payload)
+        self.socket.send_json(payload)

     def stop(self) -> None:
         self.stop_event.set()
@@ -42,7 +42,7 @@ class ConfigSubscriber:
         """Returns updated config or None if no update."""
         try:
             topic = self.socket.recv_string(flags=zmq.NOBLOCK)
-            return (topic, self.socket.recv_pyobj())
+            return (topic, self.socket.recv_json())
         except zmq.ZMQError:
             return (None, None)

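The publisher still sends the topic as one frame and the payload as a second frame; only the payload encoding changes from pickle to JSON, so payloads must now be built from JSON-serializable types (dict, list, str, numbers, bool, None) rather than arbitrary Python objects. A minimal sketch of the same two-frame pattern outside Frigate (the endpoint, topic, and payload below are made up for illustration):

import time
import zmq

ctx = zmq.Context()

# Publisher side: topic frame followed by a JSON payload frame.
pub = ctx.socket(zmq.PUB)
pub.bind("ipc:///tmp/example_config")  # hypothetical endpoint

# Subscriber side: mirrors the two recv calls.
sub = ctx.socket(zmq.SUB)
sub.connect("ipc:///tmp/example_config")
sub.setsockopt_string(zmq.SUBSCRIBE, "")
time.sleep(0.2)  # let the subscription propagate before publishing

pub.send_string("config/cameras", flags=zmq.SNDMORE)
pub.send_json({"detect": {"enabled": True}})  # payload must be JSON-serializable

topic = sub.recv_string()
payload = sub.recv_json()  # plain dict/list/str/number, never an arbitrary object
print(topic, payload)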
@@ -68,7 +68,7 @@ class DetectionPublisher:
     def send_data(self, payload: any) -> None:
         """Publish detection."""
         self.socket.send_string(self.topic.value, flags=zmq.SNDMORE)
-        self.socket.send_pyobj(payload)
+        self.socket.send_json(payload)

     def stop(self) -> None:
         self.socket.close()
@@ -91,7 +91,7 @@ class DetectionSubscriber:

             if has_update:
                 topic = DetectionTypeEnum[self.socket.recv_string(flags=zmq.NOBLOCK)]
-                return (topic, self.socket.recv_pyobj())
+                return (topic, self.socket.recv_json())
         except zmq.ZMQError:
             pass

@@ -20,7 +20,7 @@ class EventUpdatePublisher:
         self, payload: tuple[EventTypeEnum, EventStateEnum, str, dict[str, any]]
     ) -> None:
         """There is no communication back to the processes."""
-        self.socket.send_pyobj(payload)
+        self.socket.send_json(payload)

     def stop(self) -> None:
         self.socket.close()
@@ -43,7 +43,7 @@ class EventUpdateSubscriber:
             has_update, _, _ = zmq.select([self.socket], [], [], timeout)

             if has_update:
-                return self.socket.recv_pyobj()
+                return self.socket.recv_json()
         except zmq.ZMQError:
             pass

@@ -66,7 +66,7 @@ class EventEndPublisher:
         self, payload: tuple[EventTypeEnum, EventStateEnum, str, dict[str, any]]
     ) -> None:
         """There is no communication back to the processes."""
-        self.socket.send_pyobj(payload)
+        self.socket.send_json(payload)

     def stop(self) -> None:
         self.socket.close()
@@ -89,7 +89,7 @@ class EventEndSubscriber:
             has_update, _, _ = zmq.select([self.socket], [], [], timeout)

             if has_update:
-                return self.socket.recv_pyobj()
+                return self.socket.recv_json()
         except zmq.ZMQError:
             pass

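The event payload tuples carry EventTypeEnum and EventStateEnum members. Unlike pickle, json cannot encode an arbitrary Enum, so this change relies on those enums mixing in str (otherwise callers would need to pass .value); a str-backed enum serializes as its underlying string, and the tuple itself round-trips as a JSON list. A quick illustration with made-up stand-in enums:

import json
from enum import Enum

# Hypothetical stand-ins for EventTypeEnum / EventStateEnum.
class EventType(str, Enum):  # the str mix-in makes members JSON-encodable
    TRACKED_OBJECT = "tracked_object"

class EventState(str, Enum):
    START = "start"

payload = (EventType.TRACKED_OBJECT, EventState.START, "front_door", {"id": "abc"})

encoded = json.dumps(payload)
print(encoded)              # ["tracked_object", "start", "front_door", {"id": "abc"}]
print(json.loads(encoded))  # note: the tuple comes back as a list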
@@ -37,14 +37,14 @@ class InterProcessCommunicator(Communicator):
                 break

             try:
-                (topic, value) = self.socket.recv_pyobj(flags=zmq.NOBLOCK)
+                (topic, value) = self.socket.recv_json(flags=zmq.NOBLOCK)

                 response = self._dispatcher(topic, value)

                 if response is not None:
-                    self.socket.send_pyobj(response)
+                    self.socket.send_json(response)
                 else:
-                    self.socket.send_pyobj([])
+                    self.socket.send_json([])
             except zmq.ZMQError:
                 break

@@ -65,8 +65,8 @@ class InterProcessRequestor:

     def send_data(self, topic: str, data: any) -> any:
         """Sends data and then waits for reply."""
-        self.socket.send_pyobj((topic, data))
-        return self.socket.recv_pyobj()
+        self.socket.send_json((topic, data))
+        return self.socket.recv_json()

     def stop(self) -> None:
         self.socket.close()
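The requestor/communicator pair is a plain ZeroMQ REQ/REP exchange; swapping send_pyobj/recv_pyobj for send_json/recv_json keeps the call sites identical, but the (topic, data) tuple arrives as a two-element JSON list, which the receiving side's tuple unpacking handles just the same. A minimal sketch with a hypothetical endpoint and topic string:

import zmq

ctx = zmq.Context()

rep = ctx.socket(zmq.REP)            # communicator side
rep.bind("ipc:///tmp/example_ipc")   # hypothetical endpoint

req = ctx.socket(zmq.REQ)            # requestor side
req.connect("ipc:///tmp/example_ipc")

# Requestor: send (topic, data) and wait for the reply.
req.send_json(("example_topic", {"camera": "front_door"}))

# Communicator: the JSON-encoded tuple arrives as a 2-element list; unpacking works.
(topic, value) = rep.recv_json()
rep.send_json({"status": "ok"})      # the reply must also be JSON-serializable

print(topic, value, req.recv_json())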
@@ -419,19 +419,19 @@ class RecordingMaintainer(threading.Thread):
                 )

             return {
-                Recordings.id: f"{start_time.timestamp()}-{rand_id}",
-                Recordings.camera: camera,
-                Recordings.path: file_path,
-                Recordings.start_time: start_time.timestamp(),
-                Recordings.end_time: end_time.timestamp(),
-                Recordings.duration: duration,
-                Recordings.motion: segment_info.motion_count,
+                Recordings.id.name: f"{start_time.timestamp()}-{rand_id}",
+                Recordings.camera.name: camera,
+                Recordings.path.name: file_path,
+                Recordings.start_time.name: start_time.timestamp(),
+                Recordings.end_time.name: end_time.timestamp(),
+                Recordings.duration.name: duration,
+                Recordings.motion.name: segment_info.motion_count,
                 # TODO: update this to store list of active objects at some point
-                Recordings.objects: segment_info.active_object_count
+                Recordings.objects.name: segment_info.active_object_count
                 + (1 if manual_event else 0),
-                Recordings.regions: segment_info.region_count,
-                Recordings.dBFS: segment_info.average_dBFS,
-                Recordings.segment_size: segment_size,
+                Recordings.regions.name: segment_info.region_count,
+                Recordings.dBFS.name: segment_info.average_dBFS,
+                Recordings.segment_size.name: segment_size,
             }
         except Exception as e:
             logger.error(f"Unable to store recording segment {cache_path}")
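Peewee field objects (Recordings.id, Recordings.camera, ...) were acceptable dict keys while the dict was pickled, but json.dumps requires string keys, so every key becomes the field's .name, the column name as a string. Peewee's insert() accepts string keys just as readily as field objects, so consumers of the payload keep working. A small sketch with a made-up, simplified model:

import json
from peewee import CharField, FloatField, Model, SqliteDatabase

db = SqliteDatabase(":memory:")

class Recordings(Model):  # simplified stand-in for Frigate's Recordings model
    camera = CharField()
    duration = FloatField()

    class Meta:
        database = db

db.create_tables([Recordings])

# Field objects are not JSON-serializable keys; their .name (a str) is.
data = {
    Recordings.camera.name: "front_door",  # "camera"
    Recordings.duration.name: 10.0,        # "duration"
}

json.dumps(data)                   # works now that every key is a string
Recordings.insert(data).execute()  # peewee accepts string keys for insert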
@@ -127,13 +127,13 @@ class PendingReviewSegment:

     def get_data(self, ended: bool) -> dict:
         return {
-            ReviewSegment.id: self.id,
-            ReviewSegment.camera: self.camera,
-            ReviewSegment.start_time: self.start_time,
-            ReviewSegment.end_time: self.last_update if ended else None,
-            ReviewSegment.severity: self.severity.value,
-            ReviewSegment.thumb_path: self.frame_path,
-            ReviewSegment.data: {
+            ReviewSegment.id.name: self.id,
+            ReviewSegment.camera.name: self.camera,
+            ReviewSegment.start_time.name: self.start_time,
+            ReviewSegment.end_time.name: self.last_update if ended else None,
+            ReviewSegment.severity.name: self.severity.value,
+            ReviewSegment.thumb_path.name: self.frame_path,
+            ReviewSegment.data.name: {
                 "detections": list(set(self.detections.keys())),
                 "objects": list(set(self.detections.values())),
                 "sub_labels": list(self.sub_labels),
@@ -176,7 +176,7 @@ class ReviewSegmentMaintainer(threading.Thread):
         """New segment."""
         new_data = segment.get_data(ended=False)
         self.requestor.send_data(UPSERT_REVIEW_SEGMENT, new_data)
-        start_data = {k.name: v for k, v in new_data.items()}
+        start_data = {k: v for k, v in new_data.items()}
         self.requestor.send_data(
             "reviews",
             json.dumps(
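Because get_data() now returns string keys, the old {k.name: v ...} re-mapping would fail (str has no .name attribute), so the comprehension drops the .name and becomes, in effect, a shallow copy. A tiny illustration with hypothetical values:

# Previously the keys were peewee fields and had to be converted before json.dumps():
#   start_data = {k.name: v for k, v in new_data.items()}
# Now get_data() already returns string keys, so this is just a plain copy.
new_data = {"id": "1718000000.0-abc123", "camera": "front_door"}  # illustrative values
start_data = {k: v for k, v in new_data.items()}
assert start_data == dict(new_data)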
@@ -207,8 +207,8 @@ class ReviewSegmentMaintainer(threading.Thread):
             json.dumps(
                 {
                     "type": "update",
-                    "before": {k.name: v for k, v in prev_data.items()},
-                    "after": {k.name: v for k, v in new_data.items()},
+                    "before": {k: v for k, v in prev_data.items()},
+                    "after": {k: v for k, v in new_data.items()},
                 }
             ),
         )
@@ -226,8 +226,8 @@ class ReviewSegmentMaintainer(threading.Thread):
             json.dumps(
                 {
                     "type": "end",
-                    "before": {k.name: v for k, v in prev_data.items()},
-                    "after": {k.name: v for k, v in final_data.items()},
+                    "before": {k: v for k, v in prev_data.items()},
+                    "after": {k: v for k, v in final_data.items()},
                 }
             ),
         )