From f54fa2e56c6f14f6ed75a306cbee1436e8912eb9 Mon Sep 17 00:00:00 2001
From: blakeblackshear
Date: Tue, 19 Feb 2019 21:15:57 -0600
Subject: [PATCH 1/2] add the ability to apply a masking image

---
 config/mask-0-300.bmp   | Bin 0 -> 245056 bytes
 config/mask-350-250.bmp | Bin 0 -> 320056 bytes
 config/mask-750-250.bmp | Bin 0 -> 320056 bytes
 detect_objects.py       | 11 +++++++++--
 4 files changed, 9 insertions(+), 2 deletions(-)
 create mode 100644 config/mask-0-300.bmp
 create mode 100644 config/mask-350-250.bmp
 create mode 100644 config/mask-750-250.bmp

diff --git a/config/mask-0-300.bmp b/config/mask-0-300.bmp
new file mode 100644
index 0000000000000000000000000000000000000000..f451c61b1c1e2bc39ef4c9594f6ac244765e1e3f
Binary files /dev/null and b/config/mask-0-300.bmp differ
diff --git a/config/mask-350-250.bmp b/config/mask-350-250.bmp
new file mode 100644
Binary files /dev/null and b/config/mask-350-250.bmp differ
diff --git a/config/mask-750-250.bmp b/config/mask-750-250.bmp
new file mode 100644
index 0000000000000000000000000000000000000000..392422cedeac353186c3a501892ba5d523c95be2
Binary files /dev/null and b/config/mask-750-250.bmp differ
diff --git a/detect_objects.py b/detect_objects.py
index 6e9f30605..60569614c 100644
--- a/detect_objects.py
+++ b/detect_objects.py
@@ -190,11 +190,14 @@ def main():
     regions = []
     for region_string in REGIONS.split(':'):
         region_parts = region_string.split(',')
+        region_mask_image = cv2.imread("/config/{}".format(region_parts[4]), cv2.IMREAD_GRAYSCALE)
+        region_mask = np.where(region_mask_image==[0])
         regions.append({
             'size': int(region_parts[0]),
             'x_offset': int(region_parts[1]),
             'y_offset': int(region_parts[2]),
             'min_object_size': int(region_parts[3]),
+            'mask': region_mask,
             # Event for motion detection signaling
             'motion_detected': mp.Event(),
             # create shared array for storing 10 detected objects
@@ -259,7 +262,7 @@ def main():
             motion_changed, frame_shape, 
             region['size'], region['x_offset'], region['y_offset'],
-            region['min_object_size'],
+            region['min_object_size'], region['mask'],
             True))
         motion_process.daemon = True
         motion_processes.append(motion_process)
@@ -426,7 +429,7 @@ def process_frames(shared_arr, shared_output_arr, shared_frame_time, frame_lock,
 
 # do the actual motion detection
 def detect_motion(shared_arr, shared_frame_time, frame_lock, frame_ready, motion_detected, motion_changed,
-                  frame_shape, region_size, region_x_offset, region_y_offset, min_motion_area, debug):
+                  frame_shape, region_size, region_x_offset, region_y_offset, min_motion_area, mask, debug):
     # shape shared input array into frame for processing
     arr = tonumpyarray(shared_arr).reshape(frame_shape)
 
@@ -455,6 +458,10 @@ def detect_motion(shared_arr, shared_frame_time, frame_lock, frame_ready, motion
 
         # convert to grayscale
         gray = cv2.cvtColor(cropped_frame, cv2.COLOR_BGR2GRAY)
+
+        # apply image mask
+        gray[mask] = [255]
+
         # apply gaussian blur
         gray = cv2.GaussianBlur(gray, (21, 21), 0)
 
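The hunks above read a fifth comma-separated field from each REGIONS entry, load the named BMP out of /config as grayscale, and pin every black mask pixel to white before blurring, so those pixels can never differ from the running average frame. Here is a minimal, self-contained sketch of the same technique; the synthetic frame and in-memory mask are stand-ins for the real shared frame buffer and the BMP files this patch adds.

```python
import cv2
import numpy as np

# Stand-in for a cropped region of the camera frame (the real code slices
# a shared-memory frame at the region's x/y offsets).
cropped_frame = np.full((250, 250, 3), 127, dtype=np.uint8)

# An in-memory mask shaped like the ones the patch reads from /config:
# white (255) pixels stay eligible for motion, black (0) pixels are ignored.
mask_image = np.full((250, 250), 255, dtype=np.uint8)
mask_image[0:80, :] = 0  # e.g. blank out a busy road at the top of the region

# Indices of every masked pixel, computed the same way as in detect_objects.py.
mask = np.where(mask_image == [0])

# Masked pixels are forced to a constant value after grayscale conversion,
# so they never produce a delta against the running average frame.
gray = cv2.cvtColor(cropped_frame, cv2.COLOR_BGR2GRAY)
gray[mask] = [255]
```

Note that the mask image must match the region's cropped size, since the indices from `np.where` are applied directly to the cropped grayscale frame.
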
From 496b96b4f72164561b513eaa144b137e501ca69a Mon Sep 17 00:00:00 2001
From: blakeblackshear
Date: Wed, 20 Feb 2019 06:20:52 -0600
Subject: [PATCH 2/2] make motion detection less sensitive to rain

Reduces the significance of fast-moving objects and prioritizes objects
that overlap in location across multiple frames.
---
 README.md         |  4 ++--
 detect_objects.py | 45 ++++++++++++++++++++++++++++++++-------------
 2 files changed, 34 insertions(+), 15 deletions(-)

diff --git a/README.md b/README.md
index 70379be36..222beb22a 100644
--- a/README.md
+++ b/README.md
@@ -44,7 +44,7 @@ Access the mjpeg stream at http://localhost:5000
 - [x] Add last will and availability for MQTT
 - [ ] Add ability to turn detection on and off via MQTT
 - [ ] Add a max size for motion and objects (height/width > 1.5, total area > 1500 and < 100,000)
-- [ ] Make motion less sensitive to rain
+- [x] Make motion less sensitive to rain
 - [x] Use Events or Conditions to signal between threads rather than polling a value
 - [ ] Implement a debug option to save images with detected objects
 - [ ] Only report if x% of the recent frames have a person to avoid single frame false positives (maybe take an average of the person scores in the past x frames?)
@@ -53,7 +53,7 @@ Access the mjpeg stream at http://localhost:5000
 - [ ] Merge bounding boxes that span multiple regions
 - [ ] Switch to a config file
 - [ ] Allow motion regions to be different than object detection regions
-- [ ] Add motion detection masking
+- [x] Add motion detection masking
 - [x] Change color of bounding box if motion detected
 - [x] Look for a subset of object types
 - [ ] Try and reduce CPU usage by simplifying the tensorflow model to just include the objects we care about
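The detect_objects.py hunks that follow replace the single-frame threshold with a running average of frame deltas, so a raindrop that flashes through one frame contributes far less than an object that persists across several. Before the diff, a condensed sketch of that pipeline under the same assumptions, with synthetic frames standing in for camera input:

```python
import cv2
import numpy as np

# A long-term average frame (float, as accumulateWeighted requires) and a
# current frame containing a small bright square standing in for an object.
avg_frame = np.full((240, 320), 100, dtype=np.float64)
gray = np.full((240, 320), 100, dtype=np.uint8)
gray[100:140, 100:140] = 200

# Delta between the current frame and the running average.
frame_delta = cv2.absdiff(gray, cv2.convertScaleAbs(avg_frame))

# Running average of the deltas themselves; with alpha=0.2, a raindrop that
# appears in a single frame contributes only 20% of its delta, while an
# object that stays put accumulates toward the full value.
avg_delta = frame_delta.copy().astype("float")
cv2.accumulateWeighted(frame_delta, avg_delta, 0.2)

# Keep only averaged deltas that also show motion in the current frame,
# so stale motion from earlier frames is not carried forward.
current_thresh = cv2.threshold(frame_delta, 25, 255, cv2.THRESH_BINARY)[1]
avg_delta_image = cv2.convertScaleAbs(avg_delta)
avg_delta_image[np.where(current_thresh == [0])] = [0]
thresh = cv2.threshold(avg_delta_image, 25, 255, cv2.THRESH_BINARY)[1]
```
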
diff --git a/detect_objects.py b/detect_objects.py
index 60569614c..499884890 100644
--- a/detect_objects.py
+++ b/detect_objects.py
@@ -434,17 +434,11 @@ def detect_motion(shared_arr, shared_frame_time, frame_lock, frame_ready, motion
     arr = tonumpyarray(shared_arr).reshape(frame_shape)
 
     avg_frame = None
-    last_motion = -1
+    avg_delta = None
     frame_time = 0.0
     motion_frames = 0
     while True:
         now = datetime.datetime.now().timestamp()
-        # if it has been long enough since the last motion, clear the flag
-        if last_motion > 0 and (now - last_motion) > 2:
-            last_motion = -1
-            motion_detected.clear()
-            with motion_changed:
-                motion_changed.notify_all()
 
         with frame_ready:
             # if there isnt a frame ready for processing or it is old, wait for a signal
@@ -459,7 +453,7 @@ def detect_motion(shared_arr, shared_frame_time, frame_lock, frame_ready, motion
         # convert to grayscale
         gray = cv2.cvtColor(cropped_frame, cv2.COLOR_BGR2GRAY)
 
-        # apply image mask
+        # apply image mask to remove areas from motion detection
         gray[mask] = [255]
 
         # apply gaussian blur
@@ -470,15 +464,33 @@ def detect_motion(shared_arr, shared_frame_time, frame_lock, frame_ready, motion
             continue
 
         # look at the delta from the avg_frame
-        cv2.accumulateWeighted(gray, avg_frame, 0.01)
         frameDelta = cv2.absdiff(gray, cv2.convertScaleAbs(avg_frame))
-        thresh = cv2.threshold(frameDelta, 25, 255, cv2.THRESH_BINARY)[1]
+
+        if avg_delta is None:
+            avg_delta = frameDelta.copy().astype("float")
+
+        # compute the average delta over the past few frames
+        # the alpha value can be modified to configure how sensitive the motion detection is:
+        # higher values mean the current frame impacts the delta a lot, and a single raindrop
+        # may put it over the edge; too low, and a fast-moving person won't be detected as motion.
+        # this also assumes that a person is in the same location across more than a single frame
+        cv2.accumulateWeighted(frameDelta, avg_delta, 0.2)
+
+        # compute the threshold image for the current frame
+        current_thresh = cv2.threshold(frameDelta, 25, 255, cv2.THRESH_BINARY)[1]
+
+        # black out everything in the avg_delta where there isn't motion in the current frame
+        avg_delta_image = cv2.convertScaleAbs(avg_delta)
+        avg_delta_image[np.where(current_thresh==[0])] = [0]
+
+        # then look for deltas above the threshold, but only in areas where there is a delta
+        # in the current frame. this prevents deltas from previous frames from being included
+        thresh = cv2.threshold(avg_delta_image, 25, 255, cv2.THRESH_BINARY)[1]
 
         # dilate the thresholded image to fill in holes, then find contours
         # on thresholded image
         thresh = cv2.dilate(thresh, None, iterations=2)
-        cnts = cv2.findContours(thresh.copy(), cv2.RETR_EXTERNAL,
-            cv2.CHAIN_APPROX_SIMPLE)
+        cnts = cv2.findContours(thresh.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
         cnts = imutils.grab_contours(cnts)
 
         # if there are no contours, there is no motion
@@ -506,15 +518,22 @@ def detect_motion(shared_arr, shared_frame_time, frame_lock, frame_ready, motion
             motion_frames += 1
             # if there have been enough consecutive motion frames, report motion
             if motion_frames >= 3:
+                # only average in the current frame if the difference persists for at least 3 frames
+                cv2.accumulateWeighted(gray, avg_frame, 0.01)
                 motion_detected.set()
                 with motion_changed:
                     motion_changed.notify_all()
-                last_motion = now
         else:
+            # when there is no motion, just keep averaging the frames together
+            cv2.accumulateWeighted(gray, avg_frame, 0.01)
            motion_frames = 0
+            motion_detected.clear()
+            with motion_changed:
+                motion_changed.notify_all()
 
         if debug and motion_frames >= 3:
             cv2.imwrite("/lab/debug/motion-{}-{}-{}.jpg".format(region_x_offset, region_y_offset, datetime.datetime.now().timestamp()), cropped_frame)
+            cv2.imwrite("/lab/debug/avg_delta-{}-{}-{}.jpg".format(region_x_offset, region_y_offset, datetime.datetime.now().timestamp()), avg_delta_image)
 
 if __name__ == '__main__':
     mp.freeze_support()
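With both patches applied, each colon-separated entry in the REGIONS configuration carries a fifth field naming a mask file under /config — the format inferred from `region_parts[4]` in the first patch is `size,x_offset,y_offset,min_object_size,mask_filename`. A hypothetical helper for generating a compatible mask BMP (not part of either patch; the file name and blanked rectangle are illustrative):

```python
import cv2
import numpy as np

# White (255) means "detect motion here", black (0) means "ignore".
# The mask should match the region's cropped size, e.g. 250x250 for a
# region declared with size 250.
mask = np.full((250, 250), 255, dtype=np.uint8)
mask[0:60, :] = 0  # ignore the top 60 rows (e.g. a neighbor's driveway)
cv2.imwrite("config/mask-350-250.bmp", mask)
```

Because masked pixels are pinned to a constant before blurring and differencing, they contribute zero delta regardless of rain, headlights, or foliage in that part of the frame.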