# (listing metadata from source extraction: 133 lines, 4.2 KiB, Python)
import cv2
|
|
import math
|
|
import sys
|
|
import os
|
|
import time
|
|
import numpy as np
|
|
from prometheus_client import start_http_server, Counter, Gauge
|
|
import logging
|
|
# Timestamped log lines so events can be correlated with the video stream.
logging.basicConfig(level=logging.INFO, format='%(asctime)s [%(levelname)s] %(name)s: %(message)s')

# Prometheus metrics exported by this process:
# - cut_count: monotonically increasing count of detected "cut" events
# - machine_on: 1 while the machine is believed to be running, else 0
# - stream_on: 1 while the camera stream is open, else 0
cut_count = Counter('cut_count', 'Total number of jebnięcie')
machine_on = Gauge('machine_on', 'Current machine engine status')
stream_on = Gauge('stream_on', 'Current camera stream status')

# Expose the metrics over HTTP (module import side effect) on port 8888.
start_http_server(8888)
|
|
|
|
def dist(pos1, pos2):
    """Return the Euclidean distance between two 2-D points.

    Args:
        pos1: ``(x, y)`` tuple of the first point.
        pos2: ``(x, y)`` tuple of the second point.

    Returns:
        The straight-line distance as a float.
    """
    (x1, y1) = pos1
    (x2, y2) = pos2
    # math.hypot is the idiomatic (and overflow-safe) form of
    # sqrt((x2-x1)**2 + (y2-y1)**2).
    return math.hypot(x2 - x1, y2 - y1)
|
|
|
|
|
|
def run(path):
    """Watch a video stream and track bright spots to derive machine state.

    Opens *path* with OpenCV, thresholds a fixed crop of every frame for
    small bright blobs, and tracks them across frames.  The longest-lived
    spot is treated as the machine's "running" indicator (drives the
    ``machine_on`` gauge); short-lived spots appearing near it while the
    machine is on are counted as cuts via the ``cut_count`` counter.
    ``stream_on`` mirrors whether the capture is open.  Runs until the
    stream ends (or 'q' is pressed when a display is available), then
    releases the capture.

    Args:
        path: Any source accepted by ``cv2.VideoCapture`` (file, URL,
            device string).

    Raises:
        Exception: if the capture cannot be opened.
    """
    # Only open display windows when an X display is available.
    gui = 'DISPLAY' in os.environ

    stream_on.set(0)
    machine_on.set(0)

    # Create a VideoCapture object and read from input file.
    # If the input is the camera, pass 0 instead of the video file name.
    cap = cv2.VideoCapture(path)

    # Check if camera opened successfully
    if not cap.isOpened():
        raise Exception('VideoCapture not opened')

    logging.info('Stream opened')
    stream_on.set(1)

    spots = []        # tracked blobs: dicts with pos/firstseen/lastseen/lifetime
    turnedon = False  # current belief about the machine engine state

    framenum = 0
    framerate = 30    # assumed stream FPS; converts frame counts to seconds

    # Read until the stream is exhausted.
    while cap.isOpened():
        ret, frame = cap.read()
        if not ret:
            break

        h, w, _channels = frame.shape

        # Fixed region of interest where the indicator lights appear.
        smol = frame[int(h * 0.5):int(h * 0.9), int(w * 0.63):int(w * 0.8)]
        gray = cv2.cvtColor(smol, cv2.COLOR_BGR2GRAY)
        blurred = cv2.GaussianBlur(gray, (7, 7), 0)
        thresh = cv2.threshold(blurred, 235, 255, cv2.THRESH_BINARY)[1]
        # [-2] keeps compatibility with both OpenCV 3 (3-tuple) and
        # OpenCV 4 (2-tuple) return shapes of findContours.
        cnts = cv2.findContours(thresh, cv2.RETR_LIST,
                                cv2.CHAIN_APPROX_SIMPLE)[-2]

        for c in cnts:
            area = cv2.contourArea(c)
            if 20 < area < 80:
                # Compute the centroid of the contour from its moments
                # (m00 equals the area, which is > 20 here, so no /0).
                M = cv2.moments(c)
                cX = int(M["m10"] / M["m00"])
                cY = int(M["m01"] / M["m00"])

                # Match against an existing spot within 10 px, otherwise
                # register a new one (for/else: else runs when no break).
                for spot in spots:
                    if dist(spot['pos'], (cX, cY)) < 10:
                        spot['pos'] = (cX, cY)
                        spot['lastseen'] = framenum
                        spot['lifetime'] += 1
                        break
                else:
                    spots.append({
                        'pos': (cX, cY),
                        'lastseen': framenum,
                        'lifetime': 1,
                        'firstseen': framenum,
                    })

                if gui:
                    cv2.putText(thresh, str(area), (cX - 20, cY - 20),
                                cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255), 2)

        for spot in spots:
            # A spot not seen for a full second while the machine is on
            # may have been a blinking "cut" indicator.
            if spot['lastseen'] <= framenum - framerate and turnedon:
                d = dist(spots[0]['pos'], spot['pos'])
                l = spot['lifetime'] / framerate
                # Near (but not on top of) the main light, lit for a
                # plausible 1-8 seconds.
                if 5 < d < 30 and 1.0 < l < 8.0:
                    logging.info('PRYJEBAŁ :___DDD %.2f' % (l,))
                    cut_count.inc()

        # The longest-lived spot (spots[0] after the sort below) present
        # for over 5 s means the machine engine is running.
        if len(spots) and spots[0]['lifetime'] > framerate * 5:
            if not turnedon:
                logging.info('just turned on')
                machine_on.set(1)
                turnedon = True
        else:
            if turnedon:
                logging.info('just turned off')
                machine_on.set(0)
                turnedon = False

        # Forget spots unseen for a second; keep longest-lived first so
        # spots[0] is the main indicator candidate.
        spots = [spot for spot in spots if spot['lastseen'] > framenum - framerate]
        spots.sort(key=lambda v: v['lifetime'], reverse=True)
        framenum += 1

        if gui:
            cv2.imshow('frame', blurred)
            # cv2.imshow('edges', edges)
            cv2.imshow('thresh', thresh)

            # Press Q on keyboard to exit.
            if cv2.waitKey(25) & 0xFF == ord('q'):
                break

    # When everything done, release the video capture object.
    cap.release()
|
|
|
|
# Supervisor loop: keep the stream analysis alive forever.  Any failure
# (stream drop, decode error, bad path) is logged with its traceback,
# the stream gauge is zeroed, and we back off 5 s before reconnecting.
# NOTE(review): the video source is taken from the first CLI argument;
# sys.argv[1] raises IndexError when missing, which is also caught and
# retried here — presumably intentional, but verify against deployment.
while True:
    try:
        run(sys.argv[1])
    except Exception as exc:
        logging.exception('Failed?')
        stream_on.set(0)
        time.sleep(5.0)