PyQt: problemi con QThread
-
Ciao, sto creando un'applicazione che utilizza Qt, OpenCV e face_recognition.
Se voglio visualizzare il live della webcam e basta arrivo ad avere un FPS di 30.
Quando avvio il riconoscimento facciale però gli FPS scendono a 2-5 e l'interfaccia fisicamente si blocca. Ho provato ad utilizzare QThread ma la situazione non cambia. Avete qualche idea?from PyQt5 import QtCore, QtGui, QtWidgets, uic from PyQt5.QtWidgets import QInputDialog from PyQt5.QtCore import QObject, QThread, QRunnable, pyqtSignal, pyqtSlot from Opencv_GUI import Ui_MainWindow from timeit import default_timer as timer import sys import cv2 import time import datetime import logging import platform import os import numpy as np import face_recognition count = 0 black = [0, 0, 0] red = [0, 0, 255] yellow = [0, 255, 0] blue = [255, 0, 0] white = [255, 255, 255] BASE_DIR = os.path.dirname(os.path.abspath(__file__)) image_dir = os.path.join(BASE_DIR, "images") img_Known = os.path.join(BASE_DIR, "images/known") img_unKnown = os.path.join(BASE_DIR, "images/unknown") wind_Width = 800 wind_Height = 600 width_frame = 800 height_frame = 600 # logging.basicConfig(level=logging.DEBUG, format=' %(asctime)s - %(levelname)s - %(message)s') logging.basicConfig(filename='LOG.txt', level=logging.DEBUG, format=' %(asctime)s - %(levelname)s - %(message)s') # logging.disable() file_RD = "" file_WT = "" CamOpen = False CamFram = [] CamWork = [] cam_height = 600 cam_width = 800 cam_module = 0 frame_count = 0 class Background(QtCore.QThread): ''' Run Background worker ''' def __init__(self): QtCore.QThread.__init__(self) def __del__(self): self.wait() @pyqtSlot() def run(self): global count global CamOpen global CamFram try: while True: if CamOpen and len(CamFram) > 1: start = timer() NewFram = CamFram[0] gray_cam = cv2.cvtColor(NewFram, cv2.COLOR_BGR2GRAY) face_locations = face_recognition.face_locations(gray_cam) end = timer() if len(face_locations) != 0: print('Found {} face in {}.' .format( len(face_locations), round(end-start, 2))) logging.debug('Found {} face.' 
.format(len(face_locations))) # for face in face_locations: # top, right, bottom, left = face # cv2.rectangle(CamWork[0], (left, top), (right, bottom), yellow, 2) # WorkFrame = CamWork[0] # image = QtGui.QImage( # WorkFrame, WorkFrame.shape[1], WorkFrame.shape[0], QtGui.QImage.Format_RGB888) # pixmap = QtGui.QPixmap.fromImage(image) # pixmap = pixmap.scaled( # width_frame, height_frame, QtCore.Qt.KeepAspectRatio) # self.GUI.LB_WORK.setPixmap(pixmap) # print(len(CamFram)) del CamFram[0] except Exception as msg: logging.error('Error: ' + str(msg)) return class GUI(QtWidgets.QMainWindow): def __init__(self): super(GUI, self).__init__() self.GUI = Ui_MainWindow() self.GUI.setupUi(self) # self.setStyleSheet("font: 10pt Comic Sans MS") self.setStyleSheet("font: 10pt Roboto") self.Center_Screen() self.Gui_connect() self.Gui_Timer() def Center_Screen(self): try: screen = app.primaryScreen() size = screen.size() logging.debug('Screen width {}, height {}' .format(size.width(), size.height())) self.resize(size.width()*3/4, size.height()*3/4) except Exception as msg: logging.debug('Error: ' + str(msg)) def Gui_connect(self): ''' ''' # Designer statusbar self.GUI.statusbar.showMessage('Start program.') # Designer menuFile self.GUI.actionExit.triggered.connect(self.GUI_exit) self.GUI.actionOpen_log_File.triggered.connect(self.GUI_LogFile) # Designer menuCamera self.GUI.CheckCam0.triggered.connect(self.Gui_Cam) self.GUI.CheckCam1.triggered.connect(self.Gui_Cam) # Designer menuInfo self.GUI.actionInfo.triggered.connect(self.GUI_info) # Designer PushButton self.GUI.BT_OpenCAM.clicked.connect(self.GUI_OpenCam) self.GUI.BT_OpenFile.clicked.connect(self.GUI_OpenFile) # QThread def Gui_Timer(self): try: self.timer01 = QtCore.QTimer() self.timer01.timeout.connect(self.time_ck01) self.timer01.start(10) self.timer1 = QtCore.QTimer() self.timer1.timeout.connect(self.time_ck1) self.timer1.start(1000) self.background = Background() self.background.start() except Exception as msg: 
logging.debug('Error: ' + str(msg)) def GUI_exit(self): try: logging.debug('Exit program.') self.GUI.statusbar.showMessage('Exit program.') cap.release() sys.exit() except Exception as msg: logging.debug('Error: ' + str(msg)) def GUI_LogFile(self): try: os.startfile('DebugS.txt') logging.debug('Open Log File.') self.GUI.statusbar.showMessage('Open Log File.') except Exception as msg: logging.debug('Error: ' + str(msg)) def GUI_info(self): try: logging.debug('Info program.') self.GUI.statusbar.showMessage('Info program.') except Exception as msg: logging.debug('Error: ' + str(msg)) def Gui_Cam(self): global cam_module try: if cam_module == 0: self.GUI.CheckCam0.setChecked(False) self.GUI.CheckCam1.setChecked(True) cam_module = 1 else: self.GUI.CheckCam0.setChecked(True) self.GUI.CheckCam1.setChecked(False) cam_module = 0 logging.debug('Change CAM {}.' .format(cam_module)) self.GUI.statusbar.showMessage('Select active CAM {}.' .format(cam_module)) except Exception as msg: logging.debug('Error: ' + str(msg)) def GUI_OpenCam(self): global cam_module global CamOpen global CamFram global cap try: if not CamOpen: cap = cv2.VideoCapture(cam_module) CamOpen, NewFram = cap.read() CamFram.append(NewFram) logging.debug('Open CAM {},{}.' .format(cam_module, CamOpen)) self.GUI.statusbar.showMessage('Open CAM {},{}.' .format(cam_module, CamOpen)) else: cap.release() except Exception as msg: cap.release() logging.debug('Error: ' + str(msg)) def GUI_OpenFile(self): global file_RD try: logging.debug('Open File {}.' .format(file_RD)) self.GUI.statusbar.showMessage('Open File {}.' 
.format(file_RD)) except Exception as msg: logging.debug('Error: ' + str(msg)) def time_ck01(self): ''' Run function every 0.1s ''' global CamOpen global CamFram global cap global frame_count global width_frame global height_frame try: if CamOpen: start = timer() frame_count += 1 CamOpen, NewFram = cap.read() NewFram = cv2.flip(NewFram, 1) CamFram.append(NewFram) # cv2.imshow("Camera View", CamFram) NewFram = cv2.cvtColor(NewFram, cv2.COLOR_BGR2RGB) image = QtGui.QImage( NewFram, NewFram.shape[1], NewFram.shape[0], QtGui.QImage.Format_RGB888) pixmap = QtGui.QPixmap.fromImage(image) pixmap = pixmap.scaled(width_frame, height_frame, QtCore.Qt.KeepAspectRatio) self.GUI.LB_ORIGINAL.setPixmap(pixmap) end = timer() print("Get Frame {} in {}." .format(len(CamFram), round(end-start, 2))) # logging.debug('Retrive a frame from CAM {},{}.' .format(cam_module, CamOpen)) except Exception as msg: logging.debug('Error: ' + str(msg)) def time_ck1(self): ''' Run function every 1s ''' global width_frame global height_frame global frame_count width_frame = self.GUI.LB_ORIGINAL.geometry().width() - 2 height_frame = self.GUI.LB_ORIGINAL.geometry().height() - 2 if CamOpen: # logging.debug('Frame count {}.' .format(frame_count)) self.GUI.LB_Frame_OR.setText("Frame count {}" .format(frame_count)) frame_count = 0 def Update_work(self): ''' Run function every 1s ''' global CamOpen global CamFram try: pass except Exception as msg: logging.debug('Error: ' + str(msg)) logging.debug('##############################################################') logging.debug('Run application.') app = QtWidgets.QApplication([]) application = GUI() application.show() sys.exit(app.exec())
-
Se non mi sbaglio, face_recognition usa il deep learning, che è chiaramente lento e probabilmente non è quello che vuoi qui.
Per avere real-time face recognition tutti i software usano una qualche forma dell'algoritmo Viola–Jones, che è già presente in OpenCV: https://docs.opencv.org/4.2.0/db/d28/tutorial_cascade_classifier.html
In pratica: usa face_recognition solo dopo che hai scattato la foto per avere maggiore accuratezza, ma per avere velocità usa un classifier non-deep-learning.
-
Stai usando una variabile globale per trasferire informazioni a thread diversi: è la definizione di race condition.
QThread (o qualsiasi altro threading) non velocizza l'esecuzione di `gray_cam = cv2.cvtColor(NewFram, cv2.COLOR_BGR2GRAY)`
e/o `face_locations = face_recognition.face_locations(gray_cam)`,
quindi non riuscirai mai a raggiungere alti livelli di FPS usando deep-learning (da qui il mio consiglio).
La tua implementazione comunque non dovrebbe bloccare l'interfaccia principale, a meno che non ci sia un crash dovuto alla sopraccitata race condition.