Thread / OpenCV Segmentation Fault
-
wrote on 23 May 2023, 07:34 last edited by
Hello folks,
I'm trying to visualize some image processing using cv2.
In general the following code works as intended, and I was about to start implementing my processing code. But unfortunately I sometimes get segmentation faults if I repeatedly stop/start the VideoThread. I use a QML Image item with a QQuickImageProvider to visualize the video feed.
Interestingly, I still get console output from signals received in my QML file after the VideoThread's quit() method has already been called.
Can anyone help? Google couldn't.
import sys
import cv2
from PySide6.QtGui import QImage
from PySide6.QtCore import Signal, Slot, Qt, QThread
from PySide6.QtQuick import QQuickImageProvider
from PySide6.QtQml import QQmlImageProviderBase


class VideoThread(QThread):
    frameChanged = Signal(QImage)

    def __init__(self, parent=None):
        QThread.__init__(self, parent)
        self.capture = cv2.VideoCapture(0)
        self.capture.set(cv2.CAP_PROP_FPS, 30)
        self.running = True
        self.detecting = False
        # initialize haar cascade face detection
        self.faceCascade = cv2.CascadeClassifier(cv2.data.haarcascades + 'haarcascade_frontalface_default.xml')
        print("VideoThread initialization finished")

    def run(self):
        while self.running:
            print("getframe...")
            ret, frame = self.capture.read()
            if ret:
                if self.detecting:
                    # Convert the frame to grayscale
                    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
                    # Detect faces in the frame
                    faces = self.faceCascade.detectMultiScale(gray, scaleFactor=1.1, minNeighbors=5)
                    # Draw rectangles around the faces
                    for (x, y, w, h) in faces:
                        cv2.rectangle(frame, (x, y), (x + w, y + h), (255, 0, 0), 2)
                rgbImage = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
                h, w, _ = rgbImage.shape
                qImage = QImage(rgbImage.data, w, h, QImage.Format_RGB888)
                self.frameChanged.emit(qImage)
                print("...emitted frame")

    def quit(self):
        print("try closing")
        self.running = False
        self.capture.release()
        super().quit()
        super().wait()
        self.deleteLater()
        print("closed")

    def start(self):
        print("VideoThread: start")
        self.running = True
        super().start()
        print("VideoThread: started")

    def detect(self):
        self.detecting = not self.detecting


class VideoPlayer(QQuickImageProvider):
    imageChanged = Signal(QImage)

    def __init__(self):
        super().__init__(QQmlImageProviderBase.Image, QQmlImageProviderBase.ForceAsynchronousImageLoading)
        self.videoThread = None
        self.image = None

    def requestImage(self, id, size, requestedSize):
        if self.image:
            img = self.image
        else:
            img = QImage(1280, 720, QImage.Format_RGBA8888)
            img.fill(Qt.black)
        return img

    @Slot(QImage)
    def updateImage(self, frame):
        print("new image in updateImage")
        self.image = frame
        self.imageChanged.emit(frame)

    @Slot()
    def start(self):
        print("Starting Video feed...")
        if not self.videoThread:
            self.videoThread = VideoThread()
            self.videoThread.frameChanged.connect(self.updateImage)
        self.videoThread.running = True
        self.videoThread.start()

    @Slot()
    def stop(self):
        print("Finishing Video feed.")
        if self.videoThread:
            self.videoThread.quit()
            # self.videoThread.deleteLater()
            self.videoThread = None
        print("Finished Video feed.")

    @Slot()
    def toggleDetection(self):
        self.videoThread.detect()
-
@LS-KS said in Thread / OpenCV Segmentation Fault:
qImage = QImage(rgbImage.data, w, h, QImage.Format_RGB888)
The usual problem when using this QImage constructor without reading the docs, I would guess: "The buffer must remain valid throughout the life of the QImage and all copies that have not been modified or otherwise detached from the original buffer."
Make a deep copy of the image.
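A minimal sketch of that fix inside run() (my adaptation of the snippet above, not tested against the rest of the code): passing the row stride explicitly and calling .copy() makes the QImage own its pixel data instead of pointing into the NumPy array, which may be freed or overwritten once the loop iteration ends.

rgbImage = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
h, w, ch = rgbImage.shape
bytesPerLine = ch * w  # row stride of the contiguous RGB buffer
# .copy() detaches the QImage from rgbImage.data, so the emitted image
# stays valid even after the NumPy array goes out of scope
qImage = QImage(rgbImage.data, w, h, bytesPerLine, QImage.Format_RGB888).copy()
self.frameChanged.emit(qImage)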
-
Hi,
Your thread stopping looks wrong. You set running to False and immediately release your capture while, in fact, the run function might still be using it. The deallocation should rather happen as cleanup at the end of the run method.
-
wrote on 24 May 2023, 06:50 last edited by
@Christian-Ehrlicher,
Thank you for your replies.
I moved the super().wait() call just one line up, and now it never crashes.
But somehow this feels wrong?! I think I will have time to read the docs today.
@SGaist: what do you mean by 'clean up at the end of the run function'?
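If "one line up" means that wait() now comes before capture.release(), quit() would look roughly like this (just an assumption about the reordering, not taken from the actual code):

def quit(self):
    print("try closing")
    self.running = False
    super().quit()
    super().wait()          # wait until run() has left its while loop...
    self.capture.release()  # ...before releasing the capture it reads from
    self.deleteLater()
    print("closed")

That would remove the race between run() reading frames and the capture being released, which would explain why the crash disappears.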
-
wrote on 24 May 2023, 07:18 last edited by JonB
@LS-KS said in Thread / OpenCV Segmentation Fault:
I moved the super().wait() call just one line up, and now it never crashes.
@SGaist: what do you mean by 'clean up at the end of the run function'?
I have not analyzed all your code, but I think from your def quit(self) you should move at least the lines self.capture.release() and self.deleteLater() out and put them as the last lines in def run(self), after the while self.running loop.
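A rough sketch of that change, assuming the omitted loop body stays as in the original run() (untested):

def run(self):
    while self.running:
        ret, frame = self.capture.read()
        if ret:
            ...  # detection, conversion and frameChanged.emit() as before
    # cleanup as the last lines of run(): the worker releases its own resources
    # only after the loop has really finished, so nothing reads a released capture
    self.capture.release()
    self.deleteLater()

def quit(self):
    print("try closing")
    self.running = False   # let the while loop fall through
    super().quit()
    super().wait()         # returns only after run(), including its cleanup, is done
    print("closed")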