Assistance Needed for Implementing Camera Functionality in Qt 6 Using GStreamer Pipeline
-
camera.qml
import QtQuick
import QtQuick.Controls
import QtQuick.Layouts
import QtMultimedia 6.8
import QtQuick.Effects
import GStreamerPipeline 1.0

Item {
    id: camera

    property bool showBothCameras: false
    property bool showFrontCamera: true
    property bool showBackCamera: false
    property int frontCameraFps: 0
    property int backCameraFps: 0

    GStreamerPipeline {
        id: frontCameraPipeline
        onFpsUpdated: function(fps) { frontCameraFps = fps }
    }

    GStreamerPipeline {
        id: backCameraPipeline
        onFpsUpdated: function(fps) { backCameraFps = fps }
    }

    MediaPlayer {
        id: frontCameraMediaSingle
        autoPlay: true
        source: "gst-pipeline:udpsrc port=5000 ! application/x-rtp,encoding-name=H264 ! rtph264depay ! queue ! avdec_h264 ! queue ! videoconvert ! autovideosink"
        videoOutput: singleCameraViewFront
        onErrorChanged: console.error("Front Camera Error:", errorString)
        onPlayingChanged: console.log("Front Camera Playing", playing)
    }

    MediaPlayer {
        id: backCameraMediaSingle
        autoPlay: true
        source: "gst-pipeline:udpsrc port=5000 ! application/x-rtp,encoding-name=H264 ! rtph264depay ! queue ! avdec_h264 ! queue ! videoconvert ! autovideosink"
        videoOutput: singleCameraViewBack
        onErrorChanged: console.error("Back Camera Error:", errorString)
        onPlayingChanged: console.log("Back Camera Playing", playing)
    }

    MediaPlayer {
        id: frontCameraMediaDual
        autoPlay: true
        source: "gst-pipeline:udpsrc port=5000 ! application/x-rtp,encoding-name=H264 ! rtph264depay ! queue ! avdec_h264 ! queue ! videoconvert ! autovideosink"
        videoOutput: frontCameraView
        onErrorChanged: console.error("Front Camera Error:", errorString)
        onPlayingChanged: console.log("Front Camera Playing", playing)
    }

    MediaPlayer {
        id: backCameraMediaDual
        autoPlay: true
        source: "gst-pipeline:udpsrc port=5000 ! application/x-rtp,encoding-name=H264 ! rtph264depay ! queue ! avdec_h264 ! queue ! videoconvert ! autovideosink"
        videoOutput: backCameraView
        onErrorChanged: console.error("Back Camera Error:", errorString)
        onPlayingChanged: console.log("Back Camera Playing", playing)
    }

    // Background Gradient
    Rectangle {
        id: background
        anchors.fill: parent
        gradient: Gradient {
            GradientStop { position: 0.0; color: "#ffffff" }
            GradientStop { position: 0.5; color: "#9b9b9b" }
            GradientStop { position: 1.0; color: "#000000" }
        }
    }

    // Top Navbar
    TopNavbar {
        id: topNavbar
        anchors.top: parent.top
    }

    // Bottom Navbar
    BottomNavbar {
        id: bottomNavbar
        anchors.bottom: parent.bottom
    }

    // Camera Elements
    // Single Camera View
    VideoOutput {
        id: singleCameraViewFront
        visible: showFrontCamera && !showBothCameras
        fillMode: VideoOutput.PreserveAspectFit
        anchors.centerIn: parent
        width: parent.width * 0.8
        height: parent.height * 0.5

        Text {
            id: fpsDisplaySingleFront
            text: "Front FPS: " + frontCameraFps
            color: "white"
            font.pixelSize: 20
            font.family: "Manrope"
            anchors.bottom: parent.bottom
            anchors.left: parent.left
            anchors.leftMargin: 20
            anchors.bottomMargin: 20
        }
    }

    VideoOutput {
        id: singleCameraViewBack
        visible: showBackCamera && !showBothCameras
        fillMode: VideoOutput.PreserveAspectFit
        anchors.centerIn: parent
        width: parent.width * 0.8
        height: parent.height * 0.5

        Text {
            id: fpsDisplaySingleBack
            text: "Back FPS: " + backCameraFps
            color: "white"
            font.pixelSize: 20
            font.family: "Manrope"
            anchors.bottom: parent.bottom
            anchors.left: parent.left
            anchors.leftMargin: 20
            anchors.bottomMargin: 20
        }
    }

    // Dual Camera Side-by-Side View
    RowLayout {
        id: dualCameraView
        visible: showBothCameras
        spacing: 10
        anchors.horizontalCenter: parent.horizontalCenter
        anchors.verticalCenter: parent.verticalCenter
        width: parent.width * 0.8
        height: parent.height * 0.5

        VideoOutput {
            id: frontCameraView
            fillMode: VideoOutput.PreserveAspectFit
            Layout.fillWidth: true
            Layout.fillHeight: true

            Text {
                id: frontFpsOverlay
                text: "Front FPS: " + frontCameraFps
                color: "white"
                font.pixelSize: 20
                font.family: "Manrope"
                anchors.bottom: parent.bottom
                anchors.left: parent.left
                anchors.leftMargin: 20
                anchors.topMargin: 60
            }
        }

        VideoOutput {
            id: backCameraView
            fillMode: VideoOutput.PreserveAspectFit
            Layout.fillWidth: true
            Layout.fillHeight: true

            Text {
                id: backFpsOverlay
                text: "Back FPS: " + backCameraFps
                color: "white"
                font.pixelSize: 20
                font.family: "Manrope"
                anchors.bottom: parent.bottom
                anchors.right: parent.right
                anchors.rightMargin: 20
                anchors.bottomMargin: 5
            }
        }
    }

    // Overlay Buttons
    Column {
        id: cameraControls
        anchors.right: parent.right
        anchors.verticalCenter: parent.verticalCenter
        spacing: 20
        anchors.rightMargin: 20

        // Front Camera Button
        Rectangle {
            width: 60; height: 60; radius: 30
            color: "black"
            Image {
                source: "qrc:/new/icons/Icons/up_arrow.svg"
                anchors.centerIn: parent
                width: 40; height: 40
            }
            MouseArea {
                anchors.fill: parent
                onClicked: {
                    showFrontCamera = true;
                    showBackCamera = false;
                    showBothCameras = false;
                    frontCameraMediaSingle.play();
                    console.log("Switched to Front Camera");
                }
            }
        }

        // Dual Camera View Button
        Rectangle {
            width: 60; height: 60; radius: 30
            color: "black"
            Image {
                source: "qrc:/new/icons/Icons/scooty.png"
                anchors.centerIn: parent
                width: 40; height: 40
            }
            MouseArea {
                anchors.fill: parent
                onClicked: {
                    showBothCameras = true;
                    frontCameraMediaDual.play();
                    backCameraMediaDual.play();
                    console.log("Dual Camera View Enabled");
                }
            }
        }

        // Back Camera Button
        Rectangle {
            width: 60; height: 60; radius: 30
            color: "black"
            Image {
                source: "qrc:/new/icons/Icons/down_arrow.svg"
                anchors.centerIn: parent
                width: 40; height: 40
            }
            MouseArea {
                anchors.fill: parent
                onClicked: {
                    showFrontCamera = false;
                    showBothCameras = false;
                    showBackCamera = true;
                    frontCameraMediaSingle.stop();
                    backCameraMediaSingle.play();
                    console.log("Switched to Back Camera");
                }
            }
        }
    }
}
Hello Qt Creator Support Team,
I am working on implementing camera functionality in a Qt application using Qt Creator. I am utilizing a GStreamer pipeline where the video stream is transmitted over UDP to the Qt application, which then receives and displays it on the page.
However, I am facing significant challenges because some of the multimedia backend plugins were removed in the Qt 6.x releases: the custom gst-pipeline: sources that QMediaPlayer and VideoOutput accepted in Qt 5 are no longer supported, which makes it difficult to integrate the video stream properly.
Given these limitations, could you suggest alternative solutions for implementing camera functionality using a GStreamer pipeline in Qt 6? Any guidance on best practices or recommended approaches would be greatly appreciated.
Thank you for your support!
Best regards
Nandini
-
Hi and welcome to devnet,
This is not the Qt Creator support team, this is a user forum.
As for GStreamer and Qt 6: you might want to take a look at their qml6glsink element.
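For reference, here is a minimal sketch of that approach, modelled on the qmlsink example that ships with gst-plugins-good (which provides qml6glsink). The receive pipeline is the one from your question, with autovideosink replaced by glupload ! qml6glsink so the decoded frames are rendered into a QML item; the import URI, the GstGLQt6VideoItem type and the sink's widget property follow that example and may differ slightly depending on your GStreamer version, so treat this as a starting point rather than a drop-in solution.

main.qml

import QtQuick

Window {
    width: 640
    height: 480
    visible: true

    // Render target for qml6glsink; looked up from C++ by objectName.
    GstGLQt6VideoItem {
        id: video
        objectName: "videoItem"
        anchors.fill: parent
    }
}

(with import org.freedesktop.gstreamer.Qt6GLVideoItem added at the top once the plugin is installed)

main.cpp

#include <QGuiApplication>
#include <QQmlApplicationEngine>
#include <QQuickItem>
#include <QQuickWindow>
#include <QRunnable>
#include <gst/gst.h>

// Defers setting the pipeline to PLAYING until the QML scene graph exists,
// as done in the gst-plugins-good qmlsink example.
class SetPlaying : public QRunnable
{
public:
    explicit SetPlaying(GstElement *pipeline)
        : m_pipeline(GST_ELEMENT(gst_object_ref(pipeline))) {}
    ~SetPlaying() override { gst_object_unref(m_pipeline); }
    void run() override { gst_element_set_state(m_pipeline, GST_STATE_PLAYING); }
private:
    GstElement *m_pipeline;
};

int main(int argc, char *argv[])
{
    gst_init(&argc, &argv);
    QGuiApplication app(argc, argv);

    // Same receive pipeline as in camera.qml, but ending in qml6glsink so the
    // frames are painted into the QML item instead of a separate window.
    GstElement *pipeline = gst_parse_launch(
        "udpsrc port=5000 ! application/x-rtp,encoding-name=H264 ! "
        "rtph264depay ! queue ! avdec_h264 ! queue ! videoconvert ! "
        "glupload ! qml6glsink name=sink", nullptr);
    GstElement *sink = gst_bin_get_by_name(GST_BIN(pipeline), "sink");

    QQmlApplicationEngine engine;
    engine.load(QUrl(QStringLiteral("qrc:/main.qml")));

    auto *window = qobject_cast<QQuickWindow *>(engine.rootObjects().first());
    QQuickItem *videoItem = window->findChild<QQuickItem *>("videoItem");
    g_object_set(sink, "widget", videoItem, nullptr);  // hand the QML item to the sink

    // Start the pipeline only once the scene graph is about to render.
    window->scheduleRenderJob(new SetPlaying(pipeline),
                              QQuickWindow::BeforeSynchronizingStage);

    int ret = app.exec();
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(sink);
    gst_object_unref(pipeline);
    return ret;
}

With this approach MediaPlayer and VideoOutput are not needed at all: the pipeline is owned by the C++ side and the QML item is only the render target, so the rest of camera.qml (buttons, FPS overlays, view switching) can stay as it is and just swap the video items. You need to link the application against gstreamer-1.0 and have the gst-plugins-good Qt6 plugin installed for qml6glsink and the QML import to be found.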