Qt Quick QScreenCapture error on Android
Unsolved
QML and Qt Quick
-
Screen capture fail: QScreenCapture::CaptureFailed , "Failed to grab the screen content"
// NOTE(review): QScreenCapture setup. On Android, screen capture requires the
// user-granted MediaProjection permission flow; without it the capture fails
// with QScreenCapture::CaptureFailed ("Failed to grab the screen content"),
// which matches the error reported below — TODO confirm against the Qt
// Multimedia Android backend.
// ("qScreenCaptrue" is presumably a member declared elsewhere; the misspelling
// is kept as-is since the declaration is outside this snippet.)
qScreenCaptrue = new QScreenCapture();
qScreenCaptrue->setScreen(QGuiApplication::primaryScreen());
// Dump the primary screen's properties for diagnostics.
qDebug() << "Primary Screen Name: " << qScreenCaptrue->screen()->name();
qDebug() << "Primary Screen Geometry: " << qScreenCaptrue->screen()->geometry();
qDebug() << "Primary Screen Physical Size: " << qScreenCaptrue->screen()->physicalSize();
qDebug() << "Primary Screen Logical DPI: " << qScreenCaptrue->screen()->logicalDotsPerInch();
qDebug() << "Primary Screen Physical DPI: " << qScreenCaptrue->screen()->physicalDotsPerInch();
qDebug() << "Primary Screen Orientation: " << qScreenCaptrue->screen()->orientation();
// Route the capture into the session and out to the video sink.
captureSession.setScreenCapture(qScreenCaptrue);
captureSession.setVideoOutput(&videoSink);
error:
2023-09-30 13:03:33.411 3250-3275/org.qtproject.example.appqtAndroid W/libappqtAndroid_x86.so: Screen capture fail: QScreenCapture::CaptureFailed , "Failed to grab the screen content" -
package com.fm.webrtc; import android.annotation.SuppressLint; import android.app.Notification; import android.app.NotificationChannel; import android.app.NotificationManager; import android.app.PendingIntent; import android.app.Service; import android.content.Context; import android.content.Intent; import android.graphics.BitmapFactory; import android.media.projection.MediaProjection; import android.media.projection.MediaProjectionManager; import android.os.Binder; import android.os.Build; import android.os.IBinder; import android.util.Log; import androidx.annotation.Nullable; import androidx.annotation.RequiresApi; import androidx.core.app.NotificationCompat; public class ScreenCaptureServices extends Service { public final static String TAG = "ScreenCaptureServices"; private String NOTIFICATION_CHANNEL_ID = "AudioCaptureService_nofity"; private String NOTIFICATION_CHANNEL_NAME = "AudioCaptureService"; private String NOTIFICATION_CHANNEL_DESC = "AudioCaptureService"; private int NOTIFICATION_ID = 1000; private static final String NOTIFICATION_TICKER = "RecorderApp"; private int resultCode; private Intent resultData; public MediaProjectionManager mediaProjectionManager; public MediaProjection mediaProjection; public class ScreenCaptureBinder extends Binder { public ScreenCaptureServices getScreenRecordService() { return ScreenCaptureServices.this; } } @Nullable @Override public IBinder onBind(Intent intent) { // return new ScreenCaptureBinder(); throw new UnsupportedOperationException("Not yet implemented"); } @Override public void onCreate() { super.onCreate(); createNotification(); } @RequiresApi(api = Build.VERSION_CODES.Q) @Override public int onStartCommand(Intent intent, int flags, int startId) { try { resultCode = intent.getIntExtra("resultCode", -1); resultData = intent.getParcelableExtra("data"); Log.i(TAG, "onStartCommand: " + resultCode); Log.i(TAG, "onStartCommand: " + resultData); mediaProjectionManager = (MediaProjectionManager) 
getSystemService(MEDIA_PROJECTION_SERVICE); mediaProjection = mediaProjectionManager.getMediaProjection(resultCode, resultData); VideoCodec videoCodec = new VideoCodec(); videoCodec.setMediaProjection(mediaProjection); videoCodec.prepare(); videoCodec.startCoding(); ; } catch (Exception e) { Log.e(TAG, e.getMessage()); } return super.onStartCommand(intent, flags, startId); } @SuppressLint("WrongConstant") public void createNotification() { Log.i(TAG, "notification: " + Build.VERSION.SDK_INT); if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) { //Call Start foreground with notification Intent notificationIntent = new Intent(this, ScreenCaptureServices.class); PendingIntent pendingIntent; if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.S) { pendingIntent = PendingIntent.getActivity(this, 0, notificationIntent, PendingIntent.FLAG_MUTABLE); } else { pendingIntent = PendingIntent.getActivity(this, 0, notificationIntent, 0); } NotificationCompat.Builder notificationBuilder = new NotificationCompat.Builder(this, NOTIFICATION_CHANNEL_ID) .setLargeIcon(BitmapFactory.decodeResource(getResources(), R.drawable.ic_launcher_foreground)) .setSmallIcon(R.drawable.ic_launcher_foreground) .setContentTitle("Starting Service") .setContentText("Starting monitoring service") .setTicker(NOTIFICATION_TICKER) .setContentIntent(pendingIntent); Notification notification = notificationBuilder.build(); NotificationChannel channel = new NotificationChannel(NOTIFICATION_CHANNEL_ID, NOTIFICATION_CHANNEL_NAME, NotificationManager.IMPORTANCE_DEFAULT); channel.setDescription(NOTIFICATION_CHANNEL_DESC); NotificationManager notificationManager = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE); notificationManager.createNotificationChannel(channel); startForeground(NOTIFICATION_ID, notification); //notificationManager.notify(NOTIFICATION_ID, notification); } } }
package com.fm.webrtc;

import android.annotation.TargetApi;
import android.hardware.display.DisplayManager;
import android.hardware.display.VirtualDisplay;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.projection.MediaProjection;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
import android.view.Surface;

import java.io.IOException;
import java.nio.ByteBuffer;

/**
 * Encodes the screen (via a MediaProjection virtual display) to H.264 on a
 * dedicated thread, feeding each encoded frame to an {@link H264} decoder.
 *
 * Lifecycle: {@link #setMediaProjection}, {@link #prepare}, {@link #startCoding},
 * then {@link #stopCoding} to drain and release everything.
 */
public class VideoCodec extends Thread {

    private static final String MIME_TYPE = "video/avc";
    public final static int WIDTH = 1280;
    public final static int HEIGHT = 720;
    private final static int BIT_RATE = 500_000;
    private final static int FRAME_RATE = 20; // fps
    private final static int IFRAME_INTERVAL = 2; // keyframe interval, seconds
    private final static String TAG = "VideoCodec";

    private MediaProjection mediaProjection;
    private VirtualDisplay virtualDisplay;
    private MediaCodec mediaCodec;
    private volatile boolean isRecording;
    private MediaCodec.BufferInfo bufferInfo;
    private long startTime;
    private long timeStamp;
    private H264 h264 = null;
    private boolean isInitH264 = false;

    /** Renders a byte array as a lowercase hex string (for SPS/PPS logging). */
    private String bytesToHex(byte[] bytes) {
        StringBuilder sb = new StringBuilder();
        for (byte b : bytes) {
            sb.append(String.format("%02x", b));
        }
        return sb.toString();
    }

    /**
     * Configures the H.264 encoder and backs it with a virtual display.
     *
     * @return false if no MediaProjection was set or the encoder could not
     *         be created; true on success
     */
    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    public boolean prepare() {
        if (null == mediaProjection) {
            return false;
        }
        bufferInfo = new MediaCodec.BufferInfo();
        MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, WIDTH, HEIGHT);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
        Log.d(TAG, "created video format: " + format);
        try {
            mediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
        } catch (IOException e) {
            Log.e(TAG, "failed to create encoder", e);
            return false;
        }
        mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        // The encoder's input surface is what the virtual display renders into.
        Surface surface = mediaCodec.createInputSurface();
        Log.d(TAG, "created video input surface: " + surface);
        mediaCodec.start();
        // NOTE(review): dpi is passed as 1 here (original behavior) — confirm
        // whether the real screen density should be used instead.
        virtualDisplay = mediaProjection.createVirtualDisplay("screen-codec",
                WIDTH, HEIGHT, 1,
                DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC, surface, null, null);
        return true;
    }

    public void setMediaProjection(MediaProjection mediaProjection) {
        this.mediaProjection = mediaProjection;
    }

    /**
     * Encoder drain loop: requests a sync frame every 2 s, pulls encoded
     * buffers, lazily creates the H264 decoder from csd-0/csd-1, and forwards
     * each frame. Releases all resources when {@link #stopCoding} clears the
     * recording flag.
     */
    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    @Override
    public void run() {
        while (isRecording) {
            requestSyncFrameIfDue();
            int index = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
            // Throttle the drain loop; this keeps accumulated latency bounded.
            try {
                sleep(20);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
            if (index >= 0) {
                ByteBuffer buffer = mediaCodec.getOutputBuffer(index);
                // Honor the offset/size reported by the codec — reading from
                // position 0 is only correct when offset happens to be 0.
                buffer.position(bufferInfo.offset);
                buffer.limit(bufferInfo.offset + bufferInfo.size);
                byte[] outData = new byte[bufferInfo.size];
                buffer.get(outData);
                if (!isInitH264) {
                    initDecoderFromCodecConfig();
                }
                h264.onFrame(outData);
                if (startTime == 0) {
                    startTime = bufferInfo.presentationTimeUs / 1000;
                    Log.i(TAG, "video tms " + startTime);
                }
                mediaCodec.releaseOutputBuffer(index, false);
            }
        }
        // Tear down encoder, display and projection once recording stops.
        startTime = 0;
        mediaCodec.stop();
        mediaCodec.release();
        mediaCodec = null;
        virtualDisplay.release();
        virtualDisplay = null;
        mediaProjection.stop();
        mediaProjection = null;
        Log.i(TAG, "release video");
    }

    /** Asks the encoder to emit a sync (key) frame if 2 s have elapsed. */
    private void requestSyncFrameIfDue() {
        if (timeStamp != 0) {
            if (System.currentTimeMillis() - timeStamp >= 2_000) {
                Bundle params = new Bundle();
                // Flush immediately so the next frame is a keyframe.
                params.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
                mediaCodec.setParameters(params);
                timeStamp = System.currentTimeMillis();
            }
        } else {
            timeStamp = System.currentTimeMillis();
        }
    }

    /** Builds the H264 decoder from the encoder's SPS (csd-0) / PPS (csd-1). */
    private void initDecoderFromCodecConfig() {
        MediaFormat outputFormat = mediaCodec.getOutputFormat();
        ByteBuffer spsBuffer = outputFormat.getByteBuffer("csd-0");
        ByteBuffer ppsBuffer = outputFormat.getByteBuffer("csd-1");
        byte[] spsData = new byte[spsBuffer.remaining()];
        byte[] ppsData = new byte[ppsBuffer.remaining()];
        spsBuffer.get(spsData);
        ppsBuffer.get(ppsData);
        // Log SPS/PPS in hex for debugging.
        Log.d(TAG, "SPS Data: " + bytesToHex(spsData));
        Log.d(TAG, "PPS Data: " + bytesToHex(ppsData));
        isInitH264 = true;
        h264 = new H264(spsData, ppsData, WIDTH, HEIGHT, null);
        h264.startDecoder();
    }

    /** Starts the encode/drain thread. */
    public void startCoding() {
        isRecording = true;
        start();
    }

    /**
     * Stops encoding and blocks until the thread has finished.
     * The decoder is released only AFTER join() so run() cannot call
     * h264.onFrame() on a released codec (the original released it first,
     * which raced with the drain loop).
     */
    public void stopCoding() {
        isRecording = false;
        try {
            join();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
        if (h264 != null) {
            h264.release();
        }
    }
}
package com.fm.webrtc;

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.os.Build;
import android.util.Log;
import android.view.Surface;

import androidx.annotation.RequiresApi;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.ConcurrentLinkedQueue;

/**
 * H.264 hardware decoder wrapper. Configured from out-of-band SPS/PPS
 * (csd-0 / csd-1) and fed one access unit at a time via {@link #onFrame}.
 * When constructed with a null Surface the decoder runs in ByteBuffer mode.
 */
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
public class H264 {

    private MediaFormat h264MediaFormat = null;
    private MediaCodec h264MediaCodec = null;
    private long timeOutUs = 1000; // dequeue timeout, microseconds
    private String TAG = "com.video.player.Decoder.h264-fuweicong";
    private final static String MIME_TYPE = "video/avc";
    private Surface surfaceView = null;
    private byte[] sps = null;
    private byte[] pps = null;

    public int getWidth() { return width; }

    public int getHeight() { return height; }

    public int width;
    public int height;
    public int fps;
    public int mediaFormatNum = 0;   // last color format reported by the decoder
    public int decoderWidth = 0;     // width reported by the output format
    public int decoderHeight = 0;    // height reported by the output format

    // Queue of decoded frame data for consumers polling from another thread.
    private ConcurrentLinkedQueue<byte[]> videoFrameData = new ConcurrentLinkedQueue<>();

    public int getVideoFrameDataSize() {
        return videoFrameData.size();
    }

    /** @return the oldest queued frame, or null if the queue is empty. */
    public byte[] getVideoFrameData() {
        // poll() already returns null when empty; the size check is redundant
        // but harmless and kept for clarity.
        if (videoFrameData.size() > 0) {
            return videoFrameData.poll();
        }
        return null;
    }

    private boolean hardDecoderError = false;

    /**
     * @param sps     full SPS NAL (including start code)
     * @param pps     full PPS NAL (including start code)
     * @param width   expected frame width
     * @param height  expected frame height
     * @param surface render target, or null for ByteBuffer output
     */
    public H264(byte[] sps, byte[] pps, int width, int height, Surface surface) {
        surfaceView = surface;
        this.sps = sps;
        this.width = width;
        this.height = height;
        this.pps = pps;
        // (A dead copy of sps-minus-start-code was removed here; it was built
        // and never used.)
        hardDecoder();
    }

    /**
     * Configures the hardware decoder with the stored SPS/PPS.
     *
     * @return true on success, false if creation/configuration failed
     */
    private boolean hardDecoder() {
        try {
            h264MediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
            h264MediaFormat.setByteBuffer("csd-0", ByteBuffer.wrap(this.sps));
            h264MediaFormat.setByteBuffer("csd-1", ByteBuffer.wrap(this.pps));
            h264MediaCodec = MediaCodec.createDecoderByType(MIME_TYPE);
            if (surfaceView == null) {
                Log.e(TAG, "surfaceView 为null");
            }
            // A null surface is valid: output is then delivered via ByteBuffers.
            h264MediaCodec.configure(h264MediaFormat, surfaceView, null, 0);
        } catch (IOException e) {
            e.printStackTrace();
            Log.e(TAG, "解码异常");
            return false;
        } catch (Exception e) {
            Log.e(TAG, "异常");
            e.printStackTrace();
            return false;
        }
        return true;
    }

    /**
     * Starts the configured decoder.
     *
     * @return always true (errors are logged, matching original behavior)
     */
    public boolean startDecoder() {
        try {
            h264MediaCodec.start();
            h264MediaCodec.getOutputFormat().getInteger(MediaFormat.KEY_COLOR_FORMAT); // color format
        } catch (Exception e) {
            e.printStackTrace();
        }
        return true;
    }

    /** Reads and logs the output format (color format, dimensions) for a buffer. */
    private void getMediaOutFormat(int outputIndex) {
        MediaFormat mediaFormat = h264MediaCodec.getOutputFormat(outputIndex);
        int keyColor = mediaFormat.getInteger(MediaFormat.KEY_COLOR_FORMAT);
        decoderWidth = mediaFormat.getInteger(MediaFormat.KEY_WIDTH);
        decoderHeight = mediaFormat.getInteger(MediaFormat.KEY_HEIGHT);
        mediaFormatNum = keyColor;
        Log.e("fuweicong 颜色值", String.valueOf(keyColor));
        switch (keyColor) {
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
                Log.e("fuweicong", "YUV420SemiPlanar");
                break;
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
                Log.e("fuweicong", "YUV420Planar");
                break;
            default:
                Log.e("fuweicong", "MediaCodecInfo.CodecCapabilities.err");
                break;
        }
    }

    /** Blocks while the frame queue holds more than 5 frames (backpressure). */
    private void getMaxVideoFrameData() {
        while (getVideoFrameDataSize() > 5) {
            try {
                Thread.sleep(100);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }
    }

    /**
     * Feeds one encoded access unit to the decoder and drains any ready output.
     *
     * @param buf encoded H.264 data for a single frame
     * @return false if the surface is not renderable, no input buffer was
     *         available, or decoding threw; true otherwise
     */
    public boolean onFrame(byte[] buf) {
        if (surfaceView != null && !surfaceView.isValid()) {
            // Surface temporarily not renderable; wait until it is valid again.
            Log.e("fuweicong", "不可以渲染");
            return false;
        }
        try {
            int inputBufferIndex = h264MediaCodec.dequeueInputBuffer(timeOutUs);
            if (inputBufferIndex >= 0) {
                ByteBuffer inputByteBuffer = h264MediaCodec.getInputBuffer(inputBufferIndex);
                inputByteBuffer.clear();
                inputByteBuffer.put(buf);
                // Fix: the original passed timeOutUs (the dequeue timeout) as
                // the presentation timestamp; use 0 since no real PTS is known.
                h264MediaCodec.queueInputBuffer(inputBufferIndex, 0, buf.length, 0, 0);
            } else {
                return false;
            }
            // Drain every output buffer that is already decoded.
            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
            int outputBufferIndex = h264MediaCodec.dequeueOutputBuffer(bufferInfo, timeOutUs);
            while (outputBufferIndex >= 0) {
                this.getMediaOutFormat(outputBufferIndex);
                ByteBuffer buffer = h264MediaCodec.getOutputBuffer(outputBufferIndex);
                byte[] outData = new byte[bufferInfo.size];
                buffer.get(outData);
                Log.e(TAG, outData.length + "");
                // render=true: if a surface was configured, display the frame.
                h264MediaCodec.releaseOutputBuffer(outputBufferIndex, true);
                outputBufferIndex = h264MediaCodec.dequeueOutputBuffer(bufferInfo, timeOutUs);
            }
        } catch (Exception e) {
            e.printStackTrace();
            return false;
        }
        return true;
    }

    /**
     * Stops and releases the decoder. Safe to call more than once; stop() may
     * throw IllegalStateException if the codec never started, so teardown is
     * guarded and the reference is nulled to prevent reuse.
     */
    public void release() {
        if (h264MediaCodec != null) {
            try {
                h264MediaCodec.stop();
            } catch (IllegalStateException e) {
                Log.e(TAG, "stop failed", e);
            }
            h264MediaCodec.release();
            h264MediaCodec = null;
            videoFrameData.clear();
        }
    }
}