Qt and ffmpeg: image display delay.
-
Good afternoon.
I use ffmpeg to read videos, split them into frames, and try to display images on the label.
For some reason, only the last image is displayed, and only after the function has finished; the previous images never appear, even though the debugger shows that the drawing function is being called.
Please tell me what the rendering bug might be related to and how to fix it.void MainWindow::test2() { static struct SwsContext *img_convert_ctx; int videoStream, i, numBytes; int ret, got_picture; avformat_network_init(); //初始化FFmpeg网络模块 av_register_all(); //初始化FFMPEG 调用了这个才能正常适用编码器和解码器 AVFormatContext *pFormatCtx = NULL; //Allocate an AVFormatContext. pFormatCtx = avformat_alloc_context(); // Open video file if(avformat_open_input(&pFormatCtx, "./Wildlife.wmv", 0, 0) != 0) qDebug()<<"Couldn't open file"; if(avformat_find_stream_info(pFormatCtx, NULL) < 0) qDebug()<<"Couldn't find stream information"; av_dump_format(pFormatCtx, 0, "./Wildlife.wmv", 0);//information abaut file AVCodecContext *pCodecCtxOrig = NULL; AVCodecContext *pCodecCtx = NULL; // Find the first video stream videoStream = -1; for(i=0; i<pFormatCtx->nb_streams; i++) if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO) { videoStream=i; break; } if(videoStream == -1) qDebug()<<"Didn't find a video stream"; // Get a pointer to the codec context for the video stream pCodecCtx=pFormatCtx->streams[videoStream]->codec; AVCodec *pCodec = NULL; // Find the decoder for the video stream pCodec = avcodec_find_decoder(pCodecCtx->codec_id); if(pCodec == NULL) { fprintf(stderr, "Unsupported codec!\n"); qDebug()<<"Codec not found"; } // Copy context //pCodecCtx = avcodec_alloc_context3(pCodec); // if(avcodec_copy_context(pCodecCtx, pCodecCtxOrig) != 0) { //not work!!!!!!!!!!!!!!!!!!!!! 
// qDebug()<<"Error copying codec context"; // } // Open codec AVDictionary *aVDictionary; if(avcodec_open2(pCodecCtx, pCodec, NULL) < 0) qDebug()<<"Could not open codec"; // Выделить видеокадр // Allocate video frame AVFrame *pFrame = NULL; pFrame = av_frame_alloc(); // Allocate an AVFrame structure AVFrame *pFrameRGB; pFrameRGB = av_frame_alloc(); if(pFrameRGB == NULL) qDebug()<<"pFrameRGB == NULL"; uint8_t *buffer = NULL; numBytes; // Determine required buffer size and allocate buffer numBytes=avpicture_get_size(AV_PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height); buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t)); // Assign appropriate parts of buffer to image planes in pFrameRGB // Note that pFrameRGB is an AVFrame, but AVFrame is a superset // of AVPicture avpicture_fill((AVPicture *)pFrameRGB, buffer, AV_PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height); struct SwsContext *sws_ctx = NULL; int frameFinished; AVPacket packet; // initialize SWS context for software scaling sws_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_RGB24, SWS_BILINEAR, NULL, NULL, NULL ); i=0; while(av_read_frame(pFormatCtx, &packet)>=0) { // Is this a packet from the video stream? if(packet.stream_index==videoStream) { // Decode video frame avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet); // Did we get a video frame? 
if(frameFinished) { // Convert the image from its native format to RGB sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameRGB->data, pFrameRGB->linesize); // view image on label //if(++i<=20) //first 20 images view on label { qDebug()<<"view image on label"; QImage image( pCodecCtx->width, pCodecCtx->height, QImage::Format_RGB888 ); for( int y = 0; y < pCodecCtx->height; ++y ){ memcpy( image.scanLine(y), pFrameRGB->data[0]+y * pFrameRGB->linesize[0], pCodecCtx->width * 3 ); } //ui->label->setPixmap(QPixmap::fromImage(image,Qt::AutoColor)); paintImageOnLabel(image); qDebug()<<"view image on label222"; } } } // Free the packet that was allocated by av_read_frame av_free_packet(&packet); } // Free the RGB image av_free(buffer); av_frame_free(&pFrameRGB); // Free the YUV frame av_frame_free(&pFrame); // Close the codecs avcodec_close(pCodecCtx); avcodec_close(pCodecCtxOrig); // Close the video file avformat_close_input(&pFormatCtx); } void MainWindow::paintImageOnLabel(QImage image) { qDebug()<<"paintImageOnLabel()"; ui->label->update(); ui->label->setPixmap(QPixmap::fromImage(image,Qt::AutoColor)); ui->label->update(); //QThread::sleep(1); }
The image is drawn in this section of the code
qDebug()<<"view image on label"; QImage image( pCodecCtx->width, pCodecCtx->height, QImage::Format_RGB888 ); for( int y = 0; y < pCodecCtx->height; ++y ){ memcpy( image.scanLine(y), pFrameRGB->data[0]+y * pFrameRGB->linesize[0], pCodecCtx->width * 3 ); } //ui->label->setPixmap(QPixmap::fromImage(image,Qt::AutoColor)); paintImageOnLabel(image); qDebug()<<"view image on label222";
-
Hi,
You are basically using a blocking loop to show your images, therefore you do not allow Qt's event loop to run and update the GUI.
-
As I said before, don't do it that way — you are blocking the event loop.
One way is to use a QTimer to read the file at the interval defined by its frame rate.
-
Encapsulate the file handling properly in a QObject based class.