
Developing Audio/Video Applications with Qt: Decoding a Local Camera with FFmpeg and Converting YUV422 to YUV420

...
void CameraThreadFFmpeg::initCamera()
{
    //https://blog.****.net/weixin_37921201/article/details/120357826
    //open from the command line: ffplay -f dshow -i video="USB Video Device" -s 1280x720 -framerate 30

    //start timing
    timer.restart();

    //option dictionary
    AVDictionary *options = NULL;
    //set the resolution
    QString size = QString("%1x%2").arg(videoWidth).arg(videoHeight);
    av_dict_set(&options, "video_size", size.toUtf8().constData(), 0);
    //set the frame rate
    if (frameRate > 0) {
        av_dict_set(&options, "framerate", QString::number(frameRate).toUtf8().constData(), 0);
    }

    //set the input format (only if the device and platform support it)
    //av_dict_set(&options, "input_format", "mjpeg", 0);
    //set the pixel format (on some devices forcing a format keeps the frame rate low)
    //av_dict_set(&options, "pixel_format", "yuyv422", 0);

    //print the device list
    //FFmpegHelper::showDevice();
    //print the device options
    //FFmpegHelper::showOption(cameraName);

    //allocate the format context
    formatCtx = avformat_alloc_context();
    AVInputFormatx *ifmt = NULL;
    QByteArray url = cameraName.toUtf8();
#if defined(Q_OS_WIN)
    //ifmt = av_find_input_format("vfwcap");
    ifmt = av_find_input_format("dshow");
    url = QString("video=%1").arg(cameraName).toUtf8();
#elif defined(Q_OS_LINUX)
    //ifmt = av_find_input_format("v4l2");
    ifmt = av_find_input_format("video4linux2");
#elif defined(Q_OS_MAC)
    ifmt = av_find_input_format("avfoundation");
#endif

    int result = avformat_open_input(&formatCtx, url.data(), ifmt, &options);
    av_dict_free(&options);
    if (result < 0) {
        debug("open input", "error: open failed " + FFmpegHelper::getError(result));
        return;
    }

    //read the stream info
    result = avformat_find_stream_info(formatCtx, NULL);
    if (result < 0) {
        debug("open input", "error: failed to find stream info " + FFmpegHelper::getError(result));
        return;
    }

    //find the best video stream index
    AVCodecx *videoCodec;
    videoIndex = av_find_best_stream(formatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0);
    if (videoIndex < 0) {
        debug("open input", "error: no video stream found");
        return;
    }

    //get the video stream
    AVStream *videoStream = formatCtx->streams[videoIndex];

    //find the video decoder (not needed if the fifth argument of av_find_best_stream above is used)
    AVCodecID codecID = FFmpegHelper::getCodecID(videoStream);
    videoCodec = avcodec_find_decoder(codecID);
    //videoCodec = avcodec_find_decoder_by_name("h264");
    if (!videoCodec) {
        debug("open input", "error: failed to find video decoder");
        return;
    }

    //allocate the video decoder context
    videoCodecCtx = avcodec_alloc_context3(videoCodec);
    if (!videoCodecCtx) {
        debug("open input", "error: failed to allocate video decoder context");
        return;
    }

    result = FFmpegHelper::copyContext(videoCodecCtx, videoStream, false);
    if (result < 0) {
        debug("open input", "error: failed to set video decoder parameters");
        return;
    }

    //set decoder flags
    videoCodecCtx->flags |= AV_CODEC_FLAG_LOW_DELAY;
    videoCodecCtx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
    videoCodecCtx->flags2 |= AV_CODEC_FLAG2_FAST;

    //open the video decoder
    result = avcodec_open2(videoCodecCtx, videoCodec, NULL);
    if (result < 0) {
        debug("open input", "error: failed to open video decoder " + FFmpegHelper::getError(result));
        return;
    }

    //get the actual resolution
    FFmpegHelper::getResolution(videoStream, videoWidth, videoHeight);
    //bail out if width/height could not be obtained
    if (videoWidth <= 0 || videoHeight <= 0) {
        debug("open input", "error: failed to get width/height");
        return;
    }

    //get the real frame rate
    frameRate = av_q2d(videoStream->r_frame_rate);

    QString msg = QString("index: %1 codec: %2 fps: %3 size: %4x%5").arg(videoIndex).arg(videoCodec->name).arg(frameRate).arg(videoWidth).arg(videoHeight);
    debug("video info", msg);
    openCamera();
}

bool CameraThreadFFmpeg::openCamera()
{
    //allocate memory
    packet = FFmpegHelper::creatPacket(NULL);
    videoFrame = av_frame_alloc();
    yuvFrame = av_frame_alloc();
    imageFrame = av_frame_alloc();

    //set properties so the frame object is usable
    yuvFrame->format = AV_PIX_FMT_YUV420P;
    yuvFrame->width = videoWidth;
    yuvFrame->height = videoHeight;

    //define and obtain the pixel format
    AVPixelFormat srcFormat = AV_PIX_FMT_YUYV422;
    //take the actual decoded format from the decoder
    srcFormat = videoCodecCtx->pix_fmt;

    //speed comparison of the various scaling flags: https://www.cnblogs.com/xumaojun/p/8541634.html
    int flags = SWS_FAST_BILINEAR;

    //allocate the frame buffer (yuv420 conversion)
    int yuvSize = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, videoWidth, videoHeight, 1);
    yuvData = (quint8 *)av_malloc(yuvSize * sizeof(quint8));
    av_image_fill_arrays(yuvFrame->data, yuvFrame->linesize, yuvData, AV_PIX_FMT_YUV420P, videoWidth, videoHeight, 1);
    //image conversion context (to yuv420)
    yuvSwsCtx = sws_getContext(videoWidth, videoHeight, srcFormat, videoWidth, videoHeight, AV_PIX_FMT_YUV420P, flags, NULL, NULL, NULL);

    //allocate the frame buffer (rgb conversion)
    int imageSize = av_image_get_buffer_size(AV_PIX_FMT_RGB24, videoWidth, videoHeight, 1);
    imageData = (quint8 *)av_malloc(imageSize * sizeof(quint8));
    av_image_fill_arrays(imageFrame->data, imageFrame->linesize, imageData, AV_PIX_FMT_RGB24, videoWidth, videoHeight, 1);
    //image conversion context (to rgb)
    imageSwsCtx = sws_getContext(videoWidth, videoHeight, AV_PIX_FMT_YUV420P, videoWidth, videoHeight, AV_PIX_FMT_RGB24, flags, NULL, NULL, NULL);

    //print media info
    //av_dump_format(formatCtx, 0, 0, 0);

    QString msg = QString("source: %1 target: %2").arg(srcFormat).arg(videoMode == VideoMode_Painter ? AV_PIX_FMT_RGB24 : AV_PIX_FMT_YUV420P);
    debug("format info", msg);

    //init audio playback
    this->initAudioPlayer();
    //init filters
    this->initFilter();

    int time = timer.elapsed();
    debug("open success", QString("elapsed: %1 ms").arg(time));
    emit receivePlayStart(time);
    emit recorderStateChanged(RecorderState_Stopped, fileName);

    isOk = true;
    return isOk;
}
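A note on FFmpegHelper::copyContext: it is the project's own helper and its body is not part of this excerpt. On the current FFmpeg API it presumably just copies the stream's codec parameters into the freshly allocated decoder context. A minimal sketch under that assumption (the real helper also covers the encoding direction and older FFmpeg releases):

//hypothetical sketch of FFmpegHelper::copyContext, decoder direction only;
//assumes FFmpeg 3.1+ where AVStream exposes codecpar
int FFmpegHelper::copyContext(AVCodecContext *avctx, AVStream *stream, bool encode)
{
    Q_UNUSED(encode)
    //copy width/height, pix_fmt, extradata and so on from the stream into the context
    return avcodec_parameters_to_context(avctx, stream->codecpar);
}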
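Neither function above contains the loop that actually pulls frames from the camera and performs the yuv422-to-yuv420 conversion; in the original project that happens elsewhere in the capture thread. Below is a minimal sketch of such a loop, reusing the member names from the excerpt (formatCtx, videoCodecCtx, packet, videoFrame, yuvFrame, yuvSwsCtx, videoIndex, videoHeight); the function name readVideoFrame() is made up for illustration:

//hypothetical per-frame read/decode/convert sketch, not the project's actual run() loop
void CameraThreadFFmpeg::readVideoFrame()
{
    if (av_read_frame(formatCtx, packet) < 0) {
        av_packet_unref(packet);
        return;
    }

    if (packet->stream_index == videoIndex) {
        //push the raw camera packet (yuyv422/mjpeg/...) into the decoder
        if (avcodec_send_packet(videoCodecCtx, packet) == 0) {
            while (avcodec_receive_frame(videoCodecCtx, videoFrame) == 0) {
                //convert whatever the camera delivered (e.g. yuv422) to planar yuv420p
                sws_scale(yuvSwsCtx, (const uint8_t *const *)videoFrame->data,
                          videoFrame->linesize, 0, videoHeight,
                          yuvFrame->data, yuvFrame->linesize);

                //yuvData now holds a contiguous YUV420P frame; in painter mode the
                //second context would additionally turn it into RGB24 for a QImage:
                //sws_scale(imageSwsCtx, (const uint8_t *const *)yuvFrame->data,
                //          yuvFrame->linesize, 0, videoHeight,
                //          imageFrame->data, imageFrame->linesize);
            }
        }
    }

    av_packet_unref(packet);
}

This also shows why two sws contexts are created: yuvSwsCtx normalizes whatever the camera produces (typically packed yuyv422, 2 bytes per pixel) into planar yuv420p (1.5 bytes per pixel), and imageSwsCtx only comes into play when the frame additionally has to become RGB24 for QPainter drawing.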
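For completeness, the teardown is also outside this excerpt. A hedged sketch of how the resources allocated in initCamera()/openCamera() could be released (the function name freeCamera() is illustrative; the project's own close routine may differ in order and location):

//hypothetical cleanup sketch matching the allocations shown above
void CameraThreadFFmpeg::freeCamera()
{
    if (yuvSwsCtx) { sws_freeContext(yuvSwsCtx); yuvSwsCtx = NULL; }
    if (imageSwsCtx) { sws_freeContext(imageSwsCtx); imageSwsCtx = NULL; }

    //buffers filled into the frames by av_image_fill_arrays are owned by us
    if (yuvData) { av_free(yuvData); yuvData = NULL; }
    if (imageData) { av_free(imageData); imageData = NULL; }

    if (packet) { av_packet_free(&packet); }
    if (videoFrame) { av_frame_free(&videoFrame); }
    if (yuvFrame) { av_frame_free(&yuvFrame); }
    if (imageFrame) { av_frame_free(&imageFrame); }

    if (videoCodecCtx) { avcodec_free_context(&videoCodecCtx); }
    if (formatCtx) { avformat_close_input(&formatCtx); }
}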