MP4 decoding is done with ffmpeg: frames are decoded, converted to RGBA, and then drawn with a CCSprite.
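Before the full classes, here is a condensed sketch of that pipeline in isolation. It is only an outline: the function name decodeToRgba is a placeholder, error handling and cleanup are omitted, and it assumes the same ffmpeg headers that VideoPanel.h includes below. VideoPanel does the same work, but splits it across readFile() and a caching thread.

// Condensed sketch of the decode-to-RGBA pipeline (error handling and cleanup omitted).
static void decodeToRgba(const char* pszPath)
{
    AVFormatContext* pFormat = nullptr;
    avformat_open_input(&pFormat, pszPath, nullptr, nullptr);        // open the container
    avformat_find_stream_info(pFormat, nullptr);                     // read stream information
    int iVideo = av_find_best_stream(pFormat, AVMEDIA_TYPE_VIDEO, -1, -1, nullptr, 0);

    AVCodecParameters* pPar = pFormat->streams[iVideo]->codecpar;
    AVCodecContext* pDec = avcodec_alloc_context3(avcodec_find_decoder(pPar->codec_id));
    avcodec_parameters_to_context(pDec, pPar);
    avcodec_open2(pDec, nullptr, nullptr);                           // open the decoder

    SwsContext* pSws = sws_getContext(pDec->width, pDec->height, pDec->pix_fmt,
        pDec->width, pDec->height, AV_PIX_FMT_RGBA, SWS_BICUBIC, nullptr, nullptr, nullptr);

    AVPacket* pPacket = av_packet_alloc();
    AVFrame* pFrame = av_frame_alloc();
    AVFrame* pRgba = av_frame_alloc();
    int iSize = av_image_get_buffer_size(AV_PIX_FMT_RGBA, pDec->width, pDec->height, 1);
    uint8_t* pBuffer = (uint8_t*)av_malloc(iSize);
    av_image_fill_arrays(pRgba->data, pRgba->linesize, pBuffer, AV_PIX_FMT_RGBA,
        pDec->width, pDec->height, 1);

    while (av_read_frame(pFormat, pPacket) >= 0)
    {
        if (pPacket->stream_index == iVideo && avcodec_send_packet(pDec, pPacket) == 0)
        {
            while (avcodec_receive_frame(pDec, pFrame) == 0)
            {
                sws_scale(pSws, pFrame->data, pFrame->linesize, 0, pDec->height,
                    pRgba->data, pRgba->linesize);
                // pRgba->data[0] now holds width*height*4 bytes of RGBA pixels,
                // ready to upload into a cocos2d-x Texture2D and show on a Sprite.
            }
        }
        av_packet_unref(pPacket);
    }
}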

VideoPanel.h

#pragma once

#include "cocos2d.h"
#include "ui/CocosGUI.h"
#include <thread>
#include <vector>
#include <string>
#include <cstdlib>
#include <cstring>

USING_NS_CC;
using namespace std;

extern "C"
{
#include <libavcodec/avcodec.h>
#include <libavdevice/avdevice.h>
#include <libavfilter/avfilter.h>
#include <libswscale/swscale.h>
#include <libswresample/swresample.h>
#include <libavformat/avformat.h>
#include <libavutil/avutil.h>
#include "libavutil/imgutils.h"
};


struct FrameData
{
    void*   pData;
    int     iLen;
    int     iWidth;
    int     iHeight;
    float   fTime;    // presentation timestamp, in seconds
    int     iIndex;

    FrameData(void* _pData, int _iLen, int _iWidth, int _iHeight, float _fTime, int _iIndex)
    {
        iLen = _iLen;
        iWidth = _iWidth;
        iHeight = _iHeight;
        fTime = _fTime;
        iIndex = _iIndex;

        // take a private copy of the RGBA pixels; freed in ~VideoPanel()
        pData = malloc(iLen);
        memcpy(pData, _pData, iLen);
    }
};


class VideoPanel : public Layer
{
public:
    VideoPanel();
    ~VideoPanel();

    static VideoPanel*        create();
    bool                    init();
    bool                    play(const std::string& strUrl);
    void                    pause();

    void                    resize(int iWidth, int iHeight);

    bool                    readFile(const std::string& strUrl);
    void                    run(float fDelay);
    void                    sliderEvent(Ref *pSender, ui::Slider::EventType type);
    void                    menuCallback(Ref* pSender);
    void                    closeCallback(Ref* pSender);

    void                    cacheFrameData();   // cache decoded frames on a worker thread
    void                    setTipKey(const std::string& strKey);

private:
    Sprite*                m_pVideoSp;
    ui::Slider*            m_pProgressBar;      // progress slider (its UI setup is not shown in this excerpt)

    AVFormatContext*    m_pFormatContext;
    AVCodecContext*        m_pVideoCodecContext;
    AVCodecContext*        m_pAudioCodecContext;

    int                    m_iVideoIndex;
    int                    m_iAudioIndex;

    AVFrame*            m_pVideoFrame;
    AVFrame*            m_pVideoFrameYUV;
    AVFrame*            m_pAudioFrame;

    int                    m_iOutBufferSize;
    void*                m_pOutBuffer;
    SwsContext*            m_pVideoSwSContext;
    SwrContext*            m_pAudioSwrContext;

    int                    m_iOutChannelNum;    // number of output audio channels
    void*                m_pOutAudioBuffer;

    AVPacket*            m_pPacket;
    bool                m_bPause;
    bool                m_bExitThread;
    AVSampleFormat        m_OutSampleFormat;    // output audio sample format


    vector<FrameData*>    m_vtFrameData;
    int                    m_iCurFrameIndex;

    int                    m_iTotalTime;        // total video duration, in seconds
    int                    m_iTotalFrameNum;    // total number of video frames
    AVRational            m_sTimeBase;         // time base of the video stream
    int                    m_iFrameRate;        // frame rate, frames per second

};

VideoPanel.cpp

#include "VideoPanel.h"


#define MAX_CACHE_FRAME_NUM 300    // cap on cached decoded frames (see the caching notes after cacheFrameData)


VideoPanel::VideoPanel()
    : m_pVideoSp(nullptr), m_pProgressBar(nullptr), m_pFormatContext(nullptr)
    , m_pVideoCodecContext(nullptr), m_pVideoFrame(nullptr), m_pVideoFrameYUV(nullptr)
    , m_iOutBufferSize(0), m_pOutBuffer(nullptr), m_pVideoSwSContext(nullptr)
    , m_pPacket(nullptr), m_iCurFrameIndex(0)
{
}

VideoPanel::~VideoPanel()
{
    for (FrameData* pData : m_vtFrameData)
    {
        free(pData->pData);
        delete pData;
    }

    m_vtFrameData.clear();
}
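The destructor above only releases the cached frame copies. The ffmpeg objects allocated in readFile() should also be released when the panel goes away; a sketch of that extra teardown, assuming the pointer members are null-initialized in the constructor (as above) and the caching thread has already been stopped:

// Additional teardown for the ffmpeg side (a sketch; run after the caching thread has stopped):
if (m_pVideoSwSContext)    sws_freeContext(m_pVideoSwSContext);
if (m_pVideoFrame)         av_frame_free(&m_pVideoFrame);
if (m_pVideoFrameYUV)      av_frame_free(&m_pVideoFrameYUV);
if (m_pOutBuffer)          av_free(m_pOutBuffer);
if (m_pPacket)             av_packet_free(&m_pPacket);
if (m_pVideoCodecContext)  avcodec_free_context(&m_pVideoCodecContext);
if (m_pFormatContext)      avformat_close_input(&m_pFormatContext);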

VideoPanel* VideoPanel::create()
{
    VideoPanel *pNode = new VideoPanel();
    if (pNode != nullptr && pNode->init())
    {
        pNode->autorelease();
    }
    else
    {
        CC_SAFE_DELETE(pNode);
    }

    return pNode;
}

bool VideoPanel::init()
{
    RETURN_IF(!Layer::init(), false);

    return true;
}

bool VideoPanel::play(const std::string& strUrl)
{
    if (!readFile(strUrl))
    {
        return false;
    }

    m_bPause = false;

    _scheduler->schedule(schedule_selector(VideoPanel::run), this, 1.0f / m_iFrameRate, false);

    m_bExitThread = false;
    // decode and cache frames on a background thread; detach so play() does not block
    // (see the thread-safety notes after cacheFrameData below)
    thread t(&VideoPanel::cacheFrameData, this);
    t.detach();

    return true;
}

void VideoPanel::pause()
{
    m_bPause = true;
}


bool VideoPanel::readFile(const std::string& strUrl)
{
    AVFormatContext* pContext = avformat_alloc_context();  // allocate the demuxer (format) context
    m_pFormatContext = pContext;

    int iError = avformat_open_input(&m_pFormatContext, strUrl.c_str(), nullptr, nullptr);
    if (iError != 0)
    {
        LOG_ERROR("avformat_open_input fail");
        return false;
    }

    // read stream information so codec parameters, duration, frame rate etc. are filled in
    if (avformat_find_stream_info(m_pFormatContext, nullptr) < 0)
    {
        LOG_ERROR("avformat_find_stream_info fail");
        return false;
    }

    m_iVideoIndex = -1;
    m_iAudioIndex = -1;

    // locate the video and audio stream indices
    for (unsigned int iIndex = 0; iIndex < pContext->nb_streams; iIndex++)
    {
        if (pContext->streams[iIndex]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            m_iVideoIndex = iIndex;
            LOG_INFO("video index : %d", m_iVideoIndex);
        }
        if (pContext->streams[iIndex]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
        {
            m_iAudioIndex = iIndex;
            LOG_INFO("audio index : %d", m_iAudioIndex);
        }
    }

    if (m_iVideoIndex == -1)
    {
        LOG_INFO("Couldn't find a video stream");
        return false;
    }

    // video stream: set up the decoder
    if (m_iVideoIndex > -1)
    {
        // codec parameters of the video stream
        AVCodecParameters* pCodecPar = pContext->streams[m_iVideoIndex]->codecpar;

        // find the decoder that matches the stream's codec id
        const AVCodec* pCodecVideo = avcodec_find_decoder(pCodecPar->codec_id);
        if (pCodecVideo == nullptr)
        {
            LOG_INFO("Couldn't find a video codec");
            return false;
        }

        m_pVideoCodecContext = avcodec_alloc_context3(pCodecVideo);
        // codecpar carries most of the decoder parameters; copy them into the AVCodecContext
        avcodec_parameters_to_context(m_pVideoCodecContext, pCodecPar);
        //av_codec_set_pkt_timebase(m_pVideoCodecContext, pContext->streams[m_iVideoIndex]->time_base);

        // open the decoder
        if (avcodec_open2(m_pVideoCodecContext, pCodecVideo, nullptr) < 0)
        {
            LOG_ERROR("avcodec_open2 fail");
            return false;
        }

        // basic video information
        const char* pszFormat = pContext->iformat->name;
        m_iTotalTime = (int)(pContext->duration / AV_TIME_BASE);    // duration is in AV_TIME_BASE (microsecond) units
        int iWidth = m_pVideoCodecContext->width;
        int iHeight = m_pVideoCodecContext->height;
        m_sTimeBase = pContext->streams[m_iVideoIndex]->time_base;
        m_iFrameRate = (int)av_q2d(pContext->streams[m_iVideoIndex]->r_frame_rate);    // num/den, not just num
        m_iTotalFrameNum = (int)pContext->streams[m_iVideoIndex]->nb_frames;
        if (m_pProgressBar != nullptr)
        {
            m_pProgressBar->setMaxPercent(m_iTotalTime);
        }

        LOG_INFO("video format:%s, length:%d, width:%d, height:%d", pszFormat, m_iTotalTime, iWidth, iHeight);

        // prepare decode buffers
        // AVFrame for the raw decoded picture as it comes out of the decoder
        m_pVideoFrame = av_frame_alloc();
        // AVFrame wrapping the converted RGBA pixels (the member name is historical)
        m_pVideoFrameYUV = av_frame_alloc();
        // an AVFrame only owns pixel memory once format and size are known,
        // so allocate the RGBA buffer explicitly and attach it to the frame
        m_iOutBufferSize = av_image_get_buffer_size(AV_PIX_FMT_RGBA, iWidth, iHeight, 1);

        m_pOutBuffer = av_malloc(m_iOutBufferSize);
        // point the RGBA frame's data/linesize at the buffer
        av_image_fill_arrays(m_pVideoFrameYUV->data, m_pVideoFrameYUV->linesize,
            (uint8_t*)m_pOutBuffer, AV_PIX_FMT_RGBA,
            iWidth, iHeight, 1);
        // conversion context: source size/pixel format -> same size in RGBA
        m_pVideoSwSContext = sws_getContext(iWidth, iHeight, m_pVideoCodecContext->pix_fmt,
            iWidth, iHeight, AV_PIX_FMT_RGBA, SWS_BICUBIC, nullptr, nullptr, nullptr);
    }

    // packet that receives compressed data from av_read_frame()
    m_pPacket = av_packet_alloc();

    return true;
}
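Note that readFile() above only sets up the video side; the audio members declared in the header (m_pAudioCodecContext, m_pAudioFrame, m_pAudioSwrContext, m_iOutChannelNum, m_OutSampleFormat, m_pOutAudioBuffer) are never initialized in this excerpt. If audio decoding is wanted, the setup could look roughly like the sketch below, placed inside readFile() after the video branch. The 16-bit stereo output format is my assumption, error handling is omitted, and actually feeding the converted samples to an audio device is not covered here.

    // Sketch: open the audio decoder and a resampler for the members left unused above.
    if (m_iAudioIndex > -1)
    {
        AVCodecParameters* pAudioPar = pContext->streams[m_iAudioIndex]->codecpar;
        const AVCodec* pCodecAudio = avcodec_find_decoder(pAudioPar->codec_id);

        m_pAudioCodecContext = avcodec_alloc_context3(pCodecAudio);
        avcodec_parameters_to_context(m_pAudioCodecContext, pAudioPar);
        avcodec_open2(m_pAudioCodecContext, pCodecAudio, nullptr);

        m_pAudioFrame = av_frame_alloc();

        // assumed output format: interleaved 16-bit stereo at the source sample rate
        m_OutSampleFormat = AV_SAMPLE_FMT_S16;
        int64_t iOutLayout = AV_CH_LAYOUT_STEREO;
        m_iOutChannelNum = av_get_channel_layout_nb_channels(iOutLayout);

        m_pAudioSwrContext = swr_alloc_set_opts(nullptr,
            iOutLayout, m_OutSampleFormat, m_pAudioCodecContext->sample_rate,      // output
            av_get_default_channel_layout(m_pAudioCodecContext->channels),         // input
            m_pAudioCodecContext->sample_fmt, m_pAudioCodecContext->sample_rate,
            0, nullptr);
        swr_init(m_pAudioSwrContext);

        m_pOutAudioBuffer = av_malloc(192000);   // scratch buffer for swr_convert() output
    }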

void VideoPanel::run(float fDelay)
{
    RETURN_VOID_IF(m_bPause);
    RETURN_VOID_IF(m_iCurFrameIndex >= m_vtFrameData.size());

    FrameData* pFrameData = m_vtFrameData[m_iCurFrameIndex];
    int iWidth = pFrameData->iWidth;
    int iHeight = pFrameData->iHeight;

    if (m_pVideoSp == nullptr)
    {
        Image* image = new (std::nothrow) Image;
        image->initWithRawData((const unsigned char *)pFrameData->pData, pFrameData->iLen,
            iWidth, iHeight, 0, true);

        _director->getTextureCache()->removeTextureForKey("videoTexture");
        Texture2D* pTexture = _director->getTextureCache()->addImage(image, "videoTexture");

        m_pVideoSp = Sprite::createWithTexture(pTexture);
        m_pVideoSp->setAnchorPoint(Vec2::ZERO);
        m_pVideoSp->setPosition(Vec2(0, BOTTOM_HEIGHT));
        addChild(m_pVideoSp);

        if (iWidth < DEFAULT_WIDTH)
        {
            float fScale = (float)DEFAULT_WIDTH / iWidth;    // cast before dividing to avoid integer division
            m_pVideoSp->setScale(fScale);

            int width = (int)(iWidth * fScale);
            int height = (int)(iHeight * fScale);
            resize(width, height + BOTTOM_HEIGHT);
        }
        else
        {
            resize(iWidth, iHeight + BOTTOM_HEIGHT);
        }

        delete image;
    }
    else
    {
        m_pVideoSp->getTexture()->updateWithData(pFrameData->pData, 0, 0, iWidth, iHeight);
    }


    m_iCurFrameIndex++;
}


void VideoPanel::cacheFrameData()
{
    int iIndex = 0;    // running frame counter

    while (av_read_frame(m_pFormatContext, m_pPacket) >= 0 && !m_bExitThread)
    {
        int iRet = 0;
        if (m_pPacket->stream_index == m_iVideoIndex)
        {
            // send one compressed video packet to the decoder
            iRet = avcodec_send_packet(m_pVideoCodecContext, m_pPacket);
            if (iRet != 0)
            {
                LOG_ERROR("avcodec_send_packet error");
                av_packet_unref(m_pPacket);
                return;
            }

            // a decoded frame is available
            if (avcodec_receive_frame(m_pVideoCodecContext, m_pVideoFrame) == 0) {
                // convert the decoded frame (typically YUV420P) to RGBA at the same size
                sws_scale(m_pVideoSwSContext, m_pVideoFrame->data, m_pVideoFrame->linesize,
                    0, m_pVideoCodecContext->height, m_pVideoFrameYUV->data, m_pVideoFrameYUV->linesize);

                int iWidth = m_pVideoCodecContext->width;
                int iHeight = m_pVideoCodecContext->height;

                FrameData* pFrameData = new FrameData(m_pOutBuffer, m_iOutBufferSize, iWidth, iHeight, 
                    m_pVideoFrame->pts * av_q2d(m_sTimeBase), iIndex++);
                m_vtFrameData.push_back(pFrameData);
            }
        }

        // release the packet's buffer before reading the next one
        av_packet_unref(m_pPacket);
    }
}
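Two things the loop above leaves open: m_vtFrameData is pushed to from the worker thread while run() reads it on the cocos2d-x main thread, and MAX_CACHE_FRAME_NUM is never actually enforced, so the whole file ends up decoded into memory. A minimal sketch of how both could be handled, assuming two hypothetical members are added to VideoPanel (std::mutex m_mtxFrames and std::thread m_cacheThread, kept instead of detaching) plus #include <mutex> and <chrono>; ideally m_bPause, m_bExitThread and m_iCurFrameIndex would also become std::atomic:

// Producer side, called from cacheFrameData() instead of the plain push_back:
void VideoPanel::cacheFrame(FrameData* pFrameData)   // hypothetical helper
{
    // throttle: wait while enough frames are already cached ahead of playback
    while (!m_bExitThread &&
        m_vtFrameData.size() > (size_t)(m_iCurFrameIndex + MAX_CACHE_FRAME_NUM))
    {
        std::this_thread::sleep_for(std::chrono::milliseconds(10));
    }

    std::lock_guard<std::mutex> lock(m_mtxFrames);
    m_vtFrameData.push_back(pFrameData);
}

// Consumer side, called from run() instead of indexing the vector directly:
FrameData* VideoPanel::takeNextFrame()               // hypothetical helper
{
    std::lock_guard<std::mutex> lock(m_mtxFrames);
    if (m_iCurFrameIndex >= (int)m_vtFrameData.size())
    {
        return nullptr;
    }
    return m_vtFrameData[m_iCurFrameIndex++];
}

// Shutdown, e.g. at the start of ~VideoPanel(), so the worker never outlives the object:
//     m_bExitThread = true;
//     if (m_cacheThread.joinable())
//         m_cacheThread.join();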


How to use it:

VideoPanel* player = VideoPanel::create();
player->play("E:\\cocosvideo.mp4");   // escape the backslash in the Windows path
Director::getInstance()->getRunningScene()->addChild(player);

The final result, with the progress bar and buttons added:

[Screenshot: cocos2d-x playing an MP4 video on Windows]

For paid support, contact QQ: 980550823.

Please credit the source when reposting: from 博客园 (cnblogs) HemJohn.