Android NuPlayer In Depth

This article walks through NuPlayer based on the Android N source code. NuPlayer is the player Android uses for both local and streaming media playback.

1. The AHandler mechanism

Let's start with the AHandler mechanism, which is used everywhere in NuPlayer:
frameworks/av/include/media/stagefright/foundation/
frameworks/av/media/libstagefright/foundation/
AHandler is an asynchronous message mechanism implemented in the Android native layer. Everything is processed asynchronously: parameters are packed into an AMessage, the message is put onto a queue, and a dedicated background thread takes messages off the queue and executes them; the execution entry point is onMessageReceived.

The AHandler mechanism involves the following classes.

AMessage

The message class. Messages are constructed here and delivered to an ALooper via the post method.

status_t AMessage::post(int64_t delayUs) {
    sp<ALooper> looper = mLooper.promote();
    if (looper == NULL) {
        ALOGW("failed to post message as target looper for handler %d is gone.", mTarget);
        return -ENOENT;
    }

    looper->post(this, delayUs);
    return OK;
}

void AMessage::deliver() {
    sp<AHandler> handler = mHandler.promote();
    if (handler == NULL) {
        ALOGW("failed to deliver message as target handler %d is gone.", mTarget);
        return;
    }

    handler->deliverMessage(this); // see AHandler::deliverMessage; a post through the looper ends up calling deliver() here, which hands the message to the handler
}

AHandler
The message handling class, normally used as a base class; subclasses must implement onMessageReceived.
void AHandler::deliverMessage(const sp<AMessage> &msg) {
    onMessageReceived(msg);
    mMessageCounter++;
….
}

ALooper
Each AHandler is bound to exactly one ALooper; the looper stores messages and dispatches them to the handler, and has a one-to-many relationship with AMessage.
// posts a message on this looper with the given timeout
void ALooper::post(const sp<AMessage> &msg, int64_t delayUs) {
    Mutex::Autolock autoLock(mLock);

    int64_t whenUs;
    if (delayUs > 0) {
        whenUs = GetNowUs() + delayUs;
    } else {
        whenUs = GetNowUs();
    }

    List<Event>::iterator it = mEventQueue.begin();
    while (it != mEventQueue.end() && (*it).mWhenUs <= whenUs) {
        ++it;
    }

    Event event;
    event.mWhenUs = whenUs;
    event.mMessage = msg;

    if (it == mEventQueue.begin()) {
        mQueueChangedCondition.signal();
    }

    mEventQueue.insert(it, event);
}
----------------------------------------------------------
status_t ALooper::start(
        bool runOnCallingThread, bool canCallJava, int32_t priority) {
    if (runOnCallingThread) {
        {
            Mutex::Autolock autoLock(mLock);

            if (mThread != NULL || mRunningLocally) {
                return INVALID_OPERATION;
            }

            mRunningLocally = true;
        }

        do {
        } while (loop());

        return OK;
    }

    Mutex::Autolock autoLock(mLock);

    if (mThread != NULL || mRunningLocally) {
        return INVALID_OPERATION;
    }

    mThread = new LooperThread(this, canCallJava);

    status_t err = mThread->run(
            mName.empty() ? "ALooper" : mName.c_str(), priority);
    if (err != OK) {
        mThread.clear();
    }

    return err;
}
bool ALooper::loop() {
    Event event;

    {
        Mutex::Autolock autoLock(mLock);
        if (mThread == NULL && !mRunningLocally) {
            return false;
        }
        if (mEventQueue.empty()) {
            mQueueChangedCondition.wait(mLock);
            return true;
        }
        int64_t whenUs = (*mEventQueue.begin()).mWhenUs;
        int64_t nowUs = GetNowUs();

        if (whenUs > nowUs) {
            int64_t delayUs = whenUs - nowUs;
            mQueueChangedCondition.waitRelative(mLock, delayUs * 1000ll);

            return true;
        }

        event = *mEventQueue.begin();
        mEventQueue.erase(mEventQueue.begin());
    }

    event.mMessage->deliver(); // see AHandler::deliverMessage
    ...
    return true;
}

LooperThread
This thread repeatedly calls ALooper::loop to dispatch messages.
 virtual status_t readyToRun() {
        mThreadId = androidGetThreadId();

        return Thread::readyToRun();
    }

    virtual bool threadLoop() {
        return mLooper->loop();
    }
ALooperRoster
Has a one-to-many relationship with handlers: it tracks the Looper/Handler pairings and is responsible for releasing stale handlers.
ALooper::handler_id ALooperRoster::registerHandler(
        const sp<ALooper> &looper, const sp<AHandler> &handler) {
    Mutex::Autolock autoLock(mLock);

    if (handler->id() != 0) {
        CHECK(!"A handler must only be registered once.");
        return INVALID_OPERATION;
    }

    HandlerInfo info;
    info.mLooper = looper;
    info.mHandler = handler;
    ALooper::handler_id handlerID = mNextHandlerID++; // one unique id per handler
    mHandlers.add(handlerID, info);                   // the roster tracks many handlers

    handler->setID(handlerID, looper);

    return handlerID;
}

void ALooperRoster::unregisterHandler(ALooper::handler_id handlerID) {
    Mutex::Autolock autoLock(mLock);

    ssize_t index = mHandlers.indexOfKey(handlerID);

    if (index < 0) {
        return;
    }

    const HandlerInfo &info = mHandlers.valueAt(index);

    sp<AHandler> handler = info.mHandler.promote();

    if (handler != NULL) {
        handler->setID(0, NULL);
    }

    mHandlers.removeItemsAt(index);
}

void ALooperRoster::unregisterStaleHandlers() {

    Vector<sp<ALooper> > activeLoopers;
    {
        Mutex::Autolock autoLock(mLock);

        for (size_t i = mHandlers.size(); i > 0;) {
            i--;
            const HandlerInfo &info = mHandlers.valueAt(i);

            sp<ALooper> looper = info.mLooper.promote();
            if (looper == NULL) {
                ALOGV("Unregistering stale handler %d", mHandlers.keyAt(i));
                mHandlers.removeItemsAt(i);
            } else {
                // At this point 'looper' might be the only sp<> keeping
                // the object alive. To prevent it from going out of scope
                // and having ~ALooper call this method again recursively
                // and then deadlocking because of the Autolock above, add
                // it to a Vector which will go out of scope after the lock
                // has been released.
                activeLoopers.add(looper);
            }
        }
    }
}

Creating the asynchronous message machinery
sp<ALooper> mLooper = new ALooper;                        // create an ALooper instance
sp<AHandler> mHandler = new AHandlerReflector<XXX>(this); // create an AHandler instance (AHandlerReflector<T> forwards messages to T's onMessageReceived)
mLooper->setName("xxxxx");                                // set the looper's name
mLooper->start(false, true, PRIORITY_XXX);                // create and start the looper thread according to the arguments
mLooper->registerHandler(mHandler);                       // registerHandler calls AHandler::setID to attach the looper to the handler

Posting a message
sp<AMessage> msg = new AMessage(kWhatSayGoodbye, mHandler); // the AMessage constructor looks up the handler's looper and stores it
msg->post();                                                // calls the looper's post method

The call path of a message post
AMessage::post
↓
ALooper::post
mEventQueue.insert
mQueueChangedCondition.signal() // if the queue was empty before, wake up the looper thread
↓
ALooper::loop()
if (mEventQueue.empty()) { // if the event queue is empty, wait
    mQueueChangedCondition.wait(mLock);
    return true;
}
event = *mEventQueue.begin();
event.mMessage->deliver();
↓
AHandler::deliverMessage
↓
AHandlerReflector::onMessageReceived
↓
the concrete handler implementation
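To tie the pieces together, here is a minimal, hypothetical handler that consumes the kWhatSayGoodbye message posted above. This is an illustrative sketch, not code from the AOSP sources; the class name, message code, and payload key are made up:

#define LOG_TAG "AHandlerExample"
#include <utils/Log.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AHandler.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AString.h>

using namespace android;

struct MyHandler : public AHandler {
    enum {
        kWhatSayGoodbye = 'gdby',   // arbitrary 'what' code
    };

protected:
    virtual void onMessageReceived(const sp<AMessage> &msg) {
        switch (msg->what()) {
            case kWhatSayGoodbye:
            {
                AString who;
                if (msg->findString("who", &who)) {
                    ALOGI("goodbye, %s", who.c_str());
                }
                break;
            }
            default:
                TRESPASS();         // unexpected message
        }
    }
};

static void runExample() {
    sp<ALooper> looper = new ALooper;      // create the looper
    looper->setName("my_looper");
    looper->start();                       // spawns the LooperThread
    sp<MyHandler> handler = new MyHandler;
    looper->registerHandler(handler);      // binds the handler id and the looper

    sp<AMessage> msg = new AMessage(MyHandler::kWhatSayGoodbye, handler);
    msg->setString("who", "NuPlayer");
    msg->post();                           // delivered asynchronously on the looper thread
}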

NuPlayer

Now let's get to the main topic: NuPlayer.
frameworks/av/media/libmediaplayerservice/nuplayer/

NuPlayerDriver
NuPlayerDriver is a wrapper around NuPlayer and implements the MediaPlayerInterface interface; the actual playback work is done by NuPlayer. A good way to read this code is: first see what NuPlayerDriver does, then go find the corresponding implementation in NuPlayer, usually follow the message into NuPlayer's onMessageReceived, and finally come back to the various notify calls in NuPlayerDriver to see how the state flow comes full circle. Below is the player state machine:
[Figure: NuPlayerDriver player state machine transitions]
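The figure itself is not reproduced here. As a rough substitute, the states that NuPlayerDriver moves through correspond to the State enum in NuPlayerDriver.h, which (quoted from memory of the Android N header, so treat it as approximate) looks like:

enum State {
    STATE_IDLE,
    STATE_SET_DATASOURCE_PENDING,
    STATE_UNPREPARED,
    STATE_PREPARING,
    STATE_PREPARED,
    STATE_RUNNING,
    STATE_PAUSED,
    STATE_RESET_IN_PROGRESS,
    STATE_STOPPED,                  // equivalent to PAUSED
    STATE_STOPPED_AND_PREPARING,    // equivalent to PAUSED, but seeking
    STATE_STOPPED_AND_PREPARED,     // equivalent to PAUSED, but seek complete
};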
NuPlayerDriver::NuPlayerDriver(pid_t pid)
    : mState(STATE_IDLE),  // the initial state of the player state machine
      mIsAsyncPrepare(false),
      mAsyncResult(UNKNOWN_ERROR),
      mSetSurfaceInProgress(false),
      mDurationUs(-1),
      mPositionUs(-1),
      mSeekInProgress(false),
      mLooper(new ALooper),
      mPlayerFlags(0),
      mAtEOS(false),
      mLooping(false),
      mAutoLoop(false) {
    ALOGV("NuPlayerDriver(%p)", this);
    // consistent with the async-message setup described earlier
    mLooper->setName("NuPlayerDriver Looper");

    mLooper->start(
            false, /* runOnCallingThread */
            true,  /* canCallJava */
            PRIORITY_AUDIO);
    // mPlayer is the NuPlayer instance, which derives from AHandler
    mPlayer = AVNuFactory::get()->createNuPlayer(pid);
    mLooper->registerHandler(mPlayer);

    mPlayer->setDriver(this);
}

NuPlayerDriver::~NuPlayerDriver() {
    ALOGV("~NuPlayerDriver(%p)", this);
    mLooper->stop();    // the whole NuPlayerDriver is essentially one big ALooper
}
AVNuFactory
Responsible for creating the key components. From it you can see that:
1. each NuPlayer instance is tied to one client process (pid);
2. data flows from Source to Decoder to Renderer, driven by AMessages in between.
sp<NuPlayer> AVNuFactory::createNuPlayer(pid_t pid) {
    return new NuPlayer(pid);
}

sp<NuPlayer::DecoderBase> AVNuFactory::createPassThruDecoder(
            const sp<AMessage> &notify,
            const sp<NuPlayer::Source> &source,
            const sp<NuPlayer::Renderer> &renderer) {
    return new NuPlayer::DecoderPassThrough(notify, source, renderer);
}

sp<NuPlayer::DecoderBase> AVNuFactory::createDecoder(
            const sp<AMessage> &notify,
            const sp<NuPlayer::Source> &source,
            pid_t pid,
            const sp<NuPlayer::Renderer> &renderer) {
    return new NuPlayer::Decoder(notify, source, pid, renderer);
}

sp<NuPlayer::Renderer> AVNuFactory::createRenderer(
            const sp<MediaPlayerBase::AudioSink> &sink,
            const sp<AMessage> &notify,
            uint32_t flags) {
    return new NuPlayer::Renderer(sink, notify, flags);
}


Next, let's look at the Source, the Decoder, and the Renderer in turn.

Source

Take setDataSource as the entry point.

status_t NuPlayerDriver::setDataSource(const sp<IStreamSource> &source) {
    ALOGV("setDataSource(%p) stream source", this);
    Mutex::Autolock autoLock(mLock);

    if (mState != STATE_IDLE) {
        return INVALID_OPERATION;
    }

    mState = STATE_SET_DATASOURCE_PENDING;

    mPlayer->setDataSourceAsync(source); // the driver is only a wrapper around NuPlayer, so the real work is delegated to NuPlayer

    while (mState == STATE_SET_DATASOURCE_PENDING) {
        mCondition.wait(mLock);
    }

    return mAsyncResult;
}
--------------------------------------
void NuPlayer::setDataSourceAsync(const sp<IStreamSource> &source) {
    sp<AMessage> msg = new AMessage(kWhatSetDataSource, this);

    sp<AMessage> notify = new AMessage(kWhatSourceNotify, this);

    msg->setObject("source", new StreamingSource(notify, source));
    msg->post(); // even inside NuPlayer the work is not done directly: a message is posted first, confirming that everything is driven by AMessages
}
---------------------------------------
void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetDataSource: // the actual handling happens here
        {
            ALOGV("kWhatSetDataSource");

            CHECK(mSource == NULL);

            status_t err = OK;
            sp<RefBase> obj;
            CHECK(msg->findObject("source", &obj));
            if (obj != NULL) {
                Mutex::Autolock autoLock(mSourceLock);
                mSource = static_cast<Source *>(obj.get()); // store it in mSource
            } else {
                err = UNKNOWN_ERROR;
            }

            CHECK(mDriver != NULL);
            sp<NuPlayerDriver> driver = mDriver.promote();
            if (driver != NULL) {
                driver->notifySetDataSourceCompleted(err); // tell the driver that setting the data source is done
            }
            break;
        }......
---------------------------------------
void NuPlayerDriver::notifySetDataSourceCompleted(status_t err) {
    Mutex::Autolock autoLock(mLock);

    CHECK_EQ(mState, STATE_SET_DATASOURCE_PENDING);

    mAsyncResult = err;
    mState = (err == OK) ? STATE_UNPREPARED : STATE_IDLE; // back in the driver, advance the player state machine to the next stage
    mCondition.broadcast();
}


Now let's see which concrete sources exist. They all derive from NuPlayer::Source (NuPlayerSource.h & NuPlayerSource.cpp):
1. HTTP - further classified into HTTPLiveSource, RTSPSource, or GenericSource (see the dispatch sketch below)
2. File - GenericSource
3. StreamSource - StreamingSource
4. DataSource - GenericSource
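As a rough sketch of how that classification happens, the url/headers variant of NuPlayer::setDataSourceAsync does something like the following. This is paraphrased and abbreviated from memory (error handling and the .sdp case are omitted, and helper names such as IsHTTPLiveURL may differ slightly between Android versions):

void NuPlayer::setDataSourceAsync(
        const sp<IMediaHTTPService> &httpService,
        const char *url,
        const KeyedVector<String8, String8> *headers) {
    sp<AMessage> msg = new AMessage(kWhatSetDataSource, this);
    sp<AMessage> notify = new AMessage(kWhatSourceNotify, this);

    sp<Source> source;
    if (IsHTTPLiveURL(url)) {
        // .m3u8 playlists (HLS) go to HTTPLiveSource, which drives a LiveSession
        source = new HTTPLiveSource(notify, httpService, url, headers);
    } else if (!strncasecmp(url, "rtsp://", 7)) {
        // RTSP streams go to RTSPSource
        source = new RTSPSource(notify, httpService, url, headers, mUIDValid, mUID);
    } else {
        // everything else (progressive http, local files, fd) ends up in GenericSource
        sp<GenericSource> genericSource = new GenericSource(notify, mUIDValid, mUID);
        genericSource->setDataSource(httpService, url, headers);
        source = genericSource;
    }

    msg->setObject("source", source);
    msg->post();
}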

GenericSource

nuplayer/GenericSource.h & GenericSource.cpp

A few water marks:
static int64_t kLowWaterMarkUs = 2000000ll;  // 2secs
static int64_t kHighWaterMarkUs = 5000000ll;  // 5secs
static int64_t kHighWaterMarkRebufferUs = 15000000ll;  // 15secs; this water mark is newly added
static const ssize_t kLowWaterMarkBytes = 40000;
static const ssize_t kHighWaterMarkBytes = 200000;
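These marks are compared against how much data is already buffered. As a schematic illustration of the idea only (this is not the actual GenericSource code; the real logic lives in its buffering-poll message handler), the decision looks roughly like this:

// Schematic illustration: how low/high water marks typically gate buffering.
struct BufferingPolicy {
    static const int64_t kLowWaterMarkUs  = 2000000ll;   // 2 s
    static const int64_t kHighWaterMarkUs = 5000000ll;   // 5 s

    bool mBuffering = false;

    void onPoll(int64_t cachedDurationUs) {
        if (mBuffering && cachedDurationUs > kHighWaterMarkUs) {
            mBuffering = false;   // enough data: report buffering end, resume playback
        } else if (!mBuffering && cachedDurationUs < kLowWaterMarkUs) {
            mBuffering = true;    // running dry: report buffering start, pause playback
        }
        // the real GenericSource re-posts a poll message with a delay here
    }
};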

status_t NuPlayer::GenericSource::initFromDataSource() {
    // set up the extractor, collect the track info and metadata (details elided)
}

void NuPlayer::GenericSource::prepareAsync() {
    if (mLooper == NULL) {
        mLooper = new ALooper;
        mLooper->setName("generic");
        mLooper->start();

        mLooper->registerHandler(this);
    }

    sp<AMessage> msg = new AMessage(kWhatPrepareAsync, this);
    msg->post();
}

status_t NuPlayer::GenericSource::feedMoreTSData() {
    return OK;
}

LiveSession

libstagefright/httplive/LiveSession.h & cpp

// static
// Bandwidth Switch Mark Defaults
const int64_t LiveSession::kUpSwitchMarkUs = 15000000ll;
const int64_t LiveSession::kDownSwitchMarkUs = 20000000ll;
const int64_t LiveSession::kUpSwitchMarginUs = 5000000ll;
const int64_t LiveSession::kResumeThresholdUs = 100000ll;

// Buffer Prepare/Ready/Underflow Marks
const int64_t LiveSession::kReadyMarkUs = 5000000ll;
const int64_t LiveSession::kPrepareMarkUs = 1500000ll;
const int64_t LiveSession::kUnderflowMarkUs = 1000000ll;

Everything related to the PlaylistFetcher, bandwidth estimation (a sliding-window average, just like ExoPlayer), stream switching, and buffering lives here.
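The sliding-window bandwidth estimate works conceptually like the sketch below. This is an illustration of the idea, not LiveSession's exact code: each completed download contributes a (bytes, time) sample to a bounded history, and the estimate is total bytes over total time across that window.

// Illustrative sliding-window bandwidth estimator (conceptual sketch).
#include <cstdint>
#include <deque>

class SlidingWindowBandwidthEstimator {
public:
    explicit SlidingWindowBandwidthEstimator(size_t maxSamples = 100)
        : mMaxSamples(maxSamples), mTotalBytes(0), mTotalTimeUs(0) {}

    // Record one completed transfer: how many bytes took how many microseconds.
    void addSample(int64_t bytes, int64_t timeUs) {
        mSamples.push_back({bytes, timeUs});
        mTotalBytes += bytes;
        mTotalTimeUs += timeUs;
        while (mSamples.size() > mMaxSamples) {        // keep only the newest window
            mTotalBytes  -= mSamples.front().bytes;
            mTotalTimeUs -= mSamples.front().timeUs;
            mSamples.pop_front();
        }
    }

    // Average bandwidth over the window, in bits per second (0 if no data yet).
    int64_t estimateBps() const {
        if (mTotalTimeUs <= 0) return 0;
        return (mTotalBytes * 8 * 1000000ll) / mTotalTimeUs;
    }

private:
    struct Sample { int64_t bytes; int64_t timeUs; };
    const size_t mMaxSamples;
    std::deque<Sample> mSamples;
    int64_t mTotalBytes;
    int64_t mTotalTimeUs;
};

The estimate is then compared against the up/down switch marks above to decide whether to switch to a higher or lower variant.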
HTTPLiveSource

Located in the nuplayer directory.

enum Flags {
        // Don't log any URLs.
        kFlagIncognito = 1,
    };

NuPlayer::HTTPLiveSource::HTTPLiveSource(...) {
    ...
    if (headers) { // it too has an extra-headers mechanism
        mExtraHeaders = *headers;

        ssize_t index =
            mExtraHeaders.indexOfKey(String8("x-hide-urls-from-log"));

        if (index >= 0) {
            mFlags |= kFlagIncognito;

            mExtraHeaders.removeItemsAt(index);
        }
    }
}
---------------------------------------
void NuPlayer::HTTPLiveSource::prepareAsync() {
    if (mLiveLooper == NULL) {
        mLiveLooper = new ALooper; // an ALooper, as always
        mLiveLooper->setName("http live");
        mLiveLooper->start();

        mLiveLooper->registerHandler(this);
    }

    sp<AMessage> notify = new AMessage(kWhatSessionNotify, this);

    mLiveSession = new LiveSession(
            notify,
            (mFlags & kFlagIncognito) ? LiveSession::kFlagIncognito : 0,
            mHTTPService);

    mLiveLooper->registerHandler(mLiveSession);

    mLiveSession->connectAsync( // HTTPLiveSource owns a LiveSession; much of the real work is done by the LiveSession
            mURL.c_str(), mExtraHeaders.isEmpty() ? NULL : &mExtraHeaders);
}


ATSParser
frameworks/av/media/libstagefright/mpeg2ts/ATSParser.cpp
It is simply an MPEG-TS parser; although its name also starts with "A", there is no message mechanism inside it.

StreamingSource
Located in the nuplayer directory.

void NuPlayer::StreamingSource::prepareAsync() {
    if (mLooper == NULL) {
        mLooper = new ALooper;
        mLooper->setName("streaming");
        mLooper->start(); // looks familiar by now

        mLooper->registerHandler(this);
    }

    notifyVideoSizeChanged();
    notifyFlagsChanged(0);
    notifyPrepared();
}
---------------------------------------
The data flow in StreamingSource is driven by onReadBuffer. EOS, discontinuities, and the like are handed to the ATSParser, which in turn hands them to AnotherPacketSource for the actual handling. In fact, all three sources discussed here end up using AnotherPacketSource.

void NuPlayer::StreamingSource::onReadBuffer() {
    for (int32_t i = 0; i < kNumListenerQueuePackets; ++i) {
        char buffer[188];
        sp<AMessage> extra;
        ssize_t n = mStreamListener->read(buffer, sizeof(buffer), &extra); // the actual reading is done by NuPlayerStreamListener

        if (n == 0) {
            ALOGI("input data EOS reached.");
            mTSParser->signalEOS(ERROR_END_OF_STREAM); // reached EOS
            setError(ERROR_END_OF_STREAM);
            break;
        } else if (n == INFO_DISCONTINUITY) {
            int32_t type = ATSParser::DISCONTINUITY_TIME;

            int32_t mask;
            if (extra != NULL
                    && extra->findInt32(
                        IStreamListener::kKeyDiscontinuityMask, &mask)) {
                if (mask == 0) {
                    ALOGE("Client specified an illegal discontinuity type.");
                    setError(ERROR_UNSUPPORTED);
                    break;
                }

                type = mask;
            }
            mTSParser->signalDiscontinuity(
                    (ATSParser::DiscontinuityType)type, extra);
        } else if (n < 0) {
            break;
        } else {
            if (buffer[0] == 0x00) {
                // XXX legacy

                if (extra == NULL) {
                    extra = new AMessage;
                }

                uint8_t type = buffer[1];

                if (type & 2) {
                    int64_t mediaTimeUs;
                    memcpy(&mediaTimeUs, &buffer[2], sizeof(mediaTimeUs));

                    extra->setInt64(IStreamListener::kKeyMediaTimeUs, mediaTimeUs);
                }

                mTSParser->signalDiscontinuity(
                        ((type & 1) == 0)
                            ? ATSParser::DISCONTINUITY_TIME
                            : ATSParser::DISCONTINUITY_FORMATCHANGE,
                        extra);
            } else {
                status_t err = mTSParser->feedTSPacket(buffer, sizeof(buffer));

                if (err != OK) {
                    ALOGE("TS Parser returned error %d", err);

                    mTSParser->signalEOS(err);
                    setError(err);
                    break;
                }
            }
        }
    }
}

AnotherPacketSource

frameworks/av/media/libstagefright/mpeg2ts
It is comparable to the chunk source in ExoPlayer: it manages the buffers and handles EOS, discontinuities, and so on. All three sources above eventually funnel into AnotherPacketSource.
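The producer/consumer pattern around it looks roughly like the sketch below. The methods queueAccessUnit/dequeueAccessUnit/hasBufferAvailable do exist on AnotherPacketSource (declared in libstagefright/mpeg2ts/AnotherPacketSource.h), but the two wrapper functions here are illustrative only:

// Producer side (e.g. ATSParser output): push demuxed access units in.
void onNewAccessUnit(const sp<AnotherPacketSource> &packetSource,
                     const sp<ABuffer> &accessUnit) {
    packetSource->queueAccessUnit(accessUnit);          // appends to the internal buffer list
}

// Consumer side (e.g. a Source's dequeueAccessUnit path): pull them back out.
status_t readOneAccessUnit(const sp<AnotherPacketSource> &packetSource,
                           sp<ABuffer> *accessUnit) {
    status_t finalResult;
    if (!packetSource->hasBufferAvailable(&finalResult)) {
        // empty queue: either try again later, or propagate the EOS/error result
        return finalResult == OK ? (status_t)-EWOULDBLOCK : finalResult;
    }
    return packetSource->dequeueAccessUnit(accessUnit); // may also return a discontinuity marker
}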

bool AnotherPacketSource::hasBufferAvailable(status_t *finalResult) {
    Mutex::Autolock autoLock(mLock);
    *finalResult = OK;
    if (!mEnabled) {
        return false;
    }
    if (!mBuffers.empty()) { // a List of ABuffers; essentially a circular buffer of queued access units
        return true;
    }

    *finalResult = mEOSResult;
    return false;
}
--------------------------------------
void AnotherPacketSource::queueDiscontinuity(
        ATSParser::DiscontinuityType type,
        const sp<AMessage> &extra,
        bool discard) {
    Mutex::Autolock autoLock(mLock);

    if (discard) {
        // Leave only discontinuities in the queue.
…...
    }

    mEOSResult = OK;
    mLastQueuedTimeUs = 0;
    mLatestEnqueuedMeta = NULL;

    if (type == ATSParser::DISCONTINUITY_NONE) {
        return;
    }

    mDiscontinuitySegments.push_back(DiscontinuitySegment());

    sp<ABuffer> buffer = new ABuffer(0);
    buffer->meta()->setInt32("discontinuity", static_cast<int32_t>(type));
    buffer->meta()->setMessage("extra", extra);

    mBuffers.push_back(buffer); // push an ABuffer that records the discontinuity into the queue, so whoever reads from the queue can handle the discontinuity correctly
    mCondition.signal();
}

Decoder

How the Decoder gets initialized

Start from NuPlayer::onStart:
void NuPlayer::onStart(int64_t startPositionUs) {
    if (!mSourceStarted) {
        mSourceStarted = true;
        mSource->start();
    }
    if (startPositionUs > 0) {
        performSeek(startPositionUs);
        if (mSource->getFormat(false /* audio */) == NULL) {
            return;
        }
    }
...
    sp<AMessage> notify = new AMessage(kWhatRendererNotify, this);
    ++mRendererGeneration;
    notify->setInt32("generation", mRendererGeneration);
    // here AVNuFactory creates the Renderer and its looper
    mRenderer = AVNuFactory::get()->createRenderer(mAudioSink, notify, flags);
    mRendererLooper = new ALooper;
    mRendererLooper->setName("NuPlayerRenderer");
    mRendererLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
    mRendererLooper->registerHandler(mRenderer);
    // set the Renderer's playback parameters
    status_t err = mRenderer->setPlaybackSettings(mPlaybackSettings);
    ...
    // hand the Renderer to the decoders; this is where the two are wired together
    if (mVideoDecoder != NULL) {
        mVideoDecoder->setRenderer(mRenderer);
    }
    if (mAudioDecoder != NULL) {
        mAudioDecoder->setRenderer(mRenderer);
    }
    // post the scan-sources message
    postScanSources();
}
--------------------------------------
case kWhatScanSources:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mScanSourcesGeneration) {
                // Drop obsolete msg.
                break;
            }

            mScanSourcesPending = false;

            ALOGV("scanning sources haveAudio=%d, haveVideo=%d",
                 mAudioDecoder != NULL, mVideoDecoder != NULL);

            bool mHadAnySourcesBefore =
                (mAudioDecoder != NULL) || (mVideoDecoder != NULL);
            bool rescan = false;

            // initialize video before audio because successful initialization of
            // video may change deep buffer mode of audio.
            // the decoders are instantiated here
            if (mSurface != NULL) {
                if (instantiateDecoder(false, &mVideoDecoder) == -EWOULDBLOCK) {
                    rescan = true;
                }
            }

            // Don't try to re-open audio sink if there's an existing decoder.
            if (mAudioSink != NULL && mAudioDecoder == NULL) {
                if (instantiateDecoder(true, &mAudioDecoder) == -EWOULDBLOCK) {
                    rescan = true;
                }
            }

            if (!mHadAnySourcesBefore
                    && (mAudioDecoder != NULL || mVideoDecoder != NULL)) {
                // This is the first time we've found anything playable.

                if (mSourceFlags & Source::FLAG_DYNAMIC_DURATION) {
                    schedulePollDuration();
                }
            }

            status_t err;
            if ((err = mSource->feedMoreTSData()) != OK) {
                if (mAudioDecoder == NULL && mVideoDecoder == NULL) {
                    // We're not currently decoding anything (no audio or
                    // video tracks found) and we just ran out of input data.

                    if (err == ERROR_END_OF_STREAM) {
                        notifyListener(MEDIA_PLAYBACK_COMPLETE, 0, 0);
                    } else {
                        notifyListener(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
                    }
                }
                break;
            }
            // postScanSources keeps re-posting itself in a loop, like doSomeWork in ExoPlayer
            if (rescan) {
                msg->post(100000ll);
                mScanSourcesPending = true;
            }
            break;
        }
---------------------------------------
The decoder is actually created in NuPlayer::instantiateDecoder:
status_t NuPlayer::instantiateDecoder(
        bool audio, sp<DecoderBase> *decoder, bool checkAudioModeChange) {
   ...
 
    if (!audio) {
        AString mime;
        CHECK(format->findString("mime", &mime));

        sp<AMessage> ccNotify = new AMessage(kWhatClosedCaptionNotify, this);
        if (mCCDecoder == NULL) {
            mCCDecoder = new CCDecoder(ccNotify); // create the closed-caption (subtitle) decoder
        }

        if (mSourceFlags & Source::FLAG_SECURE) {
            format->setInt32("secure", true);
        }

        if (mSourceFlags & Source::FLAG_PROTECTED) {
            format->setInt32("protected", true);
        }

        float rate = getFrameRate();
        if (rate > 0) {
            format->setFloat("operating-rate", rate * mPlaybackSettings.mSpeed);
        }
    }

    if (audio) {
        sp<AMessage> notify = new AMessage(kWhatAudioNotify, this);
        ++mAudioDecoderGeneration;
        notify->setInt32("generation", mAudioDecoderGeneration);

        if (checkAudioModeChange) {
            determineAudioModeChange(format);
        }
        if (mOffloadAudio)
            mSource->setOffloadAudio(true /* offload */);

        if (mOffloadAudio) {
            const bool hasVideo = (mSource->getFormat(false /*audio */) != NULL);
            format->setInt32("has-video", hasVideo);
            *decoder = AVNuFactory::get()->createPassThruDecoder(notify, mSource, mRenderer); // use AVNuFactory to create a pass-through audio decoder (offload)
        } else {
            AVNuUtils::get()->setCodecOutputFormat(format);
            mSource->setOffloadAudio(false /* offload */);
            *decoder = AVNuFactory::get()->createDecoder(notify, mSource, mPID, mRenderer); // create a normal audio decoder
        }
    } else {
        sp<AMessage> notify = new AMessage(kWhatVideoNotify, this);
        ++mVideoDecoderGeneration;
        notify->setInt32("generation", mVideoDecoderGeneration);

        *decoder = new Decoder(
                notify, mSource, mPID, mRenderer, mSurface, mCCDecoder);
        // create the video decoder; note that the CC decoder is passed in as a parameter
        // enable FRC if high-quality AV sync is requested, even if not
        // directly queuing to display, as this will even improve textureview
        // playback.
        {
            char value[PROPERTY_VALUE_MAX];
            if (property_get("persist.sys.media.avsync", value, NULL) &&
                    (!strcmp("1", value) || !strcasecmp("true", value))) {
                format->setInt32("auto-frc", 1);
            }
        }
    }
    (*decoder)->init();            // initialize the decoder
    (*decoder)->configure(format); // configure the decoder

    // allocate buffers to decrypt widevine source buffers
    if (!audio && (mSourceFlags & Source::FLAG_SECURE)) {
        Vector<sp<ABuffer> > inputBufs;
        CHECK_EQ((*decoder)->getInputBuffers(&inputBufs), (status_t)OK);

        Vector<MediaBuffer *> mediaBufs;
        for (size_t i = 0; i < inputBufs.size(); i++) {
            const sp<ABuffer> &buffer = inputBufs[i];
            MediaBuffer *mbuf = new MediaBuffer(buffer->data(), buffer->size());
            mediaBufs.push(mbuf);
        }

        status_t err = mSource->setBuffers(audio, mediaBufs);
        if (err != OK) {
            for (size_t i = 0; i < mediaBufs.size(); ++i) {
                mediaBufs[i]->release();
            }
            mediaBufs.clear();
            ALOGE("Secure source didn't support secure mediaBufs.");
            return err;
        }
    }
...
    return OK;
}
-------------------------------------

-------------------------------------
void NuPlayer::Decoder::doFlush(bool notifyComplete) {
    if (mCCDecoder != NULL) {
        mCCDecoder->flush(); // flush the CC (subtitle) decoder first
    }

    if (mRenderer != NULL) {
        mRenderer->flush(mIsAudio, notifyComplete);
        mRenderer->signalTimeDiscontinuity(); // then flush the Renderer
    }

    status_t err = OK;
    if (mCodec != NULL) {
        err = mCodec->flush(); // finally flush the codec itself
        mCSDsToSubmit = mCSDsForCurrentFormat; // copy operator
        ++mBufferGeneration;
    }
...
    releaseAndResetMediaBuffers(); // clear the buffers
    mPaused = true;
}

DecoderBase
The base class of NuPlayer::Decoder. It too maintains its own looper, and all of its work is again driven by asynchronous messages; the onMessage-style handlers are virtual functions that the subclass implements. You can also see how setRenderer ties the decoder to the Renderer.
NuPlayer::DecoderBase::DecoderBase(const sp<AMessage> &notify)
    :  mNotify(notify),
       mBufferGeneration(0),
       mPaused(false),
       mStats(new AMessage),
       mRequestInputBuffersPending(false) {
    // Every decoder has its own looper because MediaCodec operations
    // are blocking, but NuPlayer needs asynchronous operations.
    mDecoderLooper = new ALooper;
    mDecoderLooper->setName("NPDecoder");
    mDecoderLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
}

void NuPlayer::DecoderBase::init() {
    mDecoderLooper->registerHandler(this);
}

NuPlayer::Decoder
Each piece of work is handled by the corresponding onMessage-style method.
Let's look at init and configure first. init comes straight from DecoderBase and simply registers the handler with the looper.
--------------------------------------
void NuPlayer::Decoder::onConfigure(const sp<AMessage> &format) {
...
    mCodec = AVUtils::get()->createCustomComponentByName(mCodecLooper, mime.c_str(), false /* encoder */, format);
    if (mCodec == NULL) {
        // otherwise create the decoder from the MIME type
    mCodec = MediaCodec::CreateByType(
            mCodecLooper, mime.c_str(), false /* encoder */, NULL /* err */, mPid);
    }
...
// mCodec is just the MediaCodec from libstagefright; nothing special here
err = mCodec->configure(
            format, mSurface, NULL /* crypto */, 0 /* flags */);
    if (err != OK) {
        ALOGE("Failed to configure %s decoder (err=%d)", mComponentName.c_str(), err);
        mCodec->release();
        mCodec.clear();
        handleError(err);
        return;
    }
    rememberCodecSpecificData(format);
    // the following should work in the configured state: read the format info
    CHECK_EQ((status_t)OK, mCodec->getOutputFormat(&mOutputFormat));
    CHECK_EQ((status_t)OK, mCodec->getInputFormat(&mInputFormat));

    mStats->setString("mime", mime.c_str());
    mStats->setString("component-name", mComponentName.c_str());

    if (!mIsAudio) {
        int32_t width, height;
        if (mOutputFormat->findInt32("width", &width)
                && mOutputFormat->findInt32("height", &height)) {
            mStats->setInt32("width", width);
            mStats->setInt32("height", height);
        }
    }
...
// start the MediaCodec
err = mCodec->start();
    if (err != OK) {
        ALOGE("Failed to start %s decoder (err=%d)", mComponentName.c_str(), err);
        mCodec->release();
        mCodec.clear();
        handleError(err);
        return;
    }
// release all buffers and reset them to NULL first
    releaseAndResetMediaBuffers();
...
}

As mentioned above, NuPlayer::Decoder wraps a MediaCodec, so there is no need to dig into how decoding itself works; what matters is what this module outputs and how data is fed into it.
Output first:
when MediaCodec has output available, onMessageReceived contains
case MediaCodec::CB_OUTPUT_AVAILABLE:
                {
                    int32_t index;
                    size_t offset;
                    size_t size;
                    int64_t timeUs;
                    int32_t flags;

                    CHECK(msg->findInt32("index", &index));
                    CHECK(msg->findSize("offset", &offset));
                    CHECK(msg->findSize("size", &size));
                    CHECK(msg->findInt64("timeUs", &timeUs));
                    CHECK(msg->findInt32("flags", &flags));

                    handleAnOutputBuffer(index, offset, size, timeUs, flags);
                    break;
                }
-------------------------------------
bool NuPlayer::Decoder::handleAnOutputBuffer(
        size_t index,
        size_t offset,
        size_t size,
        int64_t timeUs,
        int32_t flags) {
//    CHECK_LT(bufferIx, mOutputBuffers.size());
    sp<ABuffer> buffer;
    mCodec->getOutputBuffer(index, &buffer);
....
    // post a kWhatRenderBuffer message as the reply
    sp<AMessage> reply = new AMessage(kWhatRenderBuffer, this);
    reply->setSize("buffer-ix", index);
    reply->setInt32("generation", mBufferGeneration);

    if (eos) {
        ALOGI("[%s] saw output EOS", mIsAudio ? "audio" : "video");
        // reached EOS
        buffer->meta()->setInt32("eos", true);
        reply->setInt32("eos", true);
    } else if (mSkipRenderingUntilMediaTimeUs >= 0) {
        if (timeUs < mSkipRenderingUntilMediaTimeUs) {
            ALOGV("[%s] dropping buffer at time %lld as requested.",
                     mComponentName.c_str(), (long long)timeUs);
            // this stretch should not be rendered; skip it
            reply->post();
            return true;
        }

        mSkipRenderingUntilMediaTimeUs = -1;
    } else if ((flags & MediaCodec::BUFFER_FLAG_DATACORRUPT) &&
            AVNuUtils::get()->dropCorruptFrame()) {
        ALOGV("[%s] dropping corrupt buffer at time %lld as requested.",
                     mComponentName.c_str(), (long long)timeUs);
        // this buffer is corrupt; drop it
        reply->post();
        return true;
    }

    mNumFramesTotal += !mIsAudio;

    // wait until 1st frame comes out to signal resume complete
    notifyResumeCompleteIfNecessary();

    if (mRenderer != NULL) {
        // send the buffer to the renderer
        mRenderer->queueBuffer(mIsAudio, buffer, reply);
        if (eos && !isDiscontinuityPending()) {
            mRenderer->queueEOS(mIsAudio, ERROR_END_OF_STREAM);
        }
    }

    return true;
}
--------------------------------------
case kWhatRenderBuffer:
        {
            if (!isStaleReply(msg)) {
                onRenderBuffer(msg);
            }
            break;
        }
-------------------------------------
void NuPlayer::Decoder::onRenderBuffer(const sp<AMessage> &msg) {
    ...
    if (!mIsAudio) {
        int64_t timeUs;
        sp<ABuffer> buffer = mOutputBuffers[bufferIx];
        buffer->meta()->findInt64("timeUs", &timeUs);

        if (mCCDecoder != NULL && mCCDecoder->isSelected()) {
            mCCDecoder->display(timeUs); // display the subtitles
        }
    }

    if (msg->findInt32("render", &render) && render) {
        int64_t timestampNs;
        CHECK(msg->findInt64("timestampNs", &timestampNs));

// handed off to MediaCodec::renderOutputBufferAndRelease
        err = mCodec->renderOutputBufferAndRelease(bufferIx, timestampNs);
    } else {
        mNumOutputFramesDropped += !mIsAudio;
        err = mCodec->releaseOutputBuffer(bufferIx);
    }
    ...
}
Now the input side:
when MediaCodec has an input buffer available, onMessageReceived contains
case MediaCodec::CB_INPUT_AVAILABLE:
                {
                    int32_t index;
                    CHECK(msg->findInt32("index", &index));

                    handleAnInputBuffer(index);
                    break;
                }
------------------------------------
bool NuPlayer::Decoder::handleAnInputBuffer(size_t index) {
   ...

    sp<ABuffer> buffer;
    mCodec->getInputBuffer(index, &buffer);
...
    if (index >= mInputBuffers.size()) {
        for (size_t i = mInputBuffers.size(); i <= index; ++i) {
            mInputBuffers.add();
            mMediaBuffers.add();
            mInputBufferIsDequeued.add();
            mMediaBuffers.editItemAt(i) = NULL;
            mInputBufferIsDequeued.editItemAt(i) = false;
        }
    }
    mInputBuffers.editItemAt(index) = buffer;

    //CHECK_LT(bufferIx, mInputBuffers.size());

    if (mMediaBuffers[index] != NULL) {
        mMediaBuffers[index]->release();
        mMediaBuffers.editItemAt(index) = NULL;
    }
    mInputBufferIsDequeued.editItemAt(index) = true;

    if (!mCSDsToSubmit.isEmpty()) {
        sp<AMessage> msg = new AMessage();
        msg->setSize("buffer-ix", index);

        sp<ABuffer> buffer = mCSDsToSubmit.itemAt(0);
        ALOGI("[%s] resubmitting CSD", mComponentName.c_str());
        msg->setBuffer("buffer", buffer);
        mCSDsToSubmit.removeAt(0);
        if (!onInputBufferFetched(msg)) {
            handleError(UNKNOWN_ERROR);
            return false;
        }
        return true;
    }

    while (!mPendingInputMessages.empty()) {
        sp<AMessage> msg = *mPendingInputMessages.begin();
        if (!onInputBufferFetched(msg)) {
            break;
        }
        mPendingInputMessages.erase(mPendingInputMessages.begin());
    }

    if (!mInputBufferIsDequeued.editItemAt(index)) {
        return true;
    }

    mDequeuedInputBuffers.push_back(index);

    onRequestInputBuffers();
    return true;
}
------------------------------------
bool NuPlayer::Decoder::onInputBufferFetched(const sp<AMessage> &msg) {
    ...
    sp<ABuffer> buffer;
    bool hasBuffer = msg->findBuffer("buffer", &buffer);

    // handle widevine classic source - that fills an arbitrary input buffer
    MediaBuffer *mediaBuffer = NULL;
    if (hasBuffer) {
        // TODO: for more details, look into MediaBuffer
        mediaBuffer = (MediaBuffer *)(buffer->getMediaBufferBase());
        if (mediaBuffer != NULL) {
            // likely filled another buffer than we requested: adjust buffer index
            size_t ix;
            for (ix = 0; ix < mInputBuffers.size(); ix++) {
                const sp<ABuffer> &buf = mInputBuffers[ix];
                if (buf->data() == mediaBuffer->data()) {
                    // all input buffers are dequeued on start, hence the check
                    if (!mInputBufferIsDequeued[ix]) {
                        ALOGV("[%s] received MediaBuffer for #%zu instead of #%zu",
                                mComponentName.c_str(), ix, bufferIx);
                        mediaBuffer->release();
                        return false;
                    }

                    // TRICKY: need buffer for the metadata, so instead, set
                    // codecBuffer to the same (though incorrect) buffer to
                    // avoid a memcpy into the codecBuffer
                    codecBuffer = buffer;
                    codecBuffer->setRange(
                            mediaBuffer->range_offset(),
                            mediaBuffer->range_length());
                    bufferIx = ix;
                    break;
                }
            }
            CHECK(ix < mInputBuffers.size());
        }
    }

    if (buffer == NULL /* includes !hasBuffer */) {
        int32_t streamErr = ERROR_END_OF_STREAM;
        CHECK(msg->findInt32("err", &streamErr) || !hasBuffer);

        CHECK(streamErr != OK);

        // attempt to queue EOS
        status_t err = mCodec->queueInputBuffer(
                bufferIx,
                0,
                0,
                0,
                MediaCodec::BUFFER_FLAG_EOS);
        if (err == OK) {
            mInputBufferIsDequeued.editItemAt(bufferIx) = false;
        } else if (streamErr == ERROR_END_OF_STREAM) {
            streamErr = err;
            // err will not be ERROR_END_OF_STREAM
        }

        if (streamErr != ERROR_END_OF_STREAM) {
            ALOGE("Stream error for %s (err=%d), EOS %s queued",
                    mComponentName.c_str(),
                    streamErr,
                    err == OK ? "successfully" : "unsuccessfully");
            handleError(streamErr);
        }
    } else {
        sp<AMessage> extra;
        if (buffer->meta()->findMessage("extra", &extra) && extra != NULL) {
            int64_t resumeAtMediaTimeUs;
            if (extra->findInt64(
                        "resume-at-mediaTimeUs", &resumeAtMediaTimeUs)) {
                ALOGI("[%s] suppressing rendering until %lld us",
                        mComponentName.c_str(), (long long)resumeAtMediaTimeUs);
                mSkipRenderingUntilMediaTimeUs = resumeAtMediaTimeUs;
            }
        }

        int64_t timeUs = 0;
        uint32_t flags = 0;
        CHECK(buffer->meta()->findInt64("timeUs", &timeUs));

        int32_t eos, csd;
        // we do not expect SYNCFRAME for decoder
        if (buffer->meta()->findInt32("eos", &eos) && eos) {
            flags |= MediaCodec::BUFFER_FLAG_EOS;
        } else if (buffer->meta()->findInt32("csd", &csd) && csd) {
            flags |= MediaCodec::BUFFER_FLAG_CODECCONFIG;
        }

        // copy into codec buffer
        if (buffer != codecBuffer) {
            if (buffer->size() > codecBuffer->capacity()) {
                handleError(ERROR_BUFFER_TOO_SMALL);
                mDequeuedInputBuffers.push_back(bufferIx);
                return false;
            }
            codecBuffer->setRange(0, buffer->size());
            memcpy(codecBuffer->data(), buffer->data(), buffer->size());
        }

        status_t err = mCodec->queueInputBuffer(
                        bufferIx,
                        codecBuffer->offset(),
                        codecBuffer->size(),
                        timeUs,
                        flags);
        if (err != OK) {
            if (mediaBuffer != NULL) {
                mediaBuffer->release();
            }
            ALOGE("Failed to queue input buffer for %s (err=%d)",
                    mComponentName.c_str(), err);
            handleError(err);
        } else {
            mInputBufferIsDequeued.editItemAt(bufferIx) = false;
            if (mediaBuffer != NULL) {
                CHECK(mMediaBuffers[bufferIx] == NULL);
                mMediaBuffers.editItemAt(bufferIx) = mediaBuffer;
            }
        }
    }
    return true;
}
Whatever path is taken, the actual work is ultimately done by MediaCodec::queueInputBuffer.
-------------------------------------
void NuPlayer::DecoderBase::onRequestInputBuffers() {
    if (mRequestInputBuffersPending) {
        return;
    }

    // doRequestBuffers() return true if we should request more data
    if (doRequestBuffers()) {
        mRequestInputBuffersPending = true;
        // note: this re-posts itself, so it keeps requesting input in a loop
        sp<AMessage> msg = new AMessage(kWhatRequestInputBuffers, this);
        msg->post(10 * 1000ll);
    }
}
--------------------------------------
 case kWhatRequestInputBuffers:
        {
            mRequestInputBuffersPending = false;
            onRequestInputBuffers();
            break;
        }
--------------------------------------
/*
 * returns true if we should request more data
 */
bool NuPlayer::Decoder::doRequestBuffers() {
    // mRenderer is only NULL if we have a legacy widevine source that
    // is not yet ready. In this case we must not fetch input.
    if (isDiscontinuityPending() || mRenderer == NULL) {
        return false;
    }
    status_t err = OK;
    while (err == OK && !mDequeuedInputBuffers.empty()) {
        size_t bufferIx = *mDequeuedInputBuffers.begin();
        sp<AMessage> msg = new AMessage();
        msg->setSize("buffer-ix", bufferIx);
        err = fetchInputData(msg);
        if (err != OK && err != ERROR_END_OF_STREAM) {
            // if EOS, need to queue EOS buffer
            break;
        }
        mDequeuedInputBuffers.erase(mDequeuedInputBuffers.begin());

        if (!mPendingInputMessages.empty()
                || !onInputBufferFetched(msg)) {  // this method was analyzed earlier
            mPendingInputMessages.push_back(msg);
        }
    }

    return err == -EWOULDBLOCK
            && mSource->feedMoreTSData() == OK;
}
-------------------------------------
status_t NuPlayer::Decoder::fetchInputData(sp &reply) {
    sp<ABuffer> accessUnit;
    bool dropAccessUnit;
    do {
        // here the decoder calls into the source: this is how decoder and source are connected
        status_t err = mSource->dequeueAccessUnit(mIsAudio, &accessUnit);
...
    } while (...);
}
------------------------------------


That completes the analysis of the Decoder.

Renderer

Located in the nuplayer directory.
The Renderer is initialized in NuPlayer::onStart:

void NuPlayer::onStart(int64_t startPositionUs) {
   ...
    sp<AMessage> notify = new AMessage(kWhatRendererNotify, this);
    ++mRendererGeneration;
    notify->setInt32("generation", mRendererGeneration);
    mRenderer = AVNuFactory::get()->createRenderer(mAudioSink, notify, flags);
    mRendererLooper = new ALooper;
    mRendererLooper->setName("NuPlayerRenderer");
    mRendererLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
    mRendererLooper->registerHandler(mRenderer);

    status_t err = mRenderer->setPlaybackSettings(mPlaybackSettings);
…...
    float rate = getFrameRate();
    if (rate > 0) {
        mRenderer->setVideoFrameRate(rate);
    }

    if (mVideoDecoder != NULL) {
        mVideoDecoder->setRenderer(mRenderer);
    }
    if (mAudioDecoder != NULL) {
        mAudioDecoder->setRenderer(mRenderer);
    }

    postScanSources();
}


Data enters the Renderer from NuPlayer::Decoder::handleAnOutputBuffer:

bool NuPlayer::Decoder::handleAnOutputBuffer(
        size_t index,
        size_t offset,
        size_t size,
        int64_t timeUs,
        int32_t flags) {
...
if (mRenderer != NULL) {
        // send the buffer to the renderer
        mRenderer->queueBuffer(mIsAudio, buffer, reply);
        if (eos && !isDiscontinuityPending()) {
            mRenderer->queueEOS(mIsAudio, ERROR_END_OF_STREAM);
        }
    }
...
}
libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
mRenderer->queueBuffer eventually ends up in the following method:
void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (dropBufferIfStale(audio, msg)) {
        return;
    }

    if (audio) {
        mHasAudio = true;
    } else {
        mHasVideo = true;
    }

    if (mHasVideo) {
        if (mVideoScheduler == NULL) {
            mVideoScheduler = new VideoFrameScheduler(); // create the VideoFrameScheduler, used to align rendering with VSync
            mVideoScheduler->init();
        }
    }

    sp<ABuffer> buffer;
    CHECK(msg->findBuffer("buffer", &buffer));

    sp<AMessage> notifyConsumed;
    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));

    QueueEntry entry; // can be understood as one slot in the circular buffer queue
    entry.mBuffer = buffer;
    entry.mNotifyConsumed = notifyConsumed;
    entry.mOffset = 0;
    entry.mFinalResult = OK;
    entry.mBufferOrdinal = ++mTotalBuffersQueued;

// mAudioQueue and mVideoQueue are both List<QueueEntry>
    if (audio) {
        Mutex::Autolock autoLock(mLock);
        mAudioQueue.push_back(entry);
        postDrainAudioQueue_l();
    } else {
        mVideoQueue.push_back(entry);
        postDrainVideoQueue(); // gets re-posted periodically
    }

    Mutex::Autolock autoLock(mLock);
    if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
        return;
    }

    sp<ABuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
    sp<ABuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;

    if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
        // EOS signalled on either queue: one of the queues has run out
        syncQueuesDone_l();
        return;
    }

    int64_t firstAudioTimeUs;
    int64_t firstVideoTimeUs;
    CHECK(firstAudioBuffer->meta()
            ->findInt64("timeUs", &firstAudioTimeUs));
    CHECK(firstVideoBuffer->meta()
            ->findInt64("timeUs", &firstVideoTimeUs));

    int64_t diff = firstVideoTimeUs - firstAudioTimeUs;

    ALOGV("queueDiff = %.2f secs", diff / 1E6);

    if (diff > 100000ll) {
        // Audio data starts more than 0.1 secs before video:
        // drop some audio so the two queues line up.

        (*mAudioQueue.begin()).mNotifyConsumed->post();
        mAudioQueue.erase(mAudioQueue.begin());
        return;
    }

    syncQueuesDone_l();
}


The Renderer's data output

Also in postDrainVideoQueue, a kWhatDrainVideoQueue message is posted:
void NuPlayer::Renderer::postDrainVideoQueue() {
    ...
    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, this);
    msg->setInt32("drainGeneration", getDrainGeneration(false /* audio */));
    ...
}
case kWhatDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("drainGeneration", &generation));
            if (generation != getDrainGeneration(false /* audio */)) {
                break;
            }

            mDrainVideoQueuePending = false;

            onDrainVideoQueue();

            postDrainVideoQueue();
            break;
        }
void NuPlayer::Renderer::onDrainVideoQueue() {
...
    QueueEntry *entry = &*mVideoQueue.begin();

    if (entry->mBuffer == NULL) {
        // EOS
        notifyEOS(false /* audio */, entry->mFinalResult);
...
        return;
    }

    int64_t nowUs = ALooper::GetNowUs();
    int64_t realTimeUs;
    int64_t mediaTimeUs = -1;
    if (mFlags & FLAG_REAL_TIME) {
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
    } else {
        CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
    }

    bool tooLate = false;

    if (!mPaused) {
// compare the media time against the wall clock
        setVideoLateByUs(nowUs - realTimeUs);
        tooLate = (mVideoLateByUs > 40000);

        if (tooLate) {
            ALOGV("video late by %lld us (%.2f secs)",
                 (long long)mVideoLateByUs, mVideoLateByUs / 1E6);
        } else {
            int64_t mediaUs = 0;
            mMediaClock->getMediaTime(realTimeUs, &mediaUs);
            ALOGV("rendering video at media time %.2f secs",
                    (mFlags & FLAG_REAL_TIME ? realTimeUs :
                    mediaUs) / 1E6);...
        }
    } else {
        setVideoLateByUs(0);
...
    }

    // Always render the first video frame while keeping stats on A/V sync.
    if (!mVideoSampleReceived) {
        realTimeUs = nowUs;
        tooLate = false;
    }

    entry->mNotifyConsumed->setInt64("timestampNs", realTimeUs * 1000ll);
    entry->mNotifyConsumed->setInt32("render", !tooLate);
    entry->mNotifyConsumed->post();
    mVideoQueue.erase(mVideoQueue.begin());
    entry = NULL;

    mVideoSampleReceived = true;

    if (!mPaused) {
        if (!mVideoRenderingStarted) {
            mVideoRenderingStarted = true;
            notifyVideoRenderingStart(); // rendering has just started
        }
        Mutex::Autolock autoLock(mLock);
        notifyIfMediaRenderingStarted_l(); // "started"; like notifyVideoRenderingStart above, this posts the corresponding kWhatXxx message, which is finally handled in NuPlayer's onMessageReceived
    }
}
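The realTimeUs used above comes from the MediaClock: a media timestamp is mapped onto the system clock using the anchor pair that the audio path keeps updating. Conceptually it behaves like the simplified sketch below (a paraphrase of MediaClock::getRealTimeFor from memory, ignoring locking and edge cases; the standalone function and parameter names are for illustration only):

#include <stdint.h>

// Simplified sketch: map a media timestamp to a render time on the system clock.
// anchorMediaUs/anchorRealUs are the most recent (media time, real time) anchor pair,
// playbackRate is the current playback speed (1.0 = normal).
int64_t getRealTimeFor(int64_t mediaTimeUs,
                       int64_t anchorMediaUs,
                       int64_t anchorRealUs,
                       float playbackRate) {
    // how far past the anchor this frame is, in media time, scaled by the speed
    int64_t mediaDeltaUs = mediaTimeUs - anchorMediaUs;
    return anchorRealUs + (int64_t)(mediaDeltaUs / playbackRate);
}

// onDrainVideoQueue then compares this against ALooper::GetNowUs():
//   mVideoLateByUs = nowUs - realTimeUs;
//   tooLate        = (mVideoLateByUs > 40000);   // more than 40 ms late -> drop the frame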
Back in NuPlayer:
case kWhatRendererNotify:
        {
...
            int32_t what;
            CHECK(msg->findInt32("what", &what));
// depending on 'what', all the familiar notifications show up here
            if (what == Renderer::kWhatEOS) {
...
                if (audio) {
                    mAudioEOS = true;
                } else {
                    mVideoEOS = true;
                }

                if (finalResult == ERROR_END_OF_STREAM) {
                    ALOGV("reached %s EOS", audio ? "audio" : "video");
                } else {
                    ALOGE("%s track encountered an error (%d)",
                         audio ? "audio" : "video", finalResult);

                    notifyListener(
                            MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, finalResult);
                }

                if ((mAudioEOS || mAudioDecoder == NULL)
                        && (mVideoEOS || mVideoDecoder == NULL)) {
                    notifyListener(MEDIA_PLAYBACK_COMPLETE, 0, 0);
                }
            } else if (what == Renderer::kWhatFlushComplete) {
                int32_t audio;
                CHECK(msg->findInt32("audio", &audio));

                if (audio) {
                    mAudioEOS = false;
                } else {
                    mVideoEOS = false;
                }

                ALOGV("renderer %s flush completed.", audio ? "audio" : "video");
                if (audio && (mFlushingAudio == NONE || mFlushingAudio == FLUSHED
                        || mFlushingAudio == SHUT_DOWN)) {
                    // Flush has been handled by tear down.
                    break;
                }
                handleFlushComplete(audio, false /* isDecoder */);
                finishFlushIfPossible();
            } else if (what == Renderer::kWhatVideoRenderingStart) {
// matches the first notification above: rendering has just started
                notifyListener(MEDIA_INFO, MEDIA_INFO_RENDERING_START, 0);
            } else if (what == Renderer::kWhatMediaRenderingStart) {
                ALOGV("media rendering started");
// matches the second notification above
                notifyListener(MEDIA_STARTED, 0, 0);
            } else if (what == Renderer::kWhatAudioTearDown) {
               ….
            }
            break;
        }
frameworks/av/include/media
// The player just pushed the very first video frame for rendering
enum media_info_type {
    MEDIA_INFO_RENDERING_START = 3,
};
enum media_event_type {
    MEDIA_STARTED = 6,
};


This completes the analysis of the AHandler mechanism and the Source / Decoder / Renderer modules of NuPlayer in Android. Comments and discussion are welcome.