NuPlayer: the Core of MediaPlayer
In the earlier Android MediaPlayer source code analysis we saw that NuPlayer is the core of MediaPlayer, responsible for video decoding, rendering, synchronization and output. This article digs deeper into NuPlayer and the knowledge around it.
Overall design and architecture
The following figure shows how MediaPlayer works within the Android architecture.
The following figure shows the workflow of NuPlayer.
As you can see, Android uses NuPlayer as its mainstream player. NuPlayer is built on the StageFright engine, which integrates the OpenMAX codecs together with session management, time synchronization, rendering, transport control and DRM (Digital Rights Management).
The AHandler mechanism
AHandler shows up everywhere in the NuPlayer source: NuPlayer uses it to build the communication and interaction between its members. Much like Handler in the Android Java layer, AHandler is an asynchronous message mechanism implemented in the Android native layer. Everything in this mechanism is processed asynchronously: the data is wrapped in an AMessage and put into a queue, and a dedicated background thread takes messages off that queue and executes them; the execution entry point is onMessageReceived. The principle is shown in the figure below:
The AHandler mechanism consists of the following core classes; a minimal usage sketch that ties them together follows the ALooper code below.
- AMessage
The message class, used to build a message. deliver() hands the message directly to an AHandler; post() hands it to an ALooper, which later delivers it to the target AHandler.
// Send the message directly to the AHandler
void AMessage::deliver() {
    sp<AHandler> handler = mHandler.promote();
    if (handler == NULL) {
        ALOGW("failed to deliver message as target handler %d is gone.", mTarget);
        return;
    }
    handler->deliverMessage(this);
}

// Post the message to the ALooper, which later hands it to the AHandler
status_t AMessage::post(int64_t delayUs) {
    sp<ALooper> looper = mLooper.promote();
    if (looper == NULL) {
        ALOGW("failed to post message as target looper for handler %d is gone.", mTarget);
        return -ENOENT;
    }

    looper->post(this, delayUs);
    return OK;
}
- AHandler
The message-handling class. It is normally used as a base class; subclasses must implement onMessageReceived, which plays the same role as handleMessage(msg) in the Java-layer Handler.
void AHandler::deliverMessage(const sp<AMessage> &msg) {
    // Delegate to onMessageReceived. It is a virtual function implemented by
    // AHandler subclasses; NuPlayer derives from AHandler and implements it.
    onMessageReceived(msg);
    mMessageCounter++;
    // ... (code omitted)
}
- ALooper
Each AHandler is registered with an ALooper (one ALooper can serve several AHandlers). The ALooper stores messages and dispatches them to their target AHandler, and its relationship with AMessage is one-to-many.
// posts a message on this looper with the given timeout
void ALooper::post(const sp<AMessage> &msg, int64_t delayUs) {
    Mutex::Autolock autoLock(mLock);

    int64_t whenUs;
    if (delayUs > 0) {
        whenUs = GetNowUs() + delayUs;
    } else {
        whenUs = GetNowUs();
    }

    List<Event>::iterator it = mEventQueue.begin();
    while (it != mEventQueue.end() && (*it).mWhenUs <= whenUs) {
        ++it;
    }

    Event event;
    event.mWhenUs = whenUs;
    event.mMessage = msg;

    if (it == mEventQueue.begin()) {
        mQueueChangedCondition.signal();
    }

    // Insert the message into the event queue; mEventQueue is a List<Event>
    mEventQueue.insert(it, event);
}

status_t ALooper::start(
        bool runOnCallingThread, bool canCallJava, int32_t priority) {
    if (runOnCallingThread) {
        {
            Mutex::Autolock autoLock(mLock);

            if (mThread != NULL || mRunningLocally) {
                return INVALID_OPERATION;
            }

            mRunningLocally = true;
        }

        // Pump messages on the calling thread; the body is empty because the
        // actual dequeuing happens inside loop()
        do {
        } while (loop());

        return OK;
    }

    Mutex::Autolock autoLock(mLock);

    if (mThread != NULL || mRunningLocally) {
        return INVALID_OPERATION;
    }

    mThread = new LooperThread(this, canCallJava);

    status_t err = mThread->run(
            mName.empty() ? "ALooper" : mName.c_str(), priority);
    if (err != OK) {
        mThread.clear();
    }

    return err;
}

bool ALooper::loop() {
    Event event;

    {
        Mutex::Autolock autoLock(mLock);
        if (mThread == NULL && !mRunningLocally) {
            return false;
        }
        if (mEventQueue.empty()) {
            mQueueChangedCondition.wait(mLock);
            return true;
        }
        int64_t whenUs = (*mEventQueue.begin()).mWhenUs;
        int64_t nowUs = GetNowUs();

        if (whenUs > nowUs) {
            int64_t delayUs = whenUs - nowUs;
            mQueueChangedCondition.waitRelative(mLock, delayUs * 1000ll);
            return true;
        }

        event = *mEventQueue.begin();
        mEventQueue.erase(mEventQueue.begin());
    }

    event.mMessage->deliver();
    // ... (code omitted)
}
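Putting the three classes together, here is a minimal usage sketch of the mechanism. It is not NuPlayer code: MyComponent, kWhatPing and example() are made-up names used only to illustrate the looper/handler/message wiring.

#define LOG_TAG "MyComponent"
#include <utils/Log.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AHandler.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>

using namespace android;

// Hypothetical handler, for illustration only -- not a NuPlayer class.
struct MyComponent : public AHandler {
    enum { kWhatPing = 'ping' };

protected:
    virtual void onMessageReceived(const sp<AMessage> &msg) {
        switch (msg->what()) {
            case kWhatPing:
            {
                int32_t value;
                CHECK(msg->findInt32("value", &value));
                ALOGI("got ping, value=%d", value);
                break;
            }
            default:
                TRESPASS();
        }
    }
};

// Typical wiring: one looper thread, one handler, messages posted to it.
void example() {
    sp<ALooper> looper = new ALooper;
    looper->setName("MyComponentLooper");
    looper->start();                     // spawns the looper thread

    sp<MyComponent> component = new MyComponent;
    looper->registerHandler(component);  // bind the handler to this looper

    sp<AMessage> msg = new AMessage(MyComponent::kWhatPing, component);
    msg->setInt32("value", 42);
    msg->post(100000ll);                 // delivered ~100 ms later on the looper thread
}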
NuPlayer
NuPlayer itself derives from AHandler, so it has the ability to both send and handle messages.
struct NuPlayer : public AHandler {
    explicit NuPlayer(pid_t pid, const sp<MediaClock> &mediaClock);

    void setUID(uid_t uid);

    void init(const wp<NuPlayerDriver> &driver);

    void setDataSourceAsync(const sp<IStreamSource> &source);

    virtual void setDataSourceAsync(
            const sp<IMediaHTTPService> &httpService,
            const char *url,
            const KeyedVector<String8, String8> *headers);

    void setDataSourceAsync(int fd, int64_t offset, int64_t length);

    void setDataSourceAsync(const sp<DataSource> &source);

    // ... (code omitted)
};
The following figure shows the execution flow of a typical player.
The following figure shows the architecture of NuPlayer.
The creation of NuPlayer was already walked through in the earlier article, Android MediaPlayer source code analysis. Several important members are involved in that process; let's analyze them one by one.
- NuPlayerDriver
NuPlayerDriver wraps NuPlayer and implements the MediaPlayerInterface interface; the actual playback work is delegated to NuPlayer. The way to read this code is: first see what NuPlayerDriver does, then jump to the corresponding implementation in NuPlayer, usually continue into NuPlayer::onMessageReceived to see how the message is handled, and finally come back to the various notify* callbacks of NuPlayerDriver to see how the flow completes (a condensed sketch of this round trip follows the class declaration below). The following figure shows the player state-machine transitions.
struct NuPlayerDriver : public MediaPlayerInterface {
    explicit NuPlayerDriver(pid_t pid);

    virtual status_t initCheck();

    virtual status_t setUID(uid_t uid);

    virtual status_t setDataSource(
            const sp<IMediaHTTPService> &httpService,
            const char *url,
            const KeyedVector<String8, String8> *headers);

    virtual status_t setDataSource(int fd, int64_t offset, int64_t length);

    virtual status_t setDataSource(const sp<IStreamSource> &source);

    virtual status_t setDataSource(const sp<DataSource>& dataSource);

    virtual status_t setVideoSurfaceTexture(
            const sp<IGraphicBufferProducer> &bufferProducer);

    virtual status_t getBufferingSettings(
            BufferingSettings* buffering /* nonnull */) override;
    virtual status_t setBufferingSettings(const BufferingSettings& buffering) override;

    virtual status_t prepare();
    virtual status_t prepareAsync();
    virtual status_t start();
    virtual status_t stop();
    virtual status_t pause();
    virtual bool isPlaying();
    virtual status_t setPlaybackSettings(const AudioPlaybackRate &rate);
    virtual status_t getPlaybackSettings(AudioPlaybackRate *rate);
    virtual status_t setSyncSettings(const AVSyncSettings &sync, float videoFpsHint);
    virtual status_t getSyncSettings(AVSyncSettings *sync, float *videoFps);
    virtual status_t seekTo(
            int msec, MediaPlayerSeekMode mode = MediaPlayerSeekMode::SEEK_PREVIOUS_SYNC);
    virtual status_t getCurrentPosition(int *msec);
    virtual status_t getDuration(int *msec);
    virtual status_t reset();
    virtual status_t notifyAt(int64_t mediaTimeUs) override;
    virtual status_t setLooping(int loop);
    virtual player_type playerType();
    virtual status_t invoke(const Parcel &request, Parcel *reply);
    virtual void setAudioSink(const sp<AudioSink> &audioSink);
    virtual status_t setParameter(int key, const Parcel &request);
    virtual status_t getParameter(int key, Parcel *reply);

    virtual status_t getMetadata(
            const media::Metadata::Filter& ids, Parcel *records);

    virtual status_t dump(int fd, const Vector<String16> &args) const;

    void notifySetDataSourceCompleted(status_t err);
    void notifyPrepareCompleted(status_t err);
    void notifyResetComplete();
    void notifySetSurfaceComplete();
    void notifyDuration(int64_t durationUs);
    void notifyMorePlayingTimeUs(int64_t timeUs);
    void notifyMoreRebufferingTimeUs(int64_t timeUs);
    void notifyRebufferingWhenExit(bool status);
    void notifySeekComplete();
    void notifySeekComplete_l();
    void notifyListener(int msg, int ext1 = 0, int ext2 = 0, const Parcel *in = NULL);
    void notifyFlagsChanged(uint32_t flags);

    // Modular DRM
    virtual status_t prepareDrm(const uint8_t uuid[16], const Vector<uint8_t> &drmSessionId);
    virtual status_t releaseDrm();

    // ... (code omitted)
};
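As a concrete example of that round trip, here is a condensed sketch of the prepareAsync() path. The bodies are simplified rather than copied verbatim from AOSP; member names such as mPlayer, mDriver, mState and the STATE_* constants follow the real code, but check them against your source tree.

// 1. NuPlayerDriver: record the state change and delegate to NuPlayer.
status_t NuPlayerDriver::prepareAsync() {
    Mutex::Autolock autoLock(mLock);
    mState = STATE_PREPARING;
    mIsAsyncPrepare = true;
    mPlayer->prepareAsync();        // mPlayer is the wrapped NuPlayer
    return OK;
}

// 2. NuPlayer: turn the call into a message so the work runs on its looper thread.
void NuPlayer::prepareAsync() {
    (new AMessage(kWhatPrepare, this))->post();
}

// 3. NuPlayer: when the Source later reports that preparation finished (this
//    arrives through kWhatSourceNotify / onSourceNotify), notify the driver.
void NuPlayer::onPreparedSketch(status_t err) {       // simplified stand-in
    sp<NuPlayerDriver> driver = mDriver.promote();    // mDriver is a wp<NuPlayerDriver>
    if (driver != NULL) {
        driver->notifyPrepareCompleted(err);
    }
}

// 4. NuPlayerDriver: advance the state machine and report to the upper layer.
void NuPlayerDriver::notifyPrepareCompleted(status_t err) {
    mState = (err == OK) ? STATE_PREPARED : STATE_UNPREPARED;
    notifyListener(err == OK ? MEDIA_PREPARED : MEDIA_ERROR);  // up to MediaPlayer
}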
- AVNuFactory
Responsible for creating the key components, including the decoder and the renderer. Through it you can see the data flowing from Source to Decoder to Renderer, driven by AMessages along the way (see the short usage excerpt after the declaration below).
struct AVNuFactory {
    virtual sp<NuPlayer> createNuPlayer(pid_t pid, const sp<MediaClock> &mediaClock);

    virtual sp<NuPlayer::DecoderBase> createPassThruDecoder(
            const sp<AMessage> &notify,
            const sp<NuPlayer::Source> &source,
            const sp<NuPlayer::Renderer> &renderer);

    virtual sp<NuPlayer::DecoderBase> createDecoder(
            const sp<AMessage> &notify,
            const sp<NuPlayer::Source> &source,
            pid_t pid,
            uid_t uid,
            const sp<NuPlayer::Renderer> &renderer);

    virtual sp<NuPlayer::Renderer> createRenderer(
            const sp<MediaPlayerBase::AudioSink> &sink,
            const sp<MediaClock> &mediaClock,
            const sp<AMessage> &notify,
            uint32_t flags);

    // ----- NO TRESSPASSING BEYOND THIS LINE ------
    DECLARE_LOADABLE_SINGLETON(AVNuFactory);
};
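AVNuFactory is a loadable singleton, so NuPlayer reaches it through AVNuFactory::get(). The calls below are the ones that appear later in instantiateDecoder() and onStart(), pulled together here only to make the Source -> Decoder -> Renderer wiring explicit (the local variable names are illustrative):

// Inside NuPlayer (simplified): every component gets a notify AMessage that
// points back at NuPlayer, which is how the AMessage-driven data flow is built.
sp<AMessage> audioNotify = new AMessage(kWhatAudioNotify, this);
sp<DecoderBase> audioDecoder =
        AVNuFactory::get()->createDecoder(audioNotify, mSource, mPID, mUID, mRenderer);

sp<AMessage> rendererNotify = new AMessage(kWhatRendererNotify, this);
sp<Renderer> renderer =
        AVNuFactory::get()->createRenderer(mAudioSink, mMediaClock, rendererNotify, flags);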
- Source
Represents the data source. It also contains the demuxing module MediaExtractor, functionally similar to FFmpeg's avformat, and Source itself derives from AHandler, so it too can send and handle messages.
struct NuPlayer::Source : public AHandler {
    enum Flags {
        FLAG_CAN_PAUSE          = 1,
        FLAG_CAN_SEEK_BACKWARD  = 2,  // the "10 sec back button"
        FLAG_CAN_SEEK_FORWARD   = 4,  // the "10 sec forward button"
        FLAG_CAN_SEEK           = 8,  // the "seek bar"
        FLAG_DYNAMIC_DURATION   = 16,
        FLAG_SECURE             = 32, // Secure codec is required.
        FLAG_PROTECTED          = 64, // The screen needs to be protected (screenshot is disabled).
    };
    // ... (code omitted)
};
Data sources generally fall into three categories. NuPlayer::setDataSourceAsync() inspects the source and picks the matching implementation, since different sources need different parsing modules.
void NuPlayer::setDataSourceAsync(
        const sp<IMediaHTTPService> &httpService,
        const char *url,
        const KeyedVector<String8, String8> *headers) {
    sp<AMessage> msg = new AMessage(kWhatSetDataSource, this);
    size_t len = strlen(url);

    sp<AMessage> notify = new AMessage(kWhatSourceNotify, this);

    sp<Source> source;
    // Determine the type of the data source
    if (IsHTTPLiveURL(url)) {
        // Streaming media (HLS)
        source = new HTTPLiveSource(notify, httpService, url, headers);
        ALOGV("setDataSourceAsync HTTPLiveSource %s", url);
        mDataSourceType = DATA_SOURCE_TYPE_HTTP_LIVE;
    } else if (!strncasecmp(url, "rtsp://", 7)) {
        // Live streaming
        source = new RTSPSource(
                notify, httpService, url, headers, mUIDValid, mUID);
        ALOGV("setDataSourceAsync RTSPSource %s", url);
        mDataSourceType = DATA_SOURCE_TYPE_RTSP;
    } else if ((!strncasecmp(url, "http://", 7) || !strncasecmp(url, "https://", 8))
            && ((len >= 4 && !strcasecmp(".sdp", &url[len - 4]))
                    || strstr(url, ".sdp?"))) {
        // Live streaming (SDP over http/https)
        source = new RTSPSource(
                notify, httpService, url, headers, mUIDValid, mUID, true);
        ALOGV("setDataSourceAsync RTSPSource http/https/.sdp %s", url);
        mDataSourceType = DATA_SOURCE_TYPE_RTSP;
    } else {
        // Local / generic source
        ALOGV("setDataSourceAsync GenericSource %s", url);

        sp<GenericSource> genericSource =
                new GenericSource(notify, mUIDValid, mUID, mMediaClock);

        status_t err = genericSource->setDataSource(httpService, url, headers);

        if (err == OK) {
            source = genericSource;
        } else {
            ALOGE("Failed to set data source!");
        }

        // regardless of success/failure
        mDataSourceType = DATA_SOURCE_TYPE_GENERIC_URL;
    }
    msg->setObject("source", source);
    msg->post();
}
- GenericSource: local files and other generic sources
struct NuPlayer::GenericSource : public NuPlayer::Source,
                                 public MediaBufferObserver // Modular DRM
{
    GenericSource(const sp<AMessage> &notify, bool uidValid, uid_t uid,
            const sp<MediaClock> &mediaClock);

    status_t setDataSource(
            const sp<IMediaHTTPService> &httpService,
            const char *url,
            const KeyedVector<String8, String8> *headers);
    status_t setDataSource(int fd, int64_t offset, int64_t length);
    status_t setDataSource(const sp<DataSource>& dataSource);

    virtual status_t getBufferingSettings(
            BufferingSettings* buffering /* nonnull */) override;
    virtual status_t setBufferingSettings(const BufferingSettings& buffering) override;

    virtual void prepareAsync();
    virtual void start();
    virtual void stop();
    virtual void pause();
    virtual void resume();
    virtual void disconnect();

    virtual status_t feedMoreTSData();
    virtual sp<MetaData> getFileFormatMeta() const;
    virtual status_t dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit);
    virtual status_t getDuration(int64_t *durationUs);
    virtual size_t getTrackCount() const;
    virtual sp<AMessage> getTrackInfo(size_t trackIndex) const;
    virtual ssize_t getSelectedTrack(media_track_type type) const;
    virtual status_t selectTrack(size_t trackIndex, bool select, int64_t timeUs);
    virtual status_t seekTo(
            int64_t seekTimeUs,
            MediaPlayerSeekMode mode = MediaPlayerSeekMode::SEEK_PREVIOUS_SYNC) override;
    // ... (code omitted)
};
- HTTPLiveSource: a network data source, i.e. HTTP Live Streaming (HLS) media
struct NuPlayer::HTTPLiveSource : public NuPlayer::Source {
    HTTPLiveSource(
            const sp<AMessage> &notify,
            const sp<IMediaHTTPService> &httpService,
            const char *url,
            const KeyedVector<String8, String8> *headers);

    virtual status_t getBufferingSettings(
            BufferingSettings* buffering /* nonnull */) override;
    virtual status_t setBufferingSettings(const BufferingSettings& buffering) override;

    virtual void prepareAsync();
    virtual void start();

    virtual status_t dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit);
    virtual sp<MetaData> getFormatMeta(bool audio);
    virtual sp<AMessage> getFormat(bool audio);

    virtual status_t feedMoreTSData();
    virtual status_t getDuration(int64_t *durationUs);
    virtual size_t getTrackCount() const;
    virtual sp<AMessage> getTrackInfo(size_t trackIndex) const;
    virtual ssize_t getSelectedTrack(media_track_type /* type */) const;
    virtual status_t selectTrack(size_t trackIndex, bool select, int64_t timeUs);
    virtual status_t seekTo(
            int64_t seekTimeUs,
            MediaPlayerSeekMode mode = MediaPlayerSeekMode::SEEK_PREVIOUS_SYNC) override;
    // ... (code omitted)
};
- RTSPSource: a live-streaming data source (RTSP)
struct NuPlayer::RTSPSource : public NuPlayer::Source {
    RTSPSource(
            const sp<AMessage> &notify,
            const sp<IMediaHTTPService> &httpService,
            const char *url,
            const KeyedVector<String8, String8> *headers,
            bool uidValid = false,
            uid_t uid = 0,
            bool isSDP = false);

    virtual status_t getBufferingSettings(
            BufferingSettings* buffering /* nonnull */) override;
    virtual status_t setBufferingSettings(const BufferingSettings& buffering) override;

    virtual void prepareAsync();
    virtual void start();
    virtual void stop();

    virtual status_t feedMoreTSData();
    virtual status_t dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit);
    virtual status_t getDuration(int64_t *durationUs);
    virtual status_t seekTo(
            int64_t seekTimeUs,
            MediaPlayerSeekMode mode = MediaPlayerSeekMode::SEEK_PREVIOUS_SYNC) override;

    void onMessageReceived(const sp<AMessage> &msg);
    // ... (code omitted)
};
- Decoder
The decoder, functionally similar to FFmpeg's avcodec. It wraps the interfaces used for AVC and AAC decoding and performs the actual decoding through ACodec (covering both OMX hardware decoding and software decoding). It is initialized from NuPlayer::onMessageReceived(); a conceptual sketch of the Source -> Decoder -> Renderer cycle follows the initialization code below.
void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
    // ... (code omitted)
    case kWhatPause:
    {
        onPause();
        mPausedByClient = true;
        break;
    }

    case kWhatSourceNotify:
    {
        // The decoders are created from here
        onSourceNotify(msg);
        break;
    }

    case kWhatClosedCaptionNotify:
    {
        onClosedCaptionNotify(msg);
        break;
    }
    // ... (code omitted)
}

---------------------------------------------------------------

void NuPlayer::onSourceNotify(const sp<AMessage> &msg) {
    // ... (code omitted)
    switch (what) {
        case Source::kWhatInstantiateSecureDecoders:
        {
            if (mSource == NULL) {
                // This is a stale notification from a source that was
                // asynchronously preparing when the client called reset().
                // We handled the reset, the source is gone.
                break;
            }

            sp<AMessage> reply;
            CHECK(msg->findMessage("reply", &reply));
            // Create the decoders here
            status_t err = onInstantiateSecureDecoders();
            reply->setInt32("err", err);
            reply->post();
            break;
        }
    // ... (code omitted)
}

---------------------------------------------------------------

status_t NuPlayer::onInstantiateSecureDecoders() {
    status_t err;
    if (!(mSourceFlags & Source::FLAG_SECURE)) {
        return BAD_TYPE;
    }

    if (mRenderer != NULL) {
        ALOGE("renderer should not be set when instantiating secure decoders");
        return UNKNOWN_ERROR;
    }

    // TRICKY: We rely on mRenderer being null, so that decoder does not start requesting
    // data on instantiation.
    if (mSurface != NULL) {
        // Create the video decoder
        err = instantiateDecoder(false, &mVideoDecoder);
        if (err != OK) {
            return err;
        }
    }

    if (mAudioSink != NULL) {
        // Create the audio decoder
        err = instantiateDecoder(true, &mAudioDecoder);
        if (err != OK) {
            return err;
        }
    }
    return OK;
}

---------------------------------------------------------------

status_t NuPlayer::instantiateDecoder(
        bool audio, sp<DecoderBase> *decoder, bool checkAudioModeChange) {
    // ... (code omitted)
    if (audio) {
        // Create the audio decoder
        sp<AMessage> notify = new AMessage(kWhatAudioNotify, this);
        ++mAudioDecoderGeneration;
        notify->setInt32("generation", mAudioDecoderGeneration);

        if (checkAudioModeChange) {
            determineAudioModeChange(format);
        }
        if (mOffloadAudio) {
            mSource->setOffloadAudio(true /* offload */);

            const bool hasVideo = (mSource->getFormat(false /*audio */) != NULL);
            format->setInt32("has-video", hasVideo);
            *decoder = AVNuFactory::get()->createPassThruDecoder(notify, mSource, mRenderer);
            ALOGV("instantiateDecoder audio DecoderPassThrough hasVideo: %d", hasVideo);
        } else {
            AVNuUtils::get()->setCodecOutputFormat(format);
            mSource->setOffloadAudio(false /* offload */);

            *decoder = AVNuFactory::get()->createDecoder(notify, mSource, mPID, mUID, mRenderer);
            ALOGV("instantiateDecoder audio Decoder");
        }
        mAudioDecoderError = false;
    } else {
        // Create the video decoder
        sp<AMessage> notify = new AMessage(kWhatVideoNotify, this);
        ++mVideoDecoderGeneration;
        notify->setInt32("generation", mVideoDecoderGeneration);

        *decoder = new Decoder(
                notify, mSource, mPID, mUID, mRenderer, mSurface, mCCDecoder);
        mVideoDecoderError = false;

        // enable FRC if high-quality AV sync is requested, even if not
        // directly queuing to display, as this will even improve textureview
        // playback.
        {
            if (property_get_bool("persist.sys.media.avsync", false)) {
                format->setInt32("auto-frc", 1);
            }
        }
    }
    (*decoder)->init();
    // ... (code omitted)
}
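Once a decoder exists, it keeps the pipeline moving by pulling compressed access units from the Source and pushing decoded buffers to the Renderer. The helper below is only a conceptual sketch of that Source -> Decoder -> Renderer cycle, written as a blocking function for readability; the real NuPlayer::Decoder does this through MediaCodec callbacks and AMessages, and DecoderSketch, decodeWithCodec and kWhatRenderBufferDone are made-up names.

// Conceptual sketch only: one pull/decode/push iteration of the pipeline.
void DecoderSketch::pumpOnce(const sp<NuPlayer::Source> &source,
                             const sp<NuPlayer::Renderer> &renderer,
                             bool audio) {
    // 1. Pull one compressed access unit from the demuxer side.
    sp<ABuffer> accessUnit;
    status_t err = source->dequeueAccessUnit(audio, &accessUnit);
    if (err == -EWOULDBLOCK) {
        return;                             // nothing buffered yet; try again later
    }
    if (err != OK) {
        renderer->queueEOS(audio, err);     // propagate EOS / errors downstream
        return;
    }

    // 2. Decode it (MediaCodec interaction omitted; the output buffer type
    //    depends on the AOSP version, MediaCodecBuffer in recent releases).
    sp<MediaCodecBuffer> decoded = decodeWithCodec(accessUnit);

    // 3. Hand the decoded buffer to the renderer together with a reply message;
    //    the renderer posts the reply once the buffer has been consumed.
    sp<AMessage> reply = new AMessage(kWhatRenderBufferDone, this);
    renderer->queueBuffer(audio, decoded, reply);
}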
- Renderer
The renderer, playing a role similar to that of the sound-card and graphics drivers: it is mainly responsible for audio/video rendering and A/V synchronization, and it works together with the NativeWindow. It is created in NuPlayer::onStart().
void NuPlayer::onStart(int64_t startPositionUs, MediaPlayerSeekMode mode) {
    // ... (code omitted)
    sp<AMessage> notify = new AMessage(kWhatRendererNotify, this);
    ++mRendererGeneration;
    notify->setInt32("generation", mRendererGeneration);
    // Create the renderer
    mRenderer = AVNuFactory::get()->createRenderer(mAudioSink, mMediaClock, notify, flags);
    mRendererLooper = new ALooper;
    mRendererLooper->setName("NuPlayerRenderer");
    mRendererLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
    mRendererLooper->registerHandler(mRenderer);
    // ... (code omitted)
}
Data enters the renderer in NuPlayer::Decoder::handleAnOutputBuffer():
bool NuPlayer::Decoder::handleAnOutputBuffer(
        size_t index,
        size_t offset,
        size_t size,
        int64_t timeUs,
        int32_t flags) {
    // ... (code omitted)
    if (mRenderer != NULL) {
        // send the buffer to renderer.
        mRenderer->queueBuffer(mIsAudio, buffer, reply);
        if (eos && !isDiscontinuityPending()) {
            mRenderer->queueEOS(mIsAudio, ERROR_END_OF_STREAM);
        }
    }
    // ... (code omitted)
}
Data is output (drained) in NuPlayer::Renderer::postDrainVideoQueue(); a simplified timing illustration follows the excerpt:
void NuPlayer::Renderer::postDrainVideoQueue() {
    // ... (code omitted)
    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, this);
    msg->setInt32("drainGeneration", getDrainGeneration(false /* audio */));
    // ... (code omitted)
}

--------------------------------------------------------

case kWhatDrainVideoQueue:
{
    int32_t generation;
    CHECK(msg->findInt32("drainGeneration", &generation));
    if (generation != getDrainGeneration(false /* audio */)) {
        break;
    }

    mDrainVideoQueuePending = false;

    // Drain the contents of the video buffer queue
    onDrainVideoQueue();

    postDrainVideoQueue();
    break;
}

--------------------------------------------------------

void NuPlayer::Renderer::onDrainVideoQueue() {
    // ... (code omitted)
    mVideoSampleReceived = true;

    if (!mPaused) {
        if (!mVideoRenderingStarted) {
            mVideoRenderingStarted = true;
            // Notify that rendering has started
            notifyVideoRenderingStart();
        }
        Mutex::Autolock autoLock(mLock);
        notifyIfMediaRenderingStarted_l();
    }
}

--------------------------------------------------------

void NuPlayer::Renderer::notifyVideoRenderingStart() {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatVideoRenderingStart);
    notify->post();
}
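The timing decision behind postDrainVideoQueue() / onDrainVideoQueue() is essentially: compare the frame's media timestamp with the playback clock and delay the drain message accordingly. The helper below is a self-contained, simplified illustration of that anchor-based calculation; it is not the MediaClock code used by the real renderer, and all names in it are hypothetical.

#include <stdint.h>

// Hypothetical anchor: "media position anchorMediaUs was playing at real time anchorRealUs".
struct ClockAnchor {
    int64_t anchorMediaUs;   // media timestamp at the anchor point (us)
    int64_t anchorRealUs;    // system time when that timestamp was being played (us)
    float   playbackRate;    // e.g. 1.0f for normal speed
};

// How long to wait before rendering a frame with timestamp frameMediaUs.
// A negative result means the frame is already late and should be rendered
// (or dropped) immediately.
int64_t delayUntilRenderUs(const ClockAnchor &clock,
                           int64_t frameMediaUs,
                           int64_t nowRealUs) {
    // Extrapolate the real time at which this frame is due from the anchor.
    int64_t dueRealUs = clock.anchorRealUs +
            (int64_t)((frameMediaUs - clock.anchorMediaUs) / clock.playbackRate);
    return dueRealUs - nowRealUs;
}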
void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        // ... (code omitted)
        case kWhatRendererNotify:
        // Handle messages sent by the Renderer
        {
            int32_t requesterGeneration = mRendererGeneration - 1;
            CHECK(msg->findInt32("generation", &requesterGeneration));
            if (requesterGeneration != mRendererGeneration) {
                ALOGV("got message from old renderer, generation(%d:%d)",
                        requesterGeneration, mRendererGeneration);
                return;
            }

            int32_t what;
            CHECK(msg->findInt32("what", &what));
            if (what == Renderer::kWhatEOS) {
                // ... (code omitted)
            } else if (what == Renderer::kWhatFlushComplete) {
                // ... (code omitted)
            } else if (what == Renderer::kWhatVideoRenderingStart) {
                // kWhatVideoRenderingStart is handled here
                notifyListener(MEDIA_INFO, MEDIA_INFO_RENDERING_START, 0);
            } else if (what == Renderer::kWhatMediaRenderingStart) {
                ALOGV("media rendering started");
                notifyListener(MEDIA_STARTED, 0, 0);
            } else if (!mPaused && what == Renderer::kWhatVideoPrerollComplete) {
                // If NuPlayer is paused too, don't resume renderer. The pause may be called by
                // client, wait for client to resume NuPlayer
                ALOGI("NOTE: Video preroll complete.. resume renderer..");
                mRenderer->resume();
            } else if (what == Renderer::kWhatAudioTearDown) {
                // ... (code omitted)
            }
            break;
        }
    }
}
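One detail worth calling out in the snippets above is the generation counter: whenever NuPlayer (re)creates a decoder or renderer it increments a generation number and stamps it into that component's notify message, so late messages from a component that has since been torn down are simply dropped. Below is a minimal stand-alone sketch of the idea (MyController and kWhatWorkerNotify are made-up names, not NuPlayer classes).

// Minimal illustration of the "generation" guard used throughout NuPlayer.
struct MyController : public AHandler {
    enum { kWhatWorkerNotify = 'wkNt' };

    // Called whenever a new worker (decoder, renderer, ...) is created.
    sp<AMessage> makeWorkerNotify() {
        ++mWorkerGeneration;   // a new worker invalidates messages from the old one
        sp<AMessage> notify = new AMessage(kWhatWorkerNotify, this);
        notify->setInt32("generation", mWorkerGeneration);
        return notify;
    }

protected:
    virtual void onMessageReceived(const sp<AMessage> &msg) {
        if (msg->what() == kWhatWorkerNotify) {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mWorkerGeneration) {
                return;        // stale notification from an old worker: drop it
            }
            // ... handle the current worker's notification ...
        }
    }

private:
    int32_t mWorkerGeneration = 0;
};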