MediaPlayer Local Video Playback Source Code Walkthrough [2]: setSurface and prepareAsync

1. setSurface

// Set the surface on which the video will be rendered
mediaPlayer.setSurface(new Surface(textureView.getSurfaceTexture()));
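
For context, the app normally waits for the TextureView's SurfaceTexture to become available before calling setSurface. A minimal app-side sketch (the view and player names are illustrative):

// App-side sketch (hypothetical names): hand a Surface to MediaPlayer once the
// SurfaceTexture is ready.
textureView.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() {
    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture st, int width, int height) {
        mediaPlayer.setSurface(new Surface(st));
    }
    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture st, int width, int height) {}
    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture st) { return true; }
    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture st) {}
});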

  • frameworks/base/media/java/android/media/MediaPlayer.java
    public void setSurface(Surface surface) {
        if (mScreenOnWhilePlaying && surface != null) {
            Log.w(TAG, "setScreenOnWhilePlaying(true) is ineffective for Surface");
        }
        mSurfaceHolder = null;

// Call into the native layer
        _setVideoSurface(surface);
        updateSurfaceScreenOn();
    }

-----------------
    private native void _setVideoSurface(Surface surface);
  • frameworks/base/media/jni/android_media_MediaPlayer.cpp
static const JNINativeMethod gMethods[] = {
    {
        "nativeSetDataSource",
        "(Landroid/os/IBinder;Ljava/lang/String;[Ljava/lang/String;"
        "[Ljava/lang/String;)V",
        (void *)android_media_MediaPlayer_setDataSourceAndHeaders
    },

    {"_setVideoSurface",    "(Landroid/view/Surface;)V",        (void *)android_media_MediaPlayer_setVideoSurface},
    {"_prepare",            "()V",                              (void *)android_media_MediaPlayer_prepare},
    {"prepareAsync",        "()V",                              (void *)android_media_MediaPlayer_prepareAsync},
    {"_start",              "()V",                              (void *)android_media_MediaPlayer_start},
    {"_stop",               "()V",                              (void *)android_media_MediaPlayer_stop},

----------------
static void
android_media_MediaPlayer_setVideoSurface(JNIEnv *env, jobject thiz, jobject jsurface)
{
    setVideoSurface(env, thiz, jsurface, true /* mediaPlayerMustBeAlive */);
}

--------------
static void
setVideoSurface(JNIEnv *env, jobject thiz, jobject jsurface, jboolean mediaPlayerMustBeAlive)
{

// Get the native MediaPlayer object
    sp<MediaPlayer> mp = getMediaPlayer(env, thiz);
    if (mp == NULL) {
        if (mediaPlayerMustBeAlive) {
            jniThrowException(env, "java/lang/IllegalStateException", NULL);
        }
        return;
    }

    decVideoSurfaceRef(env, thiz);


    sp<IGraphicBufferProducer> new_st;
    if (jsurface) {
// Obtain the native Surface object from the Java surface
        sp<Surface> surface(android_view_Surface_getSurface(env, jsurface));
        if (surface != NULL) {

// Get the IGraphicBufferProducer from the new Surface
            new_st = surface->getIGraphicBufferProducer();
            if (new_st == NULL) {
                jniThrowException(env, "java/lang/IllegalArgumentException",
                    "The surface does not have a binding SurfaceTexture!");
                return;
            }
            new_st->incStrong((void*)decVideoSurfaceRef);
        } else {
            jniThrowException(env, "java/lang/IllegalArgumentException",
                    "The surface has been released");
            return;
        }
    }

    env->SetLongField(thiz, fields.surface_texture, (jlong)new_st.get());

    // This will fail if the media player has not been initialized yet. This
    // can be the case if setDisplay() on MediaPlayer.java has been called
    // before setDataSource(). The redundant call to setVideoSurfaceTexture()
    // in prepare/prepareAsync covers for this case.

// Call into mediaplayer.cpp (MediaPlayer::setVideoSurfaceTexture)
    mp->setVideoSurfaceTexture(new_st);
}

  • frameworks/av/media/libmedia/mediaplayer.cpp
// In part 1, setDataSource called attachNewPlayer, which set the mPlayer object.
// mPlayer is the client-side Bp proxy of the MediaPlayerService::Client object.

status_t MediaPlayer::setVideoSurfaceTexture(
        const sp<IGraphicBufferProducer>& bufferProducer)
{
    ALOGV("setVideoSurfaceTexture");
    Mutex::Autolock _l(mLock);
    if (mPlayer == 0) return NO_INIT;
    return mPlayer->setVideoSurfaceTexture(bufferProducer);
}

This invokes the method on the Client proxy, i.e. the BpMediaPlayer proxy class:

  • frameworks/av/media/libmedia/IMediaPlayer.cpp
class BpMediaPlayer: public BpInterface<IMediaPlayer>
{
public:
    explicit BpMediaPlayer(const sp<IBinder>& impl)
        : BpInterface<IMediaPlayer>(impl)
    {
    }

    status_t setVideoSurfaceTexture(const sp<IGraphicBufferProducer>& bufferProducer)
    {
        Parcel data, reply;
        data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
        sp<IBinder> b(IInterface::asBinder(bufferProducer));
        data.writeStrongBinder(b);

// Issue a Binder transaction with code SET_VIDEO_SURFACETEXTURE, carrying the parameters,
// then wait for the reply status code
        remote()->transact(SET_VIDEO_SURFACETEXTURE, data, &reply);

// Read the status code that the Bn (server) side writes after it finishes
        return reply.readInt32();
    }

----------------
// The server (Bn) side receives the transaction in BnMediaPlayer::onTransact:
status_t BnMediaPlayer::onTransact(
    uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
    switch (code) {

        case SET_VIDEO_SURFACETEXTURE: {
            CHECK_INTERFACE(IMediaPlayer, data, reply);
            sp<IGraphicBufferProducer> bufferProducer =
                    interface_cast<IGraphicBufferProducer>(data.readStrongBinder());

// Call setVideoSurfaceTexture on the BnMediaPlayer subclass; the implementation is MediaPlayerService::Client
            reply->writeInt32(setVideoSurfaceTexture(bufferProducer));
            return NO_ERROR;
        } break;
  • frameworks/av/media/libmediaplayerservice/MediaPlayerService.cpp
        sp<MediaPlayerBase>     getPlayer() const { Mutex::Autolock lock(mLock); return mPlayer; }


status_t MediaPlayerService::Client::setVideoSurfaceTexture(
        const sp<IGraphicBufferProducer>& bufferProducer)
{
    ALOGV("[%d] setVideoSurfaceTexture(%p)", mConnId, bufferProducer.get());

// Get the player (getPlayer() is shown in the header excerpt above)
// setDataSource_post() passed in the p object earlier; for NuPlayer it is a NuPlayerDriver object
    sp<MediaPlayerBase> p = getPlayer();
    if (p == 0) return UNKNOWN_ERROR;

    sp<IBinder> binder(IInterface::asBinder(bufferProducer));
    if (mConnectedWindowBinder == binder) {
        return OK;
    }

    sp<ANativeWindow> anw;
    if (bufferProducer != NULL) {

// Wrap the buffer producer in a new Surface object
        anw = new Surface(bufferProducer, true /* controlledByApp */);

// Connect to the new native window
        status_t err = nativeWindowConnect(anw.get(), "setVideoSurfaceTexture");

        if (err != OK) {
            ALOGE("setVideoSurfaceTexture failed: %d", err);
            // Note that we must do the reset before disconnecting from the ANW.
            // Otherwise queue/dequeue calls could be made on the disconnected
            // ANW, which may result in errors.
            reset();

            Mutex::Autolock lock(mLock);
            disconnectNativeWindow_l();

            return err;
        }
    }

    // Note that we must set the player's new GraphicBufferProducer before
    // disconnecting the old one.  Otherwise queue/dequeue calls could be made
    // on the disconnected ANW, which may result in errors.

// Call into the NuPlayerDriver object
    status_t err = p->setVideoSurfaceTexture(bufferProducer);

    mLock.lock();
    disconnectNativeWindow_l();

    if (err == OK) {
// Cache the current Surface and its binder
        mConnectedWindow = anw;
        mConnectedWindowBinder = binder;
        mLock.unlock();
    } else {
        mLock.unlock();
        status_t err = nativeWindowDisconnect(
                anw.get(), "disconnectNativeWindow");

        if (err != OK) {
            ALOGW("nativeWindowDisconnect returned an error: %s (%d)",
                    strerror(-err), err);
        }
    }

    return err;
}

 frameworks/av/media/libstagefright/SurfaceUtils.cpp

Within it, let's look at nativeWindowConnect:


status_t nativeWindowConnect(ANativeWindow *surface, const char *reason) {
    ALOGD("connecting to surface %p, reason %s", surface, reason);

    // NATIVE_WINDOW_API_MEDIA indicates that the data rendered to this native window is video
    // decoded through stagefright. It is one value of an enum; other producers of rendering
    // data include OpenGL ES, the CPU, and the Camera.

    status_t err = native_window_api_connect(surface, NATIVE_WINDOW_API_MEDIA);
    ALOGE_IF(err != OK, "Failed to connect to surface %p, err %d", surface, err);

    return err;
}

  • frameworks/av/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
status_t NuPlayerDriver::setVideoSurfaceTexture(
        const sp<IGraphicBufferProducer> &bufferProducer) {
    ALOGV("setVideoSurfaceTexture(%p)", this);
    Mutex::Autolock autoLock(mLock);

    if (mSetSurfaceInProgress) {
        return INVALID_OPERATION;
    }

    switch (mState) {
        case STATE_SET_DATASOURCE_PENDING:
        case STATE_RESET_IN_PROGRESS:
            return INVALID_OPERATION;

        default:
            break;
    }
// Mark the surface change as in progress
    mSetSurfaceInProgress = true;

// mPlayer was initialized in the constructor: mPlayer(new NuPlayer(pid, mMediaClock))
// Asynchronously call NuPlayer::setVideoSurfaceTextureAsync
    mPlayer->setVideoSurfaceTextureAsync(bufferProducer);

// Block here until the asynchronous call above completes:
// wait for NuPlayer's result, i.e. for notifySetSurfaceComplete() to be called,
// before leaving the loop
    while (mSetSurfaceInProgress) {
        mCondition.wait(mLock);
    }

    return OK;
}
  • frameworks/av/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
void NuPlayer::setVideoSurfaceTextureAsync(
        const sp<IGraphicBufferProducer> &bufferProducer) {

// The message posted is kWhatSetVideoSurface
    sp<AMessage> msg = new AMessage(kWhatSetVideoSurface, this);

    if (bufferProducer == NULL) {
        msg->setObject("surface", NULL);
    } else {
        msg->setObject("surface", new Surface(bufferProducer, true /* controlledByApp */));
    }

// Unlike a Java-layer Handler, post() delivers the message to a native ALooper, which runs on its own thread
    msg->post();
}
// NuPlayer extends AHandler, so the message is handled in onMessageReceived()

void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {

        case kWhatSetVideoSurface:
        {

            sp<RefBase> obj;
            CHECK(msg->findObject("surface", &obj));
            sp<Surface> surface = static_cast<Surface *>(obj.get());

            ALOGD("onSetVideoSurface(%p, %s video decoder)",
                    surface.get(),
                    (mSource != NULL && mStarted && mSource->getFormat(false /* audio */) != NULL
                            && mVideoDecoder != NULL) ? "have" : "no");

            // Need to check mStarted before calling mSource->getFormat because NuPlayer might
            // be in preparing state and it could take long time.
            // When mStarted is true, mSource must have been set.

// Take this branch if: the source mSource is NULL, or mStarted is false, or getFormat() returns no video format,
// or the video decoder exists and setting the surface on it succeeds (the case where the surface is changed while already playing)
            if (mSource == NULL || !mStarted || mSource->getFormat(false /* audio */) == NULL
                    // NOTE: mVideoDecoder's mSurface is always non-null
                    || (mVideoDecoder != NULL && mVideoDecoder->setVideoSurface(surface) == OK)) {
                performSetSurface(surface);
                break;
            }

            mDeferredActions.push_back(
                    new FlushDecoderAction(
                            (obj != NULL ? FLUSH_CMD_FLUSH : FLUSH_CMD_NONE) /* audio */,
                                           FLUSH_CMD_SHUTDOWN /* video */));

            mDeferredActions.push_back(new SetSurfaceAction(surface));

            if (obj != NULL) {
                if (mStarted) {
                    // Issue a seek to refresh the video screen only if started otherwise
                    // the extractor may not yet be started and will assert.
                    // If the video decoder is not set (perhaps audio only in this case)
                    // do not perform a seek as it is not needed.
                    int64_t currentPositionUs = 0;
                    if (getCurrentPosition(&currentPositionUs) == OK) {
                        mDeferredActions.push_back(
                                new SeekAction(currentPositionUs,
                                        MediaPlayerSeekMode::SEEK_PREVIOUS_SYNC /* mode */));
                    }
                }

                // If there is a new surface texture, instantiate decoders
                // again if possible.
                mDeferredActions.push_back(
                        new SimpleAction(&NuPlayer::performScanSources));

                // After a flush without shutdown, decoder is paused.
                // Don't resume it until source seek is done, otherwise it could
                // start pulling stale data too soon.
                mDeferredActions.push_back(
                        new ResumeDecoderAction(false /* needNotify */));
            }

            processDeferredActions();
            break;
        }

----------------
void NuPlayer::performSetSurface(const sp<Surface> &surface) {
    ALOGV("performSetSurface");

    mSurface = surface;

// Set the video scaling mode: video frame buffers are scaled to match the native window size
    // XXX - ignore error from setVideoScalingMode for now
    setVideoScalingMode(mVideoScalingMode);

    if (mDriver != NULL) {
        sp<NuPlayerDriver> driver = mDriver.promote();
        if (driver != NULL) {
// As analyzed above, notify NuPlayerDriver that the asynchronous surface setup is complete
            driver->notifySetSurfaceComplete();
        }
    }
}

----------------
// Set the video scaling mode: video frame buffers are scaled to match the native window size

status_t NuPlayer::setVideoScalingMode(int32_t mode) {
    mVideoScalingMode = mode;
    if (mSurface != NULL) {
        status_t ret = native_window_set_scaling_mode(mSurface.get(), mVideoScalingMode);
        if (ret != OK) {
            ALOGE("Failed to set scaling mode (%d): %s",
                -ret, strerror(-ret));
            return ret;
        }
    }
    return OK;
}
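
For reference, the scaling mode also has an app-level counterpart in the public API; a minimal sketch:

// App-side sketch: choose how decoded frames are scaled into the view.
mediaPlayer.setVideoScalingMode(MediaPlayer.VIDEO_SCALING_MODE_SCALE_TO_FIT);
// or MediaPlayer.VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING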

2. Asynchronous prepareAsync

MediaPlayer offers two prepare flows: synchronous (prepare) and asynchronous (prepareAsync). The analysis below covers the asynchronous one.

From the earlier setDataSource and setSurface chapters, the call path from the Java layer down to the native layer is:

MediaPlayer.java  --->  android_media_MediaPlayer.cpp   ---> mediaplayer.cpp   ---> MediaPlayerService::Client  ---> NuPlayerDriver.cpp  ---> NuPlayer.cpp ---> GenericSource.cpp
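
As a reminder, the app-side call sequence that drives this path looks roughly like the sketch below (file path and view are illustrative; checked-exception handling omitted):

// App-side sketch (hypothetical path): the calls that trigger the flow analyzed below.
MediaPlayer mediaPlayer = new MediaPlayer();
mediaPlayer.setDataSource("/sdcard/Movies/test.mp4");                 // part 1 of this series
mediaPlayer.setSurface(new Surface(textureView.getSurfaceTexture())); // section 1 above
mediaPlayer.setOnPreparedListener(mp -> mp.start());                  // invoked on MEDIA_PREPARED
mediaPlayer.prepareAsync();                                           // analyzed below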

  • frameworks/base/media/java/android/media/MediaPlayer.java
// Calls straight into the native method
    public native void prepareAsync() throws IllegalStateException;
  • frameworks/base/media/jni/android_media_MediaPlayer.cpp
static const JNINativeMethod gMethods[] = {
    {
        "nativeSetDataSource",
        "(Landroid/os/IBinder;Ljava/lang/String;[Ljava/lang/String;"
        "[Ljava/lang/String;)V",
        (void *)android_media_MediaPlayer_setDataSourceAndHeaders
    },


    {"prepareAsync",        "()V",                              (void *)android_media_MediaPlayer_prepareAsync},

-----------------
static void
android_media_MediaPlayer_prepareAsync(JNIEnv *env, jobject thiz)
{
    sp<MediaPlayer> mp = getMediaPlayer(env, thiz);
    if (mp == NULL ) {
        jniThrowException(env, "java/lang/IllegalStateException", NULL);
        return;
    }

    // Handle the case where the display surface was set before the mp was
    // initialized. We try again to make it stick.

// Re-apply the surface via setVideoSurfaceTexture (in case it was set before the player was initialized)
    sp<IGraphicBufferProducer> st = getVideoSurfaceTexture(env, thiz);
    mp->setVideoSurfaceTexture(st);

// Call MediaPlayer::prepareAsync in mediaplayer.cpp
    process_media_player_call( env, thiz, mp->prepareAsync(), "java/io/IOException", "Prepare Async failed." );
}
  • frameworks/av/media/libmedia/mediaplayer.cpp
status_t MediaPlayer::prepareAsync()
{
    ALOGV("prepareAsync");
    Mutex::Autolock _l(mLock);
    return prepareAsync_l();
}

-------------

status_t MediaPlayer::prepareAsync_l()
{

// During setDataSource, attachNewPlayer set mCurrentState to MEDIA_PLAYER_INITIALIZED
    if ( (mPlayer != 0) && ( mCurrentState & (MEDIA_PLAYER_INITIALIZED | MEDIA_PLAYER_STOPPED) ) ) {
        if (mAudioAttributesParcel != NULL) {
            mPlayer->setParameter(KEY_PARAMETER_AUDIO_ATTRIBUTES, *mAudioAttributesParcel);
        } else {

// Set the audio stream type (music or another type; the default is music)
            mPlayer->setAudioStreamType(mStreamType);
        }
        mCurrentState = MEDIA_PLAYER_PREPARING;

        return mPlayer->prepareAsync();
    }
    ALOGE("prepareAsync called in state %d, mPlayer(%p)", mCurrentState, mPlayer.get());
    return INVALID_OPERATION;
}
  • frameworks/av/media/libmediaplayerservice/MediaPlayerService.cpp
status_t MediaPlayerService::Client::prepareAsync()
{
    ALOGV("[%d] prepareAsync", mConnId);
    sp<MediaPlayerBase> p = getPlayer();
    if (p == 0) return UNKNOWN_ERROR;
    status_t ret = p->prepareAsync();
#if CALLBACK_ANTAGONIZER
    ALOGD("start Antagonizer");
    if (ret == NO_ERROR) mAntagonizer->start();
#endif
    return ret;
}
  • frameworks/av/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp

status_t NuPlayerDriver::prepareAsync() {
    ALOGV("prepareAsync(%p)", this);
    Mutex::Autolock autoLock(mLock);

// After notifySetDataSourceCompleted(), mState was set to STATE_UNPREPARED
    switch (mState) {
        case STATE_UNPREPARED:
            mState = STATE_PREPARING;
            mIsAsyncPrepare = true;
// Call NuPlayer::prepareAsync
            mPlayer->prepareAsync();
            return OK;
        case STATE_STOPPED:
            // this is really just paused. handle as seek to start
            mAtEOS = false;
            mState = STATE_STOPPED_AND_PREPARING;
            mIsAsyncPrepare = true;

// In the stopped state, handle prepare as a seek back to the start
            mPlayer->seekToAsync(0, MediaPlayerSeekMode::SEEK_PREVIOUS_SYNC /* mode */,
                    true /* needNotify */);
            return OK;
        default:
            return INVALID_OPERATION;
    };
}

NuPlayer::prepareAsync:

  • frameworks/av/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
void NuPlayer::prepareAsync() {
    ALOGV("prepareAsync");

// Uses the AMessage/ALooper message mechanism
    (new AMessage(kWhatPrepare, this))->post();
}

---------------
// mSource was created in setDataSourceAsync: new GenericSource(...)
        case kWhatPrepare:
        {
            ALOGV("onMessageReceived kWhatPrepare");

// So this calls into GenericSource
            mSource->prepareAsync();
            break;
        }
  • frameworks/av/media/libmediaplayerservice/nuplayer/GenericSource.cpp
Note that ALooper::start() below is called with its default argument runOnCallingThread = false.

void NuPlayer::GenericSource::prepareAsync() {
    Mutex::Autolock _l(mLock);
    ALOGV("prepareAsync: (looper: %d)", (mLooper != NULL));

    if (mLooper == NULL) {
        mLooper = new ALooper;
        mLooper->setName("generic");

// start() uses the default arguments (runOnCallingThread = false),
// so a new looper thread is created
        mLooper->start();

        mLooper->registerHandler(this);
    }

    sp<AMessage> msg = new AMessage(kWhatPrepareAsync, this);
    msg->post();
}

The kWhatPrepareAsync message is then handled in onMessageReceived:

void NuPlayer::GenericSource::onMessageReceived(const sp<AMessage> &msg) {
    Mutex::Autolock _l(mLock);
    switch (msg->what()) {
      case kWhatPrepareAsync:
      {
          onPrepareAsync();
          break;
      }

---------------
void NuPlayer::GenericSource::onPrepareAsync() {
    mDisconnectLock.lock();
    ALOGV("onPrepareAsync: mDataSource: %d", (mDataSource != NULL));

    // delayed data source creation
    if (mDataSource == NULL) {
        // set to false first, if the extractor
        // comes back as secure, set it to true then.
        mIsSecure = false;

// Handle network (http/https) video sources
        if (!mUri.empty()) {
            const char* uri = mUri.c_str();
            String8 contentType;

            if (!strncasecmp("http://", uri, 7) || !strncasecmp("https://", uri, 8)) {
                sp<DataSource> httpSource;
                mDisconnectLock.unlock();
                httpSource = PlayerServiceDataSourceFactory::getInstance()
                        ->CreateMediaHTTP(mHTTPService);
                if (httpSource == NULL) {
                    ALOGE("Failed to create http source!");
                    notifyPreparedAndCleanup(UNKNOWN_ERROR);
                    return;
                }
                mDisconnectLock.lock();

                if (!mDisconnected) {
                    mHttpSource = httpSource;
                }
            }

            mLock.unlock();
            mDisconnectLock.unlock();
            // This might take long time if connection has some issue.
            sp<DataSource> dataSource = PlayerServiceDataSourceFactory::getInstance()
                    ->CreateFromURI(mHTTPService, uri, &mUriHeaders, &contentType,
                            static_cast<HTTPBase *>(mHttpSource.get()));
            mDisconnectLock.lock();
            mLock.lock();
            if (!mDisconnected) {
                mDataSource = dataSource;
            }

// Handle local (file descriptor) video sources
        } else {
            if (property_get_bool("media.stagefright.extractremote", true) &&
                    !PlayerServiceFileSource::requiresDrm(
                            mFd.get(), mOffset, mLength, nullptr /* mime */)) {

// Get the remote extractor service, which demuxes the container into audio and video streams
                sp<IBinder> binder =
                        defaultServiceManager()->getService(String16("media.extractor"));
                if (binder != nullptr) {
                    ALOGD("FileSource remote");

// This yields the BpMediaExtractorService proxy, cast here to its IMediaExtractorService interface
                    sp<IMediaExtractorService> mediaExService(
                            interface_cast<IMediaExtractorService>(binder));
                    sp<IDataSource> source;


// 1) Ask the extractor service to create an IDataSource for this fd
                    mediaExService->makeIDataSource(base::unique_fd(dup(mFd.get())), mOffset, mLength, &source);
                    ALOGV("IDataSource(FileSource): %p %d %lld %lld",
                            source.get(), mFd.get(), (long long)mOffset, (long long)mLength);

// 2) If source is obtained successfully, wrap it back into the client-side DataSource proxy type
                    if (source.get() != nullptr) {
                        mDataSource = CreateDataSourceFromIDataSource(source);
                    } else {
                        ALOGW("extractor service cannot make data source");
                    }
                } else {
                    ALOGW("extractor service not running");
                }
            }
            if (mDataSource == nullptr) {
                ALOGD("FileSource local");
                mDataSource = new PlayerServiceFileSource(dup(mFd.get()), mOffset, mLength);
            }
        }

        if (mDataSource == NULL) {
            ALOGE("Failed to create data source!");
            mDisconnectLock.unlock();
// 3) Report the error back to the upper layer
            notifyPreparedAndCleanup(UNKNOWN_ERROR);
            return;
        }
    }

    if (mDataSource->flags() & DataSource::kIsCachingDataSource) {
        mCachedSource = static_cast<NuCachedSource2 *>(mDataSource.get());
    }

    mDisconnectLock.unlock();

// Determine whether this is a cached streaming source; if so, enough data must be buffered before prepared is reported to the app
    // For cached streaming cases, we need to wait for enough
    // buffering before reporting prepared.
    mIsStreaming = (mCachedSource != NULL);

// 4) Create and initialize the media extractor (the concrete demuxer module) from the DataSource: initFromDataSource
    // init extractor from data source
    status_t err = initFromDataSource();

    if (err != OK) {
        ALOGE("Failed to init from data source!");
        notifyPreparedAndCleanup(err);
        return;
    }

    if (mVideoTrack.mSource != NULL) {

// Get the metadata of the video track
        sp<MetaData> meta = getFormatMeta_l(false /* audio */);
        sp<AMessage> msg = new AMessage;

// Convert the MetaData object into an AMessage
        err = convertMetaDataToMessage(meta, &msg);
        if(err != OK) {
            notifyPreparedAndCleanup(err);
            return;
        }
        notifyVideoSizeChanged(msg);
    }

// Notify flag changes (i.e. whether the source/demuxer supports pause and seek)
    notifyFlagsChanged(
            // FLAG_SECURE will be known if/when prepareDrm is called by the app
            // FLAG_PROTECTED will be known if/when prepareDrm is called by the app
            FLAG_CAN_PAUSE |
            FLAG_CAN_SEEK_BACKWARD |
            FLAG_CAN_SEEK_FORWARD |
            FLAG_CAN_SEEK);

// 5) Finish the asynchronous prepare flow
    finishPrepareAsync();

    ALOGV("onPrepareAsync: Done");
}

1) mediaExService obtains the IDataSource object

mediaExService->makeIDataSource(base::unique_fd(dup(mFd.get())), mOffset, mLength, &source)

  • frameworks/av/services/mediaextractor/MediaExtractorService.cpp
::android::binder::Status MediaExtractorService::makeIDataSource(
        base::unique_fd fd,
        int64_t offset,
        int64_t length,
        ::android::sp<::android::IDataSource>* _aidl_return) {

// Create a file-backed data source and run initCheck()
    sp<DataSource> source = DataSourceFactory::getInstance()->CreateFromFd(fd.release(), offset, length);

// Wrap the DataSource object into an IDataSource
    *_aidl_return = CreateIDataSourceFromDataSource(source);
    return binder::Status::ok();
}

frameworks/av/media/libdatasource/DataSourceFactory.cpp

sp<DataSource> DataSourceFactory::CreateFromFd(int fd, int64_t offset, int64_t length) {

// FileSource derives from DataSource
    sp<FileSource> source = new FileSource(fd, offset, length);
    return source->initCheck() != OK ? nullptr : source;
}

-----------
frameworks/av/media/libdatasource/FileSource.cpp

// Returns OK as long as the fd is valid
status_t FileSource::initCheck() const {
    return mFd >= 0 ? OK : NO_INIT;
}

frameworks/av/media/libdatasource/FileSource.cpp

The FileSource constructor:

FileSource::FileSource(int fd, int64_t offset, int64_t length)
    : mFd(fd),
      mOffset(offset),
      mLength(length),
      mName("<null>") {
    ALOGV("fd=%d (%s), offset=%lld, length=%lld",
            fd, nameForFd(fd).c_str(), (long long) offset, (long long) length);

    if (mOffset < 0) {
        mOffset = 0;
    }
    if (mLength < 0) {
        mLength = 0;
    }
    if (mLength > INT64_MAX - mOffset) {
        mLength = INT64_MAX - mOffset;
    }
    struct stat s;
    if (fstat(fd, &s) == 0) {
        if (mOffset > s.st_size) {
            mOffset = s.st_size;
            mLength = 0;
        }
        if (mOffset + mLength > s.st_size) {
            mLength = s.st_size - mOffset;
        }
    }
    if (mOffset != offset || mLength != length) {
        ALOGW("offset/length adjusted from %lld/%lld to %lld/%lld",
                (long long) offset, (long long) length,
                (long long) mOffset, (long long) mLength);
    }

// nameForFd is a helper that returns a readable name for the fd
    mName = String8::format(
            "FileSource(fd(%s), %lld, %lld)",
            nameForFd(fd).c_str(),
            (long long) mOffset,
            (long long) mLength);

}

------------
// For example, readAt() calls eventually land here
ssize_t FileSource::readAt(off64_t offset, void *data, size_t size) {
    if (mFd < 0) {
        return NO_INIT;
    }

    Mutex::Autolock autoLock(mLock);
    if (mLength >= 0) {
        if (offset < 0) {
            return UNKNOWN_ERROR;
        }
        if (offset >= mLength) {
            return 0;  // read beyond EOF.
        }
        uint64_t numAvailable = mLength - offset;
        if ((uint64_t)size > numAvailable) {
            size = numAvailable;
        }
    }
    return readAt_l(offset, data, size);
}

fd: the kernel accesses files through file descriptors. A file descriptor is a non-negative integer; in practice it is an index into the per-process table of open files maintained by the kernel. When a program opens an existing file or creates a new one, the kernel returns a file descriptor to the process, and subsequent reads and writes name the target file by that descriptor.
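
For context, the mFd/mOffset/mLength that FileSource works with typically come from the app-side setDataSource(fd, offset, length) overload; a minimal sketch (asset name is illustrative; IOException handling omitted):

// App-side sketch (hypothetical asset): pass a file descriptor plus offset/length.
AssetFileDescriptor afd = context.getAssets().openFd("test.mp4");
mediaPlayer.setDataSource(afd.getFileDescriptor(),
        afd.getStartOffset(), afd.getLength());
afd.close();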

Next, CreateIDataSourceFromDataSource:

  • frameworks/av/media/libstagefright/InterfaceUtils.cpp

Here the sp<DataSource> &source argument is the FileSource created above.

sp<IDataSource> CreateIDataSourceFromDataSource(const sp<DataSource> &source) {
    if (source == nullptr) {
        return nullptr;
    }
    return RemoteDataSource::wrap(source);
}

frameworks/av/media/libstagefright/include/media/stagefright/RemoteDataSource.h

// Originally in MediaExtractor.cpp
class RemoteDataSource : public BnDataSource {
public:
    static sp<IDataSource> wrap(const sp<DataSource> &source) {
        if (source.get() == nullptr) {
            return nullptr;
        }
        if (source->getIDataSource().get() != nullptr) {
            return source->getIDataSource();
        }
        return new RemoteDataSource(source);
    }

----------------
The constructor:
    explicit RemoteDataSource(const sp<DataSource> &source) {
        Mutex::Autolock lock(mLock);
        mSource = source;
        sp<MemoryDealer> memoryDealer = new MemoryDealer(kBufferSize, "RemoteDataSource");

// kBufferSize = 64 * 1024: allocate a 64 KB shared-memory buffer
        mMemory = memoryDealer->allocate(kBufferSize);
        if (mMemory.get() == nullptr) {
            ALOGE("Failed to allocate memory!");
        }
        mName = String8::format("RemoteDataSource(%s)", mSource->toString().string());
    }

So from the analysis above, the sp<IDataSource> named source is a RemoteDataSource.

2) When source is obtained successfully, wrap it back into the client-side DataSource proxy type

mDataSource = CreateDataSourceFromIDataSource(source)

This wraps the IDataSource (a Binder proxy) into the DataSource proxy type that the client needs.

  • frameworks/av/media/libstagefright/InterfaceUtils.cpp
sp<DataSource> CreateDataSourceFromIDataSource(const sp<IDataSource> &source) {
    if (source == nullptr) {
        return nullptr;
    }
    return new TinyCacheSource(new CallbackDataSource(source));
}

frameworks/av/media/libstagefright/CallbackDataSource.cpp

CallbackDataSource::CallbackDataSource(
    const sp<IDataSource>& binderDataSource)
    : mIDataSource(binderDataSource),
      mIsClosed(false) {
    // Set up the buffer to read into.
    mMemory = mIDataSource->getIMemory();
    mName = String8::format("CallbackDataSource(%d->%d, %s)",
            getpid(),
            IPCThreadState::self()->getCallingPid(),
            mIDataSource->toString().string());

}

frameworks/av/media/libstagefright/CallbackDataSource.cpp

TinyCacheSource::TinyCacheSource(const sp<DataSource>& source)
    : mSource(source), mCachedOffset(0), mCachedSize(0) {
    mName = String8::format("TinyCacheSource(%s)", mSource->toString().string());
}

Both CallbackDataSource and TinyCacheSource derive from DataSource:

frameworks/av/media/libmedia/include/CallbackDataSource.h

// A stagefright DataSource that wraps a binder IDataSource. It's a "Callback"
// DataSource because it calls back to the IDataSource for data.
class CallbackDataSource : public DataSource {}

frameworks/av/media/libmedia/include/CallbackDataSource.h

class TinyCacheSource : public DataSource {}

Layer by layer, the wrapping finally reaches the FileSource:

new TinyCacheSource(CallbackDataSource(RemoteDataSource(FileSource)))

// FileSource also derives from DataSource
class FileSource : public DataSource {

3) Report the error back to the upper layer

notifyPreparedAndCleanup(UNKNOWN_ERROR)

void NuPlayer::GenericSource::notifyPreparedAndCleanup(status_t err) {
    if (err != OK) {
        {
            Mutex::Autolock _l_d(mDisconnectLock);
            mDataSource.clear();
            mHttpSource.clear();
        }

        mCachedSource.clear();

        mBitrate = -1;
        mPrevBufferPercentage = -1;
        ++mPollBufferingGeneration;
    }

// Report the error (here UNKNOWN_ERROR) upward
    notifyPrepared(err);
}

--------------------
frameworks/av/media/libmediaplayerservice/nuplayer/GenericSource.h

// The GenericSource header includes NuPlayer.h
#include "NuPlayer.h"
  • frameworks/av/media/libmediaplayerservice/nuplayer/NuPlayer.cpp

The implementation lives in NuPlayer.cpp (NuPlayer::Source::notifyPrepared):

void NuPlayer::Source::notifyPrepared(status_t err) {
    ALOGV("Source::notifyPrepared %d", err);

// dupNotify() returns a copy of the notify AMessage
    sp<AMessage> notify = dupNotify();
    notify->setInt32("what", kWhatPrepared);
    notify->setInt32("err", err);

// Post the notification message
    notify->post();
}

The dupNotify() implementation:

  • frameworks/av/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
// NuPlayer::Source is itself an AHandler
struct NuPlayer::Source : public AHandler {

    sp<AMessage> dupNotify() const { return mNotify->dup(); }

private:
// mNotify is an AMessage
    sp<AMessage> mNotify;

// It is passed in through the constructor
    explicit Source(const sp<AMessage> &notify)
        : mNotify(notify) {
    }


----------------
// mNotify->dup() is implemented as follows:

frameworks/av/media/libstagefright/foundation/AMessage.cpp

sp<AMessage> AMessage::dup() const {
    sp<AMessage> msg = new AMessage(mWhat, mHandler.promote());
    msg->mItems = mItems;

The notify message was set up in setDataSourceAsync (see the data-source chapter):

  • frameworks/av/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
void NuPlayer::setDataSourceAsync(int fd, int64_t offset, int64_t length) {
 
// Create the kWhatSetDataSource message; NuPlayer itself (an AHandler subclass) receives and handles it
    sp<AMessage> msg = new AMessage(kWhatSetDataSource, this);
 
    sp<AMessage> notify = new AMessage(kWhatSourceNotify, this);
 
// Create the generic data source handler, GenericSource
    sp<GenericSource> source =
            new GenericSource(notify, mUIDValid, mUID, mMediaClock);

GenericSource derives from NuPlayer::Source:

  • frameworks/av/media/libmediaplayerservice/nuplayer/GenericSource.h
struct NuPlayer::GenericSource : public NuPlayer::Source,
                                 public MediaBufferObserver // Modular DRM
{
    GenericSource(const sp<AMessage> &notify, bool uidValid, uid_t uid,
                  const sp<MediaClock> &mediaClock);

So notify is sp<AMessage> notify = new AMessage(kWhatSourceNotify, this), which means the duplicated message carries kWhatSourceNotify and is delivered back to NuPlayer:

void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {

        case kWhatSourceNotify:
        {
            onSourceNotify(msg);
            break;
        }

----------
// It is ultimately handled in onSourceNotify

void NuPlayer::onSourceNotify(const sp<AMessage> &msg) {
    int32_t what;
    CHECK(msg->findInt32("what", &what));

    switch (what) {

        case Source::kWhatPrepared:
        {
            ALOGV("NuPlayer::onSourceNotify Source::kWhatPrepared source: %p", mSource.get());
            if (mSource == NULL) {
                // This is a stale notification from a source that was
                // asynchronously preparing when the client called reset().
                // We handled the reset, the source is gone.
                break;
            }

            int32_t err;
            CHECK(msg->findInt32("err", &err));

            if (err != OK) {
                // shut down potential secure codecs in case client never calls reset
                mDeferredActions.push_back(
                        new FlushDecoderAction(FLUSH_CMD_SHUTDOWN /* audio */,
                                               FLUSH_CMD_SHUTDOWN /* video */));
                processDeferredActions();
            } else {
                mPrepared = true;
            }

            sp<NuPlayerDriver> driver = mDriver.promote();
            if (driver != NULL) {
                // notify duration first, so that it's definitely set when
                // the app received the "prepare complete" callback.
                int64_t durationUs;
                if (mSource->getDuration(&durationUs) == OK) {
                    driver->notifyDuration(durationUs);
                }

// Call NuPlayerDriver::notifyPrepareCompleted
                driver->notifyPrepareCompleted(err);
            }

            break;
        }

NuPlayerDriver::notifyPrepareCompleted:

  • frameworks/av/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
void NuPlayerDriver::notifyPrepareCompleted(status_t err) {
    ALOGV("notifyPrepareCompleted %d", err);

    Mutex::Autolock autoLock(mLock);

    if (mState != STATE_PREPARING) {
        // We were preparing asynchronously when the client called
        // reset(), we sent a premature "prepared" notification and
        // then initiated the reset. This notification is stale.
        CHECK(mState == STATE_RESET_IN_PROGRESS || mState == STATE_IDLE);
        return;
    }

    CHECK_EQ(mState, STATE_PREPARING);

    mAsyncResult = err;

    if (err == OK) {
        // update state before notifying client, so that if client calls back into NuPlayerDriver
        // in response, NuPlayerDriver has the right state
        mState = STATE_PREPARED;
        if (mIsAsyncPrepare) {
            notifyListener_l(MEDIA_PREPARED);
        }
    } else {
        mState = STATE_UNPREPARED;

// Notify the observer (listener)
        if (mIsAsyncPrepare) {
            notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
        }
    }

    sp<MetaData> meta = mPlayer->getFileMeta();
    int32_t loop;
    if (meta != NULL
            && meta->findInt32(kKeyAutoLoop, &loop) && loop != 0) {
        mAutoLoop = true;
    }

    mCondition.broadcast();
}

Notifying the observer: notifyListener_l

void NuPlayerDriver::notifyListener_l(
        int msg, int ext1, int ext2, const Parcel *in) {
    ALOGV("notifyListener_l(%p), (%d, %d, %d, %d), loop setting(%d, %d)",
            this, msg, ext1, ext2, (in == NULL ? -1 : (int)in->dataSize()), mAutoLoop, mLooping);
    switch (msg) {

        case MEDIA_ERROR:
        {
            // when we have an error, add it to the analytics for this playback.
            // ext1 is our primary 'error type' value. Only add ext2 when non-zero.
            // [test against msg is due to fall through from previous switch value]
            if (msg == MEDIA_ERROR) {
                Mutex::Autolock autoLock(mMetricsLock);
                if (mMetricsItem != NULL) {
                    mMetricsItem->setInt32(kPlayerError, ext1);
                    if (ext2 != 0) {
                        mMetricsItem->setInt32(kPlayerErrorCode, ext2);
                    }
                    mMetricsItem->setCString(kPlayerErrorState, stateString(mState).c_str());
                }
            }
            mAtEOS = true;
            break;
        }

        default:
            break;
    }

    mLock.unlock();

// Send the event
    sendEvent(msg, ext1, ext2, in);
    mLock.lock();
}
  • frameworks/av/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
struct NuPlayer;

// The header shows that NuPlayerDriver derives from MediaPlayerInterface
struct NuPlayerDriver : public MediaPlayerInterface {
    explicit NuPlayerDriver(pid_t pid);
  • frameworks/av/media/libmediaplayerservice/include/MediaPlayerInterface.h

Let's look at the MediaPlayerInterface header:

// setNotifyCallback installs the observer (listener)
    void        setNotifyCallback(
            const sp<Listener> &listener) {
        Mutex::Autolock autoLock(mNotifyLock);
        mListener = listener;
    }


// sendEvent dispatches the event by calling listener->notify
    void        sendEvent(int msg, int ext1=0, int ext2=0,
                          const Parcel *obj=NULL) {
        sp<Listener> listener;
        {
            Mutex::Autolock autoLock(mNotifyLock);
            listener = mListener;
        }

        if (listener != NULL) {
            listener->notify(msg, ext1, ext2, obj);
        }
    }

So let's see where setNotifyCallback is called to install the observer that receives the callback.

  • frameworks/av/media/libmediaplayerservice/MediaPlayerFactory.cpp
// The observer is installed when the player is created
// The listener is passed in as a parameter
sp<MediaPlayerBase> MediaPlayerFactory::createPlayer(
        player_type playerType,
        const sp<MediaPlayerBase::Listener> &listener,
        pid_t pid) {
    sp<MediaPlayerBase> p;
    IFactory* factory;
    status_t init_result;
    Mutex::Autolock lock_(&sLock);

    if (sFactoryMap.indexOfKey(playerType) < 0) {
        ALOGE("Failed to create player object of type %d, no registered"
              " factory", playerType);
        return p;
    }

    factory = sFactoryMap.valueFor(playerType);
    CHECK(NULL != factory);
    p = factory->createPlayer(pid);

    if (p == NULL) {
        ALOGE("Failed to create player object of type %d, create failed",
               playerType);
        return p;
    }

    init_result = p->initCheck();
    if (init_result == NO_ERROR) {

// The observer is installed here
        p->setNotifyCallback(listener);
  • frameworks/av/media/libmediaplayerservice/MediaPlayerService.cpp
sp<MediaPlayerBase> MediaPlayerService::Client::createPlayer(player_type playerType)
{
    // determine if we have the right player type
    sp<MediaPlayerBase> p = getPlayer();
    if ((p != NULL) && (p->playerType() != playerType)) {
        ALOGV("delete player");
        p.clear();
    }
    if (p == NULL) {

// mListener is passed in as the observer
        p = MediaPlayerFactory::createPlayer(playerType, mListener,
            VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(mAttributionSource.pid)));
    }

---------------
MediaPlayerService::Client::Client(
        const sp<MediaPlayerService>& service, const AttributionSourceState& attributionSource,
        int32_t connId, const sp<IMediaPlayerClient>& client,
        audio_session_t audioSessionId)
        : mAttributionSource(attributionSource)
{
    ALOGV("Client(%d) constructor", connId);
    mConnId = connId;
    mService = service;
    mClient = client;
    mLoop = false;
    mStatus = NO_INIT;
    mAudioSessionId = audioSessionId;
    mRetransmitEndpointValid = false;
    mAudioAttributes = NULL;

// mListener is created in the Client constructor
    mListener = new Listener(this);
  • frameworks/av/media/libmediaplayerservice/MediaPlayerService.h
        class Listener : public MediaPlayerBase::Listener {
        public:
            Listener(const wp<Client> &client) : mClient(client) {}
            virtual ~Listener() {}

// The notify method...
            virtual void notify(int msg, int ext1, int ext2, const Parcel *obj) {
                sp<Client> client = mClient.promote();
                if (client != NULL) {
// ...forwards to Client::notify
                    client->notify(msg, ext1, ext2, obj);
                }
            }
        private:
            wp<Client> mClient;
        };
  • frameworks/av/media/libmediaplayerservice/MediaPlayerService.cpp
void MediaPlayerService::Client::notify(
        int msg, int ext1, int ext2, const Parcel *obj)
{
    sp<IMediaPlayerClient> c;
    sp<Client> nextClient;
    status_t errStartNext = NO_ERROR;
    {
        Mutex::Autolock l(mLock);

// c is assigned from mClient here
        c = mClient;
        if (msg == MEDIA_PLAYBACK_COMPLETE && mNextClient != NULL) {
            nextClient = mNextClient;

            if (mAudioOutput != NULL)
                mAudioOutput->switchToNextOutput();

            errStartNext = nextClient->start();
        }
    }

    if (nextClient != NULL) {
        sp<IMediaPlayerClient> nc;
        {
            Mutex::Autolock l(nextClient->mLock);
            nc = nextClient->mClient;
        }
        if (nc != NULL) {
            if (errStartNext == NO_ERROR) {
                nc->notify(MEDIA_INFO, MEDIA_INFO_STARTED_AS_NEXT, 0, obj);
            } else {
                nc->notify(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN , 0, obj);
                ALOGE("gapless:start playback for next track failed, err(%d)", errStartNext);
            }
        }
    }

    if (MEDIA_INFO == msg &&
        MEDIA_INFO_METADATA_UPDATE == ext1) {
        const media::Metadata::Type metadata_type = ext2;

        if(shouldDropMetadata(metadata_type)) {
            return;
        }

        // Update the list of metadata that have changed. getMetadata
        // also access mMetadataUpdated and clears it.
        addNewMetadataUpdate(metadata_type);
    }

// Call mClient's notify method
    if (c != NULL) {
        ALOGV("[%d] notify (%d, %d, %d)", mConnId, msg, ext1, ext2);
        c->notify(msg, ext1, ext2, obj);
    }
}

--------------
MediaPlayerService::Client::Client(
        const sp<MediaPlayerService>& service, const AttributionSourceState& attributionSource,
        int32_t connId, const sp<IMediaPlayerClient>& client,
        audio_session_t audioSessionId)
        : mAttributionSource(attributionSource)
{
    ALOGV("Client(%d) constructor", connId);
    mConnId = connId;
    mService = service;
// mClient is set in the Client constructor
    mClient = client;

----------------
sp<IMediaPlayer> MediaPlayerService::create(const sp<IMediaPlayerClient>& client,
        audio_session_t audioSessionId, const AttributionSourceState& attributionSource)
{
    int32_t connId = android_atomic_inc(&mNextConnId);
    // TODO b/182392769: use attribution source util
    AttributionSourceState verifiedAttributionSource = attributionSource;
    verifiedAttributionSource.pid = VALUE_OR_FATAL(
        legacy2aidl_pid_t_int32_t(IPCThreadState::self()->getCallingPid()));
    verifiedAttributionSource.uid = VALUE_OR_FATAL(
        legacy2aidl_uid_t_int32_t(IPCThreadState::self()->getCallingUid()));

// The Client is created here
    sp<Client> c = new Client(
            this, verifiedAttributionSource, connId, client, audioSessionId);

The detailed flow was shown in the setDataSource chapter; the client passed in ends up being a BpMediaPlayerClient proxy:

class BpMediaPlayerClient: public BpInterface<IMediaPlayerClient>
{
public:
    explicit BpMediaPlayerClient(const sp<IBinder>& impl)
        : BpInterface<IMediaPlayerClient>(impl)
    {
    }

    virtual void notify(int msg, int ext1, int ext2, const Parcel *obj)
    {
        Parcel data, reply;
        data.writeInterfaceToken(IMediaPlayerClient::getInterfaceDescriptor());
        data.writeInt32(msg);
        data.writeInt32(ext1);
        data.writeInt32(ext2);
        if (obj && obj->dataSize() > 0) {
            data.appendFrom(const_cast<Parcel *>(obj), 0, obj->dataSize());
        }
        remote()->transact(NOTIFY, data, &reply, IBinder::FLAG_ONEWAY);
    }
};

----------------
// MediaPlayer derives from BnMediaPlayerClient, so the notify lands here
class MediaPlayer : public BnMediaPlayerClient,
                    public virtual IMediaDeathNotifier
{

            void            notify(int msg, int ext1, int ext2, const Parcel *obj = NULL);

As analyzed previously, the notification is then delivered up to the Java layer.
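
On the app side these notifications surface as the public listener callbacks; a minimal sketch:

// App-side sketch: MEDIA_PREPARED and MEDIA_ERROR arrive as listener callbacks.
mediaPlayer.setOnPreparedListener(mp -> mp.start());
mediaPlayer.setOnErrorListener((mp, what, extra) -> {
    Log.e("Player", "error what=" + what + " extra=" + extra);
    return true;   // true: handled here, onCompletion will not be called
});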

4) Initialize the concrete demuxer module from the DataSource: initFromDataSource

  • frameworks/av/media/libmediaplayerservice/nuplayer/GenericSource.cpp
status_t NuPlayer::GenericSource::initFromDataSource() {
    sp<IMediaExtractor> extractor;
    sp<DataSource> dataSource;
    {
        Mutex::Autolock _l_d(mDisconnectLock);
        dataSource = mDataSource;
    }
    CHECK(dataSource != NULL);

    mLock.unlock();

// 4.1 Create the media extractor
    // This might take long time if data source is not reliable.
    extractor = MediaExtractorFactory::Create(dataSource, NULL);

    if (extractor == NULL) {
        ALOGE("initFromDataSource, cannot create extractor!");
        mLock.lock();
        return UNKNOWN_ERROR;
    }

// Ask the concrete extractor for the file-level metadata
    sp<MetaData> fileMeta = extractor->getMetaData();

// Get the number of tracks; there may be multiple audio and video streams
    size_t numtracks = extractor->countTracks();
    if (numtracks == 0) {
        ALOGE("initFromDataSource, source has no track!");
        mLock.lock();
        return UNKNOWN_ERROR;
    }

    mLock.lock();
    mFileMeta = fileMeta;
    if (mFileMeta != NULL) {
        int64_t duration;

// Read the media duration
        if (mFileMeta->findInt64(kKeyDuration, &duration)) {
            mDurationUs = duration;
        }
    }

// Total bitrate
    int32_t totalBitrate = 0;

    mMimes.clear();

    for (size_t i = 0; i < numtracks; ++i) {

// Get track i
        sp<IMediaSource> track = extractor->getTrack(i);
        if (track == NULL) {
            continue;
        }

// Get the metadata of track i
        sp<MetaData> meta = extractor->getTrackMetaData(i);
        if (meta == NULL) {
            ALOGE("no metadata for track %zu", i);
            return UNKNOWN_ERROR;
        }

        const char *mime;
        CHECK(meta->findCString(kKeyMIMEType, &mime));

        ALOGV("initFromDataSource track[%zu]: %s", i, mime);

        // Do the string compare immediately with "mime",
        // we can't assume "mime" would stay valid after another
        // extractor operation, some extractors might modify meta
        // during getTrack() and make it invalid.

// Audio track
        if (!strncasecmp(mime, "audio/", 6)) {
            if (mAudioTrack.mSource == NULL) {
                mAudioTrack.mIndex = i;
// Remember this track
                mAudioTrack.mSource = track;

// Create an AnotherPacketSource: it buffers the demuxed track data read from the source
// and provides the demuxed audio/video data to the decoders.

// 4.2 AnotherPacketSource: create the packet (source-data) handling object
                mAudioTrack.mPackets =
                    new AnotherPacketSource(mAudioTrack.mSource->getFormat());

                if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
                    mAudioIsVorbis = true;
                } else {
                    mAudioIsVorbis = false;
                }

                mMimes.add(String8(mime));
            }
        } else if (!strncasecmp(mime, "video/", 6)) {
            if (mVideoTrack.mSource == NULL) {
                mVideoTrack.mIndex = i;
                mVideoTrack.mSource = track;
                mVideoTrack.mPackets =
                    new AnotherPacketSource(mVideoTrack.mSource->getFormat());

                // video always at the beginning
                mMimes.insertAt(String8(mime), 0);
            }
        }

        mSources.push(track);
        int64_t durationUs;
        if (meta->findInt64(kKeyDuration, &durationUs)) {
            if (durationUs > mDurationUs) {
                mDurationUs = durationUs;
            }
        }
// Accumulate this track's bitrate
        int32_t bitrate;
        if (totalBitrate >= 0 && meta->findInt32(kKeyBitRate, &bitrate)) {
            totalBitrate += bitrate;
        } else {
            totalBitrate = -1;
        }
    }

    ALOGV("initFromDataSource mSources.size(): %zu  mIsSecure: %d  mime[0]: %s", mSources.size(),
            mIsSecure, (mMimes.isEmpty() ? "NONE" : mMimes[0].string()));

    if (mSources.size() == 0) {
        ALOGE("b/23705695");
        return UNKNOWN_ERROR;
    }

    // Modular DRM: The return value doesn't affect source initialization.
    (void)checkDrmInfo();

    mBitrate = totalBitrate;

    return OK;
}
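
The track enumeration done here has a public-API analogue in android.media.MediaExtractor, which can help when reasoning about what initFromDataSource sees; a rough, illustrative sketch (file path hypothetical; IOException handling omitted):

// App-level analogue (illustrative): enumerate tracks and their MIME types.
MediaExtractor extractor = new MediaExtractor();
extractor.setDataSource("/sdcard/Movies/test.mp4");
for (int i = 0; i < extractor.getTrackCount(); i++) {
    MediaFormat format = extractor.getTrackFormat(i);
    String mime = format.getString(MediaFormat.KEY_MIME);
    Log.d("Extractor", "track " + i + ": " + mime);
}
extractor.release();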

4.1 Creating the media extractor

extractor = MediaExtractorFactory::Create(dataSource, NULL)

  • frameworks/av/media/libstagefright/MediaExtractorFactory.cpp
sp<IMediaExtractor> MediaExtractorFactory::Create(
        const sp<DataSource> &source, const char *mime) {
    ALOGV("MediaExtractorFactory::Create %s", mime);

    if (!property_get_bool("media.stagefright.extractremote", true)) {
        // local extractor
        ALOGW("creating media extractor in calling process");
        return CreateFromService(source, mime);
    } else {
        // remote extractor
        ALOGV("get service manager");
        sp<IBinder> binder = defaultServiceManager()->getService(String16("media.extractor"));

        if (binder != 0) {

// This is a BpMediaExtractorService proxy
            sp<IMediaExtractorService> mediaExService(
                    interface_cast<IMediaExtractorService>(binder));
            sp<IMediaExtractor> ex;

// Call into the server-side object: MediaExtractorService
            mediaExService->makeExtractor(
                    CreateIDataSourceFromDataSource(source),
                    mime ? std::optional<std::string>(mime) : std::nullopt,
                    &ex);
            return ex;
        } else {
            ALOGE("extractor service not running");
            return NULL;
        }
    }
    return NULL;
}
  • frameworks/av/services/mediaextractor/MediaExtractorService.cpp
::android::binder::Status MediaExtractorService::makeExtractor(
        const ::android::sp<::android::IDataSource>& remoteSource,
        const ::std::optional< ::std::string> &mime,
        ::android::sp<::android::IMediaExtractor>* _aidl_return) {
    ALOGV("@@@ MediaExtractorService::makeExtractor for %s", mime ? mime->c_str() : nullptr);

    sp<DataSource> localSource = CreateDataSourceFromIDataSource(remoteSource);

    MediaBuffer::useSharedMemory();

// Still ends up calling MediaExtractorFactory
    sp<IMediaExtractor> extractor = MediaExtractorFactory::CreateFromService(
            localSource,
            mime ? mime->c_str() : nullptr);

    ALOGV("extractor service created %p (%s)",
            extractor.get(),
            extractor == nullptr ? "" : extractor->name());

    if (extractor != nullptr) {

// Register the media extractor
        registerMediaExtractor(extractor, localSource, mime ? mime->c_str() : nullptr);
    }
    *_aidl_return = extractor;
    return binder::Status::ok();
}
  • frameworks/av/media/libstagefright/MediaExtractorFactory.cpp

Creating the IMediaExtractor object:

sp<IMediaExtractor> MediaExtractorFactory::CreateFromService(
        const sp<DataSource> &source, const char *mime) {

    ALOGV("MediaExtractorFactory::CreateFromService %s", mime);

    void *meta = nullptr;
    void *creator = NULL;
    FreeMetaFunc freeMeta = nullptr;

// Confidence score of an extractor; the highest scorer is chosen as the demuxer to use. Range: 0 ~ 1
    float confidence;
    sp<ExtractorPlugin> plugin;
    uint32_t creatorVersion = 0;

// sniff() returns the creator function pointer of the highest-scoring extractor
    creator = sniff(source, &confidence, &meta, &freeMeta, plugin, &creatorVersion);
    if (!creator) {
        ALOGV("FAILED to autodetect media content.");
        return NULL;
    }

    MediaExtractor *ex = nullptr;
    if (creatorVersion == EXTRACTORDEF_VERSION_NDK_V1 ||
            creatorVersion == EXTRACTORDEF_VERSION_NDK_V2) {
        CMediaExtractor *ret = ((CreatorFunc)creator)(source->wrap(), meta);
        if (meta != nullptr && freeMeta != nullptr) {
            freeMeta(meta);
        }

// On success, wrap the CMediaExtractor in a proxy object and return it
        ex = ret != nullptr ? new MediaExtractorCUnwrapper(ret) : nullptr;
    }

    ALOGV("Created an extractor '%s' with confidence %.2f",
         ex != nullptr ? ex->name() : "<null>", confidence);

    return CreateIMediaExtractorFromMediaExtractor(ex, source, plugin);
}

The sniff() method:

// static
void *MediaExtractorFactory::sniff(
        const sp<DataSource> &source, float *confidence, void **meta,
        FreeMetaFunc *freeMeta, sp<ExtractorPlugin> &plugin, uint32_t *creatorVersion) {
    *confidence = 0.0f;
    *meta = nullptr;

    std::shared_ptr<std::list<sp<ExtractorPlugin>>> plugins;
    {
        Mutex::Autolock autoLock(gPluginMutex);
        if (!gPluginsRegistered) {
            return NULL;
        }
        plugins = gPlugins;
    }

    void *bestCreator = NULL;
    for (auto it = plugins->begin(); it != plugins->end(); ++it) {
        ALOGV("sniffing %s", (*it)->def.extractor_name);
        float newConfidence;
        void *newMeta = nullptr;
        FreeMetaFunc newFreeMeta = nullptr;

        void *curCreator = NULL;
        if ((*it)->def.def_version == EXTRACTORDEF_VERSION_NDK_V1) {
            curCreator = (void*) (*it)->def.u.v2.sniff(
                    source->wrap(), &newConfidence, &newMeta, &newFreeMeta);
        } else if ((*it)->def.def_version == EXTRACTORDEF_VERSION_NDK_V2) {
            curCreator = (void*) (*it)->def.u.v3.sniff(
                    source->wrap(), &newConfidence, &newMeta, &newFreeMeta);
        }

        if (curCreator) {
            if (newConfidence > *confidence) {
                *confidence = newConfidence;
                if (*meta != nullptr && *freeMeta != nullptr) {
                    (*freeMeta)(*meta);
                }
                *meta = newMeta;
                *freeMeta = newFreeMeta;

// Record the highest-scoring extractor (demuxer) plugin
                plugin = *it;
                bestCreator = curCreator;
                *creatorVersion = (*it)->def.def_version;
            } else {
                if (newMeta != nullptr && newFreeMeta != nullptr) {
                    newFreeMeta(newMeta);
                }
            }
        }
    }

// Finally return the creator function pointer of the highest-scoring extractor
    return bestCreator;
}

The source->wrap() implementation: as analyzed earlier, source is in fact a FileSource, and wrap() is implemented by its base class DataSource.

// [frameworks/av/media/libstagefright/include/media/stagefright/DataSource.h]

    CDataSource *wrap() {
        if (mWrapper) {
            return mWrapper;
        }
        mWrapper = new CDataSource();
        mWrapper->handle = this;

        // Below, function pointers are assigned. Note that each is a lambda whose
        // execution ultimately forwards to DataSource's own implementation.

        // Read the requested amount of file data
        mWrapper->readAt = [](void *handle, off64_t offset, void *data, size_t size) -> ssize_t {
            return ((DataSource*)handle)->readAt(offset, data, size);
        };
        // Get the file size
        mWrapper->getSize = [](void *handle, off64_t *size) -> status_t {
            return ((DataSource*)handle)->getSize(size);
        };
        // Flags describing the data source type
        mWrapper->flags = [](void *handle) -> uint32_t {
            return ((DataSource*)handle)->flags();
        };
        // Get the data source URI (URL/path)
        mWrapper->getUri = [](void *handle, char *uriString, size_t bufferSize) -> bool {
            return ((DataSource*)handle)->getUri(uriString, bufferSize);
        };
        // Return the wrapper object
        return mWrapper;
    }

The CreateIMediaExtractorFromMediaExtractor implementation:

The extractor must be wrapped in a class that can be invoked across processes via Binder: the IMediaExtractor subclass RemoteMediaExtractor. It implements the Binder interface as a BnMediaExtractor (the Bn/server side) and is then returned for use by the remote calling process.

  • frameworks/av/media/libstagefright/InterfaceUtils.cpp
sp<IMediaExtractor> CreateIMediaExtractorFromMediaExtractor(
        MediaExtractor *extractor,
        const sp<DataSource> &source,
        const sp<RefBase> &plugin) {
    if (extractor == nullptr) {
        return nullptr;
    }
    return RemoteMediaExtractor::wrap(extractor, source, plugin);
}

----------
// IMediaExtractor wrapper to the MediaExtractor.

// RemoteMediaExtractor derives from BnMediaExtractor; it is the server-side object called by remote clients
class RemoteMediaExtractor : public BnMediaExtractor {

4.2 AnotherPacketSource: the packet (source-data) handling object

  • frameworks/av/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
AnotherPacketSource::AnotherPacketSource(const sp<MetaData> &meta)
    : mIsAudio(false),
      mIsVideo(false),
      mEnabled(true),
      mFormat(NULL),
// Timestamp of the most recently queued media data
      mLastQueuedTimeUs(0),
// Estimated buffered duration
      mEstimatedBufferDurationUs(-1),
// EOS result status; OK means not yet at EOS
      mEOSResult(OK),
// Metadata (AMessage) of the most recently enqueued (demuxed) data
      mLatestEnqueuedMeta(NULL),
// Metadata of the most recently dequeued data (handed to the decoder)
      mLatestDequeuedMeta(NULL) {
    setFormat(meta);

    mDiscontinuitySegments.push_back(DiscontinuitySegment());
}

---------
void AnotherPacketSource::setFormat(const sp<MetaData> &meta) {
    if (mFormat != NULL) {
        // Only allowed to be set once. Requires explicit clear to reset.
        return;
    }

    mIsAudio = false;
    mIsVideo = false;
    const char *mime;

    // Do not use meta if no mime.
    if (meta == NULL || !meta->findCString(kKeyMIMEType, &mime)) {
        return;
    }

    mFormat = meta;

    if (!strncasecmp("audio/", mime, 6)) {
        mIsAudio = true;
    } else if (!strncasecmp("video/", mime, 6)) {
        mIsVideo = true;
    } else if (!strncasecmp("text/", mime, 5) || !strncasecmp("application/", mime, 12)) {
        return;
    } else {
        ALOGW("Unsupported mime type: %s", mime);
    }
}
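setFormat() classifies the track purely by MIME-type prefix with a case-insensitive compare. A tiny standalone example of the same strncasecmp-based prefix test (hypothetical helper with the same semantics):

#include <strings.h>   // strncasecmp (POSIX)

enum class TrackKind { Audio, Video, Other };

// Same prefix test as setFormat(): compare the first 6 characters of the
// MIME string against "audio/" or "video/", ignoring case.
static TrackKind classifyMime(const char *mime) {
    if (!strncasecmp("audio/", mime, 6)) return TrackKind::Audio;
    if (!strncasecmp("video/", mime, 6)) return TrackKind::Video;
    return TrackKind::Other;
}

// classifyMime("Video/avc") == TrackKind::Video
// classifyMime("text/vtt")  == TrackKind::Other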

5. Completing the asynchronous prepare flow

finishPrepareAsync()

  • frameworks/av/media/libmediaplayerservice/nuplayer/GenericSource.cpp
void NuPlayer::GenericSource::finishPrepareAsync() {
    ALOGV("finishPrepareAsync");

// 5.1 Start the audio/video tracks
    status_t err = startSources();
    if (err != OK) {
        ALOGE("Failed to init start data source!");
        notifyPreparedAndCleanup(err);
        return;
    }

    if (mIsStreaming) {
        mCachedSource->resumeFetchingIfNecessary();
        mPreparing = true;

// For a network streaming source, buffering is polled (network media data is fetched) about once per second
        schedulePollBuffering();
    } else {

// Notify the upper layer that prepare has completed
        notifyPrepared();
    }

// 5.2 Kick off the event flow that tells the audio track to read audio data
    if (mAudioTrack.mSource != NULL) {
        postReadBuffer(MEDIA_TRACK_TYPE_AUDIO);
    }

    if (mVideoTrack.mSource != NULL) {
        postReadBuffer(MEDIA_TRACK_TYPE_VIDEO);
    }
}

5.1 Starting the audio/video tracks

The call travels down through one layer of proxy objects after another until the start() method of the concrete audio/video track implementation inside the specific media extractor (demuxer) module is executed.

status_t NuPlayer::GenericSource::startSources() {
    // Start the selected A/V tracks now before we start buffering.
    // Widevine sources might re-initialize crypto when starting, if we delay
    // this to start(), all data buffered during prepare would be wasted.
    // (We don't actually start reading until start().)
    //
    // TODO: this logic may no longer be relevant after the removal of widevine
    // support
    if (mAudioTrack.mSource != NULL && mAudioTrack.mSource->start() != OK) {
        ALOGE("failed to start audio track!");
        return UNKNOWN_ERROR;
    }

    if (mVideoTrack.mSource != NULL && mVideoTrack.mSource->start() != OK) {
        ALOGE("failed to start video track!");
        return UNKNOWN_ERROR;
    }

    return OK;
}

For example, the MP4 media extractor:

  • frameworks/av/media/extractors/mp4/MPEG4Extractor.cpp
MediaTrackHelper *MPEG4Extractor::getTrack(size_t index) {
    status_t err;
    if ((err = readMetaData()) != OK) {
        return NULL;
    }

    Track *track = mFirstTrack;
    while (index > 0) {
        if (track == NULL) {
            return NULL;
        }

        track = track->next;
        --index;
    }

    if (track == NULL) {
        return NULL;
    }


    Trex *trex = NULL;
    int32_t trackId;
    if (AMediaFormat_getInt32(track->meta, AMEDIAFORMAT_KEY_TRACK_ID, &trackId)) {
        for (size_t i = 0; i < mTrex.size(); i++) {
            Trex *t = &mTrex.editItemAt(i);
            if (t->track_ID == (uint32_t) trackId) {
                trex = t;
                break;
            }
        }
    } else {
        ALOGE("b/21657957");
        return NULL;
    }

    ALOGV("getTrack called, pssh: %zu", mPssh.size());

    const char *mime;
    if (!AMediaFormat_getString(track->meta, AMEDIAFORMAT_KEY_MIME, &mime)) {
        return NULL;
    }
    sp<ItemTable> itemTable;
    if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
        void *data;
        size_t size;
        if (!AMediaFormat_getBuffer(track->meta, AMEDIAFORMAT_KEY_CSD_AVC, &data, &size)) {
            return NULL;
        }

        const uint8_t *ptr = (const uint8_t *)data;

        if (size < 7 || ptr[0] != 1) {  // configurationVersion == 1
            return NULL;
        }
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC)
            || !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC)) {
        void *data;
        size_t size;
        if (!AMediaFormat_getBuffer(track->meta, AMEDIAFORMAT_KEY_CSD_HEVC, &data, &size)) {
            return NULL;
        }

        const uint8_t *ptr = (const uint8_t *)data;

        if (size < 22 || ptr[0] != 1) {  // configurationVersion == 1
            return NULL;
        }
        if (!strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC)) {
            itemTable = mItemTable;
        }
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
        void *data;
        size_t size;
        if (!AMediaFormat_getBuffer(track->meta, AMEDIAFORMAT_KEY_CSD_2, &data, &size)
                || size != 24) {
            return NULL;
        }

        const uint8_t *ptr = (const uint8_t *)data;
        // dv_major.dv_minor Should be 1.0 or 2.1
        if ((ptr[0] != 1 || ptr[1] != 0) && (ptr[0] != 2 || ptr[1] != 1)) {
            return NULL;
        }
   } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AV1)
           || !strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_AVIF)) {
        void *data;
        size_t size;
        if (!AMediaFormat_getBuffer(track->meta, AMEDIAFORMAT_KEY_CSD_0, &data, &size)) {
            return NULL;
        }

        const uint8_t *ptr = (const uint8_t *)data;

        if (size < 4 || ptr[0] != 0x81) {  // configurationVersion == 1
            return NULL;
        }
        if (!strcasecmp(mime, MEDIA_MIMETYPE_IMAGE_AVIF)) {
            itemTable = mItemTable;
        }
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_VP9)) {
        void *data;
        size_t size;
        if (!AMediaFormat_getBuffer(track->meta, AMEDIAFORMAT_KEY_CSD_0, &data, &size)) {
            return NULL;
        }

        const uint8_t *ptr = (const uint8_t *)data;

        if (size < 5 || ptr[0] != 0x01) {  // configurationVersion == 1
            return NULL;
        }
    }

    ALOGV("track->elst_shift_start_ticks :%" PRIu64, track->elst_shift_start_ticks);

    uint64_t elst_initial_empty_edit_ticks = 0;
    if (mHeaderTimescale != 0) {
        // Convert empty_edit_ticks from movie timescale to media timescale.
        uint64_t elst_initial_empty_edit_ticks_mul = 0, elst_initial_empty_edit_ticks_add = 0;
        if (__builtin_mul_overflow(track->elst_initial_empty_edit_ticks, track->timescale,
                                   &elst_initial_empty_edit_ticks_mul) ||
            __builtin_add_overflow(elst_initial_empty_edit_ticks_mul, (mHeaderTimescale / 2),
                                   &elst_initial_empty_edit_ticks_add)) {
            ALOGE("track->elst_initial_empty_edit_ticks overflow");
            return nullptr;
        }
        elst_initial_empty_edit_ticks = elst_initial_empty_edit_ticks_add / mHeaderTimescale;
    }
    ALOGV("elst_initial_empty_edit_ticks in MediaTimeScale :%" PRIu64,
          elst_initial_empty_edit_ticks);

    MPEG4Source* source =
            new MPEG4Source(track->meta, mDataSource, track->timescale, track->sampleTable,
                            mSidxEntries, trex, mMoofOffset, itemTable,
                            track->elst_shift_start_ticks, elst_initial_empty_edit_ticks);
    if (source->init() != OK) {
        delete source;
        return NULL;
    }
    return source;
}
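The empty-edit handling above is a rounded rescale from movie timescale to media timescale, i.e. ticks_media = (ticks_movie * mediaTimescale + movieTimescale / 2) / movieTimescale, with __builtin_mul_overflow / __builtin_add_overflow (GCC/Clang builtins) guarding the intermediate products. A minimal sketch of the same computation:

#include <cstdint>

// Rescale ticks from movie timescale to media timescale, rounding to
// nearest, and report failure instead of silently wrapping on overflow.
static bool rescaleTicks(uint64_t ticks, uint32_t mediaTimescale,
                         uint32_t movieTimescale, uint64_t *out) {
    if (movieTimescale == 0) return false;
    uint64_t mul = 0, add = 0;
    if (__builtin_mul_overflow(ticks, (uint64_t)mediaTimescale, &mul) ||
        __builtin_add_overflow(mul, (uint64_t)(movieTimescale / 2), &add)) {
        return false;  // would overflow; mirrors the ALOGE + nullptr path above
    }
    *out = add / movieTimescale;
    return true;
}

// Example: 90 movie ticks at movieTimescale=1000, mediaTimescale=48000
// -> (90 * 48000 + 500) / 1000 = 4320 media ticks.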

Then start():

media_status_t MPEG4Source::start() {
    Mutex::Autolock autoLock(mLock);

    CHECK(!mStarted);

    int32_t tmp;
    CHECK(AMediaFormat_getInt32(mFormat, AMEDIAFORMAT_KEY_MAX_INPUT_SIZE, &tmp));
    size_t max_size = tmp;

    // A somewhat arbitrary limit that should be sufficient for 8k video frames
    // If you see the message below for a valid input stream: increase the limit
    const size_t kMaxBufferSize = 64 * 1024 * 1024;
    if (max_size > kMaxBufferSize) {
        ALOGE("bogus max input size: %zu > %zu", max_size, kMaxBufferSize);
        return AMEDIA_ERROR_MALFORMED;
    }
    if (max_size == 0) {
        ALOGE("zero max input size");
        return AMEDIA_ERROR_MALFORMED;
    }

    // Allow up to kMaxBuffers, but not if the total exceeds kMaxBufferSize.
    const size_t kInitialBuffers = 2;
    const size_t kMaxBuffers = 8;
    const size_t realMaxBuffers = min(kMaxBufferSize / max_size, kMaxBuffers);

// Initialize the buffer group
    mBufferGroup->init(kInitialBuffers, max_size, realMaxBuffers);
    mSrcBuffer = new (std::nothrow) uint8_t[max_size];
    if (mSrcBuffer == NULL) {
        // file probably specified a bad max size
        return AMEDIA_ERROR_MALFORMED;
    }
    mSrcBufferSize = max_size;

    mStarted = true;

    return AMEDIA_OK;
}
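The start() logic above sizes the buffer pool from the stream's declared max input size: up to kMaxBuffers buffers, but never more total memory than kMaxBufferSize. A quick worked sketch of the same clamp:

#include <algorithm>
#include <cstddef>

// Mirror of the clamp in MPEG4Source::start(): the buffer count is limited
// both by a hard cap (8) and by total memory (64 MiB).
static size_t computeMaxBuffers(size_t maxInputSize) {
    const size_t kMaxBufferSize = 64 * 1024 * 1024;
    const size_t kMaxBuffers = 8;
    if (maxInputSize == 0 || maxInputSize > kMaxBufferSize) {
        return 0;  // rejected as malformed, like the error paths above
    }
    return std::min(kMaxBufferSize / maxInputSize, kMaxBuffers);
}

// computeMaxBuffers(3 * 1024 * 1024)  == 8   (count-limited)
// computeMaxBuffers(20 * 1024 * 1024) == 3   (memory-limited: 64 / 20)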

5.2 Kicking off the event flow that tells the audio/video tracks to read data

postReadBuffer(MEDIA_TRACK_TYPE_VIDEO)

  • frameworks/av/media/libmediaplayerservice/nuplayer/GenericSource.cpp
void NuPlayer::GenericSource::postReadBuffer(media_track_type trackType) {
    if ((mPendingReadBufferTypes & (1 << trackType)) == 0) {
        mPendingReadBufferTypes |= (1 << trackType);
        sp<AMessage> msg = new AMessage(kWhatReadBuffer, this);
        msg->setInt32("trackType", trackType);
        msg->post();
    }
}
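postReadBuffer() uses a bitmask keyed by track type so that at most one read-buffer message per track is in flight at a time: the bit is set when the message is posted and cleared again in onReadBuffer() (shown below) before the actual read. A small standalone sketch of that dedup pattern, with std::function standing in for the AMessage post/handle machinery (locking omitted for brevity):

#include <cstdint>
#include <functional>

class ReadScheduler {
public:
    // Post a read request only if one is not already pending for this track.
    void postRead(int trackType, const std::function<void(int)> &post) {
        const uint32_t bit = 1u << trackType;
        if ((mPending & bit) == 0) {
            mPending |= bit;
            post(trackType);   // in GenericSource this is msg->post()
        }
    }

    // Called when the message is handled: clear the bit, then do the read.
    void onRead(int trackType, const std::function<void(int)> &readBuffer) {
        mPending &= ~(1u << trackType);
        readBuffer(trackType);
    }

private:
    uint32_t mPending = 0;
};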

--------------
void NuPlayer::GenericSource::onMessageReceived(const sp<AMessage> &msg) {
    Mutex::Autolock _l(mLock);
    switch (msg->what()) {

      case kWhatReadBuffer:
      {
          onReadBuffer(msg);
          break;
      }

-----------
void NuPlayer::GenericSource::onReadBuffer(const sp<AMessage>& msg) {
    int32_t tmpType;
    CHECK(msg->findInt32("trackType", &tmpType));
    media_track_type trackType = (media_track_type)tmpType;
    mPendingReadBufferTypes &= ~(1 << trackType);
// Dispatch to readBuffer() according to the track type
    readBuffer(trackType);
}

Implementation of readBuffer:

void NuPlayer::GenericSource::readBuffer(
        media_track_type trackType, int64_t seekTimeUs, MediaPlayerSeekMode mode,
        int64_t *actualTimeUs, bool formatChange) {
    Track *track;
    size_t maxBuffers = 1;
    switch (trackType) {
        case MEDIA_TRACK_TYPE_VIDEO:
            track = &mVideoTrack;
            maxBuffers = 8;  // too large of a number may influence seeks
            break;
        case MEDIA_TRACK_TYPE_AUDIO:
            track = &mAudioTrack;
            maxBuffers = 64;
            break;
        case MEDIA_TRACK_TYPE_SUBTITLE:
            track = &mSubtitleTrack;
            break;
        case MEDIA_TRACK_TYPE_TIMEDTEXT:
            track = &mTimedTextTrack;
            break;
        default:
            TRESPASS();
    }

    if (track->mSource == NULL) {
        return;
    }

    if (actualTimeUs) {
        *actualTimeUs = seekTimeUs;
    }

    MediaSource::ReadOptions options;

    bool seeking = false;
    if (seekTimeUs >= 0) {
        options.setSeekTo(seekTimeUs, mode);
        seeking = true;
    }

    const bool couldReadMultiple = (track->mSource->supportReadMultiple());

    if (couldReadMultiple) {
        options.setNonBlocking();
    }

    int32_t generation = getDataGeneration(trackType);
    for (size_t numBuffers = 0; numBuffers < maxBuffers; ) {
        Vector<MediaBufferBase *> mediaBuffers;
        status_t err = NO_ERROR;

        sp<IMediaSource> source = track->mSource;
        mLock.unlock();
        if (couldReadMultiple) {
            err = source->readMultiple(
                    &mediaBuffers, maxBuffers - numBuffers, &options);
        } else {
            MediaBufferBase *mbuf = NULL;
            err = source->read(&mbuf, &options);
            if (err == OK && mbuf != NULL) {
                mediaBuffers.push_back(mbuf);
            }
        }
        mLock.lock();

        options.clearNonPersistent();

        size_t id = 0;
        size_t count = mediaBuffers.size();

        // in case track has been changed since we don't have lock for some time.
        if (generation != getDataGeneration(trackType)) {
            for (; id < count; ++id) {
                mediaBuffers[id]->release();
            }
            break;
        }

        for (; id < count; ++id) {
            int64_t timeUs;
            MediaBufferBase *mbuf = mediaBuffers[id];
            if (!mbuf->meta_data().findInt64(kKeyTime, &timeUs)) {
                mbuf->meta_data().dumpToLog();
                track->mPackets->signalEOS(ERROR_MALFORMED);
                break;
            }
            if (trackType == MEDIA_TRACK_TYPE_AUDIO) {
                mAudioTimeUs = timeUs;
            } else if (trackType == MEDIA_TRACK_TYPE_VIDEO) {
                mVideoTimeUs = timeUs;
            }

            queueDiscontinuityIfNeeded(seeking, formatChange, trackType, track);

            sp<ABuffer> buffer = mediaBufferToABuffer(mbuf, trackType);
            if (numBuffers == 0 && actualTimeUs != nullptr) {
                *actualTimeUs = timeUs;
            }
            if (seeking && buffer != nullptr) {
                sp<AMessage> meta = buffer->meta();
                if (meta != nullptr && mode == MediaPlayerSeekMode::SEEK_CLOSEST
                        && seekTimeUs > timeUs) {
                    sp<AMessage> extra = new AMessage;
                    extra->setInt64("resume-at-mediaTimeUs", seekTimeUs);
                    meta->setMessage("extra", extra);
                }
            }

            track->mPackets->queueAccessUnit(buffer);
            formatChange = false;
            seeking = false;
            ++numBuffers;
        }
        if (id < count) {
            // Error, some mediaBuffer doesn't have kKeyTime.
            for (; id < count; ++id) {
                mediaBuffers[id]->release();
            }
            break;
        }

        if (err == WOULD_BLOCK) {
            break;
        } else if (err == INFO_FORMAT_CHANGED) {
#if 0
            track->mPackets->queueDiscontinuity(
                    ATSParser::DISCONTINUITY_FORMATCHANGE,
                    NULL,
                    false /* discard */);
#endif
        } else if (err != OK) {
            queueDiscontinuityIfNeeded(seeking, formatChange, trackType, track);
            track->mPackets->signalEOS(err);
            break;
        }
    }

    if (mIsStreaming
        && (trackType == MEDIA_TRACK_TYPE_VIDEO || trackType == MEDIA_TRACK_TYPE_AUDIO)) {
        status_t finalResult;
        int64_t durationUs = track->mPackets->getBufferedDurationUs(&finalResult);

        // TODO: maxRebufferingMarkMs could be larger than
        // mBufferingSettings.mResumePlaybackMarkMs
        int64_t markUs = (mPreparing ? mBufferingSettings.mInitialMarkMs
            : mBufferingSettings.mResumePlaybackMarkMs) * 1000LL;
        if (finalResult == ERROR_END_OF_STREAM || durationUs >= markUs) {
            if (mPreparing || mSentPauseOnBuffering) {
                Track *counterTrack =
                    (trackType == MEDIA_TRACK_TYPE_VIDEO ? &mAudioTrack : &mVideoTrack);
                if (counterTrack->mSource != NULL) {
                    durationUs = counterTrack->mPackets->getBufferedDurationUs(&finalResult);
                }
                if (finalResult == ERROR_END_OF_STREAM || durationUs >= markUs) {
                    if (mPreparing) {
                        notifyPrepared();
                        mPreparing = false;
                    } else {
                        sendCacheStats();
                        mSentPauseOnBuffering = false;
                        sp<AMessage> notify = dupNotify();
                        notify->setInt32("what", kWhatResumeOnBufferingEnd);
                        notify->post();
                    }
                }
            }
            return;
        }

        postReadBuffer(trackType);
    }
}
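For streaming sources, the tail of readBuffer() implements a simple watermark scheme: while preparing, the source keeps posting postReadBuffer() until both tracks have buffered at least mBufferingSettings.mInitialMarkMs worth of data (or hit EOS), and only then fires notifyPrepared(); after a pause-on-buffering, the same check against mResumePlaybackMarkMs decides when kWhatResumeOnBufferingEnd is posted. A condensed sketch of just the per-track decision (hypothetical helper name, microsecond units as in the real code):

#include <cstdint>

// True when buffering for this track has crossed the watermark (or the
// stream has ended), i.e. when prepare/resume may proceed for this track.
static bool bufferedEnough(int64_t bufferedDurationUs, bool endOfStream,
                           int64_t markMs) {
    const int64_t markUs = markMs * 1000LL;   // ms -> us, as in readBuffer()
    return endOfStream || bufferedDurationUs >= markUs;
}

// Example: with a mark of, say, 4000 ms, both the audio and the video track
// must have >= 4,000,000 us buffered before notifyPrepared() is sent.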

Reposted from blog.csdn.net/qq_40587575/article/details/124926345