11.AndroidMultiMedia框架完全解析-start流程分析
还是从mediaplayer.cpp开始分析,看start函数的实现:
// Client-side entry point for starting playback.
// mPlayer here is the Bp (proxy) end of the anonymous IMediaPlayer binder
// interface, so every call below is marshalled to MediaPlayerService's
// per-connection Client on the server side.
status_t MediaPlayer::start()
{
    // Push the cached client-side settings down to the server first.
    mPlayer->setLooping(mLoop);
    mPlayer->setVolume(mLeftVolume, mRightVolume);
    mPlayer->setAuxEffectSendLevel(mSendLevel);
    mCurrentState = MEDIA_PLAYER_STARTED;
    // BUG FIX: the excerpt used 'ret' without declaring it; declare it as
    // status_t (as the AOSP source does) so the block compiles.
    status_t ret = mPlayer->start();
    return ret;
}
核⼼代码就是这些,需要注意的是,这⾥的mPlayer是IMediaPlayer类型的,是IMediaPlayer这个匿名Binder Server的Bp端,所以最终是通过这个匿名Binder Server类来传输消息的,传输的⽬的地是MediaPlayerService,其他函数暂时不分析,就从最后的start函数开始分析。
通过IMediaPlayer的Bp端传送到Bn端,最后到达MediaPlayerService,⽽MediaPlayerService为这个客户端创建了⼀个Client,所以最终对应的函数就是:MediaPlayerService::Client::start()
// Server-side counterpart of MediaPlayer::start(): runs inside
// MediaPlayerService in the Client created for this connection.
status_t MediaPlayerService::Client::start()
{
    ALOGV("[%d] start", mConnId);
    // getPlayer() returns the underlying MediaPlayerBase implementation
    // (in this analysis: NuPlayerDriver).
    sp<MediaPlayerBase> player = getPlayer();
    if (player == 0) {
        return UNKNOWN_ERROR;
    }
    player->setLooping(mLoop);
    return player->start();
}
这⾥获取到的MediaPlayer是NuPlayerDriver,所以最后还是调⽤到NuPlayerDriver的start函数:
// NuPlayerDriver::start() — reached via MediaPlayerService::Client::start().
// NOTE(review): the excerpt is truncated by the article; only the
// STATE_PREPARED branch of the state switch is shown here.
status_t NuPlayerDriver::start(){
ALOGD("start(%p), state is %d, eos is %d",this, mState, mAtEOS);
// Serialize state changes against other driver calls.
Mutex::Autolock autoLock(mLock);
switch(mState){
case STATE_PREPARED:
{
// Clear the end-of-stream flag and start the wrapped NuPlayer.
mAtEOS =false;
mPlayer->start();
// If a seek was requested before start, issue it asynchronously now
// and clear the stored startup seek position.
if(mStartupSeekTimeUs >=0){
mPlayer->seekToAsync(mStartupSeekTimeUs);
mStartupSeekTimeUs =-1;
}
break;
}
经过上⾯的prepare步骤,此时的状态,已经是STATE_PREPARED了,⽽且这⾥的mPlayer是NuPlayer,所以继续调⽤到NuPlayer中的start函数:
void NuPlayer::start(){
(new AMessage(kWhatStart,this))->post();
}
通过消息机制,继续传递到NuPlayer::onMessageReceived函数中处理:
// Excerpt of NuPlayer's looper handler (truncated by the article): the
// kWhatStart case posted from NuPlayer::start().
NuPlayer::onMessageReceived(const sp<AMessage>&msg)
case kWhatStart:
{
ALOGV("kWhatStart");
if(mStarted){
// Already started: treat as resume, but do not resume yet if the
// source is still buffering.
if(!mPausedForBuffering){
onResume();
}
}else{
// First start: the real work happens in onStart().
onStart();
}
mPausedByClient =false;
break;
}
终于到核⼼函数了,下⾯就仔细分析这个onStart函数:
// Core start routine, executed on NuPlayer's looper thread.
// Starts the source, decides audio-offload mode, creates the Renderer and
// its dedicated looper, and finally kicks off decoder discovery.
void NuPlayer::onStart(int64_t startPositionUs){
// Start the data source (GenericSource for local/http playback) exactly once.
if(!mSourceStarted){
mSourceStarted =true;
mSource->start();
}
// Optional start offset: seek first; bail out if the source no longer
// reports a video format after the seek.
if(startPositionUs >0){
performSeek(startPositionUs);
if(mSource->getFormat(false/* audio */)==NULL){
return;
}
}
// Reset per-playback state.
mOffloadAudio =false;
mAudioEOS =false;
mVideoEOS =false;
mStarted =true;
uint32_t flags =0;
if(mSource->isRealTime()){
flags |= Renderer::FLAG_REAL_TIME;
}
// Gather audio metadata and the sink's stream type to decide whether the
// audio stream can be offloaded to dedicated hardware.
sp<MetaData> audioMeta = mSource->getFormatMeta(true/* audio */);
audio_stream_type_t streamType = AUDIO_STREAM_MUSIC;
if(mAudioSink !=NULL){
streamType = mAudioSink->getAudioStreamType();
}
sp<AMessage> videoFormat = mSource->getFormat(false/* audio */);
// NOTE(review): the 'if' on the next line was fused onto the statement by
// the article's formatting; it is a separate conditional in AOSP.
mOffloadAudio =
canOffloadStream(audioMeta,(videoFormat !=NULL), mSource->isStreaming(), streamType); if(mOffloadAudio){
flags |= Renderer::FLAG_OFFLOAD_AUDIO;
}
// Create the Renderer. The generation counter lets stale renderer
// notifications be ignored after a renderer is replaced.
sp<AMessage> notify =new AMessage(kWhatRendererNotify,this);
++mRendererGeneration;
notify->setInt32("generation", mRendererGeneration);
mRenderer =new Renderer(mAudioSink, notify, flags);
// Renderer gets its own looper thread at audio priority.
mRendererLooper =new ALooper;
mRendererLooper->setName("NuPlayerRenderer");
mRendererLooper->start(false,false, ANDROID_PRIORITY_AUDIO);
mRendererLooper->registerHandler(mRenderer);
// Apply playback settings; on failure, roll back the source start and
// report the error to the listener.
status_t err = mRenderer->setPlaybackSettings(mPlaybackSettings);
if(err != OK){
mSource->stop();
mSourceStarted =false;
notifyListener(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
return;
}
// Propagate the video frame rate to the renderer when known.
float rate =getFrameRate();
if(rate >0){
mRenderer->setVideoFrameRate(rate);
}
// Attach the new renderer to any decoders that already exist.
if(mVideoDecoder !=NULL){
mVideoDecoder->setRenderer(mRenderer);
}
if(mAudioDecoder !=NULL){
mAudioDecoder->setRenderer(mRenderer);
}
if(mVideoDecoder !=NULL){
scheduleSetVideoDecoderTime();
}
// Begin (or continue) instantiating decoders for the source's tracks.
postScanSources();
}
1. ⾸先来看mSource->start()函数,在之前的NuPlayer::setDataSourceAsync函数中,创建了⼀个GenericSource:
sp<GenericSource> source =new GenericSource(notify, mUIDValid, mUID);
然后⼜在NuPlayer::onMessageReceived函数的kWhatSetDataSource case中设置了NuPlayer中的mSource是创建的这个GenericSource:
// Looper callback (excerpt, truncated by the article): kWhatSetDataSource
// installs the GenericSource created in setDataSourceAsync() as mSource.
void NuPlayer::onMessageReceived(const sp<AMessage>&msg){
switch(msg->what()){
case kWhatSetDataSource:
{
ALOGV("kWhatSetDataSource");
CHECK(mSource ==NULL);
status_t err = OK;
sp<RefBase> obj;
CHECK(msg->findObject("source",&obj));
if(obj !=NULL){
// BUG FIX: the excerpt read "static_cast<Source *>(())" — the cast
// argument was lost in extraction; AOSP casts the raw pointer taken
// from the strong pointer carried in the message:
mSource = static_cast<Source *>(obj.get());
所以这⾥的mSource->start()函数最终跑到GenericSource.cpp中去执⾏了,
void NuPlayer::GenericSource::start(){
ALOGI("start");
mStopRead =false;
if(mAudioTrack.mSource !=NULL){
postReadBuffer(MEDIA_TRACK_TYPE_AUDIO);
}
if(mVideoTrack.mSource !=NULL){
postReadBuffer(MEDIA_TRACK_TYPE_VIDEO);
}
setDrmPlaybackStatusIfNeeded(Playback::START,getLastReadPosition()/1000);
mStarted =true;
(new AMessage(kWhatStart,this))->post();
}
这⾥通过postReadBuffer函数来分别发送Video Track和Audio Track的数据,并发送了⼀个kWhatStart的msg。先来看看postReadBuffer函数,这个函数中会根据不同的媒体类型来执⾏不同的操作。
// Schedules an asynchronous readBuffer() for |trackType| unless one is
// already pending. mPendingReadBufferTypes is a bitmask indexed by track
// type, guarded by mReadBufferLock.
void NuPlayer::GenericSource::postReadBuffer(media_track_type trackType){
    Mutex::Autolock _l(mReadBufferLock);
    if (mPendingReadBufferTypes & (1 << trackType)) {
        // A read for this track is already queued; don't double-post.
        return;
    }
    mPendingReadBufferTypes |= (1 << trackType);
    sp<AMessage> readMsg = new AMessage(kWhatReadBuffer, this);
    readMsg->setInt32("trackType", trackType);
    readMsg->post();
}
// GenericSource looper handler (excerpt, truncated by the article):
// kWhatReadBuffer simply dispatches to onReadBuffer().
void NuPlayer::GenericSource::onMessageReceived(const sp<AMessage>&msg){
case kWhatReadBuffer:
{
onReadBuffer(msg);
break;
}
// Looper-side handler for kWhatReadBuffer: extract the track type from the
// message and perform the actual extractor read.
void NuPlayer::GenericSource::onReadBuffer(sp<AMessage> msg){
    int32_t typeValue;
    CHECK(msg->findInt32("trackType", &typeValue));
    const media_track_type trackType = (media_track_type)typeValue;

    readBuffer(trackType);

    // Clear the pending bit only after the (possibly slow) read finishes.
    // The lock protects just this flag update, not readBuffer() itself.
    Mutex::Autolock _l(mReadBufferLock);
    mPendingReadBufferTypes &= ~(1 << trackType);
}
⼜是通过⼀系列的转换,最终直到readBuffer函数中,这个函数根据不同的媒体类型来执⾏不同的操作,继续追踪:
// readBuffer(): pulls encoded data for one track from the extractor.
// NOTE(review): the excerpt is truncated by the article at the start of the
// per-track-type switch.
void NuPlayer::GenericSource::readBuffer(
media_track_type trackType,int64_t seekTimeUs,int64_t*actualTimeUs,bool formatChange){
ALOGV("GenericSource readBuffer BEGIN type=%d",trackType);
// Do not read data if Widevine source is stopped
if(mStopRead){
return;
}
// Selected track and how many buffers to read in one batch.
Track *track;
size_t maxBuffers =1;
switch(trackType){
case MEDIA_TRACK_TYPE_VIDEO:
版权声明:本站内容均来自互联网,仅供演示用,请勿用于商业和其他非法用途。如果侵犯了您的权益请与我们联系QQ:729038198,我们将在24小时内删除。
发表评论