1. /system/bin/mediaserver
int main(int argc, char** argv)
{
sp<ProcessState> proc(ProcessState::self());
sp<IServiceManager> sm = defaultServiceManager();
LOGI("ServiceManager: %p", sm.get());
AudioFlinger::instantiate();
// Create the MediaPlayerService instance.
MediaPlayerService::instantiate();
CameraService::instantiate();
AudioPolicyService::instantiate();
ProcessState::self()->startThreadPool();
IPCThreadState::self()->joinThreadPool();
}
// Register the "media.player" service with /system/bin/servicemanager.
void MediaPlayerService::instantiate() {
defaultServiceManager()->addService(
String16("media.player"), new MediaPlayerService());
}
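On the client side, the service is looked up through the same service manager. A minimal sketch of that lookup, modeled on IMediaDeathNotifier::getMediaPlayerService() from the same-era source (the helper shape and retry interval here are illustrative, not verbatim):
// Illustrative client-side lookup of the "media.player" service.
sp<IMediaPlayerService> getMediaPlayerService() {
    sp<IServiceManager> sm = defaultServiceManager();
    sp<IBinder> binder;
    do {
        binder = sm->getService(String16("media.player"));
        if (binder != 0) break;
        usleep(500000); // mediaserver not up yet; retry in 0.5s
    } while (true);
    // interface_cast<> wraps the raw binder in a BpMediaPlayerService proxy.
    return interface_cast<IMediaPlayerService>(binder);
}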
2. MediaPlayerService level
// Inheritance hierarchy:
class MediaPlayerService : public BnMediaPlayerService
class BnMediaPlayerService: public BnInterface<IMediaPlayerService>
// The base class BnMediaPlayerService::onTransact() receives every client request, as follows:
status_t BnMediaPlayerService::onTransact(
uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
switch(code) {
case CREATE_URL: {
CHECK_INTERFACE(IMediaPlayerService, data, reply);
pid_t pid = data.readInt32();
sp<IMediaPlayerClient> client =
interface_cast<IMediaPlayerClient>(data.readStrongBinder());
const char* url = data.readCString();
KeyedVector<String8, String8> headers;
int32_t numHeaders = data.readInt32();
for (int i = 0; i < numHeaders; ++i) {
String8 key = data.readString8();
String8 value = data.readString8();
headers.add(key, value);
}
int audioSessionId = data.readInt32();
// Actually executed by MediaPlayerService::create().
sp<IMediaPlayer> player = create(
pid, client, url, numHeaders > 0 ? &headers : NULL, audioSessionId);
reply->writeStrongBinder(player->asBinder());
return NO_ERROR;
} break;
case CREATE_FD: {
CHECK_INTERFACE(IMediaPlayerService, data, reply);
pid_t pid = data.readInt32();
sp<IMediaPlayerClient> client = interface_cast<IMediaPlayerClient>(data.readStrongBinder());
int fd = dup(data.readFileDescriptor());
int64_t offset = data.readInt64();
int64_t length = data.readInt64();
int audioSessionId = data.readInt32();
// Actually executed by MediaPlayerService::create().
sp<IMediaPlayer> player = create(pid, client, fd, offset, length, audioSessionId);
// Return the IMediaPlayer binder; from here on the client talks to that binder directly.
reply->writeStrongBinder(player->asBinder());
return NO_ERROR;
} break;
case DECODE_URL: {
CHECK_INTERFACE(IMediaPlayerService, data, reply);
const char* url = data.readCString();
uint32_t sampleRate;
int numChannels;
int format;
sp<IMemory> player = decode(url, &sampleRate, &numChannels, &format);
reply->writeInt32(sampleRate);
reply->writeInt32(numChannels);
reply->writeInt32(format);
reply->writeStrongBinder(player->asBinder());
return NO_ERROR;
} break;
case DECODE_FD: {
CHECK_INTERFACE(IMediaPlayerService, data, reply);
int fd = dup(data.readFileDescriptor());
int64_t offset = data.readInt64();
int64_t length = data.readInt64();
uint32_t sampleRate;
int numChannels;
int format;
// Actually executed by MediaPlayerService::decode().
sp<IMemory> player = decode(fd, offset, length, &sampleRate, &numChannels, &format);
reply->writeInt32(sampleRate);
reply->writeInt32(numChannels);
reply->writeInt32(format);
reply->writeStrongBinder(player->asBinder());
return NO_ERROR;
} break;
case CREATE_MEDIA_RECORDER: {
CHECK_INTERFACE(IMediaPlayerService, data, reply);
pid_t pid = data.readInt32();
sp<IMediaRecorder> recorder = createMediaRecorder(pid);
reply->writeStrongBinder(recorder->asBinder());
return NO_ERROR;
} break;
case CREATE_METADATA_RETRIEVER: {
CHECK_INTERFACE(IMediaPlayerService, data, reply);
pid_t pid = data.readInt32();
// Actually executed by MediaPlayerService::createMetadataRetriever().
sp<IMediaMetadataRetriever> retriever = createMetadataRetriever(pid);
reply->writeStrongBinder(retriever->asBinder());
return NO_ERROR;
} break;
case GET_OMX: {
CHECK_INTERFACE(IMediaPlayerService, data, reply);
// Actually executed by MediaPlayerService::getOMX().
sp<IOMX> omx = getOMX();
reply->writeStrongBinder(omx->asBinder());
return NO_ERROR;
} break;
default:
return BBinder::onTransact(code, data, reply, flags);
}
}
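The proxy side marshals the same transaction codes. A sketch of BpMediaPlayerService::create() for the fd variant, reconstructed to mirror the CREATE_FD unmarshalling above (check the argument order against your tree):
// Sketch of the Bp (proxy) side matching the CREATE_FD branch.
virtual sp<IMediaPlayer> create(
        pid_t pid, const sp<IMediaPlayerClient>& client,
        int fd, int64_t offset, int64_t length, int audioSessionId) {
    Parcel data, reply;
    data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
    data.writeInt32(pid);
    data.writeStrongBinder(client->asBinder());
    data.writeFileDescriptor(fd);
    data.writeInt64(offset);
    data.writeInt64(length);
    data.writeInt32(audioSessionId);
    // remote() is the BpBinder; this blocks until onTransact() replies.
    remote()->transact(CREATE_FD, data, &reply);
    return interface_cast<IMediaPlayer>(reply.readStrongBinder());
}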
class Client : public BnMediaPlayer
class BnMediaPlayer: public BnInterface<IMediaPlayer>
// Handles the commands for each individual player.
status_t BnMediaPlayer::onTransact(
uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
switch(code) {
case DISCONNECT: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
disconnect();
return NO_ERROR;
} break;
case SET_VIDEO_SURFACE: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
sp<ISurface> surface = interface_cast<ISurface>(data.readStrongBinder());
reply->writeInt32(setVideoSurface(surface));
return NO_ERROR;
} break;
case PREPARE_ASYNC: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
reply->writeInt32(prepareAsync());
return NO_ERROR;
} break;
case START: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
reply->writeInt32(start());
return NO_ERROR;
} break;
case STOP: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
reply->writeInt32(stop());
return NO_ERROR;
} break;
case IS_PLAYING: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
bool state;
status_t ret = isPlaying(&state);
reply->writeInt32(state);
reply->writeInt32(ret);
return NO_ERROR;
} break;
case PAUSE: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
reply->writeInt32(pause());
return NO_ERROR;
} break;
case SEEK_TO: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
reply->writeInt32(seekTo(data.readInt32()));
return NO_ERROR;
} break;
case GET_CURRENT_POSITION: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
int msec;
status_t ret = getCurrentPosition(&msec);
reply->writeInt32(msec);
reply->writeInt32(ret);
return NO_ERROR;
} break;
case GET_DURATION: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
int msec;
status_t ret = getDuration(&msec);
reply->writeInt32(msec);
reply->writeInt32(ret);
return NO_ERROR;
} break;
case RESET: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
reply->writeInt32(reset());
return NO_ERROR;
} break;
case SET_AUDIO_STREAM_TYPE: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
reply->writeInt32(setAudioStreamType(data.readInt32()));
return NO_ERROR;
} break;
case SET_LOOPING: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
reply->writeInt32(setLooping(data.readInt32()));
return NO_ERROR;
} break;
case SET_VOLUME: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
reply->writeInt32(setVolume(data.readFloat(), data.readFloat()));
return NO_ERROR;
} break;
case INVOKE: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
invoke(data, reply);
return NO_ERROR;
} break;
case SET_METADATA_FILTER: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
reply->writeInt32(setMetadataFilter(data));
return NO_ERROR;
} break;
case SUSPEND: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
reply->writeInt32(suspend());
return NO_ERROR;
} break;
case RESUME: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
reply->writeInt32(resume());
return NO_ERROR;
} break;
case GET_METADATA: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
const status_t retcode = getMetadata(data.readInt32(), data.readInt32(), reply);
reply->setDataPosition(0);
reply->writeInt32(retcode);
reply->setDataPosition(0);
return NO_ERROR;
} break;
case SET_AUX_EFFECT_SEND_LEVEL: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
reply->writeInt32(setAuxEffectSendLevel(data.readFloat()));
return NO_ERROR;
} break;
case ATTACH_AUX_EFFECT: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
reply->writeInt32(attachAuxEffect(data.readInt32()));
return NO_ERROR;
} break;
default:
return BBinder::onTransact(code, data, reply, flags);
}
}
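The matching proxy calls are one-liners. A sketch of start(), under the same assumptions:
// Sketch of the proxy side for START.
virtual status_t start() {
    Parcel data, reply;
    data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
    remote()->transact(START, data, &reply);
    // Reads the status code written by reply->writeInt32(start()) above.
    return reply.readInt32();
}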
sp<IMediaPlayer> MediaPlayerService::create(pid_t pid, const sp<IMediaPlayerClient>& client,
int fd, int64_t offset, int64_t length, int audioSessionId)
{
int32_t connId = android_atomic_inc(&mNextConnId);
// Create the actual player object: one Client per connection.
sp<Client> c = new Client(this, pid, connId, client, audioSessionId);
LOGV("Create new client(%d) from pid %d, fd=%d, offset=%lld, length=%lld, audioSessionId=%d",
connId, pid, fd, offset, length, audioSessionId);
// Set the actual data source.
if (NO_ERROR != c->setDataSource(fd, offset, length)) {
c.clear();
} else {
wp<Client> w = c;
Mutex::Autolock lock(mLock);
// Add it to MediaPlayerService's client list so it can be tracked.
mClients.add(w);
}
::close(fd);
return c;
}
MediaPlayerService::Client::Client(const sp<MediaPlayerService>& service, pid_t pid,
int32_t connId, const sp<IMediaPlayerClient>& client, int audioSessionId)
{
LOGV("Client(%d) constructor", connId);
mPid = pid;
mConnId = connId;
mService = service;
// Save the client-side callback binder.
mClient = client;
mLoop = false;
mStatus = NO_INIT;
mAudioSessionId = audioSessionId;
#if CALLBACK_ANTAGONIZER
LOGD("create Antagonizer");
mAntagonizer = new Antagonizer(notify, this);
#endif
}
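The saved mClient binder is what carries events back to the application. A sketch of the notify path, close to the Gingerbread source but with the locking elided:
// Sketch: Client pushes player events back through the saved binder.
void MediaPlayerService::Client::notify(
        void* cookie, int msg, int ext1, int ext2) {
    Client* client = static_cast<Client*>(cookie);
    if (client != NULL && client->mClient != NULL) {
        // Crosses Binder back into the app's MediaPlayer listener.
        client->mClient->notify(msg, ext1, ext2);
    }
}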
status_t MediaPlayerService::Client::setDataSource(int fd, int64_t offset, int64_t length)
{
LOGV("setDataSource fd=%d, offset=%lld, length=%lld", fd, offset, length);
struct stat sb;
int ret = fstat(fd, &sb);
if (ret != 0) {
LOGE("fstat(%d) failed: %d, %s", fd, ret, strerror(errno));
return UNKNOWN_ERROR;
}
LOGV("st_dev = %llu", sb.st_dev);
LOGV("st_mode = %u", sb.st_mode);
LOGV("st_uid = %lu", sb.st_uid);
LOGV("st_gid = %lu", sb.st_gid);
LOGV("st_size = %llu", sb.st_size);
if (offset >= sb.st_size) {
LOGE("offset error");
::close(fd);
return UNKNOWN_ERROR;
}
if (offset + length > sb.st_size) {
length = sb.st_size - offset;
LOGV("calculated length = %lld", length);
}
// Determine the player type from the source data.
player_type playerType = getPlayerType(fd, offset, length);
LOGV("player type = %d", playerType);
// Create the right type of player.
sp<MediaPlayerBase> p = createPlayer(playerType);
if (p == NULL) return NO_INIT;
if (!p->hardwareOutput()) {
mAudioOutput = new AudioOutput(mAudioSessionId);
static_cast<MediaPlayerInterface*>(p.get())->setAudioSink(mAudioOutput);
}
// now set data source
mStatus = p->setDataSource(fd, offset, length);
if (mStatus == NO_ERROR) mPlayer = p;
return mStatus;
}
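getPlayerType() sniffs the data to choose a player. A simplified sketch of the fd variant under Gingerbread assumptions (the real function also asks the Sonivox engine whether the content is MIDI before falling back):
// Simplified sketch of getPlayerType(); MIDI probing elided.
static player_type getPlayerType(int fd, int64_t offset, int64_t length) {
    char buf[20];
    lseek(fd, offset, SEEK_SET);
    read(fd, buf, sizeof(buf));
    lseek(fd, offset, SEEK_SET); // rewind for the player
    long ident = *((long *)buf);
    if (ident == 0x5367674f) { // 'OggS': Ogg container
        return STAGEFRIGHT_PLAYER;
    }
    // Everything else gets the build default (STAGEFRIGHT_PLAYER unless
    // OpenCore is forced via a system property).
    return getDefaultPlayerType();
}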
sp<MediaPlayerBase> MediaPlayerService::Client::createPlayer(player_type playerType)
{
// determine if we have the right player type
sp<MediaPlayerBase> p = mPlayer;
if ((p != NULL) && (p->playerType() != playerType)) {
LOGV("delete player");
p.clear();
}
if (p == NULL) {
p = android::createPlayer(playerType, this, notify);
}
return p;
}
static sp<MediaPlayerBase> createPlayer(player_type playerType, void* cookie,
notify_callback_f notifyFunc)
{
sp<MediaPlayerBase> p;
switch (playerType) {
#ifndef NO_OPENCORE
case PV_PLAYER:
LOGV(" create PVPlayer");
p = new PVPlayer();
break;
#endif
case SONIVOX_PLAYER:
LOGV(" create MidiFile");
p = new MidiFile();
break;
case STAGEFRIGHT_PLAYER:
LOGV(" create StagefrightPlayer");
p = new StagefrightPlayer;
break;
case TEST_PLAYER:
LOGV("Create Test Player stub");
p = new TestPlayerStub();
break;
}
if (p != NULL) {
if (p->initCheck() == NO_ERROR) {
p->setNotifyCallback(cookie, notifyFunc);
} else {
p.clear();
}
}
if (p == NULL) {
LOGE("Failed to create player object");
}
return p;
}
3. StagefrightPlayer level
// StagefrightPlayer mostly forwards the upper layer's commands straight to AwesomePlayer.
StagefrightPlayer::StagefrightPlayer()
// It in turn creates an AwesomePlayer.
: mPlayer(new AwesomePlayer) {
LOGV("StagefrightPlayer");
mPlayer->setListener(this);
}
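Two of the forwarding methods, which in this era of the source really are one-liners:
// Typical StagefrightPlayer methods: thin pass-throughs to AwesomePlayer.
status_t StagefrightPlayer::start() {
    LOGV("start");
    return mPlayer->play();
}
status_t StagefrightPlayer::seekTo(int msec) {
    LOGV("seekTo");
    // AwesomePlayer works in microseconds.
    return mPlayer->seekTo((int64_t)msec * 1000);
}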
4. AwesomePlayer level
(1) surface
// The ISurface used for video output (backed by SurfaceFlinger's LayerBuffer).
void AwesomePlayer::setISurface(const sp<ISurface> &isurface) {
Mutex::Autolock autoLock(mLock);
mISurface = isurface;
}
status_t AwesomePlayer::initRenderer_l() {
if (mISurface == NULL) {
return OK;
}
sp<MetaData> meta = mVideoSource->getFormat();
int32_t format;
const char *component;
int32_t decodedWidth, decodedHeight;
CHECK(meta->findInt32(kKeyColorFormat, &format));
CHECK(meta->findCString(kKeyDecoderComponent, &component));
CHECK(meta->findInt32(kKeyWidth, &decodedWidth));
CHECK(meta->findInt32(kKeyHeight, &decodedHeight));
int32_t rotationDegrees;
if (!mVideoTrack->getFormat()->findInt32(
kKeyRotation, &rotationDegrees)) {
rotationDegrees = 0;
}
mVideoRenderer.clear();
// Must ensure that mVideoRenderer's destructor is actually executed
// before creating a new one.
IPCThreadState::self()->flushCommands();
if (!strncmp("OMX.", component, 4)) {
// Our OMX codecs allocate buffers on the media_server side
// therefore they require a remote IOMXRenderer that knows how
// to display them.
// Ask OMX (in the media_server process) to create the renderer.
sp<IOMXRenderer> native =
mClient.interface()->createRenderer(
mISurface, component,
(OMX_COLOR_FORMATTYPE)format,
decodedWidth, decodedHeight,
mVideoWidth, mVideoHeight,
rotationDegrees);
if (native == NULL) {
return NO_INIT;
}
// Wrap it in another video renderer; decoded frames go to it, then to the OMXRenderer, then to the hardware overlay.
mVideoRenderer = new AwesomeRemoteRenderer(native);
} else {
// Other decoders are instantiated locally and as a consequence
// allocate their buffers in local address space.
mVideoRenderer = new AwesomeLocalRenderer(
false, // previewOnly
component,
(OMX_COLOR_FORMATTYPE)format,
mISurface,
mVideoWidth, mVideoHeight,
decodedWidth, decodedHeight, rotationDegrees);
}
return mVideoRenderer->initCheck();
}
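AwesomeRemoteRenderer itself is only a thin shim: its render() pulls the buffer id that OMXCodec stashes in each frame's metadata (kKeyBufferID, set in on_message() below) and forwards it across Binder. Close to the original source:
// Sketch of AwesomeRemoteRenderer.
struct AwesomeRemoteRenderer : public AwesomeRenderer {
    AwesomeRemoteRenderer(const sp<IOMXRenderer> &target)
        : mTarget(target) {}
    virtual void render(MediaBuffer *buffer) {
        void *id;
        if (buffer->meta_data()->findPointer(kKeyBufferID, &id)) {
            // Only the OMX buffer id crosses Binder; the pixels already
            // live in media_server's address space.
            mTarget->render((IOMX::buffer_id)id);
        }
    }
private:
    sp<IOMXRenderer> mTarget;
};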
sp<IOMXRenderer> OMX::createRenderer(
const sp<ISurface> &surface,
const char *componentName,
OMX_COLOR_FORMATTYPE colorFormat,
size_t encodedWidth, size_t encodedHeight,
size_t displayWidth, size_t displayHeight,
int32_t rotationDegrees) {
Mutex::Autolock autoLock(mLock);
VideoRenderer *impl = NULL;
// Open the vendor's OpenMAX hardware interface library.
void *libHandle = dlopen("libstagefrighthw.so", RTLD_NOW);
if (libHandle) {
typedef VideoRenderer *(*CreateRendererWithRotationFunc)(
const sp<ISurface> &surface,
const char *componentName,
OMX_COLOR_FORMATTYPE colorFormat,
size_t displayWidth, size_t displayHeight,
size_t decodedWidth, size_t decodedHeight,
int32_t rotationDegrees);
typedef VideoRenderer *(*CreateRendererFunc)(
const sp<ISurface> &surface,
const char *componentName,
OMX_COLOR_FORMATTYPE colorFormat,
size_t displayWidth, size_t displayHeight,
size_t decodedWidth, size_t decodedHeight);
CreateRendererWithRotationFunc funcWithRotation =
(CreateRendererWithRotationFunc)dlsym(
libHandle,
"_Z26createRendererWithRotationRKN7android2spINS_8"
"ISurfaceEEEPKc20OMX_COLOR_FORMATTYPEjjjji");
if (funcWithRotation) {
// Implemented by the hardware interface library.
impl = (*funcWithRotation)(
surface, componentName, colorFormat,
displayWidth, displayHeight, encodedWidth, encodedHeight,
rotationDegrees);
} else {
CreateRendererFunc func =
(CreateRendererFunc)dlsym(
libHandle,
"_Z14createRendererRKN7android2spINS_8ISurfaceEEEPKc20"
"OMX_COLOR_FORMATTYPEjjjj");
if (func) {
impl = (*func)(surface, componentName, colorFormat,
displayWidth, displayHeight, encodedWidth, encodedHeight);
}
}
if (impl) {
impl = new SharedVideoRenderer(libHandle, impl);
libHandle = NULL;
}
if (libHandle) {
dlclose(libHandle);
libHandle = NULL;
}
}
// If the hardware renderer is unavailable, fall back to a software renderer.
if (!impl) {
LOGW("Using software renderer.");
impl = new SoftwareRenderer(
colorFormat,
surface,
displayWidth, displayHeight,
encodedWidth, encodedHeight);
if (((SoftwareRenderer *)impl)->initCheck() != OK) {
delete impl;
impl = NULL;
return NULL;
}
}
return new OMXRenderer(impl);
}
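On a render() call, OMXRenderer unpacks the buffer header and hands the raw pixels to whichever VideoRenderer was created above. Roughly (verify the body against your tree):
// Sketch of the media_server-side render entry point.
void OMXRenderer::render(IOMX::buffer_id buffer) {
    OMX_BUFFERHEADERTYPE *header = (OMX_BUFFERHEADERTYPE *)buffer;
    mImpl->render(
            header->pBuffer + header->nOffset,
            header->nFilledLen,
            header->pPlatformPrivate);
}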
5. OpenMAX level
(1) How decoded data travels back up.
// OMX registers its callback table with the OpenMAX component:
OMX_CALLBACKTYPE OMXNodeInstance::kCallbacks = {
&OnEvent, &OnEmptyBufferDone, &OnFillBufferDone
};
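This callback table is handed to the component when the node is created; in OMX::allocateNode() the plugin's makeComponentInstance() (ultimately OMX_GetHandle) receives it, roughly:
// Sketch of where kCallbacks gets wired up (see OMX::allocateNode()).
OMXNodeInstance *instance = new OMXNodeInstance(this, observer);
OMX_COMPONENTTYPE *handle;
OMX_ERRORTYPE err = mMaster->makeComponentInstance(
        name, &OMXNodeInstance::kCallbacks,
        instance /* comes back as pAppData in each callback */,
        &handle);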
// Vendor component (Exynos example) invoking the registered callback:
static OMX_ERRORTYPE SEC_OutputBufferReturn(OMX_COMPONENTTYPE *pOMXComponent)
{
pSECComponent->pCallbacks->FillBufferDone(pOMXComponent, pSECComponent->callbackData, bufferHeader);
return ret;
}
// Called by the OpenMAX component after each frame is decoded.
OMX_ERRORTYPE OMXNodeInstance::OnFillBufferDone(
OMX_IN OMX_HANDLETYPE hComponent,
OMX_IN OMX_PTR pAppData,
OMX_IN OMX_BUFFERHEADERTYPE* pBuffer) {
OMXNodeInstance *instance = static_cast<OMXNodeInstance *>(pAppData);
if (instance->mDying) {
return OMX_ErrorNone;
}
// owner() is the OMX object.
return instance->owner()->OnFillBufferDone(instance->nodeID(), pBuffer);
}
OMX_ERRORTYPE OMX::OnFillBufferDone(
node_id node, OMX_IN OMX_BUFFERHEADERTYPE *pBuffer) {
LOGV("OnFillBufferDone buffer=%p", pBuffer);
omx_message msg;
msg.type = omx_message::FILL_BUFFER_DONE;
msg.node = node;
msg.u.extended_buffer_data.buffer = pBuffer;
msg.u.extended_buffer_data.range_offset = pBuffer->nOffset;
msg.u.extended_buffer_data.range_length = pBuffer->nFilledLen;
msg.u.extended_buffer_data.flags = pBuffer->nFlags;
msg.u.extended_buffer_data.timestamp = pBuffer->nTimeStamp;
msg.u.extended_buffer_data.platform_private = pBuffer->pPlatformPrivate;
msg.u.extended_buffer_data.data_ptr = pBuffer->pBuffer;
// CallbackDispatcher::post()
findDispatcher(node)->post(msg);
return OMX_ErrorNone;
}
// CallbackDispatcher owns a thread; post() queues the message and signals that thread.
void OMX::CallbackDispatcher::post(const omx_message &msg) {
Mutex::Autolock autoLock(mLock);
mQueue.push_back(msg);
mQueueChanged.signal();
}
void OMX::CallbackDispatcher::threadEntry() {
setpriority(PRIO_PROCESS, 0, ANDROID_PRIORITY_AUDIO);
prctl(PR_SET_NAME, (unsigned long)"OMXCallbackDisp", 0, 0, 0);
for (;;) {
omx_message msg;
{
Mutex::Autolock autoLock(mLock);
while (!mDone && mQueue.empty()) {
mQueueChanged.wait(mLock);
}
if (mDone) {
break;
}
msg = *mQueue.begin();
mQueue.erase(mQueue.begin());
}
dispatch(msg);
}
}
void OMX::CallbackDispatcher::dispatch(const omx_message &msg) {
if (mOwner == NULL) {
LOGV("Would have dispatched a message to a node that's already gone.");
return;
}
// Here mOwner is the OMXNodeInstance.
mOwner->onMessage(msg);
}
void OMXNodeInstance::onMessage(const omx_message &msg) {
if (msg.type == omx_message::FILL_BUFFER_DONE) {
OMX_BUFFERHEADERTYPE *buffer =
static_cast<OMX_BUFFERHEADERTYPE *>(
msg.u.extended_buffer_data.buffer);
BufferMeta *buffer_meta =
static_cast<BufferMeta *>(buffer->pAppPrivate);
// Copy the decoded data from the OMX buffer back into the client-side
// backup memory (a no-op when the buffer is shared rather than backed up).
buffer_meta->CopyFromOMX(buffer);
}
// mObserver is the OMXCodecObserver.
mObserver->onMessage(msg);
}
virtual void OMXCodecObserver::onMessage(const omx_message &msg) {
sp<OMXCodec> codec = mTarget.promote();
if (codec.get() != NULL) {
Mutex::Autolock autoLock(codec->mLock);
// codec is the OMXCodec.
codec->on_message(msg);
codec.clear();
}
}
void OMXCodec::on_message(const omx_message &msg) {
switch (msg.type) {
case omx_message::EVENT:
{
onEvent(
msg.u.event_data.event, msg.u.event_data.data1,
msg.u.event_data.data2);
break;
}
case omx_message::EMPTY_BUFFER_DONE:
{
IOMX::buffer_id buffer = msg.u.extended_buffer_data.buffer;
CODEC_LOGV("EMPTY_BUFFER_DONE(buffer: %p)", buffer);
Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexInput];
size_t i = 0;
while (i < buffers->size() && (*buffers)[i].mBuffer != buffer) {
++i;
}
CHECK(i < buffers->size());
if (!(*buffers)[i].mOwnedByComponent) {
LOGW("We already own input buffer %p, yet received "
"an EMPTY_BUFFER_DONE.", buffer);
}
{
BufferInfo *info = &buffers->editItemAt(i);
info->mOwnedByComponent = false;
if (info->mMediaBuffer != NULL) {
// It is time to release the media buffers storing meta data
info->mMediaBuffer->release();
info->mMediaBuffer = NULL;
}
}
if (mPortStatus[kPortIndexInput] == DISABLING) {
CODEC_LOGV("Port is disabled, freeing buffer %p", buffer);
status_t err =
mOMX->freeBuffer(mNode, kPortIndexInput, buffer);
CHECK_EQ(err, OK);
buffers->removeAt(i);
} else if (mState != ERROR
&& mPortStatus[kPortIndexInput] != SHUTTING_DOWN) {
CHECK_EQ(mPortStatus[kPortIndexInput], ENABLED);
drainInputBuffer(&buffers->editItemAt(i));
}
break;
}
case omx_message::FILL_BUFFER_DONE:
{
IOMX::buffer_id buffer = msg.u.extended_buffer_data.buffer;
OMX_U32 flags = msg.u.extended_buffer_data.flags;
CODEC_LOGV("FILL_BUFFER_DONE(buffer: %p, size: %ld, flags: 0x%08lx, timestamp: %lld us (%.2f secs))",
buffer,
msg.u.extended_buffer_data.range_length,
flags,
msg.u.extended_buffer_data.timestamp,
msg.u.extended_buffer_data.timestamp / 1E6);
Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexOutput];
size_t i = 0;
while (i < buffers->size() && (*buffers)[i].mBuffer != buffer) {
++i;
}
CHECK(i < buffers->size());
BufferInfo *info = &buffers->editItemAt(i);
if (!info->mOwnedByComponent) {
LOGW("We already own output buffer %p, yet received "
"a FILL_BUFFER_DONE.", buffer);
}
info->mOwnedByComponent = false;
if (mPortStatus[kPortIndexOutput] == DISABLING) {
CODEC_LOGV("Port is disabled, freeing buffer %p", buffer);
status_t err =
mOMX->freeBuffer(mNode, kPortIndexOutput, buffer);
CHECK_EQ(err, OK);
buffers->removeAt(i);
#if 0
} else if (mPortStatus[kPortIndexOutput] == ENABLED
&& (flags & OMX_BUFFERFLAG_EOS)) {
CODEC_LOGV("No more output data.");
mNoMoreOutputData = true;
mBufferFilled.signal();
#endif
} else if (mPortStatus[kPortIndexOutput] != SHUTTING_DOWN) {
CHECK_EQ(mPortStatus[kPortIndexOutput], ENABLED);
if (info->mMediaBuffer == NULL) {
CHECK(mOMXLivesLocally);
CHECK(mQuirks & kRequiresAllocateBufferOnOutputPorts);
CHECK(mQuirks & kDefersOutputBufferAllocation);
// The qcom video decoders on Nexus don't actually allocate
// output buffer memory on a call to OMX_AllocateBuffer
// the "pBuffer" member of the OMX_BUFFERHEADERTYPE
// structure is only filled in later.
info->mMediaBuffer = new MediaBuffer(
msg.u.extended_buffer_data.data_ptr,
info->mSize);
info->mMediaBuffer->setObserver(this);
}
MediaBuffer *buffer = info->mMediaBuffer;
if (msg.u.extended_buffer_data.range_offset
+ msg.u.extended_buffer_data.range_length
> buffer->size()) {
CODEC_LOGE(
"Codec lied about its buffer size requirements, "
"sending a buffer larger than the originally "
"advertised size in FILL_BUFFER_DONE!");
}
buffer->set_range(
msg.u.extended_buffer_data.range_offset,
msg.u.extended_buffer_data.range_length);
buffer->meta_data()->clear();
buffer->meta_data()->setInt64(
kKeyTime, msg.u.extended_buffer_data.timestamp);
if (msg.u.extended_buffer_data.flags & OMX_BUFFERFLAG_SYNCFRAME) {
buffer->meta_data()->setInt32(kKeyIsSyncFrame, true);
}
if (msg.u.extended_buffer_data.flags & OMX_BUFFERFLAG_CODECCONFIG) {
buffer->meta_data()->setInt32(kKeyIsCodecConfig, true);
}
if (mQuirks & kOutputBuffersAreUnreadable) {
buffer->meta_data()->setInt32(kKeyIsUnreadable, true);
}
buffer->meta_data()->setPointer(
kKeyPlatformPrivate,
msg.u.extended_buffer_data.platform_private);
buffer->meta_data()->setPointer(
kKeyBufferID,
msg.u.extended_buffer_data.buffer);
if (msg.u.extended_buffer_data.flags & OMX_BUFFERFLAG_EOS) {
CODEC_LOGV("No more output data.");
mNoMoreOutputData = true;
}
if (mTargetTimeUs >= 0) {
CHECK(msg.u.extended_buffer_data.timestamp <= mTargetTimeUs);
if (msg.u.extended_buffer_data.timestamp < mTargetTimeUs) {
CODEC_LOGV(
"skipping output buffer at timestamp %lld us",
msg.u.extended_buffer_data.timestamp);
fillOutputBuffer(info);
break;
}
CODEC_LOGV(
"returning output buffer at target timestamp "
"%lld us",
msg.u.extended_buffer_data.timestamp);
mTargetTimeUs = -1;
}
// Who is being signalled here? The caller blocked in read().
mFilledBuffers.push_back(i);
mBufferFilled.signal();
}
break;
}
default:
{
CHECK(!"should not be here.");
break;
}
}
}
status_t OMXCodec::read(
MediaBuffer **buffer, const ReadOptions *options) {
*buffer = NULL;
Mutex::Autolock autoLock(mLock);
if (mState != EXECUTING && mState != RECONFIGURING) {
return UNKNOWN_ERROR;
}
bool seeking = false;
int64_t seekTimeUs;
ReadOptions::SeekMode seekMode;
if (options && options->getSeekTo(&seekTimeUs, &seekMode)) {
seeking = true;
}
int64_t skipTimeUs;
if (options && options->getSkipFrame(&skipTimeUs)) {
mSkipTimeUs = skipTimeUs;
} else {
mSkipTimeUs = -1;
}
if (mInitialBufferSubmit) {
mInitialBufferSubmit = false;
if (seeking) {
CHECK(seekTimeUs >= 0);
mSeekTimeUs = seekTimeUs;
mSeekMode = seekMode;
// There's no reason to trigger the code below, there's
// nothing to flush yet.
seeking = false;
mPaused = false;
}
drainInputBuffers();
if (mState == EXECUTING) {
// Otherwise mState == RECONFIGURING and this code will trigger
// after the output port is reenabled.
fillOutputBuffers();
}
}
if (seeking) {
CODEC_LOGV("seeking to %lld us (%.2f secs)", seekTimeUs, seekTimeUs / 1E6);
mSignalledEOS = false;
CHECK(seekTimeUs >= 0);
mSeekTimeUs = seekTimeUs;
mSeekMode = seekMode;
mFilledBuffers.clear();
CHECK_EQ(mState, EXECUTING);
bool emulateInputFlushCompletion = !flushPortAsync(kPortIndexInput);
bool emulateOutputFlushCompletion = !flushPortAsync(kPortIndexOutput);
if (emulateInputFlushCompletion) {
onCmdComplete(OMX_CommandFlush, kPortIndexInput);
}
if (emulateOutputFlushCompletion) {
onCmdComplete(OMX_CommandFlush, kPortIndexOutput);
}
while (mSeekTimeUs >= 0) {
mBufferFilled.wait(mLock);
}
}
// If nothing is ready yet, wait until a buffer is filled.
while (mState != ERROR && !mNoMoreOutputData && mFilledBuffers.empty()) {
mBufferFilled.wait(mLock);
}
if (mState == ERROR) {
return UNKNOWN_ERROR;
}
if (mFilledBuffers.empty()) {
return mSignalledEOS ? mFinalStatus : ERROR_END_OF_STREAM;
}
if (mOutputPortSettingsHaveChanged) {
mOutputPortSettingsHaveChanged = false;
return INFO_FORMAT_CHANGED;
}
size_t index = *mFilledBuffers.begin();
mFilledBuffers.erase(mFilledBuffers.begin());
BufferInfo *info = &mPortBuffers[kPortIndexOutput].editItemAt(index);
info->mMediaBuffer->add_ref();
// Hand the buffer back to the caller.
*buffer = info->mMediaBuffer;
return OK;
}
// read() is also called elsewhere in this class, but that path appears to release memory
// rather than display frames; normal playback goes through the function below.
// Who calls onVideoEvent() is covered shortly.
void AwesomePlayer::onVideoEvent() {
Mutex::Autolock autoLock(mLock);
if (!mVideoEventPending) {
// The event has been cancelled in reset_l() but had already
// been scheduled for execution at that time.
return;
}
mVideoEventPending = false;
if (mSeeking) {
if (mLastVideoBuffer) {
mLastVideoBuffer->release();
mLastVideoBuffer = NULL;
}
if (mVideoBuffer) {
mVideoBuffer->release();
mVideoBuffer = NULL;
}
if (mCachedSource != NULL && mAudioSource != NULL) {
// We're going to seek the video source first, followed by
// the audio source.
// In order to avoid jumps in the DataSource offset caused by
// the audio codec prefetching data from the old locations
// while the video codec is already reading data from the new
// locations, we'll "pause" the audio source, causing it to
// stop reading input data until a subsequent seek.
if (mAudioPlayer != NULL) {
mAudioPlayer->pause();
}
mAudioSource->pause();
}
}
if (!mVideoBuffer) {
MediaSource::ReadOptions options;
if (mSeeking) {
LOGV("seeking to %lld us (%.2f secs)", mSeekTimeUs, mSeekTimeUs / 1E6);
options.setSeekTo(
mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST_SYNC);
}
for (;;) {
// mVideoSource->read() is exactly OMXCodec::read() shown above.
status_t err = mVideoSource->read(&mVideoBuffer, &options);
options.clearSeekTo();
if (err != OK) {
CHECK_EQ(mVideoBuffer, NULL);
if (err == INFO_FORMAT_CHANGED) {
LOGV("VideoSource signalled format change.");
if (mVideoRenderer != NULL) {
mVideoRendererIsPreview = false;
err = initRenderer_l();
if (err == OK) {
continue;
}
// fall through
} else {
continue;
}
}
// So video playback is complete, but we may still have
// a seek request pending that needs to be applied
// to the audio track.
if (mSeeking) {
LOGV("video stream ended while seeking!");
}
finishSeekIfNecessary(-1);
mFlags |= VIDEO_AT_EOS;
postStreamDoneEvent_l(err);
return;
}
if (mVideoBuffer->range_length() == 0) {
// Some decoders, notably the PV AVC software decoder
// return spurious empty buffers that we just want to ignore.
mVideoBuffer->release();
mVideoBuffer = NULL;
continue;
}
// A frame has been read.
break;
}
}
int64_t timeUs;
CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));
{
Mutex::Autolock autoLock(mMiscStateLock);
mVideoTimeUs = timeUs;
}
bool wasSeeking = mSeeking;
finishSeekIfNecessary(timeUs);
TimeSource *ts = (mFlags & AUDIO_AT_EOS) ? &mSystemTimeSource : mTimeSource;
if (mDecryptHandle != NULL) {
mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
Playback::PAUSE, 0);
mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
Playback::START, timeUs / 1000);
}
if (mFlags & FIRST_FRAME) {
mFlags &= ~FIRST_FRAME;
mTimeSourceDeltaUs = ts->getRealTimeUs() - timeUs;
}
int64_t realTimeUs, mediaTimeUs;
if (!(mFlags & AUDIO_AT_EOS) && mAudioPlayer != NULL
&& mAudioPlayer->getMediaTimeMapping(&realTimeUs, &mediaTimeUs)) {
mTimeSourceDeltaUs = realTimeUs - mediaTimeUs;
}
int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;
int64_t latenessUs = nowUs - timeUs;
if (wasSeeking) {
// Let's display the first frame after seeking right away.
latenessUs = 0;
}
if (mRTPSession != NULL) {
// We'll completely ignore timestamps for gtalk videochat
// and we'll play incoming video as fast as we get it.
latenessUs = 0;
}
if (latenessUs > 40000) {
// We're more than 40ms late.
LOGV("we're late by %lld us (%.2f secs)", latenessUs, latenessUs / 1E6);
mVideoBuffer->release();
mVideoBuffer = NULL;
postVideoEvent_l();
return;
}
if (latenessUs < -10000) {
// We're more than 10ms early.
// Repost to the event-queue thread so this function runs again shortly.
postVideoEvent_l(10000);
return;
}
if (mVideoRendererIsPreview || mVideoRenderer == NULL) {
mVideoRendererIsPreview = false;
status_t err = initRenderer_l();
if (err != OK) {
finishSeekIfNecessary(-1);
mFlags |= VIDEO_AT_EOS;
// Post a stream-done event to the event-queue thread.
postStreamDoneEvent_l(err);
return;
}
}
if (mVideoRenderer != NULL) {
// This hands the frame to the renderer, and on to the overlay.
mVideoRenderer->render(mVideoBuffer);
}
// Release the previous buffer once the new frame has been displayed.
if (mLastVideoBuffer) {
mLastVideoBuffer->release();
mLastVideoBuffer = NULL;
}
mLastVideoBuffer = mVideoBuffer;
mVideoBuffer = NULL;
postVideoEvent_l();
}
// Post the video event to the queue thread.
void AwesomePlayer::postVideoEvent_l(int64_t delayUs) {
if (mVideoEventPending) {
return;
}
mVideoEventPending = true;
mQueue.postEventWithDelay(mVideoEvent, delayUs < 0 ? 10000 : delayUs);
// mQueue is a TimedEventQueue; its thread receives mVideoEvent and calls onVideoEvent().
}
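The event-to-method plumbing is a small adapter, close to the original source:
// The wrapper that ties mVideoEvent to onVideoEvent().
struct AwesomeEvent : public TimedEventQueue::Event {
    AwesomeEvent(AwesomePlayer *player, void (AwesomePlayer::*method)())
        : mPlayer(player), mMethod(method) {}
protected:
    virtual void fire(TimedEventQueue *queue, int64_t /* now_us */) {
        // Runs on the TimedEventQueue thread.
        (mPlayer->*mMethod)();
    }
private:
    AwesomePlayer *mPlayer;
    void (AwesomePlayer::*mMethod)();
};
// In AwesomePlayer's constructor:
// mVideoEvent = new AwesomeEvent(this, &AwesomePlayer::onVideoEvent);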