include/media/stagefright/ACodec.h  (+10 −3)

@@ -25,6 +25,7 @@
 #include <media/stagefright/foundation/AHierarchicalStateMachine.h>
 #include <media/stagefright/CodecBase.h>
 #include <media/stagefright/FrameRenderTracker.h>
+#include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/SkipCutBuffer.h>
 #include <utils/NativeHandle.h>
 #include <OMX_Audio.h>

@@ -36,6 +37,7 @@ namespace android {
 struct ABuffer;
 struct MemoryDealer;
 struct DescribeColorFormat2Params;
+struct DataConverter;

 struct ACodec : public AHierarchicalStateMachine, public CodecBase {
     ACodec();

@@ -188,8 +190,11 @@ private:
         Status mStatus;
         unsigned mDequeuedAt;

-        sp<ABuffer> mData;
-        sp<RefBase> mMemRef;
+        sp<ABuffer> mData;      // the client's buffer; if not using data conversion, this is the
+                                // codec buffer; otherwise, it is allocated separately
+        sp<RefBase> mMemRef;    // and a reference to the IMemory, so it does not go away
+        sp<ABuffer> mCodecData; // the codec's buffer
+        sp<RefBase> mCodecRef;  // and a reference to the IMemory
         sp<GraphicBuffer> mGraphicBuffer;
         sp<NativeHandle> mNativeHandle;
         int mFenceFd;

@@ -280,6 +285,7 @@ private:
     bool mLegacyAdaptiveExperiment;
     int32_t mMetadataBuffersToSubmit;
     size_t mNumUndequeuedBuffers;
+    sp<DataConverter> mConverter[2];

     int64_t mRepeatFrameDelayUs;
     int64_t mMaxPtsGapUs;

@@ -441,7 +447,8 @@ private:
             bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel);

     status_t setupRawAudioFormat(
-            OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels);
+            OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels,
+            AudioEncoding encoding = kAudioEncodingPcm16bit);

     status_t setPriority(int32_t priority);
     status_t setOperatingRate(float rateFloat, bool isVideo);
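The new include/DataConverter.h that this header forward-declares (and that ACodec.cpp includes below) is not part of the excerpt. Judging from the DataConverter.cpp implementation further down, its declarations have roughly the following shape; this is a reconstruction for orientation, not the actual header:

    // Sketch of include/DataConverter.h, inferred from usage in this change (not verbatim).
    #include <media/stagefright/MediaDefs.h>   // assumed home of AudioEncoding
    #include <utils/Errors.h>
    #include <utils/RefBase.h>

    namespace android {

    struct ABuffer;

    // Base converter: copies the source buffer into the target buffer unchanged.
    struct DataConverter : public RefBase {
        // copies/converts source into target after checking capacity; sets target's range
        status_t convert(const sp<ABuffer> &source, sp<ABuffer> &target);
        // buffer sizes needed on either side of the conversion
        virtual size_t sourceSize(size_t targetSize);
        virtual size_t targetSize(size_t sourceSize);
    protected:
        virtual status_t safeConvert(const sp<ABuffer> &source, sp<ABuffer> &target);
        virtual ~DataConverter();
    };

    // Converter between formats with fixed per-sample sizes.
    struct SampleConverterBase : public DataConverter {
        virtual size_t sourceSize(size_t targetSize);
        virtual size_t targetSize(size_t sourceSize);
    protected:
        SampleConverterBase(size_t sourceSampleSize, size_t targetSampleSize)
            : mSourceSampleSize(sourceSampleSize), mTargetSampleSize(targetSampleSize) { }
        size_t mSourceSampleSize;
        size_t mTargetSampleSize;
    };

    // PCM conversion between 8-bit unsigned, 16-bit signed and float samples.
    struct AudioConverter : public SampleConverterBase {
        static AudioConverter *Create(AudioEncoding source, AudioEncoding target);
    protected:
        virtual status_t safeConvert(const sp<ABuffer> &src, sp<ABuffer> &tgt);
    private:
        AudioConverter(
                AudioEncoding source, size_t sourceSampleSize,
                AudioEncoding target, size_t targetSampleSize)
            : SampleConverterBase(sourceSampleSize, targetSampleSize),
              mFrom(source), mTo(target) { }
        AudioEncoding mFrom;
        AudioEncoding mTo;
    };

    }  // namespace android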
media/libstagefright/ACodec.cpp  (+156 −32)

@@ -52,6 +52,7 @@
 #include <OMX_AsString.h>

 #include "include/avc_utils.h"
+#include "include/DataConverter.h"
 #include "omx/OMXUtils.h"

 namespace android {

@@ -114,6 +115,13 @@ private:
     DISALLOW_EVIL_CONSTRUCTORS(MessageList);
 };

+static sp<DataConverter> getCopyConverter() {
+    static pthread_once_t once = PTHREAD_ONCE_INIT; // const-inited
+    static sp<DataConverter> sCopyConverter;        // zero-inited
+    pthread_once(&once, [](){ sCopyConverter = new DataConverter(); });
+    return sCopyConverter;
+}
+
 struct CodecObserver : public BnOMXObserver {
     CodecObserver() {}

@@ -505,6 +513,7 @@ ACodec::ACodec()
       mOutputMetadataType(kMetadataBufferTypeInvalid),
       mLegacyAdaptiveExperiment(false),
       mMetadataBuffersToSubmit(0),
+      mNumUndequeuedBuffers(0),
       mRepeatFrameDelayUs(-1ll),
       mMaxPtsGapUs(-1ll),
       mMaxFps(-1),

@@ -781,7 +790,7 @@ status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) {
     if (err == OK) {
         MetadataBufferType type =
                 portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType;
-        int32_t bufSize = def.nBufferSize;
+        size_t bufSize = def.nBufferSize;
         if (type == kMetadataBufferTypeGrallocSource) {
             bufSize = sizeof(VideoGrallocMetadata);
         } else if (type == kMetadataBufferTypeANWBuffer) {

@@ -792,33 +801,47 @@ status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) {
         // metadata size as we prefer to generate native source metadata, but component
         // may require gralloc source. For camera source, allocate at least enough
         // size for native metadata buffers.
-        int32_t allottedSize = bufSize;
+        size_t allottedSize = bufSize;
         if (portIndex == kPortIndexInput && type >= kMetadataBufferTypeGrallocSource) {
             bufSize = max(sizeof(VideoGrallocMetadata), sizeof(VideoNativeMetadata));
         } else if (portIndex == kPortIndexInput && type == kMetadataBufferTypeCameraSource) {
-            bufSize = max(bufSize, (int32_t)sizeof(VideoNativeMetadata));
+            bufSize = max(bufSize, sizeof(VideoNativeMetadata));
+        }
+
+        size_t conversionBufferSize = 0;
+
+        sp<DataConverter> converter = mConverter[portIndex];
+        if (converter != NULL) {
+            // here we assume sane conversions of max 4:1, so result fits in int32
+            if (portIndex == kPortIndexInput) {
+                conversionBufferSize = converter->sourceSize(bufSize);
+            } else {
+                conversionBufferSize = converter->targetSize(bufSize);
+            }
         }

         size_t alignment = MemoryDealer::getAllocationAlignment();

-        ALOGV("[%s] Allocating %u buffers of size %d/%d (from %u using %s) on %s port",
+        ALOGV("[%s] Allocating %u buffers of size %zu/%zu (from %u using %s) on %s port",
                 mComponentName.c_str(),
                 def.nBufferCountActual, bufSize, allottedSize, def.nBufferSize, asString(type),
                 portIndex == kPortIndexInput ? "input" : "output");

-        if (bufSize == 0 || bufSize > kMaxCodecBufferSize) {
+        // verify buffer sizes to avoid overflow in align()
+        if (bufSize == 0 || max(bufSize, conversionBufferSize) > kMaxCodecBufferSize) {
             ALOGE("b/22885421");
             return NO_MEMORY;
         }

         // don't modify bufSize as OMX may not expect it to increase after negotiation
         size_t alignedSize = align(bufSize, alignment);
-        if (def.nBufferCountActual > SIZE_MAX / alignedSize) {
+        size_t alignedConvSize = align(conversionBufferSize, alignment);
+        if (def.nBufferCountActual > SIZE_MAX / (alignedSize + alignedConvSize)) {
             ALOGE("b/22885421");
             return NO_MEMORY;
         }

-        size_t totalSize = def.nBufferCountActual * alignedSize;
+        size_t totalSize = def.nBufferCountActual * (alignedSize + alignedConvSize);
         mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec");

         for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) {
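To make the sizing above concrete: suppose the component produces 8192-byte 16-bit output buffers while the client asked for float PCM. The output-port converter is then 16-bit to float, the conversion buffer is twice the codec buffer, and each slot handed to the MemoryDealer holds both. A rough sketch with assumed numbers (illustrative only, not from the patch):

    size_t bufSize = 8192;                                     // codec's 16-bit output buffer (assumed)
    sp<DataConverter> converter(AudioConverter::Create(
            kAudioEncodingPcm16bit, kAudioEncodingPcmFloat));  // output-port converter
    size_t conversionBufferSize = converter->targetSize(bufSize);   // 8192 / 2 * 4 = 16384 bytes
    size_t alignment = MemoryDealer::getAllocationAlignment();
    size_t perBuffer = align(bufSize, alignment) + align(conversionBufferSize, alignment);
    // with def.nBufferCountActual == 4, the dealer is created with 4 * perBuffer bytes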
"input" : "output"); if (bufSize == 0 || bufSize > kMaxCodecBufferSize) { // verify buffer sizes to avoid overflow in align() if (bufSize == 0 || max(bufSize, conversionBufferSize) > kMaxCodecBufferSize) { ALOGE("b/22885421"); ALOGE("b/22885421"); return NO_MEMORY; return NO_MEMORY; } } // don't modify bufSize as OMX may not expect it to increase after negotiation // don't modify bufSize as OMX may not expect it to increase after negotiation size_t alignedSize = align(bufSize, alignment); size_t alignedSize = align(bufSize, alignment); if (def.nBufferCountActual > SIZE_MAX / alignedSize) { size_t alignedConvSize = align(conversionBufferSize, alignment); if (def.nBufferCountActual > SIZE_MAX / (alignedSize + alignedConvSize)) { ALOGE("b/22885421"); ALOGE("b/22885421"); return NO_MEMORY; return NO_MEMORY; } } size_t totalSize = def.nBufferCountActual * alignedSize; size_t totalSize = def.nBufferCountActual * (alignedSize + alignedConvSize); mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec"); mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec"); for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) { for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) { Loading Loading @@ -857,6 +880,7 @@ status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) { // because Widevine source only receives these base addresses. // because Widevine source only receives these base addresses. info.mData = new ABuffer(ptr != NULL ? ptr : (void *)native_handle, bufSize); info.mData = new ABuffer(ptr != NULL ? ptr : (void *)native_handle, bufSize); info.mNativeHandle = NativeHandle::create(native_handle, true /* ownsHandle */); info.mNativeHandle = NativeHandle::create(native_handle, true /* ownsHandle */); info.mCodecData = info.mData; } else if (mQuirks & requiresAllocateBufferBit) { } else if (mQuirks & requiresAllocateBufferBit) { err = mOMX->allocateBufferWithBackup( err = mOMX->allocateBufferWithBackup( mNode, portIndex, mem, &info.mBufferID, allottedSize); mNode, portIndex, mem, &info.mBufferID, allottedSize); Loading @@ -865,11 +889,27 @@ status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) { } } if (mem != NULL) { if (mem != NULL) { info.mData = new ABuffer(mem->pointer(), bufSize); info.mCodecData = new ABuffer(mem->pointer(), bufSize); info.mCodecRef = mem; if (type == kMetadataBufferTypeANWBuffer) { if (type == kMetadataBufferTypeANWBuffer) { ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1; ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1; } } // if we require conversion, allocate conversion buffer for client use; // otherwise, reuse codec buffer if (mConverter[portIndex] != NULL) { CHECK_GT(conversionBufferSize, (size_t)0); mem = mDealer[portIndex]->allocate(conversionBufferSize); if (mem == NULL|| mem->pointer() == NULL) { return NO_MEMORY; } info.mData = new ABuffer(mem->pointer(), conversionBufferSize); info.mMemRef = mem; info.mMemRef = mem; } else { info.mData = info.mCodecData; info.mMemRef = info.mCodecRef; } } } mBuffers[portIndex].push(info); mBuffers[portIndex].push(info); Loading Loading @@ -1062,6 +1102,7 @@ status_t ACodec::allocateOutputBuffersFromNativeWindow() { info.mIsReadFence = false; info.mIsReadFence = false; info.mRenderInfo = NULL; info.mRenderInfo = NULL; info.mData = new ABuffer(NULL /* data */, bufferSize /* capacity */); info.mData = new ABuffer(NULL /* data */, bufferSize /* capacity */); info.mCodecData = info.mData; info.mGraphicBuffer = graphicBuffer; info.mGraphicBuffer = graphicBuffer; 
@@ -1944,6 +1987,10 @@ status_t ACodec::configureCodec(
         }
     }

+    AudioEncoding pcmEncoding = kAudioEncodingPcm16bit;
+    (void)msg->findInt32("pcm-encoding", (int32_t*)&pcmEncoding);
+    // invalid encodings will default to PCM-16bit in setupRawAudioFormat.
+
     if (video) {
         // determine need for software renderer
         bool usingSwRenderer = false;

@@ -2148,7 +2195,7 @@ status_t ACodec::configureCodec(
                 || !msg->findInt32("sample-rate", &sampleRate)) {
            err = INVALID_OPERATION;
         } else {
-            err = setupRawAudioFormat(kPortIndexInput, sampleRate, numChannels);
+            err = setupRawAudioFormat(kPortIndexInput, sampleRate, numChannels, pcmEncoding);
         }
     } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC3)) {
         int32_t numChannels;

@@ -2222,6 +2269,25 @@ status_t ACodec::configureCodec(
             mOutputFormat = outputFormat;
         }
     }

+    // create data converters if needed
+    if (!video && err == OK) {
+        AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit;
+        if (encoder) {
+            (void)mInputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);
+            mConverter[kPortIndexInput] = AudioConverter::Create(pcmEncoding, codecPcmEncoding);
+            if (mConverter[kPortIndexInput] != NULL) {
+                mInputFormat->setInt32("pcm-encoding", pcmEncoding);
+            }
+        } else {
+            (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);
+            mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding);
+            if (mConverter[kPortIndexOutput] != NULL) {
+                mOutputFormat->setInt32("pcm-encoding", pcmEncoding);
+            }
+        }
+    }
+
     return err;
 }
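The "pcm-encoding" entry read here is the client-facing knob. For reference, a decoder client that wants float output would put it on the format passed to configure(); a minimal sketch with assumed mime type and parameters:

    // Sketch of the client side; values are assumed, not from the patch.
    sp<AMessage> format = new AMessage;
    format->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
    format->setInt32("sample-rate", 48000);
    format->setInt32("channel-count", 2);
    format->setInt32("pcm-encoding", kAudioEncodingPcmFloat);  // request float PCM
    // If the OMX component only produces 16-bit PCM, the block above creates an
    // AudioConverter on the output port and advertises float in mOutputFormat.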
@@ -2772,7 +2838,7 @@ status_t ACodec::setupFlacCodec(
 }

 status_t ACodec::setupRawAudioFormat(
-        OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels) {
+        OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels, AudioEncoding encoding) {
     OMX_PARAM_PORTDEFINITIONTYPE def;
     InitOMXParams(&def);
     def.nPortIndex = portIndex;

@@ -2805,9 +2871,23 @@ status_t ACodec::setupRawAudioFormat(
     }

     pcmParams.nChannels = numChannels;
-    pcmParams.eNumData = OMX_NumericalDataSigned;
-    pcmParams.bInterleaved = OMX_TRUE;
-    pcmParams.nBitPerSample = 16;
+    switch (encoding) {
+        case kAudioEncodingPcm8bit:
+            pcmParams.eNumData = OMX_NumericalDataUnsigned;
+            pcmParams.nBitPerSample = 8;
+            break;
+        case kAudioEncodingPcmFloat:
+            pcmParams.eNumData = OMX_NumericalDataFloat;
+            pcmParams.nBitPerSample = 32;
+            break;
+        case kAudioEncodingPcm16bit:
+            pcmParams.eNumData = OMX_NumericalDataSigned;
+            pcmParams.nBitPerSample = 16;
+            break;
+        default:
+            return BAD_VALUE;
+    }
+    pcmParams.bInterleaved = OMX_TRUE;
     pcmParams.nSamplingRate = sampleRate;
     pcmParams.ePCMMode = OMX_AUDIO_PCMModeLinear;

@@ -2815,9 +2895,18 @@ status_t ACodec::setupRawAudioFormat(
         return OMX_ErrorNone;
     }

-    return mOMX->setParameter(
-            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
+    err = mOMX->setParameter(
+            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
+
+    // if we could not set up raw format to non-16-bit, try with 16-bit
+    // NOTE: we will also verify this via readback, in case codec ignores these fields
+    if (err != OK && encoding != kAudioEncodingPcm16bit) {
+        pcmParams.eNumData = OMX_NumericalDataSigned;
+        pcmParams.nBitPerSample = 16;
+        err = mOMX->setParameter(
+                mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
+    }
+    return err;
 }

 status_t ACodec::configureTunneledVideoPlayback(
         int32_t audioHwSync, const sp<ANativeWindow> &nativeWindow) {
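The NOTE about verification refers to the readback in getPortFormat() (next hunk): a component may accept the setParameter() call yet keep its own eNumData/nBitPerSample, so the negotiated values are re-read before being reported to the client. In isolation the check looks roughly like this (a sketch, not code from the patch):

    // Sketch: re-read what the component actually accepted.
    OMX_AUDIO_PARAM_PCMMODETYPE readback;
    InitOMXParams(&readback);
    readback.nPortIndex = portIndex;
    if (mOMX->getParameter(mNode, OMX_IndexParamAudioPcm, &readback, sizeof(readback)) == OK
            && (readback.eNumData != pcmParams.eNumData
                    || readback.nBitPerSample != pcmParams.nBitPerSample)) {
        // the component kept its own PCM layout; getPortFormat() will report it and
        // ACodec will insert an AudioConverter for the client if needed
    }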
" interleaved" : "", params.nBitPerSample, params.nBitPerSample); asString(params.eNumData), params.eNumData, asString(params.ePCMMode), params.ePCMMode); return FAILED_TRANSACTION; return FAILED_TRANSACTION; } } notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW); notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW); notify->setInt32("channel-count", params.nChannels); notify->setInt32("channel-count", params.nChannels); notify->setInt32("sample-rate", params.nSamplingRate); notify->setInt32("sample-rate", params.nSamplingRate); notify->setInt32("pcm-encoding", kAudioEncodingPcm16bit); AudioEncoding encoding = kAudioEncodingPcm16bit; if (params.eNumData == OMX_NumericalDataUnsigned && params.nBitPerSample == 8u) { encoding = kAudioEncodingPcm8bit; } else if (params.eNumData == OMX_NumericalDataFloat && params.nBitPerSample == 32u) { encoding = kAudioEncodingPcmFloat; } else if (params.nBitPerSample != 16u || params.eNumData != OMX_NumericalDataSigned) { ALOGE("unsupported PCM port: %s(%d), %s(%d) mode ", asString(params.eNumData), params.eNumData, asString(params.ePCMMode), params.ePCMMode); return FAILED_TRANSACTION; } notify->setInt32("pcm-encoding", encoding); if (mChannelMaskPresent) { if (mChannelMaskPresent) { notify->setInt32("channel-mask", mChannelMask); notify->setInt32("channel-mask", mChannelMask); Loading Loading @@ -4937,6 +5037,18 @@ void ACodec::onOutputFormatChanged() { return; return; } } if (!mIsVideo && !mIsEncoder) { AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; (void)mConfigFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit; (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding); if (mConverter[kPortIndexOutput] != NULL) { mOutputFormat->setInt32("pcm-encoding", pcmEncoding); } } if (mTunneled) { if (mTunneled) { sendFormatChange(); sendFormatChange(); } } Loading Loading @@ -5464,20 +5576,21 @@ void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) { flags |= OMX_BUFFERFLAG_EOS; flags |= OMX_BUFFERFLAG_EOS; } } if (buffer != info->mData) { if (buffer != info->mCodecData) { ALOGV("[%s] Needs to copy input data for buffer %u. (%p != %p)", ALOGV("[%s] Needs to copy input data for buffer %u. 
(%p != %p)", mCodec->mComponentName.c_str(), mCodec->mComponentName.c_str(), bufferID, bufferID, buffer.get(), info->mData.get()); buffer.get(), info->mCodecData.get()); if (buffer->size() > info->mData->capacity()) { sp<DataConverter> converter = mCodec->mConverter[kPortIndexInput]; ALOGE("data size (%zu) is greated than buffer capacity (%zu)", if (converter == NULL) { buffer->size(), // this is the data received converter = getCopyConverter(); info->mData->capacity()); // this is out buffer size } mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); status_t err = converter->convert(buffer, info->mCodecData); if (err != OK) { mCodec->signalError(OMX_ErrorUndefined, err); return; return; } } memcpy(info->mData->data(), buffer->data(), buffer->size()); } } if (flags & OMX_BUFFERFLAG_CODECCONFIG) { if (flags & OMX_BUFFERFLAG_CODECCONFIG) { Loading Loading @@ -5520,7 +5633,7 @@ void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) { mCodec->mNode, mCodec->mNode, bufferID, bufferID, 0, 0, buffer->size(), info->mCodecData->size(), flags, flags, timeUs, timeUs, info->mFenceFd); info->mFenceFd); Loading Loading @@ -5722,8 +5835,17 @@ bool ACodec::BaseState::onOMXFillBufferDone( info->mData->meta()->setPointer("handle", handle); info->mData->meta()->setPointer("handle", handle); info->mData->meta()->setInt32("rangeOffset", rangeOffset); info->mData->meta()->setInt32("rangeOffset", rangeOffset); info->mData->meta()->setInt32("rangeLength", rangeLength); info->mData->meta()->setInt32("rangeLength", rangeLength); } else { } else if (info->mData == info->mCodecData) { info->mData->setRange(rangeOffset, rangeLength); info->mData->setRange(rangeOffset, rangeLength); } else { info->mCodecData->setRange(rangeOffset, rangeLength); // in this case we know that mConverter is not null status_t err = mCodec->mConverter[kPortIndexOutput]->convert( info->mCodecData, info->mData); if (err != OK) { mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); return true; } } } #if 0 #if 0 if (mCodec->mNativeWindow == NULL) { if (mCodec->mNativeWindow == NULL) { Loading Loading @@ -5939,6 +6061,8 @@ void ACodec::UninitializedState::stateEntered() { mCodec->mFlags = 0; mCodec->mFlags = 0; mCodec->mInputMetadataType = kMetadataBufferTypeInvalid; mCodec->mInputMetadataType = kMetadataBufferTypeInvalid; mCodec->mOutputMetadataType = kMetadataBufferTypeInvalid; mCodec->mOutputMetadataType = kMetadataBufferTypeInvalid; mCodec->mConverter[0].clear(); mCodec->mConverter[1].clear(); mCodec->mComponentName.clear(); mCodec->mComponentName.clear(); } } Loading media/libstagefright/Android.mk +1 −0 Original line number Original line Diff line number Diff line Loading @@ -14,6 +14,7 @@ LOCAL_SRC_FILES:= \ CameraSource.cpp \ CameraSource.cpp \ CameraSourceTimeLapse.cpp \ CameraSourceTimeLapse.cpp \ CodecBase.cpp \ CodecBase.cpp \ DataConverter.cpp \ DataSource.cpp \ DataSource.cpp \ DataURISource.cpp \ DataURISource.cpp \ DRMExtractor.cpp \ DRMExtractor.cpp \ Loading media/libstagefright/DataConverter.cpp 0 → 100644 +123 −0 Original line number Original line Diff line number Diff line /* * Copyright (C) 2016 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
media/libstagefright/Android.mk  (+1 −0)

@@ -14,6 +14,7 @@ LOCAL_SRC_FILES:= \
         CameraSource.cpp \
         CameraSourceTimeLapse.cpp \
         CodecBase.cpp \
+        DataConverter.cpp \
         DataSource.cpp \
         DataURISource.cpp \
         DRMExtractor.cpp \

media/libstagefright/DataConverter.cpp  (new file, +123 −0)

/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "DataConverter"

#include "include/DataConverter.h"

#include <audio_utils/primitives.h>

#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AUtils.h>

namespace android {

status_t DataConverter::convert(const sp<ABuffer> &source, sp<ABuffer> &target) {
    CHECK(source->base() != target->base());
    size_t size = targetSize(source->size());
    status_t err = OK;
    if (size > target->capacity()) {
        ALOGE("data size (%zu) is greater than buffer capacity (%zu)",
                size,                // this is the data received/to be converted
                target->capacity()); // this is out buffer size
        err = FAILED_TRANSACTION;
    } else {
        err = safeConvert(source, target);
    }
    target->setRange(0, err == OK ? size : 0);
    return err;
}

status_t DataConverter::safeConvert(const sp<ABuffer> &source, sp<ABuffer> &target) {
    memcpy(target->base(), source->data(), source->size());
    return OK;
}

size_t DataConverter::sourceSize(size_t targetSize) {
    return targetSize;
}

size_t DataConverter::targetSize(size_t sourceSize) {
    return sourceSize;
}

DataConverter::~DataConverter() { }


size_t SampleConverterBase::sourceSize(size_t targetSize) {
    size_t numSamples = targetSize / mTargetSampleSize;
    if (numSamples > SIZE_MAX / mSourceSampleSize) {
        ALOGW("limiting source size due to overflow (%zu*%zu/%zu)",
                targetSize, mSourceSampleSize, mTargetSampleSize);
        return SIZE_MAX;
    }
    return numSamples * mSourceSampleSize;
}

size_t SampleConverterBase::targetSize(size_t sourceSize) {
    // we round up on conversion
    size_t numSamples = divUp(sourceSize, (size_t)mSourceSampleSize);
    if (numSamples > SIZE_MAX / mTargetSampleSize) {
        ALOGW("limiting target size due to overflow (%zu*%zu/%zu)",
                sourceSize, mTargetSampleSize, mSourceSampleSize);
        return SIZE_MAX;
    }
    return numSamples * mTargetSampleSize;
}
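A quick check of the size math with made-up numbers, for a 16-bit to float converter (sample sizes 2 and 4):

    // targetSize(1000) == divUp(1000, 2) * 4 == 2000; odd inputs round up: targetSize(1001) == 2004
    // sourceSize(2000) == (2000 / 4) * 2     == 1000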
static size_t getAudioSampleSize(AudioEncoding e) {
    switch (e) {
        case kAudioEncodingPcm16bit: return 2;
        case kAudioEncodingPcm8bit:  return 1;
        case kAudioEncodingPcmFloat: return 4;
        default: return 0;
    }
}


// static
AudioConverter* AudioConverter::Create(AudioEncoding source, AudioEncoding target) {
    uint32_t sourceSampleSize = getAudioSampleSize(source);
    uint32_t targetSampleSize = getAudioSampleSize(target);
    if (sourceSampleSize && targetSampleSize && sourceSampleSize != targetSampleSize) {
        return new AudioConverter(source, sourceSampleSize, target, targetSampleSize);
    }
    return NULL;
}

status_t AudioConverter::safeConvert(const sp<ABuffer> &src, sp<ABuffer> &tgt) {
    if (mTo == kAudioEncodingPcm8bit && mFrom == kAudioEncodingPcm16bit) {
        memcpy_to_u8_from_i16((uint8_t*)tgt->base(), (const int16_t*)src->data(), src->size() / 2);
    } else if (mTo == kAudioEncodingPcm8bit && mFrom == kAudioEncodingPcmFloat) {
        memcpy_to_u8_from_float((uint8_t*)tgt->base(), (const float*)src->data(), src->size() / 4);
    } else if (mTo == kAudioEncodingPcm16bit && mFrom == kAudioEncodingPcm8bit) {
        memcpy_to_i16_from_u8((int16_t*)tgt->base(), (const uint8_t*)src->data(), src->size());
    } else if (mTo == kAudioEncodingPcm16bit && mFrom == kAudioEncodingPcmFloat) {
        memcpy_to_i16_from_float((int16_t*)tgt->base(), (const float*)src->data(), src->size() / 4);
    } else if (mTo == kAudioEncodingPcmFloat && mFrom == kAudioEncodingPcm8bit) {
        memcpy_to_float_from_u8((float*)tgt->base(), (const uint8_t*)src->data(), src->size());
    } else if (mTo == kAudioEncodingPcmFloat && mFrom == kAudioEncodingPcm16bit) {
        memcpy_to_float_from_i16((float*)tgt->base(), (const int16_t*)src->data(), src->size() / 2);
    } else {
        return INVALID_OPERATION;
    }
    return OK;
}

} // namespace android

media/libstagefright/codecs/raw/SoftRaw.cpp  (+7 −3)

@@ -42,7 +42,9 @@ SoftRaw::SoftRaw(
     : SimpleSoftOMXComponent(name, callbacks, appData, component),
       mSignalledError(false),
       mChannelCount(2),
-      mSampleRate(44100) {
+      mSampleRate(44100),
+      mNumericalData(OMX_NumericalDataSigned),
+      mBitsPerSample(16) {
     initPorts();
     CHECK_EQ(initDecoder(), (status_t)OK);
 }

@@ -111,10 +113,10 @@ OMX_ERRORTYPE SoftRaw::internalGetParameter(
                 return OMX_ErrorUndefined;
             }

-            pcmParams->eNumData = OMX_NumericalDataSigned;
+            pcmParams->eNumData = (OMX_NUMERICALDATATYPE)mNumericalData;
             pcmParams->eEndian = OMX_EndianBig;
             pcmParams->bInterleaved = OMX_TRUE;
-            pcmParams->nBitPerSample = 16;
+            pcmParams->nBitPerSample = mBitsPerSample;
             pcmParams->ePCMMode = OMX_AUDIO_PCMModeLinear;
             pcmParams->eChannelMapping[0] = OMX_AUDIO_ChannelLF;
             pcmParams->eChannelMapping[1] = OMX_AUDIO_ChannelRF;

@@ -166,6 +168,8 @@ OMX_ERRORTYPE SoftRaw::internalSetParameter(
             mChannelCount = pcmParams->nChannels;
             mSampleRate = pcmParams->nSamplingRate;
+            mNumericalData = pcmParams->eNumData;
+            mBitsPerSample = pcmParams->nBitPerSample;

             return OMX_ErrorNone;
         }
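With SoftRaw now echoing back whatever eNumData/nBitPerSample it was configured with, the readback in getPortFormat() sees the true encoding for raw streams, so float or 8-bit WAV content can presumably pass through without a converter. As a final sanity sketch of the audio_utils primitive used for the 16-bit to float case (values chosen for illustration):

    // What memcpy_to_float_from_i16() does for two samples.
    int16_t in[2] = { 0, 16384 };   // 16384 is 0.5 in Q15
    float out[2];
    memcpy_to_float_from_i16(out, in, 2);
    // out is now approximately { 0.0f, 0.5f } (samples scaled by 1/32768)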