diff --git a/media/codec2/Android.bp b/media/codec2/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..9addc79cd489335b06e81081932f143ac0730e7a --- /dev/null +++ b/media/codec2/Android.bp @@ -0,0 +1,42 @@ +cc_library_shared { + name: "libstagefright_codec2", + vendor_available: true, + + srcs: ["C2.cpp"], + + cflags: [ + "-Wall", + "-Werror", + ], + + include_dirs: [ + "frameworks/native/include/media/hardware", + ], + + export_include_dirs: [ + "include", + ], + + header_libs: [ + "libhardware_headers", + "libutils_headers", + ], + + export_header_lib_headers: [ + "libhardware_headers", + "libutils_headers", + ], + + sanitize: { + misc_undefined: [ + "unsigned-integer-overflow", + "signed-integer-overflow", + ], + cfi: false, // true, + diag: { + cfi: false, // true, + }, + }, + + ldflags: ["-Wl,-Bsymbolic"], +} diff --git a/media/codec2/Android.mk b/media/codec2/Android.mk new file mode 100644 index 0000000000000000000000000000000000000000..82d739f4c134002e0a498bed4e7c156ad59c4a28 --- /dev/null +++ b/media/codec2/Android.mk @@ -0,0 +1,48 @@ +# ============================================================================= +# DOCUMENTATION GENERATION +# ============================================================================= +C2_ROOT := $(call my-dir) + +C2_DOCS_ROOT := $(OUT_DIR)/target/common/docs/codec2 + +C2_OUT_TEMP := $(PRODUCT_OUT)/gen/ETC/Codec2-docs_intermediates + +C2_DOXY := $(or $(shell command -v doxygen),\ + $(shell command -v /Applications/Doxygen.app/Contents/Resources/doxygen)) + +.PHONY: check-doxygen +check-doxygen: +ifndef C2_DOXY + $(error 'doxygen is not available') +endif + +$(C2_OUT_TEMP)/doxy-api.config: $(C2_ROOT)/docs/doxygen.config + # only document include directory, no internal sections + sed 's/\(^INPUT *=.*\)/\1include\//; \ + s/\(^INTERNAL_DOCS *= *\).*/\1NO/; \ + s/\(^ENABLED_SECTIONS *=.*\)INTERNAL\(.*\).*/\1\2/; \ + s:\(^OUTPUT_DIRECTORY *= \)out:\1'$(OUT_DIR)':;' \ + $(C2_ROOT)/docs/doxygen.config > $@ + +$(C2_OUT_TEMP)/doxy-internal.config: $(C2_ROOT)/docs/doxygen.config + sed 's:\(^OUTPUT_DIRECTORY *= \)out\(.*\)api:\1'$(OUT_DIR)'\2internal:;' \ + $(C2_ROOT)/docs/doxygen.config > $@ + +.PHONY: docs-api +docs-api: $(C2_OUT_TEMP)/doxy-api.config check-doxygen + echo API docs are building in $(C2_DOCS_ROOT)/api + rm -rf $(C2_DOCS_ROOT)/api + mkdir -p $(C2_DOCS_ROOT)/api + $(C2_DOXY) $(C2_OUT_TEMP)/doxy-api.config + +.PHONY: docs-internal +docs-internal: $(C2_OUT_TEMP)/doxy-internal.config check-doxygen + echo Internal docs are building in $(C2_DOCS_ROOT)/internal + rm -rf $(C2_DOCS_ROOT)/internal + mkdir -p $(C2_DOCS_ROOT)/internal + $(C2_DOXY) $(C2_OUT_TEMP)/doxy-internal.config + +.PHONY: docs-all +docs-all: docs-api docs-internal + +include $(call all-makefiles-under,$(call my-dir)) diff --git a/media/codec2/C2.cpp b/media/codec2/C2.cpp new file mode 100644 index 0000000000000000000000000000000000000000..359d4e53ca412fe9702ffb8b039767744cf9f8bf --- /dev/null +++ b/media/codec2/C2.cpp @@ -0,0 +1,33 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include +#include +#include +#include +#include + +/** + * There is nothing here yet. This library is built to see what symbols and methods get + * defined as part of the API include files. + * + * Going forward, the Codec2 library will contain utility methods that are useful for + * Codec2 clients. + */ + + diff --git a/media/codec2/components/Android.bp b/media/codec2/components/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..e8176cf6b6b1faa268394d1af98575165192c0d4 --- /dev/null +++ b/media/codec2/components/Android.bp @@ -0,0 +1,3 @@ +subdirs = [ + "*", +] diff --git a/media/codec2/components/aac/Android.bp b/media/codec2/components/aac/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..b70f30af22faabbca9ea6fa8a2223fb486eff78d --- /dev/null +++ b/media/codec2/components/aac/Android.bp @@ -0,0 +1,30 @@ +cc_library_shared { + name: "libstagefright_soft_c2aacdec", + defaults: [ + "libstagefright_soft_c2-defaults", + "libstagefright_soft_c2_sanitize_all-defaults", + ], + + srcs: [ + "C2SoftAacDec.cpp", + "DrcPresModeWrap.cpp", + ], + + static_libs: [ + "libFraunhoferAAC", + ], +} + +cc_library_shared { + name: "libstagefright_soft_c2aacenc", + defaults: [ + "libstagefright_soft_c2-defaults", + "libstagefright_soft_c2_sanitize_all-defaults", + ], + + srcs: ["C2SoftAacEnc.cpp"], + + static_libs: [ + "libFraunhoferAAC", + ], +} diff --git a/media/codec2/components/aac/C2SoftAacDec.cpp b/media/codec2/components/aac/C2SoftAacDec.cpp new file mode 100644 index 0000000000000000000000000000000000000000..c7c84421ab8ac869f6af1d4ab721c8cd0bb20e5c --- /dev/null +++ b/media/codec2/components/aac/C2SoftAacDec.cpp @@ -0,0 +1,941 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "C2SoftAacDec" +#include + +#include +#include +#include + +#include +#include +#include +#include +#include + +#include +#include + +#include "C2SoftAacDec.h" + +#define FILEREAD_MAX_LAYERS 2 + +#define DRC_DEFAULT_MOBILE_REF_LEVEL -16.0 /* 64*-0.25dB = -16 dB below full scale for mobile conf */ +#define DRC_DEFAULT_MOBILE_DRC_CUT 1.0 /* maximum compression of dynamic range for mobile conf */ +#define DRC_DEFAULT_MOBILE_DRC_BOOST 1.0 /* maximum compression of dynamic range for mobile conf */ +#define DRC_DEFAULT_MOBILE_DRC_HEAVY C2Config::DRC_COMPRESSION_HEAVY /* switch for heavy compression for mobile conf */ +#define DRC_DEFAULT_MOBILE_DRC_EFFECT 3 /* MPEG-D DRC effect type; 3 => Limited playback range */ +#define DRC_DEFAULT_MOBILE_ENC_LEVEL (0.25) /* encoder target level; -1 => the value is unknown, otherwise dB step value (e.g. 
64 for -16 dB) */ +#define MAX_CHANNEL_COUNT 8 /* maximum number of audio channels that can be decoded */ +// names of properties that can be used to override the default DRC settings +#define PROP_DRC_OVERRIDE_REF_LEVEL "aac_drc_reference_level" +#define PROP_DRC_OVERRIDE_CUT "aac_drc_cut" +#define PROP_DRC_OVERRIDE_BOOST "aac_drc_boost" +#define PROP_DRC_OVERRIDE_HEAVY "aac_drc_heavy" +#define PROP_DRC_OVERRIDE_ENC_LEVEL "aac_drc_enc_target_level" +#define PROP_DRC_OVERRIDE_EFFECT "ro.aac_drc_effect_type" + +namespace android { + +class C2SoftAacDec::IntfImpl : public C2InterfaceHelper { +public: + explicit IntfImpl(const std::shared_ptr &helper) + : C2InterfaceHelper(helper) { + + setDerivedInstance(this); + + addParameter( + DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING) + .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatCompressed)) + .build()); + + addParameter( + DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING) + .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatAudio)) + .build()); + + addParameter( + DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING) + .withConstValue(AllocSharedString( + MEDIA_MIMETYPE_AUDIO_AAC)) + .build()); + + addParameter( + DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING) + .withConstValue(AllocSharedString( + MEDIA_MIMETYPE_AUDIO_RAW)) + .build()); + + addParameter( + DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING) + .withDefault(new C2StreamSampleRateInfo::output(0u, 44100)) + .withFields({C2F(mSampleRate, value).oneOf({ + 7350, 8000, 11025, 12000, 16000, 22050, 24000, 32000, 44100, 48000 + })}) + .withSetter(Setter::NonStrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING) + .withDefault(new C2StreamChannelCountInfo::output(0u, 1)) + .withFields({C2F(mChannelCount, value).inRange(1, 8)}) + .withSetter(Setter::StrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING) + .withDefault(new C2BitrateTuning::input(0u, 64000)) + .withFields({C2F(mBitrate, value).inRange(8000, 960000)}) + .withSetter(Setter::NonStrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE) + .withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 8192)) + .build()); + + addParameter( + DefineParam(mAacFormat, C2_NAME_STREAM_AAC_FORMAT_SETTING) + .withDefault(new C2StreamAacFormatInfo::input(0u, C2AacStreamFormatRaw)) + .withFields({C2F(mAacFormat, value).oneOf({ + C2AacStreamFormatRaw, C2AacStreamFormatAdts + })}) + .withSetter(Setter::StrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL) + .withDefault(new C2StreamProfileLevelInfo::input(0u, + C2Config::PROFILE_AAC_LC, C2Config::LEVEL_UNUSED)) + .withFields({ + C2F(mProfileLevel, profile).oneOf({ + C2Config::PROFILE_AAC_LC, + C2Config::PROFILE_AAC_HE, + C2Config::PROFILE_AAC_HE_PS, + C2Config::PROFILE_AAC_LD, + C2Config::PROFILE_AAC_ELD, + C2Config::PROFILE_AAC_ER_SCALABLE, + C2Config::PROFILE_AAC_XHE}), + C2F(mProfileLevel, level).oneOf({ + C2Config::LEVEL_UNUSED + }) + }) + .withSetter(ProfileLevelSetter) + .build()); + + addParameter( + DefineParam(mDrcCompressMode, C2_PARAMKEY_DRC_COMPRESSION_MODE) + .withDefault(new C2StreamDrcCompressionModeTuning::input(0u, C2Config::DRC_COMPRESSION_HEAVY)) + .withFields({ + C2F(mDrcCompressMode, value).oneOf({ + C2Config::DRC_COMPRESSION_ODM_DEFAULT, + 
C2Config::DRC_COMPRESSION_NONE, + C2Config::DRC_COMPRESSION_LIGHT, + C2Config::DRC_COMPRESSION_HEAVY}) + }) + .withSetter(Setter::StrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mDrcTargetRefLevel, C2_PARAMKEY_DRC_TARGET_REFERENCE_LEVEL) + .withDefault(new C2StreamDrcTargetReferenceLevelTuning::input(0u, DRC_DEFAULT_MOBILE_REF_LEVEL)) + .withFields({C2F(mDrcTargetRefLevel, value).inRange(-31.75, 0.25)}) + .withSetter(Setter::StrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mDrcEncTargetLevel, C2_PARAMKEY_DRC_ENCODED_TARGET_LEVEL) + .withDefault(new C2StreamDrcEncodedTargetLevelTuning::input(0u, DRC_DEFAULT_MOBILE_ENC_LEVEL)) + .withFields({C2F(mDrcEncTargetLevel, value).inRange(-31.75, 0.25)}) + .withSetter(Setter::StrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mDrcBoostFactor, C2_PARAMKEY_DRC_BOOST_FACTOR) + .withDefault(new C2StreamDrcBoostFactorTuning::input(0u, DRC_DEFAULT_MOBILE_DRC_BOOST)) + .withFields({C2F(mDrcBoostFactor, value).inRange(0, 1.)}) + .withSetter(Setter::StrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mDrcAttenuationFactor, C2_PARAMKEY_DRC_ATTENUATION_FACTOR) + .withDefault(new C2StreamDrcAttenuationFactorTuning::input(0u, DRC_DEFAULT_MOBILE_DRC_CUT)) + .withFields({C2F(mDrcAttenuationFactor, value).inRange(0, 1.)}) + .withSetter(Setter::StrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mDrcEffectType, C2_PARAMKEY_DRC_EFFECT_TYPE) + .withDefault(new C2StreamDrcEffectTypeTuning::input(0u, C2Config::DRC_EFFECT_LIMITED_PLAYBACK_RANGE)) + .withFields({ + C2F(mDrcEffectType, value).oneOf({ + C2Config::DRC_EFFECT_ODM_DEFAULT, + C2Config::DRC_EFFECT_OFF, + C2Config::DRC_EFFECT_NONE, + C2Config::DRC_EFFECT_LATE_NIGHT, + C2Config::DRC_EFFECT_NOISY_ENVIRONMENT, + C2Config::DRC_EFFECT_LIMITED_PLAYBACK_RANGE, + C2Config::DRC_EFFECT_LOW_PLAYBACK_LEVEL, + C2Config::DRC_EFFECT_DIALOG_ENHANCEMENT, + C2Config::DRC_EFFECT_GENERAL_COMPRESSION}) + }) + .withSetter(Setter::StrictValueWithNoDeps) + .build()); + } + + bool isAdts() const { return mAacFormat->value == C2AacStreamFormatAdts; } + static C2R ProfileLevelSetter(bool mayBlock, C2P &me) { + (void)mayBlock; + (void)me; // TODO: validate + return C2R::Ok(); + } + int32_t getDrcCompressMode() const { return mDrcCompressMode->value == C2Config::DRC_COMPRESSION_HEAVY ? 1 : 0; } + int32_t getDrcTargetRefLevel() const { return (mDrcTargetRefLevel->value <= 0 ? -mDrcTargetRefLevel->value * 4. + 0.5 : -1); } + int32_t getDrcEncTargetLevel() const { return (mDrcEncTargetLevel->value <= 0 ? -mDrcEncTargetLevel->value * 4. + 0.5 : -1); } + int32_t getDrcBoostFactor() const { return mDrcBoostFactor->value * 127. + 0.5; } + int32_t getDrcAttenuationFactor() const { return mDrcAttenuationFactor->value * 127. 
+ 0.5; } + int32_t getDrcEffectType() const { return mDrcEffectType->value; } + +private: + std::shared_ptr mInputFormat; + std::shared_ptr mOutputFormat; + std::shared_ptr mInputMediaType; + std::shared_ptr mOutputMediaType; + std::shared_ptr mSampleRate; + std::shared_ptr mChannelCount; + std::shared_ptr mBitrate; + std::shared_ptr mInputMaxBufSize; + std::shared_ptr mAacFormat; + std::shared_ptr mProfileLevel; + std::shared_ptr mDrcCompressMode; + std::shared_ptr mDrcTargetRefLevel; + std::shared_ptr mDrcEncTargetLevel; + std::shared_ptr mDrcBoostFactor; + std::shared_ptr mDrcAttenuationFactor; + std::shared_ptr mDrcEffectType; + // TODO Add : C2StreamAacSbrModeTuning +}; + +constexpr char COMPONENT_NAME[] = "c2.android.aac.decoder"; + +C2SoftAacDec::C2SoftAacDec( + const char *name, + c2_node_id_t id, + const std::shared_ptr &intfImpl) + : SimpleC2Component(std::make_shared>(name, id, intfImpl)), + mIntf(intfImpl), + mAACDecoder(nullptr), + mStreamInfo(nullptr), + mSignalledError(false), + mOutputDelayRingBuffer(nullptr) { +} + +C2SoftAacDec::~C2SoftAacDec() { + onRelease(); +} + +c2_status_t C2SoftAacDec::onInit() { + status_t err = initDecoder(); + return err == OK ? C2_OK : C2_CORRUPTED; +} + +c2_status_t C2SoftAacDec::onStop() { + drainDecoder(); + // reset the "configured" state + mOutputDelayCompensated = 0; + mOutputDelayRingBufferWritePos = 0; + mOutputDelayRingBufferReadPos = 0; + mOutputDelayRingBufferFilled = 0; + mBuffersInfo.clear(); + + // To make the codec behave the same before and after a reset, we need to invalidate the + // streaminfo struct. This does that: + mStreamInfo->sampleRate = 0; // TODO: mStreamInfo is read only + + mSignalledError = false; + + return C2_OK; +} + +void C2SoftAacDec::onReset() { + (void)onStop(); +} + +void C2SoftAacDec::onRelease() { + if (mAACDecoder) { + aacDecoder_Close(mAACDecoder); + mAACDecoder = nullptr; + } + if (mOutputDelayRingBuffer) { + delete[] mOutputDelayRingBuffer; + mOutputDelayRingBuffer = nullptr; + } +} + +status_t C2SoftAacDec::initDecoder() { + ALOGV("initDecoder()"); + status_t status = UNKNOWN_ERROR; + mAACDecoder = aacDecoder_Open(TT_MP4_ADIF, /* num layers */ 1); + if (mAACDecoder != nullptr) { + mStreamInfo = aacDecoder_GetStreamInfo(mAACDecoder); + if (mStreamInfo != nullptr) { + status = OK; + } + } + + mOutputDelayCompensated = 0; + mOutputDelayRingBufferSize = 2048 * MAX_CHANNEL_COUNT * kNumDelayBlocksMax; + mOutputDelayRingBuffer = new short[mOutputDelayRingBufferSize]; + mOutputDelayRingBufferWritePos = 0; + mOutputDelayRingBufferReadPos = 0; + mOutputDelayRingBufferFilled = 0; + + if (mAACDecoder == nullptr) { + ALOGE("AAC decoder is null. 
TODO: Can not call aacDecoder_SetParam in the following code");
+    }
+
+    //aacDecoder_SetParam(mAACDecoder, AAC_PCM_LIMITER_ENABLE, 0);
+
+    //init DRC wrapper
+    mDrcWrap.setDecoderHandle(mAACDecoder);
+    mDrcWrap.submitStreamData(mStreamInfo);
+
+    // for streams that contain metadata, use the mobile profile DRC settings unless overridden by platform properties
+    // TODO: change the DRC settings depending on audio output device type (HDMI, loudspeaker, headphone)
+
+    // DRC_PRES_MODE_WRAP_DESIRED_TARGET
+    int32_t targetRefLevel = mIntf->getDrcTargetRefLevel();
+    ALOGV("AAC decoder using desired DRC target reference level of %d", targetRefLevel);
+    mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_TARGET, (unsigned)targetRefLevel);
+
+    // DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR
+
+    int32_t attenuationFactor = mIntf->getDrcAttenuationFactor();
+    ALOGV("AAC decoder using desired DRC attenuation factor of %d", attenuationFactor);
+    mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR, (unsigned)attenuationFactor);
+
+    // DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR
+    int32_t boostFactor = mIntf->getDrcBoostFactor();
+    ALOGV("AAC decoder using desired DRC boost factor of %d", boostFactor);
+    mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR, (unsigned)boostFactor);
+
+    // DRC_PRES_MODE_WRAP_DESIRED_HEAVY
+    int32_t compressMode = mIntf->getDrcCompressMode();
+    ALOGV("AAC decoder using desired DRC heavy compression switch of %d", compressMode);
+    mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_HEAVY, (unsigned)compressMode);
+
+    // DRC_PRES_MODE_WRAP_ENCODER_TARGET
+    int32_t encTargetLevel = mIntf->getDrcEncTargetLevel();
+    ALOGV("AAC decoder using encoder-side DRC reference level of %d", encTargetLevel);
+    mDrcWrap.setParam(DRC_PRES_MODE_WRAP_ENCODER_TARGET, (unsigned)encTargetLevel);
+
+    // AAC_UNIDRC_SET_EFFECT
+    int32_t effectType = mIntf->getDrcEffectType();
+    ALOGV("AAC decoder using MPEG-D DRC effect type %d", effectType);
+    aacDecoder_SetParam(mAACDecoder, AAC_UNIDRC_SET_EFFECT, effectType);
+
+    // By default, the decoder creates a 5.1 channel downmix signal.
+    // For seven and eight channel input streams, enable 6.1 and 7.1 channel output
+    aacDecoder_SetParam(mAACDecoder, AAC_PCM_MAX_OUTPUT_CHANNELS, -1);
+
+    return status;
+}
+
+bool C2SoftAacDec::outputDelayRingBufferPutSamples(INT_PCM *samples, int32_t numSamples) {
+    if (numSamples == 0) {
+        return true;
+    }
+    if (outputDelayRingBufferSpaceLeft() < numSamples) {
+        ALOGE("RING BUFFER WOULD OVERFLOW");
+        return false;
+    }
+    if (mOutputDelayRingBufferWritePos + numSamples <= mOutputDelayRingBufferSize
+            && (mOutputDelayRingBufferReadPos <= mOutputDelayRingBufferWritePos
+                    || mOutputDelayRingBufferReadPos > mOutputDelayRingBufferWritePos + numSamples)) {
+        // faster memcopy loop without checks, if the preconditions allow this
+        for (int32_t i = 0; i < numSamples; i++) {
+            mOutputDelayRingBuffer[mOutputDelayRingBufferWritePos++] = samples[i];
+        }
+
+        if (mOutputDelayRingBufferWritePos >= mOutputDelayRingBufferSize) {
+            mOutputDelayRingBufferWritePos -= mOutputDelayRingBufferSize;
+        }
+    } else {
+        ALOGV("slow C2SoftAacDec::outputDelayRingBufferPutSamples()");
+
+        for (int32_t i = 0; i < numSamples; i++) {
+            mOutputDelayRingBuffer[mOutputDelayRingBufferWritePos] = samples[i];
+            mOutputDelayRingBufferWritePos++;
+            if (mOutputDelayRingBufferWritePos >= mOutputDelayRingBufferSize) {
+                mOutputDelayRingBufferWritePos -= mOutputDelayRingBufferSize;
+            }
+        }
+    }
+    mOutputDelayRingBufferFilled += numSamples;
+    return true;
+}
+
+int32_t C2SoftAacDec::outputDelayRingBufferGetSamples(INT_PCM *samples, int32_t numSamples) {
+
+    if (numSamples > mOutputDelayRingBufferFilled) {
+        ALOGE("RING BUFFER WOULD UNDERRUN");
+        return -1;
+    }
+
+    if (mOutputDelayRingBufferReadPos + numSamples <= mOutputDelayRingBufferSize
+            && (mOutputDelayRingBufferWritePos < mOutputDelayRingBufferReadPos
+                    || mOutputDelayRingBufferWritePos >= mOutputDelayRingBufferReadPos + numSamples)) {
+        // faster memcopy loop without checks, if the preconditions allow this
+        if (samples != nullptr) {
+            for (int32_t i = 0; i < numSamples; i++) {
+                samples[i] = mOutputDelayRingBuffer[mOutputDelayRingBufferReadPos++];
+            }
+        } else {
+            mOutputDelayRingBufferReadPos += numSamples;
+        }
+        if (mOutputDelayRingBufferReadPos >= mOutputDelayRingBufferSize) {
+            mOutputDelayRingBufferReadPos -= mOutputDelayRingBufferSize;
+        }
+    } else {
+        ALOGV("slow C2SoftAacDec::outputDelayRingBufferGetSamples()");
+
+        for (int32_t i = 0; i < numSamples; i++) {
+            if (samples != nullptr) {
+                samples[i] = mOutputDelayRingBuffer[mOutputDelayRingBufferReadPos];
+            }
+            mOutputDelayRingBufferReadPos++;
+            if (mOutputDelayRingBufferReadPos >= mOutputDelayRingBufferSize) {
+                mOutputDelayRingBufferReadPos -= mOutputDelayRingBufferSize;
+            }
+        }
+    }
+    mOutputDelayRingBufferFilled -= numSamples;
+    return numSamples;
+}
+
+int32_t C2SoftAacDec::outputDelayRingBufferSamplesAvailable() {
+    return mOutputDelayRingBufferFilled;
+}
+
+int32_t C2SoftAacDec::outputDelayRingBufferSpaceLeft() {
+    return mOutputDelayRingBufferSize - outputDelayRingBufferSamplesAvailable();
+}
+
+void C2SoftAacDec::drainRingBuffer(
+        const std::unique_ptr &work,
+        const std::shared_ptr &pool,
+        bool eos) {
+    while (!mBuffersInfo.empty() && outputDelayRingBufferSamplesAvailable()
+            >= mStreamInfo->frameSize * mStreamInfo->numChannels) {
+        Info &outInfo = mBuffersInfo.front();
+        ALOGV("outInfo.frameIndex = %" PRIu64, outInfo.frameIndex);
+        int samplesize __unused = mStreamInfo->numChannels * sizeof(int16_t);
+
+        int available = outputDelayRingBufferSamplesAvailable();
+        int numFrames = outInfo.decodedSizes.size();
+
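+        // Each queued input entry may have produced several decoded frames; frameSize
+        // counts samples per channel, so multiply by the channel count to get the
+        // interleaved sample count compared against the ring buffer below.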
int numSamples = numFrames * (mStreamInfo->frameSize * mStreamInfo->numChannels); + if (available < numSamples) { + if (eos) { + numSamples = available; + } else { + break; + } + } + ALOGV("%d samples available (%d), or %d frames", + numSamples, available, numFrames); + ALOGV("getting %d from ringbuffer", numSamples); + + std::shared_ptr block; + std::function&)> fillWork = + [&block, numSamples, pool, this]() + -> std::function&)> { + auto fillEmptyWork = []( + const std::unique_ptr &work, c2_status_t err) { + work->result = err; + C2FrameData &output = work->worklets.front()->output; + output.flags = work->input.flags; + output.buffers.clear(); + output.ordinal = work->input.ordinal; + + work->workletsProcessed = 1u; + }; + + using namespace std::placeholders; + if (numSamples == 0) { + return std::bind(fillEmptyWork, _1, C2_OK); + } + + // TODO: error handling, proper usage, etc. + C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE }; + c2_status_t err = pool->fetchLinearBlock( + numSamples * sizeof(int16_t), usage, &block); + if (err != C2_OK) { + ALOGD("failed to fetch a linear block (%d)", err); + return std::bind(fillEmptyWork, _1, C2_NO_MEMORY); + } + C2WriteView wView = block->map().get(); + // TODO + INT_PCM *outBuffer = reinterpret_cast(wView.data()); + int32_t ns = outputDelayRingBufferGetSamples(outBuffer, numSamples); + if (ns != numSamples) { + ALOGE("not a complete frame of samples available"); + mSignalledError = true; + return std::bind(fillEmptyWork, _1, C2_CORRUPTED); + } + return [buffer = createLinearBuffer(block)]( + const std::unique_ptr &work) { + work->result = C2_OK; + C2FrameData &output = work->worklets.front()->output; + output.flags = work->input.flags; + output.buffers.clear(); + output.buffers.push_back(buffer); + output.ordinal = work->input.ordinal; + work->workletsProcessed = 1u; + }; + }(); + + if (work && work->input.ordinal.frameIndex == c2_cntr64_t(outInfo.frameIndex)) { + fillWork(work); + } else { + finish(outInfo.frameIndex, fillWork); + } + + ALOGV("out timestamp %" PRIu64 " / %u", outInfo.timestamp, block ? block->capacity() : 0); + mBuffersInfo.pop_front(); + } +} + +void C2SoftAacDec::process( + const std::unique_ptr &work, + const std::shared_ptr &pool) { + // Initialize output work + work->result = C2_OK; + work->workletsProcessed = 1u; + work->worklets.front()->output.configUpdate.clear(); + work->worklets.front()->output.flags = work->input.flags; + + if (mSignalledError) { + return; + } + + UCHAR* inBuffer[FILEREAD_MAX_LAYERS]; + UINT inBufferLength[FILEREAD_MAX_LAYERS] = {0}; + UINT bytesValid[FILEREAD_MAX_LAYERS] = {0}; + + INT_PCM tmpOutBuffer[2048 * MAX_CHANNEL_COUNT]; + C2ReadView view = mDummyReadView; + size_t offset = 0u; + size_t size = 0u; + if (!work->input.buffers.empty()) { + view = work->input.buffers[0]->data().linearBlocks().front().map().get(); + size = view.capacity(); + } + + bool eos = (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0; + bool codecConfig = (work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0; + + //TODO +#if 0 + if (mInputBufferCount == 0 && !codecConfig) { + ALOGW("first buffer should have FLAG_CODEC_CONFIG set"); + codecConfig = true; + } +#endif + if (codecConfig && size > 0u) { + // const_cast because of libAACdec method signature. 
+ inBuffer[0] = const_cast(view.data() + offset); + inBufferLength[0] = size; + + AAC_DECODER_ERROR decoderErr = + aacDecoder_ConfigRaw(mAACDecoder, + inBuffer, + inBufferLength); + + if (decoderErr != AAC_DEC_OK) { + ALOGE("aacDecoder_ConfigRaw decoderErr = 0x%4.4x", decoderErr); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + work->worklets.front()->output.flags = work->input.flags; + work->worklets.front()->output.ordinal = work->input.ordinal; + work->worklets.front()->output.buffers.clear(); + return; + } + + Info inInfo; + inInfo.frameIndex = work->input.ordinal.frameIndex.peeku(); + inInfo.timestamp = work->input.ordinal.timestamp.peeku(); + inInfo.bufferSize = size; + inInfo.decodedSizes.clear(); + while (size > 0u) { + ALOGV("size = %zu", size); + if (mIntf->isAdts()) { + size_t adtsHeaderSize = 0; + // skip 30 bits, aac_frame_length follows. + // ssssssss ssssiiip ppffffPc ccohCCll llllllll lll????? + + const uint8_t *adtsHeader = view.data() + offset; + + bool signalError = false; + if (size < 7) { + ALOGE("Audio data too short to contain even the ADTS header. " + "Got %zu bytes.", size); + hexdump(adtsHeader, size); + signalError = true; + } else { + bool protectionAbsent = (adtsHeader[1] & 1); + + unsigned aac_frame_length = + ((adtsHeader[3] & 3) << 11) + | (adtsHeader[4] << 3) + | (adtsHeader[5] >> 5); + + if (size < aac_frame_length) { + ALOGE("Not enough audio data for the complete frame. " + "Got %zu bytes, frame size according to the ADTS " + "header is %u bytes.", + size, aac_frame_length); + hexdump(adtsHeader, size); + signalError = true; + } else { + adtsHeaderSize = (protectionAbsent ? 7 : 9); + if (aac_frame_length < adtsHeaderSize) { + signalError = true; + } else { + // const_cast because of libAACdec method signature. + inBuffer[0] = const_cast(adtsHeader + adtsHeaderSize); + inBufferLength[0] = aac_frame_length - adtsHeaderSize; + + offset += adtsHeaderSize; + size -= adtsHeaderSize; + } + } + } + + if (signalError) { + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + } else { + // const_cast because of libAACdec method signature. 
+ inBuffer[0] = const_cast(view.data() + offset); + inBufferLength[0] = size; + } + + // Fill and decode + bytesValid[0] = inBufferLength[0]; + + INT prevSampleRate = mStreamInfo->sampleRate; + INT prevNumChannels = mStreamInfo->numChannels; + + aacDecoder_Fill(mAACDecoder, + inBuffer, + inBufferLength, + bytesValid); + + // run DRC check + mDrcWrap.submitStreamData(mStreamInfo); + mDrcWrap.update(); + + UINT inBufferUsedLength = inBufferLength[0] - bytesValid[0]; + size -= inBufferUsedLength; + offset += inBufferUsedLength; + + AAC_DECODER_ERROR decoderErr; + do { + if (outputDelayRingBufferSpaceLeft() < + (mStreamInfo->frameSize * mStreamInfo->numChannels)) { + ALOGV("skipping decode: not enough space left in ringbuffer"); + // discard buffer + size = 0; + break; + } + + int numConsumed = mStreamInfo->numTotalBytes; + decoderErr = aacDecoder_DecodeFrame(mAACDecoder, + tmpOutBuffer, + 2048 * MAX_CHANNEL_COUNT, + 0 /* flags */); + + numConsumed = mStreamInfo->numTotalBytes - numConsumed; + + if (decoderErr == AAC_DEC_NOT_ENOUGH_BITS) { + break; + } + inInfo.decodedSizes.push_back(numConsumed); + + if (decoderErr != AAC_DEC_OK) { + ALOGW("aacDecoder_DecodeFrame decoderErr = 0x%4.4x", decoderErr); + } + + if (bytesValid[0] != 0) { + ALOGE("bytesValid[0] != 0 should never happen"); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + + size_t numOutBytes = + mStreamInfo->frameSize * sizeof(int16_t) * mStreamInfo->numChannels; + + if (decoderErr == AAC_DEC_OK) { + if (!outputDelayRingBufferPutSamples(tmpOutBuffer, + mStreamInfo->frameSize * mStreamInfo->numChannels)) { + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + } else { + ALOGW("AAC decoder returned error 0x%4.4x, substituting silence", decoderErr); + + memset(tmpOutBuffer, 0, numOutBytes); // TODO: check for overflow + + if (!outputDelayRingBufferPutSamples(tmpOutBuffer, + mStreamInfo->frameSize * mStreamInfo->numChannels)) { + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + + // Discard input buffer. + size = 0; + + aacDecoder_SetParam(mAACDecoder, AAC_TPDEC_CLEAR_BUFFER, 1); + + // After an error, replace bufferSize with the sum of the + // decodedSizes to resynchronize the in/out lists. + inInfo.decodedSizes.pop_back(); + inInfo.bufferSize = std::accumulate( + inInfo.decodedSizes.begin(), inInfo.decodedSizes.end(), 0); + + // fall through + } + + /* + * AAC+/eAAC+ streams can be signalled in two ways: either explicitly + * or implicitly, according to MPEG4 spec. AAC+/eAAC+ is a dual + * rate system and the sampling rate in the final output is actually + * doubled compared with the core AAC decoder sampling rate. + * + * Explicit signalling is done by explicitly defining SBR audio object + * type in the bitstream. Implicit signalling is done by embedding + * SBR content in AAC extension payload specific to SBR, and hence + * requires an AAC decoder to perform pre-checks on actual audio frames. + * + * Thus, we could not say for sure whether a stream is + * AAC+/eAAC+ until the first data frame is decoded. 
+ */ + if (!mStreamInfo->sampleRate || !mStreamInfo->numChannels) { + // if ((mInputBufferCount > 2) && (mOutputBufferCount <= 1)) { + ALOGD("Invalid AAC stream"); + // TODO: notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, NULL); + // mSignalledError = true; + // } + } else if ((mStreamInfo->sampleRate != prevSampleRate) || + (mStreamInfo->numChannels != prevNumChannels)) { + ALOGI("Reconfiguring decoder: %d->%d Hz, %d->%d channels", + prevSampleRate, mStreamInfo->sampleRate, + prevNumChannels, mStreamInfo->numChannels); + + C2StreamSampleRateInfo::output sampleRateInfo(0u, mStreamInfo->sampleRate); + C2StreamChannelCountInfo::output channelCountInfo(0u, mStreamInfo->numChannels); + std::vector> failures; + c2_status_t err = mIntf->config( + { &sampleRateInfo, &channelCountInfo }, + C2_MAY_BLOCK, + &failures); + if (err == OK) { + // TODO: this does not handle the case where the values are + // altered during config. + C2FrameData &output = work->worklets.front()->output; + output.configUpdate.push_back(C2Param::Copy(sampleRateInfo)); + output.configUpdate.push_back(C2Param::Copy(channelCountInfo)); + } else { + ALOGE("Config Update failed"); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + } + ALOGV("size = %zu", size); + } while (decoderErr == AAC_DEC_OK); + } + + int32_t outputDelay = mStreamInfo->outputDelay * mStreamInfo->numChannels; + + mBuffersInfo.push_back(std::move(inInfo)); + work->workletsProcessed = 0u; + if (!eos && mOutputDelayCompensated < outputDelay) { + // discard outputDelay at the beginning + int32_t toCompensate = outputDelay - mOutputDelayCompensated; + int32_t discard = outputDelayRingBufferSamplesAvailable(); + if (discard > toCompensate) { + discard = toCompensate; + } + int32_t discarded = outputDelayRingBufferGetSamples(nullptr, discard); + mOutputDelayCompensated += discarded; + return; + } + + if (eos) { + drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work); + } else { + drainRingBuffer(work, pool, false /* not EOS */); + } +} + +c2_status_t C2SoftAacDec::drainInternal( + uint32_t drainMode, + const std::shared_ptr &pool, + const std::unique_ptr &work) { + if (drainMode == NO_DRAIN) { + ALOGW("drain with NO_DRAIN: no-op"); + return C2_OK; + } + if (drainMode == DRAIN_CHAIN) { + ALOGW("DRAIN_CHAIN not supported"); + return C2_OMITTED; + } + + bool eos = (drainMode == DRAIN_COMPONENT_WITH_EOS); + + drainDecoder(); + drainRingBuffer(work, pool, eos); + + if (eos) { + auto fillEmptyWork = [](const std::unique_ptr &work) { + work->worklets.front()->output.flags = work->input.flags; + work->worklets.front()->output.buffers.clear(); + work->worklets.front()->output.ordinal = work->input.ordinal; + work->workletsProcessed = 1u; + }; + while (mBuffersInfo.size() > 1u) { + finish(mBuffersInfo.front().frameIndex, fillEmptyWork); + mBuffersInfo.pop_front(); + } + if (work && work->workletsProcessed == 0u) { + fillEmptyWork(work); + } + mBuffersInfo.clear(); + } + + return C2_OK; +} + +c2_status_t C2SoftAacDec::drain( + uint32_t drainMode, + const std::shared_ptr &pool) { + return drainInternal(drainMode, pool, nullptr); +} + +c2_status_t C2SoftAacDec::onFlush_sm() { + drainDecoder(); + mBuffersInfo.clear(); + + int avail; + while ((avail = outputDelayRingBufferSamplesAvailable()) > 0) { + if (avail > mStreamInfo->frameSize * mStreamInfo->numChannels) { + avail = mStreamInfo->frameSize * mStreamInfo->numChannels; + } + int32_t ns = outputDelayRingBufferGetSamples(nullptr, avail); + if (ns != avail) { + ALOGW("not a complete frame of samples 
available"); + break; + } + } + mOutputDelayRingBufferReadPos = mOutputDelayRingBufferWritePos; + + return C2_OK; +} + +void C2SoftAacDec::drainDecoder() { + // flush decoder until outputDelay is compensated + while (mOutputDelayCompensated > 0) { + // a buffer big enough for MAX_CHANNEL_COUNT channels of decoded HE-AAC + INT_PCM tmpOutBuffer[2048 * MAX_CHANNEL_COUNT]; + + // run DRC check + mDrcWrap.submitStreamData(mStreamInfo); + mDrcWrap.update(); + + AAC_DECODER_ERROR decoderErr = + aacDecoder_DecodeFrame(mAACDecoder, + tmpOutBuffer, + 2048 * MAX_CHANNEL_COUNT, + AACDEC_FLUSH); + if (decoderErr != AAC_DEC_OK) { + ALOGW("aacDecoder_DecodeFrame decoderErr = 0x%4.4x", decoderErr); + } + + int32_t tmpOutBufferSamples = mStreamInfo->frameSize * mStreamInfo->numChannels; + if (tmpOutBufferSamples > mOutputDelayCompensated) { + tmpOutBufferSamples = mOutputDelayCompensated; + } + outputDelayRingBufferPutSamples(tmpOutBuffer, tmpOutBufferSamples); + + mOutputDelayCompensated -= tmpOutBufferSamples; + } +} + +class C2SoftAacDecFactory : public C2ComponentFactory { +public: + C2SoftAacDecFactory() : mHelper(std::static_pointer_cast( + GetCodec2PlatformComponentStore()->getParamReflector())) { + } + + virtual c2_status_t createComponent( + c2_node_id_t id, + std::shared_ptr* const component, + std::function deleter) override { + *component = std::shared_ptr( + new C2SoftAacDec(COMPONENT_NAME, + id, + std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual c2_status_t createInterface( + c2_node_id_t id, std::shared_ptr* const interface, + std::function deleter) override { + *interface = std::shared_ptr( + new SimpleInterface( + COMPONENT_NAME, id, std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual ~C2SoftAacDecFactory() override = default; + +private: + std::shared_ptr mHelper; +}; + +} // namespace android + +extern "C" ::C2ComponentFactory* CreateCodec2Factory() { + ALOGV("in %s", __func__); + return new ::android::C2SoftAacDecFactory(); +} + +extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) { + ALOGV("in %s", __func__); + delete factory; +} diff --git a/media/codec2/components/aac/C2SoftAacDec.h b/media/codec2/components/aac/C2SoftAacDec.h new file mode 100644 index 0000000000000000000000000000000000000000..965c29e11b22d7894c478e9252bd2bd0114fc585 --- /dev/null +++ b/media/codec2/components/aac/C2SoftAacDec.h @@ -0,0 +1,109 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_C2_SOFT_AAC_DEC_H_ +#define ANDROID_C2_SOFT_AAC_DEC_H_ + +#include + + +#include "aacdecoder_lib.h" +#include "DrcPresModeWrap.h" + +namespace android { + +struct C2SoftAacDec : public SimpleC2Component { + class IntfImpl; + + C2SoftAacDec(const char *name, c2_node_id_t id, const std::shared_ptr &intfImpl); + virtual ~C2SoftAacDec(); + + // From SimpleC2Component + c2_status_t onInit() override; + c2_status_t onStop() override; + void onReset() override; + void onRelease() override; + c2_status_t onFlush_sm() override; + void process( + const std::unique_ptr &work, + const std::shared_ptr &pool) override; + c2_status_t drain( + uint32_t drainMode, + const std::shared_ptr &pool) override; + +private: + enum { + kNumDelayBlocksMax = 8, + }; + + std::shared_ptr mIntf; + + HANDLE_AACDECODER mAACDecoder; + CStreamInfo *mStreamInfo; + bool mIsFirst; + size_t mInputBufferCount; + size_t mOutputBufferCount; + bool mSignalledError; + struct Info { + uint64_t frameIndex; + size_t bufferSize; + uint64_t timestamp; + std::vector decodedSizes; + }; + std::list mBuffersInfo; + + CDrcPresModeWrapper mDrcWrap; + + enum { + NONE, + AWAITING_DISABLED, + AWAITING_ENABLED + } mOutputPortSettingsChange; + + void initPorts(); + status_t initDecoder(); + bool isConfigured() const; + void drainDecoder(); + + void drainRingBuffer( + const std::unique_ptr &work, + const std::shared_ptr &pool, + bool eos); + c2_status_t drainInternal( + uint32_t drainMode, + const std::shared_ptr &pool, + const std::unique_ptr &work); + +// delay compensation + bool mEndOfInput; + bool mEndOfOutput; + int32_t mOutputDelayCompensated; + int32_t mOutputDelayRingBufferSize; + short *mOutputDelayRingBuffer; + int32_t mOutputDelayRingBufferWritePos; + int32_t mOutputDelayRingBufferReadPos; + int32_t mOutputDelayRingBufferFilled; + bool outputDelayRingBufferPutSamples(INT_PCM *samples, int numSamples); + int32_t outputDelayRingBufferGetSamples(INT_PCM *samples, int numSamples); + int32_t outputDelayRingBufferSamplesAvailable(); + int32_t outputDelayRingBufferSpaceLeft(); + + C2_DO_NOT_COPY(C2SoftAacDec); +}; + +} // namespace android + +#endif // ANDROID_C2_SOFT_AAC_DEC_H_ diff --git a/media/codec2/components/aac/C2SoftAacEnc.cpp b/media/codec2/components/aac/C2SoftAacEnc.cpp new file mode 100644 index 0000000000000000000000000000000000000000..aeefbdba414ae3924c8d7ce0a0bb8718b00bde3a --- /dev/null +++ b/media/codec2/components/aac/C2SoftAacEnc.cpp @@ -0,0 +1,602 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "C2SoftAacEnc" +#include + +#include + +#include +#include +#include +#include + +#include "C2SoftAacEnc.h" + +namespace android { + +class C2SoftAacEnc::IntfImpl : public C2InterfaceHelper { +public: + explicit IntfImpl(const std::shared_ptr &helper) + : C2InterfaceHelper(helper) { + + setDerivedInstance(this); + + addParameter( + DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING) + .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatAudio)) + .build()); + + addParameter( + DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING) + .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatCompressed)) + .build()); + + addParameter( + DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING) + .withConstValue(AllocSharedString( + MEDIA_MIMETYPE_AUDIO_RAW)) + .build()); + + addParameter( + DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING) + .withConstValue(AllocSharedString( + MEDIA_MIMETYPE_AUDIO_AAC)) + .build()); + + addParameter( + DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING) + .withDefault(new C2StreamSampleRateInfo::input(0u, 44100)) + .withFields({C2F(mSampleRate, value).oneOf({ + 8000, 11025, 12000, 16000, 22050, 24000, 32000, 44100, 48000 + })}) + .withSetter((Setter::StrictValueWithNoDeps)) + .build()); + + addParameter( + DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING) + .withDefault(new C2StreamChannelCountInfo::input(0u, 1)) + .withFields({C2F(mChannelCount, value).inRange(1, 6)}) + .withSetter(Setter::StrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING) + .withDefault(new C2BitrateTuning::output(0u, 64000)) + .withFields({C2F(mBitrate, value).inRange(8000, 960000)}) + .withSetter(Setter::NonStrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE) + .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, 8192)) + .calculatedAs(MaxBufSizeCalculator, mChannelCount) + .build()); + + addParameter( + DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL) + .withDefault(new C2StreamProfileLevelInfo::output(0u, + C2Config::PROFILE_AAC_LC, C2Config::LEVEL_UNUSED)) + .withFields({ + C2F(mProfileLevel, profile).oneOf({ + C2Config::PROFILE_AAC_LC, + C2Config::PROFILE_AAC_HE, + C2Config::PROFILE_AAC_HE_PS, + C2Config::PROFILE_AAC_LD, + C2Config::PROFILE_AAC_ELD}), + C2F(mProfileLevel, level).oneOf({ + C2Config::LEVEL_UNUSED + }) + }) + .withSetter(ProfileLevelSetter) + .build()); + } + + uint32_t getSampleRate() const { return mSampleRate->value; } + uint32_t getChannelCount() const { return mChannelCount->value; } + uint32_t getBitrate() const { return mBitrate->value; } + static C2R ProfileLevelSetter(bool mayBlock, C2P &me) { + (void)mayBlock; + (void)me; // TODO: validate + return C2R::Ok(); + } + + static C2R MaxBufSizeCalculator( + bool mayBlock, + C2P &me, + const C2P &channelCount) { + (void)mayBlock; + me.set().value = 1024 * sizeof(short) * channelCount.v.value; + return C2R::Ok(); + } + +private: + std::shared_ptr mInputFormat; + std::shared_ptr mOutputFormat; + std::shared_ptr mInputMediaType; + std::shared_ptr mOutputMediaType; + std::shared_ptr mSampleRate; + std::shared_ptr mChannelCount; + std::shared_ptr mBitrate; + std::shared_ptr mInputMaxBufSize; + std::shared_ptr mProfileLevel; +}; + +constexpr char COMPONENT_NAME[] = "c2.android.aac.encoder"; + +C2SoftAacEnc::C2SoftAacEnc( + const char *name, + c2_node_id_t id, + const 
std::shared_ptr &intfImpl) + : SimpleC2Component(std::make_shared>(name, id, intfImpl)), + mIntf(intfImpl), + mAACEncoder(nullptr), + mSBRMode(-1), + mSBRRatio(0), + mAACProfile(AOT_AAC_LC), + mNumBytesPerInputFrame(0u), + mOutBufferSize(0u), + mSentCodecSpecificData(false), + mInputSize(0), + mInputTimeUs(-1ll), + mSignalledError(false), + mOutIndex(0u) { +} + +C2SoftAacEnc::~C2SoftAacEnc() { + onReset(); +} + +c2_status_t C2SoftAacEnc::onInit() { + status_t err = initEncoder(); + return err == OK ? C2_OK : C2_CORRUPTED; +} + +status_t C2SoftAacEnc::initEncoder() { + if (AACENC_OK != aacEncOpen(&mAACEncoder, 0, 0)) { + ALOGE("Failed to init AAC encoder"); + return UNKNOWN_ERROR; + } + return setAudioParams(); +} + +c2_status_t C2SoftAacEnc::onStop() { + mSentCodecSpecificData = false; + mInputSize = 0u; + mInputTimeUs = -1ll; + mSignalledError = false; + return C2_OK; +} + +void C2SoftAacEnc::onReset() { + (void)onStop(); + aacEncClose(&mAACEncoder); +} + +void C2SoftAacEnc::onRelease() { + // no-op +} + +c2_status_t C2SoftAacEnc::onFlush_sm() { + mSentCodecSpecificData = false; + mInputSize = 0u; + return C2_OK; +} + +static CHANNEL_MODE getChannelMode(uint32_t nChannels) { + CHANNEL_MODE chMode = MODE_INVALID; + switch (nChannels) { + case 1: chMode = MODE_1; break; + case 2: chMode = MODE_2; break; + case 3: chMode = MODE_1_2; break; + case 4: chMode = MODE_1_2_1; break; + case 5: chMode = MODE_1_2_2; break; + case 6: chMode = MODE_1_2_2_1; break; + default: chMode = MODE_INVALID; + } + return chMode; +} + +//static AUDIO_OBJECT_TYPE getAOTFromProfile(OMX_U32 profile) { +// if (profile == OMX_AUDIO_AACObjectLC) { +// return AOT_AAC_LC; +// } else if (profile == OMX_AUDIO_AACObjectHE) { +// return AOT_SBR; +// } else if (profile == OMX_AUDIO_AACObjectHE_PS) { +// return AOT_PS; +// } else if (profile == OMX_AUDIO_AACObjectLD) { +// return AOT_ER_AAC_LD; +// } else if (profile == OMX_AUDIO_AACObjectELD) { +// return AOT_ER_AAC_ELD; +// } else { +// ALOGW("Unsupported AAC profile - defaulting to AAC-LC"); +// return AOT_AAC_LC; +// } +//} + +status_t C2SoftAacEnc::setAudioParams() { + // We call this whenever sample rate, number of channels, bitrate or SBR mode change + // in reponse to setParameter calls. 
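+    // Sample rate, channel count and bitrate are read from the interface (IntfImpl) at call
+    // time; the SBR mode and ratio come from the member defaults set in the constructor.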
+ + ALOGV("setAudioParams: %u Hz, %u channels, %u bps, %i sbr mode, %i sbr ratio", + mIntf->getSampleRate(), mIntf->getChannelCount(), mIntf->getBitrate(), mSBRMode, mSBRRatio); + + if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_AOT, mAACProfile)) { + ALOGE("Failed to set AAC encoder parameters"); + return UNKNOWN_ERROR; + } + + if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_SAMPLERATE, mIntf->getSampleRate())) { + ALOGE("Failed to set AAC encoder parameters"); + return UNKNOWN_ERROR; + } + if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_BITRATE, mIntf->getBitrate())) { + ALOGE("Failed to set AAC encoder parameters"); + return UNKNOWN_ERROR; + } + if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_CHANNELMODE, + getChannelMode(mIntf->getChannelCount()))) { + ALOGE("Failed to set AAC encoder parameters"); + return UNKNOWN_ERROR; + } + if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_TRANSMUX, TT_MP4_RAW)) { + ALOGE("Failed to set AAC encoder parameters"); + return UNKNOWN_ERROR; + } + + if (mSBRMode != -1 && mAACProfile == AOT_ER_AAC_ELD) { + if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_SBR_MODE, mSBRMode)) { + ALOGE("Failed to set AAC encoder parameters"); + return UNKNOWN_ERROR; + } + } + + /* SBR ratio parameter configurations: + 0: Default configuration wherein SBR ratio is configured depending on audio object type by + the FDK. + 1: Downsampled SBR (default for ELD) + 2: Dualrate SBR (default for HE-AAC) + */ + if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_SBR_RATIO, mSBRRatio)) { + ALOGE("Failed to set AAC encoder parameters"); + return UNKNOWN_ERROR; + } + + return OK; +} + +void C2SoftAacEnc::process( + const std::unique_ptr &work, + const std::shared_ptr &pool) { + // Initialize output work + work->result = C2_OK; + work->workletsProcessed = 1u; + work->worklets.front()->output.flags = work->input.flags; + + if (mSignalledError) { + return; + } + bool eos = (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0; + + uint32_t sampleRate = mIntf->getSampleRate(); + uint32_t channelCount = mIntf->getChannelCount(); + + if (!mSentCodecSpecificData) { + // The very first thing we want to output is the codec specific + // data. 
+ + if (AACENC_OK != aacEncEncode(mAACEncoder, nullptr, nullptr, nullptr, nullptr)) { + ALOGE("Unable to initialize encoder for profile / sample-rate / bit-rate / channels"); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + + uint32_t bitrate = mIntf->getBitrate(); + uint32_t actualBitRate = aacEncoder_GetParam(mAACEncoder, AACENC_BITRATE); + if (bitrate != actualBitRate) { + ALOGW("Requested bitrate %u unsupported, using %u", bitrate, actualBitRate); + } + + AACENC_InfoStruct encInfo; + if (AACENC_OK != aacEncInfo(mAACEncoder, &encInfo)) { + ALOGE("Failed to get AAC encoder info"); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + + std::unique_ptr csd = + C2StreamCsdInfo::output::AllocUnique(encInfo.confSize, 0u); + if (!csd) { + ALOGE("CSD allocation failed"); + mSignalledError = true; + work->result = C2_NO_MEMORY; + return; + } + memcpy(csd->m.value, encInfo.confBuf, encInfo.confSize); + ALOGV("put csd"); +#if defined(LOG_NDEBUG) && !LOG_NDEBUG + hexdump(csd->m.value, csd->flexCount()); +#endif + work->worklets.front()->output.configUpdate.push_back(std::move(csd)); + + mOutBufferSize = encInfo.maxOutBufBytes; + mNumBytesPerInputFrame = encInfo.frameLength * channelCount * sizeof(int16_t); + mInputTimeUs = work->input.ordinal.timestamp; + + mSentCodecSpecificData = true; + } + + uint8_t temp[1]; + C2ReadView view = mDummyReadView; + const uint8_t *data = temp; + size_t capacity = 0u; + if (!work->input.buffers.empty()) { + view = work->input.buffers[0]->data().linearBlocks().front().map().get(); + data = view.data(); + capacity = view.capacity(); + } + + size_t numFrames = (capacity + mInputSize + (eos ? mNumBytesPerInputFrame - 1 : 0)) + / mNumBytesPerInputFrame; + ALOGV("capacity = %zu; mInputSize = %zu; numFrames = %zu mNumBytesPerInputFrame = %u", + capacity, mInputSize, numFrames, mNumBytesPerInputFrame); + + std::shared_ptr block; + std::shared_ptr buffer; + std::unique_ptr wView; + uint8_t *outPtr = temp; + size_t outAvailable = 0u; + uint64_t inputIndex = work->input.ordinal.frameIndex.peeku(); + + AACENC_InArgs inargs; + AACENC_OutArgs outargs; + memset(&inargs, 0, sizeof(inargs)); + memset(&outargs, 0, sizeof(outargs)); + inargs.numInSamples = capacity / sizeof(int16_t); + + void* inBuffer[] = { (unsigned char *)data }; + INT inBufferIds[] = { IN_AUDIO_DATA }; + INT inBufferSize[] = { (INT)capacity }; + INT inBufferElSize[] = { sizeof(int16_t) }; + + AACENC_BufDesc inBufDesc; + inBufDesc.numBufs = sizeof(inBuffer) / sizeof(void*); + inBufDesc.bufs = (void**)&inBuffer; + inBufDesc.bufferIdentifiers = inBufferIds; + inBufDesc.bufSizes = inBufferSize; + inBufDesc.bufElSizes = inBufferElSize; + + void* outBuffer[] = { outPtr }; + INT outBufferIds[] = { OUT_BITSTREAM_DATA }; + INT outBufferSize[] = { 0 }; + INT outBufferElSize[] = { sizeof(UCHAR) }; + + AACENC_BufDesc outBufDesc; + outBufDesc.numBufs = sizeof(outBuffer) / sizeof(void*); + outBufDesc.bufs = (void**)&outBuffer; + outBufDesc.bufferIdentifiers = outBufferIds; + outBufDesc.bufSizes = outBufferSize; + outBufDesc.bufElSizes = outBufferElSize; + + AACENC_ERROR encoderErr = AACENC_OK; + + class FillWork { + public: + FillWork(uint32_t flags, C2WorkOrdinalStruct ordinal, + const std::shared_ptr &buffer) + : mFlags(flags), mOrdinal(ordinal), mBuffer(buffer) { + } + ~FillWork() = default; + + void operator()(const std::unique_ptr &work) { + work->worklets.front()->output.flags = (C2FrameData::flags_t)mFlags; + work->worklets.front()->output.buffers.clear(); + 
work->worklets.front()->output.ordinal = mOrdinal; + work->workletsProcessed = 1u; + work->result = C2_OK; + if (mBuffer) { + work->worklets.front()->output.buffers.push_back(mBuffer); + } + ALOGV("timestamp = %lld, index = %lld, w/%s buffer", + mOrdinal.timestamp.peekll(), + mOrdinal.frameIndex.peekll(), + mBuffer ? "" : "o"); + } + + private: + const uint32_t mFlags; + const C2WorkOrdinalStruct mOrdinal; + const std::shared_ptr mBuffer; + }; + + C2WorkOrdinalStruct outOrdinal = work->input.ordinal; + + while (encoderErr == AACENC_OK && inargs.numInSamples > 0) { + if (numFrames && !block) { + C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE }; + // TODO: error handling, proper usage, etc. + c2_status_t err = pool->fetchLinearBlock(mOutBufferSize, usage, &block); + if (err != C2_OK) { + ALOGE("fetchLinearBlock failed : err = %d", err); + work->result = C2_NO_MEMORY; + return; + } + + wView.reset(new C2WriteView(block->map().get())); + outPtr = wView->data(); + outAvailable = wView->size(); + --numFrames; + } + + memset(&outargs, 0, sizeof(outargs)); + + outBuffer[0] = outPtr; + outBufferSize[0] = outAvailable; + + encoderErr = aacEncEncode(mAACEncoder, + &inBufDesc, + &outBufDesc, + &inargs, + &outargs); + + if (encoderErr == AACENC_OK) { + if (buffer) { + outOrdinal.frameIndex = mOutIndex++; + outOrdinal.timestamp = mInputTimeUs; + cloneAndSend( + inputIndex, + work, + FillWork(C2FrameData::FLAG_INCOMPLETE, outOrdinal, buffer)); + buffer.reset(); + } + + if (outargs.numOutBytes > 0) { + mInputSize = 0; + int consumed = ((capacity / sizeof(int16_t)) - inargs.numInSamples); + mInputTimeUs = work->input.ordinal.timestamp + + (consumed * 1000000ll / channelCount / sampleRate); + buffer = createLinearBuffer(block, 0, outargs.numOutBytes); +#if defined(LOG_NDEBUG) && !LOG_NDEBUG + hexdump(outPtr, std::min(outargs.numOutBytes, 256)); +#endif + outPtr = temp; + outAvailable = 0; + block.reset(); + } else { + mInputSize += outargs.numInSamples * sizeof(int16_t); + } + + if (outargs.numInSamples > 0) { + inBuffer[0] = (int16_t *)inBuffer[0] + outargs.numInSamples; + inBufferSize[0] -= outargs.numInSamples * sizeof(int16_t); + inargs.numInSamples -= outargs.numInSamples; + } + } + ALOGV("encoderErr = %d mInputSize = %zu inargs.numInSamples = %d, mInputTimeUs = %lld", + encoderErr, mInputSize, inargs.numInSamples, mInputTimeUs.peekll()); + } + + if (eos && inBufferSize[0] > 0) { + if (numFrames && !block) { + C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE }; + // TODO: error handling, proper usage, etc. + c2_status_t err = pool->fetchLinearBlock(mOutBufferSize, usage, &block); + if (err != C2_OK) { + ALOGE("fetchLinearBlock failed : err = %d", err); + work->result = C2_NO_MEMORY; + return; + } + + wView.reset(new C2WriteView(block->map().get())); + outPtr = wView->data(); + outAvailable = wView->size(); + --numFrames; + } + + memset(&outargs, 0, sizeof(outargs)); + + outBuffer[0] = outPtr; + outBufferSize[0] = outAvailable; + + // Flush + inargs.numInSamples = -1; + + (void)aacEncEncode(mAACEncoder, + &inBufDesc, + &outBufDesc, + &inargs, + &outargs); + } + + outOrdinal.frameIndex = mOutIndex++; + outOrdinal.timestamp = mInputTimeUs; + FillWork((C2FrameData::flags_t)(eos ? 
C2FrameData::FLAG_END_OF_STREAM : 0), + outOrdinal, buffer)(work); +} + +c2_status_t C2SoftAacEnc::drain( + uint32_t drainMode, + const std::shared_ptr &pool) { + switch (drainMode) { + case DRAIN_COMPONENT_NO_EOS: + [[fallthrough]]; + case NO_DRAIN: + // no-op + return C2_OK; + case DRAIN_CHAIN: + return C2_OMITTED; + case DRAIN_COMPONENT_WITH_EOS: + break; + default: + return C2_BAD_VALUE; + } + + (void)pool; + mSentCodecSpecificData = false; + mInputSize = 0u; + + // TODO: we don't have any pending work at this time to drain. + return C2_OK; +} + +class C2SoftAacEncFactory : public C2ComponentFactory { +public: + C2SoftAacEncFactory() : mHelper(std::static_pointer_cast( + GetCodec2PlatformComponentStore()->getParamReflector())) { + } + + virtual c2_status_t createComponent( + c2_node_id_t id, + std::shared_ptr* const component, + std::function deleter) override { + *component = std::shared_ptr( + new C2SoftAacEnc(COMPONENT_NAME, + id, + std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual c2_status_t createInterface( + c2_node_id_t id, std::shared_ptr* const interface, + std::function deleter) override { + *interface = std::shared_ptr( + new SimpleInterface( + COMPONENT_NAME, id, std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual ~C2SoftAacEncFactory() override = default; + +private: + std::shared_ptr mHelper; +}; + +} // namespace android + +extern "C" ::C2ComponentFactory* CreateCodec2Factory() { + ALOGV("in %s", __func__); + return new ::android::C2SoftAacEncFactory(); +} + +extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) { + ALOGV("in %s", __func__); + delete factory; +} diff --git a/media/codec2/components/aac/C2SoftAacEnc.h b/media/codec2/components/aac/C2SoftAacEnc.h new file mode 100644 index 0000000000000000000000000000000000000000..82fb4384a903f1a41914f01027ad6b9d38eee9c5 --- /dev/null +++ b/media/codec2/components/aac/C2SoftAacEnc.h @@ -0,0 +1,75 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_C2_SOFT_AAC_ENC_H_ +#define ANDROID_C2_SOFT_AAC_ENC_H_ + +#include + +#include + +#include "aacenc_lib.h" + +namespace android { + +class C2SoftAacEnc : public SimpleC2Component { +public: + class IntfImpl; + + C2SoftAacEnc(const char *name, c2_node_id_t id, const std::shared_ptr &intfImpl); + virtual ~C2SoftAacEnc(); + + // From SimpleC2Component + c2_status_t onInit() override; + c2_status_t onStop() override; + void onReset() override; + void onRelease() override; + c2_status_t onFlush_sm() override; + void process( + const std::unique_ptr &work, + const std::shared_ptr &pool) override; + c2_status_t drain( + uint32_t drainMode, + const std::shared_ptr &pool) override; + +private: + std::shared_ptr mIntf; + + HANDLE_AACENCODER mAACEncoder; + + int32_t mSBRMode; + int32_t mSBRRatio; + AUDIO_OBJECT_TYPE mAACProfile; + UINT mNumBytesPerInputFrame; + UINT mOutBufferSize; + + bool mSentCodecSpecificData; + size_t mInputSize; + c2_cntr64_t mInputTimeUs; + + bool mSignalledError; + std::atomic_uint64_t mOutIndex; + + status_t initEncoder(); + + status_t setAudioParams(); + + C2_DO_NOT_COPY(C2SoftAacEnc); +}; + +} // namespace android + +#endif // ANDROID_C2_SOFT_AAC_ENC_H_ diff --git a/media/codec2/components/aac/DrcPresModeWrap.cpp b/media/codec2/components/aac/DrcPresModeWrap.cpp new file mode 100644 index 0000000000000000000000000000000000000000..5b9aebc7ce59fdc4b821a206db59460b12c85dd3 --- /dev/null +++ b/media/codec2/components/aac/DrcPresModeWrap.cpp @@ -0,0 +1,372 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include "DrcPresModeWrap.h" + +#include + +#define LOG_TAG "C2SoftAacDrcWrapper" +//#define LOG_NDEBUG 0 +#include + +//#define DRC_PRES_MODE_WRAP_DEBUG + +#define GPM_ENCODER_TARGET_LEVEL 64 +#define MAX_TARGET_LEVEL 40 + +CDrcPresModeWrapper::CDrcPresModeWrapper() +{ + mDataUpdate = true; + + /* Data from streamInfo. */ + /* Initialized to the same values as in the aac decoder */ + mStreamPRL = -1; + mStreamDRCPresMode = -1; + mStreamNrAACChan = 0; + mStreamNrOutChan = 0; + + /* Desired values (set by user). */ + /* Initialized to the same values as in the aac decoder */ + mDesTarget = -1; + mDesAttFactor = 0; + mDesBoostFactor = 0; + mDesHeavy = 0; + + mEncoderTarget = -1; + + /* Values from last time. 
*/ + /* Initialized to the same values as the desired values */ + mLastTarget = -1; + mLastAttFactor = 0; + mLastBoostFactor = 0; + mLastHeavy = 0; +} + +CDrcPresModeWrapper::~CDrcPresModeWrapper() +{ +} + +void +CDrcPresModeWrapper::setDecoderHandle(const HANDLE_AACDECODER handle) +{ + mHandleDecoder = handle; +} + +void +CDrcPresModeWrapper::submitStreamData(CStreamInfo* pStreamInfo) +{ + assert(pStreamInfo); + + if (mStreamPRL != pStreamInfo->drcProgRefLev) { + mStreamPRL = pStreamInfo->drcProgRefLev; + mDataUpdate = true; +#ifdef DRC_PRES_MODE_WRAP_DEBUG + ALOGV("DRC presentation mode wrapper: drcProgRefLev is %d\n", mStreamPRL); +#endif + } + + if (mStreamDRCPresMode != pStreamInfo->drcPresMode) { + mStreamDRCPresMode = pStreamInfo->drcPresMode; + mDataUpdate = true; +#ifdef DRC_PRES_MODE_WRAP_DEBUG + ALOGV("DRC presentation mode wrapper: drcPresMode is %d\n", mStreamDRCPresMode); +#endif + } + + if (mStreamNrAACChan != pStreamInfo->aacNumChannels) { + mStreamNrAACChan = pStreamInfo->aacNumChannels; + mDataUpdate = true; +#ifdef DRC_PRES_MODE_WRAP_DEBUG + ALOGV("DRC presentation mode wrapper: aacNumChannels is %d\n", mStreamNrAACChan); +#endif + } + + if (mStreamNrOutChan != pStreamInfo->numChannels) { + mStreamNrOutChan = pStreamInfo->numChannels; + mDataUpdate = true; +#ifdef DRC_PRES_MODE_WRAP_DEBUG + ALOGV("DRC presentation mode wrapper: numChannels is %d\n", mStreamNrOutChan); +#endif + } + + + + if (mStreamNrOutChan -31 dB + if ((mIsStereoDownmix == false) && (mIsMonoDownmix == false)) { + // no stereo or mono downmixing, calculated scaling of light DRC + /* use as little compression as possible */ + newAttFactor = 0; + newBoostFactor = 0; + if (mDesTarget PRL + if (mEncoderTarget < mDesTarget) { // if mEncoderTarget > target level + // mEncoderTarget > target level > PRL + int calcFactor; + float calcFactor_norm; + // 0.0f < calcFactor_norm < 1.0f + calcFactor_norm = (float)(mDesTarget - progRefLevel) / + (float)(mEncoderTarget - progRefLevel); + calcFactor = (int)(calcFactor_norm*127.0f); // 0 <= calcFactor < 127 + // calcFactor is the lower limit + newAttFactor = (calcFactor>newAttFactor) ? calcFactor : newAttFactor; + // new AttFactor will be always = calcFactor, as it is set to 0 before. + newBoostFactor = newAttFactor; + } else { + /* target level > mEncoderTarget > PRL */ + // newTDLimiterEnable = 1; + // the time domain limiter must always be active in this case. + // It is assumed that the framework activates it by default + newAttFactor = 127; + newBoostFactor = 127; + } + } else { // target level <= PRL + // no restrictions required + // newAttFactor = newAttFactor; + } + } else { // downmixing + // if target level > -23 dB or mono downmix + if ( (mDesTarget<92) || mIsMonoDownmix ) { + newHeavy = 1; + } else { + // we perform a downmix, so, we need at least full light DRC + newAttFactor = 127; + } + } + } else { // target level <= -31 dB + // playback -31 dB: light DRC only needed if we perform downmixing + if (mIsDownmix) { // we do downmixing + newAttFactor = 127; + } + } + } + else { // handle other used encoder target levels + + // Sanity check: DRC presentation mode is only specified for max. 
5.1 channels + if (mStreamNrAACChan > 6) { + drcPresMode = 0; + } + + switch (drcPresMode) { + case 0: + default: // presentation mode not indicated + { + + if (mDesTarget<124) { // if target level > -31 dB + // no stereo or mono downmixing + if ((mIsStereoDownmix == false) && (mIsMonoDownmix == false)) { + if (mDesTarget PRL + // newTDLimiterEnable = 1; + // the time domain limiter must always be active in this case. + // It is assumed that the framework activates it by default + newAttFactor = 127; // at least, use light compression + } else { // target level <= PRL + // no restrictions required + // newAttFactor = newAttFactor; + } + } else { // downmixing + // newTDLimiterEnable = 1; + // the time domain limiter must always be active in this case. + // It is assumed that the framework activates it by default + + // if target level > -23 dB or mono downmix + if ( (mDesTarget < 92) || mIsMonoDownmix ) { + newHeavy = 1; + } else{ + // we perform a downmix, so, we need at least full light DRC + newAttFactor = 127; + } + } + } else { // target level <= -31 dB + if (mIsDownmix) { // we do downmixing. + // newTDLimiterEnable = 1; + // the time domain limiter must always be active in this case. + // It is assumed that the framework activates it by default + newAttFactor = 127; + } + } + } + break; + + // Presentation mode 1 and 2 according to ETSI TS 101 154: + // Digital Video Broadcasting (DVB); Specification for the use of Video and Audio Coding + // in Broadcasting Applications based on the MPEG-2 Transport Stream, + // section C.5.4., "Decoding", and Table C.33 + // ISO DRC -> newHeavy = 0 (Use light compression, MPEG-style) + // Compression_value -> newHeavy = 1 (Use heavy compression, DVB-style) + // scaling restricted -> newAttFactor = 127 + + case 1: // presentation mode 1, Light:-31/Heavy:-23 + { + if (mDesTarget < 124) { // if target level > -31 dB + // playback up to -23 dB + newHeavy = 1; + } else { // target level <= -31 dB + // playback -31 dB + if (mIsDownmix) { // we do downmixing. + newAttFactor = 127; + } + } + } + break; + + case 2: // presentation mode 2, Light:-23/Heavy:-23 + { + if (mDesTarget < 124) { // if target level > -31 dB + // playback up to -23 dB + if (mIsMonoDownmix) { // if mono downmix + newHeavy = 1; + } else { + newHeavy = 0; + newAttFactor = 127; + } + } else { // target level <= -31 dB + // playback -31 dB + newHeavy = 0; + if (mIsDownmix) { // we do downmixing. 
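+ // (Note: attenuation/boost factors are scaling values in the 0..127 range;
+ // 127 means the light, MPEG-style DRC gains are applied in full.)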
+ newAttFactor = 127; + } + } + } + break; + + } // switch() + } // if (mEncoderTarget == GPM_ENCODER_TARGET_LEVEL) + + // sanity again + if (newHeavy == 1) { + newBoostFactor=127; // not really needed as the same would be done by the decoder anyway + newAttFactor = 127; + } + + // update the decoder + if (newTarget != mLastTarget) { + aacDecoder_SetParam(mHandleDecoder, AAC_DRC_REFERENCE_LEVEL, newTarget); + mLastTarget = newTarget; +#ifdef DRC_PRES_MODE_WRAP_DEBUG + if (newTarget != mDesTarget) + ALOGV("DRC presentation mode wrapper: forced target level to %d (from %d)\n", newTarget, mDesTarget); + else + ALOGV("DRC presentation mode wrapper: set target level to %d\n", newTarget); +#endif + } + + if (newAttFactor != mLastAttFactor) { + aacDecoder_SetParam(mHandleDecoder, AAC_DRC_ATTENUATION_FACTOR, newAttFactor); + mLastAttFactor = newAttFactor; +#ifdef DRC_PRES_MODE_WRAP_DEBUG + if (newAttFactor != mDesAttFactor) + ALOGV("DRC presentation mode wrapper: forced attenuation factor to %d (from %d)\n", newAttFactor, mDesAttFactor); + else + ALOGV("DRC presentation mode wrapper: set attenuation factor to %d\n", newAttFactor); +#endif + } + + if (newBoostFactor != mLastBoostFactor) { + aacDecoder_SetParam(mHandleDecoder, AAC_DRC_BOOST_FACTOR, newBoostFactor); + mLastBoostFactor = newBoostFactor; +#ifdef DRC_PRES_MODE_WRAP_DEBUG + if (newBoostFactor != mDesBoostFactor) + ALOGV("DRC presentation mode wrapper: forced boost factor to %d (from %d)\n", + newBoostFactor, mDesBoostFactor); + else + ALOGV("DRC presentation mode wrapper: set boost factor to %d\n", newBoostFactor); +#endif + } + + if (newHeavy != mLastHeavy) { + aacDecoder_SetParam(mHandleDecoder, AAC_DRC_HEAVY_COMPRESSION, newHeavy); + mLastHeavy = newHeavy; +#ifdef DRC_PRES_MODE_WRAP_DEBUG + if (newHeavy != mDesHeavy) + ALOGV("DRC presentation mode wrapper: forced heavy compression to %d (from %d)\n", + newHeavy, mDesHeavy); + else + ALOGV("DRC presentation mode wrapper: set heavy compression to %d\n", newHeavy); +#endif + } + +#ifdef DRC_PRES_MODE_WRAP_DEBUG + ALOGV("DRC config: tgt_lev: %3d, cut: %3d, boost: %3d, heavy: %d\n", newTarget, + newAttFactor, newBoostFactor, newHeavy); +#endif + mDataUpdate = false; + + } // if (mDataUpdate) +} diff --git a/media/codec2/components/aac/DrcPresModeWrap.h b/media/codec2/components/aac/DrcPresModeWrap.h new file mode 100644 index 0000000000000000000000000000000000000000..f0b6cf2d34cbce208ffc110f1b4fe527b814ed93 --- /dev/null +++ b/media/codec2/components/aac/DrcPresModeWrap.h @@ -0,0 +1,62 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#pragma once +#include "aacdecoder_lib.h" + +typedef enum +{ + DRC_PRES_MODE_WRAP_DESIRED_TARGET = 0x0000, + DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR = 0x0001, + DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR = 0x0002, + DRC_PRES_MODE_WRAP_DESIRED_HEAVY = 0x0003, + DRC_PRES_MODE_WRAP_ENCODER_TARGET = 0x0004 +} DRC_PRES_MODE_WRAP_PARAM; + + +class CDrcPresModeWrapper { +public: + CDrcPresModeWrapper(); + ~CDrcPresModeWrapper(); + void setDecoderHandle(const HANDLE_AACDECODER handle); + void setParam(const DRC_PRES_MODE_WRAP_PARAM param, const int value); + void submitStreamData(CStreamInfo*); + void update(); + +protected: + HANDLE_AACDECODER mHandleDecoder; + int mDesTarget; + int mDesAttFactor; + int mDesBoostFactor; + int mDesHeavy; + + int mEncoderTarget; + + int mLastTarget; + int mLastAttFactor; + int mLastBoostFactor; + int mLastHeavy; + + SCHAR mStreamPRL; + SCHAR mStreamDRCPresMode; + INT mStreamNrAACChan; + INT mStreamNrOutChan; + + bool mIsDownmix; + bool mIsMonoDownmix; + bool mIsStereoDownmix; + + bool mDataUpdate; +}; diff --git a/media/codec2/components/aac/MODULE_LICENSE_APACHE2 b/media/codec2/components/aac/MODULE_LICENSE_APACHE2 new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/media/codec2/components/aac/NOTICE b/media/codec2/components/aac/NOTICE new file mode 100644 index 0000000000000000000000000000000000000000..c5b1efa7aac764ae6d8da63476a2d5cec02a6a5d --- /dev/null +++ b/media/codec2/components/aac/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2005-2008, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/media/codec2/components/aac/patent_disclaimer.txt b/media/codec2/components/aac/patent_disclaimer.txt new file mode 100644 index 0000000000000000000000000000000000000000..b4bf11d4ab14011ba28ce5ff6090529d2b7b587f --- /dev/null +++ b/media/codec2/components/aac/patent_disclaimer.txt @@ -0,0 +1,9 @@ + +THIS IS NOT A GRANT OF PATENT RIGHTS. + +Google makes no representation or warranty that the codecs for which +source code is made available hereunder are unencumbered by +third-party patents. Those intending to use this source code in +hardware or software products are advised that implementations of +these codecs, including in open source software or shareware, may +require patent licenses from the relevant patent holders. 
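Note: the DRC presentation-mode wrapper added above is meant to be fed the decoder's CStreamInfo repeatedly; update() then only touches the decoder when a value actually changed. A minimal, hypothetical sketch of that call sequence (the helper name and the concrete values are illustrative only; levels follow the -0.25 dB-step convention visible in update(), e.g. 64 for roughly -16 dB, and factors range from 0 to 127):

    #include "DrcPresModeWrap.h"

    // Illustrative only: drive the wrapper the way a decoder component would.
    // The wrapper remembers the last values it pushed, so update() calls
    // aacDecoder_SetParam() only for parameters that actually changed.
    static void applyMobileDrcDefaults(HANDLE_AACDECODER decoder) {
        CDrcPresModeWrapper wrapper;
        wrapper.setDecoderHandle(decoder);

        wrapper.setParam(DRC_PRES_MODE_WRAP_DESIRED_TARGET, 64);         // ~ -16 dB
        wrapper.setParam(DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR, 127);    // full cut
        wrapper.setParam(DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR, 127);  // full boost
        wrapper.setParam(DRC_PRES_MODE_WRAP_DESIRED_HEAVY, 0);           // light DRC

        // After each decoded frame: feed the current stream info back in and
        // let the wrapper reconcile the desired settings with what the stream
        // signals (program reference level, presentation mode, channel count).
        CStreamInfo *info = aacDecoder_GetStreamInfo(decoder);
        if (info != nullptr) {
            wrapper.submitStreamData(info);
            wrapper.update();
        }
    }
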
diff --git a/media/codec2/components/amr_nb_wb/Android.bp b/media/codec2/components/amr_nb_wb/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..764b3dba7007d5afddf4c12ee3257b213b2937f1 --- /dev/null +++ b/media/codec2/components/amr_nb_wb/Android.bp @@ -0,0 +1,77 @@ +cc_library_shared { + name: "libstagefright_soft_c2amrnbdec", + defaults: [ + "libstagefright_soft_c2-defaults", + "libstagefright_soft_c2_sanitize_all-defaults", + ], + + srcs: ["C2SoftAmrDec.cpp"], + + cflags: [ + "-DAMRNB", + ], + + static_libs: [ + "libstagefright_amrnbdec", + "libstagefright_amrwbdec", + ], + + shared_libs: [ + "libstagefright_amrnb_common", + ], +} + +cc_library_shared { + name: "libstagefright_soft_c2amrwbdec", + defaults: [ + "libstagefright_soft_c2-defaults", + "libstagefright_soft_c2_sanitize_all-defaults", + ], + + srcs: ["C2SoftAmrDec.cpp"], + + static_libs: [ + "libstagefright_amrnbdec", + "libstagefright_amrwbdec", + ], + + shared_libs: [ + "libstagefright_amrnb_common", + ], +} + +cc_library_shared { + name: "libstagefright_soft_c2amrnbenc", + defaults: [ + "libstagefright_soft_c2-defaults", + "libstagefright_soft_c2_sanitize_all-defaults", + ], + + srcs: ["C2SoftAmrNbEnc.cpp"], + + static_libs: [ + "libstagefright_amrnbenc", + ], + + shared_libs: [ + "libstagefright_amrnb_common", + ], +} + +cc_library_shared { + name: "libstagefright_soft_c2amrwbenc", + defaults: [ + "libstagefright_soft_c2-defaults", + "libstagefright_soft_c2_sanitize_all-defaults", + ], + + srcs: ["C2SoftAmrWbEnc.cpp"], + + static_libs: [ + "libstagefright_amrwbenc", + ], + + shared_libs: [ + "libstagefright_enc_common", + ], +} diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp b/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp new file mode 100644 index 0000000000000000000000000000000000000000..c591e212ac9d08a952f88f51d61442973518e77d --- /dev/null +++ b/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp @@ -0,0 +1,438 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#ifdef AMRNB +#define LOG_TAG "C2SoftAmrNbDec" +#else +#define LOG_TAG "C2SoftAmrWbDec" +#endif +#include + +#include + +#include +#include + +#include "C2SoftAmrDec.h" +#include "gsmamr_dec.h" +#include "pvamrwbdecoder.h" + +namespace android { + +#ifdef AMRNB + constexpr char COMPONENT_NAME[] = "c2.android.amrnb.decoder"; +#else + constexpr char COMPONENT_NAME[] = "c2.android.amrwb.decoder"; +#endif + +class C2SoftAmrDec::IntfImpl : public C2InterfaceHelper { +public: + explicit IntfImpl(const std::shared_ptr &helper) + : C2InterfaceHelper(helper) { + + setDerivedInstance(this); + + addParameter( + DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING) + .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatCompressed)) + .build()); + + addParameter( + DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING) + .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatAudio)) + .build()); + + addParameter( + DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING) + .withConstValue(AllocSharedString( +#ifdef AMRNB + MEDIA_MIMETYPE_AUDIO_AMR_NB +#else + MEDIA_MIMETYPE_AUDIO_AMR_WB +#endif + )).build()); + + addParameter( + DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING) + .withConstValue(AllocSharedString( + MEDIA_MIMETYPE_AUDIO_RAW)) + .build()); + + addParameter( + DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING) +#ifdef AMRNB + .withDefault(new C2StreamSampleRateInfo::output(0u, 8000)) + .withFields({C2F(mSampleRate, value).equalTo(8000)}) +#else + .withDefault(new C2StreamSampleRateInfo::output(0u, 16000)) + .withFields({C2F(mSampleRate, value).equalTo(16000)}) +#endif + .withSetter((Setter::StrictValueWithNoDeps)) + .build()); + + addParameter( + DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING) + .withDefault(new C2StreamChannelCountInfo::output(0u, 1)) + .withFields({C2F(mChannelCount, value).equalTo(1)}) + .withSetter((Setter::StrictValueWithNoDeps)) + .build()); + + addParameter( + DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING) +#ifdef AMRNB + .withDefault(new C2BitrateTuning::input(0u, 4750)) + .withFields({C2F(mBitrate, value).inRange(4750, 12200)}) +#else + .withDefault(new C2BitrateTuning::input(0u, 6600)) + .withFields({C2F(mBitrate, value).inRange(6600, 23850)}) +#endif + .withSetter(Setter::NonStrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE) + .withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 8192)) + .build()); + } + +private: + std::shared_ptr mInputFormat; + std::shared_ptr mOutputFormat; + std::shared_ptr mInputMediaType; + std::shared_ptr mOutputMediaType; + std::shared_ptr mSampleRate; + std::shared_ptr mChannelCount; + std::shared_ptr mBitrate; + std::shared_ptr mInputMaxBufSize; +}; + +C2SoftAmrDec::C2SoftAmrDec( + const char *name, + c2_node_id_t id, + const std::shared_ptr &intfImpl) + : SimpleC2Component(std::make_shared>(name, id, intfImpl)), + mIntf(intfImpl), + mAmrHandle(nullptr), + mDecoderBuf(nullptr), + mDecoderCookie(nullptr) { +#ifdef AMRNB + mIsWide = false; +#else + mIsWide = true; +#endif +} + +C2SoftAmrDec::~C2SoftAmrDec() { + (void)onRelease(); +} + +c2_status_t C2SoftAmrDec::onInit() { + status_t err = initDecoder(); + return err == OK ? 
C2_OK : C2_NO_MEMORY; +} + +c2_status_t C2SoftAmrDec::onStop() { + if (!mIsWide) { + Speech_Decode_Frame_reset(mAmrHandle); + } else { + pvDecoder_AmrWb_Reset(mAmrHandle, 0 /* reset_all */); + } + mSignalledError = false; + mSignalledOutputEos = false; + + return C2_OK; +} + +void C2SoftAmrDec::onReset() { + (void)onStop(); +} + +void C2SoftAmrDec::onRelease() { + if (!mIsWide) { + if (mAmrHandle) { + GSMDecodeFrameExit(&mAmrHandle); + } + mAmrHandle = nullptr; + } else { + if (mDecoderBuf) { + free(mDecoderBuf); + } + mDecoderBuf = nullptr; + mAmrHandle = nullptr; + mDecoderCookie = nullptr; + } +} + +c2_status_t C2SoftAmrDec::onFlush_sm() { + return onStop(); +} + +status_t C2SoftAmrDec::initDecoder() { + if (!mIsWide) { + if (GSMInitDecode(&mAmrHandle, (int8_t *)"AMRNBDecoder")) + return UNKNOWN_ERROR; + } else { + uint32_t memReq = pvDecoder_AmrWbMemRequirements(); + mDecoderBuf = malloc(memReq); + if (mDecoderBuf) { + pvDecoder_AmrWb_Init(&mAmrHandle, mDecoderBuf, &mDecoderCookie); + } + else { + return NO_MEMORY; + } + } + mSignalledError = false; + mSignalledOutputEos = false; + + return OK; +} + +static size_t getFrameSize(bool isWide, unsigned FM) { + static const size_t kFrameSizeNB[16] = { + 12, 13, 15, 17, 19, 20, 26, 31, + 5, 6, 5, 5, // SID + 0, 0, 0, // future use + 0 // no data + }; + static const size_t kFrameSizeWB[16] = { + 17, 23, 32, 36, 40, 46, 50, 58, 60, + 5, // SID + 0, 0, 0, 0, // future use + 0, // speech lost + 0 // no data + }; + + if (FM > 15 || (isWide && FM > 9 && FM < 14) || (!isWide && FM > 11 && FM < 15)) { + ALOGE("illegal AMR frame mode %d", FM); + return 0; + } + // add 1 for header byte + return (isWide ? kFrameSizeWB[FM] : kFrameSizeNB[FM]) + 1; +} + +static status_t calculateNumFrames(const uint8 *input, size_t inSize, + std::vector *frameSizeList, bool isWide) { + for (size_t k = 0; k < inSize;) { + int16_t FM = ((input[0] >> 3) & 0x0f); + size_t frameSize = getFrameSize(isWide, FM); + if (frameSize == 0) { + return UNKNOWN_ERROR; + } + if ((inSize - k) >= frameSize) { + input += frameSize; + k += frameSize; + } + else break; + frameSizeList->push_back(frameSize); + } + return OK; +} + +void C2SoftAmrDec::process( + const std::unique_ptr &work, + const std::shared_ptr &pool) { + // Initialize output work + work->result = C2_OK; + work->workletsProcessed = 1u; + work->worklets.front()->output.flags = work->input.flags; + + if (mSignalledError || mSignalledOutputEos) { + work->result = C2_BAD_VALUE; + return; + } + + C2ReadView rView = mDummyReadView; + size_t inOffset = 0u; + size_t inSize = 0u; + if (!work->input.buffers.empty()) { + rView = work->input.buffers[0]->data().linearBlocks().front().map().get(); + inSize = rView.capacity(); + if (inSize && rView.error()) { + ALOGE("read view map failed %d", rView.error()); + work->result = rView.error(); + return; + } + } + + bool eos = (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0; + if (inSize == 0) { + work->worklets.front()->output.flags = work->input.flags; + work->worklets.front()->output.buffers.clear(); + work->worklets.front()->output.ordinal = work->input.ordinal; + if (eos) { + mSignalledOutputEos = true; + ALOGV("signalled EOS"); + } + return; + } + + ALOGV("in buffer attr. 
size %zu timestamp %d frameindex %d", inSize, + (int)work->input.ordinal.timestamp.peeku(), (int)work->input.ordinal.frameIndex.peeku()); + + std::vector frameSizeList; + if (OK != calculateNumFrames(rView.data() + inOffset, inSize, &frameSizeList, + mIsWide)) { + work->result = C2_CORRUPTED; + mSignalledError = true; + return; + } + if (frameSizeList.empty()) { + ALOGE("input size smaller than expected"); + work->result = C2_CORRUPTED; + mSignalledError = true; + return; + } + + int16_t outSamples = mIsWide ? kNumSamplesPerFrameWB : kNumSamplesPerFrameNB; + size_t calOutSize = outSamples * frameSizeList.size() * sizeof(int16_t); + std::shared_ptr block; + C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE }; + c2_status_t err = pool->fetchLinearBlock(calOutSize, usage, &block); + if (err != C2_OK) { + ALOGE("fetchLinearBlock for Output failed with status %d", err); + work->result = C2_NO_MEMORY; + return; + } + C2WriteView wView = block->map().get(); + if (wView.error()) { + ALOGE("write view map failed %d", wView.error()); + work->result = wView.error(); + return; + } + + int16_t *output = reinterpret_cast(wView.data()); + auto it = frameSizeList.begin(); + const uint8_t *inPtr = rView.data() + inOffset; + size_t inPos = 0; + while (inPos < inSize) { + if (it == frameSizeList.end()) { + ALOGD("unexpected trailing bytes, ignoring them"); + break; + } + uint8_t *input = const_cast(inPtr + inPos); + int16_t FM = ((*input >> 3) & 0x0f); + if (!mIsWide) { + int32_t numBytesRead = AMRDecode(mAmrHandle, + (Frame_Type_3GPP) FM, + input + 1, output, MIME_IETF); + if (static_cast(numBytesRead + 1) != *it) { + ALOGE("panic, parsed size does not match decoded size"); + work->result = C2_CORRUPTED; + mSignalledError = true; + return; + } + } else { + if (FM >= 9) { + // Produce silence instead of comfort noise and for + // speech lost/no data. 
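+ // (For AMR-WB, frame types 9..15 carry no speech payload: 9 is a SID
+ // comfort-noise frame, 10..13 are reserved, 14 is "speech lost" and 15 is
+ // "no data" -- see the kFrameSizeWB table above -- so the output for these
+ // frames is simply zero-filled.)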
+ memset(output, 0, outSamples * sizeof(int16_t)); + } else { + int16_t FT; + RX_State_wb rx_state; + int16_t numRecSamples; + + mime_unsorting(const_cast(&input[1]), + mInputSampleBuffer, &FT, &FM, 1, &rx_state); + pvDecoder_AmrWb(FM, mInputSampleBuffer, output, &numRecSamples, + mDecoderBuf, FT, mDecoderCookie); + if (numRecSamples != outSamples) { + ALOGE("Sample output per frame incorrect"); + work->result = C2_CORRUPTED; + mSignalledError = true; + return; + } + /* Delete the 2 LSBs (14-bit output) */ + for (int i = 0; i < numRecSamples; ++i) { + output[i] &= 0xfffC; + } + } + } + inPos += *it; + output += outSamples; + ++it; + } + + work->worklets.front()->output.flags = work->input.flags; + work->worklets.front()->output.buffers.clear(); + work->worklets.front()->output.buffers.push_back(createLinearBuffer(block)); + work->worklets.front()->output.ordinal = work->input.ordinal; + if (eos) { + mSignalledOutputEos = true; + ALOGV("signalled EOS"); + } +} + +c2_status_t C2SoftAmrDec::drain( + uint32_t drainMode, + const std::shared_ptr &pool) { + (void)pool; + if (drainMode == NO_DRAIN) { + ALOGW("drain with NO_DRAIN: no-op"); + return C2_OK; + } + if (drainMode == DRAIN_CHAIN) { + ALOGW("DRAIN_CHAIN not supported"); + return C2_OMITTED; + } + return C2_OK; +} + +class C2SoftAMRDecFactory : public C2ComponentFactory { +public: + C2SoftAMRDecFactory() : mHelper(std::static_pointer_cast( + GetCodec2PlatformComponentStore()->getParamReflector())) { + } + + virtual c2_status_t createComponent( + c2_node_id_t id, + std::shared_ptr* const component, + std::function deleter) override { + *component = std::shared_ptr( + new C2SoftAmrDec(COMPONENT_NAME, id, + std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual c2_status_t createInterface( + c2_node_id_t id, + std::shared_ptr* const interface, + std::function deleter) override { + *interface = std::shared_ptr( + new SimpleInterface( + COMPONENT_NAME, id, std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual ~C2SoftAMRDecFactory() override = default; + +private: + std::shared_ptr mHelper; +}; + +} // namespace android + +extern "C" ::C2ComponentFactory* CreateCodec2Factory() { + ALOGV("in %s", __func__); + return new ::android::C2SoftAMRDecFactory(); +} + +extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) { + ALOGV("in %s", __func__); + delete factory; +} diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrDec.h b/media/codec2/components/amr_nb_wb/C2SoftAmrDec.h new file mode 100644 index 0000000000000000000000000000000000000000..6384450a2748040ac8cdbe38b21518ce473e6bb9 --- /dev/null +++ b/media/codec2/components/amr_nb_wb/C2SoftAmrDec.h @@ -0,0 +1,68 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_C2_SOFT_AMR_DEC_H_ +#define ANDROID_C2_SOFT_AMR_DEC_H_ + +#include + + +namespace android { + +struct C2SoftAmrDec : public SimpleC2Component { + class IntfImpl; + + C2SoftAmrDec(const char *name, c2_node_id_t id, + const std::shared_ptr &intfImpl); + virtual ~C2SoftAmrDec(); + + // From SimpleC2Component + c2_status_t onInit() override; + c2_status_t onStop() override; + void onReset() override; + void onRelease() override; + c2_status_t onFlush_sm() override; + void process( + const std::unique_ptr &work, + const std::shared_ptr &pool) override; + c2_status_t drain( + uint32_t drainMode, + const std::shared_ptr &pool) override; +private: + enum { + kNumSamplesPerFrameNB = 160, + kNumSamplesPerFrameWB = 320, + }; + + std::shared_ptr mIntf; + void *mAmrHandle; + void *mDecoderBuf; + int16_t *mDecoderCookie; + + int16_t mInputSampleBuffer[477]; + + bool mIsWide; + bool mSignalledError; + bool mSignalledOutputEos; + + status_t initDecoder(); + + C2_DO_NOT_COPY(C2SoftAmrDec); +}; + +} // namespace android + +#endif // ANDROID_C2_SOFT_AMR_DEC_H_ diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.cpp b/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.cpp new file mode 100644 index 0000000000000000000000000000000000000000..ca21480cbabcd5abffb0d9206d638b427ce25279 --- /dev/null +++ b/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.cpp @@ -0,0 +1,355 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "C2SoftAmrNbEnc" +#include + +#include + +#include +#include + +#include "C2SoftAmrNbEnc.h" + +namespace android { + +constexpr char COMPONENT_NAME[] = "c2.android.amrnb.encoder"; + +class C2SoftAmrNbEnc::IntfImpl : public C2InterfaceHelper { + public: + explicit IntfImpl(const std::shared_ptr& helper) + : C2InterfaceHelper(helper) { + setDerivedInstance(this); + + addParameter( + DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING) + .withConstValue( + new C2StreamFormatConfig::input(0u, C2FormatAudio)) + .build()); + + addParameter( + DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING) + .withConstValue( + new C2StreamFormatConfig::output(0u, C2FormatCompressed)) + .build()); + + addParameter( + DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING) + .withConstValue(AllocSharedString( + MEDIA_MIMETYPE_AUDIO_RAW)) + .build()); + + addParameter( + DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING) + .withConstValue(AllocSharedString( + MEDIA_MIMETYPE_AUDIO_AMR_NB)) + .build()); + + addParameter( + DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING) + .withDefault(new C2StreamChannelCountInfo::input(0u, 1)) + .withFields({C2F(mChannelCount, value).equalTo(1)}) + .withSetter((Setter::StrictValueWithNoDeps)) + .build()); + + addParameter( + DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING) + .withDefault(new C2StreamSampleRateInfo::input(0u, 8000)) + .withFields({C2F(mSampleRate, value).equalTo(8000)}) + .withSetter( + (Setter::StrictValueWithNoDeps)) + .build()); + + addParameter( + DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING) + .withDefault(new C2BitrateTuning::output(0u, 4750)) + .withFields({C2F(mBitrate, value).inRange(4750, 12200)}) + .withSetter(Setter::NonStrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE) + .withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 8192)) + .build()); + } + + uint32_t getSampleRate() const { return mSampleRate->value; } + uint32_t getChannelCount() const { return mChannelCount->value; } + uint32_t getBitrate() const { return mBitrate->value; } + + private: + std::shared_ptr mInputFormat; + std::shared_ptr mOutputFormat; + std::shared_ptr mInputMediaType; + std::shared_ptr mOutputMediaType; + std::shared_ptr mSampleRate; + std::shared_ptr mChannelCount; + std::shared_ptr mBitrate; + std::shared_ptr mInputMaxBufSize; +}; + +C2SoftAmrNbEnc::C2SoftAmrNbEnc(const char* name, c2_node_id_t id, + const std::shared_ptr& intfImpl) + : SimpleC2Component( + std::make_shared>(name, id, intfImpl)), + mIntf(intfImpl), + mEncState(nullptr), + mSidState(nullptr) { +} + +C2SoftAmrNbEnc::~C2SoftAmrNbEnc() { + onRelease(); +} + +c2_status_t C2SoftAmrNbEnc::onInit() { + bool dtx_enable = false; + + if (AMREncodeInit(&mEncState, &mSidState, dtx_enable) != 0) + return C2_CORRUPTED; + // TODO: get mode directly from config + switch(mIntf->getBitrate()) { + case 4750: mMode = MR475; + break; + case 5150: mMode = MR515; + break; + case 5900: mMode = MR59; + break; + case 6700: mMode = MR67; + break; + case 7400: mMode = MR74; + break; + case 7950: mMode = MR795; + break; + case 10200: mMode = MR102; + break; + case 12200: mMode = MR122; + break; + default: mMode = MR795; + } + mIsFirst = true; + mSignalledError = false; + mSignalledOutputEos = false; + mAnchorTimeStamp = 0; + mProcessedSamples = 0; + mFilledLen = 0; + + return C2_OK; +} + +void C2SoftAmrNbEnc::onRelease() { + if 
(mEncState) { + AMREncodeExit(&mEncState, &mSidState); + mEncState = mSidState = nullptr; + } +} + +c2_status_t C2SoftAmrNbEnc::onStop() { + if (AMREncodeReset(mEncState, mSidState) != 0) + return C2_CORRUPTED; + mIsFirst = true; + mSignalledError = false; + mSignalledOutputEos = false; + mAnchorTimeStamp = 0; + mProcessedSamples = 0; + mFilledLen = 0; + + return C2_OK; +} + +void C2SoftAmrNbEnc::onReset() { + (void) onStop(); +} + +c2_status_t C2SoftAmrNbEnc::onFlush_sm() { + return onStop(); +} + +static void fillEmptyWork(const std::unique_ptr &work) { + work->worklets.front()->output.flags = work->input.flags; + work->worklets.front()->output.buffers.clear(); + work->worklets.front()->output.ordinal = work->input.ordinal; + work->workletsProcessed = 1u; +} + +void C2SoftAmrNbEnc::process( + const std::unique_ptr &work, + const std::shared_ptr &pool) { + // Initialize output work + work->result = C2_OK; + work->workletsProcessed = 1u; + work->worklets.front()->output.flags = work->input.flags; + + if (mSignalledError || mSignalledOutputEos) { + work->result = C2_BAD_VALUE; + return; + } + + bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0); + size_t inOffset = 0u; + size_t inSize = 0u; + C2ReadView rView = mDummyReadView; + if (!work->input.buffers.empty()) { + rView = work->input.buffers[0]->data().linearBlocks().front().map().get(); + inSize = rView.capacity(); + if (inSize && rView.error()) { + ALOGE("read view map failed %d", rView.error()); + work->result = C2_CORRUPTED; + return; + } + } + + ALOGV("in buffer attr. size %zu timestamp %d frameindex %d, flags %x", + inSize, (int)work->input.ordinal.timestamp.peeku(), + (int)work->input.ordinal.frameIndex.peeku(), work->input.flags); + + size_t outCapacity = kNumBytesPerInputFrame; + outCapacity += mFilledLen + inSize; + std::shared_ptr outputBlock; + C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE }; + c2_status_t err = pool->fetchLinearBlock(outCapacity, usage, &outputBlock); + if (err != C2_OK) { + ALOGE("fetchLinearBlock for Output failed with status %d", err); + work->result = C2_NO_MEMORY; + return; + } + C2WriteView wView = outputBlock->map().get(); + if (wView.error()) { + ALOGE("write view map failed %d", wView.error()); + work->result = C2_CORRUPTED; + return; + } + uint64_t outTimeStamp = + mProcessedSamples * 1000000ll / mIntf->getSampleRate(); + size_t inPos = 0; + size_t outPos = 0; + while (inPos < inSize) { + const uint8_t *inPtr = rView.data() + inOffset; + int validSamples = mFilledLen / sizeof(int16_t); + if ((inPos + (kNumBytesPerInputFrame - mFilledLen)) <= inSize) { + memcpy(mInputFrame + validSamples, inPtr + inPos, + (kNumBytesPerInputFrame - mFilledLen)); + inPos += (kNumBytesPerInputFrame - mFilledLen); + } else { + memcpy(mInputFrame + validSamples, inPtr + inPos, (inSize - inPos)); + mFilledLen += (inSize - inPos); + inPos += (inSize - inPos); + if (eos) { + validSamples = mFilledLen / sizeof(int16_t); + memset(mInputFrame + validSamples, 0, (kNumBytesPerInputFrame - mFilledLen)); + } else break; + + } + Frame_Type_3GPP frameType; + int numEncBytes = AMREncode(mEncState, mSidState, mMode, mInputFrame, + wView.data() + outPos, &frameType, + AMR_TX_WMF); + if (numEncBytes < 0 || numEncBytes > ((int)outCapacity - (int)outPos)) { + ALOGE("encodeFrame call failed, state [%d %zu %zu]", numEncBytes, outPos, outCapacity); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + // Convert header byte from WMF to IETF format. 
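+ // (AMREncode() is called with AMR_TX_WMF, which leaves the 3GPP frame type
+ // in the low nibble of the first output byte; the IETF storage format
+ // (RFC 3267 / RFC 4867) expects it in bits 3..6 with the quality bit 0x04
+ // set and the remaining bits cleared, hence the "(byte << 3 | 4) & 0x7c"
+ // rewrite below.)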
+ if (numEncBytes > 0) + wView.data()[outPos] = ((wView.data()[outPos] << 3) | 4) & 0x7c; + outPos += numEncBytes; + mProcessedSamples += kNumSamplesPerFrame; + mFilledLen = 0; + } + ALOGV("causal sample size %d", mFilledLen); + if (mIsFirst) { + mIsFirst = false; + mAnchorTimeStamp = work->input.ordinal.timestamp.peekull(); + } + fillEmptyWork(work); + if (outPos != 0) { + work->worklets.front()->output.buffers.push_back( + createLinearBuffer(std::move(outputBlock), 0, outPos)); + work->worklets.front()->output.ordinal.timestamp = mAnchorTimeStamp + outTimeStamp; + + } + if (eos) { + mSignalledOutputEos = true; + ALOGV("signalled EOS"); + if (mFilledLen) ALOGV("Discarding trailing %d bytes", mFilledLen); + } +} + +c2_status_t C2SoftAmrNbEnc::drain( + uint32_t drainMode, + const std::shared_ptr &pool) { + (void) pool; + if (drainMode == NO_DRAIN) { + ALOGW("drain with NO_DRAIN: no-op"); + return C2_OK; + } + if (drainMode == DRAIN_CHAIN) { + ALOGW("DRAIN_CHAIN not supported"); + return C2_OMITTED; + } + + onFlush_sm(); + return C2_OK; +} + +class C2SoftAmrNbEncFactory : public C2ComponentFactory { +public: + C2SoftAmrNbEncFactory() + : mHelper(std::static_pointer_cast( + GetCodec2PlatformComponentStore()->getParamReflector())) {} + + virtual c2_status_t createComponent( + c2_node_id_t id, + std::shared_ptr* const component, + std::function deleter) override { + *component = std::shared_ptr( + new C2SoftAmrNbEnc( + COMPONENT_NAME, id, + std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual c2_status_t createInterface( + c2_node_id_t id, + std::shared_ptr* const interface, + std::function deleter) override { + *interface = std::shared_ptr( + new SimpleInterface( + COMPONENT_NAME, id, + std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual ~C2SoftAmrNbEncFactory() override = default; + +private: + std::shared_ptr mHelper; +}; + +} // namespace android + +extern "C" ::C2ComponentFactory* CreateCodec2Factory() { + ALOGV("in %s", __func__); + return new ::android::C2SoftAmrNbEncFactory(); +} + +extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) { + ALOGV("in %s", __func__); + delete factory; +} diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.h b/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.h new file mode 100644 index 0000000000000000000000000000000000000000..6ab14dbc1018b4137d31fb756e684a35cc6c4459 --- /dev/null +++ b/media/codec2/components/amr_nb_wb/C2SoftAmrNbEnc.h @@ -0,0 +1,67 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_C2_SOFT_AMR_NB_ENC_H_ +#define ANDROID_C2_SOFT_AMR_NB_ENC_H_ + +#include + +#include "gsmamr_enc.h" + +namespace android { + +class C2SoftAmrNbEnc : public SimpleC2Component { + public: + class IntfImpl; + C2SoftAmrNbEnc(const char* name, c2_node_id_t id, + const std::shared_ptr& intfImpl); + virtual ~C2SoftAmrNbEnc(); + + // From SimpleC2Component + c2_status_t onInit() override; + c2_status_t onStop() override; + void onReset() override; + void onRelease() override; + c2_status_t onFlush_sm() override; + void process( + const std::unique_ptr &work, + const std::shared_ptr &pool) override; + c2_status_t drain( + uint32_t drainMode, + const std::shared_ptr &pool) override; + +private: + std::shared_ptr mIntf; + static const int32_t kNumSamplesPerFrame = L_FRAME; + static const int32_t kNumBytesPerInputFrame = kNumSamplesPerFrame * sizeof(int16_t); + + void *mEncState; + void *mSidState; + Mode mMode; + bool mIsFirst; + bool mSignalledError; + bool mSignalledOutputEos; + uint64_t mAnchorTimeStamp; + uint64_t mProcessedSamples; + int32_t mFilledLen; + int16_t mInputFrame[kNumSamplesPerFrame]; + + C2_DO_NOT_COPY(C2SoftAmrNbEnc); +}; + +} // namespace android + +#endif // ANDROID_C2_SOFT_AMR_NB_ENC_H_ diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.cpp b/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.cpp new file mode 100644 index 0000000000000000000000000000000000000000..be3892fcd7c6b48a848917d2b23cf8c83e78f082 --- /dev/null +++ b/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.cpp @@ -0,0 +1,430 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "C2SoftAmrWbEnc" +#include + +#include + +#include +#include +#include + +#include "C2SoftAmrWbEnc.h" +#include "cmnMemory.h" + +namespace android { + +constexpr char COMPONENT_NAME[] = "c2.android.amrwb.encoder"; + +class C2SoftAmrWbEnc::IntfImpl : public C2InterfaceHelper { + public: + explicit IntfImpl(const std::shared_ptr& helper) + : C2InterfaceHelper(helper) { + setDerivedInstance(this); + + addParameter( + DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING) + .withConstValue( + new C2StreamFormatConfig::input(0u, C2FormatAudio)) + .build()); + + addParameter( + DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING) + .withConstValue( + new C2StreamFormatConfig::output(0u, C2FormatCompressed)) + .build()); + + addParameter( + DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING) + .withConstValue(AllocSharedString( + MEDIA_MIMETYPE_AUDIO_RAW)) + .build()); + + addParameter( + DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING) + .withConstValue(AllocSharedString( + MEDIA_MIMETYPE_AUDIO_AMR_WB)) + .build()); + + addParameter( + DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING) + .withDefault(new C2StreamChannelCountInfo::input(0u, 1)) + .withFields({C2F(mChannelCount, value).equalTo(1)}) + .withSetter((Setter::StrictValueWithNoDeps)) + .build()); + + addParameter( + DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING) + .withDefault(new C2StreamSampleRateInfo::input(0u, 16000)) + .withFields({C2F(mSampleRate, value).equalTo(16000)}) + .withSetter( + (Setter::StrictValueWithNoDeps)) + .build()); + + addParameter( + DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING) + .withDefault(new C2BitrateTuning::output(0u, 6600)) + .withFields({C2F(mBitrate, value).inRange(6600, 23850)}) + .withSetter(Setter::NonStrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE) + .withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 8192)) + .build()); + } + + uint32_t getSampleRate() const { return mSampleRate->value; } + uint32_t getChannelCount() const { return mChannelCount->value; } + uint32_t getBitrate() const { return mBitrate->value; } + + private: + std::shared_ptr mInputFormat; + std::shared_ptr mOutputFormat; + std::shared_ptr mInputMediaType; + std::shared_ptr mOutputMediaType; + std::shared_ptr mSampleRate; + std::shared_ptr mChannelCount; + std::shared_ptr mBitrate; + std::shared_ptr mInputMaxBufSize; +}; + +C2SoftAmrWbEnc::C2SoftAmrWbEnc(const char* name, c2_node_id_t id, + const std::shared_ptr& intfImpl) + : SimpleC2Component( + std::make_shared>(name, id, intfImpl)), + mIntf(intfImpl), + mEncoderHandle(nullptr), + mApiHandle(nullptr), + mMemOperator(nullptr) { +} + +C2SoftAmrWbEnc::~C2SoftAmrWbEnc() { + onRelease(); +} + +c2_status_t C2SoftAmrWbEnc::onInit() { + // TODO: get mode directly from config + switch(mIntf->getBitrate()) { + case 6600: mMode = VOAMRWB_MD66; + break; + case 8850: mMode = VOAMRWB_MD885; + break; + case 12650: mMode = VOAMRWB_MD1265; + break; + case 14250: mMode = VOAMRWB_MD1425; + break; + case 15850: mMode = VOAMRWB_MD1585; + break; + case 18250: mMode = VOAMRWB_MD1825; + break; + case 19850: mMode = VOAMRWB_MD1985; + break; + case 23050: mMode = VOAMRWB_MD2305; + break; + case 23850: mMode = VOAMRWB_MD2385; + break; + default: mMode = VOAMRWB_MD2305; + } + status_t err = initEncoder(); + mIsFirst = true; + mSignalledError = false; + mSignalledOutputEos = false; + mAnchorTimeStamp = 
0; + mProcessedSamples = 0; + mFilledLen = 0; + + return err == OK ? C2_OK : C2_NO_MEMORY; +} + +void C2SoftAmrWbEnc::onRelease() { + if (mEncoderHandle) { + CHECK_EQ((VO_U32)VO_ERR_NONE, mApiHandle->Uninit(mEncoderHandle)); + mEncoderHandle = nullptr; + } + if (mApiHandle) { + delete mApiHandle; + mApiHandle = nullptr; + } + if (mMemOperator) { + delete mMemOperator; + mMemOperator = nullptr; + } +} + +c2_status_t C2SoftAmrWbEnc::onStop() { + for (int i = 0; i < kNumSamplesPerFrame; i++) { + mInputFrame[i] = 0x0008; /* EHF_MASK */ + } + uint8_t outBuffer[kNumBytesPerInputFrame]; + (void) encodeInput(outBuffer, kNumBytesPerInputFrame); + mIsFirst = true; + mSignalledError = false; + mSignalledOutputEos = false; + mAnchorTimeStamp = 0; + mProcessedSamples = 0; + mFilledLen = 0; + + return C2_OK; +} + +void C2SoftAmrWbEnc::onReset() { + (void) onStop(); +} + +c2_status_t C2SoftAmrWbEnc::onFlush_sm() { + return onStop(); +} + +status_t C2SoftAmrWbEnc::initEncoder() { + mApiHandle = new VO_AUDIO_CODECAPI; + if (!mApiHandle) return NO_MEMORY; + + if (VO_ERR_NONE != voGetAMRWBEncAPI(mApiHandle)) { + ALOGE("Failed to get api handle"); + return UNKNOWN_ERROR; + } + + mMemOperator = new VO_MEM_OPERATOR; + if (!mMemOperator) return NO_MEMORY; + + mMemOperator->Alloc = cmnMemAlloc; + mMemOperator->Copy = cmnMemCopy; + mMemOperator->Free = cmnMemFree; + mMemOperator->Set = cmnMemSet; + mMemOperator->Check = cmnMemCheck; + + VO_CODEC_INIT_USERDATA userData; + memset(&userData, 0, sizeof(userData)); + userData.memflag = VO_IMF_USERMEMOPERATOR; + userData.memData = (VO_PTR) mMemOperator; + + if (VO_ERR_NONE != mApiHandle->Init( + &mEncoderHandle, VO_AUDIO_CodingAMRWB, &userData)) { + ALOGE("Failed to init AMRWB encoder"); + return UNKNOWN_ERROR; + } + + VOAMRWBFRAMETYPE type = VOAMRWB_RFC3267; + if (VO_ERR_NONE != mApiHandle->SetParam( + mEncoderHandle, VO_PID_AMRWB_FRAMETYPE, &type)) { + ALOGE("Failed to set AMRWB encoder frame type to %d", type); + return UNKNOWN_ERROR; + } + + if (VO_ERR_NONE != + mApiHandle->SetParam( + mEncoderHandle, VO_PID_AMRWB_MODE, &mMode)) { + ALOGE("Failed to set AMRWB encoder mode to %d", mMode); + return UNKNOWN_ERROR; + } + + return OK; +} + +int C2SoftAmrWbEnc::encodeInput(uint8_t *buffer, uint32_t length) { + VO_CODECBUFFER inputData; + memset(&inputData, 0, sizeof(inputData)); + inputData.Buffer = (unsigned char *) mInputFrame; + inputData.Length = kNumBytesPerInputFrame; + + CHECK_EQ((VO_U32)VO_ERR_NONE, + mApiHandle->SetInputData(mEncoderHandle, &inputData)); + + VO_AUDIO_OUTPUTINFO outputInfo; + memset(&outputInfo, 0, sizeof(outputInfo)); + VO_CODECBUFFER outputData; + memset(&outputData, 0, sizeof(outputData)); + outputData.Buffer = buffer; + outputData.Length = length; + VO_U32 ret = mApiHandle->GetOutputData( + mEncoderHandle, &outputData, &outputInfo); + if (ret != VO_ERR_NONE && ret != VO_ERR_INPUT_BUFFER_SMALL) { + ALOGD("encountered error during encode call"); + return -1; + } + return outputData.Length; +} + +static void fillEmptyWork(const std::unique_ptr &work) { + work->worklets.front()->output.flags = work->input.flags; + work->worklets.front()->output.buffers.clear(); + work->worklets.front()->output.ordinal = work->input.ordinal; + work->workletsProcessed = 1u; +} + +void C2SoftAmrWbEnc::process( + const std::unique_ptr &work, + const std::shared_ptr &pool) { + // Initialize output work + work->result = C2_OK; + work->workletsProcessed = 1u; + work->worklets.front()->output.flags = work->input.flags; + + if (mSignalledError || mSignalledOutputEos) { + 
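+ // (Once a fatal error or end-of-stream has been signalled, further work
+ // items are rejected outright with C2_BAD_VALUE.)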
work->result = C2_BAD_VALUE; + return; + } + + size_t inOffset = 0u; + size_t inSize = 0u; + C2ReadView rView = mDummyReadView; + if (!work->input.buffers.empty()) { + rView = work->input.buffers[0]->data().linearBlocks().front().map().get(); + inSize = rView.capacity(); + if (inSize && rView.error()) { + ALOGE("read view map failed %d", rView.error()); + work->result = rView.error(); + return; + } + } + bool eos = (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0; + + ALOGV("in buffer attr. size %zu timestamp %d frameindex %d, flags %x", + inSize, (int)work->input.ordinal.timestamp.peeku(), + (int)work->input.ordinal.frameIndex.peeku(), work->input.flags); + + size_t outCapacity = kNumBytesPerInputFrame; + outCapacity += mFilledLen + inSize; + std::shared_ptr outputBlock; + C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE }; + c2_status_t err = pool->fetchLinearBlock(outCapacity, usage, &outputBlock); + if (err != C2_OK) { + ALOGE("fetchLinearBlock for Output failed with status %d", err); + work->result = C2_NO_MEMORY; + return; + } + C2WriteView wView = outputBlock->map().get(); + if (wView.error()) { + ALOGE("write view map failed %d", wView.error()); + work->result = wView.error(); + return; + } + uint64_t outTimeStamp = + mProcessedSamples * 1000000ll / mIntf->getSampleRate(); + size_t inPos = 0; + size_t outPos = 0; + while (inPos < inSize) { + const uint8_t *inPtr = rView.data() + inOffset; + int validSamples = mFilledLen / sizeof(int16_t); + if ((inPos + (kNumBytesPerInputFrame - mFilledLen)) <= inSize) { + memcpy(mInputFrame + validSamples, inPtr + inPos, + (kNumBytesPerInputFrame - mFilledLen)); + inPos += (kNumBytesPerInputFrame - mFilledLen); + } else { + memcpy(mInputFrame + validSamples, inPtr + inPos, (inSize - inPos)); + mFilledLen += (inSize - inPos); + inPos += (inSize - inPos); + if (eos) { + validSamples = mFilledLen / sizeof(int16_t); + memset(mInputFrame + validSamples, 0, (kNumBytesPerInputFrame - mFilledLen)); + } else break; + } + int numEncBytes = encodeInput((wView.data() + outPos), outCapacity - outPos); + if (numEncBytes < 0) { + ALOGE("encodeFrame call failed, state [%d %zu %zu]", numEncBytes, outPos, outCapacity); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + outPos += numEncBytes; + mProcessedSamples += kNumSamplesPerFrame; + mFilledLen = 0; + } + ALOGV("causal sample size %d", mFilledLen); + if (mIsFirst) { + mIsFirst = false; + mAnchorTimeStamp = work->input.ordinal.timestamp.peekull(); + } + fillEmptyWork(work); + if (outPos != 0) { + work->worklets.front()->output.buffers.push_back( + createLinearBuffer(std::move(outputBlock), 0, outPos)); + work->worklets.front()->output.ordinal.timestamp = mAnchorTimeStamp + outTimeStamp; + } + if (eos) { + mSignalledOutputEos = true; + ALOGV("signalled EOS"); + if (mFilledLen) ALOGV("Discarding trailing %d bytes", mFilledLen); + } +} + +c2_status_t C2SoftAmrWbEnc::drain( + uint32_t drainMode, + const std::shared_ptr &pool) { + (void) pool; + if (drainMode == NO_DRAIN) { + ALOGW("drain with NO_DRAIN: no-op"); + return C2_OK; + } + if (drainMode == DRAIN_CHAIN) { + ALOGW("DRAIN_CHAIN not supported"); + return C2_OMITTED; + } + + onFlush_sm(); + return C2_OK; +} + +class C2SoftAmrWbEncFactory : public C2ComponentFactory { +public: + C2SoftAmrWbEncFactory() + : mHelper(std::static_pointer_cast( + GetCodec2PlatformComponentStore()->getParamReflector())) {} + + virtual c2_status_t createComponent( + c2_node_id_t id, + std::shared_ptr* const component, + 
std::function deleter) override { + *component = std::shared_ptr( + new C2SoftAmrWbEnc( + COMPONENT_NAME, id, + std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual c2_status_t createInterface( + c2_node_id_t id, + std::shared_ptr* const interface, + std::function deleter) override { + *interface = std::shared_ptr( + new SimpleInterface( + COMPONENT_NAME, id, + std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual ~C2SoftAmrWbEncFactory() override = default; + +private: + std::shared_ptr mHelper; +}; + +} // namespace android + +extern "C" ::C2ComponentFactory* CreateCodec2Factory() { + ALOGV("in %s", __func__); + return new ::android::C2SoftAmrWbEncFactory(); +} + +extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) { + ALOGV("in %s", __func__); + delete factory; +} diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.h b/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.h new file mode 100644 index 0000000000000000000000000000000000000000..0cc9e9ff937ae45314e86be8ed7b76a4d1270fdc --- /dev/null +++ b/media/codec2/components/amr_nb_wb/C2SoftAmrWbEnc.h @@ -0,0 +1,71 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_C2_SOFT_AMR_WB_ENC_H_ +#define ANDROID_C2_SOFT_AMR_WB_ENC_H_ + +#include + +#include "voAMRWB.h" + +namespace android { + +class C2SoftAmrWbEnc : public SimpleC2Component { +public: + class IntfImpl; + C2SoftAmrWbEnc(const char* name, c2_node_id_t id, + const std::shared_ptr& intfImpl); + virtual ~C2SoftAmrWbEnc(); + + // From SimpleC2Component + c2_status_t onInit() override; + c2_status_t onStop() override; + void onReset() override; + void onRelease() override; + c2_status_t onFlush_sm() override; + void process( + const std::unique_ptr &work, + const std::shared_ptr &pool) override; + c2_status_t drain( + uint32_t drainMode, + const std::shared_ptr &pool) override; + +private: + std::shared_ptr mIntf; + static const int32_t kNumSamplesPerFrame = 320; + static const int32_t kNumBytesPerInputFrame = kNumSamplesPerFrame * sizeof(int16_t); + + void *mEncoderHandle; + VO_AUDIO_CODECAPI *mApiHandle; + VO_MEM_OPERATOR *mMemOperator; + VOAMRWBMODE mMode; + bool mIsFirst; + bool mSignalledError; + bool mSignalledOutputEos; + uint64_t mAnchorTimeStamp; + uint64_t mProcessedSamples; + int32_t mFilledLen; + int16_t mInputFrame[kNumSamplesPerFrame]; + + status_t initEncoder(); + int encodeInput(uint8_t *buffer, uint32_t length); + + C2_DO_NOT_COPY(C2SoftAmrWbEnc); +}; + +} // namespace android + +#endif // ANDROID_C2_SOFT_AMR_WB_ENC_H_ diff --git a/media/codec2/components/amr_nb_wb/MODULE_LICENSE_APACHE2 b/media/codec2/components/amr_nb_wb/MODULE_LICENSE_APACHE2 new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/media/codec2/components/amr_nb_wb/NOTICE b/media/codec2/components/amr_nb_wb/NOTICE new file mode 100644 index 
0000000000000000000000000000000000000000..c5b1efa7aac764ae6d8da63476a2d5cec02a6a5d --- /dev/null +++ b/media/codec2/components/amr_nb_wb/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2005-2008, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + diff --git a/media/codec2/components/amr_nb_wb/patent_disclaimer.txt b/media/codec2/components/amr_nb_wb/patent_disclaimer.txt new file mode 100644 index 0000000000000000000000000000000000000000..b4bf11d4ab14011ba28ce5ff6090529d2b7b587f --- /dev/null +++ b/media/codec2/components/amr_nb_wb/patent_disclaimer.txt @@ -0,0 +1,9 @@ + +THIS IS NOT A GRANT OF PATENT RIGHTS. + +Google makes no representation or warranty that the codecs for which +source code is made available hereunder are unencumbered by +third-party patents. Those intending to use this source code in +hardware or software products are advised that implementations of +these codecs, including in open source software or shareware, may +require patent licenses from the relevant patent holders. diff --git a/media/codec2/components/avc/Android.bp b/media/codec2/components/avc/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..d8839512ee84bf7fd583ebcf1d7bcf844d9c96df --- /dev/null +++ b/media/codec2/components/avc/Android.bp @@ -0,0 +1,37 @@ +cc_library_shared { + name: "libstagefright_soft_c2avcdec", + defaults: [ + "libstagefright_soft_c2-defaults", + "libstagefright_soft_c2_sanitize_signed-defaults", + ], + + static_libs: ["libavcdec"], + + srcs: ["C2SoftAvcDec.cpp"], + + include_dirs: [ + "external/libavc/decoder", + "external/libavc/common", + ], +} + +cc_library_shared { + name: "libstagefright_soft_c2avcenc", + defaults: [ + "libstagefright_soft_c2-defaults", + "libstagefright_soft_c2_sanitize_signed-defaults", + ], + + static_libs: ["libavcenc"], + + srcs: ["C2SoftAvcEnc.cpp"], + + include_dirs: [ + "external/libavc/encoder", + "external/libavc/common", + ], + + cflags: [ + "-Wno-unused-variable", + ], +} diff --git a/media/codec2/components/avc/C2SoftAvcDec.cpp b/media/codec2/components/avc/C2SoftAvcDec.cpp new file mode 100644 index 0000000000000000000000000000000000000000..3e62744bd1bb1de8455a6e6f95070cc084961b47 --- /dev/null +++ b/media/codec2/components/avc/C2SoftAvcDec.cpp @@ -0,0 +1,978 @@ +/* + * Copyright 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "C2SoftAvcDec" +#include + +#include + +#include +#include +#include +#include + +#include "C2SoftAvcDec.h" +#include "ih264d.h" + +namespace android { + +namespace { + +constexpr char COMPONENT_NAME[] = "c2.android.avc.decoder"; + +} // namespace + +class C2SoftAvcDec::IntfImpl : public SimpleInterface::BaseParams { +public: + explicit IntfImpl(const std::shared_ptr &helper) + : SimpleInterface::BaseParams( + helper, + COMPONENT_NAME, + C2Component::KIND_DECODER, + C2Component::DOMAIN_VIDEO, + MEDIA_MIMETYPE_VIDEO_AVC) { + noPrivateBuffers(); // TODO: account for our buffers here + noInputReferences(); + noOutputReferences(); + noInputLatency(); + noTimeStretch(); + + // TODO: output latency and reordering + + addParameter( + DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES) + .withConstValue(new C2ComponentAttributesSetting(C2Component::ATTRIB_IS_TEMPORAL)) + .build()); + + // coded and output picture size is the same for this codec + addParameter( + DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE) + .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240)) + .withFields({ + C2F(mSize, width).inRange(2, 4080, 2), + C2F(mSize, height).inRange(2, 4080, 2), + }) + .withSetter(SizeSetter) + .build()); + + addParameter( + DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE) + .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240)) + .withFields({ + C2F(mSize, width).inRange(2, 4080, 2), + C2F(mSize, height).inRange(2, 4080, 2), + }) + .withSetter(MaxPictureSizeSetter, mSize) + .build()); + + addParameter( + DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL) + .withDefault(new C2StreamProfileLevelInfo::input(0u, + C2Config::PROFILE_AVC_CONSTRAINED_BASELINE, C2Config::LEVEL_AVC_5_2)) + .withFields({ + C2F(mProfileLevel, profile).oneOf({ + C2Config::PROFILE_AVC_CONSTRAINED_BASELINE, + C2Config::PROFILE_AVC_BASELINE, + C2Config::PROFILE_AVC_MAIN, + C2Config::PROFILE_AVC_CONSTRAINED_HIGH, + C2Config::PROFILE_AVC_PROGRESSIVE_HIGH, + C2Config::PROFILE_AVC_HIGH}), + C2F(mProfileLevel, level).oneOf({ + C2Config::LEVEL_AVC_1, C2Config::LEVEL_AVC_1B, C2Config::LEVEL_AVC_1_1, + C2Config::LEVEL_AVC_1_2, C2Config::LEVEL_AVC_1_3, + C2Config::LEVEL_AVC_2, C2Config::LEVEL_AVC_2_1, C2Config::LEVEL_AVC_2_2, + C2Config::LEVEL_AVC_3, C2Config::LEVEL_AVC_3_1, C2Config::LEVEL_AVC_3_2, + C2Config::LEVEL_AVC_4, C2Config::LEVEL_AVC_4_1, C2Config::LEVEL_AVC_4_2, + C2Config::LEVEL_AVC_5, C2Config::LEVEL_AVC_5_1, C2Config::LEVEL_AVC_5_2 + }) + }) + .withSetter(ProfileLevelSetter, mSize) + .build()); + + addParameter( + DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE) + .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, 320 * 240 * 3 / 4)) + .withFields({ + C2F(mMaxInputSize, value).any(), + }) + .calculatedAs(MaxInputSizeSetter, mMaxSize) + .build()); + + C2ChromaOffsetStruct locations[1] = { C2ChromaOffsetStruct::ITU_YUV_420_0() }; + std::shared_ptr defaultColorInfo = + C2StreamColorInfo::output::AllocShared( + 1u, 0u, 8u /* bitDepth */, C2Color::YUV_420); + memcpy(defaultColorInfo->m.locations, locations, sizeof(locations)); + + defaultColorInfo = + C2StreamColorInfo::output::AllocShared( + { C2ChromaOffsetStruct::ITU_YUV_420_0() }, + 0u, 8u /* bitDepth */, C2Color::YUV_420); + helper->addStructDescriptors(); + + addParameter( + DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO) + .withConstValue(defaultColorInfo) + .build()); + + addParameter( + DefineParam(mDefaultColorAspects, C2_PARAMKEY_DEFAULT_COLOR_ASPECTS) + 
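+                // Fallback aspects used when the bitstream VUI leaves a value unspecified; ColorAspectsSetter() below merges these with the coded aspects.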
.withDefault(new C2StreamColorAspectsTuning::output( + 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED, + C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED)) + .withFields({ + C2F(mDefaultColorAspects, range).inRange( + C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER), + C2F(mDefaultColorAspects, primaries).inRange( + C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER), + C2F(mDefaultColorAspects, transfer).inRange( + C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER), + C2F(mDefaultColorAspects, matrix).inRange( + C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER) + }) + .withSetter(DefaultColorAspectsSetter) + .build()); + + addParameter( + DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS) + .withDefault(new C2StreamColorAspectsInfo::input( + 0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED, + C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED)) + .withFields({ + C2F(mCodedColorAspects, range).inRange( + C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER), + C2F(mCodedColorAspects, primaries).inRange( + C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER), + C2F(mCodedColorAspects, transfer).inRange( + C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER), + C2F(mCodedColorAspects, matrix).inRange( + C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER) + }) + .withSetter(CodedColorAspectsSetter) + .build()); + + addParameter( + DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS) + .withDefault(new C2StreamColorAspectsInfo::output( + 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED, + C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED)) + .withFields({ + C2F(mColorAspects, range).inRange( + C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER), + C2F(mColorAspects, primaries).inRange( + C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER), + C2F(mColorAspects, transfer).inRange( + C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER), + C2F(mColorAspects, matrix).inRange( + C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER) + }) + .withSetter(ColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects) + .build()); + + // TODO: support more formats? + addParameter( + DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT) + .withConstValue(new C2StreamPixelFormatInfo::output( + 0u, HAL_PIXEL_FORMAT_YCBCR_420_888)) + .build()); + } + + static C2R SizeSetter(bool mayBlock, const C2P &oldMe, + C2P &me) { + (void)mayBlock; + C2R res = C2R::Ok(); + if (!me.F(me.v.width).supportsAtAll(me.v.width)) { + res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width))); + me.set().width = oldMe.v.width; + } + if (!me.F(me.v.height).supportsAtAll(me.v.height)) { + res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height))); + me.set().height = oldMe.v.height; + } + return res; + } + + static C2R MaxPictureSizeSetter(bool mayBlock, C2P &me, + const C2P &size) { + (void)mayBlock; + // TODO: get max width/height from the size's field helpers vs. 
hardcoding + me.set().width = c2_min(c2_max(me.v.width, size.v.width), 4080u); + me.set().height = c2_min(c2_max(me.v.height, size.v.height), 4080u); + return C2R::Ok(); + } + + static C2R MaxInputSizeSetter(bool mayBlock, C2P &me, + const C2P &maxSize) { + (void)mayBlock; + // assume compression ratio of 2 + me.set().value = (((maxSize.v.width + 15) / 16) * ((maxSize.v.height + 15) / 16) * 192); + return C2R::Ok(); + } + + static C2R ProfileLevelSetter(bool mayBlock, C2P &me, + const C2P &size) { + (void)mayBlock; + (void)size; + (void)me; // TODO: validate + return C2R::Ok(); + } + + static C2R DefaultColorAspectsSetter(bool mayBlock, C2P &me) { + (void)mayBlock; + if (me.v.range > C2Color::RANGE_OTHER) { + me.set().range = C2Color::RANGE_OTHER; + } + if (me.v.primaries > C2Color::PRIMARIES_OTHER) { + me.set().primaries = C2Color::PRIMARIES_OTHER; + } + if (me.v.transfer > C2Color::TRANSFER_OTHER) { + me.set().transfer = C2Color::TRANSFER_OTHER; + } + if (me.v.matrix > C2Color::MATRIX_OTHER) { + me.set().matrix = C2Color::MATRIX_OTHER; + } + return C2R::Ok(); + } + + static C2R CodedColorAspectsSetter(bool mayBlock, C2P &me) { + (void)mayBlock; + if (me.v.range > C2Color::RANGE_OTHER) { + me.set().range = C2Color::RANGE_OTHER; + } + if (me.v.primaries > C2Color::PRIMARIES_OTHER) { + me.set().primaries = C2Color::PRIMARIES_OTHER; + } + if (me.v.transfer > C2Color::TRANSFER_OTHER) { + me.set().transfer = C2Color::TRANSFER_OTHER; + } + if (me.v.matrix > C2Color::MATRIX_OTHER) { + me.set().matrix = C2Color::MATRIX_OTHER; + } + return C2R::Ok(); + } + + static C2R ColorAspectsSetter(bool mayBlock, C2P &me, + const C2P &def, + const C2P &coded) { + (void)mayBlock; + // take default values for all unspecified fields, and coded values for specified ones + me.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range; + me.set().primaries = coded.v.primaries == PRIMARIES_UNSPECIFIED + ? def.v.primaries : coded.v.primaries; + me.set().transfer = coded.v.transfer == TRANSFER_UNSPECIFIED + ? def.v.transfer : coded.v.transfer; + me.set().matrix = coded.v.matrix == MATRIX_UNSPECIFIED ? def.v.matrix : coded.v.matrix; + return C2R::Ok(); + } + + std::shared_ptr getColorAspects_l() { + return mColorAspects; + } + +private: + std::shared_ptr mProfileLevel; + std::shared_ptr mSize; + std::shared_ptr mMaxSize; + std::shared_ptr mMaxInputSize; + std::shared_ptr mColorInfo; + std::shared_ptr mCodedColorAspects; + std::shared_ptr mDefaultColorAspects; + std::shared_ptr mColorAspects; + std::shared_ptr mPixelFormat; +}; + +static size_t getCpuCoreCount() { + long cpuCoreCount = 1; +#if defined(_SC_NPROCESSORS_ONLN) + cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN); +#else + // _SC_NPROC_ONLN must be defined... 
+ cpuCoreCount = sysconf(_SC_NPROC_ONLN); +#endif + CHECK(cpuCoreCount >= 1); + ALOGV("Number of CPU cores: %ld", cpuCoreCount); + return (size_t)cpuCoreCount; +} + +static void *ivd_aligned_malloc(void *ctxt, WORD32 alignment, WORD32 size) { + (void) ctxt; + return memalign(alignment, size); +} + +static void ivd_aligned_free(void *ctxt, void *mem) { + (void) ctxt; + free(mem); +} + +C2SoftAvcDec::C2SoftAvcDec( + const char *name, + c2_node_id_t id, + const std::shared_ptr &intfImpl) + : SimpleC2Component(std::make_shared>(name, id, intfImpl)), + mIntf(intfImpl), + mDecHandle(nullptr), + mOutBufferFlush(nullptr), + mIvColorFormat(IV_YUV_420P), + mWidth(320), + mHeight(240), + mHeaderDecoded(false) { + GENERATE_FILE_NAMES(); + CREATE_DUMP_FILE(mInFile); +} + +C2SoftAvcDec::~C2SoftAvcDec() { + onRelease(); +} + +c2_status_t C2SoftAvcDec::onInit() { + status_t err = initDecoder(); + return err == OK ? C2_OK : C2_CORRUPTED; +} + +c2_status_t C2SoftAvcDec::onStop() { + if (OK != resetDecoder()) return C2_CORRUPTED; + resetPlugin(); + return C2_OK; +} + +void C2SoftAvcDec::onReset() { + (void) onStop(); +} + +void C2SoftAvcDec::onRelease() { + (void) deleteDecoder(); + if (mOutBufferFlush) { + ivd_aligned_free(nullptr, mOutBufferFlush); + mOutBufferFlush = nullptr; + } + if (mOutBlock) { + mOutBlock.reset(); + } +} + +c2_status_t C2SoftAvcDec::onFlush_sm() { + if (OK != setFlushMode()) return C2_CORRUPTED; + + uint32_t bufferSize = mStride * mHeight * 3 / 2; + mOutBufferFlush = (uint8_t *)ivd_aligned_malloc(nullptr, 128, bufferSize); + if (!mOutBufferFlush) { + ALOGE("could not allocate tmp output buffer (for flush) of size %u ", bufferSize); + return C2_NO_MEMORY; + } + + while (true) { + ivd_video_decode_ip_t s_decode_ip; + ivd_video_decode_op_t s_decode_op; + + setDecodeArgs(&s_decode_ip, &s_decode_op, nullptr, nullptr, 0, 0, 0); + (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op); + if (0 == s_decode_op.u4_output_present) { + resetPlugin(); + break; + } + } + + if (mOutBufferFlush) { + ivd_aligned_free(nullptr, mOutBufferFlush); + mOutBufferFlush = nullptr; + } + + return C2_OK; +} + +status_t C2SoftAvcDec::createDecoder() { + ivdext_create_ip_t s_create_ip; + ivdext_create_op_t s_create_op; + + s_create_ip.s_ivd_create_ip_t.u4_size = sizeof(ivdext_create_ip_t); + s_create_ip.s_ivd_create_ip_t.e_cmd = IVD_CMD_CREATE; + s_create_ip.s_ivd_create_ip_t.u4_share_disp_buf = 0; + s_create_ip.s_ivd_create_ip_t.e_output_format = mIvColorFormat; + s_create_ip.s_ivd_create_ip_t.pf_aligned_alloc = ivd_aligned_malloc; + s_create_ip.s_ivd_create_ip_t.pf_aligned_free = ivd_aligned_free; + s_create_ip.s_ivd_create_ip_t.pv_mem_ctxt = nullptr; + s_create_op.s_ivd_create_op_t.u4_size = sizeof(ivdext_create_op_t); + IV_API_CALL_STATUS_T status = ivdec_api_function(nullptr, + &s_create_ip, + &s_create_op); + if (status != IV_SUCCESS) { + ALOGE("error in %s: 0x%x", __func__, + s_create_op.s_ivd_create_op_t.u4_error_code); + return UNKNOWN_ERROR; + } + mDecHandle = (iv_obj_t*)s_create_op.s_ivd_create_op_t.pv_handle; + mDecHandle->pv_fxns = (void *)ivdec_api_function; + mDecHandle->u4_size = sizeof(iv_obj_t); + + return OK; +} + +status_t C2SoftAvcDec::setNumCores() { + ivdext_ctl_set_num_cores_ip_t s_set_num_cores_ip; + ivdext_ctl_set_num_cores_op_t s_set_num_cores_op; + + s_set_num_cores_ip.u4_size = sizeof(ivdext_ctl_set_num_cores_ip_t); + s_set_num_cores_ip.e_cmd = IVD_CMD_VIDEO_CTL; + s_set_num_cores_ip.e_sub_cmd = IVDEXT_CMD_CTL_SET_NUM_CORES; + s_set_num_cores_ip.u4_num_cores = mNumCores; + 
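+    // mNumCores is capped at MAX_NUM_CORES in initDecoder(), so the decoder is asked for at most four worker cores here.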
s_set_num_cores_op.u4_size = sizeof(ivdext_ctl_set_num_cores_op_t); + IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle, + &s_set_num_cores_ip, + &s_set_num_cores_op); + if (IV_SUCCESS != status) { + ALOGD("error in %s: 0x%x", __func__, s_set_num_cores_op.u4_error_code); + return UNKNOWN_ERROR; + } + + return OK; +} + +status_t C2SoftAvcDec::setParams(size_t stride, IVD_VIDEO_DECODE_MODE_T dec_mode) { + ivd_ctl_set_config_ip_t s_set_dyn_params_ip; + ivd_ctl_set_config_op_t s_set_dyn_params_op; + + s_set_dyn_params_ip.u4_size = sizeof(ivd_ctl_set_config_ip_t); + s_set_dyn_params_ip.e_cmd = IVD_CMD_VIDEO_CTL; + s_set_dyn_params_ip.e_sub_cmd = IVD_CMD_CTL_SETPARAMS; + s_set_dyn_params_ip.u4_disp_wd = (UWORD32) stride; + s_set_dyn_params_ip.e_frm_skip_mode = IVD_SKIP_NONE; + s_set_dyn_params_ip.e_frm_out_mode = IVD_DISPLAY_FRAME_OUT; + s_set_dyn_params_ip.e_vid_dec_mode = dec_mode; + s_set_dyn_params_op.u4_size = sizeof(ivd_ctl_set_config_op_t); + IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle, + &s_set_dyn_params_ip, + &s_set_dyn_params_op); + if (status != IV_SUCCESS) { + ALOGE("error in %s: 0x%x", __func__, s_set_dyn_params_op.u4_error_code); + return UNKNOWN_ERROR; + } + + return OK; +} + +void C2SoftAvcDec::getVersion() { + ivd_ctl_getversioninfo_ip_t s_get_versioninfo_ip; + ivd_ctl_getversioninfo_op_t s_get_versioninfo_op; + UWORD8 au1_buf[512]; + + s_get_versioninfo_ip.u4_size = sizeof(ivd_ctl_getversioninfo_ip_t); + s_get_versioninfo_ip.e_cmd = IVD_CMD_VIDEO_CTL; + s_get_versioninfo_ip.e_sub_cmd = IVD_CMD_CTL_GETVERSION; + s_get_versioninfo_ip.pv_version_buffer = au1_buf; + s_get_versioninfo_ip.u4_version_buffer_size = sizeof(au1_buf); + s_get_versioninfo_op.u4_size = sizeof(ivd_ctl_getversioninfo_op_t); + IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle, + &s_get_versioninfo_ip, + &s_get_versioninfo_op); + if (status != IV_SUCCESS) { + ALOGD("error in %s: 0x%x", __func__, + s_get_versioninfo_op.u4_error_code); + } else { + ALOGV("ittiam decoder version number: %s", + (char *) s_get_versioninfo_ip.pv_version_buffer); + } +} + +status_t C2SoftAvcDec::initDecoder() { + if (OK != createDecoder()) return UNKNOWN_ERROR; + mNumCores = MIN(getCpuCoreCount(), MAX_NUM_CORES); + mStride = ALIGN64(mWidth); + mSignalledError = false; + resetPlugin(); + (void) setNumCores(); + if (OK != setParams(mStride, IVD_DECODE_FRAME)) return UNKNOWN_ERROR; + (void) getVersion(); + + return OK; +} + +bool C2SoftAvcDec::setDecodeArgs(ivd_video_decode_ip_t *ps_decode_ip, + ivd_video_decode_op_t *ps_decode_op, + C2ReadView *inBuffer, + C2GraphicView *outBuffer, + size_t inOffset, + size_t inSize, + uint32_t tsMarker) { + uint32_t displayStride = mStride; + uint32_t displayHeight = mHeight; + size_t lumaSize = displayStride * displayHeight; + size_t chromaSize = lumaSize >> 2; + + ps_decode_ip->u4_size = sizeof(ivd_video_decode_ip_t); + ps_decode_ip->e_cmd = IVD_CMD_VIDEO_DECODE; + if (inBuffer) { + ps_decode_ip->u4_ts = tsMarker; + ps_decode_ip->pv_stream_buffer = const_cast(inBuffer->data() + inOffset); + ps_decode_ip->u4_num_Bytes = inSize; + } else { + ps_decode_ip->u4_ts = 0; + ps_decode_ip->pv_stream_buffer = nullptr; + ps_decode_ip->u4_num_Bytes = 0; + } + ps_decode_ip->s_out_buffer.u4_min_out_buf_size[0] = lumaSize; + ps_decode_ip->s_out_buffer.u4_min_out_buf_size[1] = chromaSize; + ps_decode_ip->s_out_buffer.u4_min_out_buf_size[2] = chromaSize; + if (outBuffer) { + if (outBuffer->width() < displayStride || outBuffer->height() < displayHeight) { + ALOGE("Output buffer too 
small: provided (%dx%d) required (%ux%u)", + outBuffer->width(), outBuffer->height(), displayStride, displayHeight); + return false; + } + ps_decode_ip->s_out_buffer.pu1_bufs[0] = outBuffer->data()[C2PlanarLayout::PLANE_Y]; + ps_decode_ip->s_out_buffer.pu1_bufs[1] = outBuffer->data()[C2PlanarLayout::PLANE_U]; + ps_decode_ip->s_out_buffer.pu1_bufs[2] = outBuffer->data()[C2PlanarLayout::PLANE_V]; + } else { + ps_decode_ip->s_out_buffer.pu1_bufs[0] = mOutBufferFlush; + ps_decode_ip->s_out_buffer.pu1_bufs[1] = mOutBufferFlush + lumaSize; + ps_decode_ip->s_out_buffer.pu1_bufs[2] = mOutBufferFlush + lumaSize + chromaSize; + } + ps_decode_ip->s_out_buffer.u4_num_bufs = 3; + ps_decode_op->u4_size = sizeof(ivd_video_decode_op_t); + + return true; +} + +bool C2SoftAvcDec::getVuiParams() { + ivdext_ctl_get_vui_params_ip_t s_get_vui_params_ip; + ivdext_ctl_get_vui_params_op_t s_get_vui_params_op; + + s_get_vui_params_ip.u4_size = sizeof(ivdext_ctl_get_vui_params_ip_t); + s_get_vui_params_ip.e_cmd = IVD_CMD_VIDEO_CTL; + s_get_vui_params_ip.e_sub_cmd = + (IVD_CONTROL_API_COMMAND_TYPE_T) IH264D_CMD_CTL_GET_VUI_PARAMS; + s_get_vui_params_op.u4_size = sizeof(ivdext_ctl_get_vui_params_op_t); + IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle, + &s_get_vui_params_ip, + &s_get_vui_params_op); + if (status != IV_SUCCESS) { + ALOGD("error in %s: 0x%x", __func__, s_get_vui_params_op.u4_error_code); + return false; + } + + VuiColorAspects vuiColorAspects; + vuiColorAspects.primaries = s_get_vui_params_op.u1_colour_primaries; + vuiColorAspects.transfer = s_get_vui_params_op.u1_tfr_chars; + vuiColorAspects.coeffs = s_get_vui_params_op.u1_matrix_coeffs; + vuiColorAspects.fullRange = s_get_vui_params_op.u1_video_full_range_flag; + + // convert vui aspects to C2 values if changed + if (!(vuiColorAspects == mBitstreamColorAspects)) { + mBitstreamColorAspects = vuiColorAspects; + ColorAspects sfAspects; + C2StreamColorAspectsInfo::input codedAspects = { 0u }; + ColorUtils::convertIsoColorAspectsToCodecAspects( + vuiColorAspects.primaries, vuiColorAspects.transfer, vuiColorAspects.coeffs, + vuiColorAspects.fullRange, sfAspects); + if (!C2Mapper::map(sfAspects.mPrimaries, &codedAspects.primaries)) { + codedAspects.primaries = C2Color::PRIMARIES_UNSPECIFIED; + } + if (!C2Mapper::map(sfAspects.mRange, &codedAspects.range)) { + codedAspects.range = C2Color::RANGE_UNSPECIFIED; + } + if (!C2Mapper::map(sfAspects.mMatrixCoeffs, &codedAspects.matrix)) { + codedAspects.matrix = C2Color::MATRIX_UNSPECIFIED; + } + if (!C2Mapper::map(sfAspects.mTransfer, &codedAspects.transfer)) { + codedAspects.transfer = C2Color::TRANSFER_UNSPECIFIED; + } + std::vector> failures; + (void)mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures); + } + return true; +} + +status_t C2SoftAvcDec::setFlushMode() { + ivd_ctl_flush_ip_t s_set_flush_ip; + ivd_ctl_flush_op_t s_set_flush_op; + + s_set_flush_ip.u4_size = sizeof(ivd_ctl_flush_ip_t); + s_set_flush_ip.e_cmd = IVD_CMD_VIDEO_CTL; + s_set_flush_ip.e_sub_cmd = IVD_CMD_CTL_FLUSH; + s_set_flush_op.u4_size = sizeof(ivd_ctl_flush_op_t); + IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle, + &s_set_flush_ip, + &s_set_flush_op); + if (status != IV_SUCCESS) { + ALOGE("error in %s: 0x%x", __func__, s_set_flush_op.u4_error_code); + return UNKNOWN_ERROR; + } + + return OK; +} + +status_t C2SoftAvcDec::resetDecoder() { + ivd_ctl_reset_ip_t s_reset_ip; + ivd_ctl_reset_op_t s_reset_op; + + s_reset_ip.u4_size = sizeof(ivd_ctl_reset_ip_t); + s_reset_ip.e_cmd = IVD_CMD_VIDEO_CTL; + 
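+    // The reset is issued as a video-control command; once it completes, stride and header state are cleared so the next frame re-triggers header decode.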
s_reset_ip.e_sub_cmd = IVD_CMD_CTL_RESET; + s_reset_op.u4_size = sizeof(ivd_ctl_reset_op_t); + IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle, + &s_reset_ip, + &s_reset_op); + if (IV_SUCCESS != status) { + ALOGE("error in %s: 0x%x", __func__, s_reset_op.u4_error_code); + return UNKNOWN_ERROR; + } + mStride = 0; + (void) setNumCores(); + mSignalledError = false; + mHeaderDecoded = false; + + return OK; +} + +void C2SoftAvcDec::resetPlugin() { + mSignalledOutputEos = false; + gettimeofday(&mTimeStart, nullptr); + gettimeofday(&mTimeEnd, nullptr); +} + +status_t C2SoftAvcDec::deleteDecoder() { + if (mDecHandle) { + ivdext_delete_ip_t s_delete_ip; + ivdext_delete_op_t s_delete_op; + + s_delete_ip.s_ivd_delete_ip_t.u4_size = sizeof(ivdext_delete_ip_t); + s_delete_ip.s_ivd_delete_ip_t.e_cmd = IVD_CMD_DELETE; + s_delete_op.s_ivd_delete_op_t.u4_size = sizeof(ivdext_delete_op_t); + IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle, + &s_delete_ip, + &s_delete_op); + if (status != IV_SUCCESS) { + ALOGE("error in %s: 0x%x", __func__, + s_delete_op.s_ivd_delete_op_t.u4_error_code); + return UNKNOWN_ERROR; + } + mDecHandle = nullptr; + } + + return OK; +} + +static void fillEmptyWork(const std::unique_ptr &work) { + uint32_t flags = 0; + if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) { + flags |= C2FrameData::FLAG_END_OF_STREAM; + ALOGV("signalling eos"); + } + work->worklets.front()->output.flags = (C2FrameData::flags_t)flags; + work->worklets.front()->output.buffers.clear(); + work->worklets.front()->output.ordinal = work->input.ordinal; + work->workletsProcessed = 1u; +} + +void C2SoftAvcDec::finishWork(uint64_t index, const std::unique_ptr &work) { + std::shared_ptr buffer = createGraphicBuffer(std::move(mOutBlock), + C2Rect(mWidth, mHeight)); + mOutBlock = nullptr; + { + IntfImpl::Lock lock = mIntf->lock(); + buffer->setInfo(mIntf->getColorAspects_l()); + } + + auto fillWork = [buffer](const std::unique_ptr &work) { + work->worklets.front()->output.flags = (C2FrameData::flags_t)0; + work->worklets.front()->output.buffers.clear(); + work->worklets.front()->output.buffers.push_back(buffer); + work->worklets.front()->output.ordinal = work->input.ordinal; + work->workletsProcessed = 1u; + }; + if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) { + fillWork(work); + } else { + finish(index, fillWork); + } +} + +c2_status_t C2SoftAvcDec::ensureDecoderState(const std::shared_ptr &pool) { + if (!mDecHandle) { + ALOGE("not supposed to be here, invalid decoder context"); + return C2_CORRUPTED; + } + if (mStride != ALIGN64(mWidth)) { + mStride = ALIGN64(mWidth); + if (OK != setParams(mStride, IVD_DECODE_FRAME)) return C2_CORRUPTED; + } + if (mOutBlock && + (mOutBlock->width() != mStride || mOutBlock->height() != mHeight)) { + mOutBlock.reset(); + } + if (!mOutBlock) { + uint32_t format = HAL_PIXEL_FORMAT_YV12; + C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE }; + c2_status_t err = pool->fetchGraphicBlock(mStride, mHeight, format, usage, &mOutBlock); + if (err != C2_OK) { + ALOGE("fetchGraphicBlock for Output failed with status %d", err); + return err; + } + ALOGV("provided (%dx%d) required (%dx%d)", + mOutBlock->width(), mOutBlock->height(), mStride, mHeight); + } + + return C2_OK; +} + +// TODO: can overall error checking be improved? 
+// TODO: allow configuration of color format and usage for graphic buffers instead +// of hard coding them to HAL_PIXEL_FORMAT_YV12 +// TODO: pass coloraspects information to surface +// TODO: test support for dynamic change in resolution +// TODO: verify if the decoder sent back all frames +void C2SoftAvcDec::process( + const std::unique_ptr &work, + const std::shared_ptr &pool) { + // Initialize output work + work->result = C2_OK; + work->workletsProcessed = 0u; + work->worklets.front()->output.flags = work->input.flags; + if (mSignalledError || mSignalledOutputEos) { + work->result = C2_BAD_VALUE; + return; + } + + size_t inOffset = 0u; + size_t inSize = 0u; + uint32_t workIndex = work->input.ordinal.frameIndex.peeku() & 0xFFFFFFFF; + C2ReadView rView = mDummyReadView; + if (!work->input.buffers.empty()) { + rView = work->input.buffers[0]->data().linearBlocks().front().map().get(); + inSize = rView.capacity(); + if (inSize && rView.error()) { + ALOGE("read view map failed %d", rView.error()); + work->result = rView.error(); + return; + } + } + bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0); + bool hasPicture = false; + + ALOGV("in buffer attr. size %zu timestamp %d frameindex %d, flags %x", + inSize, (int)work->input.ordinal.timestamp.peeku(), + (int)work->input.ordinal.frameIndex.peeku(), work->input.flags); + size_t inPos = 0; + while (inPos < inSize) { + if (C2_OK != ensureDecoderState(pool)) { + mSignalledError = true; + work->workletsProcessed = 1u; + work->result = C2_CORRUPTED; + return; + } + + ivd_video_decode_ip_t s_decode_ip; + ivd_video_decode_op_t s_decode_op; + { + C2GraphicView wView = mOutBlock->map().get(); + if (wView.error()) { + ALOGE("graphic view map failed %d", wView.error()); + work->result = wView.error(); + return; + } + if (!setDecodeArgs(&s_decode_ip, &s_decode_op, &rView, &wView, + inOffset + inPos, inSize - inPos, workIndex)) { + mSignalledError = true; + work->workletsProcessed = 1u; + work->result = C2_CORRUPTED; + return; + } + + if (false == mHeaderDecoded) { + /* Decode header and get dimensions */ + setParams(mStride, IVD_DECODE_HEADER); + } + + WORD32 delay; + GETTIME(&mTimeStart, nullptr); + TIME_DIFF(mTimeEnd, mTimeStart, delay); + (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op); + WORD32 decodeTime; + GETTIME(&mTimeEnd, nullptr); + TIME_DIFF(mTimeStart, mTimeEnd, decodeTime); + ALOGV("decodeTime=%6d delay=%6d numBytes=%6d", decodeTime, delay, + s_decode_op.u4_num_bytes_consumed); + } + if (IVD_MEM_ALLOC_FAILED == (s_decode_op.u4_error_code & 0xFF)) { + ALOGE("allocation failure in decoder"); + mSignalledError = true; + work->workletsProcessed = 1u; + work->result = C2_CORRUPTED; + return; + } else if (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED == (s_decode_op.u4_error_code & 0xFF)) { + ALOGE("unsupported resolution : %dx%d", mWidth, mHeight); + mSignalledError = true; + work->workletsProcessed = 1u; + work->result = C2_CORRUPTED; + return; + } else if (IVD_RES_CHANGED == (s_decode_op.u4_error_code & 0xFF)) { + ALOGV("resolution changed"); + drainInternal(DRAIN_COMPONENT_NO_EOS, pool, work); + resetDecoder(); + resetPlugin(); + work->workletsProcessed = 0u; + + /* Decode header and get new dimensions */ + setParams(mStride, IVD_DECODE_HEADER); + (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op); + } + if (0 < s_decode_op.u4_pic_wd && 0 < s_decode_op.u4_pic_ht) { + if (mHeaderDecoded == false) { + mHeaderDecoded = true; + setParams(ALIGN64(s_decode_op.u4_pic_wd), IVD_DECODE_FRAME); + } + if 
(s_decode_op.u4_pic_wd != mWidth || s_decode_op.u4_pic_ht != mHeight) { + mWidth = s_decode_op.u4_pic_wd; + mHeight = s_decode_op.u4_pic_ht; + CHECK_EQ(0u, s_decode_op.u4_output_present); + + C2VideoSizeStreamInfo::output size(0u, mWidth, mHeight); + std::vector> failures; + c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures); + if (err == OK) { + work->worklets.front()->output.configUpdate.push_back( + C2Param::Copy(size)); + } else { + ALOGE("Cannot set width and height"); + mSignalledError = true; + work->workletsProcessed = 1u; + work->result = C2_CORRUPTED; + return; + } + continue; + } + } + (void)getVuiParams(); + hasPicture |= (1 == s_decode_op.u4_frame_decoded_flag); + if (s_decode_op.u4_output_present) { + finishWork(s_decode_op.u4_ts, work); + } + if (0 == s_decode_op.u4_num_bytes_consumed) { + ALOGD("Bytes consumed is zero. Ignoring remaining bytes"); + break; + } + inPos += s_decode_op.u4_num_bytes_consumed; + if (hasPicture && (inSize - inPos)) { + ALOGD("decoded frame in current access nal, ignoring further trailing bytes %d", + (int)inSize - (int)inPos); + break; + } + } + if (eos) { + drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work); + mSignalledOutputEos = true; + } else if (!hasPicture) { + fillEmptyWork(work); + } +} + +c2_status_t C2SoftAvcDec::drainInternal( + uint32_t drainMode, + const std::shared_ptr &pool, + const std::unique_ptr &work) { + if (drainMode == NO_DRAIN) { + ALOGW("drain with NO_DRAIN: no-op"); + return C2_OK; + } + if (drainMode == DRAIN_CHAIN) { + ALOGW("DRAIN_CHAIN not supported"); + return C2_OMITTED; + } + + if (OK != setFlushMode()) return C2_CORRUPTED; + while (true) { + if (C2_OK != ensureDecoderState(pool)) { + mSignalledError = true; + work->workletsProcessed = 1u; + work->result = C2_CORRUPTED; + return C2_CORRUPTED; + } + C2GraphicView wView = mOutBlock->map().get(); + if (wView.error()) { + ALOGE("graphic view map failed %d", wView.error()); + return C2_CORRUPTED; + } + ivd_video_decode_ip_t s_decode_ip; + ivd_video_decode_op_t s_decode_op; + if (!setDecodeArgs(&s_decode_ip, &s_decode_op, nullptr, &wView, 0, 0, 0)) { + mSignalledError = true; + work->workletsProcessed = 1u; + return C2_CORRUPTED; + } + (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op); + if (s_decode_op.u4_output_present) { + finishWork(s_decode_op.u4_ts, work); + } else { + fillEmptyWork(work); + break; + } + } + + return C2_OK; +} + +c2_status_t C2SoftAvcDec::drain( + uint32_t drainMode, + const std::shared_ptr &pool) { + return drainInternal(drainMode, pool, nullptr); +} + +class C2SoftAvcDecFactory : public C2ComponentFactory { +public: + C2SoftAvcDecFactory() : mHelper(std::static_pointer_cast( + GetCodec2PlatformComponentStore()->getParamReflector())) { + } + + virtual c2_status_t createComponent( + c2_node_id_t id, + std::shared_ptr* const component, + std::function deleter) override { + *component = std::shared_ptr( + new C2SoftAvcDec(COMPONENT_NAME, + id, + std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual c2_status_t createInterface( + c2_node_id_t id, + std::shared_ptr* const interface, + std::function deleter) override { + *interface = std::shared_ptr( + new SimpleInterface( + COMPONENT_NAME, id, std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual ~C2SoftAvcDecFactory() override = default; + +private: + std::shared_ptr mHelper; +}; + +} // namespace android + +extern "C" ::C2ComponentFactory* CreateCodec2Factory() { + ALOGV("in %s", __func__); + return new 
::android::C2SoftAvcDecFactory(); +} + +extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) { + ALOGV("in %s", __func__); + delete factory; +} diff --git a/media/codec2/components/avc/C2SoftAvcDec.h b/media/codec2/components/avc/C2SoftAvcDec.h new file mode 100644 index 0000000000000000000000000000000000000000..2127a933775b60c26e5d2555d1268b516d9b4f49 --- /dev/null +++ b/media/codec2/components/avc/C2SoftAvcDec.h @@ -0,0 +1,196 @@ +/* + * Copyright 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_C2_SOFT_AVC_DEC_H_ +#define ANDROID_C2_SOFT_AVC_DEC_H_ + +#include + +#include + +#include + +#include "ih264_typedefs.h" +#include "iv.h" +#include "ivd.h" + +namespace android { + +#define ivdec_api_function ih264d_api_function +#define ivdext_create_ip_t ih264d_create_ip_t +#define ivdext_create_op_t ih264d_create_op_t +#define ivdext_delete_ip_t ih264d_delete_ip_t +#define ivdext_delete_op_t ih264d_delete_op_t +#define ivdext_ctl_set_num_cores_ip_t ih264d_ctl_set_num_cores_ip_t +#define ivdext_ctl_set_num_cores_op_t ih264d_ctl_set_num_cores_op_t +#define ivdext_ctl_get_vui_params_ip_t ih264d_ctl_get_vui_params_ip_t +#define ivdext_ctl_get_vui_params_op_t ih264d_ctl_get_vui_params_op_t +#define ALIGN64(x) ((((x) + 63) >> 6) << 6) +#define MAX_NUM_CORES 4 +#define IVDEXT_CMD_CTL_SET_NUM_CORES \ + (IVD_CONTROL_API_COMMAND_TYPE_T)IH264D_CMD_CTL_SET_NUM_CORES +#define MIN(a, b) (((a) < (b)) ? 
(a) : (b)) +#define GETTIME(a, b) gettimeofday(a, b); +#define TIME_DIFF(start, end, diff) \ + diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \ + ((end).tv_usec - (start).tv_usec); + +#ifdef FILE_DUMP_ENABLE + #define INPUT_DUMP_PATH "/sdcard/clips/avcd_input" + #define INPUT_DUMP_EXT "h264" + #define GENERATE_FILE_NAMES() { \ + GETTIME(&mTimeStart, NULL); \ + strcpy(mInFile, ""); \ + sprintf(mInFile, "%s_%ld.%ld.%s", INPUT_DUMP_PATH, \ + mTimeStart.tv_sec, mTimeStart.tv_usec, \ + INPUT_DUMP_EXT); \ + } + #define CREATE_DUMP_FILE(m_filename) { \ + FILE *fp = fopen(m_filename, "wb"); \ + if (fp != NULL) { \ + fclose(fp); \ + } else { \ + ALOGD("Could not open file %s", m_filename); \ + } \ + } + #define DUMP_TO_FILE(m_filename, m_buf, m_size, m_offset)\ + { \ + FILE *fp = fopen(m_filename, "ab"); \ + if (fp != NULL && m_buf != NULL && m_offset == 0) { \ + int i; \ + i = fwrite(m_buf, 1, m_size, fp); \ + ALOGD("fwrite ret %d to write %d", i, m_size); \ + if (i != (int) m_size) { \ + ALOGD("Error in fwrite, returned %d", i); \ + perror("Error in write to file"); \ + } \ + } else if (fp == NULL) { \ + ALOGD("Could not write to file %s", m_filename);\ + } \ + if (fp) { \ + fclose(fp); \ + } \ + } +#else /* FILE_DUMP_ENABLE */ + #define INPUT_DUMP_PATH + #define INPUT_DUMP_EXT + #define OUTPUT_DUMP_PATH + #define OUTPUT_DUMP_EXT + #define GENERATE_FILE_NAMES() + #define CREATE_DUMP_FILE(m_filename) + #define DUMP_TO_FILE(m_filename, m_buf, m_size, m_offset) +#endif /* FILE_DUMP_ENABLE */ + + +class C2SoftAvcDec : public SimpleC2Component { +public: + class IntfImpl; + C2SoftAvcDec(const char *name, c2_node_id_t id, const std::shared_ptr &intfImpl); + virtual ~C2SoftAvcDec(); + + // From SimpleC2Component + c2_status_t onInit() override; + c2_status_t onStop() override; + void onReset() override; + void onRelease() override; + c2_status_t onFlush_sm() override; + void process( + const std::unique_ptr &work, + const std::shared_ptr &pool) override; + c2_status_t drain( + uint32_t drainMode, + const std::shared_ptr &pool) override; + +private: + status_t createDecoder(); + status_t setNumCores(); + status_t setParams(size_t stride, IVD_VIDEO_DECODE_MODE_T dec_mode); + void getVersion(); + status_t initDecoder(); + bool setDecodeArgs(ivd_video_decode_ip_t *ps_decode_ip, + ivd_video_decode_op_t *ps_decode_op, + C2ReadView *inBuffer, + C2GraphicView *outBuffer, + size_t inOffset, + size_t inSize, + uint32_t tsMarker); + bool getVuiParams(); + c2_status_t ensureDecoderState(const std::shared_ptr &pool); + void finishWork(uint64_t index, const std::unique_ptr &work); + status_t setFlushMode(); + c2_status_t drainInternal( + uint32_t drainMode, + const std::shared_ptr &pool, + const std::unique_ptr &work); + status_t resetDecoder(); + void resetPlugin(); + status_t deleteDecoder(); + + std::shared_ptr mIntf; + + // TODO:This is not the right place for this enum. These should + // be part of c2-vndk so that they can be accessed by all video plugins + // until then, make them feel at home + enum { + kNotSupported, + kPreferBitstream, + kPreferContainer, + }; + + iv_obj_t *mDecHandle; + std::shared_ptr mOutBlock; + uint8_t *mOutBufferFlush; + + size_t mNumCores; + IV_COLOR_FORMAT_T mIvColorFormat; + + uint32_t mWidth; + uint32_t mHeight; + uint32_t mStride; + bool mSignalledOutputEos; + bool mSignalledError; + bool mHeaderDecoded; + // Color aspects. 
These are ISO values and are meant to detect changes in aspects to avoid + // converting them to C2 values for each frame + struct VuiColorAspects { + uint8_t primaries; + uint8_t transfer; + uint8_t coeffs; + uint8_t fullRange; + + // default color aspects + VuiColorAspects() + : primaries(2), transfer(2), coeffs(2), fullRange(0) { } + + bool operator==(const VuiColorAspects &o) { + return primaries == o.primaries && transfer == o.transfer && coeffs == o.coeffs + && fullRange == o.fullRange; + } + } mBitstreamColorAspects; + + // profile + struct timeval mTimeStart; + struct timeval mTimeEnd; +#ifdef FILE_DUMP_ENABLE + char mInFile[200]; +#endif /* FILE_DUMP_ENABLE */ + + C2_DO_NOT_COPY(C2SoftAvcDec); +}; + +} // namespace android + +#endif // ANDROID_C2_SOFT_AVC_DEC_H_ diff --git a/media/codec2/components/avc/C2SoftAvcEnc.cpp b/media/codec2/components/avc/C2SoftAvcEnc.cpp new file mode 100644 index 0000000000000000000000000000000000000000..ee5cf27db5f3b058079ccc7f96aad03099a55814 --- /dev/null +++ b/media/codec2/components/avc/C2SoftAvcEnc.cpp @@ -0,0 +1,1559 @@ +/* + * Copyright 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "C2SoftAvcEnc" +#include +#include + +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include + +#include "C2SoftAvcEnc.h" +#include "ih264e.h" +#include "ih264e_error.h" + +namespace android { + +class C2SoftAvcEnc::IntfImpl : public C2InterfaceHelper { +public: + explicit IntfImpl(const std::shared_ptr &helper) + : C2InterfaceHelper(helper) { + + setDerivedInstance(this); + + addParameter( + DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING) + .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatVideo)) + .build()); + + addParameter( + DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING) + .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatCompressed)) + .build()); + + addParameter( + DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING) + .withConstValue(AllocSharedString( + MEDIA_MIMETYPE_VIDEO_RAW)) + .build()); + + addParameter( + DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING) + .withConstValue(AllocSharedString( + MEDIA_MIMETYPE_VIDEO_AVC)) + .build()); + + addParameter( + DefineParam(mUsage, C2_NAME_INPUT_STREAM_USAGE_SETTING) + .withConstValue(new C2StreamUsageTuning::input( + 0u, (uint64_t)C2MemoryUsage::CPU_READ)) + .build()); + + addParameter( + DefineParam(mSize, C2_NAME_STREAM_VIDEO_SIZE_SETTING) + .withDefault(new C2VideoSizeStreamTuning::input(0u, 320, 240)) + .withFields({ + C2F(mSize, width).inRange(2, 2560, 2), + C2F(mSize, height).inRange(2, 2560, 2), + }) + .withSetter(SizeSetter) + .build()); + + addParameter( + DefineParam(mFrameRate, C2_NAME_STREAM_FRAME_RATE_SETTING) + .withDefault(new C2StreamFrameRateInfo::output(0u, 30.)) + // TODO: More restriction? 
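+                // Any positive frame rate is accepted here; ProfileLevelSetter() uses it to derive macroblocks-per-second when picking a level.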
+ .withFields({C2F(mFrameRate, value).greaterThan(0.)}) + .withSetter(Setter::StrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING) + .withDefault(new C2BitrateTuning::output(0u, 64000)) + .withFields({C2F(mBitrate, value).inRange(4096, 12000000)}) + .withSetter(BitrateSetter) + .build()); + + addParameter( + DefineParam(mIntraRefresh, C2_PARAMKEY_INTRA_REFRESH) + .withDefault(new C2StreamIntraRefreshTuning::output( + 0u, C2Config::INTRA_REFRESH_DISABLED, 0.)) + .withFields({ + C2F(mIntraRefresh, mode).oneOf({ + C2Config::INTRA_REFRESH_DISABLED, C2Config::INTRA_REFRESH_ARBITRARY }), + C2F(mIntraRefresh, period).any() + }) + .withSetter(IntraRefreshSetter) + .build()); + + addParameter( + DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL) + .withDefault(new C2StreamProfileLevelInfo::output( + 0u, PROFILE_AVC_CONSTRAINED_BASELINE, LEVEL_AVC_4_1)) + .withFields({ + C2F(mProfileLevel, profile).oneOf({ + PROFILE_AVC_BASELINE, + PROFILE_AVC_CONSTRAINED_BASELINE, + PROFILE_AVC_MAIN, + }), + C2F(mProfileLevel, level).oneOf({ + LEVEL_AVC_1, + LEVEL_AVC_1B, + LEVEL_AVC_1_1, + LEVEL_AVC_1_2, + LEVEL_AVC_1_3, + LEVEL_AVC_2, + LEVEL_AVC_2_1, + LEVEL_AVC_2_2, + LEVEL_AVC_3, + LEVEL_AVC_3_1, + LEVEL_AVC_3_2, + LEVEL_AVC_4, + LEVEL_AVC_4_1, + LEVEL_AVC_4_2, + LEVEL_AVC_5, + }), + }) + .withSetter(ProfileLevelSetter, mSize, mFrameRate, mBitrate) + .build()); + + addParameter( + DefineParam(mRequestSync, C2_PARAMKEY_REQUEST_SYNC_FRAME) + .withDefault(new C2StreamRequestSyncFrameTuning::output(0u, C2_FALSE)) + .withFields({C2F(mRequestSync, value).oneOf({ C2_FALSE, C2_TRUE }) }) + .withSetter(Setter::NonStrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mSyncFramePeriod, C2_PARAMKEY_SYNC_FRAME_INTERVAL) + .withDefault(new C2StreamSyncFrameIntervalTuning::output(0u, 1000000)) + .withFields({C2F(mSyncFramePeriod, value).any()}) + .withSetter(Setter::StrictValueWithNoDeps) + .build()); + } + + static C2R BitrateSetter(bool mayBlock, C2P &me) { + (void)mayBlock; + C2R res = C2R::Ok(); + if (me.v.value <= 4096) { + me.set().value = 4096; + } + return res; + } + + static C2R SizeSetter(bool mayBlock, const C2P &oldMe, + C2P &me) { + (void)mayBlock; + C2R res = C2R::Ok(); + if (!me.F(me.v.width).supportsAtAll(me.v.width)) { + res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width))); + me.set().width = oldMe.v.width; + } + if (!me.F(me.v.height).supportsAtAll(me.v.height)) { + res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height))); + me.set().height = oldMe.v.height; + } + return res; + } + + static C2R ProfileLevelSetter( + bool mayBlock, + C2P &me, + const C2P &size, + const C2P &frameRate, + const C2P &bitrate) { + (void)mayBlock; + if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) { + me.set().profile = PROFILE_AVC_CONSTRAINED_BASELINE; + } + + struct LevelLimits { + C2Config::level_t level; + float mbsPerSec; + uint64_t mbs; + uint32_t bitrate; + }; + constexpr LevelLimits kLimits[] = { + { LEVEL_AVC_1, 1485, 99, 64000 }, + // Decoder does not properly handle level 1b. 
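+        // (Level 1b shares level 1's macroblock limits but allows 128 kbps; it is left out of the table and instead remapped through needsUpdate below.)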
+ // { LEVEL_AVC_1B, 1485, 99, 128000 }, + { LEVEL_AVC_1_1, 3000, 396, 192000 }, + { LEVEL_AVC_1_2, 6000, 396, 384000 }, + { LEVEL_AVC_1_3, 11880, 396, 768000 }, + { LEVEL_AVC_2, 11880, 396, 2000000 }, + { LEVEL_AVC_2_1, 19800, 792, 4000000 }, + { LEVEL_AVC_2_2, 20250, 1620, 4000000 }, + { LEVEL_AVC_3, 40500, 1620, 10000000 }, + { LEVEL_AVC_3_1, 108000, 3600, 14000000 }, + { LEVEL_AVC_3_2, 216000, 5120, 20000000 }, + { LEVEL_AVC_4, 245760, 8192, 20000000 }, + { LEVEL_AVC_4_1, 245760, 8192, 50000000 }, + { LEVEL_AVC_4_2, 522240, 8704, 50000000 }, + { LEVEL_AVC_5, 589824, 22080, 135000000 }, + }; + + uint64_t mbs = uint64_t((size.v.width + 15) / 16) * ((size.v.height + 15) / 16); + float mbsPerSec = float(mbs) / frameRate.v.value; + + // Check if the supplied level meets the MB / bitrate requirements. If + // not, update the level with the lowest level meeting the requirements. + + bool found = false; + // By default needsUpdate = false in case the supplied level does meet + // the requirements. For Level 1b, we want to update the level anyway, + // so we set it to true in that case. + bool needsUpdate = (me.v.level == LEVEL_AVC_1B); + for (const LevelLimits &limit : kLimits) { + if (mbs <= limit.mbs && mbsPerSec <= limit.mbsPerSec && + bitrate.v.value <= limit.bitrate) { + // This is the lowest level that meets the requirements, and if + // we haven't seen the supplied level yet, that means we don't + // need the update. + if (needsUpdate) { + ALOGD("Given level %x does not cover current configuration: " + "adjusting to %x", me.v.level, limit.level); + me.set().level = limit.level; + } + found = true; + break; + } + if (me.v.level == limit.level) { + // We break out of the loop when the lowest feasible level is + // found. The fact that we're here means that our level doesn't + // meet the requirement and needs to be updated. + needsUpdate = true; + } + } + if (!found) { + // We set to the highest supported level. 
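+            // LEVEL_AVC_5 is the last entry in kLimits, so configurations exceeding every row are clamped to it rather than rejected.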
+ me.set().level = LEVEL_AVC_5; + } + + return C2R::Ok(); + } + + static C2R IntraRefreshSetter(bool mayBlock, C2P &me) { + (void)mayBlock; + C2R res = C2R::Ok(); + if (me.v.period < 1) { + me.set().mode = C2Config::INTRA_REFRESH_DISABLED; + me.set().period = 0; + } else { + // only support arbitrary mode (cyclic in our case) + me.set().mode = C2Config::INTRA_REFRESH_ARBITRARY; + } + return res; + } + + IV_PROFILE_T getProfile_l() const { + switch (mProfileLevel->profile) { + case PROFILE_AVC_CONSTRAINED_BASELINE: [[fallthrough]]; + case PROFILE_AVC_BASELINE: return IV_PROFILE_BASE; + case PROFILE_AVC_MAIN: return IV_PROFILE_MAIN; + default: + ALOGD("Unrecognized profile: %x", mProfileLevel->profile); + return IV_PROFILE_DEFAULT; + } + } + + UWORD32 getLevel_l() const { + struct Level { + C2Config::level_t c2Level; + UWORD32 avcLevel; + }; + constexpr Level levels[] = { + { LEVEL_AVC_1, 10 }, + { LEVEL_AVC_1B, 9 }, + { LEVEL_AVC_1_1, 11 }, + { LEVEL_AVC_1_2, 12 }, + { LEVEL_AVC_1_3, 13 }, + { LEVEL_AVC_2, 20 }, + { LEVEL_AVC_2_1, 21 }, + { LEVEL_AVC_2_2, 22 }, + { LEVEL_AVC_3, 30 }, + { LEVEL_AVC_3_1, 31 }, + { LEVEL_AVC_3_2, 32 }, + { LEVEL_AVC_4, 40 }, + { LEVEL_AVC_4_1, 41 }, + { LEVEL_AVC_4_2, 42 }, + { LEVEL_AVC_5, 50 }, + }; + for (const Level &level : levels) { + if (mProfileLevel->level == level.c2Level) { + return level.avcLevel; + } + } + ALOGD("Unrecognized level: %x", mProfileLevel->level); + return 41; + } + uint32_t getSyncFramePeriod_l() const { + if (mSyncFramePeriod->value < 0 || mSyncFramePeriod->value == INT64_MAX) { + return 0; + } + double period = mSyncFramePeriod->value / 1e6 * mFrameRate->value; + return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.); + } + + // unsafe getters + std::shared_ptr getSize_l() const { return mSize; } + std::shared_ptr getIntraRefresh_l() const { return mIntraRefresh; } + std::shared_ptr getFrameRate_l() const { return mFrameRate; } + std::shared_ptr getBitrate_l() const { return mBitrate; } + std::shared_ptr getRequestSync_l() const { return mRequestSync; } + +private: + std::shared_ptr mInputFormat; + std::shared_ptr mOutputFormat; + std::shared_ptr mInputMediaType; + std::shared_ptr mOutputMediaType; + std::shared_ptr mUsage; + std::shared_ptr mSize; + std::shared_ptr mFrameRate; + std::shared_ptr mRequestSync; + std::shared_ptr mIntraRefresh; + std::shared_ptr mBitrate; + std::shared_ptr mProfileLevel; + std::shared_ptr mSyncFramePeriod; +}; + +#define ive_api_function ih264e_api_function + +constexpr char COMPONENT_NAME[] = "c2.android.avc.encoder"; + +namespace { + +// From external/libavc/encoder/ih264e_bitstream.h +constexpr uint32_t MIN_STREAM_SIZE = 0x800; + +static size_t GetCPUCoreCount() { + long cpuCoreCount = 1; +#if defined(_SC_NPROCESSORS_ONLN) + cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN); +#else + // _SC_NPROC_ONLN must be defined... 
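+    // Fallback sysconf name for platforms that lack _SC_NPROCESSORS_ONLN; the CHECK below asserts that at least one online core is reported.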
+ cpuCoreCount = sysconf(_SC_NPROC_ONLN); +#endif + CHECK(cpuCoreCount >= 1); + ALOGV("Number of CPU cores: %ld", cpuCoreCount); + return (size_t)cpuCoreCount; +} + +} // namespace + +C2SoftAvcEnc::C2SoftAvcEnc( + const char *name, c2_node_id_t id, const std::shared_ptr &intfImpl) + : SimpleC2Component(std::make_shared>(name, id, intfImpl)), + mIntf(intfImpl), + mIvVideoColorFormat(IV_YUV_420P), + mAVCEncProfile(IV_PROFILE_BASE), + mAVCEncLevel(41), + mStarted(false), + mSawInputEOS(false), + mSawOutputEOS(false), + mSignalledError(false), + mCodecCtx(nullptr), + // TODO: output buffer size + mOutBufferSize(524288) { + + // If dump is enabled, then open create an empty file + GENERATE_FILE_NAMES(); + CREATE_DUMP_FILE(mInFile); + CREATE_DUMP_FILE(mOutFile); + + initEncParams(); +} + +C2SoftAvcEnc::~C2SoftAvcEnc() { + releaseEncoder(); +} + +c2_status_t C2SoftAvcEnc::onInit() { + return C2_OK; +} + +c2_status_t C2SoftAvcEnc::onStop() { + return C2_OK; +} + +void C2SoftAvcEnc::onReset() { + // TODO: use IVE_CMD_CTL_RESET? + releaseEncoder(); + initEncParams(); +} + +void C2SoftAvcEnc::onRelease() { + releaseEncoder(); +} + +c2_status_t C2SoftAvcEnc::onFlush_sm() { + // TODO: use IVE_CMD_CTL_FLUSH? + return C2_OK; +} + +void C2SoftAvcEnc::initEncParams() { + mCodecCtx = nullptr; + mMemRecords = nullptr; + mNumMemRecords = DEFAULT_MEM_REC_CNT; + mHeaderGenerated = 0; + mNumCores = GetCPUCoreCount(); + mArch = DEFAULT_ARCH; + mSliceMode = DEFAULT_SLICE_MODE; + mSliceParam = DEFAULT_SLICE_PARAM; + mHalfPelEnable = DEFAULT_HPEL; + mIInterval = DEFAULT_I_INTERVAL; + mIDRInterval = DEFAULT_IDR_INTERVAL; + mDisableDeblkLevel = DEFAULT_DISABLE_DEBLK_LEVEL; + mEnableFastSad = DEFAULT_ENABLE_FAST_SAD; + mEnableAltRef = DEFAULT_ENABLE_ALT_REF; + mEncSpeed = DEFAULT_ENC_SPEED; + mIntra4x4 = DEFAULT_INTRA4x4; + mConstrainedIntraFlag = DEFAULT_CONSTRAINED_INTRA; + mPSNREnable = DEFAULT_PSNR_ENABLE; + mReconEnable = DEFAULT_RECON_ENABLE; + mEntropyMode = DEFAULT_ENTROPY_MODE; + mBframes = DEFAULT_B_FRAMES; + + gettimeofday(&mTimeStart, nullptr); + gettimeofday(&mTimeEnd, nullptr); +} + +c2_status_t C2SoftAvcEnc::setDimensions() { + ive_ctl_set_dimensions_ip_t s_dimensions_ip; + ive_ctl_set_dimensions_op_t s_dimensions_op; + IV_STATUS_T status; + + s_dimensions_ip.e_cmd = IVE_CMD_VIDEO_CTL; + s_dimensions_ip.e_sub_cmd = IVE_CMD_CTL_SET_DIMENSIONS; + s_dimensions_ip.u4_ht = mSize->height; + s_dimensions_ip.u4_wd = mSize->width; + + s_dimensions_ip.u4_timestamp_high = -1; + s_dimensions_ip.u4_timestamp_low = -1; + + s_dimensions_ip.u4_size = sizeof(ive_ctl_set_dimensions_ip_t); + s_dimensions_op.u4_size = sizeof(ive_ctl_set_dimensions_op_t); + + status = ive_api_function(mCodecCtx, &s_dimensions_ip, &s_dimensions_op); + if (status != IV_SUCCESS) { + ALOGE("Unable to set frame dimensions = 0x%x\n", + s_dimensions_op.u4_error_code); + return C2_CORRUPTED; + } + return C2_OK; +} + +c2_status_t C2SoftAvcEnc::setNumCores() { + IV_STATUS_T status; + ive_ctl_set_num_cores_ip_t s_num_cores_ip; + ive_ctl_set_num_cores_op_t s_num_cores_op; + s_num_cores_ip.e_cmd = IVE_CMD_VIDEO_CTL; + s_num_cores_ip.e_sub_cmd = IVE_CMD_CTL_SET_NUM_CORES; + s_num_cores_ip.u4_num_cores = MIN(mNumCores, CODEC_MAX_CORES); + s_num_cores_ip.u4_timestamp_high = -1; + s_num_cores_ip.u4_timestamp_low = -1; + s_num_cores_ip.u4_size = sizeof(ive_ctl_set_num_cores_ip_t); + + s_num_cores_op.u4_size = sizeof(ive_ctl_set_num_cores_op_t); + + status = ive_api_function( + mCodecCtx, (void *) &s_num_cores_ip, (void *) &s_num_cores_op); + if (status != 
IV_SUCCESS) { + ALOGE("Unable to set processor params = 0x%x\n", + s_num_cores_op.u4_error_code); + return C2_CORRUPTED; + } + return C2_OK; +} + +c2_status_t C2SoftAvcEnc::setFrameRate() { + ive_ctl_set_frame_rate_ip_t s_frame_rate_ip; + ive_ctl_set_frame_rate_op_t s_frame_rate_op; + IV_STATUS_T status; + + s_frame_rate_ip.e_cmd = IVE_CMD_VIDEO_CTL; + s_frame_rate_ip.e_sub_cmd = IVE_CMD_CTL_SET_FRAMERATE; + + s_frame_rate_ip.u4_src_frame_rate = mFrameRate->value + 0.5; + s_frame_rate_ip.u4_tgt_frame_rate = mFrameRate->value + 0.5; + + s_frame_rate_ip.u4_timestamp_high = -1; + s_frame_rate_ip.u4_timestamp_low = -1; + + s_frame_rate_ip.u4_size = sizeof(ive_ctl_set_frame_rate_ip_t); + s_frame_rate_op.u4_size = sizeof(ive_ctl_set_frame_rate_op_t); + + status = ive_api_function(mCodecCtx, &s_frame_rate_ip, &s_frame_rate_op); + if (status != IV_SUCCESS) { + ALOGE("Unable to set frame rate = 0x%x\n", + s_frame_rate_op.u4_error_code); + return C2_CORRUPTED; + } + return C2_OK; +} + +c2_status_t C2SoftAvcEnc::setIpeParams() { + ive_ctl_set_ipe_params_ip_t s_ipe_params_ip; + ive_ctl_set_ipe_params_op_t s_ipe_params_op; + IV_STATUS_T status; + + s_ipe_params_ip.e_cmd = IVE_CMD_VIDEO_CTL; + s_ipe_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_IPE_PARAMS; + + s_ipe_params_ip.u4_enable_intra_4x4 = mIntra4x4; + s_ipe_params_ip.u4_enc_speed_preset = mEncSpeed; + s_ipe_params_ip.u4_constrained_intra_pred = mConstrainedIntraFlag; + + s_ipe_params_ip.u4_timestamp_high = -1; + s_ipe_params_ip.u4_timestamp_low = -1; + + s_ipe_params_ip.u4_size = sizeof(ive_ctl_set_ipe_params_ip_t); + s_ipe_params_op.u4_size = sizeof(ive_ctl_set_ipe_params_op_t); + + status = ive_api_function(mCodecCtx, &s_ipe_params_ip, &s_ipe_params_op); + if (status != IV_SUCCESS) { + ALOGE("Unable to set ipe params = 0x%x\n", + s_ipe_params_op.u4_error_code); + return C2_CORRUPTED; + } + return C2_OK; +} + +c2_status_t C2SoftAvcEnc::setBitRate() { + ive_ctl_set_bitrate_ip_t s_bitrate_ip; + ive_ctl_set_bitrate_op_t s_bitrate_op; + IV_STATUS_T status; + + s_bitrate_ip.e_cmd = IVE_CMD_VIDEO_CTL; + s_bitrate_ip.e_sub_cmd = IVE_CMD_CTL_SET_BITRATE; + + s_bitrate_ip.u4_target_bitrate = mBitrate->value; + + s_bitrate_ip.u4_timestamp_high = -1; + s_bitrate_ip.u4_timestamp_low = -1; + + s_bitrate_ip.u4_size = sizeof(ive_ctl_set_bitrate_ip_t); + s_bitrate_op.u4_size = sizeof(ive_ctl_set_bitrate_op_t); + + status = ive_api_function(mCodecCtx, &s_bitrate_ip, &s_bitrate_op); + if (status != IV_SUCCESS) { + ALOGE("Unable to set bit rate = 0x%x\n", s_bitrate_op.u4_error_code); + return C2_CORRUPTED; + } + return C2_OK; +} + +c2_status_t C2SoftAvcEnc::setFrameType(IV_PICTURE_CODING_TYPE_T e_frame_type) { + ive_ctl_set_frame_type_ip_t s_frame_type_ip; + ive_ctl_set_frame_type_op_t s_frame_type_op; + IV_STATUS_T status; + s_frame_type_ip.e_cmd = IVE_CMD_VIDEO_CTL; + s_frame_type_ip.e_sub_cmd = IVE_CMD_CTL_SET_FRAMETYPE; + + s_frame_type_ip.e_frame_type = e_frame_type; + + s_frame_type_ip.u4_timestamp_high = -1; + s_frame_type_ip.u4_timestamp_low = -1; + + s_frame_type_ip.u4_size = sizeof(ive_ctl_set_frame_type_ip_t); + s_frame_type_op.u4_size = sizeof(ive_ctl_set_frame_type_op_t); + + status = ive_api_function(mCodecCtx, &s_frame_type_ip, &s_frame_type_op); + if (status != IV_SUCCESS) { + ALOGE("Unable to set frame type = 0x%x\n", + s_frame_type_op.u4_error_code); + return C2_CORRUPTED; + } + return C2_OK; +} + +c2_status_t C2SoftAvcEnc::setQp() { + ive_ctl_set_qp_ip_t s_qp_ip; + ive_ctl_set_qp_op_t s_qp_op; + IV_STATUS_T status; + + s_qp_ip.e_cmd = 
IVE_CMD_VIDEO_CTL; + s_qp_ip.e_sub_cmd = IVE_CMD_CTL_SET_QP; + + s_qp_ip.u4_i_qp = DEFAULT_I_QP; + s_qp_ip.u4_i_qp_max = DEFAULT_QP_MAX; + s_qp_ip.u4_i_qp_min = DEFAULT_QP_MIN; + + s_qp_ip.u4_p_qp = DEFAULT_P_QP; + s_qp_ip.u4_p_qp_max = DEFAULT_QP_MAX; + s_qp_ip.u4_p_qp_min = DEFAULT_QP_MIN; + + s_qp_ip.u4_b_qp = DEFAULT_P_QP; + s_qp_ip.u4_b_qp_max = DEFAULT_QP_MAX; + s_qp_ip.u4_b_qp_min = DEFAULT_QP_MIN; + + s_qp_ip.u4_timestamp_high = -1; + s_qp_ip.u4_timestamp_low = -1; + + s_qp_ip.u4_size = sizeof(ive_ctl_set_qp_ip_t); + s_qp_op.u4_size = sizeof(ive_ctl_set_qp_op_t); + + status = ive_api_function(mCodecCtx, &s_qp_ip, &s_qp_op); + if (status != IV_SUCCESS) { + ALOGE("Unable to set qp 0x%x\n", s_qp_op.u4_error_code); + return C2_CORRUPTED; + } + return C2_OK; +} + +c2_status_t C2SoftAvcEnc::setEncMode(IVE_ENC_MODE_T e_enc_mode) { + IV_STATUS_T status; + ive_ctl_set_enc_mode_ip_t s_enc_mode_ip; + ive_ctl_set_enc_mode_op_t s_enc_mode_op; + + s_enc_mode_ip.e_cmd = IVE_CMD_VIDEO_CTL; + s_enc_mode_ip.e_sub_cmd = IVE_CMD_CTL_SET_ENC_MODE; + + s_enc_mode_ip.e_enc_mode = e_enc_mode; + + s_enc_mode_ip.u4_timestamp_high = -1; + s_enc_mode_ip.u4_timestamp_low = -1; + + s_enc_mode_ip.u4_size = sizeof(ive_ctl_set_enc_mode_ip_t); + s_enc_mode_op.u4_size = sizeof(ive_ctl_set_enc_mode_op_t); + + status = ive_api_function(mCodecCtx, &s_enc_mode_ip, &s_enc_mode_op); + if (status != IV_SUCCESS) { + ALOGE("Unable to set in header encode mode = 0x%x\n", + s_enc_mode_op.u4_error_code); + return C2_CORRUPTED; + } + return C2_OK; +} + +c2_status_t C2SoftAvcEnc::setVbvParams() { + ive_ctl_set_vbv_params_ip_t s_vbv_ip; + ive_ctl_set_vbv_params_op_t s_vbv_op; + IV_STATUS_T status; + + s_vbv_ip.e_cmd = IVE_CMD_VIDEO_CTL; + s_vbv_ip.e_sub_cmd = IVE_CMD_CTL_SET_VBV_PARAMS; + + s_vbv_ip.u4_vbv_buf_size = 0; + s_vbv_ip.u4_vbv_buffer_delay = 1000; + + s_vbv_ip.u4_timestamp_high = -1; + s_vbv_ip.u4_timestamp_low = -1; + + s_vbv_ip.u4_size = sizeof(ive_ctl_set_vbv_params_ip_t); + s_vbv_op.u4_size = sizeof(ive_ctl_set_vbv_params_op_t); + + status = ive_api_function(mCodecCtx, &s_vbv_ip, &s_vbv_op); + if (status != IV_SUCCESS) { + ALOGE("Unable to set VBV params = 0x%x\n", s_vbv_op.u4_error_code); + return C2_CORRUPTED; + } + return C2_OK; +} + +c2_status_t C2SoftAvcEnc::setAirParams() { + ive_ctl_set_air_params_ip_t s_air_ip; + ive_ctl_set_air_params_op_t s_air_op; + IV_STATUS_T status; + + s_air_ip.e_cmd = IVE_CMD_VIDEO_CTL; + s_air_ip.e_sub_cmd = IVE_CMD_CTL_SET_AIR_PARAMS; + + s_air_ip.e_air_mode = + (mIntraRefresh->mode == C2Config::INTRA_REFRESH_DISABLED || mIntraRefresh->period < 1) + ? 
IVE_AIR_MODE_NONE : IVE_AIR_MODE_CYCLIC; + s_air_ip.u4_air_refresh_period = mIntraRefresh->period; + + s_air_ip.u4_timestamp_high = -1; + s_air_ip.u4_timestamp_low = -1; + + s_air_ip.u4_size = sizeof(ive_ctl_set_air_params_ip_t); + s_air_op.u4_size = sizeof(ive_ctl_set_air_params_op_t); + + status = ive_api_function(mCodecCtx, &s_air_ip, &s_air_op); + if (status != IV_SUCCESS) { + ALOGE("Unable to set air params = 0x%x\n", s_air_op.u4_error_code); + return C2_CORRUPTED; + } + return C2_OK; +} + +c2_status_t C2SoftAvcEnc::setMeParams() { + IV_STATUS_T status; + ive_ctl_set_me_params_ip_t s_me_params_ip; + ive_ctl_set_me_params_op_t s_me_params_op; + + s_me_params_ip.e_cmd = IVE_CMD_VIDEO_CTL; + s_me_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_ME_PARAMS; + + s_me_params_ip.u4_enable_fast_sad = mEnableFastSad; + s_me_params_ip.u4_enable_alt_ref = mEnableAltRef; + + s_me_params_ip.u4_enable_hpel = mHalfPelEnable; + s_me_params_ip.u4_enable_qpel = DEFAULT_QPEL; + s_me_params_ip.u4_me_speed_preset = DEFAULT_ME_SPEED; + s_me_params_ip.u4_srch_rng_x = DEFAULT_SRCH_RNG_X; + s_me_params_ip.u4_srch_rng_y = DEFAULT_SRCH_RNG_Y; + + s_me_params_ip.u4_timestamp_high = -1; + s_me_params_ip.u4_timestamp_low = -1; + + s_me_params_ip.u4_size = sizeof(ive_ctl_set_me_params_ip_t); + s_me_params_op.u4_size = sizeof(ive_ctl_set_me_params_op_t); + + status = ive_api_function(mCodecCtx, &s_me_params_ip, &s_me_params_op); + if (status != IV_SUCCESS) { + ALOGE("Unable to set me params = 0x%x\n", s_me_params_op.u4_error_code); + return C2_CORRUPTED; + } + return C2_OK; +} + +c2_status_t C2SoftAvcEnc::setGopParams() { + IV_STATUS_T status; + ive_ctl_set_gop_params_ip_t s_gop_params_ip; + ive_ctl_set_gop_params_op_t s_gop_params_op; + + s_gop_params_ip.e_cmd = IVE_CMD_VIDEO_CTL; + s_gop_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_GOP_PARAMS; + + s_gop_params_ip.u4_i_frm_interval = mIInterval; + s_gop_params_ip.u4_idr_frm_interval = mIDRInterval; + + s_gop_params_ip.u4_timestamp_high = -1; + s_gop_params_ip.u4_timestamp_low = -1; + + s_gop_params_ip.u4_size = sizeof(ive_ctl_set_gop_params_ip_t); + s_gop_params_op.u4_size = sizeof(ive_ctl_set_gop_params_op_t); + + status = ive_api_function(mCodecCtx, &s_gop_params_ip, &s_gop_params_op); + if (status != IV_SUCCESS) { + ALOGE("Unable to set GOP params = 0x%x\n", + s_gop_params_op.u4_error_code); + return C2_CORRUPTED; + } + return C2_OK; +} + +c2_status_t C2SoftAvcEnc::setProfileParams() { + IntfImpl::Lock lock = mIntf->lock(); + + IV_STATUS_T status; + ive_ctl_set_profile_params_ip_t s_profile_params_ip; + ive_ctl_set_profile_params_op_t s_profile_params_op; + + s_profile_params_ip.e_cmd = IVE_CMD_VIDEO_CTL; + s_profile_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_PROFILE_PARAMS; + + s_profile_params_ip.e_profile = mIntf->getProfile_l(); + s_profile_params_ip.u4_entropy_coding_mode = mEntropyMode; + s_profile_params_ip.u4_timestamp_high = -1; + s_profile_params_ip.u4_timestamp_low = -1; + + s_profile_params_ip.u4_size = sizeof(ive_ctl_set_profile_params_ip_t); + s_profile_params_op.u4_size = sizeof(ive_ctl_set_profile_params_op_t); + lock.unlock(); + + status = ive_api_function(mCodecCtx, &s_profile_params_ip, &s_profile_params_op); + if (status != IV_SUCCESS) { + ALOGE("Unable to set profile params = 0x%x\n", + s_profile_params_op.u4_error_code); + return C2_CORRUPTED; + } + return C2_OK; +} + +c2_status_t C2SoftAvcEnc::setDeblockParams() { + IV_STATUS_T status; + ive_ctl_set_deblock_params_ip_t s_deblock_params_ip; + ive_ctl_set_deblock_params_op_t s_deblock_params_op; + + 
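+    // Same control-call pattern as the setters above: fill in command, sub-command and struct sizes, invoke ive_api_function(), then check the returned error code.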
s_deblock_params_ip.e_cmd = IVE_CMD_VIDEO_CTL; + s_deblock_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_DEBLOCK_PARAMS; + + s_deblock_params_ip.u4_disable_deblock_level = mDisableDeblkLevel; + + s_deblock_params_ip.u4_timestamp_high = -1; + s_deblock_params_ip.u4_timestamp_low = -1; + + s_deblock_params_ip.u4_size = sizeof(ive_ctl_set_deblock_params_ip_t); + s_deblock_params_op.u4_size = sizeof(ive_ctl_set_deblock_params_op_t); + + status = ive_api_function(mCodecCtx, &s_deblock_params_ip, &s_deblock_params_op); + if (status != IV_SUCCESS) { + ALOGE("Unable to enable/disable deblock params = 0x%x\n", + s_deblock_params_op.u4_error_code); + return C2_CORRUPTED; + } + return C2_OK; +} + +void C2SoftAvcEnc::logVersion() { + ive_ctl_getversioninfo_ip_t s_ctl_ip; + ive_ctl_getversioninfo_op_t s_ctl_op; + UWORD8 au1_buf[512]; + IV_STATUS_T status; + + s_ctl_ip.e_cmd = IVE_CMD_VIDEO_CTL; + s_ctl_ip.e_sub_cmd = IVE_CMD_CTL_GETVERSION; + s_ctl_ip.u4_size = sizeof(ive_ctl_getversioninfo_ip_t); + s_ctl_op.u4_size = sizeof(ive_ctl_getversioninfo_op_t); + s_ctl_ip.pu1_version = au1_buf; + s_ctl_ip.u4_version_bufsize = sizeof(au1_buf); + + status = ive_api_function(mCodecCtx, (void *) &s_ctl_ip, (void *) &s_ctl_op); + + if (status != IV_SUCCESS) { + ALOGE("Error in getting version: 0x%x", s_ctl_op.u4_error_code); + } else { + ALOGV("Ittiam encoder version: %s", (char *)s_ctl_ip.pu1_version); + } + return; +} + +c2_status_t C2SoftAvcEnc::initEncoder() { + IV_STATUS_T status; + WORD32 level; + + CHECK(!mStarted); + + c2_status_t errType = C2_OK; + + { + IntfImpl::Lock lock = mIntf->lock(); + mSize = mIntf->getSize_l(); + mBitrate = mIntf->getBitrate_l(); + mFrameRate = mIntf->getFrameRate_l(); + mIntraRefresh = mIntf->getIntraRefresh_l(); + mAVCEncLevel = mIntf->getLevel_l(); + mIInterval = mIntf->getSyncFramePeriod_l(); + mIDRInterval = mIntf->getSyncFramePeriod_l(); + } + uint32_t width = mSize->width; + uint32_t height = mSize->height; + + mStride = width; + + // TODO + mIvVideoColorFormat = IV_YUV_420P; + + ALOGD("Params width %d height %d level %d colorFormat %d", width, + height, mAVCEncLevel, mIvVideoColorFormat); + + /* Getting Number of MemRecords */ + { + iv_num_mem_rec_ip_t s_num_mem_rec_ip; + iv_num_mem_rec_op_t s_num_mem_rec_op; + + s_num_mem_rec_ip.u4_size = sizeof(iv_num_mem_rec_ip_t); + s_num_mem_rec_op.u4_size = sizeof(iv_num_mem_rec_op_t); + + s_num_mem_rec_ip.e_cmd = IV_CMD_GET_NUM_MEM_REC; + + status = ive_api_function(nullptr, &s_num_mem_rec_ip, &s_num_mem_rec_op); + + if (status != IV_SUCCESS) { + ALOGE("Get number of memory records failed = 0x%x\n", + s_num_mem_rec_op.u4_error_code); + return C2_CORRUPTED; + } + + mNumMemRecords = s_num_mem_rec_op.u4_num_mem_rec; + } + + /* Allocate array to hold memory records */ + if (mNumMemRecords > SIZE_MAX / sizeof(iv_mem_rec_t)) { + ALOGE("requested memory size is too big."); + return C2_CORRUPTED; + } + mMemRecords = (iv_mem_rec_t *)malloc(mNumMemRecords * sizeof(iv_mem_rec_t)); + if (nullptr == mMemRecords) { + ALOGE("Unable to allocate memory for hold memory records: Size %zu", + mNumMemRecords * sizeof(iv_mem_rec_t)); + mSignalledError = true; + return C2_CORRUPTED; + } + + { + iv_mem_rec_t *ps_mem_rec; + ps_mem_rec = mMemRecords; + for (size_t i = 0; i < mNumMemRecords; i++) { + ps_mem_rec->u4_size = sizeof(iv_mem_rec_t); + ps_mem_rec->pv_base = nullptr; + ps_mem_rec->u4_mem_size = 0; + ps_mem_rec->u4_mem_alignment = 0; + ps_mem_rec->e_mem_type = IV_NA_MEM_TYPE; + + ps_mem_rec++; + } + } + + /* Getting MemRecords Attributes */ + { + 
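+        // IV_CMD_FILL_NUM_MEM_REC asks the encoder to report the size and alignment of each memory record for the requested dimensions, level and reference/reorder configuration; the records are then allocated in the following block.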
iv_fill_mem_rec_ip_t s_fill_mem_rec_ip; + iv_fill_mem_rec_op_t s_fill_mem_rec_op; + + s_fill_mem_rec_ip.u4_size = sizeof(iv_fill_mem_rec_ip_t); + s_fill_mem_rec_op.u4_size = sizeof(iv_fill_mem_rec_op_t); + + s_fill_mem_rec_ip.e_cmd = IV_CMD_FILL_NUM_MEM_REC; + s_fill_mem_rec_ip.ps_mem_rec = mMemRecords; + s_fill_mem_rec_ip.u4_num_mem_rec = mNumMemRecords; + s_fill_mem_rec_ip.u4_max_wd = width; + s_fill_mem_rec_ip.u4_max_ht = height; + s_fill_mem_rec_ip.u4_max_level = mAVCEncLevel; + s_fill_mem_rec_ip.e_color_format = DEFAULT_INP_COLOR_FORMAT; + s_fill_mem_rec_ip.u4_max_ref_cnt = DEFAULT_MAX_REF_FRM; + s_fill_mem_rec_ip.u4_max_reorder_cnt = DEFAULT_MAX_REORDER_FRM; + s_fill_mem_rec_ip.u4_max_srch_rng_x = DEFAULT_MAX_SRCH_RANGE_X; + s_fill_mem_rec_ip.u4_max_srch_rng_y = DEFAULT_MAX_SRCH_RANGE_Y; + + status = ive_api_function(nullptr, &s_fill_mem_rec_ip, &s_fill_mem_rec_op); + + if (status != IV_SUCCESS) { + ALOGE("Fill memory records failed = 0x%x\n", + s_fill_mem_rec_op.u4_error_code); + return C2_CORRUPTED; + } + } + + /* Allocating Memory for Mem Records */ + { + WORD32 total_size; + iv_mem_rec_t *ps_mem_rec; + total_size = 0; + ps_mem_rec = mMemRecords; + + for (size_t i = 0; i < mNumMemRecords; i++) { + ps_mem_rec->pv_base = ive_aligned_malloc( + ps_mem_rec->u4_mem_alignment, ps_mem_rec->u4_mem_size); + if (ps_mem_rec->pv_base == nullptr) { + ALOGE("Allocation failure for mem record id %zu size %u\n", i, + ps_mem_rec->u4_mem_size); + return C2_CORRUPTED; + + } + total_size += ps_mem_rec->u4_mem_size; + + ps_mem_rec++; + } + } + + /* Codec Instance Creation */ + { + ive_init_ip_t s_init_ip; + ive_init_op_t s_init_op; + + mCodecCtx = (iv_obj_t *)mMemRecords[0].pv_base; + mCodecCtx->u4_size = sizeof(iv_obj_t); + mCodecCtx->pv_fxns = (void *)ive_api_function; + + s_init_ip.u4_size = sizeof(ive_init_ip_t); + s_init_op.u4_size = sizeof(ive_init_op_t); + + s_init_ip.e_cmd = IV_CMD_INIT; + s_init_ip.u4_num_mem_rec = mNumMemRecords; + s_init_ip.ps_mem_rec = mMemRecords; + s_init_ip.u4_max_wd = width; + s_init_ip.u4_max_ht = height; + s_init_ip.u4_max_ref_cnt = DEFAULT_MAX_REF_FRM; + s_init_ip.u4_max_reorder_cnt = DEFAULT_MAX_REORDER_FRM; + s_init_ip.u4_max_level = mAVCEncLevel; + s_init_ip.e_inp_color_fmt = mIvVideoColorFormat; + + if (mReconEnable || mPSNREnable) { + s_init_ip.u4_enable_recon = 1; + } else { + s_init_ip.u4_enable_recon = 0; + } + s_init_ip.e_recon_color_fmt = DEFAULT_RECON_COLOR_FORMAT; + s_init_ip.e_rc_mode = DEFAULT_RC_MODE; + s_init_ip.u4_max_framerate = DEFAULT_MAX_FRAMERATE; + s_init_ip.u4_max_bitrate = DEFAULT_MAX_BITRATE; + s_init_ip.u4_num_bframes = mBframes; + s_init_ip.e_content_type = IV_PROGRESSIVE; + s_init_ip.u4_max_srch_rng_x = DEFAULT_MAX_SRCH_RANGE_X; + s_init_ip.u4_max_srch_rng_y = DEFAULT_MAX_SRCH_RANGE_Y; + s_init_ip.e_slice_mode = mSliceMode; + s_init_ip.u4_slice_param = mSliceParam; + s_init_ip.e_arch = mArch; + s_init_ip.e_soc = DEFAULT_SOC; + + status = ive_api_function(mCodecCtx, &s_init_ip, &s_init_op); + + if (status != IV_SUCCESS) { + ALOGE("Init encoder failed = 0x%x\n", s_init_op.u4_error_code); + return C2_CORRUPTED; + } + } + + /* Get Codec Version */ + logVersion(); + + /* set processor details */ + setNumCores(); + + /* Video control Set Frame dimensions */ + setDimensions(); + + /* Video control Set Frame rates */ + setFrameRate(); + + /* Video control Set IPE Params */ + setIpeParams(); + + /* Video control Set Bitrate */ + setBitRate(); + + /* Video control Set QP */ + setQp(); + + /* Video control Set AIR params */ + setAirParams(); + + /* 
Video control Set VBV params */ + setVbvParams(); + + /* Video control Set Motion estimation params */ + setMeParams(); + + /* Video control Set GOP params */ + setGopParams(); + + /* Video control Set Deblock params */ + setDeblockParams(); + + /* Video control Set Profile params */ + setProfileParams(); + + /* Video control Set in Encode header mode */ + setEncMode(IVE_ENC_MODE_HEADER); + + ALOGV("init_codec successfull"); + + mSpsPpsHeaderReceived = false; + mStarted = true; + + return C2_OK; +} + +c2_status_t C2SoftAvcEnc::releaseEncoder() { + IV_STATUS_T status = IV_SUCCESS; + iv_retrieve_mem_rec_ip_t s_retrieve_mem_ip; + iv_retrieve_mem_rec_op_t s_retrieve_mem_op; + iv_mem_rec_t *ps_mem_rec; + + if (!mStarted) { + return C2_OK; + } + + s_retrieve_mem_ip.u4_size = sizeof(iv_retrieve_mem_rec_ip_t); + s_retrieve_mem_op.u4_size = sizeof(iv_retrieve_mem_rec_op_t); + s_retrieve_mem_ip.e_cmd = IV_CMD_RETRIEVE_MEMREC; + s_retrieve_mem_ip.ps_mem_rec = mMemRecords; + + status = ive_api_function(mCodecCtx, &s_retrieve_mem_ip, &s_retrieve_mem_op); + + if (status != IV_SUCCESS) { + ALOGE("Unable to retrieve memory records = 0x%x\n", + s_retrieve_mem_op.u4_error_code); + return C2_CORRUPTED; + } + + /* Free memory records */ + ps_mem_rec = mMemRecords; + for (size_t i = 0; i < s_retrieve_mem_op.u4_num_mem_rec_filled; i++) { + if (ps_mem_rec) ive_aligned_free(ps_mem_rec->pv_base); + else { + ALOGE("memory record is null."); + return C2_CORRUPTED; + } + ps_mem_rec++; + } + + if (mMemRecords) free(mMemRecords); + + // clear other pointers into the space being free()d + mCodecCtx = nullptr; + + mStarted = false; + + return C2_OK; +} + +c2_status_t C2SoftAvcEnc::setEncodeArgs( + ive_video_encode_ip_t *ps_encode_ip, + ive_video_encode_op_t *ps_encode_op, + const C2GraphicView *const input, + uint8_t *base, + uint32_t capacity, + uint64_t timestamp) { + iv_raw_buf_t *ps_inp_raw_buf; + + ps_inp_raw_buf = &ps_encode_ip->s_inp_buf; + ps_encode_ip->s_out_buf.pv_buf = base; + ps_encode_ip->s_out_buf.u4_bytes = 0; + ps_encode_ip->s_out_buf.u4_bufsize = capacity; + ps_encode_ip->u4_size = sizeof(ive_video_encode_ip_t); + ps_encode_op->u4_size = sizeof(ive_video_encode_op_t); + + ps_encode_ip->e_cmd = IVE_CMD_VIDEO_ENCODE; + ps_encode_ip->pv_bufs = nullptr; + ps_encode_ip->pv_mb_info = nullptr; + ps_encode_ip->pv_pic_info = nullptr; + ps_encode_ip->u4_mb_info_type = 0; + ps_encode_ip->u4_pic_info_type = 0; + ps_encode_ip->u4_is_last = 0; + ps_encode_ip->u4_timestamp_high = timestamp >> 32; + ps_encode_ip->u4_timestamp_low = timestamp & 0xFFFFFFFF; + ps_encode_op->s_out_buf.pv_buf = nullptr; + + /* Initialize color formats */ + memset(ps_inp_raw_buf, 0, sizeof(iv_raw_buf_t)); + ps_inp_raw_buf->u4_size = sizeof(iv_raw_buf_t); + ps_inp_raw_buf->e_color_fmt = mIvVideoColorFormat; + if (input == nullptr) { + if (mSawInputEOS){ + ps_encode_ip->u4_is_last = 1; + } + return C2_OK; + } + + if (input->width() < mSize->width || + input->height() < mSize->height) { + /* Expect width height to be configured */ + ALOGW("unexpected Capacity Aspect %d(%d) x %d(%d)", input->width(), + mSize->width, input->height(), mSize->height); + return C2_BAD_VALUE; + } + ALOGV("width = %d, height = %d", input->width(), input->height()); + const C2PlanarLayout &layout = input->layout(); + uint8_t *yPlane = const_cast(input->data()[C2PlanarLayout::PLANE_Y]); + uint8_t *uPlane = const_cast(input->data()[C2PlanarLayout::PLANE_U]); + uint8_t *vPlane = const_cast(input->data()[C2PlanarLayout::PLANE_V]); + int32_t yStride = 
layout.planes[C2PlanarLayout::PLANE_Y].rowInc; + int32_t uStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc; + int32_t vStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc; + + uint32_t width = mSize->width; + uint32_t height = mSize->height; + // width and height are always even (as block size is 16x16) + CHECK_EQ((width & 1u), 0u); + CHECK_EQ((height & 1u), 0u); + size_t yPlaneSize = width * height; + + switch (layout.type) { + case C2PlanarLayout::TYPE_RGB: + [[fallthrough]]; + case C2PlanarLayout::TYPE_RGBA: { + ALOGV("yPlaneSize = %zu", yPlaneSize); + MemoryBlock conversionBuffer = mConversionBuffers.fetch(yPlaneSize * 3 / 2); + mConversionBuffersInUse.emplace(conversionBuffer.data(), conversionBuffer); + yPlane = conversionBuffer.data(); + uPlane = yPlane + yPlaneSize; + vPlane = uPlane + yPlaneSize / 4; + yStride = width; + uStride = vStride = yStride / 2; + ConvertRGBToPlanarYUV(yPlane, yStride, height, conversionBuffer.size(), *input); + break; + } + case C2PlanarLayout::TYPE_YUV: { + if (!IsYUV420(*input)) { + ALOGE("input is not YUV420"); + return C2_BAD_VALUE; + } + + if (layout.planes[layout.PLANE_Y].colInc == 1 + && layout.planes[layout.PLANE_U].colInc == 1 + && layout.planes[layout.PLANE_V].colInc == 1 + && uStride == vStride + && yStride == 2 * vStride) { + // I420 compatible - already set up above + break; + } + + // copy to I420 + yStride = width; + uStride = vStride = yStride / 2; + MemoryBlock conversionBuffer = mConversionBuffers.fetch(yPlaneSize * 3 / 2); + mConversionBuffersInUse.emplace(conversionBuffer.data(), conversionBuffer); + MediaImage2 img = CreateYUV420PlanarMediaImage2(width, height, yStride, height); + status_t err = ImageCopy(conversionBuffer.data(), &img, *input); + if (err != OK) { + ALOGE("Buffer conversion failed: %d", err); + return C2_BAD_VALUE; + } + yPlane = conversionBuffer.data(); + uPlane = yPlane + yPlaneSize; + vPlane = uPlane + yPlaneSize / 4; + break; + + } + + case C2PlanarLayout::TYPE_YUVA: + ALOGE("YUVA plane type is not supported"); + return C2_BAD_VALUE; + + default: + ALOGE("Unrecognized plane type: %d", layout.type); + return C2_BAD_VALUE; + } + + switch (mIvVideoColorFormat) { + case IV_YUV_420P: + { + // input buffer is supposed to be const but Ittiam API wants bare pointer. 
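+            // Planar 4:2:0 layout: the chroma planes are half the luma width and height, so planes 1 and 2 get halved widths, heights and strides below.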
+ ps_inp_raw_buf->apv_bufs[0] = yPlane; + ps_inp_raw_buf->apv_bufs[1] = uPlane; + ps_inp_raw_buf->apv_bufs[2] = vPlane; + + ps_inp_raw_buf->au4_wd[0] = input->width(); + ps_inp_raw_buf->au4_wd[1] = input->width() / 2; + ps_inp_raw_buf->au4_wd[2] = input->width() / 2; + + ps_inp_raw_buf->au4_ht[0] = input->height(); + ps_inp_raw_buf->au4_ht[1] = input->height() / 2; + ps_inp_raw_buf->au4_ht[2] = input->height() / 2; + + ps_inp_raw_buf->au4_strd[0] = yStride; + ps_inp_raw_buf->au4_strd[1] = uStride; + ps_inp_raw_buf->au4_strd[2] = vStride; + break; + } + + case IV_YUV_422ILE: + { + // TODO + // ps_inp_raw_buf->apv_bufs[0] = pu1_buf; + // ps_inp_raw_buf->au4_wd[0] = mWidth * 2; + // ps_inp_raw_buf->au4_ht[0] = mHeight; + // ps_inp_raw_buf->au4_strd[0] = mStride * 2; + break; + } + + case IV_YUV_420SP_UV: + case IV_YUV_420SP_VU: + default: + { + ps_inp_raw_buf->apv_bufs[0] = yPlane; + ps_inp_raw_buf->apv_bufs[1] = uPlane; + + ps_inp_raw_buf->au4_wd[0] = input->width(); + ps_inp_raw_buf->au4_wd[1] = input->width(); + + ps_inp_raw_buf->au4_ht[0] = input->height(); + ps_inp_raw_buf->au4_ht[1] = input->height() / 2; + + ps_inp_raw_buf->au4_strd[0] = yStride; + ps_inp_raw_buf->au4_strd[1] = uStride; + break; + } + } + return C2_OK; +} + +void C2SoftAvcEnc::process( + const std::unique_ptr &work, + const std::shared_ptr &pool) { + // Initialize output work + work->result = C2_OK; + work->workletsProcessed = 1u; + work->worklets.front()->output.flags = work->input.flags; + + IV_STATUS_T status; + WORD32 timeDelay, timeTaken; + uint64_t timestamp = work->input.ordinal.timestamp.peekull(); + + // Initialize encoder if not already initialized + if (mCodecCtx == nullptr) { + if (C2_OK != initEncoder()) { + ALOGE("Failed to initialize encoder"); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + } + if (mSignalledError) { + return; + } + + // while (!mSawOutputEOS && !outQueue.empty()) { + c2_status_t error; + ive_video_encode_ip_t s_encode_ip; + ive_video_encode_op_t s_encode_op; + + if (!mSpsPpsHeaderReceived) { + constexpr uint32_t kHeaderLength = MIN_STREAM_SIZE; + uint8_t header[kHeaderLength]; + error = setEncodeArgs( + &s_encode_ip, &s_encode_op, nullptr, header, kHeaderLength, timestamp); + if (error != C2_OK) { + ALOGE("setEncodeArgs failed: %d", error); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + status = ive_api_function(mCodecCtx, &s_encode_ip, &s_encode_op); + + if (IV_SUCCESS != status) { + ALOGE("Encode header failed = 0x%x\n", + s_encode_op.u4_error_code); + return; + } else { + ALOGV("Bytes Generated in header %d\n", + s_encode_op.s_out_buf.u4_bytes); + } + + mSpsPpsHeaderReceived = true; + + std::unique_ptr csd = + C2StreamCsdInfo::output::AllocUnique(s_encode_op.s_out_buf.u4_bytes, 0u); + if (!csd) { + ALOGE("CSD allocation failed"); + mSignalledError = true; + work->result = C2_NO_MEMORY; + return; + } + memcpy(csd->m.value, header, s_encode_op.s_out_buf.u4_bytes); + work->worklets.front()->output.configUpdate.push_back(std::move(csd)); + + DUMP_TO_FILE( + mOutFile, csd->m.value, csd->flexCount()); + } + + // handle dynamic config parameters + { + IntfImpl::Lock lock = mIntf->lock(); + std::shared_ptr intraRefresh = mIntf->getIntraRefresh_l(); + std::shared_ptr bitrate = mIntf->getBitrate_l(); + std::shared_ptr requestSync = mIntf->getRequestSync_l(); + lock.unlock(); + + if (bitrate != mBitrate) { + mBitrate = bitrate; + setBitRate(); + } + + if (intraRefresh != mIntraRefresh) { + mIntraRefresh = intraRefresh; + setAirParams(); + } + 
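+        // A sync-frame request also arrives as a new parameter object; when set, an IDR is forced via setFrameType() and the request flag is cleared back through config().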
+ if (requestSync != mRequestSync) { + // we can handle IDR immediately + if (requestSync->value) { + // unset request + C2StreamRequestSyncFrameTuning::output clearSync(0u, C2_FALSE); + std::vector> failures; + mIntf->config({ &clearSync }, C2_MAY_BLOCK, &failures); + ALOGV("Got sync request"); + setFrameType(IV_IDR_FRAME); + } + mRequestSync = requestSync; + } + } + + if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) { + mSawInputEOS = true; + } + + /* In normal mode, store inputBufferInfo and this will be returned + when encoder consumes this input */ + // if (!mInputDataIsMeta && (inputBufferInfo != NULL)) { + // for (size_t i = 0; i < MAX_INPUT_BUFFER_HEADERS; i++) { + // if (NULL == mInputBufferInfo[i]) { + // mInputBufferInfo[i] = inputBufferInfo; + // break; + // } + // } + // } + std::shared_ptr view; + std::shared_ptr inputBuffer; + if (!work->input.buffers.empty()) { + inputBuffer = work->input.buffers[0]; + view = std::make_shared( + inputBuffer->data().graphicBlocks().front().map().get()); + if (view->error() != C2_OK) { + ALOGE("graphic view map err = %d", view->error()); + return; + } + } + + std::shared_ptr block; + + do { + C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE }; + // TODO: error handling, proper usage, etc. + c2_status_t err = pool->fetchLinearBlock(mOutBufferSize, usage, &block); + if (err != C2_OK) { + ALOGE("fetch linear block err = %d", err); + work->result = err; + return; + } + C2WriteView wView = block->map().get(); + if (wView.error() != C2_OK) { + ALOGE("write view map err = %d", wView.error()); + work->result = wView.error(); + return; + } + + error = setEncodeArgs( + &s_encode_ip, &s_encode_op, view.get(), wView.base(), wView.capacity(), timestamp); + if (error != C2_OK) { + ALOGE("setEncodeArgs failed : %d", error); + mSignalledError = true; + work->result = error; + return; + } + + // DUMP_TO_FILE( + // mInFile, s_encode_ip.s_inp_buf.apv_bufs[0], + // (mHeight * mStride * 3 / 2)); + + GETTIME(&mTimeStart, nullptr); + /* Compute time elapsed between end of previous decode() + * to start of current decode() */ + TIME_DIFF(mTimeEnd, mTimeStart, timeDelay); + status = ive_api_function(mCodecCtx, &s_encode_ip, &s_encode_op); + + if (IV_SUCCESS != status) { + if ((s_encode_op.u4_error_code & 0xFF) == IH264E_BITSTREAM_BUFFER_OVERFLOW) { + // TODO: use IVE_CMD_CTL_GETBUFINFO for proper max input size? 
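+                // The bitstream did not fit: double the output buffer size and retry the same frame with a larger linear block on the next loop iteration.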
+ mOutBufferSize *= 2; + continue; + } + ALOGE("Encode Frame failed = 0x%x\n", + s_encode_op.u4_error_code); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + } while (IV_SUCCESS != status); + + // Hold input buffer reference + if (inputBuffer) { + mBuffers[s_encode_ip.s_inp_buf.apv_bufs[0]] = inputBuffer; + } + + GETTIME(&mTimeEnd, nullptr); + /* Compute time taken for decode() */ + TIME_DIFF(mTimeStart, mTimeEnd, timeTaken); + + ALOGV("timeTaken=%6d delay=%6d numBytes=%6d", timeTaken, timeDelay, + s_encode_op.s_out_buf.u4_bytes); + + void *freed = s_encode_op.s_inp_buf.apv_bufs[0]; + /* If encoder frees up an input buffer, mark it as free */ + if (freed != nullptr) { + if (mBuffers.count(freed) == 0u) { + ALOGD("buffer not tracked"); + } else { + // Release input buffer reference + mBuffers.erase(freed); + mConversionBuffersInUse.erase(freed); + } + } + + work->worklets.front()->output.flags = work->input.flags; + work->worklets.front()->output.ordinal = work->input.ordinal; + work->worklets.front()->output.ordinal.timestamp = + ((uint64_t)s_encode_op.u4_timestamp_high << 32) | s_encode_op.u4_timestamp_low; + work->worklets.front()->output.buffers.clear(); + + if (s_encode_op.s_out_buf.u4_bytes) { + std::shared_ptr buffer = + createLinearBuffer(block, 0, s_encode_op.s_out_buf.u4_bytes); + if (IV_IDR_FRAME == s_encode_op.u4_encoded_frame_type) { + ALOGV("IDR frame produced"); + buffer->setInfo(std::make_shared( + 0u /* stream id */, C2PictureTypeKeyFrame)); + } + work->worklets.front()->output.buffers.push_back(buffer); + } + + if (s_encode_op.u4_is_last) { + // outputBufferHeader->nFlags |= OMX_BUFFERFLAG_EOS; + mSawOutputEOS = true; + } else { + // outputBufferHeader->nFlags &= ~OMX_BUFFERFLAG_EOS; + } +} + +c2_status_t C2SoftAvcEnc::drain( + uint32_t drainMode, + const std::shared_ptr &pool) { + // TODO: use IVE_CMD_CTL_FLUSH? + (void)drainMode; + (void)pool; + return C2_OK; +} + + +class C2SoftAvcEncFactory : public C2ComponentFactory { +public: + C2SoftAvcEncFactory() : mHelper(std::static_pointer_cast( + GetCodec2PlatformComponentStore()->getParamReflector())) { + } + + virtual c2_status_t createComponent( + c2_node_id_t id, + std::shared_ptr* const component, + std::function deleter) override { + *component = std::shared_ptr( + new C2SoftAvcEnc(COMPONENT_NAME, + id, + std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual c2_status_t createInterface( + c2_node_id_t id, + std::shared_ptr* const interface, + std::function deleter) override { + *interface = std::shared_ptr( + new SimpleInterface( + COMPONENT_NAME, id, std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual ~C2SoftAvcEncFactory() override = default; + +private: + std::shared_ptr mHelper; +}; + +} // namespace android + +extern "C" ::C2ComponentFactory* CreateCodec2Factory() { + ALOGV("in %s", __func__); + return new ::android::C2SoftAvcEncFactory(); +} + +extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) { + ALOGV("in %s", __func__); + delete factory; +} diff --git a/media/codec2/components/avc/C2SoftAvcEnc.h b/media/codec2/components/avc/C2SoftAvcEnc.h new file mode 100644 index 0000000000000000000000000000000000000000..aa3ca615c8c005383654ad0d52ffa1171d6bc969 --- /dev/null +++ b/media/codec2/components/avc/C2SoftAvcEnc.h @@ -0,0 +1,296 @@ +/* + * Copyright 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_C2_SOFT_AVC_ENC_H__ +#define ANDROID_C2_SOFT_AVC_ENC_H__ + +#include + +#include + +#include + +#include "ih264_typedefs.h" +#include "iv2.h" +#include "ive2.h" + +namespace android { + +#define CODEC_MAX_CORES 4 +#define LEN_STATUS_BUFFER (10 * 1024) +#define MAX_VBV_BUFF_SIZE (120 * 16384) +#define MAX_NUM_IO_BUFS 3 + +#define DEFAULT_MAX_REF_FRM 2 +#define DEFAULT_MAX_REORDER_FRM 0 +#define DEFAULT_QP_MIN 10 +#define DEFAULT_QP_MAX 40 +#define DEFAULT_MAX_BITRATE 20000000 +#define DEFAULT_MAX_SRCH_RANGE_X 256 +#define DEFAULT_MAX_SRCH_RANGE_Y 256 +#define DEFAULT_MAX_FRAMERATE 120000 +#define DEFAULT_NUM_CORES 1 +#define DEFAULT_NUM_CORES_PRE_ENC 0 +#define DEFAULT_FPS 30 +#define DEFAULT_ENC_SPEED IVE_NORMAL + +#define DEFAULT_MEM_REC_CNT 0 +#define DEFAULT_RECON_ENABLE 0 +#define DEFAULT_CHKSUM_ENABLE 0 +#define DEFAULT_START_FRM 0 +#define DEFAULT_NUM_FRMS 0xFFFFFFFF +#define DEFAULT_INP_COLOR_FORMAT IV_YUV_420SP_VU +#define DEFAULT_RECON_COLOR_FORMAT IV_YUV_420P +#define DEFAULT_LOOPBACK 0 +#define DEFAULT_SRC_FRAME_RATE 30 +#define DEFAULT_TGT_FRAME_RATE 30 +#define DEFAULT_MAX_WD 1920 +#define DEFAULT_MAX_HT 1920 +#define DEFAULT_MAX_LEVEL 41 +#define DEFAULT_STRIDE 0 +#define DEFAULT_WD 1280 +#define DEFAULT_HT 720 +#define DEFAULT_PSNR_ENABLE 0 +#define DEFAULT_ME_SPEED 100 +#define DEFAULT_ENABLE_FAST_SAD 0 +#define DEFAULT_ENABLE_ALT_REF 0 +#define DEFAULT_RC_MODE IVE_RC_STORAGE +#define DEFAULT_BITRATE 6000000 +#define DEFAULT_I_QP 22 +#define DEFAULT_I_QP_MAX DEFAULT_QP_MAX +#define DEFAULT_I_QP_MIN DEFAULT_QP_MIN +#define DEFAULT_P_QP 28 +#define DEFAULT_P_QP_MAX DEFAULT_QP_MAX +#define DEFAULT_P_QP_MIN DEFAULT_QP_MIN +#define DEFAULT_B_QP 22 +#define DEFAULT_B_QP_MAX DEFAULT_QP_MAX +#define DEFAULT_B_QP_MIN DEFAULT_QP_MIN +#define DEFAULT_AIR IVE_AIR_MODE_NONE +#define DEFAULT_AIR_REFRESH_PERIOD 30 +#define DEFAULT_SRCH_RNG_X 64 +#define DEFAULT_SRCH_RNG_Y 48 +#define DEFAULT_I_INTERVAL 30 +#define DEFAULT_IDR_INTERVAL 1000 +#define DEFAULT_B_FRAMES 0 +#define DEFAULT_DISABLE_DEBLK_LEVEL 0 +#define DEFAULT_HPEL 1 +#define DEFAULT_QPEL 1 +#define DEFAULT_I4 1 +#define DEFAULT_EPROFILE IV_PROFILE_BASE +#define DEFAULT_ENTROPY_MODE 0 +#define DEFAULT_SLICE_MODE IVE_SLICE_MODE_NONE +#define DEFAULT_SLICE_PARAM 256 +#define DEFAULT_ARCH ARCH_ARM_A9Q +#define DEFAULT_SOC SOC_GENERIC +#define DEFAULT_INTRA4x4 0 +#define STRLENGTH 500 +#define DEFAULT_CONSTRAINED_INTRA 0 + +#define MIN(a, b) ((a) < (b))? (a) : (b) +#define MAX(a, b) ((a) > (b))? 
(a) : (b) +#define ALIGN16(x) ((((x) + 15) >> 4) << 4) +#define ALIGN128(x) ((((x) + 127) >> 7) << 7) +#define ALIGN4096(x) ((((x) + 4095) >> 12) << 12) + +/** Used to remove warnings about unused parameters */ +#define UNUSED(x) ((void)(x)) + +/** Get time */ +#define GETTIME(a, b) gettimeofday(a, b); + +/** Compute difference between start and end */ +#define TIME_DIFF(start, end, diff) \ + diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \ + ((end).tv_usec - (start).tv_usec); + +#define ive_aligned_malloc(alignment, size) memalign(alignment, size) +#define ive_aligned_free(buf) free(buf) + +struct C2SoftAvcEnc : public SimpleC2Component { + class IntfImpl; + + C2SoftAvcEnc(const char *name, c2_node_id_t id, const std::shared_ptr &intfImpl); + + // From SimpleC2Component + c2_status_t onInit() override; + c2_status_t onStop() override; + void onReset() override; + void onRelease() override; + c2_status_t onFlush_sm() override; + void process( + const std::unique_ptr &work, + const std::shared_ptr &pool) override; + c2_status_t drain( + uint32_t drainMode, + const std::shared_ptr &pool) override; + +protected: + virtual ~C2SoftAvcEnc(); + +private: + // OMX input buffer's timestamp and flags + typedef struct { + int64_t mTimeUs; + int32_t mFlags; + } InputBufferInfo; + + std::shared_ptr mIntf; + + int32_t mStride; + + struct timeval mTimeStart; // Time at the start of decode() + struct timeval mTimeEnd; // Time at the end of decode() + +#ifdef FILE_DUMP_ENABLE + char mInFile[200]; + char mOutFile[200]; +#endif /* FILE_DUMP_ENABLE */ + + IV_COLOR_FORMAT_T mIvVideoColorFormat; + + IV_PROFILE_T mAVCEncProfile __unused; + WORD32 mAVCEncLevel; + bool mStarted; + bool mSpsPpsHeaderReceived; + + bool mSawInputEOS; + bool mSawOutputEOS; + bool mSignalledError; + bool mIntra4x4; + bool mEnableFastSad; + bool mEnableAltRef; + bool mReconEnable; + bool mPSNREnable; + bool mEntropyMode; + bool mConstrainedIntraFlag; + IVE_SPEED_CONFIG mEncSpeed; + + iv_obj_t *mCodecCtx; // Codec context + iv_mem_rec_t *mMemRecords; // Memory records requested by the codec + size_t mNumMemRecords; // Number of memory records requested by codec + size_t mNumCores; // Number of cores used by the codec + + // configurations used by component in process + // (TODO: keep this in intf but make them internal only) + std::shared_ptr mSize; + std::shared_ptr mIntraRefresh; + std::shared_ptr mFrameRate; + std::shared_ptr mBitrate; + std::shared_ptr mRequestSync; + + uint32_t mOutBufferSize; + UWORD32 mHeaderGenerated; + UWORD32 mBframes; + IV_ARCH_T mArch; + IVE_SLICE_MODE_T mSliceMode; + UWORD32 mSliceParam; + bool mHalfPelEnable; + UWORD32 mIInterval; + UWORD32 mIDRInterval; + UWORD32 mDisableDeblkLevel; + std::map> mBuffers; + MemoryBlockPool mConversionBuffers; + std::map mConversionBuffersInUse; + + void initEncParams(); + c2_status_t initEncoder(); + c2_status_t releaseEncoder(); + + c2_status_t setFrameType(IV_PICTURE_CODING_TYPE_T e_frame_type); + c2_status_t setQp(); + c2_status_t setEncMode(IVE_ENC_MODE_T e_enc_mode); + c2_status_t setDimensions(); + c2_status_t setNumCores(); + c2_status_t setFrameRate(); + c2_status_t setIpeParams(); + c2_status_t setBitRate(); + c2_status_t setAirParams(); + c2_status_t setMeParams(); + c2_status_t setGopParams(); + c2_status_t setProfileParams(); + c2_status_t setDeblockParams(); + c2_status_t setVbvParams(); + void logVersion(); + c2_status_t setEncodeArgs( + ive_video_encode_ip_t *ps_encode_ip, + ive_video_encode_op_t *ps_encode_op, + const C2GraphicView *const input, + 
uint8_t *base, + uint32_t capacity, + uint64_t timestamp); + + C2_DO_NOT_COPY(C2SoftAvcEnc); +}; + +#ifdef FILE_DUMP_ENABLE + +#define INPUT_DUMP_PATH "/sdcard/media/avce_input" +#define INPUT_DUMP_EXT "yuv" +#define OUTPUT_DUMP_PATH "/sdcard/media/avce_output" +#define OUTPUT_DUMP_EXT "h264" + +#define GENERATE_FILE_NAMES() { \ + GETTIME(&mTimeStart, NULL); \ + strcpy(mInFile, ""); \ + sprintf(mInFile, "%s_%ld.%ld.%s", INPUT_DUMP_PATH, \ + mTimeStart.tv_sec, mTimeStart.tv_usec, \ + INPUT_DUMP_EXT); \ + strcpy(mOutFile, ""); \ + sprintf(mOutFile, "%s_%ld.%ld.%s", OUTPUT_DUMP_PATH,\ + mTimeStart.tv_sec, mTimeStart.tv_usec, \ + OUTPUT_DUMP_EXT); \ +} + +#define CREATE_DUMP_FILE(m_filename) { \ + FILE *fp = fopen(m_filename, "wb"); \ + if (fp != NULL) { \ + ALOGD("Opened file %s", m_filename); \ + fclose(fp); \ + } else { \ + ALOGD("Could not open file %s", m_filename); \ + } \ +} +#define DUMP_TO_FILE(m_filename, m_buf, m_size) \ +{ \ + FILE *fp = fopen(m_filename, "ab"); \ + if (fp != NULL && m_buf != NULL) { \ + int i; \ + i = fwrite(m_buf, 1, m_size, fp); \ + ALOGD("fwrite ret %d to write %d", i, m_size); \ + if (i != (int)m_size) { \ + ALOGD("Error in fwrite, returned %d", i); \ + perror("Error in write to file"); \ + } \ + fclose(fp); \ + } else { \ + ALOGD("Could not write to file %s", m_filename);\ + if (fp != NULL) \ + fclose(fp); \ + } \ +} +#else /* FILE_DUMP_ENABLE */ +#define INPUT_DUMP_PATH +#define INPUT_DUMP_EXT +#define OUTPUT_DUMP_PATH +#define OUTPUT_DUMP_EXT +#define GENERATE_FILE_NAMES() +#define CREATE_DUMP_FILE(m_filename) +#define DUMP_TO_FILE(m_filename, m_buf, m_size) +#endif /* FILE_DUMP_ENABLE */ + +} // namespace android + +#endif // ANDROID_C2_SOFT_AVC_ENC_H__ diff --git a/media/codec2/components/base/Android.bp b/media/codec2/components/base/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..ad456e20ae2ff034e50914b58774a57ce226c288 --- /dev/null +++ b/media/codec2/components/base/Android.bp @@ -0,0 +1,141 @@ +// DO NOT DEPEND ON THIS DIRECTLY +// use libstagefright_soft_c2-defaults instead +cc_library_shared { + name: "libstagefright_soft_c2common", + defaults: ["libstagefright_codec2-impl-defaults"], + vendor_available: true, + + srcs: [ + "SimpleC2Component.cpp", + "SimpleC2Interface.cpp", + ], + + export_include_dirs: [ + "include", + ], + + export_shared_lib_headers: [ + "libstagefright_ccodec_utils", + ], + + shared_libs: [ + "libcutils", // for properties + "liblog", // for ALOG + "libstagefright_ccodec_utils", // for ImageCopy + "libstagefright_foundation", // for Mutexed + ], + + sanitize: { + misc_undefined: [ + "unsigned-integer-overflow", + "signed-integer-overflow", + ], + cfi: true, + diag: { + cfi: true, + }, + }, + + ldflags: ["-Wl,-Bsymbolic"], +} + +// public dependency for software codec implementation +// to be used by code under media/codecs/* only as its stability is not guaranteed +cc_defaults { + name: "libstagefright_soft_c2-defaults", + defaults: ["libstagefright_codec2-impl-defaults"], + vendor_available: true, + + export_shared_lib_headers: [ + "libstagefright_ccodec_utils", + ], + + shared_libs: [ + "libcutils", // for properties + "liblog", // for ALOG + "libstagefright_foundation", // for ColorUtils and MIME + "libstagefright_ccodec_utils", // for ImageCopy + "libstagefright_soft_c2common", + ], + + cflags: [ + "-Wall", + "-Werror", + ], + + ldflags: ["-Wl,-Bsymbolic"], +} + +// public dependency for software codec implementation +// to be used by code under media/codecs/* only +cc_defaults { + name: 
"libstagefright_soft_c2_sanitize_all-defaults", + + sanitize: { + misc_undefined: [ + "unsigned-integer-overflow", + "signed-integer-overflow", + ], + cfi: true, + diag: { + cfi: true, + }, + }, +} + +// public dependency for software codec implementation +// to be used by code under media/codecs/* only +cc_defaults { + name: "libstagefright_soft_c2_sanitize_signed-defaults", + + sanitize: { + misc_undefined: [ + "signed-integer-overflow", + ], + cfi: true, + diag: { + cfi: true, + }, + }, +} + +// TEMP: used by cheets2 project - remove when no longer used +cc_library_shared { + name: "libstagefright_simple_c2component", + vendor_available: true, + + srcs: [ + "SimpleC2Interface.cpp", + ], + + local_include_dirs: [ + "include", + ], + + export_include_dirs: [ + "include", + ], + + shared_libs: [ + "libcutils", + "liblog", + "libstagefright_codec2", + "libstagefright_codec2_vndk", + "libstagefright_foundation", + "libutils", + ], + + sanitize: { + misc_undefined: [ + "unsigned-integer-overflow", + "signed-integer-overflow", + ], + cfi: true, + diag: { + cfi: true, + }, + }, + + ldflags: ["-Wl,-Bsymbolic"], +} + diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp new file mode 100644 index 0000000000000000000000000000000000000000..7990ee5cd4f51d6bf5c7d9b3518a6e9e74fd4c47 --- /dev/null +++ b/media/codec2/components/base/SimpleC2Component.cpp @@ -0,0 +1,562 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "SimpleC2Component" +#include + +#include +#include + +#include + +#include +#include +#include +#include + +namespace android { + +std::unique_ptr SimpleC2Component::WorkQueue::pop_front() { + std::unique_ptr work = std::move(mQueue.front().work); + mQueue.pop_front(); + return work; +} + +void SimpleC2Component::WorkQueue::push_back(std::unique_ptr work) { + mQueue.push_back({ std::move(work), NO_DRAIN }); +} + +bool SimpleC2Component::WorkQueue::empty() const { + return mQueue.empty(); +} + +void SimpleC2Component::WorkQueue::clear() { + mQueue.clear(); +} + +uint32_t SimpleC2Component::WorkQueue::drainMode() const { + return mQueue.front().drainMode; +} + +void SimpleC2Component::WorkQueue::markDrain(uint32_t drainMode) { + mQueue.push_back({ nullptr, drainMode }); +} + +//////////////////////////////////////////////////////////////////////////////// + +SimpleC2Component::WorkHandler::WorkHandler() : mRunning(false) {} + +void SimpleC2Component::WorkHandler::setComponent( + const std::shared_ptr &thiz) { + mThiz = thiz; +} + +static void Reply(const sp &msg, int32_t *err = nullptr) { + sp replyId; + CHECK(msg->senderAwaitsResponse(&replyId)); + sp reply = new AMessage; + if (err) { + reply->setInt32("err", *err); + } + reply->postReply(replyId); +} + +void SimpleC2Component::WorkHandler::onMessageReceived(const sp &msg) { + std::shared_ptr thiz = mThiz.lock(); + if (!thiz) { + ALOGD("component not yet set; msg = %s", msg->debugString().c_str()); + sp replyId; + if (msg->senderAwaitsResponse(&replyId)) { + sp reply = new AMessage; + reply->setInt32("err", C2_CORRUPTED); + reply->postReply(replyId); + } + return; + } + + switch (msg->what()) { + case kWhatProcess: { + if (mRunning) { + if (thiz->processQueue()) { + (new AMessage(kWhatProcess, this))->post(); + } + } else { + ALOGV("Ignore process message as we're not running"); + } + break; + } + case kWhatInit: { + int32_t err = thiz->onInit(); + Reply(msg, &err); + [[fallthrough]]; + } + case kWhatStart: { + mRunning = true; + break; + } + case kWhatStop: { + int32_t err = thiz->onStop(); + Reply(msg, &err); + break; + } + case kWhatReset: { + thiz->onReset(); + mRunning = false; + Reply(msg); + break; + } + case kWhatRelease: { + thiz->onRelease(); + mRunning = false; + Reply(msg); + break; + } + default: { + ALOGD("Unrecognized msg: %d", msg->what()); + break; + } + } +} + +//////////////////////////////////////////////////////////////////////////////// + +namespace { + +struct DummyReadView : public C2ReadView { + DummyReadView() : C2ReadView(C2_NO_INIT) {} +}; + +} // namespace + +SimpleC2Component::SimpleC2Component( + const std::shared_ptr &intf) + : mDummyReadView(DummyReadView()), + mIntf(intf), + mLooper(new ALooper), + mHandler(new WorkHandler) { + mLooper->setName(intf->getName().c_str()); + (void)mLooper->registerHandler(mHandler); + mLooper->start(false, false, ANDROID_PRIORITY_VIDEO); +} + +SimpleC2Component::~SimpleC2Component() { + mLooper->unregisterHandler(mHandler->id()); + (void)mLooper->stop(); +} + +c2_status_t SimpleC2Component::setListener_vb( + const std::shared_ptr &listener, c2_blocking_t mayBlock) { + mHandler->setComponent(shared_from_this()); + + Mutexed::Locked state(mExecState); + if (state->mState == RUNNING) { + if (listener) { + return C2_BAD_STATE; + } else if (!mayBlock) { + return C2_BLOCKING; + } + } + state->mListener = listener; + // TODO: wait for listener change to have taken place before returning + // (e.g. 
if there is an ongoing listener callback) + return C2_OK; +} + +c2_status_t SimpleC2Component::queue_nb(std::list> * const items) { + { + Mutexed::Locked state(mExecState); + if (state->mState != RUNNING) { + return C2_BAD_STATE; + } + } + bool queueWasEmpty = false; + { + Mutexed::Locked queue(mWorkQueue); + queueWasEmpty = queue->empty(); + while (!items->empty()) { + queue->push_back(std::move(items->front())); + items->pop_front(); + } + } + if (queueWasEmpty) { + (new AMessage(WorkHandler::kWhatProcess, mHandler))->post(); + } + return C2_OK; +} + +c2_status_t SimpleC2Component::announce_nb(const std::vector &items) { + (void)items; + return C2_OMITTED; +} + +c2_status_t SimpleC2Component::flush_sm( + flush_mode_t flushMode, std::list>* const flushedWork) { + (void)flushMode; + { + Mutexed::Locked state(mExecState); + if (state->mState != RUNNING) { + return C2_BAD_STATE; + } + } + { + Mutexed::Locked queue(mWorkQueue); + queue->incGeneration(); + // TODO: queue->splicedBy(flushedWork, flushedWork->end()); + while (!queue->empty()) { + std::unique_ptr work = queue->pop_front(); + if (work) { + flushedWork->push_back(std::move(work)); + } + } + } + { + Mutexed::Locked pending(mPendingWork); + while (!pending->empty()) { + flushedWork->push_back(std::move(pending->begin()->second)); + pending->erase(pending->begin()); + } + } + + return C2_OK; +} + +c2_status_t SimpleC2Component::drain_nb(drain_mode_t drainMode) { + if (drainMode == DRAIN_CHAIN) { + return C2_OMITTED; + } + { + Mutexed::Locked state(mExecState); + if (state->mState != RUNNING) { + return C2_BAD_STATE; + } + } + bool queueWasEmpty = false; + { + Mutexed::Locked queue(mWorkQueue); + queueWasEmpty = queue->empty(); + queue->markDrain(drainMode); + } + if (queueWasEmpty) { + (new AMessage(WorkHandler::kWhatProcess, mHandler))->post(); + } + + return C2_OK; +} + +c2_status_t SimpleC2Component::start() { + Mutexed::Locked state(mExecState); + if (state->mState == RUNNING) { + return C2_BAD_STATE; + } + bool needsInit = (state->mState == UNINITIALIZED); + state.unlock(); + if (needsInit) { + sp reply; + (new AMessage(WorkHandler::kWhatInit, mHandler))->postAndAwaitResponse(&reply); + int32_t err; + CHECK(reply->findInt32("err", &err)); + if (err != C2_OK) { + return (c2_status_t)err; + } + } else { + (new AMessage(WorkHandler::kWhatStart, mHandler))->post(); + } + state.lock(); + state->mState = RUNNING; + return C2_OK; +} + +c2_status_t SimpleC2Component::stop() { + ALOGV("stop"); + { + Mutexed::Locked state(mExecState); + if (state->mState != RUNNING) { + return C2_BAD_STATE; + } + state->mState = STOPPED; + } + { + Mutexed::Locked queue(mWorkQueue); + queue->clear(); + } + { + Mutexed::Locked pending(mPendingWork); + pending->clear(); + } + sp reply; + (new AMessage(WorkHandler::kWhatStop, mHandler))->postAndAwaitResponse(&reply); + int32_t err; + CHECK(reply->findInt32("err", &err)); + if (err != C2_OK) { + return (c2_status_t)err; + } + return C2_OK; +} + +c2_status_t SimpleC2Component::reset() { + ALOGV("reset"); + { + Mutexed::Locked state(mExecState); + state->mState = UNINITIALIZED; + } + { + Mutexed::Locked queue(mWorkQueue); + queue->clear(); + } + { + Mutexed::Locked pending(mPendingWork); + pending->clear(); + } + sp reply; + (new AMessage(WorkHandler::kWhatReset, mHandler))->postAndAwaitResponse(&reply); + return C2_OK; +} + +c2_status_t SimpleC2Component::release() { + ALOGV("release"); + sp reply; + (new AMessage(WorkHandler::kWhatRelease, mHandler))->postAndAwaitResponse(&reply); + return C2_OK; +} + 
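The methods above are the whole non-blocking C2Component contract of SimpleC2Component: mExecState gates the UNINITIALIZED/STOPPED/RUNNING transitions while mWorkQueue feeds the looper thread, so every call returns quickly and the real work happens on the handler. A minimal client-side sketch of driving that contract, modeled on the SimplePlayer flow in codec2.cpp later in this change (NoOpListener and runOneBuffer are illustration-only names, not part of this patch):

#include <C2Component.h>
#include <C2Work.h>

#include <list>
#include <memory>
#include <vector>

// Illustration-only listener; a real client routes these callbacks back onto
// its own queues, as SimplePlayer does in codec2.cpp.
struct NoOpListener : public C2Component::Listener {
    void onWorkDone_nb(std::weak_ptr<C2Component>,
                       std::list<std::unique_ptr<C2Work>>) override {}
    void onTripped_nb(std::weak_ptr<C2Component>,
                      std::vector<std::shared_ptr<C2SettingResult>>) override {}
    void onError_nb(std::weak_ptr<C2Component>, uint32_t) override {}
};

// |work| is assumed to be fully populated (input buffer, ordinal, one
// C2Worklet) the same way SimplePlayer::play() prepares it.
c2_status_t runOneBuffer(const std::shared_ptr<C2Component> &comp,
                         std::unique_ptr<C2Work> work) {
    // The listener has to be installed while the component is stopped;
    // setListener_vb() refuses a new listener in the RUNNING state.
    c2_status_t err = comp->setListener_vb(std::make_shared<NoOpListener>(), C2_DONT_BLOCK);
    if (err != C2_OK) return err;

    // start() posts kWhatInit on first use (or after reset()) and waits for
    // the handler's reply before flipping mExecState to RUNNING.
    err = comp->start();
    if (err != C2_OK) return err;

    // queue_nb() only wakes the handler when the queue was empty, so batching
    // several works into one list is cheap.
    std::list<std::unique_ptr<C2Work>> items;
    items.push_back(std::move(work));
    err = comp->queue_nb(&items);
    if (err != C2_OK) return err;

    // Request the remaining output, then stop; completed work arrives
    // asynchronously through onWorkDone_nb().
    (void)comp->drain_nb(C2Component::DRAIN_COMPONENT_WITH_EOS);
    return comp->stop();
}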
+std::shared_ptr SimpleC2Component::intf() { + return mIntf; +} + +namespace { + +std::list> vec(std::unique_ptr &work) { + std::list> ret; + ret.push_back(std::move(work)); + return ret; +} + +} // namespace + +void SimpleC2Component::finish( + uint64_t frameIndex, std::function &)> fillWork) { + std::unique_ptr work; + { + Mutexed::Locked pending(mPendingWork); + if (pending->count(frameIndex) == 0) { + ALOGW("unknown frame index: %" PRIu64, frameIndex); + return; + } + work = std::move(pending->at(frameIndex)); + pending->erase(frameIndex); + } + if (work) { + fillWork(work); + std::shared_ptr listener = mExecState.lock()->mListener; + listener->onWorkDone_nb(shared_from_this(), vec(work)); + ALOGV("returning pending work"); + } +} + +void SimpleC2Component::cloneAndSend( + uint64_t frameIndex, + const std::unique_ptr ¤tWork, + std::function &)> fillWork) { + std::unique_ptr work(new C2Work); + if (currentWork->input.ordinal.frameIndex == frameIndex) { + work->input.flags = currentWork->input.flags; + work->input.ordinal = currentWork->input.ordinal; + } else { + Mutexed::Locked pending(mPendingWork); + if (pending->count(frameIndex) == 0) { + ALOGW("unknown frame index: %" PRIu64, frameIndex); + return; + } + work->input.flags = pending->at(frameIndex)->input.flags; + work->input.ordinal = pending->at(frameIndex)->input.ordinal; + } + work->worklets.emplace_back(new C2Worklet); + if (work) { + fillWork(work); + std::shared_ptr listener = mExecState.lock()->mListener; + listener->onWorkDone_nb(shared_from_this(), vec(work)); + ALOGV("cloned and sending work"); + } +} + +bool SimpleC2Component::processQueue() { + std::unique_ptr work; + uint64_t generation; + int32_t drainMode; + bool isFlushPending = false; + bool hasQueuedWork = false; + { + Mutexed::Locked queue(mWorkQueue); + if (queue->empty()) { + return false; + } + + generation = queue->generation(); + drainMode = queue->drainMode(); + isFlushPending = queue->popPendingFlush(); + work = queue->pop_front(); + hasQueuedWork = !queue->empty(); + } + if (isFlushPending) { + ALOGV("processing pending flush"); + c2_status_t err = onFlush_sm(); + if (err != C2_OK) { + ALOGD("flush err: %d", err); + // TODO: error + } + } + + if (!mOutputBlockPool) { + c2_status_t err = [this] { + // TODO: don't use query_vb + C2StreamFormatConfig::output outputFormat(0u); + std::vector> params; + c2_status_t err = intf()->query_vb( + { &outputFormat }, + { C2PortBlockPoolsTuning::output::PARAM_TYPE }, + C2_DONT_BLOCK, + ¶ms); + if (err != C2_OK && err != C2_BAD_INDEX) { + ALOGD("query err = %d", err); + return err; + } + C2BlockPool::local_id_t poolId = + outputFormat.value == C2FormatVideo + ? C2BlockPool::BASIC_GRAPHIC + : C2BlockPool::BASIC_LINEAR; + if (params.size()) { + C2PortBlockPoolsTuning::output *outputPools = + C2PortBlockPoolsTuning::output::From(params[0].get()); + if (outputPools && outputPools->flexCount() >= 1) { + poolId = outputPools->m.values[0]; + } + } + + err = GetCodec2BlockPool(poolId, shared_from_this(), &mOutputBlockPool); + ALOGD("Using output block pool with poolID %llu => got %llu - %d", + (unsigned long long)poolId, + (unsigned long long)( + mOutputBlockPool ? 
mOutputBlockPool->getLocalId() : 111000111), + err); + return err; + }(); + if (err != C2_OK) { + Mutexed::Locked state(mExecState); + std::shared_ptr listener = state->mListener; + state.unlock(); + listener->onError_nb(shared_from_this(), err); + return hasQueuedWork; + } + } + + if (!work) { + c2_status_t err = drain(drainMode, mOutputBlockPool); + if (err != C2_OK) { + Mutexed::Locked state(mExecState); + std::shared_ptr listener = state->mListener; + state.unlock(); + listener->onError_nb(shared_from_this(), err); + } + return hasQueuedWork; + } + + { + std::vector updates; + for (const std::unique_ptr ¶m: work->input.configUpdate) { + if (param) { + updates.emplace_back(param.get()); + } + } + if (!updates.empty()) { + std::vector> failures; + c2_status_t err = intf()->config_vb(updates, C2_MAY_BLOCK, &failures); + ALOGD("applied %zu configUpdates => %s (%d)", updates.size(), asString(err), err); + } + } + + ALOGV("start processing frame #%" PRIu64, work->input.ordinal.frameIndex.peeku()); + process(work, mOutputBlockPool); + ALOGV("processed frame #%" PRIu64, work->input.ordinal.frameIndex.peeku()); + { + Mutexed::Locked queue(mWorkQueue); + if (queue->generation() != generation) { + ALOGD("work form old generation: was %" PRIu64 " now %" PRIu64, + queue->generation(), generation); + work->result = C2_NOT_FOUND; + queue.unlock(); + { + Mutexed::Locked state(mExecState); + std::shared_ptr listener = state->mListener; + state.unlock(); + listener->onWorkDone_nb(shared_from_this(), vec(work)); + } + queue.lock(); + return hasQueuedWork; + } + } + if (work->workletsProcessed != 0u) { + Mutexed::Locked state(mExecState); + ALOGV("returning this work"); + std::shared_ptr listener = state->mListener; + state.unlock(); + listener->onWorkDone_nb(shared_from_this(), vec(work)); + } else { + ALOGV("queue pending work"); + work->input.buffers.clear(); + std::unique_ptr unexpected; + { + Mutexed::Locked pending(mPendingWork); + uint64_t frameIndex = work->input.ordinal.frameIndex.peeku(); + if (pending->count(frameIndex) != 0) { + unexpected = std::move(pending->at(frameIndex)); + pending->erase(frameIndex); + } + (void)pending->insert({ frameIndex, std::move(work) }); + } + if (unexpected) { + ALOGD("unexpected pending work"); + unexpected->result = C2_CORRUPTED; + Mutexed::Locked state(mExecState); + std::shared_ptr listener = state->mListener; + state.unlock(); + listener->onWorkDone_nb(shared_from_this(), vec(unexpected)); + } + } + return hasQueuedWork; +} + +std::shared_ptr SimpleC2Component::createLinearBuffer( + const std::shared_ptr &block) { + return createLinearBuffer(block, block->offset(), block->size()); +} + +std::shared_ptr SimpleC2Component::createLinearBuffer( + const std::shared_ptr &block, size_t offset, size_t size) { + return C2Buffer::CreateLinearBuffer(block->share(offset, size, ::C2Fence())); +} + +std::shared_ptr SimpleC2Component::createGraphicBuffer( + const std::shared_ptr &block) { + return createGraphicBuffer(block, C2Rect(block->width(), block->height())); +} + +std::shared_ptr SimpleC2Component::createGraphicBuffer( + const std::shared_ptr &block, const C2Rect &crop) { + return C2Buffer::CreateGraphicBuffer(block->share(crop, ::C2Fence())); +} + +} // namespace android diff --git a/media/codec2/components/base/SimpleC2Interface.cpp b/media/codec2/components/base/SimpleC2Interface.cpp new file mode 100644 index 0000000000000000000000000000000000000000..c849a4e3cc459680b0676efca6442980b4f4842d --- /dev/null +++ b/media/codec2/components/base/SimpleC2Interface.cpp 
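processQueue() above gives a subclass two ways to return output: fill the worklet inside process() and bump workletsProcessed (the synchronous path the FLAC decoder below takes), or leave the work untouched so it is parked in mPendingWork and completed later through finish(). A sketch of the asynchronous path, under the assumption of a hypothetical MyAsyncDec with an out-of-band decoder back-end (queueToDecoder and onFrameDecoded are invented for illustration):

#include <SimpleC2Component.h>

using namespace android;

// Hypothetical asynchronous decoder, shown only to illustrate the
// pending-work path in SimpleC2Component; queueToDecoder()/onFrameDecoded()
// stand in for a real decoder back-end and its completion callback.
class MyAsyncDec : public SimpleC2Component {
public:
    using SimpleC2Component::SimpleC2Component;

    void process(const std::unique_ptr<C2Work> &work,
                 const std::shared_ptr<C2BlockPool> &pool) override {
        (void)pool;
        // Hand the input to the back-end but leave work->worklets untouched:
        // with workletsProcessed still 0, processQueue() parks this work in
        // mPendingWork keyed by its frame index.
        queueToDecoder(work->input);
    }

    // Called by the back-end once the frame is ready.
    void onFrameDecoded(uint64_t frameIndex, const std::shared_ptr<C2Buffer> &output) {
        // fillWork must not block; finish() pops the pending work, runs the
        // lambda, and returns the work through onWorkDone_nb().
        finish(frameIndex, [&output](const std::unique_ptr<C2Work> &work) {
            work->worklets.front()->output.flags = work->input.flags;
            work->worklets.front()->output.ordinal = work->input.ordinal;
            work->worklets.front()->output.buffers = { output };
            work->workletsProcessed = 1u;
        });
    }

private:
    // Remaining SimpleC2Component hooks, trivially stubbed for the sketch.
    c2_status_t onInit() override { return C2_OK; }
    c2_status_t onStop() override { return C2_OK; }
    void onReset() override {}
    void onRelease() override {}
    c2_status_t onFlush_sm() override { return C2_OK; }
    c2_status_t drain(uint32_t, const std::shared_ptr<C2BlockPool> &) override { return C2_OK; }
    void queueToDecoder(const C2FrameData &) { /* hand off to a worker thread */ }
};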
@@ -0,0 +1,315 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "SimpleC2Interface" +#include + +// use MediaDefs here vs. MediaCodecConstants as this is not MediaCodec specific/dependent +#include + +#include + +namespace android { + +/* SimpleInterface */ + +SimpleInterface::BaseParams::BaseParams( + const std::shared_ptr &reflector, + C2String name, + C2Component::kind_t kind, + C2Component::domain_t domain, + C2String mediaType, + std::vector aliases) + : C2InterfaceHelper(reflector) { + setDerivedInstance(this); + + addParameter( + DefineParam(mName, C2_PARAMKEY_COMPONENT_NAME) + .withConstValue(AllocSharedString(name.c_str())) + .build()); + + if (aliases.size()) { + C2String joined; + for (const C2String &alias : aliases) { + if (joined.length()) { + joined += ","; + } + joined += alias; + } + addParameter( + DefineParam(mAliases, C2_PARAMKEY_COMPONENT_ALIASES) + .withConstValue(AllocSharedString(joined.c_str())) + .build()); + } + + addParameter( + DefineParam(mKind, C2_PARAMKEY_COMPONENT_KIND) + .withConstValue(new C2ComponentKindSetting(kind)) + .build()); + + addParameter( + DefineParam(mDomain, C2_PARAMKEY_COMPONENT_DOMAIN) + .withConstValue(new C2ComponentDomainSetting(domain)) + .build()); + + // simple interfaces have single streams + addParameter( + DefineParam(mInputStreamCount, C2_PARAMKEY_INPUT_STREAM_COUNT) + .withConstValue(new C2PortStreamCountTuning::input(1)) + .build()); + + addParameter( + DefineParam(mOutputStreamCount, C2_PARAMKEY_OUTPUT_STREAM_COUNT) + .withConstValue(new C2PortStreamCountTuning::output(1)) + .build()); + + // set up buffer formats and allocators + + // default to linear buffers and no media type + C2BufferData::type_t rawBufferType = C2BufferData::LINEAR; + C2String rawMediaType; + C2Allocator::id_t rawAllocator = C2AllocatorStore::DEFAULT_LINEAR; + C2BlockPool::local_id_t rawPoolId = C2BlockPool::BASIC_LINEAR; + C2BufferData::type_t codedBufferType = C2BufferData::LINEAR; + C2Allocator::id_t codedAllocator = C2AllocatorStore::DEFAULT_LINEAR; + C2BlockPool::local_id_t codedPoolId = C2BlockPool::BASIC_LINEAR; + + switch (domain) { + case C2Component::DOMAIN_IMAGE: + case C2Component::DOMAIN_VIDEO: + // TODO: should we define raw image? 
The only difference is timestamp handling + rawBufferType = C2BufferData::GRAPHIC; + rawMediaType = MEDIA_MIMETYPE_VIDEO_RAW; + rawAllocator = C2AllocatorStore::DEFAULT_GRAPHIC; + rawPoolId = C2BlockPool::BASIC_GRAPHIC; + break; + case C2Component::DOMAIN_AUDIO: + rawBufferType = C2BufferData::LINEAR; + rawMediaType = MEDIA_MIMETYPE_AUDIO_RAW; + rawAllocator = C2AllocatorStore::DEFAULT_LINEAR; + rawPoolId = C2BlockPool::BASIC_LINEAR; + break; + default: + break; + } + bool isEncoder = kind == C2Component::KIND_ENCODER; + + // handle raw decoders + if (mediaType == rawMediaType) { + codedBufferType = rawBufferType; + codedAllocator = rawAllocator; + codedPoolId = rawPoolId; + } + + addParameter( + DefineParam(mInputFormat, C2_PARAMKEY_INPUT_STREAM_BUFFER_TYPE) + .withConstValue(new C2StreamBufferTypeSetting::input( + 0u, isEncoder ? rawBufferType : codedBufferType)) + .build()); + + addParameter( + DefineParam(mInputMediaType, C2_PARAMKEY_INPUT_MEDIA_TYPE) + .withConstValue(AllocSharedString( + isEncoder ? rawMediaType : mediaType)) + .build()); + + addParameter( + DefineParam(mOutputFormat, C2_PARAMKEY_OUTPUT_STREAM_BUFFER_TYPE) + .withConstValue(new C2StreamBufferTypeSetting::output( + 0u, isEncoder ? codedBufferType : rawBufferType)) + .build()); + + addParameter( + DefineParam(mOutputMediaType, C2_PARAMKEY_OUTPUT_MEDIA_TYPE) + .withConstValue(AllocSharedString( + isEncoder ? mediaType : rawMediaType)) + .build()); + + C2Allocator::id_t inputAllocators[1] = { isEncoder ? rawAllocator : codedAllocator }; + C2Allocator::id_t outputAllocators[1] = { isEncoder ? codedAllocator : rawAllocator }; + C2BlockPool::local_id_t outputPoolIds[1] = { isEncoder ? codedPoolId : rawPoolId }; + + addParameter( + DefineParam(mInputAllocators, C2_PARAMKEY_INPUT_ALLOCATORS) + .withDefault(C2PortAllocatorsTuning::input::AllocShared(inputAllocators)) + .withFields({ C2F(mInputAllocators, m.values[0]).any(), + C2F(mInputAllocators, m.values).inRange(0, 1) }) + .withSetter(Setter::NonStrictValuesWithNoDeps) + .build()); + + addParameter( + DefineParam(mOutputAllocators, C2_PARAMKEY_OUTPUT_ALLOCATORS) + .withDefault(C2PortAllocatorsTuning::output::AllocShared(outputAllocators)) + .withFields({ C2F(mOutputAllocators, m.values[0]).any(), + C2F(mOutputAllocators, m.values).inRange(0, 1) }) + .withSetter(Setter::NonStrictValuesWithNoDeps) + .build()); + + addParameter( + DefineParam(mOutputPoolIds, C2_PARAMKEY_OUTPUT_BLOCK_POOLS) + .withDefault(C2PortBlockPoolsTuning::output::AllocShared(outputPoolIds)) + .withFields({ C2F(mOutputPoolIds, m.values[0]).any(), + C2F(mOutputPoolIds, m.values).inRange(0, 1) }) + .withSetter(Setter::NonStrictValuesWithNoDeps) + .build()); + + // add stateless params + addParameter( + DefineParam(mSubscribedParamIndices, C2_PARAMKEY_SUBSCRIBED_PARAM_INDICES) + .withDefault(C2SubscribedParamIndicesTuning::AllocShared(0u)) + .withFields({ C2F(mSubscribedParamIndices, m.values[0]).any(), + C2F(mSubscribedParamIndices, m.values).any() }) + .withSetter(Setter::NonStrictValuesWithNoDeps) + .build()); + + /* TODO + + addParameter( + DefineParam(mCurrentWorkOrdinal, C2_PARAMKEY_CURRENT_WORK) + .withDefault(new C2CurrentWorkTuning()) + .withFields({ C2F(mCurrentWorkOrdinal, m.timeStamp).any(), + C2F(mCurrentWorkOrdinal, m.frameIndex).any(), + C2F(mCurrentWorkOrdinal, m.customOrdinal).any() }) + .withSetter(Setter::NonStrictValuesWithNoDeps) + .build()); + + addParameter( + DefineParam(mLastInputQueuedWorkOrdinal, C2_PARAMKEY_LAST_INPUT_QUEUED) + .withDefault(new C2LastWorkQueuedTuning::input()) + 
.withFields({ C2F(mLastInputQueuedWorkOrdinal, m.timeStamp).any(), + C2F(mLastInputQueuedWorkOrdinal, m.frameIndex).any(), + C2F(mLastInputQueuedWorkOrdinal, m.customOrdinal).any() }) + .withSetter(Setter::NonStrictValuesWithNoDeps) + .build()); + + addParameter( + DefineParam(mLastOutputQueuedWorkOrdinal, C2_PARAMKEY_LAST_OUTPUT_QUEUED) + .withDefault(new C2LastWorkQueuedTuning::output()) + .withFields({ C2F(mLastOutputQueuedWorkOrdinal, m.timeStamp).any(), + C2F(mLastOutputQueuedWorkOrdinal, m.frameIndex).any(), + C2F(mLastOutputQueuedWorkOrdinal, m.customOrdinal).any() }) + .withSetter(Setter::NonStrictValuesWithNoDeps) + .build()); + + std::shared_ptr mOutOfMemory; + + std::shared_ptr mInputConfigCounter; + std::shared_ptr mOutputConfigCounter; + std::shared_ptr mDirectConfigCounter; + + */ +} + +void SimpleInterface::BaseParams::noInputLatency() { + addParameter( + DefineParam(mRequestedInputDelay, C2_PARAMKEY_INPUT_DELAY_REQUEST) + .withConstValue(new C2PortRequestedDelayTuning::input(0u)) + .build()); + + addParameter( + DefineParam(mActualInputDelay, C2_PARAMKEY_INPUT_DELAY) + .withConstValue(new C2PortActualDelayTuning::input(0u)) + .build()); +} + +void SimpleInterface::BaseParams::noOutputLatency() { + addParameter( + DefineParam(mRequestedOutputDelay, C2_PARAMKEY_OUTPUT_DELAY_REQUEST) + .withConstValue(new C2PortRequestedDelayTuning::output(0u)) + .build()); + + addParameter( + DefineParam(mActualOutputDelay, C2_PARAMKEY_OUTPUT_DELAY) + .withConstValue(new C2PortActualDelayTuning::output(0u)) + .build()); +} + +void SimpleInterface::BaseParams::noPipelineLatency() { + addParameter( + DefineParam(mRequestedPipelineDelay, C2_PARAMKEY_PIPELINE_DELAY_REQUEST) + .withConstValue(new C2RequestedPipelineDelayTuning(0u)) + .build()); + + addParameter( + DefineParam(mActualPipelineDelay, C2_PARAMKEY_PIPELINE_DELAY) + .withConstValue(new C2ActualPipelineDelayTuning(0u)) + .build()); +} + +void SimpleInterface::BaseParams::noPrivateBuffers() { + addParameter( + DefineParam(mPrivateAllocators, C2_PARAMKEY_PRIVATE_ALLOCATORS) + .withConstValue(C2PrivateAllocatorsTuning::AllocShared(0u)) + .build()); + + addParameter( + DefineParam(mMaxPrivateBufferCount, C2_PARAMKEY_MAX_PRIVATE_BUFFER_COUNT) + .withConstValue(C2MaxPrivateBufferCountTuning::AllocShared(0u)) + .build()); + + addParameter( + DefineParam(mPrivatePoolIds, C2_PARAMKEY_PRIVATE_BLOCK_POOLS) + .withConstValue(C2PrivateBlockPoolsTuning::AllocShared(0u)) + .build()); +} + +void SimpleInterface::BaseParams::noInputReferences() { + addParameter( + DefineParam(mMaxInputReferenceAge, C2_PARAMKEY_INPUT_MAX_REFERENCE_AGE) + .withConstValue(new C2StreamMaxReferenceAgeTuning::input(0u)) + .build()); + + addParameter( + DefineParam(mMaxInputReferenceCount, C2_PARAMKEY_INPUT_MAX_REFERENCE_COUNT) + .withConstValue(new C2StreamMaxReferenceCountTuning::input(0u)) + .build()); +} + +void SimpleInterface::BaseParams::noOutputReferences() { + addParameter( + DefineParam(mMaxOutputReferenceAge, C2_PARAMKEY_OUTPUT_MAX_REFERENCE_AGE) + .withConstValue(new C2StreamMaxReferenceAgeTuning::output(0u)) + .build()); + + addParameter( + DefineParam(mMaxOutputReferenceCount, C2_PARAMKEY_OUTPUT_MAX_REFERENCE_COUNT) + .withConstValue(new C2StreamMaxReferenceCountTuning::output(0u)) + .build()); +} + +void SimpleInterface::BaseParams::noTimeStretch() { + addParameter( + DefineParam(mTimeStretch, C2_PARAMKEY_TIME_STRETCH) + .withConstValue(new C2ComponentTimeStretchTuning(1.f)) + .build()); +} + +/* + Clients need to handle the following base params due to custom 
dependency. + + std::shared_ptr mApiLevel; + std::shared_ptr mApiFeatures; + std::shared_ptr mAttrib; + + std::shared_ptr mSuggestedInputBufferCount; + std::shared_ptr mSuggestedOutputBufferCount; + + std::shared_ptr mTripped; + +*/ + +} // namespace android diff --git a/media/codec2/components/base/include/SimpleC2Component.h b/media/codec2/components/base/include/SimpleC2Component.h new file mode 100644 index 0000000000000000000000000000000000000000..b3a98f460736d1256907a826f416b4b8c29aa8b3 --- /dev/null +++ b/media/codec2/components/base/include/SimpleC2Component.h @@ -0,0 +1,244 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef SIMPLE_C2_COMPONENT_H_ +#define SIMPLE_C2_COMPONENT_H_ + +#include +#include + +#include + +#include +#include +#include + +namespace android { + +class SimpleC2Component + : public C2Component, public std::enable_shared_from_this { +public: + explicit SimpleC2Component( + const std::shared_ptr &intf); + virtual ~SimpleC2Component(); + + // C2Component + // From C2Component + virtual c2_status_t setListener_vb( + const std::shared_ptr &listener, c2_blocking_t mayBlock) override; + virtual c2_status_t queue_nb(std::list>* const items) override; + virtual c2_status_t announce_nb(const std::vector &items) override; + virtual c2_status_t flush_sm( + flush_mode_t mode, std::list>* const flushedWork) override; + virtual c2_status_t drain_nb(drain_mode_t mode) override; + virtual c2_status_t start() override; + virtual c2_status_t stop() override; + virtual c2_status_t reset() override; + virtual c2_status_t release() override; + virtual std::shared_ptr intf() override; + + // for handler + bool processQueue(); + +protected: + /** + * Initialize internal states of the component according to the config set + * in the interface. + * + * This method is called during start(), but only at the first invocation or + * after reset(). + */ + virtual c2_status_t onInit() = 0; + + /** + * Stop the component. + */ + virtual c2_status_t onStop() = 0; + + /** + * Reset the component. + */ + virtual void onReset() = 0; + + /** + * Release the component. + */ + virtual void onRelease() = 0; + + /** + * Flush the component. + */ + virtual c2_status_t onFlush_sm() = 0; + + /** + * Process the given work and finish pending work using finish(). + * + * \param[in,out] work the work to process + * \param[in] pool the pool to use for allocating output blocks. + */ + virtual void process( + const std::unique_ptr &work, + const std::shared_ptr &pool) = 0; + + /** + * Drain the component and finish pending work using finish(). + * + * \param[in] drainMode mode of drain. + * \param[in] pool the pool to use for allocating output blocks. + * + * \retval C2_OK The component has drained all pending output + * work. + * \retval C2_OMITTED Unsupported mode (e.g. 
DRAIN_CHAIN) + */ + virtual c2_status_t drain( + uint32_t drainMode, + const std::shared_ptr &pool) = 0; + + // for derived classes + /** + * Finish pending work. + * + * This method will retrieve the pending work according to |frameIndex| and + * feed the work into |fillWork| function. |fillWork| must be + * "non-blocking". Once |fillWork| returns the filled work will be returned + * to the client. + * + * \param[in] frameIndex the index of the pending work + * \param[in] fillWork the function to fill the retrieved work. + */ + void finish(uint64_t frameIndex, std::function &)> fillWork); + + /** + * Clone pending or current work and send the work back to client. + * + * This method will retrieve and clone the pending or current work according + * to |frameIndex| and feed the work into |fillWork| function. |fillWork| + * must be "non-blocking". Once |fillWork| returns the filled work will be + * returned to the client. + * + * \param[in] frameIndex the index of the work + * \param[in] currentWork the current work under processing + * \param[in] fillWork the function to fill the retrieved work. + */ + void cloneAndSend( + uint64_t frameIndex, + const std::unique_ptr ¤tWork, + std::function &)> fillWork); + + + std::shared_ptr createLinearBuffer( + const std::shared_ptr &block); + + std::shared_ptr createLinearBuffer( + const std::shared_ptr &block, size_t offset, size_t size); + + std::shared_ptr createGraphicBuffer( + const std::shared_ptr &block); + + std::shared_ptr createGraphicBuffer( + const std::shared_ptr &block, + const C2Rect &crop); + + static constexpr uint32_t NO_DRAIN = ~0u; + + C2ReadView mDummyReadView; + +private: + const std::shared_ptr mIntf; + + class WorkHandler : public AHandler { + public: + enum { + kWhatProcess, + kWhatInit, + kWhatStart, + kWhatStop, + kWhatReset, + kWhatRelease, + }; + + WorkHandler(); + ~WorkHandler() override = default; + + void setComponent(const std::shared_ptr &thiz); + + protected: + void onMessageReceived(const sp &msg) override; + + private: + std::weak_ptr mThiz; + bool mRunning; + }; + + enum { + UNINITIALIZED, + STOPPED, + RUNNING, + }; + + struct ExecState { + ExecState() : mState(UNINITIALIZED) {} + + int mState; + std::shared_ptr mListener; + }; + Mutexed mExecState; + + sp mLooper; + sp mHandler; + + class WorkQueue { + public: + inline WorkQueue() : mFlush(false), mGeneration(0ul) {} + + inline uint64_t generation() const { return mGeneration; } + inline void incGeneration() { ++mGeneration; mFlush = true; } + + std::unique_ptr pop_front(); + void push_back(std::unique_ptr work); + bool empty() const; + uint32_t drainMode() const; + void markDrain(uint32_t drainMode); + inline bool popPendingFlush() { + bool flush = mFlush; + mFlush = false; + return flush; + } + void clear(); + + private: + struct Entry { + std::unique_ptr work; + uint32_t drainMode; + }; + + bool mFlush; + uint64_t mGeneration; + std::list mQueue; + }; + Mutexed mWorkQueue; + + typedef std::unordered_map> PendingWork; + Mutexed mPendingWork; + + std::shared_ptr mOutputBlockPool; + + SimpleC2Component() = delete; +}; + +} // namespace android + +#endif // SIMPLE_C2_COMPONENT_H_ diff --git a/media/codec2/components/base/include/SimpleC2Interface.h b/media/codec2/components/base/include/SimpleC2Interface.h new file mode 100644 index 0000000000000000000000000000000000000000..2051d3d028d207054c40e115c37a7374d77fbcbb --- /dev/null +++ b/media/codec2/components/base/include/SimpleC2Interface.h @@ -0,0 +1,236 @@ +/* + * Copyright (C) 2018 The Android Open Source Project 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SIMPLE_C2_INTERFACE_H_ +#define ANDROID_SIMPLE_C2_INTERFACE_H_ + +#include +#include +#include + +namespace android { + +/** + * Wrap a common interface object (such as Codec2Client::Interface, or C2InterfaceHelper into + * a C2ComponentInterface. + * + * \param T common interface type + */ +template +class SimpleC2Interface : public C2ComponentInterface { +public: + SimpleC2Interface(const char *name, c2_node_id_t id, const std::shared_ptr &impl) + : mName(name), + mId(id), + mImpl(impl) { + } + + ~SimpleC2Interface() override = default; + + // From C2ComponentInterface + C2String getName() const override { return mName; } + c2_node_id_t getId() const override { return mId; } + c2_status_t query_vb( + const std::vector &stackParams, + const std::vector &heapParamIndices, + c2_blocking_t mayBlock, + std::vector>* const heapParams) const override { + return mImpl->query(stackParams, heapParamIndices, mayBlock, heapParams); + } + c2_status_t config_vb( + const std::vector ¶ms, + c2_blocking_t mayBlock, + std::vector>* const failures) override { + return mImpl->config(params, mayBlock, failures); + } + c2_status_t createTunnel_sm(c2_node_id_t) override { return C2_OMITTED; } + c2_status_t releaseTunnel_sm(c2_node_id_t) override { return C2_OMITTED; } + c2_status_t querySupportedParams_nb( + std::vector> * const params) const override { + return mImpl->querySupportedParams(params); + } + c2_status_t querySupportedValues_vb( + std::vector &fields, + c2_blocking_t mayBlock) const override { + return mImpl->querySupportedValues(fields, mayBlock); + } + +private: + C2String mName; + const c2_node_id_t mId; + const std::shared_ptr mImpl; +}; + +/** + * Utility classes for common interfaces. + */ +template<> +class SimpleC2Interface { +public: + /** + * Base Codec 2.0 parameters required for all components. + */ + struct BaseParams : C2InterfaceHelper { + explicit BaseParams( + const std::shared_ptr &helper, + C2String name, + C2Component::kind_t kind, + C2Component::domain_t domain, + C2String mediaType, + std::vector aliases = std::vector()); + + /// Marks that this component has no input latency. Otherwise, component must + /// add support for C2PortRequestedDelayTuning::input and C2PortActualDelayTuning::input. + void noInputLatency(); + + /// Marks that this component has no output latency. Otherwise, component must + /// add support for C2PortRequestedDelayTuning::output and C2PortActualDelayTuning::output. + void noOutputLatency(); + + /// Marks that this component has no pipeline latency. Otherwise, component must + /// add support for C2RequestedPipelineDelayTuning and C2ActualPipelineDelayTuning. + void noPipelineLatency(); + + /// Marks that this component has no need for private buffers. Otherwise, component must + /// add support for C2MaxPrivateBufferCountTuning, C2PrivateAllocatorsTuning and + /// C2PrivateBlockPoolsTuning. 
+ void noPrivateBuffers(); + + /// Marks that this component holds no references to input buffers. Otherwise, component + /// must add support for C2StreamMaxReferenceAgeTuning::input and + /// C2StreamMaxReferenceCountTuning::input. + void noInputReferences(); + + /// Marks that this component holds no references to output buffers. Otherwise, component + /// must add support for C2StreamMaxReferenceAgeTuning::output and + /// C2StreamMaxReferenceCountTuning::output. + void noOutputReferences(); + + /// Marks that this component does not stretch time. Otherwise, component + /// must add support for C2ComponentTimeStretchTuning. + void noTimeStretch(); + + std::shared_ptr mApiLevel; + std::shared_ptr mApiFeatures; + + std::shared_ptr mPlatformLevel; + std::shared_ptr mPlatformFeatures; + + std::shared_ptr mName; + std::shared_ptr mAliases; + std::shared_ptr mKind; + std::shared_ptr mDomain; + std::shared_ptr mAttrib; + std::shared_ptr mTimeStretch; + + std::shared_ptr mInputMediaType; + std::shared_ptr mOutputMediaType; + std::shared_ptr mInputFormat; + std::shared_ptr mOutputFormat; + + std::shared_ptr mRequestedInputDelay; + std::shared_ptr mRequestedOutputDelay; + std::shared_ptr mRequestedPipelineDelay; + + std::shared_ptr mActualInputDelay; + std::shared_ptr mActualOutputDelay; + std::shared_ptr mActualPipelineDelay; + + std::shared_ptr mMaxInputReferenceAge; + std::shared_ptr mMaxInputReferenceCount; + std::shared_ptr mMaxOutputReferenceAge; + std::shared_ptr mMaxOutputReferenceCount; + std::shared_ptr mMaxPrivateBufferCount; + + std::shared_ptr mInputStreamCount; + std::shared_ptr mOutputStreamCount; + + std::shared_ptr mSubscribedParamIndices; + std::shared_ptr mSuggestedInputBufferCount; + std::shared_ptr mSuggestedOutputBufferCount; + + std::shared_ptr mCurrentWorkOrdinal; + std::shared_ptr mLastInputQueuedWorkOrdinal; + std::shared_ptr mLastOutputQueuedWorkOrdinal; + + std::shared_ptr mInputAllocators; + std::shared_ptr mOutputAllocators; + std::shared_ptr mPrivateAllocators; + std::shared_ptr mOutputPoolIds; + std::shared_ptr mPrivatePoolIds; + + std::shared_ptr mTripped; + std::shared_ptr mOutOfMemory; + + std::shared_ptr mInputConfigCounter; + std::shared_ptr mOutputConfigCounter; + std::shared_ptr mDirectConfigCounter; + }; +}; + +template +using SimpleInterface = SimpleC2Interface; + +template +std::shared_ptr AllocSharedString(const Args(&... args), const char *str) { + size_t len = strlen(str) + 1; + std::shared_ptr ret = T::AllocShared(len, args...); + strcpy(ret->m.value, str); + return ret; +} + +template +std::shared_ptr AllocSharedString(const Args(&... 
args), const std::string &str) { + std::shared_ptr ret = T::AllocShared(str.length() + 1, args...); + strcpy(ret->m.value, str.c_str()); + return ret; +} + +template +struct Setter { + typedef typename std::remove_reference::type type; + + static C2R NonStrictValueWithNoDeps( + bool mayBlock, C2InterfaceHelper::C2P &me) { + (void)mayBlock; + return me.F(me.v.value).validatePossible(me.v.value); + } + + static C2R NonStrictValuesWithNoDeps( + bool mayBlock, C2InterfaceHelper::C2P &me) { + (void)mayBlock; + C2R res = C2R::Ok(); + for (size_t ix = 0; ix < me.v.flexCount(); ++ix) { + res.plus(me.F(me.v.m.values[ix]).validatePossible(me.v.m.values[ix])); + } + return res; + } + + static C2R StrictValueWithNoDeps( + bool mayBlock, + const C2InterfaceHelper::C2P &old, + C2InterfaceHelper::C2P &me) { + (void)mayBlock; + if (!me.F(me.v.value).supportsNow(me.v.value)) { + me.set().value = old.v.value; + } + return me.F(me.v.value).validatePossible(me.v.value); + } +}; + +} // namespace android + +#endif // ANDROID_SIMPLE_C2_INTERFACE_H_ diff --git a/media/codec2/components/cmds/Android.bp b/media/codec2/components/cmds/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..8fb9bf990eac1b84c410f362f2174c197465a744 --- /dev/null +++ b/media/codec2/components/cmds/Android.bp @@ -0,0 +1,40 @@ +cc_binary { + name: "codec2", + defaults: ["libstagefright_codec2-impl-defaults"], + + srcs: [ + "codec2.cpp", + ], + + include_dirs: [ + ], + + shared_libs: [ + "libbase", + "libbinder", + "libcutils", + "libgui", + "liblog", + "libmediaextractor", + "libstagefright", + "libstagefright_foundation", + "libui", + "libutils", + ], + + cflags: [ + "-Werror", + "-Wall", + ], + + sanitize: { + cfi: true, + misc_undefined: [ + "unsigned-integer-overflow", + "signed-integer-overflow", + ], + diag: { + cfi: true, + }, + }, +} diff --git a/media/codec2/components/cmds/codec2.cpp b/media/codec2/components/cmds/codec2.cpp new file mode 100644 index 0000000000000000000000000000000000000000..f2cf545b8237eb8229635274491bb89382945933 --- /dev/null +++ b/media/codec2/components/cmds/codec2.cpp @@ -0,0 +1,483 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include +#include +#include +#include +#include + +#include + +//#define LOG_NDEBUG 0 +#define LOG_TAG "codec2" +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include + +using namespace android; +using namespace std::chrono_literals; + +namespace { + +class LinearBuffer : public C2Buffer { +public: + explicit LinearBuffer(const std::shared_ptr &block) + : C2Buffer({ block->share(block->offset(), block->size(), ::C2Fence()) }) {} +}; + +class Listener; + +class SimplePlayer { +public: + SimplePlayer(); + ~SimplePlayer(); + + void onWorkDone(std::weak_ptr component, + std::list> workItems); + void onTripped(std::weak_ptr component, + std::vector> settingResult); + void onError(std::weak_ptr component, uint32_t errorCode); + + void play(const sp &source); + +private: + typedef std::unique_lock ULock; + + std::shared_ptr mListener; + std::shared_ptr mComponent; + + sp mProducerListener; + + std::atomic_int mLinearPoolId; + + std::shared_ptr mAllocIon; + std::shared_ptr mLinearPool; + + std::mutex mQueueLock; + std::condition_variable mQueueCondition; + std::list> mWorkQueue; + + std::mutex mProcessedLock; + std::condition_variable mProcessedCondition; + std::list> mProcessedWork; + + sp mSurface; + sp mComposerClient; + sp mControl; +}; + +class Listener : public C2Component::Listener { +public: + explicit Listener(SimplePlayer *thiz) : mThis(thiz) {} + virtual ~Listener() = default; + + virtual void onWorkDone_nb(std::weak_ptr component, + std::list> workItems) override { + mThis->onWorkDone(component, std::move(workItems)); + } + + virtual void onTripped_nb(std::weak_ptr component, + std::vector> settingResult) override { + mThis->onTripped(component, settingResult); + } + + virtual void onError_nb(std::weak_ptr component, + uint32_t errorCode) override { + mThis->onError(component, errorCode); + } + +private: + SimplePlayer * const mThis; +}; + + +SimplePlayer::SimplePlayer() + : mListener(new Listener(this)), + mProducerListener(new DummyProducerListener), + mLinearPoolId(C2BlockPool::PLATFORM_START), + mComposerClient(new SurfaceComposerClient) { + CHECK_EQ(mComposerClient->initCheck(), (status_t)OK); + + std::shared_ptr store = GetCodec2PlatformAllocatorStore(); + CHECK_EQ(store->fetchAllocator(C2AllocatorStore::DEFAULT_LINEAR, &mAllocIon), C2_OK); + mLinearPool = std::make_shared(mAllocIon, mLinearPoolId++); + + mControl = mComposerClient->createSurface( + String8("A Surface"), + 1280, + 800, + HAL_PIXEL_FORMAT_YV12); + //PIXEL_FORMAT_RGB_565); + + CHECK(mControl != nullptr); + CHECK(mControl->isValid()); + + SurfaceComposerClient::Transaction{} + .setLayer(mControl, INT_MAX) + .show(mControl) + .apply(); + + mSurface = mControl->getSurface(); + CHECK(mSurface != nullptr); + mSurface->connect(NATIVE_WINDOW_API_CPU, mProducerListener); +} + +SimplePlayer::~SimplePlayer() { + mComposerClient->dispose(); +} + +void SimplePlayer::onWorkDone( + std::weak_ptr component, std::list> workItems) { + ALOGV("SimplePlayer::onWorkDone"); + (void) component; + ULock l(mProcessedLock); + for (auto & item : workItems) { + mProcessedWork.push_back(std::move(item)); + } + mProcessedCondition.notify_all(); +} + +void SimplePlayer::onTripped( + std::weak_ptr component, + std::vector> settingResult) { + (void) component; + (void) 
settingResult; + // TODO +} + +void SimplePlayer::onError(std::weak_ptr component, uint32_t errorCode) { + (void) component; + (void) errorCode; + // TODO +} + +void SimplePlayer::play(const sp &source) { + ALOGV("SimplePlayer::play"); + sp format; + (void) convertMetaDataToMessage(source->getFormat(), &format); + + sp csd0, csd1; + format->findBuffer("csd-0", &csd0); + format->findBuffer("csd-1", &csd1); + + status_t err = source->start(); + + if (err != OK) { + fprintf(stderr, "source returned error %d (0x%08x)\n", err, err); + return; + } + + std::shared_ptr store = GetCodec2PlatformComponentStore(); + std::shared_ptr component; + (void)store->createComponent("c2.android.avc.decoder", &component); + + (void)component->setListener_vb(mListener, C2_DONT_BLOCK); + std::unique_ptr pools = + C2PortBlockPoolsTuning::output::AllocUnique({ (uint64_t)C2BlockPool::BASIC_GRAPHIC }); + std::vector> result; + (void)component->intf()->config_vb({pools.get()}, C2_DONT_BLOCK, &result); + component->start(); + + for (int i = 0; i < 8; ++i) { + mWorkQueue.emplace_back(new C2Work); + } + + std::atomic_bool running(true); + std::thread surfaceThread([this, &running]() { + const sp &igbp = mSurface->getIGraphicBufferProducer(); + while (running) { + std::unique_ptr work; + { + ULock l(mProcessedLock); + if (mProcessedWork.empty()) { + mProcessedCondition.wait_for(l, 100ms); + if (mProcessedWork.empty()) { + continue; + } + } + work.swap(mProcessedWork.front()); + mProcessedWork.pop_front(); + } + int slot; + sp fence; + ALOGV("Render: Frame #%lld", work->worklets.front()->output.ordinal.frameIndex.peekll()); + const std::shared_ptr &output = work->worklets.front()->output.buffers[0]; + if (output) { + const C2ConstGraphicBlock block = output->data().graphicBlocks().front(); + native_handle_t *grallocHandle = UnwrapNativeCodec2GrallocHandle(block.handle()); + sp buffer(new GraphicBuffer( + grallocHandle, + GraphicBuffer::CLONE_HANDLE, + block.width(), + block.height(), + HAL_PIXEL_FORMAT_YV12, + 1, + (uint64_t)GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN, + block.width())); + native_handle_delete(grallocHandle); + + status_t err = igbp->attachBuffer(&slot, buffer); + + IGraphicBufferProducer::QueueBufferInput qbi( + (work->worklets.front()->output.ordinal.timestamp * 1000ll).peekll(), + false, + HAL_DATASPACE_UNKNOWN, + Rect(block.width(), block.height()), + NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW, + 0, + Fence::NO_FENCE, + 0); + IGraphicBufferProducer::QueueBufferOutput qbo; + err = igbp->queueBuffer(slot, qbi, &qbo); + } + + work->input.buffers.clear(); + work->worklets.clear(); + + ULock l(mQueueLock); + mWorkQueue.push_back(std::move(work)); + mQueueCondition.notify_all(); + } + ALOGV("render loop finished"); + }); + + long numFrames = 0; + mLinearPool.reset(new C2PooledBlockPool(mAllocIon, mLinearPoolId++)); + + for (;;) { + size_t size = 0u; + void *data = nullptr; + int64_t timestamp = 0u; + MediaBufferBase *buffer = nullptr; + sp csd; + if (csd0 != nullptr) { + csd = csd0; + csd0 = nullptr; + } else if (csd1 != nullptr) { + csd = csd1; + csd1 = nullptr; + } else { + status_t err = source->read(&buffer); + if (err != OK) { + CHECK(buffer == nullptr); + + if (err == INFO_FORMAT_CHANGED) { + continue; + } + + break; + } + MetaDataBase &meta = buffer->meta_data(); + CHECK(meta.findInt64(kKeyTime, ×tamp)); + + size = buffer->size(); + data = buffer->data(); + } + + if (csd != nullptr) { + size = csd->size(); + data = csd->data(); + } + + // Prepare C2Work + + std::unique_ptr work; + while 
(!work) { + ULock l(mQueueLock); + if (!mWorkQueue.empty()) { + work.swap(mWorkQueue.front()); + mWorkQueue.pop_front(); + } else { + mQueueCondition.wait_for(l, 100ms); + } + } + work->input.flags = (C2FrameData::flags_t)0; + work->input.ordinal.timestamp = timestamp; + work->input.ordinal.frameIndex = numFrames; + + std::shared_ptr block; + mLinearPool->fetchLinearBlock( + size, + { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE }, + &block); + C2WriteView view = block->map().get(); + if (view.error() != C2_OK) { + fprintf(stderr, "C2LinearBlock::map() failed : %d", view.error()); + break; + } + memcpy(view.base(), data, size); + + work->input.buffers.clear(); + work->input.buffers.emplace_back(new LinearBuffer(block)); + work->worklets.clear(); + work->worklets.emplace_back(new C2Worklet); + + std::list> items; + items.push_back(std::move(work)); + + ALOGV("Frame #%ld size = %zu", numFrames, size); + // DO THE DECODING + component->queue_nb(&items); + + if (buffer) { + buffer->release(); + buffer = nullptr; + } + + ++numFrames; + } + ALOGV("main loop finished"); + source->stop(); + running.store(false); + surfaceThread.join(); + + component->release(); + printf("\n"); +} + +} // namespace + +static void usage(const char *me) { + fprintf(stderr, "usage: %s [options] [input_filename]\n", me); + fprintf(stderr, " -h(elp)\n"); +} + +int main(int argc, char **argv) { + android::ProcessState::self()->startThreadPool(); + + int res; + while ((res = getopt(argc, argv, "h")) >= 0) { + switch (res) { + case 'h': + default: + { + usage(argv[0]); + exit(1); + break; + } + } + } + + argc -= optind; + argv += optind; + + if (argc < 1) { + fprintf(stderr, "No input file specified\n"); + return 1; + } + + status_t err = OK; + SimplePlayer player; + + for (int k = 0; k < argc && err == OK; ++k) { + const char *filename = argv[k]; + + sp dataSource = + DataSourceFactory::CreateFromURI(nullptr /* httpService */, filename); + + if (strncasecmp(filename, "sine:", 5) && dataSource == nullptr) { + fprintf(stderr, "Unable to create data source.\n"); + return 1; + } + + Vector > mediaSources; + sp mediaSource; + + sp extractor = MediaExtractorFactory::Create(dataSource); + + if (extractor == nullptr) { + fprintf(stderr, "could not create extractor.\n"); + return -1; + } + + sp meta = extractor->getMetaData(); + + if (meta != nullptr) { + const char *mime; + if (!meta->findCString(kKeyMIMEType, &mime)) { + fprintf(stderr, "extractor did not provide MIME type.\n"); + return -1; + } + } + + size_t numTracks = extractor->countTracks(); + + size_t i; + for (i = 0; i < numTracks; ++i) { + meta = extractor->getTrackMetaData(i, 0); + + if (meta == nullptr) { + break; + } + const char *mime; + meta->findCString(kKeyMIMEType, &mime); + + // TODO: allowing AVC only for the time being + if (!strncasecmp(mime, "video/avc", 9)) { + break; + } + + meta = nullptr; + } + + if (meta == nullptr) { + fprintf(stderr, "No AVC track found.\n"); + return -1; + } + + mediaSource = extractor->getTrack(i); + if (mediaSource == nullptr) { + fprintf(stderr, "skip NULL track %zu, total tracks %zu.\n", i, numTracks); + return -1; + } + + player.play(mediaSource); + } + + return 0; +} diff --git a/media/codec2/components/flac/Android.bp b/media/codec2/components/flac/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..32c3b36cbaf0f441ea8ae8c7a00fb3f89e55f21a --- /dev/null +++ b/media/codec2/components/flac/Android.bp @@ -0,0 +1,27 @@ +cc_library_shared { + name: "libstagefright_soft_c2flacdec", + defaults: [ + 
"libstagefright_soft_c2-defaults", + "libstagefright_soft_c2_sanitize_all-defaults", + ], + + header_libs: ["libFLAC-headers"], + + srcs: ["C2SoftFlacDec.cpp"], + + shared_libs: [ + "libstagefright_flacdec", + ], +} + +cc_library_shared { + name: "libstagefright_soft_c2flacenc", + defaults: [ + "libstagefright_soft_c2-defaults", + "libstagefright_soft_c2_sanitize_all-defaults", + ], + + srcs: ["C2SoftFlacEnc.cpp"], + + static_libs: ["libFLAC"], +} diff --git a/media/codec2/components/flac/C2SoftFlacDec.cpp b/media/codec2/components/flac/C2SoftFlacDec.cpp new file mode 100644 index 0000000000000000000000000000000000000000..f1e2f5194643564934ffe901a4849a895609a887 --- /dev/null +++ b/media/codec2/components/flac/C2SoftFlacDec.cpp @@ -0,0 +1,372 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "C2SoftFlacDec" +#include + +#include + +#include +#include + +#include "C2SoftFlacDec.h" + +namespace android { + +constexpr char COMPONENT_NAME[] = "c2.android.flac.decoder"; + +class C2SoftFlacDec::IntfImpl : public C2InterfaceHelper { +public: + explicit IntfImpl(const std::shared_ptr &helper) + : C2InterfaceHelper(helper) { + + setDerivedInstance(this); + + addParameter( + DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING) + .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatCompressed)) + .build()); + + addParameter( + DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING) + .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatAudio)) + .build()); + + addParameter( + DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING) + .withConstValue(AllocSharedString( + MEDIA_MIMETYPE_AUDIO_FLAC)) + .build()); + + addParameter( + DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING) + .withConstValue(AllocSharedString( + MEDIA_MIMETYPE_AUDIO_RAW)) + .build()); + + addParameter( + DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING) + .withDefault(new C2StreamSampleRateInfo::output(0u, 44100)) + .withFields({C2F(mSampleRate, value).inRange(1, 655350)}) + .withSetter((Setter::StrictValueWithNoDeps)) + .build()); + + addParameter( + DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING) + .withDefault(new C2StreamChannelCountInfo::output(0u, 1)) + .withFields({C2F(mChannelCount, value).inRange(1, 8)}) + .withSetter(Setter::StrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING) + .withDefault(new C2BitrateTuning::input(0u, 768000)) + .withFields({C2F(mBitrate, value).inRange(1, 21000000)}) + .withSetter(Setter::NonStrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE) + .withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 32768)) + .build()); + } + +private: + std::shared_ptr mInputFormat; + std::shared_ptr mOutputFormat; + std::shared_ptr mInputMediaType; + std::shared_ptr mOutputMediaType; + 
std::shared_ptr mSampleRate; + std::shared_ptr mChannelCount; + std::shared_ptr mBitrate; + std::shared_ptr mInputMaxBufSize; +}; + +C2SoftFlacDec::C2SoftFlacDec( + const char *name, + c2_node_id_t id, + const std::shared_ptr &intfImpl) + : SimpleC2Component(std::make_shared>(name, id, intfImpl)), + mIntf(intfImpl), + mFLACDecoder(nullptr) { +} + +C2SoftFlacDec::~C2SoftFlacDec() { + onRelease(); +} + +c2_status_t C2SoftFlacDec::onInit() { + status_t err = initDecoder(); + return err == OK ? C2_OK : C2_NO_MEMORY; +} + +c2_status_t C2SoftFlacDec::onStop() { + if (mFLACDecoder) mFLACDecoder->flush(); + memset(&mStreamInfo, 0, sizeof(mStreamInfo)); + mHasStreamInfo = false; + mSignalledError = false; + mSignalledOutputEos = false; + return C2_OK; +} + +void C2SoftFlacDec::onReset() { + mInputBufferCount = 0; + (void)onStop(); +} + +void C2SoftFlacDec::onRelease() { + mInputBufferCount = 0; + if (mFLACDecoder) delete mFLACDecoder; + mFLACDecoder = nullptr; +} + +c2_status_t C2SoftFlacDec::onFlush_sm() { + return onStop(); +} + +status_t C2SoftFlacDec::initDecoder() { + if (mFLACDecoder) { + delete mFLACDecoder; + } + mFLACDecoder = FLACDecoder::Create(); + if (!mFLACDecoder) { + ALOGE("initDecoder: failed to create FLACDecoder"); + mSignalledError = true; + return NO_MEMORY; + } + + memset(&mStreamInfo, 0, sizeof(mStreamInfo)); + mHasStreamInfo = false; + mSignalledError = false; + mSignalledOutputEos = false; + mInputBufferCount = 0; + + return OK; +} + +static void fillEmptyWork(const std::unique_ptr &work) { + work->worklets.front()->output.flags = work->input.flags; + work->worklets.front()->output.buffers.clear(); + work->worklets.front()->output.ordinal = work->input.ordinal; + work->workletsProcessed = 1u; +} + +// (TODO) add multiframe support, in plugin and FLACDecoder.cpp +void C2SoftFlacDec::process( + const std::unique_ptr &work, + const std::shared_ptr &pool) { + // Initialize output work + work->result = C2_OK; + work->workletsProcessed = 1u; + work->worklets.front()->output.configUpdate.clear(); + work->worklets.front()->output.flags = work->input.flags; + + if (mSignalledError || mSignalledOutputEos) { + work->result = C2_BAD_VALUE; + return; + } + + C2ReadView rView = mDummyReadView; + size_t inOffset = 0u; + size_t inSize = 0u; + if (!work->input.buffers.empty()) { + rView = work->input.buffers[0]->data().linearBlocks().front().map().get(); + inSize = rView.capacity(); + if (inSize && rView.error()) { + ALOGE("read view map failed %d", rView.error()); + work->result = C2_CORRUPTED; + return; + } + } + bool eos = (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0; + bool codecConfig = (work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0; + + ALOGV("in buffer attr. 
size %zu timestamp %d frameindex %d", inSize, + (int)work->input.ordinal.timestamp.peeku(), (int)work->input.ordinal.frameIndex.peeku()); + + if (inSize == 0) { + fillEmptyWork(work); + if (eos) { + mSignalledOutputEos = true; + ALOGV("signalled EOS"); + } + return; + } + + if (mInputBufferCount == 0 && !codecConfig) { + ALOGV("First frame has to include configuration, forcing config"); + codecConfig = true; + } + + uint8_t *input = const_cast(rView.data() + inOffset); + if (codecConfig) { + status_t decoderErr = mFLACDecoder->parseMetadata(input, inSize); + if (decoderErr != OK && decoderErr != WOULD_BLOCK) { + ALOGE("process: FLACDecoder parseMetaData returns error %d", decoderErr); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + + mInputBufferCount++; + fillEmptyWork(work); + if (eos) { + mSignalledOutputEos = true; + ALOGV("signalled EOS"); + } + + if (decoderErr == WOULD_BLOCK) { + ALOGV("process: parseMetadata is Blocking, Continue %d", decoderErr); + } else { + mStreamInfo = mFLACDecoder->getStreamInfo(); + if (mStreamInfo.sample_rate && mStreamInfo.max_blocksize && + mStreamInfo.channels) { + mHasStreamInfo = true; + C2StreamSampleRateInfo::output sampleRateInfo( + 0u, mStreamInfo.sample_rate); + C2StreamChannelCountInfo::output channelCountInfo( + 0u, mStreamInfo.channels); + std::vector> failures; + c2_status_t err = + mIntf->config({&sampleRateInfo, &channelCountInfo}, + C2_MAY_BLOCK, &failures); + if (err == OK) { + work->worklets.front()->output.configUpdate.push_back( + C2Param::Copy(sampleRateInfo)); + work->worklets.front()->output.configUpdate.push_back( + C2Param::Copy(channelCountInfo)); + } else { + ALOGE("Config Update failed"); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + } + ALOGD("process: decoder configuration : %d Hz, %d channels, %d samples," + " %d block size", mStreamInfo.sample_rate, mStreamInfo.channels, + (int)mStreamInfo.total_samples, mStreamInfo.max_blocksize); + } + return; + } + + size_t outSize; + if (mHasStreamInfo) + outSize = mStreamInfo.max_blocksize * mStreamInfo.channels * sizeof(short); + else + outSize = kMaxBlockSize * FLACDecoder::kMaxChannels * sizeof(short); + + std::shared_ptr block; + C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE }; + c2_status_t err = pool->fetchLinearBlock(outSize, usage, &block); + if (err != C2_OK) { + ALOGE("fetchLinearBlock for Output failed with status %d", err); + work->result = C2_NO_MEMORY; + return; + } + C2WriteView wView = block->map().get(); + if (wView.error()) { + ALOGE("write view map failed %d", wView.error()); + work->result = C2_CORRUPTED; + return; + } + + short *output = reinterpret_cast(wView.data()); + status_t decoderErr = mFLACDecoder->decodeOneFrame( + input, inSize, output, &outSize); + if (decoderErr != OK) { + ALOGE("process: FLACDecoder decodeOneFrame returns error %d", decoderErr); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + + mInputBufferCount++; + ALOGV("out buffer attr. 
size %zu", outSize); + work->worklets.front()->output.flags = work->input.flags; + work->worklets.front()->output.buffers.clear(); + work->worklets.front()->output.buffers.push_back(createLinearBuffer(block, 0, outSize)); + work->worklets.front()->output.ordinal = work->input.ordinal; + if (eos) { + mSignalledOutputEos = true; + ALOGV("signalled EOS"); + } +} + +c2_status_t C2SoftFlacDec::drain( + uint32_t drainMode, + const std::shared_ptr &pool) { + (void) pool; + if (drainMode == NO_DRAIN) { + ALOGW("drain with NO_DRAIN: no-op"); + return C2_OK; + } + if (drainMode == DRAIN_CHAIN) { + ALOGW("DRAIN_CHAIN not supported"); + return C2_OMITTED; + } + + if (mFLACDecoder) mFLACDecoder->flush(); + + return C2_OK; +} + +class C2SoftFlacDecFactory : public C2ComponentFactory { +public: + C2SoftFlacDecFactory() : mHelper(std::static_pointer_cast( + GetCodec2PlatformComponentStore()->getParamReflector())) { + } + + virtual c2_status_t createComponent( + c2_node_id_t id, + std::shared_ptr* const component, + std::function deleter) override { + *component = std::shared_ptr( + new C2SoftFlacDec(COMPONENT_NAME, + id, + std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual c2_status_t createInterface( + c2_node_id_t id, + std::shared_ptr* const interface, + std::function deleter) override { + *interface = std::shared_ptr( + new SimpleInterface( + COMPONENT_NAME, id, std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual ~C2SoftFlacDecFactory() override = default; + +private: + std::shared_ptr mHelper; +}; + +} // namespace android + +extern "C" ::C2ComponentFactory* CreateCodec2Factory() { + ALOGV("in %s", __func__); + return new ::android::C2SoftFlacDecFactory(); +} + +extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) { + ALOGV("in %s", __func__); + delete factory; +} diff --git a/media/codec2/components/flac/C2SoftFlacDec.h b/media/codec2/components/flac/C2SoftFlacDec.h new file mode 100644 index 0000000000000000000000000000000000000000..b491bfda21c5d049e7e7e6b007285123cbc6beaa --- /dev/null +++ b/media/codec2/components/flac/C2SoftFlacDec.h @@ -0,0 +1,66 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_C2_SOFT_FLAC_DEC_H_ +#define ANDROID_C2_SOFT_FLAC_DEC_H_ + +#include + +#include "FLACDecoder.h" + +namespace android { + +struct C2SoftFlacDec : public SimpleC2Component { + class IntfImpl; + + C2SoftFlacDec(const char *name, c2_node_id_t id, + const std::shared_ptr &intfImpl); + virtual ~C2SoftFlacDec(); + + // From SimpleC2Component + c2_status_t onInit() override; + c2_status_t onStop() override; + void onReset() override; + void onRelease() override; + c2_status_t onFlush_sm() override; + void process( + const std::unique_ptr &work, + const std::shared_ptr &pool) override; + c2_status_t drain( + uint32_t drainMode, + const std::shared_ptr &pool) override; + +private: + enum { + kMaxBlockSize = 4096 + }; + + std::shared_ptr mIntf; + FLACDecoder *mFLACDecoder; + FLAC__StreamMetadata_StreamInfo mStreamInfo; + bool mSignalledError; + bool mSignalledOutputEos; + bool mHasStreamInfo; + size_t mInputBufferCount; + + status_t initDecoder(); + + C2_DO_NOT_COPY(C2SoftFlacDec); +}; + +} // namespace android + +#endif // ANDROID_C2_SOFT_FLAC_DEC_H_ diff --git a/media/codec2/components/flac/C2SoftFlacEnc.cpp b/media/codec2/components/flac/C2SoftFlacEnc.cpp new file mode 100644 index 0000000000000000000000000000000000000000..e4192c72f00e030042a563fdf53d12e95ac4d7ce --- /dev/null +++ b/media/codec2/components/flac/C2SoftFlacEnc.cpp @@ -0,0 +1,460 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "C2SoftFlacEnc" +#include + +#include + +#include +#include + +#include "C2SoftFlacEnc.h" + +namespace android { + +class C2SoftFlacEnc::IntfImpl : public C2InterfaceHelper { +public: + explicit IntfImpl(const std::shared_ptr &helper) + : C2InterfaceHelper(helper) { + setDerivedInstance(this); + addParameter( + DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING) + .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatAudio)) + .build()); + addParameter( + DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING) + .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatCompressed)) + .build()); + addParameter( + DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING) + .withConstValue(AllocSharedString( + MEDIA_MIMETYPE_AUDIO_RAW)) + .build()); + addParameter( + DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING) + .withConstValue(AllocSharedString( + MEDIA_MIMETYPE_AUDIO_FLAC)) + .build()); + addParameter( + DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING) + .withDefault(new C2StreamSampleRateInfo::input(0u, 44100)) + .withFields({C2F(mSampleRate, value).inRange(1, 655350)}) + .withSetter((Setter::StrictValueWithNoDeps)) + .build()); + addParameter( + DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING) + .withDefault(new C2StreamChannelCountInfo::input(0u, 1)) + .withFields({C2F(mChannelCount, value).inRange(1, 2)}) + .withSetter(Setter::StrictValueWithNoDeps) + .build()); + addParameter( + DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING) + .withDefault(new C2BitrateTuning::output(0u, 768000)) + .withFields({C2F(mBitrate, value).inRange(1, 21000000)}) + .withSetter(Setter::NonStrictValueWithNoDeps) + .build()); + addParameter( + DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE) + .withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 4608)) + .build()); + } + + uint32_t getSampleRate() const { return mSampleRate->value; } + uint32_t getChannelCount() const { return mChannelCount->value; } + uint32_t getBitrate() const { return mBitrate->value; } + +private: + std::shared_ptr mInputFormat; + std::shared_ptr mOutputFormat; + std::shared_ptr mInputMediaType; + std::shared_ptr mOutputMediaType; + std::shared_ptr mSampleRate; + std::shared_ptr mChannelCount; + std::shared_ptr mBitrate; + std::shared_ptr mInputMaxBufSize; +}; +constexpr char COMPONENT_NAME[] = "c2.android.flac.encoder"; + +C2SoftFlacEnc::C2SoftFlacEnc( + const char *name, + c2_node_id_t id, + const std::shared_ptr &intfImpl) + : SimpleC2Component(std::make_shared>(name, id, intfImpl)), + mIntf(intfImpl), + mFlacStreamEncoder(nullptr), + mInputBufferPcm32(nullptr) { +} + +C2SoftFlacEnc::~C2SoftFlacEnc() { + onRelease(); +} + +c2_status_t C2SoftFlacEnc::onInit() { + mFlacStreamEncoder = FLAC__stream_encoder_new(); + if (!mFlacStreamEncoder) return C2_CORRUPTED; + + mInputBufferPcm32 = (FLAC__int32*) malloc( + kInBlockSize * kMaxNumChannels * sizeof(FLAC__int32)); + if (!mInputBufferPcm32) return C2_NO_MEMORY; + + mSignalledError = false; + mSignalledOutputEos = false; + mCompressionLevel = FLAC_COMPRESSION_LEVEL_DEFAULT; + mIsFirstFrame = true; + mAnchorTimeStamp = 0ull; + mProcessedSamples = 0u; + mEncoderWriteData = false; + mEncoderReturnedNbBytes = 0; + mHeaderOffset = 0; + mWroteHeader = false; + + status_t err = configureEncoder(); + return err == OK ? 
C2_OK : C2_CORRUPTED; +} + +void C2SoftFlacEnc::onRelease() { + if (mFlacStreamEncoder) { + FLAC__stream_encoder_delete(mFlacStreamEncoder); + mFlacStreamEncoder = nullptr; + } + + if (mInputBufferPcm32) { + free(mInputBufferPcm32); + mInputBufferPcm32 = nullptr; + } +} + +void C2SoftFlacEnc::onReset() { + mCompressionLevel = FLAC_COMPRESSION_LEVEL_DEFAULT; + (void) onStop(); +} + +c2_status_t C2SoftFlacEnc::onStop() { + mSignalledError = false; + mSignalledOutputEos = false; + mIsFirstFrame = true; + mAnchorTimeStamp = 0ull; + mProcessedSamples = 0u; + mEncoderWriteData = false; + mEncoderReturnedNbBytes = 0; + mHeaderOffset = 0; + mWroteHeader = false; + + c2_status_t status = drain(DRAIN_COMPONENT_NO_EOS, nullptr); + if (C2_OK != status) return status; + + status_t err = configureEncoder(); + if (err != OK) mSignalledError = true; + return C2_OK; +} + +c2_status_t C2SoftFlacEnc::onFlush_sm() { + return onStop(); +} + +static void fillEmptyWork(const std::unique_ptr &work) { + work->worklets.front()->output.flags = work->input.flags; + work->worklets.front()->output.buffers.clear(); + work->worklets.front()->output.ordinal = work->input.ordinal; +} + +void C2SoftFlacEnc::process( + const std::unique_ptr &work, + const std::shared_ptr &pool) { + // Initialize output work + work->result = C2_OK; + work->workletsProcessed = 1u; + work->worklets.front()->output.flags = work->input.flags; + + if (mSignalledError || mSignalledOutputEos) { + work->result = C2_BAD_VALUE; + return; + } + + bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0); + C2ReadView rView = mDummyReadView; + size_t inOffset = 0u; + size_t inSize = 0u; + if (!work->input.buffers.empty()) { + rView = work->input.buffers[0]->data().linearBlocks().front().map().get(); + inSize = rView.capacity(); + if (inSize && rView.error()) { + ALOGE("read view map failed %d", rView.error()); + work->result = C2_CORRUPTED; + return; + } + } + + ALOGV("in buffer attr. 
size %zu timestamp %d frameindex %d, flags %x", + inSize, (int)work->input.ordinal.timestamp.peeku(), + (int)work->input.ordinal.frameIndex.peeku(), work->input.flags); + if (mIsFirstFrame && inSize) { + mAnchorTimeStamp = work->input.ordinal.timestamp.peekull(); + mIsFirstFrame = false; + } + + if (!mWroteHeader) { + std::unique_ptr csd = + C2StreamCsdInfo::output::AllocUnique(mHeaderOffset, 0u); + if (!csd) { + ALOGE("CSD allocation failed"); + mSignalledError = true; + work->result = C2_NO_MEMORY; + return; + } + memcpy(csd->m.value, mHeader, mHeaderOffset); + ALOGV("put csd, %d bytes", mHeaderOffset); + + work->worklets.front()->output.configUpdate.push_back(std::move(csd)); + mWroteHeader = true; + } + + uint32_t sampleRate = mIntf->getSampleRate(); + uint32_t channelCount = mIntf->getChannelCount(); + uint64_t outTimeStamp = mProcessedSamples * 1000000ll / sampleRate; + + size_t outCapacity = inSize; + outCapacity += mBlockSize * channelCount * sizeof(int16_t); + + C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE }; + c2_status_t err = pool->fetchLinearBlock(outCapacity, usage, &mOutputBlock); + if (err != C2_OK) { + ALOGE("fetchLinearBlock for Output failed with status %d", err); + work->result = C2_NO_MEMORY; + return; + } + C2WriteView wView = mOutputBlock->map().get(); + if (wView.error()) { + ALOGE("write view map failed %d", wView.error()); + work->result = C2_CORRUPTED; + return; + } + + mEncoderWriteData = true; + mEncoderReturnedNbBytes = 0; + size_t inPos = 0; + while (inPos < inSize) { + const uint8_t *inPtr = rView.data() + inOffset; + size_t processSize = MIN(kInBlockSize * channelCount * sizeof(int16_t), (inSize - inPos)); + const unsigned nbInputFrames = processSize / (channelCount * sizeof(int16_t)); + const unsigned nbInputSamples = processSize / sizeof(int16_t); + const int16_t *pcm16 = reinterpret_cast(inPtr + inPos); + ALOGV("about to encode %zu bytes", processSize); + + for (unsigned i = 0; i < nbInputSamples; i++) { + mInputBufferPcm32[i] = (FLAC__int32) pcm16[i]; + } + + FLAC__bool ok = FLAC__stream_encoder_process_interleaved( + mFlacStreamEncoder, mInputBufferPcm32, nbInputFrames); + if (!ok) { + ALOGE("error encountered during encoding"); + mSignalledError = true; + work->result = C2_CORRUPTED; + mOutputBlock.reset(); + return; + } + inPos += processSize; + } + if (eos && (C2_OK != drain(DRAIN_COMPONENT_WITH_EOS, pool))) { + ALOGE("error encountered during encoding"); + mSignalledError = true; + work->result = C2_CORRUPTED; + mOutputBlock.reset(); + return; + } + fillEmptyWork(work); + if (mEncoderReturnedNbBytes != 0) { + std::shared_ptr buffer = createLinearBuffer(std::move(mOutputBlock), 0, mEncoderReturnedNbBytes); + work->worklets.front()->output.buffers.push_back(buffer); + work->worklets.front()->output.ordinal.timestamp = mAnchorTimeStamp + outTimeStamp; + } else { + ALOGV("encoder process_interleaved returned without data to write"); + } + mOutputBlock = nullptr; + if (eos) { + mSignalledOutputEos = true; + ALOGV("signalled EOS"); + } + mEncoderWriteData = false; + mEncoderReturnedNbBytes = 0; +} + +FLAC__StreamEncoderWriteStatus C2SoftFlacEnc::onEncodedFlacAvailable( + const FLAC__byte buffer[], size_t bytes, unsigned samples, + unsigned current_frame) { + (void) current_frame; + ALOGV("%s (bytes=%zu, samples=%u, curr_frame=%u)", __func__, bytes, samples, + current_frame); + + if (samples == 0) { + ALOGI("saving %zu bytes of header", bytes); + memcpy(mHeader + mHeaderOffset, buffer, bytes); + mHeaderOffset += bytes;// will 
contain header size when finished receiving header + return FLAC__STREAM_ENCODER_WRITE_STATUS_OK; + } + + if ((samples == 0) || !mEncoderWriteData) { + // called by the encoder because there's header data to save, but it's not the role + // of this component (unless WRITE_FLAC_HEADER_IN_FIRST_BUFFER is defined) + ALOGV("ignoring %zu bytes of header data (samples=%d)", bytes, samples); + return FLAC__STREAM_ENCODER_WRITE_STATUS_OK; + } + + // write encoded data + C2WriteView wView = mOutputBlock->map().get(); + uint8_t* outData = wView.data(); + ALOGV("writing %zu bytes of encoded data on output", bytes); + // increment mProcessedSamples to maintain audio synchronization during + // play back + mProcessedSamples += samples; + if (bytes + mEncoderReturnedNbBytes > mOutputBlock->capacity()) { + ALOGE("not enough space left to write encoded data, dropping %zu bytes", bytes); + // a fatal error would stop the encoding + return FLAC__STREAM_ENCODER_WRITE_STATUS_OK; + } + memcpy(outData + mEncoderReturnedNbBytes, buffer, bytes); + mEncoderReturnedNbBytes += bytes; + return FLAC__STREAM_ENCODER_WRITE_STATUS_OK; +} + + +status_t C2SoftFlacEnc::configureEncoder() { + ALOGV("%s numChannel=%d, sampleRate=%d", __func__, mIntf->getChannelCount(), mIntf->getSampleRate()); + + if (mSignalledError || !mFlacStreamEncoder) { + ALOGE("can't configure encoder: no encoder or invalid state"); + return UNKNOWN_ERROR; + } + + FLAC__bool ok = true; + ok = ok && FLAC__stream_encoder_set_channels(mFlacStreamEncoder, mIntf->getChannelCount()); + ok = ok && FLAC__stream_encoder_set_sample_rate(mFlacStreamEncoder, mIntf->getSampleRate()); + ok = ok && FLAC__stream_encoder_set_bits_per_sample(mFlacStreamEncoder, 16); + ok = ok && FLAC__stream_encoder_set_compression_level(mFlacStreamEncoder, mCompressionLevel); + ok = ok && FLAC__stream_encoder_set_verify(mFlacStreamEncoder, false); + if (!ok) { + ALOGE("unknown error when configuring encoder"); + return UNKNOWN_ERROR; + } + + ok &= FLAC__STREAM_ENCODER_INIT_STATUS_OK == + FLAC__stream_encoder_init_stream(mFlacStreamEncoder, + flacEncoderWriteCallback /*write_callback*/, + nullptr /*seek_callback*/, + nullptr /*tell_callback*/, + nullptr /*metadata_callback*/, + (void *) this /*client_data*/); + + if (!ok) { + ALOGE("unknown error when configuring encoder"); + return UNKNOWN_ERROR; + } + + mBlockSize = FLAC__stream_encoder_get_blocksize(mFlacStreamEncoder); + + ALOGV("encoder successfully configured"); + return OK; +} + +FLAC__StreamEncoderWriteStatus C2SoftFlacEnc::flacEncoderWriteCallback( + const FLAC__StreamEncoder *, + const FLAC__byte buffer[], + size_t bytes, + unsigned samples, + unsigned current_frame, + void *client_data) { + return ((C2SoftFlacEnc*) client_data)->onEncodedFlacAvailable( + buffer, bytes, samples, current_frame); +} + +c2_status_t C2SoftFlacEnc::drain( + uint32_t drainMode, + const std::shared_ptr &pool) { + (void) pool; + switch (drainMode) { + case NO_DRAIN: + ALOGW("drain with NO_DRAIN: no-op"); + return C2_OK; + case DRAIN_CHAIN: + ALOGW("DRAIN_CHAIN not supported"); + return C2_OMITTED; + case DRAIN_COMPONENT_WITH_EOS: + // TODO: This flag is not being sent back to the client + // because there are no items in PendingWork queue as all the + // inputs are being sent back with emptywork or valid encoded data + // mSignalledOutputEos = true; + case DRAIN_COMPONENT_NO_EOS: + break; + default: + return C2_BAD_VALUE; + } + FLAC__bool ok = FLAC__stream_encoder_finish(mFlacStreamEncoder); + if (!ok) return C2_CORRUPTED; + mIsFirstFrame = true; + 
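// As used here, FLAC__stream_encoder_finish() pushes any audio frames still
+ // buffered inside libFLAC through the write callback (flacEncoderWriteCallback,
+ // which forwards to onEncodedFlacAvailable), so the counters below can be
+ // reset for a fresh stream once the caller re-runs configureEncoder().
+ 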
mAnchorTimeStamp = 0ull; + mProcessedSamples = 0u; + + return C2_OK; +} + +class C2SoftFlacEncFactory : public C2ComponentFactory { +public: + C2SoftFlacEncFactory() : mHelper(std::static_pointer_cast( + GetCodec2PlatformComponentStore()->getParamReflector())) { + } + + virtual c2_status_t createComponent( + c2_node_id_t id, + std::shared_ptr* const component, + std::function deleter) override { + *component = std::shared_ptr( + new C2SoftFlacEnc(COMPONENT_NAME, + id, + std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual c2_status_t createInterface( + c2_node_id_t id, + std::shared_ptr* const interface, + std::function deleter) override { + *interface = std::shared_ptr( + new SimpleInterface( + COMPONENT_NAME, id, std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual ~C2SoftFlacEncFactory() override = default; +private: + std::shared_ptr mHelper; +}; + +} // namespace android + +extern "C" ::C2ComponentFactory* CreateCodec2Factory() { + ALOGV("in %s", __func__); + return new ::android::C2SoftFlacEncFactory(); +} + +extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) { + ALOGV("in %s", __func__); + delete factory; +} diff --git a/media/codec2/components/flac/C2SoftFlacEnc.h b/media/codec2/components/flac/C2SoftFlacEnc.h new file mode 100644 index 0000000000000000000000000000000000000000..cdf305e2cbba1da5e3b24d67633290486f48c730 --- /dev/null +++ b/media/codec2/components/flac/C2SoftFlacEnc.h @@ -0,0 +1,89 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_C2_SOFT_FLAC_ENC_H_ +#define ANDROID_C2_SOFT_FLAC_ENC_H_ + +#include + +#include "FLAC/stream_encoder.h" + +#define FLAC_COMPRESSION_LEVEL_MIN 0 +#define FLAC_COMPRESSION_LEVEL_DEFAULT 5 +#define FLAC_COMPRESSION_LEVEL_MAX 8 + +#define FLAC_HEADER_SIZE 128 + +#define MIN(a, b) (((a) < (b)) ? 
(a) : (b)) + +namespace android { + +class C2SoftFlacEnc : public SimpleC2Component { +public: + class IntfImpl; + + C2SoftFlacEnc(const char *name, c2_node_id_t id, const std::shared_ptr &intfImpl); + virtual ~C2SoftFlacEnc(); + + // From SimpleC2Component + c2_status_t onInit() override; + c2_status_t onStop() override; + void onReset() override; + void onRelease() override; + c2_status_t onFlush_sm() override; + void process( + const std::unique_ptr &work, + const std::shared_ptr &pool) override; + c2_status_t drain( + uint32_t drainMode, + const std::shared_ptr &pool) override; + +private: + status_t configureEncoder(); + static FLAC__StreamEncoderWriteStatus flacEncoderWriteCallback( + const FLAC__StreamEncoder *encoder, const FLAC__byte buffer[], + size_t bytes, unsigned samples, unsigned current_frame, + void *client_data); + FLAC__StreamEncoderWriteStatus onEncodedFlacAvailable( + const FLAC__byte buffer[], size_t bytes, unsigned samples, + unsigned current_frame); + + std::shared_ptr mIntf; + const unsigned int kInBlockSize = 1152; + const unsigned int kMaxNumChannels = 2; + FLAC__StreamEncoder* mFlacStreamEncoder; + FLAC__int32* mInputBufferPcm32; + std::shared_ptr mOutputBlock; + bool mSignalledError; + bool mSignalledOutputEos; + uint32_t mCompressionLevel; + uint32_t mBlockSize; + bool mIsFirstFrame; + uint64_t mAnchorTimeStamp; + uint64_t mProcessedSamples; + // should the data received by the callback be written to the output port + bool mEncoderWriteData; + size_t mEncoderReturnedNbBytes; + unsigned mHeaderOffset; + bool mWroteHeader; + char mHeader[FLAC_HEADER_SIZE]; + + C2_DO_NOT_COPY(C2SoftFlacEnc); +}; + +} // namespace android + +#endif // ANDROID_C2_SOFT_FLAC_ENC_H_ diff --git a/media/codec2/components/flac/MODULE_LICENSE_APACHE2 b/media/codec2/components/flac/MODULE_LICENSE_APACHE2 new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/media/codec2/components/flac/NOTICE b/media/codec2/components/flac/NOTICE new file mode 100644 index 0000000000000000000000000000000000000000..c5b1efa7aac764ae6d8da63476a2d5cec02a6a5d --- /dev/null +++ b/media/codec2/components/flac/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2005-2008, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/media/codec2/components/g711/Android.bp b/media/codec2/components/g711/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..56cbc2020756fb7b0ab00e4f438cab08e587f208 --- /dev/null +++ b/media/codec2/components/g711/Android.bp @@ -0,0 +1,23 @@ +cc_library_shared { + name: "libstagefright_soft_c2g711alawdec", + defaults: [ + "libstagefright_soft_c2-defaults", + "libstagefright_soft_c2_sanitize_all-defaults", + ], + + srcs: ["C2SoftG711Dec.cpp"], + + cflags: [ + "-DALAW", + ], +} + +cc_library_shared { + name: "libstagefright_soft_c2g711mlawdec", + defaults: [ + "libstagefright_soft_c2-defaults", + "libstagefright_soft_c2_sanitize_all-defaults", + ], + + srcs: ["C2SoftG711Dec.cpp"], +} diff --git a/media/codec2/components/g711/C2SoftG711Dec.cpp b/media/codec2/components/g711/C2SoftG711Dec.cpp new file mode 100644 index 0000000000000000000000000000000000000000..1c71d458c739cf22e2ac950122c689c48061512f --- /dev/null +++ b/media/codec2/components/g711/C2SoftG711Dec.cpp @@ -0,0 +1,323 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "C2SoftG711Dec" +#include + +#include + +#include +#include + +#include "C2SoftG711Dec.h" + +namespace android { + +#ifdef ALAW +constexpr char COMPONENT_NAME[] = "c2.android.g711.alaw.decoder"; +#else +constexpr char COMPONENT_NAME[] = "c2.android.g711.mlaw.decoder"; +#endif + +class C2SoftG711Dec::IntfImpl : public C2InterfaceHelper { +public: + explicit IntfImpl(const std::shared_ptr &helper) + : C2InterfaceHelper(helper) { + + setDerivedInstance(this); + + addParameter( + DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING) + .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatCompressed)) + .build()); + + addParameter( + DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING) + .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatAudio)) + .build()); + + addParameter( + DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING) + .withConstValue(AllocSharedString( +#ifdef ALAW + MEDIA_MIMETYPE_AUDIO_G711_ALAW +#else + MEDIA_MIMETYPE_AUDIO_G711_MLAW +#endif + )).build()); + + addParameter( + DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING) + .withConstValue(AllocSharedString( + MEDIA_MIMETYPE_AUDIO_RAW)) + .build()); + + addParameter( + DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING) + .withDefault(new C2StreamSampleRateInfo::output(0u, 8000)) + .withFields({C2F(mSampleRate, value).inRange(8000, 48000)}) + .withSetter((Setter::StrictValueWithNoDeps)) + .build()); + + addParameter( + DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING) + .withDefault(new C2StreamChannelCountInfo::output(0u, 1)) + .withFields({C2F(mChannelCount, value).equalTo(1)}) + .withSetter(Setter::StrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING) + .withDefault(new C2BitrateTuning::input(0u, 64000)) + .withFields({C2F(mBitrate, value).equalTo(64000)}) + .withSetter(Setter::NonStrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE) + .withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 8192)) + .build()); + } + +private: + std::shared_ptr mInputFormat; + std::shared_ptr mOutputFormat; + std::shared_ptr mInputMediaType; + std::shared_ptr mOutputMediaType; + std::shared_ptr mSampleRate; + std::shared_ptr mChannelCount; + std::shared_ptr mBitrate; + std::shared_ptr mInputMaxBufSize; +}; + +C2SoftG711Dec::C2SoftG711Dec( + const char *name, + c2_node_id_t id, + const std::shared_ptr &intfImpl) + : SimpleC2Component(std::make_shared>(name, id, intfImpl)), + mIntf(intfImpl) { +} + +C2SoftG711Dec::~C2SoftG711Dec() { + onRelease(); +} + +c2_status_t C2SoftG711Dec::onInit() { + mSignalledOutputEos = false; + return C2_OK; +} + +c2_status_t C2SoftG711Dec::onStop() { + mSignalledOutputEos = false; + return C2_OK; +} + +void C2SoftG711Dec::onReset() { + (void)onStop(); +} + +void C2SoftG711Dec::onRelease() { +} + +c2_status_t C2SoftG711Dec::onFlush_sm() { + return onStop(); +} + +void C2SoftG711Dec::process( + const std::unique_ptr &work, + const std::shared_ptr 
&pool) { + // Initialize output work + work->result = C2_OK; + work->workletsProcessed = 1u; + work->worklets.front()->output.flags = work->input.flags; + + if (mSignalledOutputEos) { + work->result = C2_BAD_VALUE; + return; + } + + C2ReadView rView = mDummyReadView; + size_t inOffset = 0u; + size_t inSize = 0u; + if (!work->input.buffers.empty()) { + rView = work->input.buffers[0]->data().linearBlocks().front().map().get(); + inSize = rView.capacity(); + if (inSize && rView.error()) { + ALOGE("read view map failed %d", rView.error()); + work->result = C2_CORRUPTED; + return; + } + } + bool eos = (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0; + int outSize = inSize * sizeof(int16_t); + + ALOGV("in buffer attr. size %zu timestamp %d frameindex %d", inSize, + (int)work->input.ordinal.timestamp.peeku(), (int)work->input.ordinal.frameIndex.peeku()); + + if (inSize == 0) { + work->worklets.front()->output.flags = work->input.flags; + work->worklets.front()->output.buffers.clear(); + work->worklets.front()->output.ordinal = work->input.ordinal; + if (eos) { + mSignalledOutputEos = true; + ALOGV("signalled EOS"); + } + return; + } + + uint8_t *inputptr = const_cast(rView.data() + inOffset); + + std::shared_ptr block; + C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE }; + c2_status_t err = pool->fetchLinearBlock(outSize, usage, &block); + if (err != C2_OK) { + ALOGE("fetchLinearBlock for Output failed with status %d", err); + work->result = C2_NO_MEMORY; + return; + } + C2WriteView wView = block->map().get(); + if (wView.error()) { + ALOGE("write view map failed %d", wView.error()); + work->result = C2_CORRUPTED; + return; + } + int16_t *outputptr = reinterpret_cast(wView.data()); + +#ifdef ALAW + DecodeALaw(outputptr, inputptr, inSize); +#else + DecodeMLaw(outputptr, inputptr, inSize); +#endif + + work->worklets.front()->output.flags = work->input.flags; + work->worklets.front()->output.buffers.clear(); + work->worklets.front()->output.buffers.push_back(createLinearBuffer(block)); + work->worklets.front()->output.ordinal = work->input.ordinal; + + if (eos) { + mSignalledOutputEos = true; + ALOGV("signalled EOS"); + } +} + +c2_status_t C2SoftG711Dec::drain( + uint32_t drainMode, + const std::shared_ptr &pool) { + (void) pool; + if (drainMode == NO_DRAIN) { + ALOGW("drain with NO_DRAIN: no-op"); + return C2_OK; + } + if (drainMode == DRAIN_CHAIN) { + ALOGW("DRAIN_CHAIN not supported"); + return C2_OMITTED; + } + + return C2_OK; +} + +#ifdef ALAW +void C2SoftG711Dec::DecodeALaw( + int16_t *out, const uint8_t *in, size_t inSize) { + while (inSize > 0) { + inSize--; + int32_t x = *in++; + + int32_t ix = x ^ 0x55; + ix &= 0x7f; + + int32_t iexp = ix >> 4; + int32_t mant = ix & 0x0f; + + if (iexp > 0) { + mant += 16; + } + + mant = (mant << 4) + 8; + + if (iexp > 1) { + mant = mant << (iexp - 1); + } + + *out++ = (x > 127) ? mant : -mant; + } +} +#else +void C2SoftG711Dec::DecodeMLaw( + int16_t *out, const uint8_t *in, size_t inSize) { + while (inSize > 0) { + inSize--; + int32_t x = *in++; + + int32_t mantissa = ~x; + int32_t exponent = (mantissa >> 4) & 7; + int32_t segment = exponent + 1; + mantissa &= 0x0f; + + int32_t step = 4 << segment; + + int32_t abs = (0x80l << exponent) + step * mantissa + step / 2 - 4 * 33; + + *out++ = (x < 0x80) ? 
-abs : abs; + } +} +#endif + +class C2SoftG711DecFactory : public C2ComponentFactory { +public: + C2SoftG711DecFactory() : mHelper(std::static_pointer_cast( + GetCodec2PlatformComponentStore()->getParamReflector())) { + } + + virtual c2_status_t createComponent( + c2_node_id_t id, + std::shared_ptr* const component, + std::function deleter) override { + *component = std::shared_ptr( + new C2SoftG711Dec(COMPONENT_NAME, id, + std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual c2_status_t createInterface( + c2_node_id_t id, std::shared_ptr* const interface, + std::function deleter) override { + *interface = std::shared_ptr( + new SimpleInterface( + COMPONENT_NAME, id, std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual ~C2SoftG711DecFactory() override = default; + +private: + std::shared_ptr mHelper; +}; + +} // namespace android + +extern "C" ::C2ComponentFactory* CreateCodec2Factory() { + ALOGV("in %s", __func__); + return new ::android::C2SoftG711DecFactory(); +} + +extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) { + ALOGV("in %s", __func__); + delete factory; +} diff --git a/media/codec2/components/g711/C2SoftG711Dec.h b/media/codec2/components/g711/C2SoftG711Dec.h new file mode 100644 index 0000000000000000000000000000000000000000..23e8ffc1ee32df6711d902f4b817053ab9f08df8 --- /dev/null +++ b/media/codec2/components/g711/C2SoftG711Dec.h @@ -0,0 +1,59 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_C2_SOFT_G711_DEC_H_ +#define ANDROID_C2_SOFT_G711_DEC_H_ + +#include + + +namespace android { + +struct C2SoftG711Dec : public SimpleC2Component { + class IntfImpl; + + C2SoftG711Dec(const char *name, c2_node_id_t id, + const std::shared_ptr &intfImpl); + virtual ~C2SoftG711Dec(); + + // From SimpleC2Component + c2_status_t onInit() override; + c2_status_t onStop() override; + void onReset() override; + void onRelease() override; + c2_status_t onFlush_sm() override; + void process( + const std::unique_ptr &work, + const std::shared_ptr &pool) override; + c2_status_t drain( + uint32_t drainMode, + const std::shared_ptr &pool) override; +private: + std::shared_ptr mIntf; + bool mSignalledOutputEos; + +#ifdef ALAW + void DecodeALaw(int16_t *out, const uint8_t *in, size_t inSize); +#else + void DecodeMLaw(int16_t *out, const uint8_t *in, size_t inSize); +#endif + + C2_DO_NOT_COPY(C2SoftG711Dec); +}; + +} // namespace android + +#endif // ANDROID_C2_SOFT_G711_DEC_H_ diff --git a/media/codec2/components/g711/MODULE_LICENSE_APACHE2 b/media/codec2/components/g711/MODULE_LICENSE_APACHE2 new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/media/codec2/components/g711/NOTICE b/media/codec2/components/g711/NOTICE new file mode 100644 index 0000000000000000000000000000000000000000..c5b1efa7aac764ae6d8da63476a2d5cec02a6a5d --- /dev/null +++ b/media/codec2/components/g711/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2005-2008, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/media/codec2/components/gsm/Android.bp b/media/codec2/components/gsm/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..80757470bcdfa13027481d12a59e9ea95192e9ba --- /dev/null +++ b/media/codec2/components/gsm/Android.bp @@ -0,0 +1,11 @@ +cc_library_shared { + name: "libstagefright_soft_c2gsmdec", + defaults: [ + "libstagefright_soft_c2-defaults", + "libstagefright_soft_c2_sanitize_all-defaults", + ], + + srcs: ["C2SoftGsmDec.cpp"], + + static_libs: ["libgsm"], +} diff --git a/media/codec2/components/gsm/C2SoftGsmDec.cpp b/media/codec2/components/gsm/C2SoftGsmDec.cpp new file mode 100644 index 0000000000000000000000000000000000000000..7101c799fec005874eb927a58a8e5751fabc2ea5 --- /dev/null +++ b/media/codec2/components/gsm/C2SoftGsmDec.cpp @@ -0,0 +1,311 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "C2SoftGsmDec" +#include + +#include + +#include +#include + +#include "C2SoftGsmDec.h" + +namespace android { + +constexpr char COMPONENT_NAME[] = "c2.android.gsm.decoder"; + +class C2SoftGsmDec::IntfImpl : public C2InterfaceHelper { + public: + explicit IntfImpl(const std::shared_ptr& helper) + : C2InterfaceHelper(helper) { + setDerivedInstance(this); + + addParameter( + DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING) + .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatCompressed)) + .build()); + + addParameter( + DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING) + .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatAudio)) + .build()); + + addParameter( + DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING) + .withConstValue(AllocSharedString( + MEDIA_MIMETYPE_AUDIO_MSGSM)) + .build()); + + addParameter( + DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING) + .withConstValue(AllocSharedString( + MEDIA_MIMETYPE_AUDIO_RAW)) + .build()); + + addParameter( + DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING) + .withDefault(new C2StreamSampleRateInfo::output(0u, 8000)) + .withFields({C2F(mSampleRate, value).equalTo(8000)}) + .withSetter((Setter::StrictValueWithNoDeps)) + .build()); + + addParameter( + DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING) + .withDefault(new C2StreamChannelCountInfo::output(0u, 1)) + .withFields({C2F(mChannelCount, value).equalTo(1)}) + .withSetter(Setter::StrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING) + .withDefault(new C2BitrateTuning::input(0u, 13200)) + .withFields({C2F(mBitrate, value).equalTo(13200)}) + .withSetter(Setter::NonStrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE) + .withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 1024 / MSGSM_IN_FRM_SZ * MSGSM_IN_FRM_SZ)) + .build()); + } + + private: + std::shared_ptr mInputFormat; + std::shared_ptr mOutputFormat; + std::shared_ptr mInputMediaType; + std::shared_ptr mOutputMediaType; + std::shared_ptr mSampleRate; + std::shared_ptr mChannelCount; + std::shared_ptr mBitrate; + std::shared_ptr mInputMaxBufSize; +}; + +C2SoftGsmDec::C2SoftGsmDec(const char *name, c2_node_id_t id, + const std::shared_ptr& intfImpl) + : SimpleC2Component( + std::make_shared>(name, id, intfImpl)), + mIntf(intfImpl), + mGsm(nullptr) { +} + +C2SoftGsmDec::~C2SoftGsmDec() { + onRelease(); +} + +c2_status_t C2SoftGsmDec::onInit() { + if (!mGsm) mGsm = gsm_create(); + if (!mGsm) return C2_NO_MEMORY; + int msopt = 1; + (void)gsm_option(mGsm, GSM_OPT_WAV49, &msopt); + mSignalledError = false; + mSignalledEos = false; + return C2_OK; +} + +c2_status_t C2SoftGsmDec::onStop() { + if (mGsm) { + gsm_destroy(mGsm); + mGsm = nullptr; + } + if (!mGsm) mGsm = gsm_create(); + if (!mGsm) return C2_NO_MEMORY; + int msopt = 1; + (void)gsm_option(mGsm, GSM_OPT_WAV49, &msopt); + mSignalledError = false; + mSignalledEos = false; + return C2_OK; +} + +void C2SoftGsmDec::onReset() { + (void)onStop(); +} + +void C2SoftGsmDec::onRelease() { + if (mGsm) { + gsm_destroy(mGsm); + mGsm = nullptr; + } +} + +c2_status_t C2SoftGsmDec::onFlush_sm() { + return onStop(); +} + +static size_t decodeGSM(gsm handle, int16_t *out, size_t outCapacity, + uint8_t *in, size_t inSize) { + size_t outSize = 0; + + if (inSize % MSGSM_IN_FRM_SZ == 0 + && (inSize / MSGSM_IN_FRM_SZ * MSGSM_OUT_FRM_SZ * 
sizeof(*out) + <= outCapacity)) { + while (inSize > 0) { + gsm_decode(handle, in, out); + in += FRGSM_IN_FRM_SZ; + inSize -= FRGSM_IN_FRM_SZ; + out += FRGSM_OUT_FRM_SZ; + outSize += FRGSM_OUT_FRM_SZ; + + gsm_decode(handle, in, out); + in += FRGSM_IN_FRM_SZ_MINUS_1; + inSize -= FRGSM_IN_FRM_SZ_MINUS_1; + out += FRGSM_OUT_FRM_SZ; + outSize += FRGSM_OUT_FRM_SZ; + } + } + + return outSize * sizeof(int16_t); +} + +void C2SoftGsmDec::process( + const std::unique_ptr &work, + const std::shared_ptr &pool) { + // Initialize output work + work->result = C2_OK; + work->workletsProcessed = 1u; + work->worklets.front()->output.flags = work->input.flags; + + if (mSignalledError || mSignalledEos) { + work->result = C2_BAD_VALUE; + return; + } + + bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0); + C2ReadView rView = mDummyReadView; + size_t inOffset = 0u; + size_t inSize = 0u; + if (!work->input.buffers.empty()) { + rView = work->input.buffers[0]->data().linearBlocks().front().map().get(); + inSize = rView.capacity(); + if (inSize && rView.error()) { + ALOGE("read view map failed %d", rView.error()); + work->result = rView.error(); + return; + } + } + + if (inSize == 0) { + work->worklets.front()->output.flags = work->input.flags; + work->worklets.front()->output.buffers.clear(); + work->worklets.front()->output.ordinal = work->input.ordinal; + if (eos) { + mSignalledEos = true; + ALOGV("signalled EOS"); + } + return; + } + ALOGV("in buffer attr. size %zu timestamp %d frameindex %d", inSize, + (int)work->input.ordinal.timestamp.peeku(), (int)work->input.ordinal.frameIndex.peeku()); + + size_t outCapacity = (inSize / MSGSM_IN_FRM_SZ ) * MSGSM_OUT_FRM_SZ * sizeof(int16_t); + std::shared_ptr block; + C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE }; + c2_status_t err = pool->fetchLinearBlock(outCapacity, usage, &block); + if (err != C2_OK) { + ALOGE("fetchLinearBlock for Output failed with status %d", err); + work->result = C2_NO_MEMORY; + return; + } + C2WriteView wView = block->map().get(); + if (wView.error()) { + ALOGE("write view map failed %d", wView.error()); + work->result = wView.error(); + return; + } + + int16_t *output = reinterpret_cast(wView.data()); + uint8_t *input = const_cast(rView.data() + inOffset); + size_t outSize = decodeGSM(mGsm, output, outCapacity, input, inSize); + if (!outSize) { + ALOGE("encountered improper insize or outsize"); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + ALOGV("out buffer attr. 
size %zu", outSize); + work->worklets.front()->output.flags = work->input.flags; + work->worklets.front()->output.buffers.clear(); + work->worklets.front()->output.buffers.push_back(createLinearBuffer(block, 0, outSize)); + work->worklets.front()->output.ordinal = work->input.ordinal; + if (eos) { + mSignalledEos = true; + ALOGV("signalled EOS"); + } +} + +c2_status_t C2SoftGsmDec::drain( + uint32_t drainMode, + const std::shared_ptr &pool) { + (void) pool; + if (drainMode == NO_DRAIN) { + ALOGW("drain with NO_DRAIN: no-op"); + return C2_OK; + } + if (drainMode == DRAIN_CHAIN) { + ALOGW("DRAIN_CHAIN not supported"); + return C2_OMITTED; + } + + return C2_OK; +} + +class C2SoftGSMDecFactory : public C2ComponentFactory { +public: + C2SoftGSMDecFactory() : mHelper(std::static_pointer_cast( + GetCodec2PlatformComponentStore()->getParamReflector())) { + } + + virtual c2_status_t createComponent( + c2_node_id_t id, + std::shared_ptr* const component, + std::function deleter) override { + *component = std::shared_ptr( + new C2SoftGsmDec(COMPONENT_NAME, + id, + std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual c2_status_t createInterface( + c2_node_id_t id, + std::shared_ptr* const interface, + std::function deleter) override { + *interface = std::shared_ptr( + new SimpleInterface( + COMPONENT_NAME, id, std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual ~C2SoftGSMDecFactory() override = default; + +private: + std::shared_ptr mHelper; +}; + +} // namespace android + +extern "C" ::C2ComponentFactory* CreateCodec2Factory() { + ALOGV("in %s", __func__); + return new ::android::C2SoftGSMDecFactory(); +} + +extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) { + ALOGV("in %s", __func__); + delete factory; +} diff --git a/media/codec2/components/gsm/C2SoftGsmDec.h b/media/codec2/components/gsm/C2SoftGsmDec.h new file mode 100644 index 0000000000000000000000000000000000000000..2b209fe626ea8c2cdb8c68fa06aa0dc3777e0bfa --- /dev/null +++ b/media/codec2/components/gsm/C2SoftGsmDec.h @@ -0,0 +1,65 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_C2_SOFT_GSM_DEC_H_ +#define ANDROID_C2_SOFT_GSM_DEC_H_ + +#include + + +extern "C" { + #include "gsm.h" +} + +namespace android { + +#define FRGSM_IN_FRM_SZ 33 +#define FRGSM_IN_FRM_SZ_MINUS_1 32 +#define FRGSM_OUT_FRM_SZ 160 +#define MSGSM_IN_FRM_SZ (FRGSM_IN_FRM_SZ + FRGSM_IN_FRM_SZ_MINUS_1) +#define MSGSM_OUT_FRM_SZ (FRGSM_OUT_FRM_SZ * 2) + +struct C2SoftGsmDec : public SimpleC2Component { + class IntfImpl; + + C2SoftGsmDec(const char *name, c2_node_id_t id, + const std::shared_ptr &intfImpl); + virtual ~C2SoftGsmDec(); + + // From SimpleC2Component + c2_status_t onInit() override; + c2_status_t onStop() override; + void onReset() override; + void onRelease() override; + c2_status_t onFlush_sm() override; + void process( + const std::unique_ptr &work, + const std::shared_ptr &pool) override; + c2_status_t drain( + uint32_t drainMode, + const std::shared_ptr &pool) override; + private: + std::shared_ptr mIntf; + gsm mGsm; + bool mSignalledError; + bool mSignalledEos; + + C2_DO_NOT_COPY(C2SoftGsmDec); +}; + +} // namespace android + +#endif // ANDROID_C2_SOFT_GSM_DEC_H_ diff --git a/media/codec2/components/gsm/MODULE_LICENSE_APACHE2 b/media/codec2/components/gsm/MODULE_LICENSE_APACHE2 new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/media/codec2/components/gsm/NOTICE b/media/codec2/components/gsm/NOTICE new file mode 100644 index 0000000000000000000000000000000000000000..c5b1efa7aac764ae6d8da63476a2d5cec02a6a5d --- /dev/null +++ b/media/codec2/components/gsm/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2005-2008, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/media/codec2/components/hevc/Android.bp b/media/codec2/components/hevc/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..519de680f5475d818b5fd61eee584e4d05c80ae8 --- /dev/null +++ b/media/codec2/components/hevc/Android.bp @@ -0,0 +1,16 @@ +cc_library_shared { + name: "libstagefright_soft_c2hevcdec", + defaults: [ + "libstagefright_soft_c2-defaults", + "libstagefright_soft_c2_sanitize_signed-defaults", + ], + + srcs: ["C2SoftHevcDec.cpp"], + + static_libs: ["libhevcdec"], + + include_dirs: [ + "external/libhevc/decoder", + "external/libhevc/common", + ], +} diff --git a/media/codec2/components/hevc/C2SoftHevcDec.cpp b/media/codec2/components/hevc/C2SoftHevcDec.cpp new file mode 100644 index 0000000000000000000000000000000000000000..99892cef8585be633be08fea9799b6cac1d684cb --- /dev/null +++ b/media/codec2/components/hevc/C2SoftHevcDec.cpp @@ -0,0 +1,976 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "C2SoftHevcDec" +#include + +#include + +#include +#include +#include +#include + +#include "C2SoftHevcDec.h" +#include "ihevcd_cxa.h" + +namespace android { + +namespace { + +constexpr char COMPONENT_NAME[] = "c2.android.hevc.decoder"; + +} // namespace + +class C2SoftHevcDec::IntfImpl : public SimpleInterface::BaseParams { +public: + explicit IntfImpl(const std::shared_ptr &helper) + : SimpleInterface::BaseParams( + helper, + COMPONENT_NAME, + C2Component::KIND_DECODER, + C2Component::DOMAIN_VIDEO, + MEDIA_MIMETYPE_VIDEO_HEVC) { + noPrivateBuffers(); // TODO: account for our buffers here + noInputReferences(); + noOutputReferences(); + noInputLatency(); + noTimeStretch(); + + // TODO: output latency and reordering + + addParameter( + DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES) + .withConstValue(new C2ComponentAttributesSetting(C2Component::ATTRIB_IS_TEMPORAL)) + .build()); + + addParameter( + DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE) + .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240)) + .withFields({ + C2F(mSize, width).inRange(2, 4096, 2), + C2F(mSize, height).inRange(2, 4096, 2), + }) + .withSetter(SizeSetter) + .build()); + + addParameter( + DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL) + .withDefault(new C2StreamProfileLevelInfo::input(0u, + C2Config::PROFILE_HEVC_MAIN, C2Config::LEVEL_HEVC_MAIN_5_1)) + .withFields({ + C2F(mProfileLevel, profile).oneOf({ + C2Config::PROFILE_HEVC_MAIN, + C2Config::PROFILE_HEVC_MAIN_STILL}), + C2F(mProfileLevel, level).oneOf({ + C2Config::LEVEL_HEVC_MAIN_1, + C2Config::LEVEL_HEVC_MAIN_2, C2Config::LEVEL_HEVC_MAIN_2_1, + C2Config::LEVEL_HEVC_MAIN_3, C2Config::LEVEL_HEVC_MAIN_3_1, + C2Config::LEVEL_HEVC_MAIN_4, C2Config::LEVEL_HEVC_MAIN_4_1, + C2Config::LEVEL_HEVC_MAIN_5, C2Config::LEVEL_HEVC_MAIN_5_1, + C2Config::LEVEL_HEVC_MAIN_5_2, C2Config::LEVEL_HEVC_HIGH_4, + C2Config::LEVEL_HEVC_HIGH_4_1, C2Config::LEVEL_HEVC_HIGH_5, + C2Config::LEVEL_HEVC_HIGH_5_1, C2Config::LEVEL_HEVC_HIGH_5_2 + }) + }) + .withSetter(ProfileLevelSetter, mSize) + .build()); + + addParameter( + DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE) + .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240)) + .withFields({ + C2F(mSize, width).inRange(2, 4096, 2), + C2F(mSize, height).inRange(2, 4096, 2), + }) + .withSetter(MaxPictureSizeSetter, mSize) + .build()); + + addParameter( + DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE) + .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, 320 * 240 * 3 / 4)) + .withFields({ + C2F(mMaxInputSize, value).any(), + }) + .calculatedAs(MaxInputSizeSetter, mMaxSize) + .build()); + + C2ChromaOffsetStruct locations[1] = { C2ChromaOffsetStruct::ITU_YUV_420_0() }; + std::shared_ptr defaultColorInfo = + C2StreamColorInfo::output::AllocShared( + 1u, 0u, 8u /* bitDepth */, C2Color::YUV_420); + memcpy(defaultColorInfo->m.locations, locations, sizeof(locations)); + + defaultColorInfo = C2StreamColorInfo::output::AllocShared( + {C2ChromaOffsetStruct::ITU_YUV_420_0()}, 0u, 8u /* bitDepth */, + C2Color::YUV_420); + helper->addStructDescriptors(); + + addParameter( + DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO) + .withConstValue(defaultColorInfo) + .build()); + + addParameter( + DefineParam(mDefaultColorAspects, C2_PARAMKEY_DEFAULT_COLOR_ASPECTS) + .withDefault(new C2StreamColorAspectsTuning::output( + 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED, + C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED)) + 
.withFields({ + C2F(mDefaultColorAspects, range).inRange( + C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER), + C2F(mDefaultColorAspects, primaries).inRange( + C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER), + C2F(mDefaultColorAspects, transfer).inRange( + C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER), + C2F(mDefaultColorAspects, matrix).inRange( + C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER) + }) + .withSetter(DefaultColorAspectsSetter) + .build()); + + addParameter( + DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS) + .withDefault(new C2StreamColorAspectsInfo::input( + 0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED, + C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED)) + .withFields({ + C2F(mCodedColorAspects, range).inRange( + C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER), + C2F(mCodedColorAspects, primaries).inRange( + C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER), + C2F(mCodedColorAspects, transfer).inRange( + C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER), + C2F(mCodedColorAspects, matrix).inRange( + C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER) + }) + .withSetter(CodedColorAspectsSetter) + .build()); + + addParameter( + DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS) + .withDefault(new C2StreamColorAspectsInfo::output( + 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED, + C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED)) + .withFields({ + C2F(mColorAspects, range).inRange( + C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER), + C2F(mColorAspects, primaries).inRange( + C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER), + C2F(mColorAspects, transfer).inRange( + C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER), + C2F(mColorAspects, matrix).inRange( + C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER) + }) + .withSetter(ColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects) + .build()); + + // TODO: support more formats? + addParameter( + DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT) + .withConstValue(new C2StreamPixelFormatInfo::output( + 0u, HAL_PIXEL_FORMAT_YCBCR_420_888)) + .build()); + } + + static C2R SizeSetter(bool mayBlock, const C2P &oldMe, + C2P &me) { + (void)mayBlock; + C2R res = C2R::Ok(); + if (!me.F(me.v.width).supportsAtAll(me.v.width)) { + res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width))); + me.set().width = oldMe.v.width; + } + if (!me.F(me.v.height).supportsAtAll(me.v.height)) { + res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height))); + me.set().height = oldMe.v.height; + } + return res; + } + + static C2R MaxPictureSizeSetter(bool mayBlock, C2P &me, + const C2P &size) { + (void)mayBlock; + // TODO: get max width/height from the size's field helpers vs. 
hardcoding + me.set().width = c2_min(c2_max(me.v.width, size.v.width), 4096u); + me.set().height = c2_min(c2_max(me.v.height, size.v.height), 4096u); + return C2R::Ok(); + } + + static C2R MaxInputSizeSetter(bool mayBlock, C2P &me, + const C2P &maxSize) { + (void)mayBlock; + // assume compression ratio of 2 + me.set().value = (((maxSize.v.width + 63) / 64) * ((maxSize.v.height + 63) / 64) * 3072); + return C2R::Ok(); + } + + static C2R ProfileLevelSetter(bool mayBlock, C2P &me, + const C2P &size) { + (void)mayBlock; + (void)size; + (void)me; // TODO: validate + return C2R::Ok(); + } + + static C2R DefaultColorAspectsSetter(bool mayBlock, C2P &me) { + (void)mayBlock; + if (me.v.range > C2Color::RANGE_OTHER) { + me.set().range = C2Color::RANGE_OTHER; + } + if (me.v.primaries > C2Color::PRIMARIES_OTHER) { + me.set().primaries = C2Color::PRIMARIES_OTHER; + } + if (me.v.transfer > C2Color::TRANSFER_OTHER) { + me.set().transfer = C2Color::TRANSFER_OTHER; + } + if (me.v.matrix > C2Color::MATRIX_OTHER) { + me.set().matrix = C2Color::MATRIX_OTHER; + } + return C2R::Ok(); + } + + static C2R CodedColorAspectsSetter(bool mayBlock, C2P &me) { + (void)mayBlock; + if (me.v.range > C2Color::RANGE_OTHER) { + me.set().range = C2Color::RANGE_OTHER; + } + if (me.v.primaries > C2Color::PRIMARIES_OTHER) { + me.set().primaries = C2Color::PRIMARIES_OTHER; + } + if (me.v.transfer > C2Color::TRANSFER_OTHER) { + me.set().transfer = C2Color::TRANSFER_OTHER; + } + if (me.v.matrix > C2Color::MATRIX_OTHER) { + me.set().matrix = C2Color::MATRIX_OTHER; + } + return C2R::Ok(); + } + + static C2R ColorAspectsSetter(bool mayBlock, C2P &me, + const C2P &def, + const C2P &coded) { + (void)mayBlock; + // take default values for all unspecified fields, and coded values for specified ones + me.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range; + me.set().primaries = coded.v.primaries == PRIMARIES_UNSPECIFIED + ? def.v.primaries : coded.v.primaries; + me.set().transfer = coded.v.transfer == TRANSFER_UNSPECIFIED + ? def.v.transfer : coded.v.transfer; + me.set().matrix = coded.v.matrix == MATRIX_UNSPECIFIED ? def.v.matrix : coded.v.matrix; + return C2R::Ok(); + } + + std::shared_ptr getColorAspects_l() { + return mColorAspects; + } + +private: + std::shared_ptr mProfileLevel; + std::shared_ptr mSize; + std::shared_ptr mMaxSize; + std::shared_ptr mMaxInputSize; + std::shared_ptr mColorInfo; + std::shared_ptr mCodedColorAspects; + std::shared_ptr mDefaultColorAspects; + std::shared_ptr mColorAspects; + std::shared_ptr mPixelFormat; +}; + +static size_t getCpuCoreCount() { + long cpuCoreCount = 1; +#if defined(_SC_NPROCESSORS_ONLN) + cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN); +#else + // _SC_NPROC_ONLN must be defined... 
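MaxInputSizeSetter above sizes the compressed input buffer from the maximum picture size under an assumed 2:1 compression ratio: 3072 bytes per 64x64 block is exactly half of an uncompressed 64x64 YUV 4:2:0 block (64 * 64 * 3 / 2). A small sketch of that arithmetic with example resolutions follows; the numbers are illustrative only.

#include <cstdint>
#include <cstdio>

// 3072 = (64 * 64 * 3 / 2) / 2: half the size of an uncompressed 64x64
// YUV 4:2:0 block, i.e. the "compression ratio of 2" noted above.
static uint32_t maxInputSize(uint32_t width, uint32_t height) {
    return ((width + 63) / 64) * ((height + 63) / 64) * 3072;
}

int main() {
    // 1920x1088: 30 * 17 blocks * 3072 = 1,566,720 bytes, exactly half of the
    // 1920 * 1088 * 3 / 2 = 3,133,440 bytes a raw 4:2:0 frame would occupy.
    std::printf("%u\n", maxInputSize(1920, 1088));
    // 320x240 (the default picture size above): 5 * 4 * 3072 = 61,440 bytes.
    std::printf("%u\n", maxInputSize(320, 240));
    return 0;
}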
+ cpuCoreCount = sysconf(_SC_NPROC_ONLN); +#endif + CHECK(cpuCoreCount >= 1); + ALOGV("Number of CPU cores: %ld", cpuCoreCount); + return (size_t)cpuCoreCount; +} + +static void *ivd_aligned_malloc(void *ctxt, WORD32 alignment, WORD32 size) { + (void) ctxt; + return memalign(alignment, size); +} + +static void ivd_aligned_free(void *ctxt, void *mem) { + (void) ctxt; + free(mem); +} + +C2SoftHevcDec::C2SoftHevcDec( + const char *name, + c2_node_id_t id, + const std::shared_ptr &intfImpl) + : SimpleC2Component(std::make_shared>(name, id, intfImpl)), + mIntf(intfImpl), + mDecHandle(nullptr), + mOutBufferFlush(nullptr), + mIvColorformat(IV_YUV_420P), + mWidth(320), + mHeight(240), + mHeaderDecoded(false) { +} + +C2SoftHevcDec::~C2SoftHevcDec() { + onRelease(); +} + +c2_status_t C2SoftHevcDec::onInit() { + status_t err = initDecoder(); + return err == OK ? C2_OK : C2_CORRUPTED; +} + +c2_status_t C2SoftHevcDec::onStop() { + if (OK != resetDecoder()) return C2_CORRUPTED; + resetPlugin(); + return C2_OK; +} + +void C2SoftHevcDec::onReset() { + (void) onStop(); +} + +void C2SoftHevcDec::onRelease() { + (void) deleteDecoder(); + if (mOutBufferFlush) { + ivd_aligned_free(nullptr, mOutBufferFlush); + mOutBufferFlush = nullptr; + } + if (mOutBlock) { + mOutBlock.reset(); + } +} + +c2_status_t C2SoftHevcDec::onFlush_sm() { + if (OK != setFlushMode()) return C2_CORRUPTED; + + uint32_t displayStride = mStride; + uint32_t displayHeight = mHeight; + uint32_t bufferSize = displayStride * displayHeight * 3 / 2; + mOutBufferFlush = (uint8_t *)ivd_aligned_malloc(nullptr, 128, bufferSize); + if (!mOutBufferFlush) { + ALOGE("could not allocate tmp output buffer (for flush) of size %u ", bufferSize); + return C2_NO_MEMORY; + } + + while (true) { + ivd_video_decode_ip_t s_decode_ip; + ivd_video_decode_op_t s_decode_op; + + setDecodeArgs(&s_decode_ip, &s_decode_op, nullptr, nullptr, 0, 0, 0); + (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op); + if (0 == s_decode_op.u4_output_present) { + resetPlugin(); + break; + } + } + + if (mOutBufferFlush) { + ivd_aligned_free(nullptr, mOutBufferFlush); + mOutBufferFlush = nullptr; + } + + return C2_OK; +} + +status_t C2SoftHevcDec::createDecoder() { + ivdext_create_ip_t s_create_ip; + ivdext_create_op_t s_create_op; + + s_create_ip.s_ivd_create_ip_t.u4_size = sizeof(ivdext_create_ip_t); + s_create_ip.s_ivd_create_ip_t.e_cmd = IVD_CMD_CREATE; + s_create_ip.s_ivd_create_ip_t.u4_share_disp_buf = 0; + s_create_ip.s_ivd_create_ip_t.e_output_format = mIvColorformat; + s_create_ip.s_ivd_create_ip_t.pf_aligned_alloc = ivd_aligned_malloc; + s_create_ip.s_ivd_create_ip_t.pf_aligned_free = ivd_aligned_free; + s_create_ip.s_ivd_create_ip_t.pv_mem_ctxt = nullptr; + s_create_op.s_ivd_create_op_t.u4_size = sizeof(ivdext_create_op_t); + IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle, + &s_create_ip, + &s_create_op); + if (status != IV_SUCCESS) { + ALOGE("error in %s: 0x%x", __func__, + s_create_op.s_ivd_create_op_t.u4_error_code); + return UNKNOWN_ERROR; + } + mDecHandle = (iv_obj_t*)s_create_op.s_ivd_create_op_t.pv_handle; + mDecHandle->pv_fxns = (void *)ivdec_api_function; + mDecHandle->u4_size = sizeof(iv_obj_t); + + return OK; +} + +status_t C2SoftHevcDec::setNumCores() { + ivdext_ctl_set_num_cores_ip_t s_set_num_cores_ip; + ivdext_ctl_set_num_cores_op_t s_set_num_cores_op; + + s_set_num_cores_ip.u4_size = sizeof(ivdext_ctl_set_num_cores_ip_t); + s_set_num_cores_ip.e_cmd = IVD_CMD_VIDEO_CTL; + s_set_num_cores_ip.e_sub_cmd = IVDEXT_CMD_CTL_SET_NUM_CORES; + 
s_set_num_cores_ip.u4_num_cores = mNumCores; + s_set_num_cores_op.u4_size = sizeof(ivdext_ctl_set_num_cores_op_t); + IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle, + &s_set_num_cores_ip, + &s_set_num_cores_op); + if (IV_SUCCESS != status) { + ALOGD("error in %s: 0x%x", __func__, s_set_num_cores_op.u4_error_code); + return UNKNOWN_ERROR; + } + + return OK; +} + +status_t C2SoftHevcDec::setParams(size_t stride, IVD_VIDEO_DECODE_MODE_T dec_mode) { + ivd_ctl_set_config_ip_t s_set_dyn_params_ip; + ivd_ctl_set_config_op_t s_set_dyn_params_op; + + s_set_dyn_params_ip.u4_size = sizeof(ivd_ctl_set_config_ip_t); + s_set_dyn_params_ip.e_cmd = IVD_CMD_VIDEO_CTL; + s_set_dyn_params_ip.e_sub_cmd = IVD_CMD_CTL_SETPARAMS; + s_set_dyn_params_ip.u4_disp_wd = (UWORD32) stride; + s_set_dyn_params_ip.e_frm_skip_mode = IVD_SKIP_NONE; + s_set_dyn_params_ip.e_frm_out_mode = IVD_DISPLAY_FRAME_OUT; + s_set_dyn_params_ip.e_vid_dec_mode = dec_mode; + s_set_dyn_params_op.u4_size = sizeof(ivd_ctl_set_config_op_t); + IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle, + &s_set_dyn_params_ip, + &s_set_dyn_params_op); + if (status != IV_SUCCESS) { + ALOGE("error in %s: 0x%x", __func__, s_set_dyn_params_op.u4_error_code); + return UNKNOWN_ERROR; + } + + return OK; +} + +status_t C2SoftHevcDec::getVersion() { + ivd_ctl_getversioninfo_ip_t s_get_versioninfo_ip; + ivd_ctl_getversioninfo_op_t s_get_versioninfo_op; + UWORD8 au1_buf[512]; + + s_get_versioninfo_ip.u4_size = sizeof(ivd_ctl_getversioninfo_ip_t); + s_get_versioninfo_ip.e_cmd = IVD_CMD_VIDEO_CTL; + s_get_versioninfo_ip.e_sub_cmd = IVD_CMD_CTL_GETVERSION; + s_get_versioninfo_ip.pv_version_buffer = au1_buf; + s_get_versioninfo_ip.u4_version_buffer_size = sizeof(au1_buf); + s_get_versioninfo_op.u4_size = sizeof(ivd_ctl_getversioninfo_op_t); + IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle, + &s_get_versioninfo_ip, + &s_get_versioninfo_op); + if (status != IV_SUCCESS) { + ALOGD("error in %s: 0x%x", __func__, + s_get_versioninfo_op.u4_error_code); + } else { + ALOGV("ittiam decoder version number: %s", + (char *) s_get_versioninfo_ip.pv_version_buffer); + } + + return OK; +} + +status_t C2SoftHevcDec::initDecoder() { + if (OK != createDecoder()) return UNKNOWN_ERROR; + mNumCores = MIN(getCpuCoreCount(), MAX_NUM_CORES); + mStride = ALIGN64(mWidth); + mSignalledError = false; + resetPlugin(); + (void) setNumCores(); + if (OK != setParams(mStride, IVD_DECODE_FRAME)) return UNKNOWN_ERROR; + (void) getVersion(); + + return OK; +} + +bool C2SoftHevcDec::setDecodeArgs(ivd_video_decode_ip_t *ps_decode_ip, + ivd_video_decode_op_t *ps_decode_op, + C2ReadView *inBuffer, + C2GraphicView *outBuffer, + size_t inOffset, + size_t inSize, + uint32_t tsMarker) { + uint32_t displayStride = mStride; + uint32_t displayHeight = mHeight; + size_t lumaSize = displayStride * displayHeight; + size_t chromaSize = lumaSize >> 2; + + ps_decode_ip->u4_size = sizeof(ivd_video_decode_ip_t); + ps_decode_ip->e_cmd = IVD_CMD_VIDEO_DECODE; + if (inBuffer) { + ps_decode_ip->u4_ts = tsMarker; + ps_decode_ip->pv_stream_buffer = const_cast(inBuffer->data() + inOffset); + ps_decode_ip->u4_num_Bytes = inSize; + } else { + ps_decode_ip->u4_ts = 0; + ps_decode_ip->pv_stream_buffer = nullptr; + ps_decode_ip->u4_num_Bytes = 0; + } + ps_decode_ip->s_out_buffer.u4_min_out_buf_size[0] = lumaSize; + ps_decode_ip->s_out_buffer.u4_min_out_buf_size[1] = chromaSize; + ps_decode_ip->s_out_buffer.u4_min_out_buf_size[2] = chromaSize; + if (outBuffer) { + if (outBuffer->width() < displayStride 
|| outBuffer->height() < displayHeight) { + ALOGE("Output buffer too small: provided (%dx%d) required (%ux%u)", + outBuffer->width(), outBuffer->height(), displayStride, displayHeight); + return false; + } + ps_decode_ip->s_out_buffer.pu1_bufs[0] = outBuffer->data()[C2PlanarLayout::PLANE_Y]; + ps_decode_ip->s_out_buffer.pu1_bufs[1] = outBuffer->data()[C2PlanarLayout::PLANE_U]; + ps_decode_ip->s_out_buffer.pu1_bufs[2] = outBuffer->data()[C2PlanarLayout::PLANE_V]; + } else { + ps_decode_ip->s_out_buffer.pu1_bufs[0] = mOutBufferFlush; + ps_decode_ip->s_out_buffer.pu1_bufs[1] = mOutBufferFlush + lumaSize; + ps_decode_ip->s_out_buffer.pu1_bufs[2] = mOutBufferFlush + lumaSize + chromaSize; + } + ps_decode_ip->s_out_buffer.u4_num_bufs = 3; + ps_decode_op->u4_size = sizeof(ivd_video_decode_op_t); + ps_decode_op->u4_output_present = 0; + + return true; +} + +bool C2SoftHevcDec::getVuiParams() { + ivdext_ctl_get_vui_params_ip_t s_get_vui_params_ip; + ivdext_ctl_get_vui_params_op_t s_get_vui_params_op; + + s_get_vui_params_ip.u4_size = sizeof(ivdext_ctl_get_vui_params_ip_t); + s_get_vui_params_ip.e_cmd = IVD_CMD_VIDEO_CTL; + s_get_vui_params_ip.e_sub_cmd = + (IVD_CONTROL_API_COMMAND_TYPE_T) IHEVCD_CXA_CMD_CTL_GET_VUI_PARAMS; + s_get_vui_params_op.u4_size = sizeof(ivdext_ctl_get_vui_params_op_t); + IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle, + &s_get_vui_params_ip, + &s_get_vui_params_op); + if (status != IV_SUCCESS) { + ALOGD("error in %s: 0x%x", __func__, s_get_vui_params_op.u4_error_code); + return false; + } + + VuiColorAspects vuiColorAspects; + vuiColorAspects.primaries = s_get_vui_params_op.u1_colour_primaries; + vuiColorAspects.transfer = s_get_vui_params_op.u1_transfer_characteristics; + vuiColorAspects.coeffs = s_get_vui_params_op.u1_matrix_coefficients; + vuiColorAspects.fullRange = s_get_vui_params_op.u1_video_full_range_flag; + + // convert vui aspects to C2 values if changed + if (!(vuiColorAspects == mBitstreamColorAspects)) { + mBitstreamColorAspects = vuiColorAspects; + ColorAspects sfAspects; + C2StreamColorAspectsInfo::input codedAspects = { 0u }; + ColorUtils::convertIsoColorAspectsToCodecAspects( + vuiColorAspects.primaries, vuiColorAspects.transfer, vuiColorAspects.coeffs, + vuiColorAspects.fullRange, sfAspects); + if (!C2Mapper::map(sfAspects.mPrimaries, &codedAspects.primaries)) { + codedAspects.primaries = C2Color::PRIMARIES_UNSPECIFIED; + } + if (!C2Mapper::map(sfAspects.mRange, &codedAspects.range)) { + codedAspects.range = C2Color::RANGE_UNSPECIFIED; + } + if (!C2Mapper::map(sfAspects.mMatrixCoeffs, &codedAspects.matrix)) { + codedAspects.matrix = C2Color::MATRIX_UNSPECIFIED; + } + if (!C2Mapper::map(sfAspects.mTransfer, &codedAspects.transfer)) { + codedAspects.transfer = C2Color::TRANSFER_UNSPECIFIED; + } + std::vector> failures; + (void)mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures); + } + return true; +} + +status_t C2SoftHevcDec::setFlushMode() { + ivd_ctl_flush_ip_t s_set_flush_ip; + ivd_ctl_flush_op_t s_set_flush_op; + + s_set_flush_ip.u4_size = sizeof(ivd_ctl_flush_ip_t); + s_set_flush_ip.e_cmd = IVD_CMD_VIDEO_CTL; + s_set_flush_ip.e_sub_cmd = IVD_CMD_CTL_FLUSH; + s_set_flush_op.u4_size = sizeof(ivd_ctl_flush_op_t); + IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle, + &s_set_flush_ip, + &s_set_flush_op); + if (status != IV_SUCCESS) { + ALOGE("error in %s: 0x%x", __func__, s_set_flush_op.u4_error_code); + return UNKNOWN_ERROR; + } + + return OK; +} + +status_t C2SoftHevcDec::resetDecoder() { + ivd_ctl_reset_ip_t s_reset_ip; + 
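getVuiParams() above converts the bitstream's VUI colour description into a C2StreamColorAspectsInfo::input and feeds it back through config(); ColorAspectsSetter then keeps every coded field and falls back to the session defaults only where the stream said "unspecified". Below is a minimal standalone sketch of that merge rule, using simplified stand-in types rather than the real C2 parameter structs.

#include <cassert>

enum Range     { RANGE_UNSPECIFIED, RANGE_FULL, RANGE_LIMITED };
enum Primaries { PRIMARIES_UNSPECIFIED, PRIMARIES_BT709, PRIMARIES_BT601 };

struct Aspects { Range range; Primaries primaries; };

// Take the default for every field the bitstream left unspecified,
// and the coded (VUI-derived) value otherwise.
static Aspects mergeAspects(const Aspects& def, const Aspects& coded) {
    Aspects out;
    out.range = (coded.range == RANGE_UNSPECIFIED) ? def.range : coded.range;
    out.primaries = (coded.primaries == PRIMARIES_UNSPECIFIED) ? def.primaries
                                                                : coded.primaries;
    return out;
}

int main() {
    Aspects defaults{RANGE_LIMITED, PRIMARIES_BT709};
    Aspects coded{RANGE_UNSPECIFIED, PRIMARIES_BT601};  // VUI only named primaries
    Aspects out = mergeAspects(defaults, coded);
    assert(out.range == RANGE_LIMITED);        // fell back to the default
    assert(out.primaries == PRIMARIES_BT601);  // coded value kept
    return 0;
}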
ivd_ctl_reset_op_t s_reset_op; + + s_reset_ip.u4_size = sizeof(ivd_ctl_reset_ip_t); + s_reset_ip.e_cmd = IVD_CMD_VIDEO_CTL; + s_reset_ip.e_sub_cmd = IVD_CMD_CTL_RESET; + s_reset_op.u4_size = sizeof(ivd_ctl_reset_op_t); + IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle, + &s_reset_ip, + &s_reset_op); + if (IV_SUCCESS != status) { + ALOGE("error in %s: 0x%x", __func__, s_reset_op.u4_error_code); + return UNKNOWN_ERROR; + } + mStride = 0; + (void) setNumCores(); + mSignalledError = false; + mHeaderDecoded = false; + return OK; +} + +void C2SoftHevcDec::resetPlugin() { + mSignalledOutputEos = false; + gettimeofday(&mTimeStart, nullptr); + gettimeofday(&mTimeEnd, nullptr); +} + +status_t C2SoftHevcDec::deleteDecoder() { + if (mDecHandle) { + ivdext_delete_ip_t s_delete_ip; + ivdext_delete_op_t s_delete_op; + + s_delete_ip.s_ivd_delete_ip_t.u4_size = sizeof(ivdext_delete_ip_t); + s_delete_ip.s_ivd_delete_ip_t.e_cmd = IVD_CMD_DELETE; + s_delete_op.s_ivd_delete_op_t.u4_size = sizeof(ivdext_delete_op_t); + IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle, + &s_delete_ip, + &s_delete_op); + if (status != IV_SUCCESS) { + ALOGE("error in %s: 0x%x", __func__, + s_delete_op.s_ivd_delete_op_t.u4_error_code); + return UNKNOWN_ERROR; + } + mDecHandle = nullptr; + } + + return OK; +} + +void fillEmptyWork(const std::unique_ptr &work) { + uint32_t flags = 0; + if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) { + flags |= C2FrameData::FLAG_END_OF_STREAM; + ALOGV("signalling eos"); + } + work->worklets.front()->output.flags = (C2FrameData::flags_t)flags; + work->worklets.front()->output.buffers.clear(); + work->worklets.front()->output.ordinal = work->input.ordinal; + work->workletsProcessed = 1u; +} + +void C2SoftHevcDec::finishWork(uint64_t index, const std::unique_ptr &work) { + std::shared_ptr buffer = createGraphicBuffer(std::move(mOutBlock), + C2Rect(mWidth, mHeight)); + mOutBlock = nullptr; + { + IntfImpl::Lock lock = mIntf->lock(); + buffer->setInfo(mIntf->getColorAspects_l()); + } + + auto fillWork = [buffer](const std::unique_ptr &work) { + work->worklets.front()->output.flags = (C2FrameData::flags_t)0; + work->worklets.front()->output.buffers.clear(); + work->worklets.front()->output.buffers.push_back(buffer); + work->worklets.front()->output.ordinal = work->input.ordinal; + work->workletsProcessed = 1u; + }; + if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) { + fillWork(work); + } else { + finish(index, fillWork); + } +} + +c2_status_t C2SoftHevcDec::ensureDecoderState(const std::shared_ptr &pool) { + if (!mDecHandle) { + ALOGE("not supposed to be here, invalid decoder context"); + return C2_CORRUPTED; + } + if (mStride != ALIGN64(mWidth)) { + mStride = ALIGN64(mWidth); + if (OK != setParams(mStride, IVD_DECODE_FRAME)) return C2_CORRUPTED; + } + if (mOutBlock && + (mOutBlock->width() != mStride || mOutBlock->height() != mHeight)) { + mOutBlock.reset(); + } + if (!mOutBlock) { + uint32_t format = HAL_PIXEL_FORMAT_YV12; + C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE }; + c2_status_t err = pool->fetchGraphicBlock(mStride, mHeight, format, usage, &mOutBlock); + if (err != C2_OK) { + ALOGE("fetchGraphicBlock for Output failed with status %d", err); + return err; + } + ALOGV("provided (%dx%d) required (%dx%d)", + mOutBlock->width(), mOutBlock->height(), mStride, mHeight); + } + + return C2_OK; +} + +// TODO: can overall error checking be improved? 
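ensureDecoderState() above re-fetches the output graphic block whenever the 64-byte-aligned stride implied by the current width, or the height, no longer matches the block it holds. The sketch below only illustrates that alignment and the resulting plane sizes used by setDecodeArgs(); align64() mirrors the ALIGN64 macro from C2SoftHevcDec.h, and the resolution is an arbitrary example.

#include <cstddef>
#include <cstdint>

// Mirrors ALIGN64 from C2SoftHevcDec.h: round a pixel count up to a multiple of 64.
static constexpr uint32_t align64(uint32_t x) { return ((x + 63) >> 6) << 6; }

int main() {
    constexpr uint32_t width  = 1920;  // example resolution
    constexpr uint32_t height = 1080;
    constexpr uint32_t stride = align64(width);
    static_assert(stride == 1920, "1920 is already a multiple of 64");
    static_assert(align64(1081) == 1088, "non-multiples round up to the next 64");

    // Plane sizes as computed in setDecodeArgs(): stride * height for luma,
    // one quarter of that for each chroma plane (YUV 4:2:0).
    constexpr size_t luma   = static_cast<size_t>(stride) * height;
    constexpr size_t chroma = luma >> 2;
    static_assert(luma == 2073600 && chroma == 518400, "plane sizes for 1920x1080");
    return 0;
}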
+// TODO: allow configuration of color format and usage for graphic buffers instead +// of hard coding them to HAL_PIXEL_FORMAT_YV12 +// TODO: pass coloraspects information to surface +// TODO: test support for dynamic change in resolution +// TODO: verify if the decoder sent back all frames +void C2SoftHevcDec::process( + const std::unique_ptr &work, + const std::shared_ptr &pool) { + // Initialize output work + work->result = C2_OK; + work->workletsProcessed = 0u; + work->worklets.front()->output.configUpdate.clear(); + work->worklets.front()->output.flags = work->input.flags; + + if (mSignalledError || mSignalledOutputEos) { + work->result = C2_BAD_VALUE; + return; + } + + size_t inOffset = 0u; + size_t inSize = 0u; + uint32_t workIndex = work->input.ordinal.frameIndex.peeku() & 0xFFFFFFFF; + C2ReadView rView = mDummyReadView; + if (!work->input.buffers.empty()) { + rView = work->input.buffers[0]->data().linearBlocks().front().map().get(); + inSize = rView.capacity(); + if (inSize && rView.error()) { + ALOGE("read view map failed %d", rView.error()); + work->result = rView.error(); + return; + } + } + bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0); + bool hasPicture = false; + + ALOGV("in buffer attr. size %zu timestamp %d frameindex %d, flags %x", + inSize, (int)work->input.ordinal.timestamp.peeku(), + (int)work->input.ordinal.frameIndex.peeku(), work->input.flags); + size_t inPos = 0; + while (inPos < inSize) { + if (C2_OK != ensureDecoderState(pool)) { + mSignalledError = true; + work->workletsProcessed = 1u; + work->result = C2_CORRUPTED; + return; + } + C2GraphicView wView = mOutBlock->map().get(); + if (wView.error()) { + ALOGE("graphic view map failed %d", wView.error()); + work->result = wView.error(); + return; + } + ivd_video_decode_ip_t s_decode_ip; + ivd_video_decode_op_t s_decode_op; + if (!setDecodeArgs(&s_decode_ip, &s_decode_op, &rView, &wView, + inOffset + inPos, inSize - inPos, workIndex)) { + mSignalledError = true; + work->workletsProcessed = 1u; + work->result = C2_CORRUPTED; + return; + } + + if (false == mHeaderDecoded) { + /* Decode header and get dimensions */ + setParams(mStride, IVD_DECODE_HEADER); + } + WORD32 delay; + GETTIME(&mTimeStart, nullptr); + TIME_DIFF(mTimeEnd, mTimeStart, delay); + (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op); + WORD32 decodeTime; + GETTIME(&mTimeEnd, nullptr); + TIME_DIFF(mTimeStart, mTimeEnd, decodeTime); + ALOGV("decodeTime=%6d delay=%6d numBytes=%6d", decodeTime, delay, + s_decode_op.u4_num_bytes_consumed); + if (IVD_MEM_ALLOC_FAILED == (s_decode_op.u4_error_code & 0xFF)) { + ALOGE("allocation failure in decoder"); + mSignalledError = true; + work->workletsProcessed = 1u; + work->result = C2_CORRUPTED; + return; + } else if (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED == (s_decode_op.u4_error_code & 0xFF)) { + ALOGE("unsupported resolution : %dx%d", mWidth, mHeight); + mSignalledError = true; + work->workletsProcessed = 1u; + work->result = C2_CORRUPTED; + return; + } else if (IVD_RES_CHANGED == (s_decode_op.u4_error_code & 0xFF)) { + ALOGV("resolution changed"); + drainInternal(DRAIN_COMPONENT_NO_EOS, pool, work); + resetDecoder(); + resetPlugin(); + work->workletsProcessed = 0u; + + /* Decode header and get new dimensions */ + setParams(mStride, IVD_DECODE_HEADER); + (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op); + } + if (0 < s_decode_op.u4_pic_wd && 0 < s_decode_op.u4_pic_ht) { + if (mHeaderDecoded == false) { + mHeaderDecoded = true; + 
setParams(ALIGN64(s_decode_op.u4_pic_wd), IVD_DECODE_FRAME); + } + if (s_decode_op.u4_pic_wd != mWidth || s_decode_op.u4_pic_ht != mHeight) { + mWidth = s_decode_op.u4_pic_wd; + mHeight = s_decode_op.u4_pic_ht; + CHECK_EQ(0u, s_decode_op.u4_output_present); + + C2VideoSizeStreamInfo::output size(0u, mWidth, mHeight); + std::vector> failures; + c2_status_t err = + mIntf->config({&size}, C2_MAY_BLOCK, &failures); + if (err == OK) { + work->worklets.front()->output.configUpdate.push_back( + C2Param::Copy(size)); + } else { + ALOGE("Cannot set width and height"); + mSignalledError = true; + work->workletsProcessed = 1u; + work->result = C2_CORRUPTED; + return; + } + continue; + } + } + (void) getVuiParams(); + hasPicture |= (1 == s_decode_op.u4_frame_decoded_flag); + if (s_decode_op.u4_output_present) { + finishWork(s_decode_op.u4_ts, work); + } + if (0 == s_decode_op.u4_num_bytes_consumed) { + ALOGD("Bytes consumed is zero. Ignoring remaining bytes"); + break; + } + inPos += s_decode_op.u4_num_bytes_consumed; + if (hasPicture && (inSize - inPos)) { + ALOGD("decoded frame in current access nal, ignoring further trailing bytes %d", + (int)inSize - (int)inPos); + break; + } + } + + if (eos) { + drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work); + mSignalledOutputEos = true; + } else if (!hasPicture) { + fillEmptyWork(work); + } +} + +c2_status_t C2SoftHevcDec::drainInternal( + uint32_t drainMode, + const std::shared_ptr &pool, + const std::unique_ptr &work) { + if (drainMode == NO_DRAIN) { + ALOGW("drain with NO_DRAIN: no-op"); + return C2_OK; + } + if (drainMode == DRAIN_CHAIN) { + ALOGW("DRAIN_CHAIN not supported"); + return C2_OMITTED; + } + + if (OK != setFlushMode()) return C2_CORRUPTED; + while (true) { + if (C2_OK != ensureDecoderState(pool)) { + mSignalledError = true; + work->workletsProcessed = 1u; + work->result = C2_CORRUPTED; + return C2_CORRUPTED; + } + C2GraphicView wView = mOutBlock->map().get(); + if (wView.error()) { + ALOGE("graphic view map failed %d", wView.error()); + return C2_CORRUPTED; + } + ivd_video_decode_ip_t s_decode_ip; + ivd_video_decode_op_t s_decode_op; + if (!setDecodeArgs(&s_decode_ip, &s_decode_op, nullptr, &wView, 0, 0, 0)) { + mSignalledError = true; + work->workletsProcessed = 1u; + return C2_CORRUPTED; + } + (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op); + if (s_decode_op.u4_output_present) { + finishWork(s_decode_op.u4_ts, work); + } else { + fillEmptyWork(work); + break; + } + } + + return C2_OK; +} + +c2_status_t C2SoftHevcDec::drain( + uint32_t drainMode, + const std::shared_ptr &pool) { + return drainInternal(drainMode, pool, nullptr); +} + +class C2SoftHevcDecFactory : public C2ComponentFactory { +public: + C2SoftHevcDecFactory() : mHelper(std::static_pointer_cast( + GetCodec2PlatformComponentStore()->getParamReflector())) { + } + + virtual c2_status_t createComponent( + c2_node_id_t id, + std::shared_ptr* const component, + std::function deleter) override { + *component = std::shared_ptr( + new C2SoftHevcDec(COMPONENT_NAME, + id, + std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual c2_status_t createInterface( + c2_node_id_t id, + std::shared_ptr* const interface, + std::function deleter) override { + *interface = std::shared_ptr( + new SimpleInterface( + COMPONENT_NAME, id, std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual ~C2SoftHevcDecFactory() override = default; + +private: + std::shared_ptr mHelper; +}; + +} // namespace android + +extern "C" ::C2ComponentFactory* 
CreateCodec2Factory() { + ALOGV("in %s", __func__); + return new ::android::C2SoftHevcDecFactory(); +} + +extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) { + ALOGV("in %s", __func__); + delete factory; +} diff --git a/media/codec2/components/hevc/C2SoftHevcDec.h b/media/codec2/components/hevc/C2SoftHevcDec.h new file mode 100644 index 0000000000000000000000000000000000000000..75111fc271b7ceff2386f1ad17f3b4442face4ba --- /dev/null +++ b/media/codec2/components/hevc/C2SoftHevcDec.h @@ -0,0 +1,152 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_C2_SOFT_HEVC_DEC_H_ +#define ANDROID_C2_SOFT_HEVC_DEC_H_ + +#include + +#include + +#include "ihevc_typedefs.h" +#include "iv.h" +#include "ivd.h" + +namespace android { + +#define ivdec_api_function ihevcd_cxa_api_function +#define ivdext_create_ip_t ihevcd_cxa_create_ip_t +#define ivdext_create_op_t ihevcd_cxa_create_op_t +#define ivdext_delete_ip_t ihevcd_cxa_delete_ip_t +#define ivdext_delete_op_t ihevcd_cxa_delete_op_t +#define ivdext_ctl_set_num_cores_ip_t ihevcd_cxa_ctl_set_num_cores_ip_t +#define ivdext_ctl_set_num_cores_op_t ihevcd_cxa_ctl_set_num_cores_op_t +#define ivdext_ctl_get_vui_params_ip_t ihevcd_cxa_ctl_get_vui_params_ip_t +#define ivdext_ctl_get_vui_params_op_t ihevcd_cxa_ctl_get_vui_params_op_t +#define ALIGN64(x) ((((x) + 63) >> 6) << 6) +#define MAX_NUM_CORES 4 +#define IVDEXT_CMD_CTL_SET_NUM_CORES \ + (IVD_CONTROL_API_COMMAND_TYPE_T)IHEVCD_CXA_CMD_CTL_SET_NUM_CORES +#define MIN(a, b) (((a) < (b)) ? (a) : (b)) +#define GETTIME(a, b) gettimeofday(a, b); +#define TIME_DIFF(start, end, diff) \ + diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \ + ((end).tv_usec - (start).tv_usec); + + +struct C2SoftHevcDec : public SimpleC2Component { + class IntfImpl; + + C2SoftHevcDec(const char* name, c2_node_id_t id, + const std::shared_ptr& intfImpl); + virtual ~C2SoftHevcDec(); + + // From SimpleC2Component + c2_status_t onInit() override; + c2_status_t onStop() override; + void onReset() override; + void onRelease() override; + c2_status_t onFlush_sm() override; + void process( + const std::unique_ptr &work, + const std::shared_ptr &pool) override; + c2_status_t drain( + uint32_t drainMode, + const std::shared_ptr &pool) override; + private: + status_t createDecoder(); + status_t setNumCores(); + status_t setParams(size_t stride, IVD_VIDEO_DECODE_MODE_T dec_mode); + status_t getVersion(); + status_t initDecoder(); + bool setDecodeArgs(ivd_video_decode_ip_t *ps_decode_ip, + ivd_video_decode_op_t *ps_decode_op, + C2ReadView *inBuffer, + C2GraphicView *outBuffer, + size_t inOffset, + size_t inSize, + uint32_t tsMarker); + bool getVuiParams(); + // TODO:This is not the right place for colorAspects functions. 
These should + // be part of c2-vndk so that they can be accessed by all video plugins + // until then, make them feel at home + bool colorAspectsDiffer(const ColorAspects &a, const ColorAspects &b); + void updateFinalColorAspects( + const ColorAspects &otherAspects, const ColorAspects &preferredAspects); + status_t handleColorAspectsChange(); + c2_status_t ensureDecoderState(const std::shared_ptr &pool); + void finishWork(uint64_t index, const std::unique_ptr &work); + status_t setFlushMode(); + c2_status_t drainInternal( + uint32_t drainMode, + const std::shared_ptr &pool, + const std::unique_ptr &work); + status_t resetDecoder(); + void resetPlugin(); + status_t deleteDecoder(); + + // TODO:This is not the right place for this enum. These should + // be part of c2-vndk so that they can be accessed by all video plugins + // until then, make them feel at home + enum { + kNotSupported, + kPreferBitstream, + kPreferContainer, + }; + + std::shared_ptr mIntf; + iv_obj_t *mDecHandle; + std::shared_ptr mOutBlock; + uint8_t *mOutBufferFlush; + + size_t mNumCores; + IV_COLOR_FORMAT_T mIvColorformat; + + uint32_t mWidth; + uint32_t mHeight; + uint32_t mStride; + bool mSignalledOutputEos; + bool mSignalledError; + bool mHeaderDecoded; + + // Color aspects. These are ISO values and are meant to detect changes in aspects to avoid + // converting them to C2 values for each frame + struct VuiColorAspects { + uint8_t primaries; + uint8_t transfer; + uint8_t coeffs; + uint8_t fullRange; + + // default color aspects + VuiColorAspects() + : primaries(2), transfer(2), coeffs(2), fullRange(0) { } + + bool operator==(const VuiColorAspects &o) { + return primaries == o.primaries && transfer == o.transfer && coeffs == o.coeffs + && fullRange == o.fullRange; + } + } mBitstreamColorAspects; + + // profile + struct timeval mTimeStart; + struct timeval mTimeEnd; + + C2_DO_NOT_COPY(C2SoftHevcDec); +}; + +} // namespace android + +#endif // ANDROID_C2_SOFT_HEVC_DEC_H_ diff --git a/media/codec2/components/mp3/Android.bp b/media/codec2/components/mp3/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..6e013b8d203952a51106f301321421146fdc4d7d --- /dev/null +++ b/media/codec2/components/mp3/Android.bp @@ -0,0 +1,11 @@ +cc_library_shared { + name: "libstagefright_soft_c2mp3dec", + defaults: [ + "libstagefright_soft_c2-defaults", + "libstagefright_soft_c2_sanitize_all-defaults", + ], + + srcs: ["C2SoftMp3Dec.cpp"], + + static_libs: ["libstagefright_mp3dec"], +} diff --git a/media/codec2/components/mp3/C2SoftMp3Dec.cpp b/media/codec2/components/mp3/C2SoftMp3Dec.cpp new file mode 100644 index 0000000000000000000000000000000000000000..c8b8397f3482a4fa8c5f7fcc39589defe609427a --- /dev/null +++ b/media/codec2/components/mp3/C2SoftMp3Dec.cpp @@ -0,0 +1,558 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "C2SoftMp3Dec" +#include + +#include + +#include + +#include +#include + +#include "C2SoftMp3Dec.h" +#include "pvmp3decoder_api.h" + +namespace android { + +constexpr char COMPONENT_NAME[] = "c2.android.mp3.decoder"; + +class C2SoftMP3::IntfImpl : public C2InterfaceHelper { +public: + explicit IntfImpl(const std::shared_ptr &helper) + : C2InterfaceHelper(helper) { + + setDerivedInstance(this); + + addParameter( + DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING) + .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatCompressed)) + .build()); + + addParameter( + DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING) + .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatAudio)) + .build()); + + addParameter( + DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING) + .withConstValue(AllocSharedString( + MEDIA_MIMETYPE_AUDIO_MPEG)) + .build()); + + addParameter( + DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING) + .withConstValue(AllocSharedString( + MEDIA_MIMETYPE_AUDIO_RAW)) + .build()); + + addParameter( + DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING) + .withDefault(new C2StreamSampleRateInfo::output(0u, 44100)) + .withFields({C2F(mSampleRate, value).oneOf({8000, 11025, 12000, 16000, + 22050, 24000, 32000, 44100, 48000})}) + .withSetter((Setter::StrictValueWithNoDeps)) + .build()); + + addParameter( + DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING) + .withDefault(new C2StreamChannelCountInfo::output(0u, 2)) + .withFields({C2F(mChannelCount, value).inRange(1, 2)}) + .withSetter(Setter::StrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING) + .withDefault(new C2BitrateTuning::input(0u, 64000)) + .withFields({C2F(mBitrate, value).inRange(8000, 320000)}) + .withSetter(Setter::NonStrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE) + .withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 8192)) + .build()); + } + +private: + std::shared_ptr mInputFormat; + std::shared_ptr mOutputFormat; + std::shared_ptr mInputMediaType; + std::shared_ptr mOutputMediaType; + std::shared_ptr mSampleRate; + std::shared_ptr mChannelCount; + std::shared_ptr mBitrate; + std::shared_ptr mInputMaxBufSize; +}; + +C2SoftMP3::C2SoftMP3(const char *name, c2_node_id_t id, + const std::shared_ptr &intfImpl) + : SimpleC2Component(std::make_shared>(name, id, intfImpl)), + mIntf(intfImpl), + mConfig(nullptr), + mDecoderBuf(nullptr) { +} + +C2SoftMP3::~C2SoftMP3() { + onRelease(); +} + +c2_status_t C2SoftMP3::onInit() { + status_t err = initDecoder(); + return err == OK ? C2_OK : C2_NO_MEMORY; +} + +c2_status_t C2SoftMP3::onStop() { + // Make sure that the next buffer output does not still + // depend on fragments from the last one decoded. 
+ pvmp3_InitDecoder(mConfig, mDecoderBuf); + mSignalledError = false; + mIsFirst = true; + mSignalledOutputEos = false; + mAnchorTimeStamp = 0; + mProcessedSamples = 0; + + return C2_OK; +} + +void C2SoftMP3::onReset() { + (void)onStop(); +} + +void C2SoftMP3::onRelease() { + mGaplessBytes = false; + if (mDecoderBuf) { + free(mDecoderBuf); + mDecoderBuf = nullptr; + } + + if (mConfig) { + delete mConfig; + mConfig = nullptr; + } +} + +status_t C2SoftMP3::initDecoder() { + mConfig = new tPVMP3DecoderExternal{}; + if (!mConfig) return NO_MEMORY; + mConfig->equalizerType = flat; + mConfig->crcEnabled = false; + + size_t memRequirements = pvmp3_decoderMemRequirements(); + mDecoderBuf = malloc(memRequirements); + if (!mDecoderBuf) return NO_MEMORY; + + pvmp3_InitDecoder(mConfig, mDecoderBuf); + + mIsFirst = true; + mGaplessBytes = false; + mSignalledError = false; + mSignalledOutputEos = false; + mAnchorTimeStamp = 0; + mProcessedSamples = 0; + + return OK; +} + +/* The below code is borrowed from ./test/mp3reader.cpp */ +static bool parseMp3Header(uint32_t header, size_t *frame_size, + uint32_t *out_sampling_rate = nullptr, + uint32_t *out_channels = nullptr, + uint32_t *out_bitrate = nullptr, + uint32_t *out_num_samples = nullptr) { + *frame_size = 0; + if (out_sampling_rate) *out_sampling_rate = 0; + if (out_channels) *out_channels = 0; + if (out_bitrate) *out_bitrate = 0; + if (out_num_samples) *out_num_samples = 1152; + + if ((header & 0xffe00000) != 0xffe00000) return false; + + unsigned version = (header >> 19) & 3; + if (version == 0x01) return false; + + unsigned layer = (header >> 17) & 3; + if (layer == 0x00) return false; + + unsigned bitrate_index = (header >> 12) & 0x0f; + if (bitrate_index == 0 || bitrate_index == 0x0f) return false; + + unsigned sampling_rate_index = (header >> 10) & 3; + if (sampling_rate_index == 3) return false; + + static const int kSamplingRateV1[] = { 44100, 48000, 32000 }; + int sampling_rate = kSamplingRateV1[sampling_rate_index]; + if (version == 2 /* V2 */) { + sampling_rate /= 2; + } else if (version == 0 /* V2.5 */) { + sampling_rate /= 4; + } + + unsigned padding = (header >> 9) & 1; + + if (layer == 3) { // layer I + static const int kBitrateV1[] = + { + 32, 64, 96, 128, 160, 192, 224, 256, 288, 320, 352, 384, 416, 448 + }; + static const int kBitrateV2[] = + { + 32, 48, 56, 64, 80, 96, 112, 128, 144, 160, 176, 192, 224, 256 + }; + + int bitrate = (version == 3 /* V1 */) ? kBitrateV1[bitrate_index - 1] : + kBitrateV2[bitrate_index - 1]; + + if (out_bitrate) { + *out_bitrate = bitrate; + } + *frame_size = (12000 * bitrate / sampling_rate + padding) * 4; + if (out_num_samples) { + *out_num_samples = 384; + } + } else { // layer II or III + static const int kBitrateV1L2[] = + { + 32, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 384 + }; + + static const int kBitrateV1L3[] = + { + 32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320 + }; + + static const int kBitrateV2[] = + { + 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160 + }; + + int bitrate; + if (version == 3 /* V1 */) { + bitrate = (layer == 2 /* L2 */) ? kBitrateV1L2[bitrate_index - 1] : + kBitrateV1L3[bitrate_index - 1]; + + if (out_num_samples) { + *out_num_samples = 1152; + } + } else { // V2 (or 2.5) + bitrate = kBitrateV2[bitrate_index - 1]; + if (out_num_samples) { + *out_num_samples = (layer == 1 /* L3 */) ? 
576 : 1152; + } + } + + if (out_bitrate) { + *out_bitrate = bitrate; + } + + if (version == 3 /* V1 */) { + *frame_size = 144000 * bitrate / sampling_rate + padding; + } else { // V2 or V2.5 + size_t tmp = (layer == 1 /* L3 */) ? 72000 : 144000; + *frame_size = tmp * bitrate / sampling_rate + padding; + } + } + + if (out_sampling_rate) { + *out_sampling_rate = sampling_rate; + } + + if (out_channels) { + int channel_mode = (header >> 6) & 3; + + *out_channels = (channel_mode == 3) ? 1 : 2; + } + + return true; +} + +static uint32_t U32_AT(const uint8_t *ptr) { + return ptr[0] << 24 | ptr[1] << 16 | ptr[2] << 8 | ptr[3]; +} + +static status_t calculateOutSize(uint8 *header, size_t inSize, + std::vector *decodedSizes) { + uint32_t channels; + uint32_t numSamples; + size_t frameSize; + size_t totalInSize = 0; + + while (totalInSize + 4 < inSize) { + if (!parseMp3Header(U32_AT(header + totalInSize), &frameSize, + nullptr, &channels, nullptr, &numSamples)) { + ALOGE("Error in parse mp3 header during outSize estimation"); + return UNKNOWN_ERROR; + } + totalInSize += frameSize; + decodedSizes->push_back(numSamples * channels * sizeof(int16_t)); + } + + if (decodedSizes->empty()) return UNKNOWN_ERROR; + + return OK; +} + +c2_status_t C2SoftMP3::onFlush_sm() { + return onStop(); +} + +c2_status_t C2SoftMP3::drain( + uint32_t drainMode, + const std::shared_ptr &pool) { + (void) pool; + if (drainMode == NO_DRAIN) { + ALOGW("drain with NO_DRAIN: no-op"); + return C2_OK; + } + if (drainMode == DRAIN_CHAIN) { + ALOGW("DRAIN_CHAIN not supported"); + return C2_OMITTED; + } + + return C2_OK; +} + +// TODO: Can overall error checking be improved? As in the check for validity of +// work, pool ptr, work->input.buffers.size() == 1, ... +// TODO: Blind removal of 529 samples from the output may not work. Because +// mpeg layer 1 frame size is 384 samples per frame. This should introduce +// negative values and can cause SEG faults. Soft omx mp3 plugin can have +// this problem (CHECK!) +void C2SoftMP3::process( + const std::unique_ptr &work, + const std::shared_ptr &pool) { + // Initialize output work + work->result = C2_OK; + work->workletsProcessed = 1u; + work->worklets.front()->output.configUpdate.clear(); + work->worklets.front()->output.flags = work->input.flags; + + if (mSignalledError || mSignalledOutputEos) { + work->result = C2_BAD_VALUE; + return; + } + + bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0); + size_t inSize = 0u; + C2ReadView rView = mDummyReadView; + if (!work->input.buffers.empty()) { + rView = work->input.buffers[0]->data().linearBlocks().front().map().get(); + inSize = rView.capacity(); + if (inSize && rView.error()) { + ALOGE("read view map failed %d", rView.error()); + work->result = rView.error(); + return; + } + } + + if (inSize == 0 && (!mGaplessBytes || !eos)) { + work->worklets.front()->output.flags = work->input.flags; + work->worklets.front()->output.buffers.clear(); + work->worklets.front()->output.ordinal = work->input.ordinal; + return; + } + ALOGV("in buffer attr. 
size %zu timestamp %d frameindex %d", inSize, + (int)work->input.ordinal.timestamp.peeku(), (int)work->input.ordinal.frameIndex.peeku()); + + int32_t numChannels = mConfig->num_channels; + size_t calOutSize; + std::vector decodedSizes; + if (inSize && OK != calculateOutSize(const_cast(rView.data()), + inSize, &decodedSizes)) { + work->result = C2_CORRUPTED; + return; + } + calOutSize = std::accumulate(decodedSizes.begin(), decodedSizes.end(), 0); + if (eos) { + calOutSize += kPVMP3DecoderDelay * numChannels * sizeof(int16_t); + } + + std::shared_ptr block; + C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE }; + c2_status_t err = pool->fetchLinearBlock(calOutSize, usage, &block); + if (err != C2_OK) { + ALOGE("fetchLinearBlock for Output failed with status %d", err); + work->result = C2_NO_MEMORY; + return; + } + C2WriteView wView = block->map().get(); + if (wView.error()) { + ALOGE("write view map failed %d", wView.error()); + work->result = wView.error(); + return; + } + + int outSize = 0; + int outOffset = 0; + auto it = decodedSizes.begin(); + size_t inPos = 0; + int32_t samplingRate = mConfig->samplingRate; + while (inPos < inSize) { + if (it == decodedSizes.end()) { + ALOGE("unexpected trailing bytes, ignoring them"); + break; + } + + mConfig->pInputBuffer = const_cast(rView.data() + inPos); + mConfig->inputBufferCurrentLength = (inSize - inPos); + mConfig->inputBufferMaxLength = 0; + mConfig->inputBufferUsedLength = 0; + mConfig->outputFrameSize = (calOutSize - outSize); + mConfig->pOutputBuffer = reinterpret_cast (wView.data() + outSize); + + ERROR_CODE decoderErr; + if ((decoderErr = pvmp3_framedecoder(mConfig, mDecoderBuf)) + != NO_DECODING_ERROR) { + ALOGE("mp3 decoder returned error %d", decoderErr); + if (decoderErr != NO_ENOUGH_MAIN_DATA_ERROR + && decoderErr != SIDE_INFO_ERROR) { + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + + // This is recoverable, just ignore the current frame and + // play silence instead. 
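+            // (Added note: NO_ENOUGH_MAIN_DATA_ERROR typically means the
+            // frame's main data refers to bit-reservoir bytes the decoder has
+            // not seen, e.g. right after a seek; SIDE_INFO_ERROR indicates
+            // corrupt side info. Below, the frame is replaced with silence of
+            // the expected size so the output timestamps stay contiguous.)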
+ ALOGV("ignoring error and sending silence"); + if (mConfig->outputFrameSize == 0) { + mConfig->outputFrameSize = *it / sizeof(int16_t); + } + memset(mConfig->pOutputBuffer, 0, mConfig->outputFrameSize * sizeof(int16_t)); + } else if (mConfig->samplingRate != samplingRate + || mConfig->num_channels != numChannels) { + ALOGI("Reconfiguring decoder: %d->%d Hz, %d->%d channels", + samplingRate, mConfig->samplingRate, + numChannels, mConfig->num_channels); + samplingRate = mConfig->samplingRate; + numChannels = mConfig->num_channels; + + C2StreamSampleRateInfo::output sampleRateInfo(0u, samplingRate); + C2StreamChannelCountInfo::output channelCountInfo(0u, numChannels); + std::vector> failures; + c2_status_t err = mIntf->config( + { &sampleRateInfo, &channelCountInfo }, + C2_MAY_BLOCK, + &failures); + if (err == OK) { + work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(sampleRateInfo)); + work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(channelCountInfo)); + } else { + ALOGE("Config Update failed"); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + } + if (*it != mConfig->outputFrameSize * sizeof(int16_t)) { + ALOGE("panic, parsed size does not match decoded size"); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + outSize += mConfig->outputFrameSize * sizeof(int16_t); + inPos += mConfig->inputBufferUsedLength; + it++; + } + if (mIsFirst) { + mIsFirst = false; + mGaplessBytes = true; + // The decoder delay is 529 samples, so trim that many samples off + // the start of the first output buffer. This essentially makes this + // decoder have zero delay, which the rest of the pipeline assumes. + outOffset = kPVMP3DecoderDelay * numChannels * sizeof(int16_t); + mAnchorTimeStamp = work->input.ordinal.timestamp.peekull(); + } + if (eos) { + if (calOutSize >= + outSize + kPVMP3DecoderDelay * numChannels * sizeof(int16_t)) { + if (!memset(reinterpret_cast(wView.data() + outSize), 0, + kPVMP3DecoderDelay * numChannels * sizeof(int16_t))) { + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + ALOGV("Adding 529 samples at end"); + mGaplessBytes = false; + outSize += kPVMP3DecoderDelay * numChannels * sizeof(int16_t); + } + } + + uint64_t outTimeStamp = mProcessedSamples * 1000000ll / samplingRate; + mProcessedSamples += ((outSize - outOffset) / (numChannels * sizeof(int16_t))); + ALOGV("out buffer attr. 
offset %d size %d timestamp %u", outOffset, outSize - outOffset, + (uint32_t)(mAnchorTimeStamp + outTimeStamp)); + decodedSizes.clear(); + work->worklets.front()->output.flags = work->input.flags; + work->worklets.front()->output.buffers.clear(); + work->worklets.front()->output.buffers.push_back( + createLinearBuffer(block, outOffset, outSize - outOffset)); + work->worklets.front()->output.ordinal = work->input.ordinal; + work->worklets.front()->output.ordinal.timestamp = mAnchorTimeStamp + outTimeStamp; + if (eos) { + mSignalledOutputEos = true; + ALOGV("signalled EOS"); + } +} + +class C2SoftMp3DecFactory : public C2ComponentFactory { +public: + C2SoftMp3DecFactory() : mHelper(std::static_pointer_cast( + GetCodec2PlatformComponentStore()->getParamReflector())) { + } + + virtual c2_status_t createComponent( + c2_node_id_t id, + std::shared_ptr* const component, + std::function deleter) override { + *component = std::shared_ptr( + new C2SoftMP3(COMPONENT_NAME, + id, + std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual c2_status_t createInterface( + c2_node_id_t id, + std::shared_ptr* const interface, + std::function deleter) override { + *interface = std::shared_ptr( + new SimpleInterface( + COMPONENT_NAME, id, std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual ~C2SoftMp3DecFactory() override = default; + +private: + std::shared_ptr mHelper; +}; + +} // namespace android + +extern "C" ::C2ComponentFactory* CreateCodec2Factory() { + ALOGV("in %s", __func__); + return new ::android::C2SoftMp3DecFactory(); +} + +extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) { + ALOGV("in %s", __func__); + delete factory; +} + diff --git a/media/codec2/components/mp3/C2SoftMp3Dec.h b/media/codec2/components/mp3/C2SoftMp3Dec.h new file mode 100644 index 0000000000000000000000000000000000000000..402bdc40a75b2f0c552f2c5b1ce81a80bae10d34 --- /dev/null +++ b/media/codec2/components/mp3/C2SoftMp3Dec.h @@ -0,0 +1,76 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_C2_SOFT_MP3_DEC_H_ +#define ANDROID_C2_SOFT_MP3_DEC_H_ + +#include + + +struct tPVMP3DecoderExternal; + +bool parseMp3Header(uint32_t header, size_t *frame_size, + uint32_t *out_sampling_rate = nullptr, + uint32_t *out_channels = nullptr, + uint32_t *out_bitrate = nullptr, + uint32_t *out_num_samples = nullptr); + +namespace android { + +struct C2SoftMP3 : public SimpleC2Component { + class IntfImpl; + + C2SoftMP3(const char *name, c2_node_id_t id, + const std::shared_ptr &intfImpl); + virtual ~C2SoftMP3(); + + // From SimpleC2Component + c2_status_t onInit() override; + c2_status_t onStop() override; + void onReset() override; + void onRelease() override; + c2_status_t onFlush_sm() override; + void process( + const std::unique_ptr &work, + const std::shared_ptr &pool) override; + c2_status_t drain( + uint32_t drainMode, + const std::shared_ptr &pool) override; + +private: + enum { + kPVMP3DecoderDelay = 529 // samples + }; + + std::shared_ptr mIntf; + tPVMP3DecoderExternal *mConfig; + void *mDecoderBuf; + + bool mIsFirst; + bool mSignalledError; + bool mSignalledOutputEos; + bool mGaplessBytes; + uint64_t mAnchorTimeStamp; + uint64_t mProcessedSamples; + + status_t initDecoder(); + + C2_DO_NOT_COPY(C2SoftMP3); +}; + +} // namespace android + +#endif // ANDROID_C2_SOFT_MP3_DEC_H_ diff --git a/media/codec2/components/mp3/MODULE_LICENSE_APACHE2 b/media/codec2/components/mp3/MODULE_LICENSE_APACHE2 new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/media/codec2/components/mp3/NOTICE b/media/codec2/components/mp3/NOTICE new file mode 100644 index 0000000000000000000000000000000000000000..c5b1efa7aac764ae6d8da63476a2d5cec02a6a5d --- /dev/null +++ b/media/codec2/components/mp3/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2005-2008, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/media/codec2/components/mp3/patent_disclaimer.txt b/media/codec2/components/mp3/patent_disclaimer.txt new file mode 100644 index 0000000000000000000000000000000000000000..b4bf11d4ab14011ba28ce5ff6090529d2b7b587f --- /dev/null +++ b/media/codec2/components/mp3/patent_disclaimer.txt @@ -0,0 +1,9 @@ + +THIS IS NOT A GRANT OF PATENT RIGHTS. + +Google makes no representation or warranty that the codecs for which +source code is made available hereunder are unencumbered by +third-party patents. Those intending to use this source code in +hardware or software products are advised that implementations of +these codecs, including in open source software or shareware, may +require patent licenses from the relevant patent holders. diff --git a/media/codec2/components/mpeg2/Android.bp b/media/codec2/components/mpeg2/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..85d867ee1b1293476e5e4dae1f705abd86d20de2 --- /dev/null +++ b/media/codec2/components/mpeg2/Android.bp @@ -0,0 +1,16 @@ +cc_library_shared { + name: "libstagefright_soft_c2mpeg2dec", + defaults: [ + "libstagefright_soft_c2-defaults", + "libstagefright_soft_c2_sanitize_signed-defaults", + ], + + srcs: ["C2SoftMpeg2Dec.cpp"], + + static_libs: ["libmpeg2dec"], + + include_dirs: [ + "external/libmpeg2/decoder", + "external/libmpeg2/common", + ], +} diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp new file mode 100644 index 0000000000000000000000000000000000000000..da32ec027579316efe58d20f390f24e343e94354 --- /dev/null +++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp @@ -0,0 +1,1069 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "C2SoftMpeg2Dec" +#include + +#include + +#include +#include +#include +#include + +#include "C2SoftMpeg2Dec.h" +#include "impeg2d.h" + +namespace android { + +constexpr char COMPONENT_NAME[] = "c2.android.mpeg2.decoder"; + +class C2SoftMpeg2Dec::IntfImpl : public SimpleInterface::BaseParams { +public: + explicit IntfImpl(const std::shared_ptr &helper) + : SimpleInterface::BaseParams( + helper, + COMPONENT_NAME, + C2Component::KIND_DECODER, + C2Component::DOMAIN_VIDEO, + MEDIA_MIMETYPE_VIDEO_MPEG2) { + noPrivateBuffers(); // TODO: account for our buffers here + noInputReferences(); + noOutputReferences(); + noInputLatency(); + noTimeStretch(); + + // TODO: output latency and reordering + + addParameter( + DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES) + .withConstValue(new C2ComponentAttributesSetting(C2Component::ATTRIB_IS_TEMPORAL)) + .build()); + + addParameter( + DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE) + .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240)) + .withFields({ + C2F(mSize, width).inRange(16, 1920, 4), + C2F(mSize, height).inRange(16, 1088, 4), + }) + .withSetter(SizeSetter) + .build()); + + addParameter( + DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL) + .withDefault(new C2StreamProfileLevelInfo::input(0u, + C2Config::PROFILE_MP2V_SIMPLE, C2Config::LEVEL_MP2V_HIGH)) + .withFields({ + C2F(mProfileLevel, profile).oneOf({ + C2Config::PROFILE_MP2V_SIMPLE, + C2Config::PROFILE_MP2V_MAIN}), + C2F(mProfileLevel, level).oneOf({ + C2Config::LEVEL_MP2V_LOW, + C2Config::LEVEL_MP2V_MAIN, + C2Config::LEVEL_MP2V_HIGH_1440, + C2Config::LEVEL_MP2V_HIGH}) + }) + .withSetter(ProfileLevelSetter, mSize) + .build()); + + addParameter( + DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE) + .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240)) + .withFields({ + C2F(mSize, width).inRange(2, 1920, 2), + C2F(mSize, height).inRange(2, 1088, 2), + }) + .withSetter(MaxPictureSizeSetter, mSize) + .build()); + + addParameter( + DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE) + .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, 320 * 240 * 3 / 2)) + .withFields({ + C2F(mMaxInputSize, value).any(), + }) + .calculatedAs(MaxInputSizeSetter, mMaxSize) + .build()); + + C2ChromaOffsetStruct locations[1] = { C2ChromaOffsetStruct::ITU_YUV_420_0() }; + std::shared_ptr defaultColorInfo = + C2StreamColorInfo::output::AllocShared( + 1u, 0u, 8u /* bitDepth */, C2Color::YUV_420); + memcpy(defaultColorInfo->m.locations, locations, sizeof(locations)); + + defaultColorInfo = + C2StreamColorInfo::output::AllocShared( + { C2ChromaOffsetStruct::ITU_YUV_420_0() }, + 0u, 8u /* bitDepth */, C2Color::YUV_420); + helper->addStructDescriptors(); + + addParameter( + DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO) + .withConstValue(defaultColorInfo) + .build()); + + addParameter( + DefineParam(mDefaultColorAspects, C2_PARAMKEY_DEFAULT_COLOR_ASPECTS) + .withDefault(new C2StreamColorAspectsTuning::output( + 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED, + C2Color::TRANSFER_UNSPECIFIED, 
C2Color::MATRIX_UNSPECIFIED)) + .withFields({ + C2F(mDefaultColorAspects, range).inRange( + C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER), + C2F(mDefaultColorAspects, primaries).inRange( + C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER), + C2F(mDefaultColorAspects, transfer).inRange( + C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER), + C2F(mDefaultColorAspects, matrix).inRange( + C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER) + }) + .withSetter(DefaultColorAspectsSetter) + .build()); + + addParameter( + DefineParam(mCodedColorAspects, C2_PARAMKEY_VUI_COLOR_ASPECTS) + .withDefault(new C2StreamColorAspectsInfo::input( + 0u, C2Color::RANGE_LIMITED, C2Color::PRIMARIES_UNSPECIFIED, + C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED)) + .withFields({ + C2F(mCodedColorAspects, range).inRange( + C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER), + C2F(mCodedColorAspects, primaries).inRange( + C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER), + C2F(mCodedColorAspects, transfer).inRange( + C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER), + C2F(mCodedColorAspects, matrix).inRange( + C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER) + }) + .withSetter(CodedColorAspectsSetter) + .build()); + + addParameter( + DefineParam(mColorAspects, C2_PARAMKEY_COLOR_ASPECTS) + .withDefault(new C2StreamColorAspectsInfo::output( + 0u, C2Color::RANGE_UNSPECIFIED, C2Color::PRIMARIES_UNSPECIFIED, + C2Color::TRANSFER_UNSPECIFIED, C2Color::MATRIX_UNSPECIFIED)) + .withFields({ + C2F(mColorAspects, range).inRange( + C2Color::RANGE_UNSPECIFIED, C2Color::RANGE_OTHER), + C2F(mColorAspects, primaries).inRange( + C2Color::PRIMARIES_UNSPECIFIED, C2Color::PRIMARIES_OTHER), + C2F(mColorAspects, transfer).inRange( + C2Color::TRANSFER_UNSPECIFIED, C2Color::TRANSFER_OTHER), + C2F(mColorAspects, matrix).inRange( + C2Color::MATRIX_UNSPECIFIED, C2Color::MATRIX_OTHER) + }) + .withSetter(ColorAspectsSetter, mDefaultColorAspects, mCodedColorAspects) + .build()); + + // TODO: support more formats? + addParameter( + DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT) + .withConstValue(new C2StreamPixelFormatInfo::output( + 0u, HAL_PIXEL_FORMAT_YCBCR_420_888)) + .build()); + } + + static C2R SizeSetter(bool mayBlock, const C2P &oldMe, + C2P &me) { + (void)mayBlock; + C2R res = C2R::Ok(); + if (!me.F(me.v.width).supportsAtAll(me.v.width)) { + res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width))); + me.set().width = oldMe.v.width; + } + if (!me.F(me.v.height).supportsAtAll(me.v.height)) { + res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height))); + me.set().height = oldMe.v.height; + } + return res; + } + + static C2R MaxPictureSizeSetter(bool mayBlock, C2P &me, + const C2P &size) { + (void)mayBlock; + // TODO: get max width/height from the size's field helpers vs. 
hardcoding + me.set().width = c2_min(c2_max(me.v.width, size.v.width), 1920u); + me.set().height = c2_min(c2_max(me.v.height, size.v.height), 1088u); + return C2R::Ok(); + } + + static C2R MaxInputSizeSetter(bool mayBlock, C2P &me, + const C2P &maxSize) { + (void)mayBlock; + // assume compression ratio of 1 + me.set().value = (((maxSize.v.width + 15) / 16) * ((maxSize.v.height + 15) / 16) * 384); + return C2R::Ok(); + } + + static C2R ProfileLevelSetter(bool mayBlock, C2P &me, + const C2P &size) { + (void)mayBlock; + (void)size; + (void)me; // TODO: validate + return C2R::Ok(); + } + + static C2R DefaultColorAspectsSetter(bool mayBlock, C2P &me) { + (void)mayBlock; + if (me.v.range > C2Color::RANGE_OTHER) { + me.set().range = C2Color::RANGE_OTHER; + } + if (me.v.primaries > C2Color::PRIMARIES_OTHER) { + me.set().primaries = C2Color::PRIMARIES_OTHER; + } + if (me.v.transfer > C2Color::TRANSFER_OTHER) { + me.set().transfer = C2Color::TRANSFER_OTHER; + } + if (me.v.matrix > C2Color::MATRIX_OTHER) { + me.set().matrix = C2Color::MATRIX_OTHER; + } + return C2R::Ok(); + } + + static C2R CodedColorAspectsSetter(bool mayBlock, C2P &me) { + (void)mayBlock; + if (me.v.range > C2Color::RANGE_OTHER) { + me.set().range = C2Color::RANGE_OTHER; + } + if (me.v.primaries > C2Color::PRIMARIES_OTHER) { + me.set().primaries = C2Color::PRIMARIES_OTHER; + } + if (me.v.transfer > C2Color::TRANSFER_OTHER) { + me.set().transfer = C2Color::TRANSFER_OTHER; + } + if (me.v.matrix > C2Color::MATRIX_OTHER) { + me.set().matrix = C2Color::MATRIX_OTHER; + } + return C2R::Ok(); + } + + static C2R ColorAspectsSetter(bool mayBlock, C2P &me, + const C2P &def, + const C2P &coded) { + (void)mayBlock; + // take default values for all unspecified fields, and coded values for specified ones + me.set().range = coded.v.range == RANGE_UNSPECIFIED ? def.v.range : coded.v.range; + me.set().primaries = coded.v.primaries == PRIMARIES_UNSPECIFIED + ? def.v.primaries : coded.v.primaries; + me.set().transfer = coded.v.transfer == TRANSFER_UNSPECIFIED + ? def.v.transfer : coded.v.transfer; + me.set().matrix = coded.v.matrix == MATRIX_UNSPECIFIED ? def.v.matrix : coded.v.matrix; + return C2R::Ok(); + } + + std::shared_ptr getColorAspects_l() { + return mColorAspects; + } + +private: + std::shared_ptr mProfileLevel; + std::shared_ptr mSize; + std::shared_ptr mMaxSize; + std::shared_ptr mMaxInputSize; + std::shared_ptr mColorInfo; + std::shared_ptr mCodedColorAspects; + std::shared_ptr mDefaultColorAspects; + std::shared_ptr mColorAspects; + std::shared_ptr mPixelFormat; +}; + +static size_t getCpuCoreCount() { + long cpuCoreCount = 1; +#if defined(_SC_NPROCESSORS_ONLN) + cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN); +#else + // _SC_NPROC_ONLN must be defined... 
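+    // (Added note: sysconf() returns -1 on failure; the CHECK below aborts
+    // rather than continuing with a nonsensical core count.)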
+ cpuCoreCount = sysconf(_SC_NPROC_ONLN); +#endif + CHECK(cpuCoreCount >= 1); + ALOGV("Number of CPU cores: %ld", cpuCoreCount); + return (size_t)cpuCoreCount; +} + +static void *ivd_aligned_malloc(WORD32 alignment, WORD32 size) { + return memalign(alignment, size); +} + +static void ivd_aligned_free(void *mem) { + free(mem); +} + +C2SoftMpeg2Dec::C2SoftMpeg2Dec( + const char *name, + c2_node_id_t id, + const std::shared_ptr &intfImpl) + : SimpleC2Component(std::make_shared>(name, id, intfImpl)), + mIntf(intfImpl), + mDecHandle(nullptr), + mMemRecords(nullptr), + mOutBufferDrain(nullptr), + mIvColorformat(IV_YUV_420P), + mWidth(320), + mHeight(240) { + // If input dump is enabled, then open create an empty file + GENERATE_FILE_NAMES(); + CREATE_DUMP_FILE(mInFile); +} + +C2SoftMpeg2Dec::~C2SoftMpeg2Dec() { + onRelease(); +} + +c2_status_t C2SoftMpeg2Dec::onInit() { + status_t err = initDecoder(); + return err == OK ? C2_OK : C2_CORRUPTED; +} + +c2_status_t C2SoftMpeg2Dec::onStop() { + if (OK != resetDecoder()) return C2_CORRUPTED; + resetPlugin(); + return C2_OK; +} + +void C2SoftMpeg2Dec::onReset() { + (void) onStop(); +} + +void C2SoftMpeg2Dec::onRelease() { + (void) deleteDecoder(); + if (mOutBufferDrain) { + ivd_aligned_free(mOutBufferDrain); + mOutBufferDrain = nullptr; + } + if (mOutBlock) { + mOutBlock.reset(); + } + if (mMemRecords) { + ivd_aligned_free(mMemRecords); + mMemRecords = nullptr; + } +} + +c2_status_t C2SoftMpeg2Dec::onFlush_sm() { + if (OK != setFlushMode()) return C2_CORRUPTED; + + uint32_t displayStride = mStride; + uint32_t displayHeight = mHeight; + uint32_t bufferSize = displayStride * displayHeight * 3 / 2; + mOutBufferDrain = (uint8_t *)ivd_aligned_malloc(128, bufferSize); + if (!mOutBufferDrain) { + ALOGE("could not allocate tmp output buffer (for flush) of size %u ", bufferSize); + return C2_NO_MEMORY; + } + + while (true) { + ivd_video_decode_ip_t s_decode_ip; + ivd_video_decode_op_t s_decode_op; + + setDecodeArgs(&s_decode_ip, &s_decode_op, nullptr, nullptr, 0, 0, 0); + (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op); + if (0 == s_decode_op.u4_output_present) { + resetPlugin(); + break; + } + } + + if (mOutBufferDrain) { + ivd_aligned_free(mOutBufferDrain); + mOutBufferDrain = nullptr; + } + + return C2_OK; +} + +status_t C2SoftMpeg2Dec::getNumMemRecords() { + iv_num_mem_rec_ip_t s_num_mem_rec_ip; + iv_num_mem_rec_op_t s_num_mem_rec_op; + + s_num_mem_rec_ip.u4_size = sizeof(s_num_mem_rec_ip); + s_num_mem_rec_ip.e_cmd = IV_CMD_GET_NUM_MEM_REC; + s_num_mem_rec_op.u4_size = sizeof(s_num_mem_rec_op); + + IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle, + &s_num_mem_rec_ip, + &s_num_mem_rec_op); + if (IV_SUCCESS != status) { + ALOGE("Error in getting mem records: 0x%x", s_num_mem_rec_op.u4_error_code); + return UNKNOWN_ERROR; + } + mNumMemRecords = s_num_mem_rec_op.u4_num_mem_rec; + + return OK; +} + +status_t C2SoftMpeg2Dec::fillMemRecords() { + iv_mem_rec_t *ps_mem_rec = (iv_mem_rec_t *) ivd_aligned_malloc( + 128, mNumMemRecords * sizeof(iv_mem_rec_t)); + if (!ps_mem_rec) { + ALOGE("Allocation failure"); + return NO_MEMORY; + } + memset(ps_mem_rec, 0, mNumMemRecords * sizeof(iv_mem_rec_t)); + for (size_t i = 0; i < mNumMemRecords; i++) + ps_mem_rec[i].u4_size = sizeof(iv_mem_rec_t); + mMemRecords = ps_mem_rec; + + ivdext_fill_mem_rec_ip_t s_fill_mem_ip; + ivdext_fill_mem_rec_op_t s_fill_mem_op; + + s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_size = sizeof(ivdext_fill_mem_rec_ip_t); + s_fill_mem_ip.u4_share_disp_buf = 0; + 
s_fill_mem_ip.e_output_format = mIvColorformat; + s_fill_mem_ip.u4_deinterlace = 1; + s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.e_cmd = IV_CMD_FILL_NUM_MEM_REC; + s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.pv_mem_rec_location = mMemRecords; + s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_max_frm_wd = mWidth; + s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_max_frm_ht = mHeight; + s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_size = sizeof(ivdext_fill_mem_rec_op_t); + IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle, + &s_fill_mem_ip, + &s_fill_mem_op); + if (IV_SUCCESS != status) { + ALOGE("Error in filling mem records: 0x%x", + s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_error_code); + return UNKNOWN_ERROR; + } + + CHECK_EQ(mNumMemRecords, s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_num_mem_rec_filled); + for (size_t i = 0; i < mNumMemRecords; i++, ps_mem_rec++) { + ps_mem_rec->pv_base = ivd_aligned_malloc( + ps_mem_rec->u4_mem_alignment, ps_mem_rec->u4_mem_size); + if (!ps_mem_rec->pv_base) { + ALOGE("Allocation failure for memory record #%zu of size %u", + i, ps_mem_rec->u4_mem_size); + return NO_MEMORY; + } + } + + return OK; +} + +status_t C2SoftMpeg2Dec::createDecoder() { + ivdext_init_ip_t s_init_ip; + ivdext_init_op_t s_init_op; + + s_init_ip.s_ivd_init_ip_t.u4_size = sizeof(ivdext_init_ip_t); + s_init_ip.s_ivd_init_ip_t.e_cmd = (IVD_API_COMMAND_TYPE_T)IV_CMD_INIT; + s_init_ip.s_ivd_init_ip_t.pv_mem_rec_location = mMemRecords; + s_init_ip.s_ivd_init_ip_t.u4_frm_max_wd = mWidth; + s_init_ip.s_ivd_init_ip_t.u4_frm_max_ht = mHeight; + s_init_ip.u4_share_disp_buf = 0; + s_init_ip.u4_deinterlace = 1; + s_init_ip.s_ivd_init_ip_t.u4_num_mem_rec = mNumMemRecords; + s_init_ip.s_ivd_init_ip_t.e_output_format = mIvColorformat; + s_init_op.s_ivd_init_op_t.u4_size = sizeof(ivdext_init_op_t); + + mDecHandle = (iv_obj_t *)mMemRecords[0].pv_base; + mDecHandle->pv_fxns = (void *)ivdec_api_function; + mDecHandle->u4_size = sizeof(iv_obj_t); + + IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle, + &s_init_ip, + &s_init_op); + if (status != IV_SUCCESS) { + ALOGE("error in %s: 0x%x", __func__, + s_init_op.s_ivd_init_op_t.u4_error_code); + return UNKNOWN_ERROR; + } + + return OK; +} + +status_t C2SoftMpeg2Dec::setNumCores() { + ivdext_ctl_set_num_cores_ip_t s_set_num_cores_ip; + ivdext_ctl_set_num_cores_op_t s_set_num_cores_op; + + s_set_num_cores_ip.u4_size = sizeof(ivdext_ctl_set_num_cores_ip_t); + s_set_num_cores_ip.e_cmd = IVD_CMD_VIDEO_CTL; + s_set_num_cores_ip.e_sub_cmd = IVDEXT_CMD_CTL_SET_NUM_CORES; + s_set_num_cores_ip.u4_num_cores = mNumCores; + s_set_num_cores_op.u4_size = sizeof(ivdext_ctl_set_num_cores_op_t); + IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle, + &s_set_num_cores_ip, + &s_set_num_cores_op); + if (status != IV_SUCCESS) { + ALOGD("error in %s: 0x%x", __func__, s_set_num_cores_op.u4_error_code); + return UNKNOWN_ERROR; + } + + return OK; +} + +status_t C2SoftMpeg2Dec::setParams(size_t stride) { + ivd_ctl_set_config_ip_t s_set_dyn_params_ip; + ivd_ctl_set_config_op_t s_set_dyn_params_op; + + s_set_dyn_params_ip.u4_size = sizeof(ivd_ctl_set_config_ip_t); + s_set_dyn_params_ip.e_cmd = IVD_CMD_VIDEO_CTL; + s_set_dyn_params_ip.e_sub_cmd = IVD_CMD_CTL_SETPARAMS; + s_set_dyn_params_ip.u4_disp_wd = (UWORD32) stride; + s_set_dyn_params_ip.e_frm_skip_mode = IVD_SKIP_NONE; + s_set_dyn_params_ip.e_frm_out_mode = IVD_DISPLAY_FRAME_OUT; + s_set_dyn_params_ip.e_vid_dec_mode = IVD_DECODE_FRAME; + s_set_dyn_params_op.u4_size = sizeof(ivd_ctl_set_config_op_t); + IV_API_CALL_STATUS_T status = 
ivdec_api_function(mDecHandle, + &s_set_dyn_params_ip, + &s_set_dyn_params_op); + if (status != IV_SUCCESS) { + ALOGE("error in %s: 0x%x", __func__, s_set_dyn_params_op.u4_error_code); + return UNKNOWN_ERROR; + } + + return OK; +} + +status_t C2SoftMpeg2Dec::getVersion() { + ivd_ctl_getversioninfo_ip_t s_get_versioninfo_ip; + ivd_ctl_getversioninfo_op_t s_get_versioninfo_op; + UWORD8 au1_buf[512]; + + s_get_versioninfo_ip.u4_size = sizeof(ivd_ctl_getversioninfo_ip_t); + s_get_versioninfo_ip.e_cmd = IVD_CMD_VIDEO_CTL; + s_get_versioninfo_ip.e_sub_cmd = IVD_CMD_CTL_GETVERSION; + s_get_versioninfo_ip.pv_version_buffer = au1_buf; + s_get_versioninfo_ip.u4_version_buffer_size = sizeof(au1_buf); + s_get_versioninfo_op.u4_size = sizeof(ivd_ctl_getversioninfo_op_t); + IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle, + &s_get_versioninfo_ip, + &s_get_versioninfo_op); + if (status != IV_SUCCESS) { + ALOGD("error in %s: 0x%x", __func__, + s_get_versioninfo_op.u4_error_code); + } else { + ALOGV("ittiam decoder version number: %s", + (char *) s_get_versioninfo_ip.pv_version_buffer); + } + + return OK; +} + +status_t C2SoftMpeg2Dec::initDecoder() { + status_t ret = getNumMemRecords(); + if (OK != ret) return ret; + + ret = fillMemRecords(); + if (OK != ret) return ret; + + if (OK != createDecoder()) return UNKNOWN_ERROR; + + mNumCores = MIN(getCpuCoreCount(), MAX_NUM_CORES); + mStride = ALIGN64(mWidth); + mSignalledError = false; + resetPlugin(); + (void) setNumCores(); + if (OK != setParams(mStride)) return UNKNOWN_ERROR; + (void) getVersion(); + + return OK; +} + +bool C2SoftMpeg2Dec::setDecodeArgs(ivd_video_decode_ip_t *ps_decode_ip, + ivd_video_decode_op_t *ps_decode_op, + C2ReadView *inBuffer, + C2GraphicView *outBuffer, + size_t inOffset, + size_t inSize, + uint32_t tsMarker) { + uint32_t displayStride = mStride; + uint32_t displayHeight = mHeight; + size_t lumaSize = displayStride * displayHeight; + size_t chromaSize = lumaSize >> 2; + + ps_decode_ip->u4_size = sizeof(ivd_video_decode_ip_t); + ps_decode_ip->e_cmd = IVD_CMD_VIDEO_DECODE; + if (inBuffer) { + ps_decode_ip->u4_ts = tsMarker; + ps_decode_ip->pv_stream_buffer = const_cast(inBuffer->data() + inOffset); + ps_decode_ip->u4_num_Bytes = inSize; + } else { + ps_decode_ip->u4_ts = 0; + ps_decode_ip->pv_stream_buffer = nullptr; + ps_decode_ip->u4_num_Bytes = 0; + } + ps_decode_ip->s_out_buffer.u4_min_out_buf_size[0] = lumaSize; + ps_decode_ip->s_out_buffer.u4_min_out_buf_size[1] = chromaSize; + ps_decode_ip->s_out_buffer.u4_min_out_buf_size[2] = chromaSize; + if (outBuffer) { + if (outBuffer->width() < displayStride || outBuffer->height() < displayHeight) { + ALOGE("Output buffer too small: provided (%dx%d) required (%ux%u)", + outBuffer->width(), outBuffer->height(), displayStride, displayHeight); + return false; + } + ps_decode_ip->s_out_buffer.pu1_bufs[0] = outBuffer->data()[C2PlanarLayout::PLANE_Y]; + ps_decode_ip->s_out_buffer.pu1_bufs[1] = outBuffer->data()[C2PlanarLayout::PLANE_U]; + ps_decode_ip->s_out_buffer.pu1_bufs[2] = outBuffer->data()[C2PlanarLayout::PLANE_V]; + } else { + ps_decode_ip->s_out_buffer.pu1_bufs[0] = mOutBufferDrain; + ps_decode_ip->s_out_buffer.pu1_bufs[1] = mOutBufferDrain + lumaSize; + ps_decode_ip->s_out_buffer.pu1_bufs[2] = mOutBufferDrain + lumaSize + chromaSize; + } + ps_decode_ip->s_out_buffer.u4_num_bufs = 3; + ps_decode_op->u4_size = sizeof(ivd_video_decode_op_t); + + return true; +} + + +bool C2SoftMpeg2Dec::getSeqInfo() { + ivdext_ctl_get_seq_info_ip_t s_ctl_get_seq_info_ip; + 
ivdext_ctl_get_seq_info_op_t s_ctl_get_seq_info_op; + + s_ctl_get_seq_info_ip.u4_size = sizeof(ivdext_ctl_get_seq_info_ip_t); + s_ctl_get_seq_info_ip.e_cmd = IVD_CMD_VIDEO_CTL; + s_ctl_get_seq_info_ip.e_sub_cmd = + (IVD_CONTROL_API_COMMAND_TYPE_T)IMPEG2D_CMD_CTL_GET_SEQ_INFO; + s_ctl_get_seq_info_op.u4_size = sizeof(ivdext_ctl_get_seq_info_op_t); + IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle, + &s_ctl_get_seq_info_ip, + &s_ctl_get_seq_info_op); + if (status != IV_SUCCESS) { + ALOGW("Error in getting Sequence info: 0x%x", s_ctl_get_seq_info_op.u4_error_code); + return false; + } + + VuiColorAspects vuiColorAspects; + vuiColorAspects.primaries = s_ctl_get_seq_info_op.u1_colour_primaries; + vuiColorAspects.transfer = s_ctl_get_seq_info_op.u1_transfer_characteristics; + vuiColorAspects.coeffs = s_ctl_get_seq_info_op.u1_matrix_coefficients; + vuiColorAspects.fullRange = false; // mpeg2 video has limited range. + + // convert vui aspects to C2 values if changed + if (!(vuiColorAspects == mBitstreamColorAspects)) { + mBitstreamColorAspects = vuiColorAspects; + ColorAspects sfAspects; + C2StreamColorAspectsInfo::input codedAspects = { 0u }; + ColorUtils::convertIsoColorAspectsToCodecAspects( + vuiColorAspects.primaries, vuiColorAspects.transfer, vuiColorAspects.coeffs, + vuiColorAspects.fullRange, sfAspects); + if (!C2Mapper::map(sfAspects.mPrimaries, &codedAspects.primaries)) { + codedAspects.primaries = C2Color::PRIMARIES_UNSPECIFIED; + } + if (!C2Mapper::map(sfAspects.mRange, &codedAspects.range)) { + codedAspects.range = C2Color::RANGE_UNSPECIFIED; + } + if (!C2Mapper::map(sfAspects.mMatrixCoeffs, &codedAspects.matrix)) { + codedAspects.matrix = C2Color::MATRIX_UNSPECIFIED; + } + if (!C2Mapper::map(sfAspects.mTransfer, &codedAspects.transfer)) { + codedAspects.transfer = C2Color::TRANSFER_UNSPECIFIED; + } + std::vector> failures; + (void)mIntf->config({&codedAspects}, C2_MAY_BLOCK, &failures); + } + return true; +} + +status_t C2SoftMpeg2Dec::setFlushMode() { + ivd_ctl_flush_ip_t s_set_flush_ip; + ivd_ctl_flush_op_t s_set_flush_op; + + s_set_flush_ip.u4_size = sizeof(ivd_ctl_flush_ip_t); + s_set_flush_ip.e_cmd = IVD_CMD_VIDEO_CTL; + s_set_flush_ip.e_sub_cmd = IVD_CMD_CTL_FLUSH; + s_set_flush_op.u4_size = sizeof(ivd_ctl_flush_op_t); + IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle, + &s_set_flush_ip, + &s_set_flush_op); + if (status != IV_SUCCESS) { + ALOGE("error in %s: 0x%x", __func__, s_set_flush_op.u4_error_code); + return UNKNOWN_ERROR; + } + + return OK; +} + +status_t C2SoftMpeg2Dec::resetDecoder() { + ivd_ctl_reset_ip_t s_reset_ip; + ivd_ctl_reset_op_t s_reset_op; + + s_reset_ip.u4_size = sizeof(ivd_ctl_reset_ip_t); + s_reset_ip.e_cmd = IVD_CMD_VIDEO_CTL; + s_reset_ip.e_sub_cmd = IVD_CMD_CTL_RESET; + s_reset_op.u4_size = sizeof(ivd_ctl_reset_op_t); + IV_API_CALL_STATUS_T status = ivdec_api_function(mDecHandle, + &s_reset_ip, + &s_reset_op); + if (IV_SUCCESS != status) { + ALOGE("error in %s: 0x%x", __func__, s_reset_op.u4_error_code); + return UNKNOWN_ERROR; + } + (void) setNumCores(); + mStride = 0; + mSignalledError = false; + + return OK; +} + +void C2SoftMpeg2Dec::resetPlugin() { + mSignalledOutputEos = false; + gettimeofday(&mTimeStart, nullptr); + gettimeofday(&mTimeEnd, nullptr); +} + +status_t C2SoftMpeg2Dec::deleteDecoder() { + if (mMemRecords) { + iv_mem_rec_t *ps_mem_rec = mMemRecords; + + for (size_t i = 0; i < mNumMemRecords; i++, ps_mem_rec++) { + if (ps_mem_rec->pv_base) { + ivd_aligned_free(ps_mem_rec->pv_base); + } + } + 
ivd_aligned_free(mMemRecords); + mMemRecords = nullptr; + } + mDecHandle = nullptr; + + return OK; +} + +status_t C2SoftMpeg2Dec::reInitDecoder() { + deleteDecoder(); + + status_t ret = initDecoder(); + if (OK != ret) { + ALOGE("Failed to initialize decoder"); + deleteDecoder(); + return ret; + } + return OK; +} + +void fillEmptyWork(const std::unique_ptr &work) { + uint32_t flags = 0; + if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) { + flags |= C2FrameData::FLAG_END_OF_STREAM; + ALOGV("signalling eos"); + } + work->worklets.front()->output.flags = (C2FrameData::flags_t)flags; + work->worklets.front()->output.buffers.clear(); + work->worklets.front()->output.ordinal = work->input.ordinal; + work->workletsProcessed = 1u; +} + +void C2SoftMpeg2Dec::finishWork(uint64_t index, const std::unique_ptr &work) { + std::shared_ptr buffer = createGraphicBuffer(std::move(mOutBlock), + C2Rect(mWidth, mHeight)); + mOutBlock = nullptr; + { + IntfImpl::Lock lock = mIntf->lock(); + buffer->setInfo(mIntf->getColorAspects_l()); + } + + auto fillWork = [buffer](const std::unique_ptr &work) { + work->worklets.front()->output.flags = (C2FrameData::flags_t)0; + work->worklets.front()->output.buffers.clear(); + work->worklets.front()->output.buffers.push_back(buffer); + work->worklets.front()->output.ordinal = work->input.ordinal; + work->workletsProcessed = 1u; + }; + if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) { + fillWork(work); + } else { + finish(index, fillWork); + } +} + +c2_status_t C2SoftMpeg2Dec::ensureDecoderState(const std::shared_ptr &pool) { + if (!mDecHandle) { + ALOGE("not supposed to be here, invalid decoder context"); + return C2_CORRUPTED; + } + if (mStride != ALIGN64(mWidth)) { + mStride = ALIGN64(mWidth); + if (OK != setParams(mStride)) return C2_CORRUPTED; + } + if (mOutBlock && + (mOutBlock->width() != mStride || mOutBlock->height() != mHeight)) { + mOutBlock.reset(); + } + if (!mOutBlock) { + uint32_t format = HAL_PIXEL_FORMAT_YV12; + C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE }; + c2_status_t err = pool->fetchGraphicBlock(mStride, mHeight, format, usage, &mOutBlock); + if (err != C2_OK) { + ALOGE("fetchGraphicBlock for Output failed with status %d", err); + return err; + } + ALOGV("provided (%dx%d) required (%dx%d)", + mOutBlock->width(), mOutBlock->height(), mStride, mHeight); + } + + return C2_OK; +} + +// TODO: can overall error checking be improved? 
+// TODO: allow configuration of color format and usage for graphic buffers instead +// of hard coding them to HAL_PIXEL_FORMAT_YV12 +// TODO: pass coloraspects information to surface +// TODO: test support for dynamic change in resolution +// TODO: verify if the decoder sent back all frames +void C2SoftMpeg2Dec::process( + const std::unique_ptr &work, + const std::shared_ptr &pool) { + // Initialize output work + work->result = C2_OK; + work->workletsProcessed = 0u; + work->worklets.front()->output.configUpdate.clear(); + work->worklets.front()->output.flags = work->input.flags; + + if (mSignalledError || mSignalledOutputEos) { + work->result = C2_BAD_VALUE; + return; + } + + size_t inOffset = 0u; + size_t inSize = 0u; + uint32_t workIndex = work->input.ordinal.frameIndex.peeku() & 0xFFFFFFFF; + C2ReadView rView = mDummyReadView; + if (!work->input.buffers.empty()) { + rView = work->input.buffers[0]->data().linearBlocks().front().map().get(); + inSize = rView.capacity(); + if (inSize && rView.error()) { + ALOGE("read view map failed %d", rView.error()); + work->result = C2_CORRUPTED; + return; + } + } + bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0); + bool hasPicture = false; + + ALOGV("in buffer attr. size %zu timestamp %d frameindex %d, flags %x", + inSize, (int)work->input.ordinal.timestamp.peeku(), + (int)work->input.ordinal.frameIndex.peeku(), work->input.flags); + size_t inPos = 0; + while (inPos < inSize) { + if (C2_OK != ensureDecoderState(pool)) { + mSignalledError = true; + work->workletsProcessed = 1u; + work->result = C2_CORRUPTED; + return; + } + C2GraphicView wView = mOutBlock->map().get(); + if (wView.error()) { + ALOGE("graphic view map failed %d", wView.error()); + work->result = C2_CORRUPTED; + return; + } + + ivd_video_decode_ip_t s_decode_ip; + ivd_video_decode_op_t s_decode_op; + if (!setDecodeArgs(&s_decode_ip, &s_decode_op, &rView, &wView, + inOffset + inPos, inSize - inPos, workIndex)) { + mSignalledError = true; + work->workletsProcessed = 1u; + work->result = C2_CORRUPTED; + return; + } + // If input dump is enabled, then write to file + DUMP_TO_FILE(mInFile, s_decode_ip.pv_stream_buffer, s_decode_ip.u4_num_Bytes); + WORD32 delay; + GETTIME(&mTimeStart, nullptr); + TIME_DIFF(mTimeEnd, mTimeStart, delay); + (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op); + WORD32 decodeTime; + GETTIME(&mTimeEnd, nullptr); + TIME_DIFF(mTimeStart, mTimeEnd, decodeTime); + ALOGV("decodeTime=%6d delay=%6d numBytes=%6d ", decodeTime, delay, + s_decode_op.u4_num_bytes_consumed); + if (IMPEG2D_UNSUPPORTED_DIMENSIONS == s_decode_op.u4_error_code) { + ALOGV("unsupported resolution : %dx%d", s_decode_op.u4_pic_wd, s_decode_op.u4_pic_ht); + drainInternal(DRAIN_COMPONENT_NO_EOS, pool, work); + resetPlugin(); + work->workletsProcessed = 0u; + mWidth = s_decode_op.u4_pic_wd; + mHeight = s_decode_op.u4_pic_ht; + + ALOGI("Configuring decoder: mWidth %d , mHeight %d ", + mWidth, mHeight); + C2VideoSizeStreamInfo::output size(0u, mWidth, mHeight); + std::vector> failures; + c2_status_t err = + mIntf->config({&size}, C2_MAY_BLOCK, &failures); + if (err == OK) { + work->worklets.front()->output.configUpdate.push_back( + C2Param::Copy(size)); + } else { + ALOGE("Cannot set width and height"); + mSignalledError = true; + work->workletsProcessed = 1u; + work->result = C2_CORRUPTED; + return; + } + + if (OK != reInitDecoder()) { + ALOGE("Failed to reinitialize decoder"); + mSignalledError = true; + work->workletsProcessed = 1u; + work->result = C2_CORRUPTED; + 
return; + } + continue; + } else if (IVD_RES_CHANGED == (s_decode_op.u4_error_code & 0xFF)) { + ALOGV("resolution changed"); + drainInternal(DRAIN_COMPONENT_NO_EOS, pool, work); + resetDecoder(); + resetPlugin(); + work->workletsProcessed = 0u; + continue; + } + if (0 < s_decode_op.u4_pic_wd && 0 < s_decode_op.u4_pic_ht) { + if (s_decode_op.u4_pic_wd != mWidth || s_decode_op.u4_pic_ht != mHeight) { + mWidth = s_decode_op.u4_pic_wd; + mHeight = s_decode_op.u4_pic_ht; + CHECK_EQ(0u, s_decode_op.u4_output_present); + + ALOGI("Configuring decoder out: mWidth %d , mHeight %d ", + mWidth, mHeight); + C2VideoSizeStreamInfo::output size(0u, mWidth, mHeight); + std::vector> failures; + c2_status_t err = + mIntf->config({&size}, C2_MAY_BLOCK, &failures); + if (err == OK) { + work->worklets.front()->output.configUpdate.push_back( + C2Param::Copy(size)); + } else { + ALOGE("Cannot set width and height"); + mSignalledError = true; + work->workletsProcessed = 1u; + work->result = C2_CORRUPTED; + return; + } + } + } + + (void) getSeqInfo(); + hasPicture |= (1 == s_decode_op.u4_frame_decoded_flag); + if (s_decode_op.u4_output_present) { + finishWork(s_decode_op.u4_ts, work); + } + + inPos += s_decode_op.u4_num_bytes_consumed; + if (hasPicture && (inSize - inPos) != 0) { + ALOGD("decoded frame in current access nal, ignoring further trailing bytes %d", + (int)inSize - (int)inPos); + break; + } + } + + if (eos) { + drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work); + mSignalledOutputEos = true; + } else if (!hasPicture) { + fillEmptyWork(work); + } +} + +c2_status_t C2SoftMpeg2Dec::drainInternal( + uint32_t drainMode, + const std::shared_ptr &pool, + const std::unique_ptr &work) { + if (drainMode == NO_DRAIN) { + ALOGW("drain with NO_DRAIN: no-op"); + return C2_OK; + } + if (drainMode == DRAIN_CHAIN) { + ALOGW("DRAIN_CHAIN not supported"); + return C2_OMITTED; + } + + if (OK != setFlushMode()) return C2_CORRUPTED; + while (true) { + if (C2_OK != ensureDecoderState(pool)) { + mSignalledError = true; + work->workletsProcessed = 1u; + work->result = C2_CORRUPTED; + return C2_CORRUPTED; + } + C2GraphicView wView = mOutBlock->map().get(); + if (wView.error()) { + ALOGE("graphic view map failed %d", wView.error()); + return C2_CORRUPTED; + } + ivd_video_decode_ip_t s_decode_ip; + ivd_video_decode_op_t s_decode_op; + if (!setDecodeArgs(&s_decode_ip, &s_decode_op, nullptr, &wView, 0, 0, 0)) { + mSignalledError = true; + work->workletsProcessed = 1u; + return C2_CORRUPTED; + } + (void) ivdec_api_function(mDecHandle, &s_decode_ip, &s_decode_op); + if (s_decode_op.u4_output_present) { + finishWork(s_decode_op.u4_ts, work); + } else { + fillEmptyWork(work); + break; + } + } + + return C2_OK; +} + +c2_status_t C2SoftMpeg2Dec::drain( + uint32_t drainMode, + const std::shared_ptr &pool) { + return drainInternal(drainMode, pool, nullptr); +} + +class C2SoftMpeg2DecFactory : public C2ComponentFactory { +public: + C2SoftMpeg2DecFactory() : mHelper(std::static_pointer_cast( + GetCodec2PlatformComponentStore()->getParamReflector())) { + } + + virtual c2_status_t createComponent( + c2_node_id_t id, + std::shared_ptr* const component, + std::function deleter) override { + *component = std::shared_ptr( + new C2SoftMpeg2Dec(COMPONENT_NAME, + id, + std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual c2_status_t createInterface( + c2_node_id_t id, + std::shared_ptr* const interface, + std::function deleter) override { + *interface = std::shared_ptr( + new SimpleInterface( + COMPONENT_NAME, id, 
std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual ~C2SoftMpeg2DecFactory() override = default; + +private: + std::shared_ptr mHelper; +}; + +} // namespace android + +extern "C" ::C2ComponentFactory* CreateCodec2Factory() { + ALOGV("in %s", __func__); + return new ::android::C2SoftMpeg2DecFactory(); +} + +extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) { + ALOGV("in %s", __func__); + delete factory; +} diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h new file mode 100644 index 0000000000000000000000000000000000000000..99998723f3a6cf0ac2ace25189a91d9bd4030d8f --- /dev/null +++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.h @@ -0,0 +1,195 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_C2_SOFT_MPEG2_DEC_H_ +#define ANDROID_C2_SOFT_MPEG2_DEC_H_ + +#include + +#include + +#include "iv_datatypedef.h" +#include "iv.h" +#include "ivd.h" + +namespace android { + +#define ivdec_api_function impeg2d_api_function +#define ivdext_init_ip_t impeg2d_init_ip_t +#define ivdext_init_op_t impeg2d_init_op_t +#define ivdext_fill_mem_rec_ip_t impeg2d_fill_mem_rec_ip_t +#define ivdext_fill_mem_rec_op_t impeg2d_fill_mem_rec_op_t +#define ivdext_ctl_set_num_cores_ip_t impeg2d_ctl_set_num_cores_ip_t +#define ivdext_ctl_set_num_cores_op_t impeg2d_ctl_set_num_cores_op_t +#define ivdext_ctl_get_seq_info_ip_t impeg2d_ctl_get_seq_info_ip_t +#define ivdext_ctl_get_seq_info_op_t impeg2d_ctl_get_seq_info_op_t +#define ALIGN64(x) ((((x) + 63) >> 6) << 6) +#define MAX_NUM_CORES 4 +#define IVDEXT_CMD_CTL_SET_NUM_CORES \ + (IVD_CONTROL_API_COMMAND_TYPE_T)IMPEG2D_CMD_CTL_SET_NUM_CORES +#define MIN(a, b) (((a) < (b)) ? 
(a) : (b)) +#define GETTIME(a, b) gettimeofday(a, b); +#define TIME_DIFF(start, end, diff) \ + diff = (((end).tv_sec - (start).tv_sec) * 1000000) + \ + ((end).tv_usec - (start).tv_usec); + +#ifdef FILE_DUMP_ENABLE + #define INPUT_DUMP_PATH "/sdcard/clips/mpeg2d_input" + #define INPUT_DUMP_EXT "m2v" + #define GENERATE_FILE_NAMES() { \ + GETTIME(&mTimeStart, NULL); \ + strcpy(mInFile, ""); \ + sprintf(mInFile, "%s_%ld.%ld.%s", INPUT_DUMP_PATH, \ + mTimeStart.tv_sec, mTimeStart.tv_usec, \ + INPUT_DUMP_EXT); \ + } + #define CREATE_DUMP_FILE(m_filename) { \ + FILE *fp = fopen(m_filename, "wb"); \ + if (fp != NULL) { \ + fclose(fp); \ + } else { \ + ALOGD("Could not open file %s", m_filename); \ + } \ + } + #define DUMP_TO_FILE(m_filename, m_buf, m_size) \ + { \ + FILE *fp = fopen(m_filename, "ab"); \ + if (fp != NULL && m_buf != NULL) { \ + uint32_t i; \ + i = fwrite(m_buf, 1, m_size, fp); \ + ALOGD("fwrite ret %d to write %d", i, m_size); \ + if (i != (uint32_t)m_size) { \ + ALOGD("Error in fwrite, returned %d", i); \ + perror("Error in write to file"); \ + } \ + fclose(fp); \ + } else { \ + ALOGD("Could not write to file %s", m_filename);\ + } \ + } +#else /* FILE_DUMP_ENABLE */ + #define INPUT_DUMP_PATH + #define INPUT_DUMP_EXT + #define OUTPUT_DUMP_PATH + #define OUTPUT_DUMP_EXT + #define GENERATE_FILE_NAMES() + #define CREATE_DUMP_FILE(m_filename) + #define DUMP_TO_FILE(m_filename, m_buf, m_size) +#endif /* FILE_DUMP_ENABLE */ + +struct C2SoftMpeg2Dec : public SimpleC2Component { + class IntfImpl; + + C2SoftMpeg2Dec(const char* name, c2_node_id_t id, + const std::shared_ptr& intfImpl); + virtual ~C2SoftMpeg2Dec(); + + // From SimpleC2Component + c2_status_t onInit() override; + c2_status_t onStop() override; + void onReset() override; + void onRelease() override; + c2_status_t onFlush_sm() override; + void process( + const std::unique_ptr &work, + const std::shared_ptr &pool) override; + c2_status_t drain( + uint32_t drainMode, + const std::shared_ptr &pool) override; + private: + status_t getNumMemRecords(); + status_t fillMemRecords(); + status_t createDecoder(); + status_t setNumCores(); + status_t setParams(size_t stride); + status_t getVersion(); + status_t initDecoder(); + bool setDecodeArgs(ivd_video_decode_ip_t *ps_decode_ip, + ivd_video_decode_op_t *ps_decode_op, + C2ReadView *inBuffer, + C2GraphicView *outBuffer, + size_t inOffset, + size_t inSize, + uint32_t tsMarker); + bool getSeqInfo(); + c2_status_t ensureDecoderState(const std::shared_ptr &pool); + void finishWork(uint64_t index, const std::unique_ptr &work); + status_t setFlushMode(); + c2_status_t drainInternal( + uint32_t drainMode, + const std::shared_ptr &pool, + const std::unique_ptr &work); + status_t resetDecoder(); + void resetPlugin(); + status_t deleteDecoder(); + status_t reInitDecoder(); + + // TODO:This is not the right place for this enum. These should + // be part of c2-vndk so that they can be accessed by all video plugins + // until then, make them feel at home + enum { + kNotSupported, + kPreferBitstream, + kPreferContainer, + }; + + std::shared_ptr mIntf; + iv_obj_t *mDecHandle; + iv_mem_rec_t *mMemRecords; + size_t mNumMemRecords; + std::shared_ptr mOutBlock; + uint8_t *mOutBufferDrain; + + size_t mNumCores; + IV_COLOR_FORMAT_T mIvColorformat; + + uint32_t mWidth; + uint32_t mHeight; + uint32_t mStride; + bool mSignalledOutputEos; + bool mSignalledError; + + // Color aspects. 
These are ISO values and are meant to detect changes in aspects to avoid + // converting them to C2 values for each frame + struct VuiColorAspects { + uint8_t primaries; + uint8_t transfer; + uint8_t coeffs; + uint8_t fullRange; + + // default color aspects + VuiColorAspects() + : primaries(2), transfer(2), coeffs(2), fullRange(0) { } + + bool operator==(const VuiColorAspects &o) { + return primaries == o.primaries && transfer == o.transfer && coeffs == o.coeffs + && fullRange == o.fullRange; + } + } mBitstreamColorAspects; + + // profile + struct timeval mTimeStart; + struct timeval mTimeEnd; +#ifdef FILE_DUMP_ENABLE + char mInFile[200]; +#endif /* FILE_DUMP_ENABLE */ + + C2_DO_NOT_COPY(C2SoftMpeg2Dec); +}; + +} // namespace android + +#endif // ANDROID_C2_SOFT_MPEG2_DEC_H_ diff --git a/media/codec2/components/mpeg4_h263/Android.bp b/media/codec2/components/mpeg4_h263/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..3155bc2d7660fe7f968a4024385c36db729c2d8b --- /dev/null +++ b/media/codec2/components/mpeg4_h263/Android.bp @@ -0,0 +1,66 @@ +cc_library_shared { + name: "libstagefright_soft_c2mpeg4dec", + defaults: [ + "libstagefright_soft_c2-defaults", + "libstagefright_soft_c2_sanitize_signed-defaults", + ], + + srcs: ["C2SoftMpeg4Dec.cpp"], + + static_libs: ["libstagefright_m4vh263dec"], + + cflags: [ + "-DOSCL_IMPORT_REF=", + "-DMPEG4", + ], +} + +cc_library_shared { + name: "libstagefright_soft_c2h263dec", + defaults: [ + "libstagefright_soft_c2-defaults", + "libstagefright_soft_c2_sanitize_signed-defaults", + ], + + srcs: ["C2SoftMpeg4Dec.cpp"], + + static_libs: ["libstagefright_m4vh263dec"], + + cflags: [ + "-DOSCL_IMPORT_REF=", + ], +} + +cc_library_shared { + name: "libstagefright_soft_c2mpeg4enc", + defaults: [ + "libstagefright_soft_c2-defaults", + "libstagefright_soft_c2_sanitize_signed-defaults", + ], + + + srcs: ["C2SoftMpeg4Enc.cpp"], + + static_libs: ["libstagefright_m4vh263enc"], + + cflags: [ + "-DMPEG4", + "-DOSCL_IMPORT_REF=", + ], +} + +cc_library_shared { + name: "libstagefright_soft_c2h263enc", + defaults: [ + "libstagefright_soft_c2-defaults", + "libstagefright_soft_c2_sanitize_signed-defaults", + ], + + srcs: ["C2SoftMpeg4Enc.cpp"], + + static_libs: [ "libstagefright_m4vh263enc" ], + + cflags: [ + "-DOSCL_IMPORT_REF=", + ], +} diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp new file mode 100644 index 0000000000000000000000000000000000000000..901f5ed287c7a3a1f81c3436925128365d640b48 --- /dev/null +++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp @@ -0,0 +1,746 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#ifdef MPEG4 + #define LOG_TAG "C2SoftMpeg4Dec" +#else + #define LOG_TAG "C2SoftH263Dec" +#endif +#include + +#include +#include + +#include +#include +#include + +#include "C2SoftMpeg4Dec.h" +#include "mp4dec_api.h" + +namespace android { + +#ifdef MPEG4 +constexpr char COMPONENT_NAME[] = "c2.android.mpeg4.decoder"; +#else +constexpr char COMPONENT_NAME[] = "c2.android.h263.decoder"; +#endif + +class C2SoftMpeg4Dec::IntfImpl : public SimpleInterface::BaseParams { +public: + explicit IntfImpl(const std::shared_ptr &helper) + : SimpleInterface::BaseParams( + helper, + COMPONENT_NAME, + C2Component::KIND_DECODER, + C2Component::DOMAIN_VIDEO, +#ifdef MPEG4 + MEDIA_MIMETYPE_VIDEO_MPEG4 +#else + MEDIA_MIMETYPE_VIDEO_H263 +#endif + ) { + noPrivateBuffers(); // TODO: account for our buffers here + noInputReferences(); + noOutputReferences(); + noInputLatency(); + noTimeStretch(); + + // TODO: output latency and reordering + + addParameter( + DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES) + .withConstValue(new C2ComponentAttributesSetting(C2Component::ATTRIB_IS_TEMPORAL)) + .build()); + + addParameter( + DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE) + .withDefault(new C2StreamPictureSizeInfo::output(0u, 176, 144)) + .withFields({ +#ifdef MPEG4 + C2F(mSize, width).inRange(2, 1920, 2), + C2F(mSize, height).inRange(2, 1088, 2), +#else + C2F(mSize, width).inRange(2, 352, 2), + C2F(mSize, height).inRange(2, 288, 2), +#endif + }) + .withSetter(SizeSetter) + .build()); + +#ifdef MPEG4 + addParameter( + DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL) + .withDefault(new C2StreamProfileLevelInfo::input(0u, + C2Config::PROFILE_MP4V_SIMPLE, C2Config::LEVEL_MP4V_3)) + .withFields({ + C2F(mProfileLevel, profile).equalTo( + C2Config::PROFILE_MP4V_SIMPLE), + C2F(mProfileLevel, level).oneOf({ + C2Config::LEVEL_MP4V_0, + C2Config::LEVEL_MP4V_0B, + C2Config::LEVEL_MP4V_1, + C2Config::LEVEL_MP4V_2, + C2Config::LEVEL_MP4V_3, + C2Config::LEVEL_MP4V_3B, + C2Config::LEVEL_MP4V_4, + C2Config::LEVEL_MP4V_4A, + C2Config::LEVEL_MP4V_5, + C2Config::LEVEL_MP4V_6}) + }) + .withSetter(ProfileLevelSetter, mSize) + .build()); +#else + addParameter( + DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL) + .withDefault(new C2StreamProfileLevelInfo::input(0u, + C2Config::PROFILE_H263_BASELINE, C2Config::LEVEL_H263_30)) + .withFields({ + C2F(mProfileLevel, profile).oneOf({ + C2Config::PROFILE_H263_BASELINE, + C2Config::PROFILE_H263_ISWV2}), + C2F(mProfileLevel, level).oneOf({ + C2Config::LEVEL_H263_10, + C2Config::LEVEL_H263_20, + C2Config::LEVEL_H263_30, + C2Config::LEVEL_H263_40, + C2Config::LEVEL_H263_45}) + }) + .withSetter(ProfileLevelSetter, mSize) + .build()); +#endif + + addParameter( + DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE) +#ifdef MPEG4 + .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 1920, 1088)) +#else + .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 352, 288)) +#endif + .withFields({ +#ifdef MPEG4 + C2F(mSize, width).inRange(2, 1920, 2), + C2F(mSize, height).inRange(2, 1088, 2), +#else + C2F(mSize, width).inRange(2, 352, 2), + C2F(mSize, height).inRange(2, 288, 2), +#endif + }) + .withSetter(MaxPictureSizeSetter, mSize) + .build()); + + addParameter( + DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE) +#ifdef MPEG4 + .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, 1920 * 1088 * 3 / 2)) +#else + .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, 352 * 288 * 3 / 2)) +#endif + .withFields({ + C2F(mMaxInputSize, value).any(), 
+ }) + .calculatedAs(MaxInputSizeSetter, mMaxSize) + .build()); + + C2ChromaOffsetStruct locations[1] = { C2ChromaOffsetStruct::ITU_YUV_420_0() }; + std::shared_ptr defaultColorInfo = + C2StreamColorInfo::output::AllocShared( + 1u, 0u, 8u /* bitDepth */, C2Color::YUV_420); + memcpy(defaultColorInfo->m.locations, locations, sizeof(locations)); + + defaultColorInfo = + C2StreamColorInfo::output::AllocShared( + { C2ChromaOffsetStruct::ITU_YUV_420_0() }, + 0u, 8u /* bitDepth */, C2Color::YUV_420); + helper->addStructDescriptors(); + + addParameter( + DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO) + .withConstValue(defaultColorInfo) + .build()); + + // TODO: support more formats? + addParameter( + DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT) + .withConstValue(new C2StreamPixelFormatInfo::output( + 0u, HAL_PIXEL_FORMAT_YCBCR_420_888)) + .build()); + } + + static C2R SizeSetter(bool mayBlock, const C2P &oldMe, + C2P &me) { + (void)mayBlock; + C2R res = C2R::Ok(); + if (!me.F(me.v.width).supportsAtAll(me.v.width)) { + res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width))); + me.set().width = oldMe.v.width; + } + if (!me.F(me.v.height).supportsAtAll(me.v.height)) { + res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height))); + me.set().height = oldMe.v.height; + } + return res; + } + + static C2R MaxPictureSizeSetter(bool mayBlock, C2P &me, + const C2P &size) { + (void)mayBlock; + // TODO: get max width/height from the size's field helpers vs. hardcoding +#ifdef MPEG4 + me.set().width = c2_min(c2_max(me.v.width, size.v.width), 1920u); + me.set().height = c2_min(c2_max(me.v.height, size.v.height), 1088u); +#else + me.set().width = c2_min(c2_max(me.v.width, size.v.width), 352u); + me.set().height = c2_min(c2_max(me.v.height, size.v.height), 288u); +#endif + return C2R::Ok(); + } + + static C2R MaxInputSizeSetter(bool mayBlock, C2P &me, + const C2P &maxSize) { + (void)mayBlock; + // assume compression ratio of 1 + me.set().value = (((maxSize.v.width + 15) / 16) * ((maxSize.v.height + 15) / 16) * 384); + return C2R::Ok(); + } + + static C2R ProfileLevelSetter(bool mayBlock, C2P &me, + const C2P &size) { + (void)mayBlock; + (void)size; + (void)me; // TODO: validate + return C2R::Ok(); + } + + uint32_t getMaxWidth() const { return mMaxSize->width; } + uint32_t getMaxHeight() const { return mMaxSize->height; } + +private: + std::shared_ptr mProfileLevel; + std::shared_ptr mSize; + std::shared_ptr mMaxSize; + std::shared_ptr mMaxInputSize; + std::shared_ptr mColorInfo; + std::shared_ptr mPixelFormat; +}; + +C2SoftMpeg4Dec::C2SoftMpeg4Dec( + const char *name, + c2_node_id_t id, + const std::shared_ptr &intfImpl) + : SimpleC2Component(std::make_shared>(name, id, intfImpl)), + mIntf(intfImpl), + mDecHandle(nullptr), + mOutputBuffer{}, + mInitialized(false) { +} + +C2SoftMpeg4Dec::~C2SoftMpeg4Dec() { + onRelease(); +} + +c2_status_t C2SoftMpeg4Dec::onInit() { + status_t err = initDecoder(); + return err == OK ? 
C2_OK : C2_CORRUPTED; +} + +c2_status_t C2SoftMpeg4Dec::onStop() { + if (mInitialized) { + if (mDecHandle) { + PVCleanUpVideoDecoder(mDecHandle); + } + mInitialized = false; + } + for (int32_t i = 0; i < kNumOutputBuffers; ++i) { + if (mOutputBuffer[i]) { + free(mOutputBuffer[i]); + mOutputBuffer[i] = nullptr; + } + } + mNumSamplesOutput = 0; + mFramesConfigured = false; + mSignalledOutputEos = false; + mSignalledError = false; + + return C2_OK; +} + +void C2SoftMpeg4Dec::onReset() { + (void)onStop(); + (void)onInit(); +} + +void C2SoftMpeg4Dec::onRelease() { + if (mInitialized) { + if (mDecHandle) { + PVCleanUpVideoDecoder(mDecHandle); + delete mDecHandle; + mDecHandle = nullptr; + } + mInitialized = false; + } + if (mOutBlock) { + mOutBlock.reset(); + } + for (int32_t i = 0; i < kNumOutputBuffers; ++i) { + if (mOutputBuffer[i]) { + free(mOutputBuffer[i]); + mOutputBuffer[i] = nullptr; + } + } +} + +c2_status_t C2SoftMpeg4Dec::onFlush_sm() { + if (mInitialized) { + if (PV_TRUE != PVResetVideoDecoder(mDecHandle)) { + return C2_CORRUPTED; + } + } + mSignalledOutputEos = false; + mSignalledError = false; + return C2_OK; +} + +status_t C2SoftMpeg4Dec::initDecoder() { +#ifdef MPEG4 + mIsMpeg4 = true; +#else + mIsMpeg4 = false; +#endif + if (!mDecHandle) { + mDecHandle = new tagvideoDecControls; + } + if (!mDecHandle) { + ALOGE("mDecHandle is null"); + return NO_MEMORY; + } + memset(mDecHandle, 0, sizeof(tagvideoDecControls)); + + /* TODO: bring these values to 352 and 288. It cannot be done as of now + * because, h263 doesn't seem to allow port reconfiguration. In OMX, the + * problem of larger width and height than default width and height is + * overcome by adaptivePlayBack() api call. This call gets width and height + * information from extractor. Such a thing is not possible here. 
+ * So we are configuring to larger values.*/ + mWidth = 1408; + mHeight = 1152; + mNumSamplesOutput = 0; + mInitialized = false; + mFramesConfigured = false; + mSignalledOutputEos = false; + mSignalledError = false; + + return OK; +} + +void fillEmptyWork(const std::unique_ptr &work) { + uint32_t flags = 0; + if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) { + flags |= C2FrameData::FLAG_END_OF_STREAM; + ALOGV("signalling eos"); + } + work->worklets.front()->output.flags = (C2FrameData::flags_t)flags; + work->worklets.front()->output.buffers.clear(); + work->worklets.front()->output.ordinal = work->input.ordinal; + work->workletsProcessed = 1u; +} + +void C2SoftMpeg4Dec::finishWork(uint64_t index, const std::unique_ptr &work) { + std::shared_ptr buffer = createGraphicBuffer(std::move(mOutBlock), + C2Rect(mWidth, mHeight)); + mOutBlock = nullptr; + auto fillWork = [buffer, index](const std::unique_ptr &work) { + uint32_t flags = 0; + if ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) && + (c2_cntr64_t(index) == work->input.ordinal.frameIndex)) { + flags |= C2FrameData::FLAG_END_OF_STREAM; + ALOGV("signalling eos"); + } + work->worklets.front()->output.flags = (C2FrameData::flags_t)flags; + work->worklets.front()->output.buffers.clear(); + work->worklets.front()->output.buffers.push_back(buffer); + work->worklets.front()->output.ordinal = work->input.ordinal; + work->workletsProcessed = 1u; + }; + if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) { + fillWork(work); + } else { + finish(index, fillWork); + } +} + +c2_status_t C2SoftMpeg4Dec::ensureDecoderState(const std::shared_ptr &pool) { + if (!mDecHandle) { + ALOGE("not supposed to be here, invalid decoder context"); + return C2_CORRUPTED; + } + + mOutputBufferSize = align(mIntf->getMaxWidth(), 16) * align(mIntf->getMaxHeight(), 16) * 3 / 2; + for (int32_t i = 0; i < kNumOutputBuffers; ++i) { + if (!mOutputBuffer[i]) { + mOutputBuffer[i] = (uint8_t *)malloc(mOutputBufferSize); + if (!mOutputBuffer[i]) { + return C2_NO_MEMORY; + } + } + } + if (mOutBlock && + (mOutBlock->width() != align(mWidth, 16) || mOutBlock->height() != mHeight)) { + mOutBlock.reset(); + } + if (!mOutBlock) { + uint32_t format = HAL_PIXEL_FORMAT_YV12; + C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE }; + c2_status_t err = pool->fetchGraphicBlock(align(mWidth, 16), mHeight, format, usage, &mOutBlock); + if (err != C2_OK) { + ALOGE("fetchGraphicBlock for Output failed with status %d", err); + return err; + } + ALOGV("provided (%dx%d) required (%dx%d)", + mOutBlock->width(), mOutBlock->height(), mWidth, mHeight); + } + return C2_OK; +} + +bool C2SoftMpeg4Dec::handleResChange(const std::unique_ptr &work) { + uint32_t disp_width, disp_height; + PVGetVideoDimensions(mDecHandle, (int32 *)&disp_width, (int32 *)&disp_height); + + uint32_t buf_width, buf_height; + PVGetBufferDimensions(mDecHandle, (int32 *)&buf_width, (int32 *)&buf_height); + + CHECK_LE(disp_width, buf_width); + CHECK_LE(disp_height, buf_height); + + ALOGV("display size (%dx%d), buffer size (%dx%d)", + disp_width, disp_height, buf_width, buf_height); + + bool resChanged = false; + if (disp_width != mWidth || disp_height != mHeight) { + mWidth = disp_width; + mHeight = disp_height; + resChanged = true; + for (int32_t i = 0; i < kNumOutputBuffers; ++i) { + if (mOutputBuffer[i]) { + free(mOutputBuffer[i]); + mOutputBuffer[i] = nullptr; + } + } + + if (!mIsMpeg4) { + PVCleanUpVideoDecoder(mDecHandle); + + uint8_t *vol_data[1]{}; + int32_t vol_size = 0; + + 
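+            // For H263 the new frame dimensions arrive in the short header rather than in a
+            // VOL header, so the decoder is recreated here in H263_MODE, sized to the maximum
+            // dimensions advertised by the interface (getMaxWidth()/getMaxHeight()).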
if (!PVInitVideoDecoder( + mDecHandle, vol_data, &vol_size, 1, mIntf->getMaxWidth(), mIntf->getMaxHeight(), H263_MODE)) { + ALOGE("Error in PVInitVideoDecoder H263_MODE while resChanged was set to true"); + mSignalledError = true; + work->result = C2_CORRUPTED; + return true; + } + } + mFramesConfigured = false; + } + return resChanged; +} + +/* TODO: can remove temporary copy after library supports writing to display + * buffer Y, U and V plane pointers using stride info. */ +static void copyOutputBufferToYV12Frame(uint8_t *dst, uint8_t *src, size_t dstYStride, + size_t srcYStride, uint32_t width, uint32_t height) { + size_t dstUVStride = align(dstYStride / 2, 16); + size_t srcUVStride = srcYStride / 2; + uint8_t *srcStart = src; + uint8_t *dstStart = dst; + size_t vStride = align(height, 16); + for (size_t i = 0; i < height; ++i) { + memcpy(dst, src, width); + src += srcYStride; + dst += dstYStride; + } + /* U buffer */ + src = srcStart + vStride * srcYStride; + dst = dstStart + (dstYStride * height) + (dstUVStride * height / 2); + for (size_t i = 0; i < height / 2; ++i) { + memcpy(dst, src, width / 2); + src += srcUVStride; + dst += dstUVStride; + } + /* V buffer */ + src = srcStart + vStride * srcYStride * 5 / 4; + dst = dstStart + (dstYStride * height); + for (size_t i = 0; i < height / 2; ++i) { + memcpy(dst, src, width / 2); + src += srcUVStride; + dst += dstUVStride; + } +} + +void C2SoftMpeg4Dec::process( + const std::unique_ptr &work, + const std::shared_ptr &pool) { + // Initialize output work + work->result = C2_OK; + work->workletsProcessed = 1u; + work->worklets.front()->output.configUpdate.clear(); + work->worklets.front()->output.flags = work->input.flags; + + if (mSignalledError || mSignalledOutputEos) { + work->result = C2_BAD_VALUE; + return; + } + + size_t inOffset = 0u; + size_t inSize = 0u; + uint32_t workIndex = work->input.ordinal.frameIndex.peeku() & 0xFFFFFFFF; + C2ReadView rView = mDummyReadView; + if (!work->input.buffers.empty()) { + rView = work->input.buffers[0]->data().linearBlocks().front().map().get(); + inSize = rView.capacity(); + if (inSize && rView.error()) { + ALOGE("read view map failed %d", rView.error()); + work->result = C2_CORRUPTED; + return; + } + } + ALOGV("in buffer attr. size %zu timestamp %d frameindex %d, flags %x", + inSize, (int)work->input.ordinal.timestamp.peeku(), + (int)work->input.ordinal.frameIndex.peeku(), work->input.flags); + + bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0); + if (inSize == 0) { + fillEmptyWork(work); + if (eos) { + mSignalledOutputEos = true; + } + return; + } + + uint8_t *bitstream = const_cast(rView.data() + inOffset); + uint32_t *start_code = (uint32_t *)bitstream; + bool volHeader = *start_code == 0xB0010000; + if (volHeader) { + PVCleanUpVideoDecoder(mDecHandle); + mInitialized = false; + } + + if (!mInitialized) { + uint8_t *vol_data[1]{}; + int32_t vol_size = 0; + + bool codecConfig = (work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0; + if (codecConfig || volHeader) { + vol_data[0] = bitstream; + vol_size = inSize; + } + MP4DecodingMode mode = (mIsMpeg4) ? MPEG4_MODE : H263_MODE; + if (!PVInitVideoDecoder( + mDecHandle, vol_data, &vol_size, 1, + mIntf->getMaxWidth(), mIntf->getMaxHeight(), mode)) { + ALOGE("PVInitVideoDecoder failed. 
Unsupported content?"); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + mInitialized = true; + MP4DecodingMode actualMode = PVGetDecBitstreamMode(mDecHandle); + if (mode != actualMode) { + ALOGE("Decoded mode not same as actual mode of the decoder"); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + + PVSetPostProcType(mDecHandle, 0); + if (handleResChange(work)) { + ALOGI("Setting width and height"); + C2VideoSizeStreamInfo::output size(0u, mWidth, mHeight); + std::vector> failures; + c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures); + if (err == OK) { + work->worklets.front()->output.configUpdate.push_back( + C2Param::Copy(size)); + } else { + ALOGE("Config update size failed"); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + } + if (codecConfig) { + fillEmptyWork(work); + return; + } + } + + size_t inPos = 0; + while (inPos < inSize) { + c2_status_t err = ensureDecoderState(pool); + if (C2_OK != err) { + mSignalledError = true; + work->result = err; + return; + } + C2GraphicView wView = mOutBlock->map().get(); + if (wView.error()) { + ALOGE("graphic view map failed %d", wView.error()); + work->result = C2_CORRUPTED; + return; + } + + uint32_t yFrameSize = sizeof(uint8) * mDecHandle->size; + if (mOutputBufferSize < yFrameSize * 3 / 2){ + ALOGE("Too small output buffer: %zu bytes", mOutputBufferSize); + mSignalledError = true; + work->result = C2_NO_MEMORY; + return; + } + + if (!mFramesConfigured) { + PVSetReferenceYUV(mDecHandle,mOutputBuffer[1]); + mFramesConfigured = true; + } + + // Need to check if header contains new info, e.g., width/height, etc. + VopHeaderInfo header_info; + uint32_t useExtTimestamp = (inPos == 0); + int32_t tmpInSize = (int32_t)inSize; + uint8_t *bitstreamTmp = bitstream; + uint32_t timestamp = workIndex; + if (PVDecodeVopHeader( + mDecHandle, &bitstreamTmp, ×tamp, &tmpInSize, + &header_info, &useExtTimestamp, + mOutputBuffer[mNumSamplesOutput & 1]) != PV_TRUE) { + ALOGE("failed to decode vop header."); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + + // H263 doesn't have VOL header, the frame size information is in short header, i.e. the + // decoder may detect size change after PVDecodeVopHeader. 
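+        // A size change detected at this point is treated as an error for MPEG-4; for H263 the
+        // new size is pushed to the interface as a config update and the loop is restarted so
+        // the same input is re-attempted at the new dimensions.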
+ bool resChange = handleResChange(work); + if (mIsMpeg4 && resChange) { + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } else if (resChange) { + ALOGI("Setting width and height"); + C2VideoSizeStreamInfo::output size(0u, mWidth, mHeight); + std::vector> failures; + c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures); + if (err == OK) { + work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(size)); + } else { + ALOGE("Config update size failed"); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + continue; + } + + if (PVDecodeVopBody(mDecHandle, &tmpInSize) != PV_TRUE) { + ALOGE("failed to decode video frame."); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + if (handleResChange(work)) { + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + + uint8_t *outputBufferY = wView.data()[C2PlanarLayout::PLANE_Y]; + (void)copyOutputBufferToYV12Frame(outputBufferY, mOutputBuffer[mNumSamplesOutput & 1], + wView.width(), align(mWidth, 16), mWidth, mHeight); + + inPos += inSize - (size_t)tmpInSize; + finishWork(workIndex, work); + ++mNumSamplesOutput; + if (inSize - inPos != 0) { + ALOGD("decoded frame, ignoring further trailing bytes %d", + (int)inSize - (int)inPos); + break; + } + } +} + +c2_status_t C2SoftMpeg4Dec::drain( + uint32_t drainMode, + const std::shared_ptr &pool) { + (void)pool; + if (drainMode == NO_DRAIN) { + ALOGW("drain with NO_DRAIN: no-op"); + return C2_OK; + } + if (drainMode == DRAIN_CHAIN) { + ALOGW("DRAIN_CHAIN not supported"); + return C2_OMITTED; + } + return C2_OK; +} + +class C2SoftMpeg4DecFactory : public C2ComponentFactory { +public: + C2SoftMpeg4DecFactory() : mHelper(std::static_pointer_cast( + GetCodec2PlatformComponentStore()->getParamReflector())) { + } + + virtual c2_status_t createComponent( + c2_node_id_t id, + std::shared_ptr* const component, + std::function deleter) override { + *component = std::shared_ptr( + new C2SoftMpeg4Dec(COMPONENT_NAME, + id, + std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual c2_status_t createInterface( + c2_node_id_t id, + std::shared_ptr* const interface, + std::function deleter) override { + *interface = std::shared_ptr( + new SimpleInterface( + COMPONENT_NAME, id, std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual ~C2SoftMpeg4DecFactory() override = default; + +private: + std::shared_ptr mHelper; +}; + +} // namespace android + +extern "C" ::C2ComponentFactory* CreateCodec2Factory() { + ALOGV("in %s", __func__); + return new ::android::C2SoftMpeg4DecFactory(); +} + +extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) { + ALOGV("in %s", __func__); + delete factory; +} diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.h b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.h new file mode 100644 index 0000000000000000000000000000000000000000..716a0951b23081a666e77aba04c600fead75f893 --- /dev/null +++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.h @@ -0,0 +1,77 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef C2_SOFT_MPEG4_DEC_H_ +#define C2_SOFT_MPEG4_DEC_H_ + +#include + + +struct tagvideoDecControls; + +namespace android { + +struct C2SoftMpeg4Dec : public SimpleC2Component { + class IntfImpl; + + C2SoftMpeg4Dec(const char* name, c2_node_id_t id, + const std::shared_ptr& intfImpl); + virtual ~C2SoftMpeg4Dec(); + + // From SimpleC2Component + c2_status_t onInit() override; + c2_status_t onStop() override; + void onReset() override; + void onRelease() override; + c2_status_t onFlush_sm() override; + void process( + const std::unique_ptr &work, + const std::shared_ptr &pool) override; + c2_status_t drain( + uint32_t drainMode, + const std::shared_ptr &pool) override; + private: + enum { + kNumOutputBuffers = 2, + }; + + status_t initDecoder(); + c2_status_t ensureDecoderState(const std::shared_ptr &pool); + void finishWork(uint64_t index, const std::unique_ptr &work); + bool handleResChange(const std::unique_ptr &work); + + std::shared_ptr mIntf; + tagvideoDecControls *mDecHandle; + std::shared_ptr mOutBlock; + uint8_t *mOutputBuffer[kNumOutputBuffers]; + size_t mOutputBufferSize; + + uint32_t mWidth; + uint32_t mHeight; + uint32_t mNumSamplesOutput; + + bool mIsMpeg4; + bool mInitialized; + bool mFramesConfigured; + bool mSignalledOutputEos; + bool mSignalledError; + + C2_DO_NOT_COPY(C2SoftMpeg4Dec); +}; + +} // namespace android + +#endif // C2_SOFT_MPEG4_DEC_H_ diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp new file mode 100644 index 0000000000000000000000000000000000000000..c8796f339237d4ac665e1f488085fb0d3972bc6d --- /dev/null +++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.cpp @@ -0,0 +1,671 @@ +/* + * Copyright 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#ifdef MPEG4 + #define LOG_TAG "C2SoftMpeg4Enc" +#else + #define LOG_TAG "C2SoftH263Enc" +#endif +#include + +#include + +#include +#include +#include +#include + +#include +#include +#include +#include + +#include "C2SoftMpeg4Enc.h" +#include "mp4enc_api.h" + +namespace android { + +#ifdef MPEG4 +constexpr char COMPONENT_NAME[] = "c2.android.mpeg4.encoder"; +#else +constexpr char COMPONENT_NAME[] = "c2.android.h263.encoder"; +#endif + +class C2SoftMpeg4Enc::IntfImpl : public C2InterfaceHelper { + public: + explicit IntfImpl(const std::shared_ptr& helper) + : C2InterfaceHelper(helper) { + setDerivedInstance(this); + + addParameter( + DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING) + .withConstValue( + new C2StreamFormatConfig::input(0u, C2FormatVideo)) + .build()); + + addParameter( + DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING) + .withConstValue( + new C2StreamFormatConfig::output(0u, C2FormatCompressed)) + .build()); + + addParameter( + DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING) + .withConstValue(AllocSharedString( + MEDIA_MIMETYPE_VIDEO_RAW)) + .build()); + + addParameter( + DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING) + .withConstValue(AllocSharedString( +#ifdef MPEG4 + MEDIA_MIMETYPE_VIDEO_MPEG4 +#else + MEDIA_MIMETYPE_VIDEO_H263 +#endif + )) + .build()); + + addParameter(DefineParam(mUsage, C2_NAME_INPUT_STREAM_USAGE_SETTING) + .withConstValue(new C2StreamUsageTuning::input( + 0u, (uint64_t)C2MemoryUsage::CPU_READ)) + .build()); + + addParameter( + DefineParam(mSize, C2_NAME_STREAM_VIDEO_SIZE_SETTING) + .withDefault(new C2VideoSizeStreamTuning::input(0u, 176, 144)) + .withFields({ +#ifdef MPEG4 + C2F(mSize, width).inRange(16, 176, 16), + C2F(mSize, height).inRange(16, 144, 16), +#else + C2F(mSize, width).oneOf({176, 352}), + C2F(mSize, height).oneOf({144, 288}), +#endif + }) + .withSetter(SizeSetter) + .build()); + + addParameter( + DefineParam(mFrameRate, C2_NAME_STREAM_FRAME_RATE_SETTING) + .withDefault(new C2StreamFrameRateInfo::output(0u, 17.)) + // TODO: More restriction? 
+ .withFields({C2F(mFrameRate, value).greaterThan(0.)}) + .withSetter( + Setter::StrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING) + .withDefault(new C2BitrateTuning::output(0u, 64000)) + .withFields({C2F(mBitrate, value).inRange(4096, 12000000)}) + .withSetter(BitrateSetter) + .build()); + + addParameter( + DefineParam(mSyncFramePeriod, C2_PARAMKEY_SYNC_FRAME_INTERVAL) + .withDefault(new C2StreamSyncFrameIntervalTuning::output(0u, 1000000)) + .withFields({C2F(mSyncFramePeriod, value).any()}) + .withSetter(Setter::StrictValueWithNoDeps) + .build()); + +#ifdef MPEG4 + addParameter( + DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL) + .withDefault(new C2StreamProfileLevelInfo::output( + 0u, PROFILE_MP4V_SIMPLE, LEVEL_MP4V_2)) + .withFields({ + C2F(mProfileLevel, profile).equalTo( + PROFILE_MP4V_SIMPLE), + C2F(mProfileLevel, level).oneOf({ + C2Config::LEVEL_MP4V_0, + C2Config::LEVEL_MP4V_0B, + C2Config::LEVEL_MP4V_1, + C2Config::LEVEL_MP4V_2}) + }) + .withSetter(ProfileLevelSetter) + .build()); +#else + addParameter( + DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL) + .withDefault(new C2StreamProfileLevelInfo::output( + 0u, PROFILE_H263_BASELINE, LEVEL_H263_45)) + .withFields({ + C2F(mProfileLevel, profile).equalTo( + PROFILE_H263_BASELINE), + C2F(mProfileLevel, level).oneOf({ + C2Config::LEVEL_H263_10, + C2Config::LEVEL_H263_20, + C2Config::LEVEL_H263_30, + C2Config::LEVEL_H263_40, + C2Config::LEVEL_H263_45}) + }) + .withSetter(ProfileLevelSetter) + .build()); +#endif + } + + static C2R BitrateSetter(bool mayBlock, C2P &me) { + (void)mayBlock; + C2R res = C2R::Ok(); + if (me.v.value <= 4096) { + me.set().value = 4096; + } + return res; + } + + static C2R SizeSetter(bool mayBlock, const C2P &oldMe, + C2P &me) { + (void)mayBlock; + C2R res = C2R::Ok(); + if (!me.F(me.v.width).supportsAtAll(me.v.width)) { + res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width))); + me.set().width = oldMe.v.width; + } + if (!me.F(me.v.height).supportsAtAll(me.v.height)) { + res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height))); + me.set().height = oldMe.v.height; + } + return res; + } + + static C2R ProfileLevelSetter( + bool mayBlock, + C2P &me) { + (void)mayBlock; + if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) { +#ifdef MPEG4 + me.set().profile = PROFILE_MP4V_SIMPLE; +#else + me.set().profile = PROFILE_H263_BASELINE; +#endif + } + if (!me.F(me.v.level).supportsAtAll(me.v.level)) { +#ifdef MPEG4 + me.set().level = LEVEL_MP4V_2; +#else + me.set().level = LEVEL_H263_45; +#endif + } + return C2R::Ok(); + } + + // unsafe getters + std::shared_ptr getSize_l() const { return mSize; } + std::shared_ptr getFrameRate_l() const { return mFrameRate; } + std::shared_ptr getBitrate_l() const { return mBitrate; } + uint32_t getSyncFramePeriod() const { + if (mSyncFramePeriod->value < 0 || mSyncFramePeriod->value == INT64_MAX) { + return 0; + } + double period = mSyncFramePeriod->value / 1e6 * mFrameRate->value; + return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.); + } + + private: + std::shared_ptr mInputFormat; + std::shared_ptr mOutputFormat; + std::shared_ptr mInputMediaType; + std::shared_ptr mOutputMediaType; + std::shared_ptr mUsage; + std::shared_ptr mSize; + std::shared_ptr mFrameRate; + std::shared_ptr mBitrate; + std::shared_ptr mProfileLevel; + std::shared_ptr mSyncFramePeriod; +}; + +C2SoftMpeg4Enc::C2SoftMpeg4Enc(const char* name, c2_node_id_t id, + const std::shared_ptr& intfImpl) + : 
SimpleC2Component( + std::make_shared>(name, id, intfImpl)), + mIntf(intfImpl), + mHandle(nullptr), + mEncParams(nullptr), + mStarted(false), + mOutBufferSize(524288) { +} + +C2SoftMpeg4Enc::~C2SoftMpeg4Enc() { + onRelease(); +} + +c2_status_t C2SoftMpeg4Enc::onInit() { +#ifdef MPEG4 + mEncodeMode = COMBINE_MODE_WITH_ERR_RES; +#else + mEncodeMode = H263_MODE; +#endif + if (!mHandle) { + mHandle = new tagvideoEncControls; + } + + if (!mEncParams) { + mEncParams = new tagvideoEncOptions; + } + + if (!(mEncParams && mHandle)) return C2_NO_MEMORY; + + mSignalledOutputEos = false; + mSignalledError = false; + + return initEncoder(); +} + +c2_status_t C2SoftMpeg4Enc::onStop() { + if (!mStarted) { + return C2_OK; + } + if (mHandle) { + (void)PVCleanUpVideoEncoder(mHandle); + } + mStarted = false; + mSignalledOutputEos = false; + mSignalledError = false; + return C2_OK; +} + +void C2SoftMpeg4Enc::onReset() { + onStop(); + initEncoder(); +} + +void C2SoftMpeg4Enc::onRelease() { + onStop(); + if (mEncParams) { + delete mEncParams; + mEncParams = nullptr; + } + if (mHandle) { + delete mHandle; + mHandle = nullptr; + } +} + +c2_status_t C2SoftMpeg4Enc::onFlush_sm() { + return C2_OK; +} + +static void fillEmptyWork(const std::unique_ptr &work) { + uint32_t flags = 0; + if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) { + flags |= C2FrameData::FLAG_END_OF_STREAM; + ALOGV("signalling eos"); + } + work->worklets.front()->output.flags = (C2FrameData::flags_t)flags; + work->worklets.front()->output.buffers.clear(); + work->worklets.front()->output.ordinal = work->input.ordinal; + work->workletsProcessed = 1u; +} + +c2_status_t C2SoftMpeg4Enc::initEncParams() { + if (mHandle) { + memset(mHandle, 0, sizeof(tagvideoEncControls)); + } else return C2_CORRUPTED; + if (mEncParams) { + memset(mEncParams, 0, sizeof(tagvideoEncOptions)); + } else return C2_CORRUPTED; + + if (!PVGetDefaultEncOption(mEncParams, 0)) { + ALOGE("Failed to get default encoding parameters"); + return C2_CORRUPTED; + } + + if (mFrameRate->value == 0) { + ALOGE("Framerate should not be 0"); + return C2_BAD_VALUE; + } + + mEncParams->encMode = mEncodeMode; + mEncParams->encWidth[0] = mSize->width; + mEncParams->encHeight[0] = mSize->height; + mEncParams->encFrameRate[0] = mFrameRate->value + 0.5; + mEncParams->rcType = VBR_1; + mEncParams->vbvDelay = 5.0f; + mEncParams->profile_level = CORE_PROFILE_LEVEL2; + mEncParams->packetSize = 32; + mEncParams->rvlcEnable = PV_OFF; + mEncParams->numLayers = 1; + mEncParams->timeIncRes = 1000; + mEncParams->tickPerSrc = mEncParams->timeIncRes / (mFrameRate->value + 0.5); + mEncParams->bitRate[0] = mBitrate->value; + mEncParams->iQuant[0] = 15; + mEncParams->pQuant[0] = 12; + mEncParams->quantType[0] = 0; + mEncParams->noFrameSkipped = PV_OFF; + + // PV's MPEG4 encoder requires the video dimension of multiple + if (mSize->width % 16 != 0 || mSize->height % 16 != 0) { + ALOGE("Video frame size %dx%d must be a multiple of 16", + mSize->width, mSize->height); + return C2_BAD_VALUE; + } + + // Set IDR frame refresh interval + mEncParams->intraPeriod = mIntf->getSyncFramePeriod(); + mEncParams->numIntraMB = 0; + mEncParams->sceneDetect = PV_ON; + mEncParams->searchRange = 16; + mEncParams->mv8x8Enable = PV_OFF; + mEncParams->gobHeaderInterval = 0; + mEncParams->useACPred = PV_ON; + mEncParams->intraDCVlcTh = 0; + + return C2_OK; +} + +c2_status_t C2SoftMpeg4Enc::initEncoder() { + if (mStarted) { + return C2_OK; + } + { + IntfImpl::Lock lock = mIntf->lock(); + mSize = mIntf->getSize_l(); + mBitrate = 
mIntf->getBitrate_l(); + mFrameRate = mIntf->getFrameRate_l(); + } + c2_status_t err = initEncParams(); + if (C2_OK != err) { + ALOGE("Failed to initialized encoder params"); + mSignalledError = true; + return err; + } + if (!PVInitVideoEncoder(mHandle, mEncParams)) { + ALOGE("Failed to initialize the encoder"); + mSignalledError = true; + return C2_CORRUPTED; + } + + // 1st buffer for codec specific data + mNumInputFrames = -1; + mStarted = true; + return C2_OK; +} + +void C2SoftMpeg4Enc::process( + const std::unique_ptr &work, + const std::shared_ptr &pool) { + // Initialize output work + work->result = C2_OK; + work->workletsProcessed = 1u; + work->worklets.front()->output.flags = work->input.flags; + if (mSignalledError || mSignalledOutputEos) { + work->result = C2_BAD_VALUE; + return; + } + + // Initialize encoder if not already initialized + if (!mStarted && C2_OK != initEncoder()) { + ALOGE("Failed to initialize encoder"); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + + std::shared_ptr block; + C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE }; + c2_status_t err = pool->fetchLinearBlock(mOutBufferSize, usage, &block); + if (err != C2_OK) { + ALOGE("fetchLinearBlock for Output failed with status %d", err); + work->result = C2_NO_MEMORY; + return; + } + + C2WriteView wView = block->map().get(); + if (wView.error()) { + ALOGE("write view map failed %d", wView.error()); + work->result = wView.error(); + return; + } + + uint8_t *outPtr = (uint8_t *)wView.data(); + if (mNumInputFrames < 0) { + // The very first thing we want to output is the codec specific data. + int32_t outputSize = mOutBufferSize; + if (!PVGetVolHeader(mHandle, outPtr, &outputSize, 0)) { + ALOGE("Failed to get VOL header"); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } else { + ALOGV("Bytes Generated in header %d\n", outputSize); + } + + ++mNumInputFrames; + std::unique_ptr csd = + C2StreamCsdInfo::output::AllocUnique(outputSize, 0u); + if (!csd) { + ALOGE("CSD allocation failed"); + mSignalledError = true; + work->result = C2_NO_MEMORY; + return; + } + memcpy(csd->m.value, outPtr, outputSize); + work->worklets.front()->output.configUpdate.push_back(std::move(csd)); + } + + std::shared_ptr rView; + std::shared_ptr inputBuffer; + bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0); + if (!work->input.buffers.empty()) { + inputBuffer = work->input.buffers[0]; + rView = std::make_shared( + inputBuffer->data().graphicBlocks().front().map().get()); + if (rView->error() != C2_OK) { + ALOGE("graphic view map err = %d", rView->error()); + work->result = rView->error(); + return; + } + } else { + fillEmptyWork(work); + if (eos) { + mSignalledOutputEos = true; + ALOGV("signalled EOS"); + } + return; + } + + uint64_t inputTimeStamp = work->input.ordinal.timestamp.peekull(); + const C2ConstGraphicBlock inBuffer = inputBuffer->data().graphicBlocks().front(); + if (inBuffer.width() < mSize->width || + inBuffer.height() < mSize->height) { + /* Expect width height to be configured */ + ALOGW("unexpected Capacity Aspect %d(%d) x %d(%d)", inBuffer.width(), + mSize->width, inBuffer.height(), mSize->height); + work->result = C2_BAD_VALUE; + return; + } + + const C2PlanarLayout &layout = rView->layout(); + uint8_t *yPlane = const_cast(rView->data()[C2PlanarLayout::PLANE_Y]); + uint8_t *uPlane = const_cast(rView->data()[C2PlanarLayout::PLANE_U]); + uint8_t *vPlane = const_cast(rView->data()[C2PlanarLayout::PLANE_V]); + int32_t yStride = 
layout.planes[C2PlanarLayout::PLANE_Y].rowInc; + int32_t uStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc; + int32_t vStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc; + uint32_t width = mSize->width; + uint32_t height = mSize->height; + // width and height are always even (as block size is 16x16) + CHECK_EQ((width & 1u), 0u); + CHECK_EQ((height & 1u), 0u); + size_t yPlaneSize = width * height; + switch (layout.type) { + case C2PlanarLayout::TYPE_RGB: + [[fallthrough]]; + case C2PlanarLayout::TYPE_RGBA: { + MemoryBlock conversionBuffer = mConversionBuffers.fetch(yPlaneSize * 3 / 2); + mConversionBuffersInUse.emplace(conversionBuffer.data(), conversionBuffer); + yPlane = conversionBuffer.data(); + uPlane = yPlane + yPlaneSize; + vPlane = uPlane + yPlaneSize / 4; + yStride = width; + uStride = vStride = width / 2; + ConvertRGBToPlanarYUV(yPlane, yStride, height, conversionBuffer.size(), *rView.get()); + break; + } + case C2PlanarLayout::TYPE_YUV: { + if (!IsYUV420(*rView)) { + ALOGE("input is not YUV420"); + work->result = C2_BAD_VALUE; + break; + } + + if (layout.planes[layout.PLANE_Y].colInc == 1 + && layout.planes[layout.PLANE_U].colInc == 1 + && layout.planes[layout.PLANE_V].colInc == 1 + && uStride == vStride + && yStride == 2 * vStride) { + // I420 compatible - planes are already set up above + break; + } + + // copy to I420 + MemoryBlock conversionBuffer = mConversionBuffers.fetch(yPlaneSize * 3 / 2); + mConversionBuffersInUse.emplace(conversionBuffer.data(), conversionBuffer); + MediaImage2 img = CreateYUV420PlanarMediaImage2(width, height, width, height); + status_t err = ImageCopy(conversionBuffer.data(), &img, *rView); + if (err != OK) { + ALOGE("Buffer conversion failed: %d", err); + work->result = C2_BAD_VALUE; + return; + } + yPlane = conversionBuffer.data(); + uPlane = yPlane + yPlaneSize; + vPlane = uPlane + yPlaneSize / 4; + yStride = width; + uStride = vStride = width / 2; + break; + } + + case C2PlanarLayout::TYPE_YUVA: + ALOGE("YUVA plane type is not supported"); + work->result = C2_BAD_VALUE; + return; + + default: + ALOGE("Unrecognized plane type: %d", layout.type); + work->result = C2_BAD_VALUE; + return; + } + + CHECK(NULL != yPlane); + /* Encode frames */ + VideoEncFrameIO vin, vout; + memset(&vin, 0, sizeof(vin)); + memset(&vout, 0, sizeof(vout)); + vin.yChan = yPlane; + vin.uChan = uPlane; + vin.vChan = vPlane; + vin.timestamp = (inputTimeStamp + 500) / 1000; // in ms + vin.height = align(height, 16); + vin.pitch = align(width, 16); + + uint32_t modTimeMs = 0; + int32_t nLayer = 0; + MP4HintTrack hintTrack; + int32_t outputSize = mOutBufferSize; + if (!PVEncodeVideoFrame(mHandle, &vin, &vout, &modTimeMs, outPtr, &outputSize, &nLayer) || + !PVGetHintTrack(mHandle, &hintTrack)) { + ALOGE("Failed to encode frame or get hint track at frame %" PRId64, mNumInputFrames); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + ALOGV("outputSize filled : %d", outputSize); + ++mNumInputFrames; + CHECK(NULL == PVGetOverrunBuffer(mHandle)); + + fillEmptyWork(work); + if (outputSize) { + std::shared_ptr buffer = createLinearBuffer(block, 0, outputSize); + work->worklets.front()->output.ordinal.timestamp = inputTimeStamp; + if (hintTrack.CodeType == 0) { + buffer->setInfo(std::make_shared( + 0u /* stream id */, C2PictureTypeKeyFrame)); + } + work->worklets.front()->output.buffers.push_back(buffer); + } + if (eos) { + mSignalledOutputEos = true; + } + + mConversionBuffersInUse.erase(yPlane); +} + +c2_status_t C2SoftMpeg4Enc::drain( + uint32_t 
drainMode, + const std::shared_ptr &pool) { + (void)pool; + if (drainMode == NO_DRAIN) { + ALOGW("drain with NO_DRAIN: no-op"); + return C2_OK; + } + if (drainMode == DRAIN_CHAIN) { + ALOGW("DRAIN_CHAIN not supported"); + return C2_OMITTED; + } + + return C2_OK; +} + +class C2SoftMpeg4EncFactory : public C2ComponentFactory { +public: + C2SoftMpeg4EncFactory() + : mHelper(std::static_pointer_cast( + GetCodec2PlatformComponentStore()->getParamReflector())) {} + + virtual c2_status_t createComponent( + c2_node_id_t id, + std::shared_ptr* const component, + std::function deleter) override { + *component = std::shared_ptr( + new C2SoftMpeg4Enc( + COMPONENT_NAME, id, + std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual c2_status_t createInterface( + c2_node_id_t id, + std::shared_ptr* const interface, + std::function deleter) override { + *interface = std::shared_ptr( + new SimpleInterface( + COMPONENT_NAME, id, + std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual ~C2SoftMpeg4EncFactory() override = default; + +private: + std::shared_ptr mHelper; +}; + +} // namespace android + +extern "C" ::C2ComponentFactory* CreateCodec2Factory() { + ALOGV("in %s", __func__); + return new ::android::C2SoftMpeg4EncFactory(); +} + +extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) { + ALOGV("in %s", __func__); + delete factory; +} diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.h b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.h new file mode 100644 index 0000000000000000000000000000000000000000..43461fcec59e03c71678f35c305b54c081f5a3e0 --- /dev/null +++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Enc.h @@ -0,0 +1,83 @@ +/* + * Copyright 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef C2_SOFT_MPEG4_ENC_H__ +#define C2_SOFT_MPEG4_ENC_H__ + +#include + +#include +#include + +#include "mp4enc_api.h" + +namespace android { + +struct C2SoftMpeg4Enc : public SimpleC2Component { + class IntfImpl; + + C2SoftMpeg4Enc(const char* name, c2_node_id_t id, + const std::shared_ptr& intfImpl); + + // From SimpleC2Component + c2_status_t onInit() override; + c2_status_t onStop() override; + void onReset() override; + void onRelease() override; + c2_status_t onFlush_sm() override; + void process( + const std::unique_ptr &work, + const std::shared_ptr &pool) override; + c2_status_t drain( + uint32_t drainMode, + const std::shared_ptr &pool) override; + +protected: + + virtual ~C2SoftMpeg4Enc(); + +private: + std::shared_ptr mIntf; + + tagvideoEncControls *mHandle; + tagvideoEncOptions *mEncParams; + + bool mStarted; + bool mSignalledOutputEos; + bool mSignalledError; + + uint32_t mOutBufferSize; + // configurations used by component in process + // (TODO: keep this in intf but make them internal only) + std::shared_ptr mSize; + std::shared_ptr mFrameRate; + std::shared_ptr mBitrate; + + int64_t mNumInputFrames; + MP4EncodingMode mEncodeMode; + + MemoryBlockPool mConversionBuffers; + std::map mConversionBuffersInUse; + + c2_status_t initEncParams(); + c2_status_t initEncoder(); + + C2_DO_NOT_COPY(C2SoftMpeg4Enc); +}; + +} // namespace android + +#endif // C2_SOFT_MPEG4_ENC_H__ diff --git a/media/codec2/components/mpeg4_h263/MODULE_LICENSE_APACHE2 b/media/codec2/components/mpeg4_h263/MODULE_LICENSE_APACHE2 new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/media/codec2/components/mpeg4_h263/NOTICE b/media/codec2/components/mpeg4_h263/NOTICE new file mode 100644 index 0000000000000000000000000000000000000000..c5b1efa7aac764ae6d8da63476a2d5cec02a6a5d --- /dev/null +++ b/media/codec2/components/mpeg4_h263/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2005-2008, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/media/codec2/components/mpeg4_h263/patent_disclaimer.txt b/media/codec2/components/mpeg4_h263/patent_disclaimer.txt new file mode 100644 index 0000000000000000000000000000000000000000..b4bf11d4ab14011ba28ce5ff6090529d2b7b587f --- /dev/null +++ b/media/codec2/components/mpeg4_h263/patent_disclaimer.txt @@ -0,0 +1,9 @@ + +THIS IS NOT A GRANT OF PATENT RIGHTS. + +Google makes no representation or warranty that the codecs for which +source code is made available hereunder are unencumbered by +third-party patents. Those intending to use this source code in +hardware or software products are advised that implementations of +these codecs, including in open source software or shareware, may +require patent licenses from the relevant patent holders. 
diff --git a/media/codec2/components/opus/Android.bp b/media/codec2/components/opus/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..a6233a6ddfc8c136c9052cdd85d4b74d3ee47442 --- /dev/null +++ b/media/codec2/components/opus/Android.bp @@ -0,0 +1,11 @@ +cc_library_shared { + name: "libstagefright_soft_c2opusdec", + defaults: [ + "libstagefright_soft_c2-defaults", + "libstagefright_soft_c2_sanitize_all-defaults", + ], + + srcs: ["C2SoftOpusDec.cpp"], + + shared_libs: ["libopus"], +} diff --git a/media/codec2/components/opus/C2SoftOpusDec.cpp b/media/codec2/components/opus/C2SoftOpusDec.cpp new file mode 100644 index 0000000000000000000000000000000000000000..2439c3c4a52cdae9a875762a9d4ee03377364e0a --- /dev/null +++ b/media/codec2/components/opus/C2SoftOpusDec.cpp @@ -0,0 +1,544 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "C2SoftOpusDec" +#include + +#include + +#include +#include + +#include "C2SoftOpusDec.h" + +extern "C" { + #include + #include +} + +namespace android { + +constexpr char COMPONENT_NAME[] = "c2.android.opus.decoder"; + +class C2SoftOpusDec::IntfImpl : public C2InterfaceHelper { + public: + explicit IntfImpl(const std::shared_ptr& helper) + : C2InterfaceHelper(helper) { + setDerivedInstance(this); + + addParameter( + DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING) + .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatCompressed)) + .build()); + + addParameter( + DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING) + .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatAudio)) + .build()); + + addParameter( + DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING) + .withConstValue(AllocSharedString( + MEDIA_MIMETYPE_AUDIO_OPUS)) + .build()); + + addParameter( + DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING) + .withConstValue(AllocSharedString( + MEDIA_MIMETYPE_AUDIO_RAW)) + .build()); + + addParameter( + DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING) + .withDefault(new C2StreamSampleRateInfo::output(0u, 48000)) + .withFields({C2F(mSampleRate, value).equalTo(48000)}) + .withSetter((Setter::StrictValueWithNoDeps)) + .build()); + + addParameter( + DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING) + .withDefault(new C2StreamChannelCountInfo::output(0u, 1)) + .withFields({C2F(mChannelCount, value).inRange(1, 8)}) + .withSetter(Setter::StrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING) + .withDefault(new C2BitrateTuning::input(0u, 6000)) + .withFields({C2F(mBitrate, value).inRange(6000, 510000)}) + .withSetter(Setter::NonStrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE) + .withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 960 * 6)) + .build()); + } + + private: + std::shared_ptr mInputFormat; + 
std::shared_ptr mOutputFormat; + std::shared_ptr mInputMediaType; + std::shared_ptr mOutputMediaType; + std::shared_ptr mSampleRate; + std::shared_ptr mChannelCount; + std::shared_ptr mBitrate; + std::shared_ptr mInputMaxBufSize; +}; + +C2SoftOpusDec::C2SoftOpusDec(const char *name, c2_node_id_t id, + const std::shared_ptr& intfImpl) + : SimpleC2Component( + std::make_shared>(name, id, intfImpl)), + mIntf(intfImpl), + mDecoder(nullptr) { +} + +C2SoftOpusDec::~C2SoftOpusDec() { + onRelease(); +} + +c2_status_t C2SoftOpusDec::onInit() { + status_t err = initDecoder(); + return err == OK ? C2_OK : C2_NO_MEMORY; +} + +c2_status_t C2SoftOpusDec::onStop() { + if (mDecoder) { + opus_multistream_decoder_destroy(mDecoder); + mDecoder = nullptr; + } + memset(&mHeader, 0, sizeof(mHeader)); + mCodecDelay = 0; + mSeekPreRoll = 0; + mSamplesToDiscard = 0; + mInputBufferCount = 0; + mSignalledError = false; + mSignalledOutputEos = false; + + return C2_OK; +} + +void C2SoftOpusDec::onReset() { + (void)onStop(); +} + +void C2SoftOpusDec::onRelease() { + if (mDecoder) { + opus_multistream_decoder_destroy(mDecoder); + mDecoder = nullptr; + } +} + +status_t C2SoftOpusDec::initDecoder() { + memset(&mHeader, 0, sizeof(mHeader)); + mCodecDelay = 0; + mSeekPreRoll = 0; + mSamplesToDiscard = 0; + mInputBufferCount = 0; + mSignalledError = false; + mSignalledOutputEos = false; + + return OK; +} + +c2_status_t C2SoftOpusDec::onFlush_sm() { + if (mDecoder) { + opus_multistream_decoder_ctl(mDecoder, OPUS_RESET_STATE); + mSamplesToDiscard = mSeekPreRoll; + mSignalledOutputEos = false; + } + return C2_OK; +} + +c2_status_t C2SoftOpusDec::drain( + uint32_t drainMode, + const std::shared_ptr &pool) { + (void) pool; + if (drainMode == NO_DRAIN) { + ALOGW("drain with NO_DRAIN: no-op"); + return C2_OK; + } + if (drainMode == DRAIN_CHAIN) { + ALOGW("DRAIN_CHAIN not supported"); + return C2_OMITTED; + } + + return C2_OK; +} + +static void fillEmptyWork(const std::unique_ptr &work) { + work->worklets.front()->output.flags = work->input.flags; + work->worklets.front()->output.buffers.clear(); + work->worklets.front()->output.ordinal = work->input.ordinal; + work->workletsProcessed = 1u; +} + +static uint16_t ReadLE16(const uint8_t *data, size_t data_size, + uint32_t read_offset) { + if (read_offset + 1 > data_size) + return 0; + uint16_t val; + val = data[read_offset]; + val |= data[read_offset + 1] << 8; + return val; +} + +static const int kRate = 48000; + +// Opus uses Vorbis channel mapping, and Vorbis channel mapping specifies +// mappings for up to 8 channels. This information is part of the Vorbis I +// Specification: +// http://www.xiph.org/vorbis/doc/Vorbis_I_spec.html +static const int kMaxChannels = 8; + +// Maximum packet size used in Xiph's opusdec. +static const int kMaxOpusOutputPacketSizeSamples = 960 * 6; + +// Default audio output channel layout. Used to initialize |stream_map| in +// OpusHeader, and passed to opus_multistream_decoder_create() when the header +// does not contain mapping information. The values are valid only for mono and +// stereo output: Opus streams with more than 2 channels require a stream map. +static const int kMaxChannelsWithDefaultLayout = 2; +static const uint8_t kDefaultOpusChannelLayout[kMaxChannelsWithDefaultLayout] = { 0, 1 }; + +// Parses Opus Header. Header spec: http://wiki.xiph.org/OggOpus#ID_Header +static bool ParseOpusHeader(const uint8_t *data, size_t data_size, + OpusHeader* header) { + // Size of the Opus header excluding optional mapping information. 
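+  // For reference, the fixed 19-byte prefix of the ID header (per the spec
+  // linked above) is laid out as: bytes 0-7 magic "OpusHead", byte 8 version,
+  // byte 9 channel count, bytes 10-11 pre-skip (little-endian), bytes 12-15
+  // input sample rate, bytes 16-17 output gain in dB (little-endian), and
+  // byte 18 the channel mapping family. The offsets defined below follow
+  // this layout.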
+ const size_t kOpusHeaderSize = 19; + + // Offset to the channel count byte in the Opus header. + const size_t kOpusHeaderChannelsOffset = 9; + + // Offset to the pre-skip value in the Opus header. + const size_t kOpusHeaderSkipSamplesOffset = 10; + + // Offset to the gain value in the Opus header. + const size_t kOpusHeaderGainOffset = 16; + + // Offset to the channel mapping byte in the Opus header. + const size_t kOpusHeaderChannelMappingOffset = 18; + + // Opus Header contains a stream map. The mapping values are in the header + // beyond the always present |kOpusHeaderSize| bytes of data. The mapping + // data contains stream count, coupling information, and per channel mapping + // values: + // - Byte 0: Number of streams. + // - Byte 1: Number coupled. + // - Byte 2: Starting at byte 2 are |header->channels| uint8 mapping + // values. + const size_t kOpusHeaderNumStreamsOffset = kOpusHeaderSize; + const size_t kOpusHeaderNumCoupledOffset = kOpusHeaderNumStreamsOffset + 1; + const size_t kOpusHeaderStreamMapOffset = kOpusHeaderNumStreamsOffset + 2; + + if (data_size < kOpusHeaderSize) { + ALOGE("Header size is too small."); + return false; + } + header->channels = *(data + kOpusHeaderChannelsOffset); + if (header->channels <= 0 || header->channels > kMaxChannels) { + ALOGE("Invalid Header, wrong channel count: %d", header->channels); + return false; + } + + header->skip_samples = ReadLE16(data, + data_size, + kOpusHeaderSkipSamplesOffset); + + header->gain_db = static_cast(ReadLE16(data, + data_size, + kOpusHeaderGainOffset)); + + header->channel_mapping = *(data + kOpusHeaderChannelMappingOffset); + if (!header->channel_mapping) { + if (header->channels > kMaxChannelsWithDefaultLayout) { + ALOGE("Invalid Header, missing stream map."); + return false; + } + header->num_streams = 1; + header->num_coupled = header->channels > 1; + header->stream_map[0] = 0; + header->stream_map[1] = 1; + return true; + } + if (data_size < kOpusHeaderStreamMapOffset + header->channels) { + ALOGE("Invalid stream map; insufficient data for current channel " + "count: %d", header->channels); + return false; + } + header->num_streams = *(data + kOpusHeaderNumStreamsOffset); + header->num_coupled = *(data + kOpusHeaderNumCoupledOffset); + if (header->num_streams + header->num_coupled != header->channels) { + ALOGE("Inconsistent channel mapping."); + return false; + } + for (int i = 0; i < header->channels; ++i) + header->stream_map[i] = *(data + kOpusHeaderStreamMapOffset + i); + return true; +} + +// Convert nanoseconds to number of samples. 
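+// For example, a codec delay of 80 ms (80,000,000 ns) at 48000 Hz works out
+// to 3840 samples; the integer division truncates any fractional sample.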
+static uint64_t ns_to_samples(uint64_t ns, int rate) { + return static_cast(ns) * rate / 1000000000; +} + +void C2SoftOpusDec::process( + const std::unique_ptr &work, + const std::shared_ptr &pool) { + // Initialize output work + work->result = C2_OK; + work->workletsProcessed = 1u; + work->worklets.front()->output.configUpdate.clear(); + work->worklets.front()->output.flags = work->input.flags; + + if (mSignalledError || mSignalledOutputEos) { + work->result = C2_BAD_VALUE; + return; + } + + bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0); + size_t inOffset = 0u; + size_t inSize = 0u; + C2ReadView rView = mDummyReadView; + if (!work->input.buffers.empty()) { + rView = work->input.buffers[0]->data().linearBlocks().front().map().get(); + inSize = rView.capacity(); + if (inSize && rView.error()) { + ALOGE("read view map failed %d", rView.error()); + work->result = C2_CORRUPTED; + return; + } + } + if (inSize == 0) { + fillEmptyWork(work); + if (eos) { + mSignalledOutputEos = true; + ALOGV("signalled EOS"); + } + return; + } + + ALOGV("in buffer attr. size %zu timestamp %d frameindex %d", inSize, + (int)work->input.ordinal.timestamp.peeku(), (int)work->input.ordinal.frameIndex.peeku()); + const uint8_t *data = rView.data() + inOffset; + if (mInputBufferCount < 3) { + if (mInputBufferCount == 0) { + if (!ParseOpusHeader(data, inSize, &mHeader)) { + ALOGE("Encountered error while Parsing Opus Header."); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + uint8_t channel_mapping[kMaxChannels] = {0}; + if (mHeader.channels <= kMaxChannelsWithDefaultLayout) { + memcpy(&channel_mapping, + kDefaultOpusChannelLayout, + kMaxChannelsWithDefaultLayout); + } else { + memcpy(&channel_mapping, + mHeader.stream_map, + mHeader.channels); + } + int status = OPUS_INVALID_STATE; + mDecoder = opus_multistream_decoder_create(kRate, + mHeader.channels, + mHeader.num_streams, + mHeader.num_coupled, + channel_mapping, + &status); + if (!mDecoder || status != OPUS_OK) { + ALOGE("opus_multistream_decoder_create failed status = %s", + opus_strerror(status)); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + status = opus_multistream_decoder_ctl(mDecoder, + OPUS_SET_GAIN(mHeader.gain_db)); + if (status != OPUS_OK) { + ALOGE("Failed to set OPUS header gain; status = %s", + opus_strerror(status)); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + } else { + if (inSize < 8) { + ALOGE("Input sample size is too small."); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + int64_t samples = ns_to_samples( *(reinterpret_cast + (const_cast (data))), kRate); + if (mInputBufferCount == 1) { + mCodecDelay = samples; + mSamplesToDiscard = mCodecDelay; + } + else { + mSeekPreRoll = samples; + + ALOGI("Configuring decoder: %d Hz, %d channels", + kRate, mHeader.channels); + C2StreamSampleRateInfo::output sampleRateInfo(0u, kRate); + C2StreamChannelCountInfo::output channelCountInfo(0u, mHeader.channels); + std::vector> failures; + c2_status_t err = mIntf->config( + { &sampleRateInfo, &channelCountInfo }, + C2_MAY_BLOCK, + &failures); + if (err == OK) { + work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(sampleRateInfo)); + work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(channelCountInfo)); + } else { + ALOGE("Config Update failed"); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + } + } + + ++mInputBufferCount; + fillEmptyWork(work); + if (eos) { + 
mSignalledOutputEos = true; + ALOGV("signalled EOS"); + } + return; + } + + // Ignore CSD re-submissions. + if ((work->input.flags & C2FrameData::FLAG_CODEC_CONFIG)) { + fillEmptyWork(work); + return; + } + + // When seeking to zero, |mCodecDelay| samples has to be discarded + // instead of |mSeekPreRoll| samples (as we would when seeking to any + // other timestamp). + if (work->input.ordinal.timestamp.peeku() == 0) mSamplesToDiscard = mCodecDelay; + + std::shared_ptr block; + C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE }; + c2_status_t err = pool->fetchLinearBlock( + kMaxNumSamplesPerBuffer * kMaxChannels * sizeof(int16_t), + usage, &block); + if (err != C2_OK) { + ALOGE("fetchLinearBlock for Output failed with status %d", err); + work->result = C2_NO_MEMORY; + return; + } + C2WriteView wView = block->map().get(); + if (wView.error()) { + ALOGE("write view map failed %d", wView.error()); + work->result = C2_CORRUPTED; + return; + } + + int numSamples = opus_multistream_decode(mDecoder, + data, + inSize, + reinterpret_cast (wView.data()), + kMaxOpusOutputPacketSizeSamples, + 0); + if (numSamples < 0) { + ALOGE("opus_multistream_decode returned numSamples %d", numSamples); + numSamples = 0; + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + + int outOffset = 0; + if (mSamplesToDiscard > 0) { + if (mSamplesToDiscard > numSamples) { + mSamplesToDiscard -= numSamples; + numSamples = 0; + } else { + numSamples -= mSamplesToDiscard; + outOffset = mSamplesToDiscard * sizeof(int16_t) * mHeader.channels; + mSamplesToDiscard = 0; + } + } + + if (numSamples) { + int outSize = numSamples * sizeof(int16_t) * mHeader.channels; + ALOGV("out buffer attr. offset %d size %d ", outOffset, outSize); + + work->worklets.front()->output.flags = work->input.flags; + work->worklets.front()->output.buffers.clear(); + work->worklets.front()->output.buffers.push_back(createLinearBuffer(block, outOffset, outSize)); + work->worklets.front()->output.ordinal = work->input.ordinal; + work->workletsProcessed = 1u; + } else { + fillEmptyWork(work); + block.reset(); + } + if (eos) { + mSignalledOutputEos = true; + ALOGV("signalled EOS"); + } +} + +class C2SoftOpusDecFactory : public C2ComponentFactory { +public: + C2SoftOpusDecFactory() : mHelper(std::static_pointer_cast( + GetCodec2PlatformComponentStore()->getParamReflector())) { + } + + virtual c2_status_t createComponent( + c2_node_id_t id, + std::shared_ptr* const component, + std::function deleter) override { + *component = std::shared_ptr( + new C2SoftOpusDec(COMPONENT_NAME, + id, + std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual c2_status_t createInterface( + c2_node_id_t id, + std::shared_ptr* const interface, + std::function deleter) override { + *interface = std::shared_ptr( + new SimpleInterface( + COMPONENT_NAME, id, std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual ~C2SoftOpusDecFactory() override = default; + +private: + std::shared_ptr mHelper; +}; + +} // namespace android + +extern "C" ::C2ComponentFactory* CreateCodec2Factory() { + ALOGV("in %s", __func__); + return new ::android::C2SoftOpusDecFactory(); +} + +extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) { + ALOGV("in %s", __func__); + delete factory; +} diff --git a/media/codec2/components/opus/C2SoftOpusDec.h b/media/codec2/components/opus/C2SoftOpusDec.h new file mode 100644 index 0000000000000000000000000000000000000000..92b7426d11ae3e02bfe74ae415f0d6d6eed8a89e --- /dev/null +++ 
b/media/codec2/components/opus/C2SoftOpusDec.h @@ -0,0 +1,79 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_C2_SOFT_OPUS_DEC_H_ +#define ANDROID_C2_SOFT_OPUS_DEC_H_ + +#include + + +struct OpusMSDecoder; + +namespace android { + +struct OpusHeader { + int channels; + int skip_samples; + int channel_mapping; + int num_streams; + int num_coupled; + int16_t gain_db; + uint8_t stream_map[8]; +}; + +struct C2SoftOpusDec : public SimpleC2Component { + class IntfImpl; + + C2SoftOpusDec(const char *name, c2_node_id_t id, + const std::shared_ptr &intfImpl); + virtual ~C2SoftOpusDec(); + + // From SimpleC2Component + c2_status_t onInit() override; + c2_status_t onStop() override; + void onReset() override; + void onRelease() override; + c2_status_t onFlush_sm() override; + void process( + const std::unique_ptr &work, + const std::shared_ptr &pool) override; + c2_status_t drain( + uint32_t drainMode, + const std::shared_ptr &pool) override; +private: + enum { + kMaxNumSamplesPerBuffer = 960 * 6 + }; + + std::shared_ptr mIntf; + OpusMSDecoder *mDecoder; + OpusHeader mHeader; + + int64_t mCodecDelay; + int64_t mSeekPreRoll; + int64_t mSamplesToDiscard; + size_t mInputBufferCount; + bool mSignalledError; + bool mSignalledOutputEos; + + status_t initDecoder(); + + C2_DO_NOT_COPY(C2SoftOpusDec); +}; + +} // namespace android + +#endif // ANDROID_C2_SOFT_OPUS_DEC_H_ diff --git a/media/codec2/components/raw/Android.bp b/media/codec2/components/raw/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..150eb91040302ccdb773f01ce075903f043f99c0 --- /dev/null +++ b/media/codec2/components/raw/Android.bp @@ -0,0 +1,9 @@ +cc_library_shared { + name: "libstagefright_soft_c2rawdec", + defaults: [ + "libstagefright_soft_c2-defaults", + "libstagefright_soft_c2_sanitize_all-defaults", + ], + + srcs: ["C2SoftRawDec.cpp"], +} diff --git a/media/codec2/components/raw/C2SoftRawDec.cpp b/media/codec2/components/raw/C2SoftRawDec.cpp new file mode 100644 index 0000000000000000000000000000000000000000..8d2a652404b85da148a24a0567fdfd12a89cf260 --- /dev/null +++ b/media/codec2/components/raw/C2SoftRawDec.cpp @@ -0,0 +1,220 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "C2SoftRawDec" +#include + +#include + +#include +#include + +#include "C2SoftRawDec.h" + +namespace android { + +constexpr char COMPONENT_NAME[] = "c2.android.raw.decoder"; + +class C2SoftRawDec::IntfImpl : public C2InterfaceHelper { +public: + explicit IntfImpl(const std::shared_ptr &helper) + : C2InterfaceHelper(helper) { + + setDerivedInstance(this); + + addParameter( + DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING) + .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatCompressed)) + .build()); + + addParameter( + DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING) + .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatAudio)) + .build()); + + addParameter( + DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING) + .withConstValue(AllocSharedString( + MEDIA_MIMETYPE_AUDIO_RAW)) + .build()); + + addParameter( + DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING) + .withConstValue(AllocSharedString( + MEDIA_MIMETYPE_AUDIO_RAW)) + .build()); + + addParameter( + DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING) + .withDefault(new C2StreamSampleRateInfo::output(0u, 44100)) + .withFields({C2F(mSampleRate, value).inRange(8000, 192000)}) + .withSetter((Setter::StrictValueWithNoDeps)) + .build()); + + addParameter( + DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING) + .withDefault(new C2StreamChannelCountInfo::output(0u, 2)) + .withFields({C2F(mChannelCount, value).inRange(1, 8)}) + .withSetter(Setter::StrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING) + .withDefault(new C2BitrateTuning::input(0u, 64000)) + .withFields({C2F(mBitrate, value).inRange(1, 10000000)}) + .withSetter(Setter::NonStrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE) + .withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 64 * 1024)) + .build()); + } + +private: + std::shared_ptr mInputFormat; + std::shared_ptr mOutputFormat; + std::shared_ptr mInputMediaType; + std::shared_ptr mOutputMediaType; + std::shared_ptr mSampleRate; + std::shared_ptr mChannelCount; + std::shared_ptr mBitrate; + std::shared_ptr mInputMaxBufSize; +}; + +C2SoftRawDec::C2SoftRawDec( + const char *name, + c2_node_id_t id, + const std::shared_ptr &intfImpl) + : SimpleC2Component(std::make_shared>(name, id, intfImpl)), + mIntf(intfImpl) { +} + +C2SoftRawDec::~C2SoftRawDec() { + onRelease(); +} + +c2_status_t C2SoftRawDec::onInit() { + mSignalledEos = false; + return C2_OK; +} + +c2_status_t C2SoftRawDec::onStop() { + mSignalledEos = false; + return C2_OK; +} + +void C2SoftRawDec::onReset() { + (void)onStop(); +} + +void C2SoftRawDec::onRelease() { +} + +c2_status_t C2SoftRawDec::onFlush_sm() { + return onStop(); +} + +void C2SoftRawDec::process( + const std::unique_ptr &work, + const std::shared_ptr &pool) { + (void)pool; + work->result = C2_OK; + work->workletsProcessed = 1u; + + if (mSignalledEos) { + work->result = C2_BAD_VALUE; + return; + } + + ALOGV("in buffer attr. 
timestamp %d frameindex %d", + (int)work->input.ordinal.timestamp.peeku(), (int)work->input.ordinal.frameIndex.peeku()); + + work->worklets.front()->output.flags = work->input.flags; + work->worklets.front()->output.buffers.clear(); + work->worklets.front()->output.ordinal = work->input.ordinal; + if (!work->input.buffers.empty()) { + work->worklets.front()->output.buffers.push_back(work->input.buffers[0]); + } + if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) { + mSignalledEos = true; + ALOGV("signalled EOS"); + } +} + +c2_status_t C2SoftRawDec::drain( + uint32_t drainMode, + const std::shared_ptr &pool) { + (void) pool; + if (drainMode == NO_DRAIN) { + ALOGW("drain with NO_DRAIN: no-op"); + return C2_OK; + } + if (drainMode == DRAIN_CHAIN) { + ALOGW("DRAIN_CHAIN not supported"); + return C2_OMITTED; + } + + return C2_OK; +} + +class C2SoftRawDecFactory : public C2ComponentFactory { +public: + C2SoftRawDecFactory() : mHelper(std::static_pointer_cast( + GetCodec2PlatformComponentStore()->getParamReflector())) { + } + + virtual c2_status_t createComponent( + c2_node_id_t id, + std::shared_ptr* const component, + std::function deleter) override { + *component = std::shared_ptr( + new C2SoftRawDec(COMPONENT_NAME, + id, + std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual c2_status_t createInterface( + c2_node_id_t id, + std::shared_ptr* const interface, + std::function deleter) override { + *interface = std::shared_ptr( + new SimpleInterface( + COMPONENT_NAME, id, std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual ~C2SoftRawDecFactory() override = default; + +private: + std::shared_ptr mHelper; +}; + +} // namespace android + +extern "C" ::C2ComponentFactory* CreateCodec2Factory() { + ALOGV("in %s", __func__); + return new ::android::C2SoftRawDecFactory(); +} + +extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) { + ALOGV("in %s", __func__); + delete factory; +} diff --git a/media/codec2/components/raw/C2SoftRawDec.h b/media/codec2/components/raw/C2SoftRawDec.h new file mode 100644 index 0000000000000000000000000000000000000000..7dfdec59d8bae6b48868a4892fbd4bf65c32b6f9 --- /dev/null +++ b/media/codec2/components/raw/C2SoftRawDec.h @@ -0,0 +1,53 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_C2_SOFT_RAW_DEC_H_ +#define ANDROID_C2_SOFT_RAW_DEC_H_ + +#include + + +namespace android { + +struct C2SoftRawDec : public SimpleC2Component { + class IntfImpl; + + C2SoftRawDec(const char* name, c2_node_id_t id, + const std::shared_ptr& intfImpl); + virtual ~C2SoftRawDec(); + + // From SimpleC2Component + c2_status_t onInit() override; + c2_status_t onStop() override; + void onReset() override; + void onRelease() override; + c2_status_t onFlush_sm() override; + void process( + const std::unique_ptr &work, + const std::shared_ptr &pool) override; + c2_status_t drain( + uint32_t drainMode, + const std::shared_ptr &pool) override; +private: + std::shared_ptr mIntf; + bool mSignalledEos; + + C2_DO_NOT_COPY(C2SoftRawDec); +}; + +} // namespace android + +#endif // ANDROID_C2_SOFT_RAW_DEC_H_ diff --git a/media/codec2/components/raw/MODULE_LICENSE_APACHE2 b/media/codec2/components/raw/MODULE_LICENSE_APACHE2 new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/media/codec2/components/raw/NOTICE b/media/codec2/components/raw/NOTICE new file mode 100644 index 0000000000000000000000000000000000000000..c5b1efa7aac764ae6d8da63476a2d5cec02a6a5d --- /dev/null +++ b/media/codec2/components/raw/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2005-2008, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/media/codec2/components/vorbis/Android.bp b/media/codec2/components/vorbis/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..7477da623465eddafbaf6c686828acf078a3ed3e --- /dev/null +++ b/media/codec2/components/vorbis/Android.bp @@ -0,0 +1,11 @@ +cc_library_shared { + name: "libstagefright_soft_c2vorbisdec", + defaults: [ + "libstagefright_soft_c2-defaults", + "libstagefright_soft_c2_sanitize_all-defaults", + ], + + srcs: ["C2SoftVorbisDec.cpp"], + + shared_libs: ["libvorbisidec"], +} diff --git a/media/codec2/components/vorbis/C2SoftVorbisDec.cpp b/media/codec2/components/vorbis/C2SoftVorbisDec.cpp new file mode 100644 index 0000000000000000000000000000000000000000..280ae3616509f45fff86228d2df92a0b5babbcb3 --- /dev/null +++ b/media/codec2/components/vorbis/C2SoftVorbisDec.cpp @@ -0,0 +1,493 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "C2SoftVorbisDec" +#include + +#include + +#include +#include + +#include "C2SoftVorbisDec.h" + +extern "C" { + #include + + int _vorbis_unpack_books(vorbis_info *vi,oggpack_buffer *opb); + int _vorbis_unpack_info(vorbis_info *vi,oggpack_buffer *opb); + int _vorbis_unpack_comment(vorbis_comment *vc,oggpack_buffer *opb); +} + +namespace android { + +constexpr char COMPONENT_NAME[] = "c2.android.vorbis.decoder"; + +class C2SoftVorbisDec::IntfImpl : public C2InterfaceHelper { +public: + explicit IntfImpl(const std::shared_ptr &helper) + : C2InterfaceHelper(helper) { + + setDerivedInstance(this); + + addParameter( + DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING) + .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatCompressed)) + .build()); + + addParameter( + DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING) + .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatAudio)) + .build()); + + addParameter( + DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING) + .withConstValue(AllocSharedString( + MEDIA_MIMETYPE_AUDIO_VORBIS)) + .build()); + + addParameter( + DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING) + .withConstValue(AllocSharedString( + MEDIA_MIMETYPE_AUDIO_RAW)) + .build()); + + addParameter( + DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING) + .withDefault(new C2StreamSampleRateInfo::output(0u, 48000)) + .withFields({C2F(mSampleRate, value).inRange(8000, 96000)}) + .withSetter((Setter::StrictValueWithNoDeps)) + .build()); + + addParameter( + DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING) + .withDefault(new C2StreamChannelCountInfo::output(0u, 1)) + .withFields({C2F(mChannelCount, value).inRange(1, 8)}) + .withSetter(Setter::StrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING) + .withDefault(new C2BitrateTuning::input(0u, 64000)) + .withFields({C2F(mBitrate, value).inRange(32000, 500000)}) + .withSetter(Setter::NonStrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE) + .withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 8192 * 2 * sizeof(int16_t))) + .build()); + } + +private: + std::shared_ptr mInputFormat; + std::shared_ptr mOutputFormat; + std::shared_ptr mInputMediaType; + std::shared_ptr mOutputMediaType; + std::shared_ptr mSampleRate; + std::shared_ptr mChannelCount; + std::shared_ptr mBitrate; + std::shared_ptr mInputMaxBufSize; +}; + +C2SoftVorbisDec::C2SoftVorbisDec( + const char *name, + c2_node_id_t id, + const std::shared_ptr &intfImpl) + : SimpleC2Component(std::make_shared>(name, id, intfImpl)), + mIntf(intfImpl), + mState(nullptr), + mVi(nullptr) { +} + +C2SoftVorbisDec::~C2SoftVorbisDec() { + onRelease(); +} + +c2_status_t C2SoftVorbisDec::onInit() { + status_t err = initDecoder(); + return err == OK ? C2_OK : C2_NO_MEMORY; +} + +c2_status_t C2SoftVorbisDec::onStop() { + if (mState) { + vorbis_dsp_clear(mState); + delete mState; + mState = nullptr; + } + + if (mVi) { + vorbis_info_clear(mVi); + delete mVi; + mVi = nullptr; + } + mNumFramesLeftOnPage = -1; + mSignalledOutputEos = false; + mSignalledError = false; + + return (initDecoder() == OK ? 
C2_OK : C2_CORRUPTED); +} + +void C2SoftVorbisDec::onReset() { + (void)onStop(); +} + +void C2SoftVorbisDec::onRelease() { + if (mState) { + vorbis_dsp_clear(mState); + delete mState; + mState = nullptr; + } + + if (mVi) { + vorbis_info_clear(mVi); + delete mVi; + mVi = nullptr; + } +} + +status_t C2SoftVorbisDec::initDecoder() { + mVi = new vorbis_info{}; + if (!mVi) return NO_MEMORY; + vorbis_info_clear(mVi); + + mState = new vorbis_dsp_state{}; + if (!mState) return NO_MEMORY; + vorbis_dsp_clear(mState); + + mNumFramesLeftOnPage = -1; + mSignalledError = false; + mSignalledOutputEos = false; + mInfoUnpacked = false; + mBooksUnpacked = false; + return OK; +} + +c2_status_t C2SoftVorbisDec::onFlush_sm() { + mNumFramesLeftOnPage = -1; + mSignalledOutputEos = false; + if (mState) vorbis_dsp_restart(mState); + + return C2_OK; +} + +c2_status_t C2SoftVorbisDec::drain( + uint32_t drainMode, + const std::shared_ptr &pool) { + (void) pool; + if (drainMode == NO_DRAIN) { + ALOGW("drain with NO_DRAIN: no-op"); + return C2_OK; + } + if (drainMode == DRAIN_CHAIN) { + ALOGW("DRAIN_CHAIN not supported"); + return C2_OMITTED; + } + + return C2_OK; +} + +static void fillEmptyWork(const std::unique_ptr &work) { + work->worklets.front()->output.flags = work->input.flags; + work->worklets.front()->output.buffers.clear(); + work->worklets.front()->output.ordinal = work->input.ordinal; + work->workletsProcessed = 1u; +} + +static void makeBitReader( + const void *data, size_t size, + ogg_buffer *buf, ogg_reference *ref, oggpack_buffer *bits) { + buf->data = (uint8_t *)data; + buf->size = size; + buf->refcount = 1; + buf->ptr.owner = nullptr; + + ref->buffer = buf; + ref->begin = 0; + ref->length = size; + ref->next = nullptr; + + oggpack_readinit(bits, ref); +} + +// (CHECK!) multiframe is tricky. decode call doesnt return the number of bytes +// consumed by the component. Also it is unclear why numPageFrames is being +// tagged at the end of input buffers for new pages. Refer lines 297-300 in +// SimpleDecodingSource.cpp +void C2SoftVorbisDec::process( + const std::unique_ptr &work, + const std::shared_ptr &pool) { + // Initialize output work + work->result = C2_OK; + work->workletsProcessed = 1u; + work->worklets.front()->output.configUpdate.clear(); + work->worklets.front()->output.flags = work->input.flags; + + if (mSignalledError || mSignalledOutputEos) { + work->result = C2_BAD_VALUE; + return; + } + + bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0); + size_t inOffset = 0u; + size_t inSize = 0u; + C2ReadView rView = mDummyReadView; + if (!work->input.buffers.empty()) { + rView = work->input.buffers[0]->data().linearBlocks().front().map().get(); + inSize = rView.capacity(); + if (inSize && rView.error()) { + ALOGE("read view map failed %d", rView.error()); + work->result = rView.error(); + return; + } + } + + if (inSize == 0) { + fillEmptyWork(work); + if (eos) { + mSignalledOutputEos = true; + ALOGV("signalled EOS"); + } + return; + } + + ALOGV("in buffer attr. 
size %zu timestamp %d frameindex %d", inSize, + (int)work->input.ordinal.timestamp.peeku(), (int)work->input.ordinal.frameIndex.peeku()); + const uint8_t *data = rView.data() + inOffset; + int32_t numChannels = mVi->channels; + int32_t samplingRate = mVi->rate; + if (inSize > 7 && !memcmp(&data[1], "vorbis", 6)) { + if ((data[0] != 1) && (data[0] != 5)) { + ALOGE("unexpected type received %d", data[0]); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + + ogg_buffer buf; + ogg_reference ref; + oggpack_buffer bits; + + // skip 7 bytes + makeBitReader((const uint8_t *)data + 7, inSize - 7, &buf, &ref, &bits); + if (data[0] == 1) { + vorbis_info_init(mVi); + if (0 != _vorbis_unpack_info(mVi, &bits)) { + ALOGE("Encountered error while unpacking info"); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + if (mVi->rate != samplingRate || + mVi->channels != numChannels) { + ALOGV("vorbis: rate/channels changed: %ld/%d", mVi->rate, mVi->channels); + samplingRate = mVi->rate; + numChannels = mVi->channels; + + C2StreamSampleRateInfo::output sampleRateInfo(0u, samplingRate); + C2StreamChannelCountInfo::output channelCountInfo(0u, numChannels); + std::vector> failures; + c2_status_t err = mIntf->config( + { &sampleRateInfo, &channelCountInfo }, + C2_MAY_BLOCK, + &failures); + if (err == OK) { + work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(sampleRateInfo)); + work->worklets.front()->output.configUpdate.push_back(C2Param::Copy(channelCountInfo)); + } else { + ALOGE("Config Update failed"); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + } + mInfoUnpacked = true; + } else { + if (!mInfoUnpacked) { + ALOGE("Data with type:5 sent before sending type:1"); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + if (0 != _vorbis_unpack_books(mVi, &bits)) { + ALOGE("Encountered error while unpacking books"); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + if (0 != vorbis_dsp_init(mState, mVi)) { + ALOGE("Encountered error while dsp init"); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + mBooksUnpacked = true; + } + fillEmptyWork(work); + if (eos) { + mSignalledOutputEos = true; + ALOGV("signalled EOS"); + } + return; + } + + if (!mInfoUnpacked || !mBooksUnpacked) { + ALOGE("Missing CODEC_CONFIG data mInfoUnpacked: %d mBooksUnpack %d", mInfoUnpacked, mBooksUnpacked); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + + int32_t numPageFrames = 0; + if (inSize < sizeof(numPageFrames)) { + ALOGE("input header has size %zu, expected %zu", inSize, sizeof(numPageFrames)); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + memcpy(&numPageFrames, data + inSize - sizeof(numPageFrames), sizeof(numPageFrames)); + inSize -= sizeof(numPageFrames); + if (numPageFrames >= 0) { + mNumFramesLeftOnPage = numPageFrames; + } + + ogg_buffer buf; + buf.data = const_cast(data); + buf.size = inSize; + buf.refcount = 1; + buf.ptr.owner = nullptr; + + ogg_reference ref; + ref.buffer = &buf; + ref.begin = 0; + ref.length = buf.size; + ref.next = nullptr; + + ogg_packet pack; + pack.packet = &ref; + pack.bytes = ref.length; + pack.b_o_s = 0; + pack.e_o_s = 0; + pack.granulepos = 0; + pack.packetno = 0; + + size_t maxSamplesInBuffer = kMaxNumSamplesPerChannel * mVi->channels; + size_t outCapacity = maxSamplesInBuffer * sizeof(int16_t); + std::shared_ptr block; + C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE }; + 
c2_status_t err = pool->fetchLinearBlock(outCapacity, usage, &block); + if (err != C2_OK) { + ALOGE("fetchLinearBlock for Output failed with status %d", err); + work->result = C2_NO_MEMORY; + return; + } + C2WriteView wView = block->map().get(); + if (wView.error()) { + ALOGE("write view map failed %d", wView.error()); + work->result = wView.error(); + return; + } + + int numFrames = 0; + int ret = vorbis_dsp_synthesis(mState, &pack, 1); + if (0 != ret) { + ALOGE("vorbis_dsp_synthesis returned %d", ret); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } else { + numFrames = vorbis_dsp_pcmout( + mState, reinterpret_cast (wView.data()), + kMaxNumSamplesPerChannel); + if (numFrames < 0) { + ALOGD("vorbis_dsp_pcmout returned %d", numFrames); + numFrames = 0; + } + } + + if (mNumFramesLeftOnPage >= 0) { + if (numFrames > mNumFramesLeftOnPage) { + ALOGV("discarding %d frames at end of page", numFrames - mNumFramesLeftOnPage); + numFrames = mNumFramesLeftOnPage; + } + mNumFramesLeftOnPage -= numFrames; + } + + if (numFrames) { + int outSize = numFrames * sizeof(int16_t) * mVi->channels; + + work->worklets.front()->output.flags = work->input.flags; + work->worklets.front()->output.buffers.clear(); + work->worklets.front()->output.buffers.push_back(createLinearBuffer(block, 0, outSize)); + work->worklets.front()->output.ordinal = work->input.ordinal; + work->workletsProcessed = 1u; + } else { + fillEmptyWork(work); + block.reset(); + } + if (eos) { + mSignalledOutputEos = true; + ALOGV("signalled EOS"); + } +} + +class C2SoftVorbisDecFactory : public C2ComponentFactory { +public: + C2SoftVorbisDecFactory() : mHelper(std::static_pointer_cast( + GetCodec2PlatformComponentStore()->getParamReflector())) { + } + + virtual c2_status_t createComponent( + c2_node_id_t id, + std::shared_ptr* const component, + std::function deleter) override { + *component = std::shared_ptr( + new C2SoftVorbisDec(COMPONENT_NAME, + id, + std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual c2_status_t createInterface( + c2_node_id_t id, + std::shared_ptr* const interface, + std::function deleter) override { + *interface = std::shared_ptr( + new SimpleInterface( + COMPONENT_NAME, id, std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual ~C2SoftVorbisDecFactory() override = default; + +private: + std::shared_ptr mHelper; +}; + +} // namespace android + +extern "C" ::C2ComponentFactory* CreateCodec2Factory() { + ALOGV("in %s", __func__); + return new ::android::C2SoftVorbisDecFactory(); +} + +extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) { + ALOGV("in %s", __func__); + delete factory; +} diff --git a/media/codec2/components/vorbis/C2SoftVorbisDec.h b/media/codec2/components/vorbis/C2SoftVorbisDec.h new file mode 100644 index 0000000000000000000000000000000000000000..3bf7326a7c71ef95eacdb4ebe6c44e10c79dcb55 --- /dev/null +++ b/media/codec2/components/vorbis/C2SoftVorbisDec.h @@ -0,0 +1,70 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_C2_SOFT_VORBIS_DEC_H_ +#define ANDROID_C2_SOFT_VORBIS_DEC_H_ + +#include + + +struct vorbis_dsp_state; +struct vorbis_info; + +namespace android { + +struct C2SoftVorbisDec : public SimpleC2Component { + class IntfImpl; + + C2SoftVorbisDec(const char *name, c2_node_id_t id, + const std::shared_ptr &intfImpl); + virtual ~C2SoftVorbisDec(); + + // From SimpleC2Component + c2_status_t onInit() override; + c2_status_t onStop() override; + void onReset() override; + void onRelease() override; + c2_status_t onFlush_sm() override; + void process( + const std::unique_ptr &work, + const std::shared_ptr &pool) override; + c2_status_t drain( + uint32_t drainMode, + const std::shared_ptr &pool) override; + + private: + enum { + kMaxNumSamplesPerChannel = 8192, + }; + + std::shared_ptr mIntf; + vorbis_dsp_state *mState; + vorbis_info *mVi; + + int32_t mNumFramesLeftOnPage; + bool mSignalledError; + bool mSignalledOutputEos; + bool mInfoUnpacked; + bool mBooksUnpacked; + status_t initDecoder(); + + C2_DO_NOT_COPY(C2SoftVorbisDec); +}; + +} // namespace android + +#endif // ANDROID_C2_SOFT_VORBIS_DEC_H_ + diff --git a/media/codec2/components/vorbis/MODULE_LICENSE_APACHE2 b/media/codec2/components/vorbis/MODULE_LICENSE_APACHE2 new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/media/codec2/components/vorbis/NOTICE b/media/codec2/components/vorbis/NOTICE new file mode 100644 index 0000000000000000000000000000000000000000..c5b1efa7aac764ae6d8da63476a2d5cec02a6a5d --- /dev/null +++ b/media/codec2/components/vorbis/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2005-2008, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/media/codec2/components/vpx/Android.bp b/media/codec2/components/vpx/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..c09f36581e5ff40886fddb67f1b7be89b3cf1c47 --- /dev/null +++ b/media/codec2/components/vpx/Android.bp @@ -0,0 +1,60 @@ +cc_library_shared { + name: "libstagefright_soft_c2vp9dec", + defaults: [ + "libstagefright_soft_c2-defaults", + "libstagefright_soft_c2_sanitize_all-defaults", + ], + + srcs: ["C2SoftVpxDec.cpp"], + + static_libs: ["libvpx"], + + cflags: [ + "-DVP9", + ], +} + +cc_library_shared { + name: "libstagefright_soft_c2vp8dec", + defaults: [ + "libstagefright_soft_c2-defaults", + "libstagefright_soft_c2_sanitize_all-defaults", + ], + + srcs: ["C2SoftVpxDec.cpp"], + + static_libs: ["libvpx"], +} + +cc_library_shared { + name: "libstagefright_soft_c2vp9enc", + defaults: [ + "libstagefright_soft_c2-defaults", + "libstagefright_soft_c2_sanitize_all-defaults", + ], + + srcs: [ + "C2SoftVp9Enc.cpp", + "C2SoftVpxEnc.cpp", + ], + + static_libs: ["libvpx"], + + cflags: ["-DVP9"], +} + +cc_library_shared { + name: "libstagefright_soft_c2vp8enc", + defaults: [ + "libstagefright_soft_c2-defaults", + "libstagefright_soft_c2_sanitize_all-defaults", + ], + + srcs: [ + "C2SoftVp8Enc.cpp", + "C2SoftVpxEnc.cpp", + ], + + static_libs: ["libvpx"], +} + diff --git a/media/codec2/components/vpx/C2SoftVp8Enc.cpp b/media/codec2/components/vpx/C2SoftVp8Enc.cpp new file mode 100644 index 0000000000000000000000000000000000000000..0ae717a5d3b3ee7e3495eb45458172e6825ce97c --- /dev/null +++ b/media/codec2/components/vpx/C2SoftVp8Enc.cpp @@ -0,0 +1,114 @@ +/* + * Copyright 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "C2SoftVp8Enc" +#include +#include + +#include "C2SoftVp8Enc.h" + +namespace android { + +constexpr char COMPONENT_NAME[] = "c2.android.vp8.encoder"; + +C2SoftVp8Enc::C2SoftVp8Enc(const char* name, c2_node_id_t id, + const std::shared_ptr& intfImpl) + : C2SoftVpxEnc(name, id, intfImpl), mDCTPartitions(0), mProfile(1) {} + +void C2SoftVp8Enc::setCodecSpecificInterface() { + mCodecInterface = vpx_codec_vp8_cx(); +} + +void C2SoftVp8Enc::setCodecSpecificConfiguration() { + switch (mProfile) { + case 1: + mCodecConfiguration->g_profile = 0; + break; + + case 2: + mCodecConfiguration->g_profile = 1; + break; + + case 4: + mCodecConfiguration->g_profile = 2; + break; + + case 8: + mCodecConfiguration->g_profile = 3; + break; + + default: + mCodecConfiguration->g_profile = 0; + } +} + +vpx_codec_err_t C2SoftVp8Enc::setCodecSpecificControls() { + vpx_codec_err_t codec_return = vpx_codec_control(mCodecContext, + VP8E_SET_TOKEN_PARTITIONS, + mDCTPartitions); + if (codec_return != VPX_CODEC_OK) { + ALOGE("Error setting dct partitions for vpx encoder."); + } + return codec_return; +} + +class C2SoftVp8EncFactory : public C2ComponentFactory { +public: + C2SoftVp8EncFactory() + : mHelper(std::static_pointer_cast( + GetCodec2PlatformComponentStore()->getParamReflector())) {} + + virtual c2_status_t createComponent( + c2_node_id_t id, + std::shared_ptr* const component, + std::function deleter) override { + *component = std::shared_ptr( + new C2SoftVp8Enc(COMPONENT_NAME, id, + std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual c2_status_t createInterface( + c2_node_id_t id, + std::shared_ptr* const interface, + std::function deleter) override { + *interface = std::shared_ptr( + new SimpleInterface( + COMPONENT_NAME, id, + std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual ~C2SoftVp8EncFactory() override = default; + +private: + std::shared_ptr mHelper; +}; + +} // namespace android + +extern "C" ::C2ComponentFactory* CreateCodec2Factory() { + ALOGV("in %s", __func__); + return new ::android::C2SoftVp8EncFactory(); +} + +extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) { + ALOGV("in %s", __func__); + delete factory; +} diff --git a/media/codec2/components/vpx/C2SoftVp8Enc.h b/media/codec2/components/vpx/C2SoftVp8Enc.h new file mode 100644 index 0000000000000000000000000000000000000000..ed6f356e232c71fc359840eaa58445f3b80d0df6 --- /dev/null +++ b/media/codec2/components/vpx/C2SoftVp8Enc.h @@ -0,0 +1,60 @@ +/* + * Copyright 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_C2_SOFT_VP8_ENC_H__ +#define ANDROID_C2_SOFT_VP8_ENC_H__ + +#include "C2SoftVpxEnc.h" + +namespace android { + +// Exposes vp8 encoder as a c2 Component +// +// In addition to the base class settings, Only following encoder settings are +// available: +// - token partitioning +struct C2SoftVp8Enc : public C2SoftVpxEnc { + C2SoftVp8Enc(const char* name, c2_node_id_t id, + const std::shared_ptr& intfImpl); + + protected: + // Populates |mCodecInterface| with codec specific settings. + virtual void setCodecSpecificInterface(); + + // Sets codec specific configuration. + virtual void setCodecSpecificConfiguration(); + + // Initializes codec specific encoder settings. + virtual vpx_codec_err_t setCodecSpecificControls(); + + private: + // Max value supported for DCT partitions + static const uint32_t kMaxDCTPartitions = 3; + + // vp8 specific configuration parameter + // that enables token partitioning of + // the stream into substreams + int32_t mDCTPartitions; + + // C2 Profile parameter + int32_t mProfile; + + C2_DO_NOT_COPY(C2SoftVp8Enc); +}; + +} // namespace android + +#endif // ANDROID_C2_SOFT_VP8_ENC_H__ diff --git a/media/codec2/components/vpx/C2SoftVp9Enc.cpp b/media/codec2/components/vpx/C2SoftVp9Enc.cpp new file mode 100644 index 0000000000000000000000000000000000000000..b26170f901ddd631aeb1e5068c3b0e45afe5ab10 --- /dev/null +++ b/media/codec2/components/vpx/C2SoftVp9Enc.cpp @@ -0,0 +1,144 @@ +/* + * Copyright 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "C2SoftVp9Enc" +#include +#include + +#include "C2SoftVp9Enc.h" + +namespace android { + +constexpr char COMPONENT_NAME[] = "c2.android.vp9.encoder"; + +C2SoftVp9Enc::C2SoftVp9Enc(const char* name, c2_node_id_t id, + const std::shared_ptr& intfImpl) + : C2SoftVpxEnc(name, id, intfImpl), + mProfile(1), + mLevel(0), + mTileColumns(0), + mFrameParallelDecoding(false) { +} + +void C2SoftVp9Enc::setCodecSpecificInterface() { + mCodecInterface = vpx_codec_vp9_cx(); +} + +void C2SoftVp9Enc::setCodecSpecificConfiguration() { + switch (mProfile) { + case 1: + mCodecConfiguration->g_profile = 0; + break; + + case 2: + mCodecConfiguration->g_profile = 1; + break; + + case 4: + mCodecConfiguration->g_profile = 2; + break; + + case 8: + mCodecConfiguration->g_profile = 3; + break; + + default: + mCodecConfiguration->g_profile = 0; + } +} + +vpx_codec_err_t C2SoftVp9Enc::setCodecSpecificControls() { + vpx_codec_err_t codecReturn = vpx_codec_control( + mCodecContext, VP9E_SET_TILE_COLUMNS, mTileColumns); + if (codecReturn != VPX_CODEC_OK) { + ALOGE("Error setting VP9E_SET_TILE_COLUMNS to %d. vpx_codec_control() " + "returned %d", mTileColumns, codecReturn); + return codecReturn; + } + codecReturn = vpx_codec_control( + mCodecContext, VP9E_SET_FRAME_PARALLEL_DECODING, + mFrameParallelDecoding); + if (codecReturn != VPX_CODEC_OK) { + ALOGE("Error setting VP9E_SET_FRAME_PARALLEL_DECODING to %d." 
+ "vpx_codec_control() returned %d", mFrameParallelDecoding, + codecReturn); + return codecReturn; + } + codecReturn = vpx_codec_control(mCodecContext, VP9E_SET_ROW_MT, 1); + if (codecReturn != VPX_CODEC_OK) { + ALOGE("Error setting VP9E_SET_ROW_MT to 1. vpx_codec_control() " + "returned %d", codecReturn); + return codecReturn; + } + + // For VP9, we always set CPU_USED to 8 (because the realtime default is 0 + // which is too slow). + codecReturn = vpx_codec_control(mCodecContext, VP8E_SET_CPUUSED, 8); + if (codecReturn != VPX_CODEC_OK) { + ALOGE("Error setting VP8E_SET_CPUUSED to 8. vpx_codec_control() " + "returned %d", codecReturn); + return codecReturn; + } + return codecReturn; +} + +class C2SoftVp9EncFactory : public C2ComponentFactory { +public: + C2SoftVp9EncFactory() + : mHelper(std::static_pointer_cast( + GetCodec2PlatformComponentStore()->getParamReflector())) {} + + virtual c2_status_t createComponent( + c2_node_id_t id, + std::shared_ptr* const component, + std::function deleter) override { + *component = std::shared_ptr( + new C2SoftVp9Enc(COMPONENT_NAME, id, + std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual c2_status_t createInterface( + c2_node_id_t id, + std::shared_ptr* const interface, + std::function deleter) override { + *interface = std::shared_ptr( + new SimpleInterface( + COMPONENT_NAME, id, + std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual ~C2SoftVp9EncFactory() override = default; + +private: + std::shared_ptr mHelper; +}; + +} // namespace android + +extern "C" ::C2ComponentFactory* CreateCodec2Factory() { + ALOGV("in %s", __func__); + return new ::android::C2SoftVp9EncFactory(); +} + +extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) { + ALOGV("in %s", __func__); + delete factory; +} diff --git a/media/codec2/components/vpx/C2SoftVp9Enc.h b/media/codec2/components/vpx/C2SoftVp9Enc.h new file mode 100644 index 0000000000000000000000000000000000000000..77ef8fd7732978d12861b5f214f21e6e71d620c9 --- /dev/null +++ b/media/codec2/components/vpx/C2SoftVp9Enc.h @@ -0,0 +1,59 @@ +/* + * Copyright 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_C2_SOFT_VP9_ENC_H__ +#define ANDROID_C2_SOFT_VP9_ENC_H__ + +#include "C2SoftVpxEnc.h" + +namespace android { + +// Exposes vp9 encoder as a c2 Component +// +// In addition to the base class settings, Only following encoder settings are +// available: +// - tile rows +// - tile columns +// - frame parallel mode +struct C2SoftVp9Enc : public C2SoftVpxEnc { + C2SoftVp9Enc(const char* name, c2_node_id_t id, + const std::shared_ptr& intfImpl); + + protected: + // Populates |mCodecInterface| with codec specific settings. + virtual void setCodecSpecificInterface(); + + // Sets codec specific configuration. + virtual void setCodecSpecificConfiguration(); + + // Initializes codec specific encoder settings. 
+ virtual vpx_codec_err_t setCodecSpecificControls(); + + private: + // C2 Profile & Level parameter + int32_t mProfile; + int32_t mLevel __unused; + + int32_t mTileColumns; + + bool mFrameParallelDecoding; + + C2_DO_NOT_COPY(C2SoftVp9Enc); +}; + +} // namespace android + +#endif // ANDROID_C2_SOFT_VP9_ENC_H__ diff --git a/media/codec2/components/vpx/C2SoftVpxDec.cpp b/media/codec2/components/vpx/C2SoftVpxDec.cpp new file mode 100644 index 0000000000000000000000000000000000000000..01de68134909865e2710061e41efd3aec5456ec1 --- /dev/null +++ b/media/codec2/components/vpx/C2SoftVpxDec.cpp @@ -0,0 +1,640 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "C2SoftVpxDec" +#include + +#include +#include + +#include +#include +#include + +#include "C2SoftVpxDec.h" + +namespace android { + +#ifdef VP9 +constexpr char COMPONENT_NAME[] = "c2.android.vp9.decoder"; +#else +constexpr char COMPONENT_NAME[] = "c2.android.vp8.decoder"; +#endif + +class C2SoftVpxDec::IntfImpl : public SimpleInterface::BaseParams { +public: + explicit IntfImpl(const std::shared_ptr &helper) + : SimpleInterface::BaseParams( + helper, + COMPONENT_NAME, + C2Component::KIND_DECODER, + C2Component::DOMAIN_VIDEO, +#ifdef VP9 + MEDIA_MIMETYPE_VIDEO_VP9 +#else + MEDIA_MIMETYPE_VIDEO_VP8 +#endif + ) { + noPrivateBuffers(); // TODO: account for our buffers here + noInputReferences(); + noOutputReferences(); + noInputLatency(); + noTimeStretch(); + + // TODO: output latency and reordering + + addParameter( + DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES) + .withConstValue(new C2ComponentAttributesSetting(C2Component::ATTRIB_IS_TEMPORAL)) + .build()); + + addParameter( + DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE) + .withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240)) + .withFields({ + C2F(mSize, width).inRange(2, 2048, 2), + C2F(mSize, height).inRange(2, 2048, 2), + }) + .withSetter(SizeSetter) + .build()); + +#ifdef VP9 + // TODO: Add C2Config::PROFILE_VP9_2HDR ?? 
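+        // As declared just below, the decoder currently advertises VP9
+        // profiles 0 and 2 with levels up to 5; PROFILE_VP9_2HDR (see the
+        // TODO above) is not exposed yet.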
+ addParameter( + DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL) + .withDefault(new C2StreamProfileLevelInfo::input(0u, + C2Config::PROFILE_VP9_0, C2Config::LEVEL_VP9_5)) + .withFields({ + C2F(mProfileLevel, profile).oneOf({ + C2Config::PROFILE_VP9_0, + C2Config::PROFILE_VP9_2}), + C2F(mProfileLevel, level).oneOf({ + C2Config::LEVEL_VP9_1, + C2Config::LEVEL_VP9_1_1, + C2Config::LEVEL_VP9_2, + C2Config::LEVEL_VP9_2_1, + C2Config::LEVEL_VP9_3, + C2Config::LEVEL_VP9_3_1, + C2Config::LEVEL_VP9_4, + C2Config::LEVEL_VP9_4_1, + C2Config::LEVEL_VP9_5, + }) + }) + .withSetter(ProfileLevelSetter, mSize) + .build()); + +#if 0 + // sample BT.2020 static info + mHdrStaticInfo = std::make_shared(); + mHdrStaticInfo->mastering = { + .red = { .x = 0.708, .y = 0.292 }, + .green = { .x = 0.170, .y = 0.797 }, + .blue = { .x = 0.131, .y = 0.046 }, + .white = { .x = 0.3127, .y = 0.3290 }, + .maxLuminance = 1000, + .minLuminance = 0.1, + }; + mHdrStaticInfo->maxCll = 1000; + mHdrStaticInfo->maxFall = 120; + + mHdrStaticInfo->maxLuminance = 0; // disable static info + + helper->addStructDescriptors(); + addParameter( + DefineParam(mHdrStaticInfo, C2_PARAMKEY_HDR_STATIC_INFO) + .withDefault(mHdrStaticInfo) + .withFields({ + C2F(mHdrStaticInfo, mastering.red.x).inRange(0, 1), + // TODO + }) + .withSetter(HdrStaticInfoSetter) + .build()); +#endif +#else + addParameter( + DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL) + .withConstValue(new C2StreamProfileLevelInfo::input(0u, + C2Config::PROFILE_UNUSED, C2Config::LEVEL_UNUSED)) + .build()); +#endif + + addParameter( + DefineParam(mMaxSize, C2_PARAMKEY_MAX_PICTURE_SIZE) + .withDefault(new C2StreamMaxPictureSizeTuning::output(0u, 320, 240)) + .withFields({ + C2F(mSize, width).inRange(2, 2048, 2), + C2F(mSize, height).inRange(2, 2048, 2), + }) + .withSetter(MaxPictureSizeSetter, mSize) + .build()); + + addParameter( + DefineParam(mMaxInputSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE) + .withDefault(new C2StreamMaxBufferSizeInfo::input(0u, 320 * 240 * 3 / 4)) + .withFields({ + C2F(mMaxInputSize, value).any(), + }) + .calculatedAs(MaxInputSizeSetter, mMaxSize) + .build()); + + C2ChromaOffsetStruct locations[1] = { C2ChromaOffsetStruct::ITU_YUV_420_0() }; + std::shared_ptr defaultColorInfo = + C2StreamColorInfo::output::AllocShared( + 1u, 0u, 8u /* bitDepth */, C2Color::YUV_420); + memcpy(defaultColorInfo->m.locations, locations, sizeof(locations)); + + defaultColorInfo = + C2StreamColorInfo::output::AllocShared( + { C2ChromaOffsetStruct::ITU_YUV_420_0() }, + 0u, 8u /* bitDepth */, C2Color::YUV_420); + helper->addStructDescriptors(); + + addParameter( + DefineParam(mColorInfo, C2_PARAMKEY_CODED_COLOR_INFO) + .withConstValue(defaultColorInfo) + .build()); + + // TODO: support more formats? 
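+        // For now the only output pixel format advertised below is the
+        // flexible 8-bit HAL_PIXEL_FORMAT_YCBCR_420_888.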
+ addParameter( + DefineParam(mPixelFormat, C2_PARAMKEY_PIXEL_FORMAT) + .withConstValue(new C2StreamPixelFormatInfo::output( + 0u, HAL_PIXEL_FORMAT_YCBCR_420_888)) + .build()); + } + + static C2R SizeSetter(bool mayBlock, const C2P &oldMe, + C2P &me) { + (void)mayBlock; + C2R res = C2R::Ok(); + if (!me.F(me.v.width).supportsAtAll(me.v.width)) { + res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width))); + me.set().width = oldMe.v.width; + } + if (!me.F(me.v.height).supportsAtAll(me.v.height)) { + res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height))); + me.set().height = oldMe.v.height; + } + return res; + } + + static C2R MaxPictureSizeSetter(bool mayBlock, C2P &me, + const C2P &size) { + (void)mayBlock; + // TODO: get max width/height from the size's field helpers vs. hardcoding + me.set().width = c2_min(c2_max(me.v.width, size.v.width), 2048u); + me.set().height = c2_min(c2_max(me.v.height, size.v.height), 2048u); + return C2R::Ok(); + } + + static C2R MaxInputSizeSetter(bool mayBlock, C2P &me, + const C2P &maxSize) { + (void)mayBlock; + // assume compression ratio of 2 + me.set().value = (((maxSize.v.width + 63) / 64) * ((maxSize.v.height + 63) / 64) * 3072); + return C2R::Ok(); + } + + + static C2R ProfileLevelSetter(bool mayBlock, C2P &me, + const C2P &size) { + (void)mayBlock; + (void)size; + (void)me; // TODO: validate + return C2R::Ok(); + } + +private: + std::shared_ptr mProfileLevel; + std::shared_ptr mSize; + std::shared_ptr mMaxSize; + std::shared_ptr mMaxInputSize; + std::shared_ptr mColorInfo; + std::shared_ptr mPixelFormat; +#ifdef VP9 +#if 0 + std::shared_ptr mHdrStaticInfo; +#endif +#endif +}; + +C2SoftVpxDec::C2SoftVpxDec( + const char *name, + c2_node_id_t id, + const std::shared_ptr &intfImpl) + : SimpleC2Component(std::make_shared>(name, id, intfImpl)), + mIntf(intfImpl), + mCodecCtx(nullptr) { +} + +C2SoftVpxDec::~C2SoftVpxDec() { + onRelease(); +} + +c2_status_t C2SoftVpxDec::onInit() { + status_t err = initDecoder(); + return err == OK ? C2_OK : C2_CORRUPTED; +} + +c2_status_t C2SoftVpxDec::onStop() { + mSignalledError = false; + mSignalledOutputEos = false; + + return C2_OK; +} + +void C2SoftVpxDec::onReset() { + (void)onStop(); + c2_status_t err = onFlush_sm(); + if (err != C2_OK) + { + ALOGW("Failed to flush decoder. Try to hard reset decoder"); + destroyDecoder(); + (void)initDecoder(); + } +} + +void C2SoftVpxDec::onRelease() { + destroyDecoder(); +} + +c2_status_t C2SoftVpxDec::onFlush_sm() { + if (mFrameParallelMode) { + // Flush decoder by passing nullptr data ptr and 0 size. + // Ideally, this should never fail. + if (vpx_codec_decode(mCodecCtx, nullptr, 0, nullptr, 0)) { + ALOGE("Failed to flush on2 decoder."); + return C2_CORRUPTED; + } + } + + // Drop all the decoded frames in decoder. + vpx_codec_iter_t iter = nullptr; + while (vpx_codec_get_frame(mCodecCtx, &iter)) { + } + + mSignalledError = false; + mSignalledOutputEos = false; + return C2_OK; +} + +static int GetCPUCoreCount() { + int cpuCoreCount = 1; +#if defined(_SC_NPROCESSORS_ONLN) + cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN); +#else + // _SC_NPROC_ONLN must be defined... 
+ cpuCoreCount = sysconf(_SC_NPROC_ONLN); +#endif + CHECK(cpuCoreCount >= 1); + ALOGV("Number of CPU cores: %d", cpuCoreCount); + return cpuCoreCount; +} + +status_t C2SoftVpxDec::initDecoder() { +#ifdef VP9 + mMode = MODE_VP9; +#else + mMode = MODE_VP8; +#endif + + mWidth = 320; + mHeight = 240; + mFrameParallelMode = false; + mSignalledOutputEos = false; + mSignalledError = false; + + if (!mCodecCtx) { + mCodecCtx = new vpx_codec_ctx_t; + } + if (!mCodecCtx) { + ALOGE("mCodecCtx is null"); + return NO_MEMORY; + } + + vpx_codec_dec_cfg_t cfg; + memset(&cfg, 0, sizeof(vpx_codec_dec_cfg_t)); + cfg.threads = GetCPUCoreCount(); + + vpx_codec_flags_t flags; + memset(&flags, 0, sizeof(vpx_codec_flags_t)); + if (mFrameParallelMode) flags |= VPX_CODEC_USE_FRAME_THREADING; + + vpx_codec_err_t vpx_err; + if ((vpx_err = vpx_codec_dec_init( + mCodecCtx, mMode == MODE_VP8 ? &vpx_codec_vp8_dx_algo : &vpx_codec_vp9_dx_algo, + &cfg, flags))) { + ALOGE("on2 decoder failed to initialize. (%d)", vpx_err); + return UNKNOWN_ERROR; + } + + return OK; +} + +status_t C2SoftVpxDec::destroyDecoder() { + if (mCodecCtx) { + vpx_codec_destroy(mCodecCtx); + delete mCodecCtx; + mCodecCtx = nullptr; + } + + return OK; +} + +void fillEmptyWork(const std::unique_ptr &work) { + uint32_t flags = 0; + if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) { + flags |= C2FrameData::FLAG_END_OF_STREAM; + ALOGV("signalling eos"); + } + work->worklets.front()->output.flags = (C2FrameData::flags_t)flags; + work->worklets.front()->output.buffers.clear(); + work->worklets.front()->output.ordinal = work->input.ordinal; + work->workletsProcessed = 1u; +} + +void C2SoftVpxDec::finishWork(uint64_t index, const std::unique_ptr &work, + const std::shared_ptr &block) { + std::shared_ptr buffer = createGraphicBuffer(block, + C2Rect(mWidth, mHeight)); + auto fillWork = [buffer, index](const std::unique_ptr &work) { + uint32_t flags = 0; + if ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) && + (c2_cntr64_t(index) == work->input.ordinal.frameIndex)) { + flags |= C2FrameData::FLAG_END_OF_STREAM; + ALOGV("signalling eos"); + } + work->worklets.front()->output.flags = (C2FrameData::flags_t)flags; + work->worklets.front()->output.buffers.clear(); + work->worklets.front()->output.buffers.push_back(buffer); + work->worklets.front()->output.ordinal = work->input.ordinal; + work->workletsProcessed = 1u; + }; + if (work && c2_cntr64_t(index) == work->input.ordinal.frameIndex) { + fillWork(work); + } else { + finish(index, fillWork); + } +} + +void C2SoftVpxDec::process( + const std::unique_ptr &work, + const std::shared_ptr &pool) { + // Initialize output work + work->result = C2_OK; + work->workletsProcessed = 0u; + work->worklets.front()->output.configUpdate.clear(); + work->worklets.front()->output.flags = work->input.flags; + + if (mSignalledError || mSignalledOutputEos) { + work->result = C2_BAD_VALUE; + return; + } + + size_t inOffset = 0u; + size_t inSize = 0u; + C2ReadView rView = mDummyReadView; + if (!work->input.buffers.empty()) { + rView = work->input.buffers[0]->data().linearBlocks().front().map().get(); + inSize = rView.capacity(); + if (inSize && rView.error()) { + ALOGE("read view map failed %d", rView.error()); + work->result = C2_CORRUPTED; + return; + } + } + + bool codecConfig = ((work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) !=0); + bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0); + + ALOGV("in buffer attr. 
size %zu timestamp %d frameindex %d, flags %x", + inSize, (int)work->input.ordinal.timestamp.peeku(), + (int)work->input.ordinal.frameIndex.peeku(), work->input.flags); + + // Software VP9 Decoder does not need the Codec Specific Data (CSD) + // (specified in http://www.webmproject.org/vp9/profiles/). Ignore it if + // it was passed. + if (codecConfig) { + // Ignore CSD buffer for VP9. + if (mMode == MODE_VP9) { + fillEmptyWork(work); + return; + } else { + // Tolerate the CSD buffer for VP8. This is a workaround + // for b/28689536. continue + ALOGW("WARNING: Got CSD buffer for VP8. Continue"); + } + } + + int64_t frameIndex = work->input.ordinal.frameIndex.peekll(); + + if (inSize) { + uint8_t *bitstream = const_cast(rView.data() + inOffset); + vpx_codec_err_t err = vpx_codec_decode( + mCodecCtx, bitstream, inSize, &frameIndex, 0); + if (err != VPX_CODEC_OK) { + ALOGE("on2 decoder failed to decode frame. err: %d", err); + mSignalledError = true; + work->workletsProcessed = 1u; + work->result = C2_CORRUPTED; + return; + } + } + + (void)outputBuffer(pool, work); + + if (eos) { + drainInternal(DRAIN_COMPONENT_WITH_EOS, pool, work); + mSignalledOutputEos = true; + } else if (!inSize) { + fillEmptyWork(work); + } +} + +static void copyOutputBufferToYV12Frame(uint8_t *dst, + const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV, + size_t srcYStride, size_t srcUStride, size_t srcVStride, + uint32_t width, uint32_t height, int32_t bpp) { + size_t dstYStride = align(width, 16) * bpp ; + size_t dstUVStride = align(dstYStride / 2, 16); + uint8_t *dstStart = dst; + + for (size_t i = 0; i < height; ++i) { + memcpy(dst, srcY, width * bpp); + srcY += srcYStride; + dst += dstYStride; + } + + dst = dstStart + dstYStride * height; + for (size_t i = 0; i < height / 2; ++i) { + memcpy(dst, srcV, width / 2 * bpp); + srcV += srcVStride; + dst += dstUVStride; + } + + dst = dstStart + (dstYStride * height) + (dstUVStride * height / 2); + for (size_t i = 0; i < height / 2; ++i) { + memcpy(dst, srcU, width / 2 * bpp); + srcU += srcUStride; + dst += dstUVStride; + } +} + +bool C2SoftVpxDec::outputBuffer( + const std::shared_ptr &pool, + const std::unique_ptr &work) +{ + if (!(work && pool)) return false; + + vpx_codec_iter_t iter = nullptr; + vpx_image_t *img = vpx_codec_get_frame(mCodecCtx, &iter); + + if (!img) return false; + + if (img->d_w != mWidth || img->d_h != mHeight) { + mWidth = img->d_w; + mHeight = img->d_h; + + C2VideoSizeStreamInfo::output size(0u, mWidth, mHeight); + std::vector> failures; + c2_status_t err = mIntf->config({&size}, C2_MAY_BLOCK, &failures); + if (err == C2_OK) { + work->worklets.front()->output.configUpdate.push_back( + C2Param::Copy(size)); + } else { + ALOGE("Config update size failed"); + mSignalledError = true; + work->workletsProcessed = 1u; + work->result = C2_CORRUPTED; + return false; + } + + } + CHECK(img->fmt == VPX_IMG_FMT_I420 || img->fmt == VPX_IMG_FMT_I42016); + int32_t bpp = 1; + if (img->fmt == VPX_IMG_FMT_I42016) { + bpp = 2; + } + + std::shared_ptr block; + uint32_t format = HAL_PIXEL_FORMAT_YV12; + C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE }; + c2_status_t err = pool->fetchGraphicBlock(align(mWidth, 16) * bpp, mHeight, format, usage, &block); + if (err != C2_OK) { + ALOGE("fetchGraphicBlock for Output failed with status %d", err); + work->result = err; + return false; + } + + C2GraphicView wView = block->map().get(); + if (wView.error()) { + ALOGE("graphic view map failed %d", wView.error()); + work->result = C2_CORRUPTED; 
+ return false; + } + + ALOGV("provided (%dx%d) required (%dx%d), out frameindex %d", + block->width(), block->height(), mWidth, mHeight, (int)*(int64_t *)img->user_priv); + + uint8_t *dst = const_cast(wView.data()[C2PlanarLayout::PLANE_Y]); + size_t srcYStride = img->stride[VPX_PLANE_Y]; + size_t srcUStride = img->stride[VPX_PLANE_U]; + size_t srcVStride = img->stride[VPX_PLANE_V]; + const uint8_t *srcY = (const uint8_t *)img->planes[VPX_PLANE_Y]; + const uint8_t *srcU = (const uint8_t *)img->planes[VPX_PLANE_U]; + const uint8_t *srcV = (const uint8_t *)img->planes[VPX_PLANE_V]; + copyOutputBufferToYV12Frame(dst, srcY, srcU, srcV, + srcYStride, srcUStride, srcVStride, mWidth, mHeight, bpp); + + finishWork(*(int64_t *)img->user_priv, work, std::move(block)); + return true; +} + +c2_status_t C2SoftVpxDec::drainInternal( + uint32_t drainMode, + const std::shared_ptr &pool, + const std::unique_ptr &work) { + if (drainMode == NO_DRAIN) { + ALOGW("drain with NO_DRAIN: no-op"); + return C2_OK; + } + if (drainMode == DRAIN_CHAIN) { + ALOGW("DRAIN_CHAIN not supported"); + return C2_OMITTED; + } + + while ((outputBuffer(pool, work))) { + } + + if (drainMode == DRAIN_COMPONENT_WITH_EOS && + work && work->workletsProcessed == 0u) { + fillEmptyWork(work); + } + + return C2_OK; +} +c2_status_t C2SoftVpxDec::drain( + uint32_t drainMode, + const std::shared_ptr &pool) { + return drainInternal(drainMode, pool, nullptr); +} + +class C2SoftVpxFactory : public C2ComponentFactory { +public: + C2SoftVpxFactory() : mHelper(std::static_pointer_cast( + GetCodec2PlatformComponentStore()->getParamReflector())) { + } + + virtual c2_status_t createComponent( + c2_node_id_t id, + std::shared_ptr* const component, + std::function deleter) override { + *component = std::shared_ptr( + new C2SoftVpxDec(COMPONENT_NAME, id, + std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual c2_status_t createInterface( + c2_node_id_t id, + std::shared_ptr* const interface, + std::function deleter) override { + *interface = std::shared_ptr( + new SimpleInterface( + COMPONENT_NAME, id, + std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual ~C2SoftVpxFactory() override = default; + +private: + std::shared_ptr mHelper; +}; + +} // namespace android + +extern "C" ::C2ComponentFactory* CreateCodec2Factory() { + ALOGV("in %s", __func__); + return new ::android::C2SoftVpxFactory(); +} + +extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) { + ALOGV("in %s", __func__); + delete factory; +} diff --git a/media/codec2/components/vpx/C2SoftVpxDec.h b/media/codec2/components/vpx/C2SoftVpxDec.h new file mode 100644 index 0000000000000000000000000000000000000000..60c84847dbaf51c1d19c91568fbe994dac8c184d --- /dev/null +++ b/media/codec2/components/vpx/C2SoftVpxDec.h @@ -0,0 +1,79 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_C2_SOFT_VPX_DEC_H_ +#define ANDROID_C2_SOFT_VPX_DEC_H_ + +#include + + +#include "vpx/vpx_decoder.h" +#include "vpx/vp8dx.h" + +namespace android { + +struct C2SoftVpxDec : public SimpleC2Component { + class IntfImpl; + + C2SoftVpxDec(const char* name, c2_node_id_t id, + const std::shared_ptr& intfImpl); + virtual ~C2SoftVpxDec(); + + // From SimpleC2Component + c2_status_t onInit() override; + c2_status_t onStop() override; + void onReset() override; + void onRelease() override; + c2_status_t onFlush_sm() override; + void process( + const std::unique_ptr &work, + const std::shared_ptr &pool) override; + c2_status_t drain( + uint32_t drainMode, + const std::shared_ptr &pool) override; + private: + enum { + MODE_VP8, + MODE_VP9, + } mMode; + + std::shared_ptr mIntf; + vpx_codec_ctx_t *mCodecCtx; + bool mFrameParallelMode; // Frame parallel is only supported by VP9 decoder. + + uint32_t mWidth; + uint32_t mHeight; + bool mSignalledOutputEos; + bool mSignalledError; + + status_t initDecoder(); + status_t destroyDecoder(); + void finishWork(uint64_t index, const std::unique_ptr &work, + const std::shared_ptr &block); + bool outputBuffer( + const std::shared_ptr &pool, + const std::unique_ptr &work); + c2_status_t drainInternal( + uint32_t drainMode, + const std::shared_ptr &pool, + const std::unique_ptr &work); + + C2_DO_NOT_COPY(C2SoftVpxDec); +}; + +} // namespace android + +#endif // ANDROID_C2_SOFT_VPX_DEC_H_ diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.cpp b/media/codec2/components/vpx/C2SoftVpxEnc.cpp new file mode 100644 index 0000000000000000000000000000000000000000..155a84f89844d625214098567809421cb767e653 --- /dev/null +++ b/media/codec2/components/vpx/C2SoftVpxEnc.cpp @@ -0,0 +1,670 @@ +/* + * Copyright 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "C2SoftVpxEnc" +#include +#include + +#include + +#include +#include +#include "C2SoftVpxEnc.h" + +#ifndef INT32_MAX +#define INT32_MAX 2147483647 +#endif + +namespace android { + +#if 0 +static size_t getCpuCoreCount() { + long cpuCoreCount = 1; +#if defined(_SC_NPROCESSORS_ONLN) + cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN); +#else + // _SC_NPROC_ONLN must be defined... 
+ cpuCoreCount = sysconf(_SC_NPROC_ONLN); +#endif + CHECK(cpuCoreCount >= 1); + ALOGV("Number of CPU cores: %ld", cpuCoreCount); + return (size_t)cpuCoreCount; +} +#endif + +C2SoftVpxEnc::C2SoftVpxEnc(const char* name, c2_node_id_t id, + const std::shared_ptr& intfImpl) + : SimpleC2Component( + std::make_shared>(name, id, intfImpl)), + mIntf(intfImpl), + mCodecContext(nullptr), + mCodecConfiguration(nullptr), + mCodecInterface(nullptr), + mStrideAlign(2), + mColorFormat(VPX_IMG_FMT_I420), + mBitrateControlMode(VPX_VBR), + mErrorResilience(false), + mMinQuantizer(0), + mMaxQuantizer(0), + mTemporalLayers(0), + mTemporalPatternType(VPXTemporalLayerPatternNone), + mTemporalPatternLength(0), + mTemporalPatternIdx(0), + mLastTimestamp(0x7FFFFFFFFFFFFFFFull), + mSignalledOutputEos(false), + mSignalledError(false) { + memset(mTemporalLayerBitrateRatio, 0, sizeof(mTemporalLayerBitrateRatio)); + mTemporalLayerBitrateRatio[0] = 100; +} + +C2SoftVpxEnc::~C2SoftVpxEnc() { + onRelease(); +} + +c2_status_t C2SoftVpxEnc::onInit() { + status_t err = initEncoder(); + return err == OK ? C2_OK : C2_CORRUPTED; +} + +void C2SoftVpxEnc::onRelease() { + if (mCodecContext) { + vpx_codec_destroy(mCodecContext); + delete mCodecContext; + mCodecContext = nullptr; + } + + if (mCodecConfiguration) { + delete mCodecConfiguration; + mCodecConfiguration = nullptr; + } + + // this one is not allocated by us + mCodecInterface = nullptr; +} + +c2_status_t C2SoftVpxEnc::onStop() { + onRelease(); + mLastTimestamp = 0x7FFFFFFFFFFFFFFFLL; + mSignalledOutputEos = false; + mSignalledError = false; + return C2_OK; +} + +void C2SoftVpxEnc::onReset() { + (void)onStop(); +} + +c2_status_t C2SoftVpxEnc::onFlush_sm() { + return onStop(); +} + +status_t C2SoftVpxEnc::initEncoder() { + vpx_codec_err_t codec_return; + status_t result = UNKNOWN_ERROR; + { + IntfImpl::Lock lock = mIntf->lock(); + mSize = mIntf->getSize_l(); + mBitrate = mIntf->getBitrate_l(); + mBitrateMode = mIntf->getBitrateMode_l(); + mFrameRate = mIntf->getFrameRate_l(); + mIntraRefresh = mIntf->getIntraRefresh_l(); + mRequestSync = mIntf->getRequestSync_l(); + mTemporalLayers = mIntf->getTemporalLayers_l()->m.layerCount; + } + + switch (mBitrateMode->value) { + case C2Config::BITRATE_VARIABLE: + mBitrateControlMode = VPX_VBR; + break; + case C2Config::BITRATE_CONST: + default: + mBitrateControlMode = VPX_CBR; + break; + break; + } + + setCodecSpecificInterface(); + if (!mCodecInterface) goto CleanUp; + + ALOGD("VPx: initEncoder. BRMode: %u. TSLayers: %zu. KF: %u. QP: %u - %u", + (uint32_t)mBitrateControlMode, mTemporalLayers, mIntf->getSyncFramePeriod(), + mMinQuantizer, mMaxQuantizer); + + mCodecConfiguration = new vpx_codec_enc_cfg_t; + if (!mCodecConfiguration) goto CleanUp; + codec_return = vpx_codec_enc_config_default(mCodecInterface, + mCodecConfiguration, + 0); + if (codec_return != VPX_CODEC_OK) { + ALOGE("Error populating default configuration for vpx encoder."); + goto CleanUp; + } + + mCodecConfiguration->g_w = mSize->width; + mCodecConfiguration->g_h = mSize->height; + //mCodecConfiguration->g_threads = getCpuCoreCount(); + mCodecConfiguration->g_threads = 0; + mCodecConfiguration->g_error_resilient = mErrorResilience; + + // timebase unit is microsecond + // g_timebase is in seconds (i.e. 
1/1000000 seconds) + mCodecConfiguration->g_timebase.num = 1; + mCodecConfiguration->g_timebase.den = 1000000; + // rc_target_bitrate is in kbps, mBitrate in bps + mCodecConfiguration->rc_target_bitrate = (mBitrate->value + 500) / 1000; + mCodecConfiguration->rc_end_usage = mBitrateControlMode; + // Disable frame drop - not allowed in MediaCodec now. + mCodecConfiguration->rc_dropframe_thresh = 0; + // Disable lagged encoding. + mCodecConfiguration->g_lag_in_frames = 0; + if (mBitrateControlMode == VPX_CBR) { + // Disable spatial resizing. + mCodecConfiguration->rc_resize_allowed = 0; + // Single-pass mode. + mCodecConfiguration->g_pass = VPX_RC_ONE_PASS; + // Maximum amount of bits that can be subtracted from the target + // bitrate - expressed as percentage of the target bitrate. + mCodecConfiguration->rc_undershoot_pct = 100; + // Maximum amount of bits that can be added to the target + // bitrate - expressed as percentage of the target bitrate. + mCodecConfiguration->rc_overshoot_pct = 15; + // Initial value of the buffer level in ms. + mCodecConfiguration->rc_buf_initial_sz = 500; + // Amount of data that the encoder should try to maintain in ms. + mCodecConfiguration->rc_buf_optimal_sz = 600; + // The amount of data that may be buffered by the decoding + // application in ms. + mCodecConfiguration->rc_buf_sz = 1000; + // Enable error resilience - needed for packet loss. + mCodecConfiguration->g_error_resilient = 1; + // Maximum key frame interval - for CBR boost to 3000 + mCodecConfiguration->kf_max_dist = 3000; + // Encoder determines optimal key frame placement automatically. + mCodecConfiguration->kf_mode = VPX_KF_AUTO; + } + + // Frames temporal pattern - for now WebRTC like pattern is only supported. + switch (mTemporalLayers) { + case 0: + mTemporalPatternLength = 0; + break; + case 1: + mCodecConfiguration->ts_number_layers = 1; + mCodecConfiguration->ts_rate_decimator[0] = 1; + mCodecConfiguration->ts_periodicity = 1; + mCodecConfiguration->ts_layer_id[0] = 0; + mTemporalPattern[0] = kTemporalUpdateLastRefAll; + mTemporalPatternLength = 1; + break; + case 2: + mCodecConfiguration->ts_number_layers = 2; + mCodecConfiguration->ts_rate_decimator[0] = 2; + mCodecConfiguration->ts_rate_decimator[1] = 1; + mCodecConfiguration->ts_periodicity = 2; + mCodecConfiguration->ts_layer_id[0] = 0; + mCodecConfiguration->ts_layer_id[1] = 1; + mTemporalPattern[0] = kTemporalUpdateLastAndGoldenRefAltRef; + mTemporalPattern[1] = kTemporalUpdateGoldenWithoutDependencyRefAltRef; + mTemporalPattern[2] = kTemporalUpdateLastRefAltRef; + mTemporalPattern[3] = kTemporalUpdateGoldenRefAltRef; + mTemporalPattern[4] = kTemporalUpdateLastRefAltRef; + mTemporalPattern[5] = kTemporalUpdateGoldenRefAltRef; + mTemporalPattern[6] = kTemporalUpdateLastRefAltRef; + mTemporalPattern[7] = kTemporalUpdateNone; + mTemporalPatternLength = 8; + break; + case 3: + mCodecConfiguration->ts_number_layers = 3; + mCodecConfiguration->ts_rate_decimator[0] = 4; + mCodecConfiguration->ts_rate_decimator[1] = 2; + mCodecConfiguration->ts_rate_decimator[2] = 1; + mCodecConfiguration->ts_periodicity = 4; + mCodecConfiguration->ts_layer_id[0] = 0; + mCodecConfiguration->ts_layer_id[1] = 2; + mCodecConfiguration->ts_layer_id[2] = 1; + mCodecConfiguration->ts_layer_id[3] = 2; + mTemporalPattern[0] = kTemporalUpdateLastAndGoldenRefAltRef; + mTemporalPattern[1] = kTemporalUpdateNoneNoRefGoldenRefAltRef; + mTemporalPattern[2] = kTemporalUpdateGoldenWithoutDependencyRefAltRef; + mTemporalPattern[3] = kTemporalUpdateNone; + 
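+            // remaining half of the 8-frame cycle for the 3-layer
+            // WebRTC-style temporal pattern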
mTemporalPattern[4] = kTemporalUpdateLastRefAltRef; + mTemporalPattern[5] = kTemporalUpdateNone; + mTemporalPattern[6] = kTemporalUpdateGoldenRefAltRef; + mTemporalPattern[7] = kTemporalUpdateNone; + mTemporalPatternLength = 8; + break; + default: + ALOGE("Wrong number of temporal layers %zu", mTemporalLayers); + goto CleanUp; + } + // Set bitrate values for each layer + for (size_t i = 0; i < mCodecConfiguration->ts_number_layers; i++) { + mCodecConfiguration->ts_target_bitrate[i] = + mCodecConfiguration->rc_target_bitrate * + mTemporalLayerBitrateRatio[i] / 100; + } + if (mIntf->getSyncFramePeriod() >= 0) { + mCodecConfiguration->kf_max_dist = mIntf->getSyncFramePeriod(); + mCodecConfiguration->kf_min_dist = mIntf->getSyncFramePeriod(); + mCodecConfiguration->kf_mode = VPX_KF_AUTO; + } + if (mMinQuantizer > 0) { + mCodecConfiguration->rc_min_quantizer = mMinQuantizer; + } + if (mMaxQuantizer > 0) { + mCodecConfiguration->rc_max_quantizer = mMaxQuantizer; + } + setCodecSpecificConfiguration(); + mCodecContext = new vpx_codec_ctx_t; + if (!mCodecContext) goto CleanUp; + codec_return = vpx_codec_enc_init(mCodecContext, + mCodecInterface, + mCodecConfiguration, + 0); // flags + if (codec_return != VPX_CODEC_OK) { + ALOGE("Error initializing vpx encoder"); + goto CleanUp; + } + + // Extra CBR settings + if (mBitrateControlMode == VPX_CBR) { + codec_return = vpx_codec_control(mCodecContext, + VP8E_SET_STATIC_THRESHOLD, + 1); + if (codec_return == VPX_CODEC_OK) { + uint32_t rc_max_intra_target = + (uint32_t)(mCodecConfiguration->rc_buf_optimal_sz * mFrameRate->value / 20 + 0.5); + // Don't go below 3 times per frame bandwidth. + if (rc_max_intra_target < 300) { + rc_max_intra_target = 300; + } + codec_return = vpx_codec_control(mCodecContext, + VP8E_SET_MAX_INTRA_BITRATE_PCT, + rc_max_intra_target); + } + if (codec_return == VPX_CODEC_OK) { + codec_return = vpx_codec_control(mCodecContext, + VP8E_SET_CPUUSED, + -8); + } + if (codec_return != VPX_CODEC_OK) { + ALOGE("Error setting cbr parameters for vpx encoder."); + goto CleanUp; + } + } + + codec_return = setCodecSpecificControls(); + if (codec_return != VPX_CODEC_OK) goto CleanUp; + + { + uint32_t width = mSize->width; + uint32_t height = mSize->height; + if (((uint64_t)width * height) > + ((uint64_t)INT32_MAX / 3)) { + ALOGE("b/25812794, Buffer size is too big, width=%u, height=%u.", width, height); + } else { + uint32_t stride = (width + mStrideAlign - 1) & ~(mStrideAlign - 1); + uint32_t vstride = (height + mStrideAlign - 1) & ~(mStrideAlign - 1); + mConversionBuffer = MemoryBlock::Allocate(stride * vstride * 3 / 2); + if (!mConversionBuffer.size()) { + ALOGE("Allocating conversion buffer failed."); + } else { + mNumInputFrames = -1; + return OK; + } + } + } + +CleanUp: + onRelease(); + return result; +} + +vpx_enc_frame_flags_t C2SoftVpxEnc::getEncodeFlags() { + vpx_enc_frame_flags_t flags = 0; + if (mTemporalPatternLength > 0) { + int patternIdx = mTemporalPatternIdx % mTemporalPatternLength; + mTemporalPatternIdx++; + switch (mTemporalPattern[patternIdx]) { + case kTemporalUpdateLast: + flags |= VP8_EFLAG_NO_UPD_GF; + flags |= VP8_EFLAG_NO_UPD_ARF; + flags |= VP8_EFLAG_NO_REF_GF; + flags |= VP8_EFLAG_NO_REF_ARF; + break; + case kTemporalUpdateGoldenWithoutDependency: + flags |= VP8_EFLAG_NO_REF_GF; + [[fallthrough]]; + case kTemporalUpdateGolden: + flags |= VP8_EFLAG_NO_REF_ARF; + flags |= VP8_EFLAG_NO_UPD_ARF; + flags |= VP8_EFLAG_NO_UPD_LAST; + break; + case kTemporalUpdateAltrefWithoutDependency: + flags |= VP8_EFLAG_NO_REF_ARF; + 
flags |= VP8_EFLAG_NO_REF_GF; + [[fallthrough]]; + case kTemporalUpdateAltref: + flags |= VP8_EFLAG_NO_UPD_GF; + flags |= VP8_EFLAG_NO_UPD_LAST; + break; + case kTemporalUpdateNoneNoRefAltref: + flags |= VP8_EFLAG_NO_REF_ARF; + [[fallthrough]]; + case kTemporalUpdateNone: + flags |= VP8_EFLAG_NO_UPD_GF; + flags |= VP8_EFLAG_NO_UPD_ARF; + flags |= VP8_EFLAG_NO_UPD_LAST; + flags |= VP8_EFLAG_NO_UPD_ENTROPY; + break; + case kTemporalUpdateNoneNoRefGoldenRefAltRef: + flags |= VP8_EFLAG_NO_REF_GF; + flags |= VP8_EFLAG_NO_UPD_GF; + flags |= VP8_EFLAG_NO_UPD_ARF; + flags |= VP8_EFLAG_NO_UPD_LAST; + flags |= VP8_EFLAG_NO_UPD_ENTROPY; + break; + case kTemporalUpdateGoldenWithoutDependencyRefAltRef: + flags |= VP8_EFLAG_NO_REF_GF; + flags |= VP8_EFLAG_NO_UPD_ARF; + flags |= VP8_EFLAG_NO_UPD_LAST; + break; + case kTemporalUpdateLastRefAltRef: + flags |= VP8_EFLAG_NO_UPD_GF; + flags |= VP8_EFLAG_NO_UPD_ARF; + flags |= VP8_EFLAG_NO_REF_GF; + break; + case kTemporalUpdateGoldenRefAltRef: + flags |= VP8_EFLAG_NO_UPD_ARF; + flags |= VP8_EFLAG_NO_UPD_LAST; + break; + case kTemporalUpdateLastAndGoldenRefAltRef: + flags |= VP8_EFLAG_NO_UPD_ARF; + flags |= VP8_EFLAG_NO_REF_GF; + break; + case kTemporalUpdateLastRefAll: + flags |= VP8_EFLAG_NO_UPD_ARF; + flags |= VP8_EFLAG_NO_UPD_GF; + break; + } + } + return flags; +} + +// TODO: add support for YUV input color formats +// TODO: add support for SVC, ARF. SVC and ARF returns multiple frames +// (hierarchical / noshow) in one call. These frames should be combined in to +// a single buffer and sent back to the client +void C2SoftVpxEnc::process( + const std::unique_ptr &work, + const std::shared_ptr &pool) { + // Initialize output work + work->result = C2_OK; + work->workletsProcessed = 1u; + work->worklets.front()->output.flags = work->input.flags; + + if (mSignalledError || mSignalledOutputEos) { + work->result = C2_BAD_VALUE; + return; + } + // Initialize encoder if not already + if (!mCodecContext && OK != initEncoder()) { + ALOGE("Failed to initialize encoder"); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + + std::shared_ptr rView; + std::shared_ptr inputBuffer; + if (!work->input.buffers.empty()) { + inputBuffer = work->input.buffers[0]; + rView = std::make_shared( + inputBuffer->data().graphicBlocks().front().map().get()); + if (rView->error() != C2_OK) { + ALOGE("graphic view map err = %d", rView->error()); + work->result = C2_CORRUPTED; + return; + } + } else { + ALOGV("Empty input Buffer"); + uint32_t flags = 0; + if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) { + flags |= C2FrameData::FLAG_END_OF_STREAM; + } + work->worklets.front()->output.flags = (C2FrameData::flags_t)flags; + work->worklets.front()->output.buffers.clear(); + work->worklets.front()->output.ordinal = work->input.ordinal; + work->workletsProcessed = 1u; + return; + } + + const C2ConstGraphicBlock inBuffer = + inputBuffer->data().graphicBlocks().front(); + if (inBuffer.width() != mSize->width || + inBuffer.height() != mSize->height) { + ALOGE("unexpected Input buffer attributes %d(%d) x %d(%d)", + inBuffer.width(), mSize->width, inBuffer.height(), + mSize->height); + mSignalledError = true; + work->result = C2_BAD_VALUE; + return; + } + bool eos = ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0); + vpx_image_t raw_frame; + const C2PlanarLayout &layout = rView->layout(); + uint32_t width = rView->width(); + uint32_t height = rView->height(); + if (width > 0x8000 || height > 0x8000) { + ALOGE("Image too big: %u x %u", width, height); + 
work->result = C2_BAD_VALUE; + return; + } + uint32_t stride = (width + mStrideAlign - 1) & ~(mStrideAlign - 1); + uint32_t vstride = (height + mStrideAlign - 1) & ~(mStrideAlign - 1); + switch (layout.type) { + case C2PlanarLayout::TYPE_RGB: + case C2PlanarLayout::TYPE_RGBA: { + ConvertRGBToPlanarYUV(mConversionBuffer.data(), stride, vstride, + mConversionBuffer.size(), *rView.get()); + vpx_img_wrap(&raw_frame, VPX_IMG_FMT_I420, width, height, + mStrideAlign, mConversionBuffer.data()); + break; + } + case C2PlanarLayout::TYPE_YUV: { + if (!IsYUV420(*rView)) { + ALOGE("input is not YUV420"); + work->result = C2_BAD_VALUE; + return; + } + + if (layout.planes[layout.PLANE_Y].colInc == 1 + && layout.planes[layout.PLANE_U].colInc == 1 + && layout.planes[layout.PLANE_V].colInc == 1) { + // I420 compatible - though with custom offset and stride + vpx_img_wrap(&raw_frame, VPX_IMG_FMT_I420, width, height, + mStrideAlign, (uint8_t*)rView->data()[0]); + raw_frame.planes[1] = (uint8_t*)rView->data()[1]; + raw_frame.planes[2] = (uint8_t*)rView->data()[2]; + raw_frame.stride[0] = layout.planes[layout.PLANE_Y].rowInc; + raw_frame.stride[1] = layout.planes[layout.PLANE_U].rowInc; + raw_frame.stride[2] = layout.planes[layout.PLANE_V].rowInc; + } else { + // copy to I420 + MediaImage2 img = CreateYUV420PlanarMediaImage2(width, height, stride, vstride); + if (mConversionBuffer.size() >= stride * vstride * 3 / 2) { + status_t err = ImageCopy(mConversionBuffer.data(), &img, *rView); + if (err != OK) { + ALOGE("Buffer conversion failed: %d", err); + work->result = C2_BAD_VALUE; + return; + } + vpx_img_wrap(&raw_frame, VPX_IMG_FMT_I420, stride, vstride, + mStrideAlign, (uint8_t*)rView->data()[0]); + vpx_img_set_rect(&raw_frame, 0, 0, width, height); + } else { + ALOGE("Conversion buffer is too small: %u x %u for %zu", + stride, vstride, mConversionBuffer.size()); + work->result = C2_BAD_VALUE; + return; + } + } + break; + } + default: + ALOGE("Unrecognized plane type: %d", layout.type); + work->result = C2_BAD_VALUE; + return; + } + + vpx_enc_frame_flags_t flags = getEncodeFlags(); + // handle dynamic config parameters + { + IntfImpl::Lock lock = mIntf->lock(); + std::shared_ptr intraRefresh = mIntf->getIntraRefresh_l(); + std::shared_ptr bitrate = mIntf->getBitrate_l(); + std::shared_ptr requestSync = mIntf->getRequestSync_l(); + lock.unlock(); + + if (intraRefresh != mIntraRefresh) { + mIntraRefresh = intraRefresh; + ALOGV("Got mIntraRefresh request"); + } + + if (requestSync != mRequestSync) { + // we can handle IDR immediately + if (requestSync->value) { + // unset request + C2StreamRequestSyncFrameTuning::output clearSync(0u, C2_FALSE); + std::vector> failures; + mIntf->config({ &clearSync }, C2_MAY_BLOCK, &failures); + ALOGV("Got sync request"); + flags |= VPX_EFLAG_FORCE_KF; + } + mRequestSync = requestSync; + } + + if (bitrate != mBitrate) { + mBitrate = bitrate; + mCodecConfiguration->rc_target_bitrate = + (mBitrate->value + 500) / 1000; + vpx_codec_err_t res = vpx_codec_enc_config_set(mCodecContext, + mCodecConfiguration); + if (res != VPX_CODEC_OK) { + ALOGE("vpx encoder failed to update bitrate: %s", + vpx_codec_err_to_string(res)); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + } + } + + uint64_t inputTimeStamp = work->input.ordinal.timestamp.peekull(); + uint32_t frameDuration; + if (inputTimeStamp > mLastTimestamp) { + frameDuration = (uint32_t)(inputTimeStamp - mLastTimestamp); + } else { + // Use default of 30 fps in case of 0 frame rate. 
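+        // e.g. the 30 fps fallback below yields 1000000 / 30 + 0.5, i.e.
+        // roughly 33333 us per frame, consistent with the microsecond
+        // timebase configured in initEncoder().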
+ float frameRate = mFrameRate->value; + if (frameRate < 0.001) { + frameRate = 30; + } + frameDuration = (uint32_t)(1000000 / frameRate + 0.5); + } + mLastTimestamp = inputTimeStamp; + + vpx_codec_err_t codec_return = vpx_codec_encode(mCodecContext, &raw_frame, + inputTimeStamp, + frameDuration, flags, + VPX_DL_REALTIME); + if (codec_return != VPX_CODEC_OK) { + ALOGE("vpx encoder failed to encode frame"); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + + bool populated = false; + vpx_codec_iter_t encoded_packet_iterator = nullptr; + const vpx_codec_cx_pkt_t* encoded_packet; + while ((encoded_packet = vpx_codec_get_cx_data( + mCodecContext, &encoded_packet_iterator))) { + if (encoded_packet->kind == VPX_CODEC_CX_FRAME_PKT) { + std::shared_ptr block; + C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE }; + c2_status_t err = pool->fetchLinearBlock(encoded_packet->data.frame.sz, usage, &block); + if (err != C2_OK) { + ALOGE("fetchLinearBlock for Output failed with status %d", err); + work->result = C2_NO_MEMORY; + return; + } + C2WriteView wView = block->map().get(); + if (wView.error()) { + ALOGE("write view map failed %d", wView.error()); + work->result = C2_CORRUPTED; + return; + } + + memcpy(wView.data(), encoded_packet->data.frame.buf, encoded_packet->data.frame.sz); + ++mNumInputFrames; + + ALOGD("bytes generated %zu", encoded_packet->data.frame.sz); + uint32_t flags = 0; + if (eos) { + flags |= C2FrameData::FLAG_END_OF_STREAM; + } + work->worklets.front()->output.flags = (C2FrameData::flags_t)flags; + work->worklets.front()->output.buffers.clear(); + std::shared_ptr buffer = createLinearBuffer(block); + if (encoded_packet->data.frame.flags & VPX_FRAME_IS_KEY) { + buffer->setInfo(std::make_shared( + 0u /* stream id */, C2PictureTypeKeyFrame)); + } + work->worklets.front()->output.buffers.push_back(buffer); + work->worklets.front()->output.ordinal = work->input.ordinal; + work->worklets.front()->output.ordinal.timestamp = encoded_packet->data.frame.pts; + work->workletsProcessed = 1u; + populated = true; + if (eos) { + mSignalledOutputEos = true; + ALOGV("signalled EOS"); + } + } + } + if (!populated) { + work->workletsProcessed = 0u; + } +} + +c2_status_t C2SoftVpxEnc::drain( + uint32_t drainMode, + const std::shared_ptr &pool) { + (void)pool; + if (drainMode == NO_DRAIN) { + ALOGW("drain with NO_DRAIN: no-op"); + return C2_OK; + } + if (drainMode == DRAIN_CHAIN) { + ALOGW("DRAIN_CHAIN not supported"); + return C2_OMITTED; + } + + return C2_OK; +} + +} // namespace android diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.h b/media/codec2/components/vpx/C2SoftVpxEnc.h new file mode 100644 index 0000000000000000000000000000000000000000..87ed1a9e8b24ac60bfa0508010ecc46eb704113e --- /dev/null +++ b/media/codec2/components/vpx/C2SoftVpxEnc.h @@ -0,0 +1,437 @@ +/* + * Copyright 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_C2_SOFT_VPX_ENC_H__ +#define ANDROID_C2_SOFT_VPX_ENC_H__ + +#include + +#include +#include +#include +#include +#include + +#include "vpx/vpx_encoder.h" +#include "vpx/vpx_codec.h" +#include "vpx/vpx_image.h" +#include "vpx/vp8cx.h" + +namespace android { + +// TODO: These defs taken from deprecated OMX_VideoExt.h. Move these definitions +// to a new header file and include it. + +/** Maximum number of temporal layers */ +#define MAXTEMPORALLAYERS 3 + +/** temporal layer patterns */ +typedef enum TemporalPatternType { + VPXTemporalLayerPatternNone = 0, + VPXTemporalLayerPatternWebRTC = 1, + VPXTemporalLayerPatternMax = 0x7FFFFFFF +} TemporalPatternType; + +// Base class for a VPX Encoder Component +// +// Only following encoder settings are available (codec specific settings might +// be available in the sub-classes): +// - video resolution +// - target bitrate +// - rate control (constant / variable) +// - frame rate +// - error resilience +// - reconstruction & loop filters (g_profile) +// +// Only following color formats are recognized +// - C2PlanarLayout::TYPE_RGB +// - C2PlanarLayout::TYPE_RGBA +// +// Following settings are not configurable by the client +// - encoding deadline is realtime +// - multithreaded encoding utilizes a number of threads equal +// to online cpu's available +// - the algorithm interface for encoder is decided by the sub-class in use +// - fractional bits of frame rate is discarded +// - timestamps are in microseconds, therefore encoder timebase is fixed +// to 1/1000000 + +struct C2SoftVpxEnc : public SimpleC2Component { + class IntfImpl; + + C2SoftVpxEnc(const char* name, c2_node_id_t id, + const std::shared_ptr& intfImpl); + + // From SimpleC2Component + c2_status_t onInit() override final; + c2_status_t onStop() override final; + void onReset() override final; + void onRelease() override final; + c2_status_t onFlush_sm() override final; + + void process( + const std::unique_ptr &work, + const std::shared_ptr &pool) override final; + c2_status_t drain( + uint32_t drainMode, + const std::shared_ptr &pool) override final; + + protected: + std::shared_ptr mIntf; + virtual ~C2SoftVpxEnc(); + + // Initializes vpx encoder with available settings. + status_t initEncoder(); + + // Populates mCodecInterface with codec specific settings. + virtual void setCodecSpecificInterface() = 0; + + // Sets codec specific configuration. + virtual void setCodecSpecificConfiguration() = 0; + + // Sets codec specific encoder controls. + virtual vpx_codec_err_t setCodecSpecificControls() = 0; + + // Get current encode flags. + virtual vpx_enc_frame_flags_t getEncodeFlags(); + + enum TemporalReferences { + // For 1 layer case: reference all (last, golden, and alt ref), but only + // update last. + kTemporalUpdateLastRefAll = 12, + // First base layer frame for 3 temporal layers, which updates last and + // golden with alt ref dependency. + kTemporalUpdateLastAndGoldenRefAltRef = 11, + // First enhancement layer with alt ref dependency. + kTemporalUpdateGoldenRefAltRef = 10, + // First enhancement layer with alt ref dependency. + kTemporalUpdateGoldenWithoutDependencyRefAltRef = 9, + // Base layer with alt ref dependency. + kTemporalUpdateLastRefAltRef = 8, + // Highest enhacement layer without dependency on golden with alt ref + // dependency. + kTemporalUpdateNoneNoRefGoldenRefAltRef = 7, + // Second layer and last frame in cycle, for 2 layers. + kTemporalUpdateNoneNoRefAltref = 6, + // Highest enhancement layer. 
+ kTemporalUpdateNone = 5, + // Second enhancement layer. + kTemporalUpdateAltref = 4, + // Second enhancement layer without dependency on previous frames in + // the second enhancement layer. + kTemporalUpdateAltrefWithoutDependency = 3, + // First enhancement layer. + kTemporalUpdateGolden = 2, + // First enhancement layer without dependency on previous frames in + // the first enhancement layer. + kTemporalUpdateGoldenWithoutDependency = 1, + // Base layer. + kTemporalUpdateLast = 0, + }; + enum { + kMaxTemporalPattern = 8 + }; + + // vpx specific opaque data structure that + // stores encoder state + vpx_codec_ctx_t* mCodecContext; + + // vpx specific data structure that + // stores encoder configuration + vpx_codec_enc_cfg_t* mCodecConfiguration; + + // vpx specific read-only data structure + // that specifies algorithm interface (e.g. vp8) + vpx_codec_iface_t* mCodecInterface; + + // align stride to the power of 2 + int32_t mStrideAlign; + + // Color format for the input port + vpx_img_fmt_t mColorFormat; + + // Bitrate control mode, either constant or variable + vpx_rc_mode mBitrateControlMode; + + // Parameter that denotes whether error resilience + // is enabled in encoder + bool mErrorResilience; + + // Minimum (best quality) quantizer + uint32_t mMinQuantizer; + + // Maximum (worst quality) quantizer + uint32_t mMaxQuantizer; + + // Number of coding temporal layers to be used. + size_t mTemporalLayers; + + // Temporal layer bitrare ratio in percentage + uint32_t mTemporalLayerBitrateRatio[MAXTEMPORALLAYERS]; + + // Temporal pattern type + TemporalPatternType mTemporalPatternType; + + // Temporal pattern length + size_t mTemporalPatternLength; + + // Temporal pattern current index + size_t mTemporalPatternIdx; + + // Frame type temporal pattern + TemporalReferences mTemporalPattern[kMaxTemporalPattern]; + + // Last input buffer timestamp + uint64_t mLastTimestamp; + + // Number of input frames + int64_t mNumInputFrames; + + // Conversion buffer is needed to input to + // yuv420 planar format. 
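+ // The YUV copy path in process() expects this buffer to hold at least stride * vstride * 3 / 2 bytes, i.e. one I420 frame at the aligned stride.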
+ MemoryBlock mConversionBuffer; + + // Signalled EOS + bool mSignalledOutputEos; + + // Signalled Error + bool mSignalledError; + + // configurations used by component in process + // (TODO: keep this in intf but make them internal only) + std::shared_ptr mSize; + std::shared_ptr mIntraRefresh; + std::shared_ptr mFrameRate; + std::shared_ptr mBitrate; + std::shared_ptr mBitrateMode; + std::shared_ptr mRequestSync; + + C2_DO_NOT_COPY(C2SoftVpxEnc); +}; + +class C2SoftVpxEnc::IntfImpl : public C2InterfaceHelper { + public: + explicit IntfImpl(const std::shared_ptr& helper) + : C2InterfaceHelper(helper) { + setDerivedInstance(this); + + addParameter( + DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING) + .withConstValue( + new C2StreamFormatConfig::input(0u, C2FormatVideo)) + .build()); + + addParameter( + DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING) + .withConstValue( + new C2StreamFormatConfig::output(0u, C2FormatCompressed)) + .build()); + + addParameter( + DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING) + .withConstValue(AllocSharedString( + MEDIA_MIMETYPE_VIDEO_RAW)) + .build()); + + addParameter( + DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING) + .withConstValue(AllocSharedString( +#ifdef VP9 + MEDIA_MIMETYPE_VIDEO_VP9 +#else + MEDIA_MIMETYPE_VIDEO_VP8 +#endif + )) + .build()); + + addParameter(DefineParam(mUsage, C2_NAME_INPUT_STREAM_USAGE_SETTING) + .withConstValue(new C2StreamUsageTuning::input( + 0u, (uint64_t)C2MemoryUsage::CPU_READ)) + .build()); + + addParameter( + DefineParam(mSize, C2_NAME_STREAM_VIDEO_SIZE_SETTING) + .withDefault(new C2VideoSizeStreamTuning::input(0u, 320, 240)) + .withFields({ + C2F(mSize, width).inRange(2, 2048, 2), + C2F(mSize, height).inRange(2, 2048, 2), + }) + .withSetter(SizeSetter) + .build()); + + addParameter( + DefineParam(mBitrateMode, C2_PARAMKEY_BITRATE_MODE) + .withDefault(new C2StreamBitrateModeTuning::output( + 0u, C2Config::BITRATE_CONST)) + .withFields({ + C2F(mBitrateMode, value).oneOf({ + C2Config::BITRATE_CONST, C2Config::BITRATE_VARIABLE }) + }) + .withSetter( + Setter::StrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mFrameRate, C2_NAME_STREAM_FRAME_RATE_SETTING) + .withDefault(new C2StreamFrameRateInfo::output(0u, 30.)) + // TODO: More restriction? + .withFields({C2F(mFrameRate, value).greaterThan(0.)}) + .withSetter( + Setter::StrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mLayering, C2_PARAMKEY_TEMPORAL_LAYERING) + .withDefault(C2StreamTemporalLayeringTuning::output::AllocShared(0u, 0, 0, 0)) + .withFields({ + C2F(mLayering, m.layerCount).inRange(0, 4), + C2F(mLayering, m.bLayerCount).inRange(0, 0), + C2F(mLayering, m.bitrateRatios).inRange(0., 1.) 
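+ // LayeringSetter below additionally clamps layerCount to at most 4, forces bLayerCount to 0, and keeps the ratios monotonic within [0, 1].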
+ }) + .withSetter(LayeringSetter) + .build()); + + addParameter( + DefineParam(mSyncFramePeriod, C2_PARAMKEY_SYNC_FRAME_INTERVAL) + .withDefault(new C2StreamSyncFrameIntervalTuning::output(0u, 1000000)) + .withFields({C2F(mSyncFramePeriod, value).any()}) + .withSetter(Setter::StrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING) + .withDefault(new C2BitrateTuning::output(0u, 64000)) + .withFields({C2F(mBitrate, value).inRange(4096, 40000000)}) + .withSetter(BitrateSetter) + .build()); + + addParameter( + DefineParam(mIntraRefresh, C2_PARAMKEY_INTRA_REFRESH) + .withConstValue(new C2StreamIntraRefreshTuning::output( + 0u, C2Config::INTRA_REFRESH_DISABLED, 0.)) + .build()); + + addParameter( + DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL) + .withDefault(new C2StreamProfileLevelInfo::output( + 0u, PROFILE_VP9_0, LEVEL_VP9_4_1)) + .withFields({ + C2F(mProfileLevel, profile).equalTo( + PROFILE_VP9_0 + ), + C2F(mProfileLevel, level).equalTo( + LEVEL_VP9_4_1), + }) + .withSetter(ProfileLevelSetter) + .build()); + + addParameter( + DefineParam(mRequestSync, C2_PARAMKEY_REQUEST_SYNC_FRAME) + .withDefault(new C2StreamRequestSyncFrameTuning::output(0u, C2_FALSE)) + .withFields({C2F(mRequestSync, value).oneOf({ C2_FALSE, C2_TRUE }) }) + .withSetter(Setter::NonStrictValueWithNoDeps) + .build()); + } + + static C2R BitrateSetter(bool mayBlock, C2P &me) { + (void)mayBlock; + C2R res = C2R::Ok(); + if (me.v.value <= 4096) { + me.set().value = 4096; + } + return res; + } + + static C2R SizeSetter(bool mayBlock, const C2P &oldMe, + C2P &me) { + (void)mayBlock; + C2R res = C2R::Ok(); + if (!me.F(me.v.width).supportsAtAll(me.v.width)) { + res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.width))); + me.set().width = oldMe.v.width; + } + if (!me.F(me.v.height).supportsAtAll(me.v.height)) { + res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.height))); + me.set().height = oldMe.v.height; + } + return res; + } + + static C2R ProfileLevelSetter( + bool mayBlock, + C2P &me) { + (void)mayBlock; + if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) { + me.set().profile = PROFILE_VP9_0; + } + if (!me.F(me.v.level).supportsAtAll(me.v.level)) { + me.set().level = LEVEL_VP9_4_1; + } + return C2R::Ok(); + } + + static C2R LayeringSetter(bool mayBlock, C2P& me) { + (void)mayBlock; + C2R res = C2R::Ok(); + if (me.v.m.layerCount > 4) { + me.set().m.layerCount = 4; + } + me.set().m.bLayerCount = 0; + // ensure ratios are monotonic and clamped between 0 and 1 + for (size_t ix = 0; ix < me.v.flexCount(); ++ix) { + me.set().m.bitrateRatios[ix] = c2_clamp( + ix > 0 ? 
me.v.m.bitrateRatios[ix - 1] : 0, me.v.m.bitrateRatios[ix], 1.); + } + ALOGI("setting temporal layering %u + %u", me.v.m.layerCount, me.v.m.bLayerCount); + return res; + } + + // unsafe getters + std::shared_ptr getSize_l() const { return mSize; } + std::shared_ptr getIntraRefresh_l() const { return mIntraRefresh; } + std::shared_ptr getFrameRate_l() const { return mFrameRate; } + std::shared_ptr getBitrate_l() const { return mBitrate; } + std::shared_ptr getBitrateMode_l() const { return mBitrateMode; } + std::shared_ptr getRequestSync_l() const { return mRequestSync; } + std::shared_ptr getTemporalLayers_l() const { return mLayering; } + uint32_t getSyncFramePeriod() const { + if (mSyncFramePeriod->value < 0 || mSyncFramePeriod->value == INT64_MAX) { + return 0; + } + double period = mSyncFramePeriod->value / 1e6 * mFrameRate->value; + return (uint32_t)c2_max(c2_min(period + 0.5, double(UINT32_MAX)), 1.); + } + + private: + std::shared_ptr mInputFormat; + std::shared_ptr mOutputFormat; + std::shared_ptr mInputMediaType; + std::shared_ptr mOutputMediaType; + std::shared_ptr mUsage; + std::shared_ptr mSize; + std::shared_ptr mFrameRate; + std::shared_ptr mLayering; + std::shared_ptr mIntraRefresh; + std::shared_ptr mRequestSync; + std::shared_ptr mSyncFramePeriod; + std::shared_ptr mBitrate; + std::shared_ptr mBitrateMode; + std::shared_ptr mProfileLevel; +}; + +} // namespace android + +#endif // ANDROID_C2_SOFT_VPX_ENC_H__ diff --git a/media/codec2/components/vpx/MODULE_LICENSE_APACHE2 b/media/codec2/components/vpx/MODULE_LICENSE_APACHE2 new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/media/codec2/components/vpx/NOTICE b/media/codec2/components/vpx/NOTICE new file mode 100644 index 0000000000000000000000000000000000000000..faed58a1532f9936961dc9bfae1dcf2ccf606e0a --- /dev/null +++ b/media/codec2/components/vpx/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2005-2013, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/media/codec2/components/xaac/Android.bp b/media/codec2/components/xaac/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..19c12cf5438ac41e7d49ab961a1cb11233695cba --- /dev/null +++ b/media/codec2/components/xaac/Android.bp @@ -0,0 +1,11 @@ +cc_library_shared { + name: "libstagefright_soft_c2xaacdec", + defaults: [ + "libstagefright_soft_c2-defaults", + "libstagefright_soft_c2_sanitize_all-defaults", + ], + + srcs: ["C2SoftXaacDec.cpp"], + + static_libs: ["libxaacdec"], +} diff --git a/media/codec2/components/xaac/C2SoftXaacDec.cpp b/media/codec2/components/xaac/C2SoftXaacDec.cpp new file mode 100644 index 0000000000000000000000000000000000000000..1c0e70b2a2897e033054350e23409d2368addc73 --- /dev/null +++ b/media/codec2/components/xaac/C2SoftXaacDec.cpp @@ -0,0 +1,1583 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "C2SoftXaacDec" +#include + +#include + +#include +#include +#include +#include + +#include +#include + +#include "C2SoftXaacDec.h" + +#define DRC_DEFAULT_MOBILE_REF_LEVEL -16.0 /* 64*-0.25dB = -16 dB below full scale for mobile conf */ +#define DRC_DEFAULT_MOBILE_DRC_CUT 1.0 /* maximum compression of dynamic range for mobile conf */ +#define DRC_DEFAULT_MOBILE_DRC_BOOST 1.0 /* maximum compression of dynamic range for mobile conf */ +#define DRC_DEFAULT_MOBILE_DRC_HEAVY C2Config::DRC_COMPRESSION_HEAVY /* switch for heavy compression for mobile conf */ +#define DRC_DEFAULT_MOBILE_DRC_EFFECT 3 /* MPEG-D DRC effect type; 3 => Limited playback range */ +#define DRC_DEFAULT_MOBILE_ENC_LEVEL (0.25) /* encoder target level; -1 => the value is unknown, otherwise dB step value (e.g. 64 for -16 dB) */ +#define MAX_CHANNEL_COUNT 8 /* maximum number of audio channels that can be decoded */ +// names of properties that can be used to override the default DRC settings +#define PROP_DRC_OVERRIDE_REF_LEVEL "aac_drc_reference_level" +#define PROP_DRC_OVERRIDE_CUT "aac_drc_cut" +#define PROP_DRC_OVERRIDE_BOOST "aac_drc_boost" +#define PROP_DRC_OVERRIDE_HEAVY "aac_drc_heavy" +#define PROP_DRC_OVERRIDE_ENC_LEVEL "aac_drc_enc_target_level" +#define PROP_DRC_OVERRIDE_EFFECT_TYPE "ro.aac_drc_effect_type" + +#define RETURN_IF_FATAL(retval, str) \ + if (retval & IA_FATAL_ERROR) { \ + ALOGE("Error in %s: Returned: %d", str, retval); \ + return retval; \ + } else if (retval != IA_NO_ERROR) { \ + ALOGW("Warning in %s: Returned: %d", str, retval); \ + } + + +namespace android { + +constexpr char COMPONENT_NAME[] = "c2.android.xaac.decoder"; + +class C2SoftXaacDec::IntfImpl : public C2InterfaceHelper { +public: + explicit IntfImpl(const std::shared_ptr &helper) + : C2InterfaceHelper(helper) { + + setDerivedInstance(this); + + addParameter( + DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING) + .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatCompressed)) + .build()); + + addParameter( + DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING) + .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatAudio)) + .build()); + + addParameter( + DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING) + .withConstValue(AllocSharedString( + MEDIA_MIMETYPE_AUDIO_AAC)) + .build()); + + addParameter( + DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING) + .withConstValue(AllocSharedString( + MEDIA_MIMETYPE_AUDIO_RAW)) + .build()); + + addParameter( + DefineParam(mSampleRate, C2_NAME_STREAM_SAMPLE_RATE_SETTING) + .withDefault(new C2StreamSampleRateInfo::output(0u, 44100)) + .withFields({C2F(mSampleRate, value).oneOf({ + 7350, 8000, 11025, 12000, 16000, 22050, 24000, 32000, 44100, 48000 + })}) + .withSetter((Setter::StrictValueWithNoDeps)) + .build()); + + addParameter( + DefineParam(mChannelCount, C2_NAME_STREAM_CHANNEL_COUNT_SETTING) + .withDefault(new C2StreamChannelCountInfo::output(0u, 1)) + .withFields({C2F(mChannelCount, value).inRange(1, 8)}) + .withSetter(Setter::StrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mBitrate, C2_NAME_STREAM_BITRATE_SETTING) + .withDefault(new C2BitrateTuning::input(0u, 64000)) + .withFields({C2F(mBitrate, value).inRange(8000, 960000)}) + .withSetter(Setter::NonStrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mInputMaxBufSize, C2_PARAMKEY_INPUT_MAX_BUFFER_SIZE) + .withConstValue(new C2StreamMaxBufferSizeInfo::input(0u, 8192)) + .build()); + + addParameter( + 
DefineParam(mAacFormat, C2_NAME_STREAM_AAC_FORMAT_SETTING) + .withDefault(new C2StreamAacFormatInfo::input(0u, C2AacStreamFormatRaw)) + .withFields({C2F(mAacFormat, value).oneOf({ + C2AacStreamFormatRaw, C2AacStreamFormatAdts + })}) + .withSetter(Setter::StrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mProfileLevel, C2_PARAMKEY_PROFILE_LEVEL) + .withDefault(new C2StreamProfileLevelInfo::input(0u, + C2Config::PROFILE_AAC_LC, C2Config::LEVEL_UNUSED)) + .withFields({ + C2F(mProfileLevel, profile).oneOf({ + C2Config::PROFILE_AAC_LC, + C2Config::PROFILE_AAC_HE, + C2Config::PROFILE_AAC_HE_PS, + C2Config::PROFILE_AAC_LD, + C2Config::PROFILE_AAC_ELD, + C2Config::PROFILE_AAC_XHE}), + C2F(mProfileLevel, level).oneOf({ + C2Config::LEVEL_UNUSED + }) + }) + .withSetter(ProfileLevelSetter) + .build()); + + addParameter( + DefineParam(mDrcCompressMode, C2_PARAMKEY_DRC_COMPRESSION_MODE) + .withDefault(new C2StreamDrcCompressionModeTuning::input(0u, C2Config::DRC_COMPRESSION_HEAVY)) + .withFields({ + C2F(mDrcCompressMode, value).oneOf({ + C2Config::DRC_COMPRESSION_ODM_DEFAULT, + C2Config::DRC_COMPRESSION_NONE, + C2Config::DRC_COMPRESSION_LIGHT, + C2Config::DRC_COMPRESSION_HEAVY}) + }) + .withSetter(Setter::StrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mDrcTargetRefLevel, C2_PARAMKEY_DRC_TARGET_REFERENCE_LEVEL) + .withDefault(new C2StreamDrcTargetReferenceLevelTuning::input(0u, DRC_DEFAULT_MOBILE_REF_LEVEL)) + .withFields({C2F(mDrcTargetRefLevel, value).inRange(-31.75, 0.25)}) + .withSetter(Setter::StrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mDrcEncTargetLevel, C2_PARAMKEY_DRC_ENCODED_TARGET_LEVEL) + .withDefault(new C2StreamDrcEncodedTargetLevelTuning::input(0u, DRC_DEFAULT_MOBILE_ENC_LEVEL)) + .withFields({C2F(mDrcEncTargetLevel, value).inRange(-31.75, 0.25)}) + .withSetter(Setter::StrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mDrcBoostFactor, C2_PARAMKEY_DRC_BOOST_FACTOR) + .withDefault(new C2StreamDrcBoostFactorTuning::input(0u, DRC_DEFAULT_MOBILE_DRC_BOOST)) + .withFields({C2F(mDrcBoostFactor, value).inRange(0, 1.)}) + .withSetter(Setter::StrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mDrcAttenuationFactor, C2_PARAMKEY_DRC_ATTENUATION_FACTOR) + .withDefault(new C2StreamDrcAttenuationFactorTuning::input(0u, DRC_DEFAULT_MOBILE_DRC_CUT)) + .withFields({C2F(mDrcAttenuationFactor, value).inRange(0, 1.)}) + .withSetter(Setter::StrictValueWithNoDeps) + .build()); + + addParameter( + DefineParam(mDrcEffectType, C2_PARAMKEY_DRC_EFFECT_TYPE) + .withDefault(new C2StreamDrcEffectTypeTuning::input(0u, C2Config::DRC_EFFECT_LIMITED_PLAYBACK_RANGE)) + .withFields({ + C2F(mDrcEffectType, value).oneOf({ + C2Config::DRC_EFFECT_ODM_DEFAULT, + C2Config::DRC_EFFECT_OFF, + C2Config::DRC_EFFECT_NONE, + C2Config::DRC_EFFECT_LATE_NIGHT, + C2Config::DRC_EFFECT_NOISY_ENVIRONMENT, + C2Config::DRC_EFFECT_LIMITED_PLAYBACK_RANGE, + C2Config::DRC_EFFECT_LOW_PLAYBACK_LEVEL, + C2Config::DRC_EFFECT_DIALOG_ENHANCEMENT, + C2Config::DRC_EFFECT_GENERAL_COMPRESSION}) + }) + .withSetter(Setter::StrictValueWithNoDeps) + .build()); + } + + bool isAdts() const { return mAacFormat->value == C2AacStreamFormatAdts; } + uint32_t getBitrate() const { return mBitrate->value; } + static C2R ProfileLevelSetter(bool mayBlock, C2P &me) { + (void)mayBlock; + (void)me; // TODO: validate + return C2R::Ok(); + } + int32_t getDrcCompressMode() const { return mDrcCompressMode->value == C2Config::DRC_COMPRESSION_HEAVY ? 
1 : 0; } + int32_t getDrcTargetRefLevel() const { return (mDrcTargetRefLevel->value <= 0 ? -mDrcTargetRefLevel->value * 4. + 0.5 : -1); } + int32_t getDrcEncTargetLevel() const { return (mDrcEncTargetLevel->value <= 0 ? -mDrcEncTargetLevel->value * 4. + 0.5 : -1); } + int32_t getDrcBoostFactor() const { return mDrcBoostFactor->value * 127. + 0.5; } + int32_t getDrcAttenuationFactor() const { return mDrcAttenuationFactor->value * 127. + 0.5; } + int32_t getDrcEffectType() const { return mDrcEffectType->value; } + +private: + std::shared_ptr mInputFormat; + std::shared_ptr mOutputFormat; + std::shared_ptr mInputMediaType; + std::shared_ptr mOutputMediaType; + std::shared_ptr mSampleRate; + std::shared_ptr mChannelCount; + std::shared_ptr mBitrate; + std::shared_ptr mInputMaxBufSize; + std::shared_ptr mAacFormat; + std::shared_ptr mProfileLevel; + std::shared_ptr mDrcCompressMode; + std::shared_ptr mDrcTargetRefLevel; + std::shared_ptr mDrcEncTargetLevel; + std::shared_ptr mDrcBoostFactor; + std::shared_ptr mDrcAttenuationFactor; + std::shared_ptr mDrcEffectType; + // TODO Add : C2StreamAacSbrModeTuning +}; + +C2SoftXaacDec::C2SoftXaacDec( + const char* name, + c2_node_id_t id, + const std::shared_ptr &intfImpl) + : SimpleC2Component(std::make_shared>(name, id, intfImpl)), + mIntf(intfImpl), + mXheaacCodecHandle(nullptr), + mMpegDDrcHandle(nullptr), + mOutputDrainBuffer(nullptr) { +} + +C2SoftXaacDec::~C2SoftXaacDec() { + onRelease(); +} + +c2_status_t C2SoftXaacDec::onInit() { + mOutputFrameLength = 1024; + mInputBuffer = nullptr; + mOutputBuffer = nullptr; + mSampFreq = 0; + mNumChannels = 0; + mPcmWdSz = 0; + mChannelMask = 0; + mNumOutBytes = 0; + mCurFrameIndex = 0; + mCurTimestamp = 0; + mIsCodecInitialized = false; + mIsCodecConfigFlushRequired = false; + mSignalledOutputEos = false; + mSignalledError = false; + mOutputDrainBufferWritePos = 0; + mDRCFlag = 0; + mMpegDDRCPresent = 0; + mMemoryVec.clear(); + mDrcMemoryVec.clear(); + + IA_ERRORCODE err = initDecoder(); + return err == IA_NO_ERROR ? 
C2_OK : C2_CORRUPTED; + +} + +c2_status_t C2SoftXaacDec::onStop() { + mOutputFrameLength = 1024; + drainDecoder(); + // reset the "configured" state + mSampFreq = 0; + mNumChannels = 0; + mPcmWdSz = 0; + mChannelMask = 0; + mNumOutBytes = 0; + mCurFrameIndex = 0; + mCurTimestamp = 0; + mSignalledOutputEos = false; + mSignalledError = false; + mOutputDrainBufferWritePos = 0; + mDRCFlag = 0; + mMpegDDRCPresent = 0; + + return C2_OK; +} + +void C2SoftXaacDec::onReset() { + (void)onStop(); +} + +void C2SoftXaacDec::onRelease() { + IA_ERRORCODE errCode = deInitXAACDecoder(); + if (IA_NO_ERROR != errCode) ALOGE("deInitXAACDecoder() failed %d", errCode); + + errCode = deInitMPEGDDDrc(); + if (IA_NO_ERROR != errCode) ALOGE("deInitMPEGDDDrc() failed %d", errCode); + + if (mOutputDrainBuffer) { + delete[] mOutputDrainBuffer; + mOutputDrainBuffer = nullptr; + } +} + +IA_ERRORCODE C2SoftXaacDec::initDecoder() { + ALOGV("initDecoder()"); + IA_ERRORCODE err_code = IA_NO_ERROR; + + err_code = initXAACDecoder(); + if (err_code != IA_NO_ERROR) { + ALOGE("initXAACDecoder Failed"); + /* Call deInit to free any allocated memory */ + deInitXAACDecoder(); + return IA_FATAL_ERROR; + } + + if (!mOutputDrainBuffer) { + mOutputDrainBuffer = new (std::nothrow) char[kOutputDrainBufferSize]; + if (!mOutputDrainBuffer) return IA_FATAL_ERROR; + } + + err_code = initXAACDrc(); + RETURN_IF_FATAL(err_code, "initXAACDrc"); + + + return IA_NO_ERROR; +} + +static void fillEmptyWork(const std::unique_ptr& work) { + uint32_t flags = 0; + if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) { + flags |= C2FrameData::FLAG_END_OF_STREAM; + ALOGV("signalling eos"); + } + work->worklets.front()->output.flags = (C2FrameData::flags_t)flags; + work->worklets.front()->output.buffers.clear(); + work->worklets.front()->output.ordinal = work->input.ordinal; + work->workletsProcessed = 1u; +} + +void C2SoftXaacDec::finishWork(const std::unique_ptr& work, + const std::shared_ptr& pool) { + ALOGV("mCurFrameIndex = %" PRIu64, mCurFrameIndex); + + std::shared_ptr block; + C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE}; + // TODO: error handling, proper usage, etc. 
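+ // mOutputDrainBufferWritePos is the number of PCM bytes accumulated in mOutputDrainBuffer for this work item; they are flushed into a single linear block here.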
+ c2_status_t err = + pool->fetchLinearBlock(mOutputDrainBufferWritePos, usage, &block); + if (err != C2_OK) { + ALOGE("fetchLinearBlock failed : err = %d", err); + work->result = C2_NO_MEMORY; + return; + } + C2WriteView wView = block->map().get(); + int16_t* outBuffer = reinterpret_cast(wView.data()); + memcpy(outBuffer, mOutputDrainBuffer, mOutputDrainBufferWritePos); + mOutputDrainBufferWritePos = 0; + + auto fillWork = [buffer = createLinearBuffer(block)]( + const std::unique_ptr& work) { + uint32_t flags = 0; + if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) { + flags |= C2FrameData::FLAG_END_OF_STREAM; + ALOGV("signalling eos"); + } + work->worklets.front()->output.flags = (C2FrameData::flags_t)flags; + work->worklets.front()->output.buffers.clear(); + work->worklets.front()->output.buffers.push_back(buffer); + work->worklets.front()->output.ordinal = work->input.ordinal; + work->workletsProcessed = 1u; + }; + if (work && work->input.ordinal.frameIndex == c2_cntr64_t(mCurFrameIndex)) { + fillWork(work); + } else { + finish(mCurFrameIndex, fillWork); + } + + ALOGV("out timestamp %" PRIu64 " / %u", mCurTimestamp, block->capacity()); +} + +void C2SoftXaacDec::process(const std::unique_ptr& work, + const std::shared_ptr& pool) { + // Initialize output work + work->result = C2_OK; + work->workletsProcessed = 1u; + work->worklets.front()->output.configUpdate.clear(); + work->worklets.front()->output.flags = work->input.flags; + + if (mSignalledError || mSignalledOutputEos) { + work->result = C2_BAD_VALUE; + return; + } + uint8_t* inBuffer = nullptr; + uint32_t inBufferLength = 0; + C2ReadView view = mDummyReadView; + size_t offset = 0u; + size_t size = 0u; + if (!work->input.buffers.empty()) { + view = work->input.buffers[0]->data().linearBlocks().front().map().get(); + size = view.capacity(); + } + if (size && view.error()) { + ALOGE("read view map failed %d", view.error()); + work->result = view.error(); + return; + } + + bool eos = (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) != 0; + bool codecConfig = + (work->input.flags & C2FrameData::FLAG_CODEC_CONFIG) != 0; + if (codecConfig) { + if (size == 0u) { + ALOGE("empty codec config"); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + // const_cast because of libAACdec method signature. + inBuffer = const_cast(view.data() + offset); + inBufferLength = size; + + /* GA header configuration sent to Decoder! */ + IA_ERRORCODE err_code = configXAACDecoder(inBuffer, inBufferLength); + if (IA_NO_ERROR != err_code) { + ALOGE("configXAACDecoder err_code = %d", err_code); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + work->worklets.front()->output.flags = work->input.flags; + work->worklets.front()->output.ordinal = work->input.ordinal; + work->worklets.front()->output.buffers.clear(); + return; + } + + mCurFrameIndex = work->input.ordinal.frameIndex.peeku(); + mCurTimestamp = work->input.ordinal.timestamp.peeku(); + mOutputDrainBufferWritePos = 0; + char* tempOutputDrainBuffer = mOutputDrainBuffer; + while (size > 0u) { + if ((kOutputDrainBufferSize * sizeof(int16_t) - + mOutputDrainBufferWritePos) < + (mOutputFrameLength * sizeof(int16_t) * mNumChannels)) { + ALOGV("skipping decode: not enough space left in DrainBuffer"); + break; + } + + ALOGV("inAttribute size = %zu", size); + if (mIntf->isAdts()) { + ALOGV("ADTS"); + size_t adtsHeaderSize = 0; + // skip 30 bits, aac_frame_length follows. + // ssssssss ssssiiip ppffffPc ccohCCll llllllll lll????? 
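+ // aac_frame_length is the 13-bit field spanning bytes 3-5; the header itself is 7 bytes when protection_absent is set (no CRC) and 9 bytes otherwise.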
+ + const uint8_t* adtsHeader = view.data() + offset; + bool signalError = false; + if (size < 7) { + ALOGE("Audio data too short to contain even the ADTS header. " + "Got %zu bytes.", size); + hexdump(adtsHeader, size); + signalError = true; + } else { + bool protectionAbsent = (adtsHeader[1] & 1); + unsigned aac_frame_length = ((adtsHeader[3] & 3) << 11) | + (adtsHeader[4] << 3) | + (adtsHeader[5] >> 5); + + if (size < aac_frame_length) { + ALOGE("Not enough audio data for the complete frame. " + "Got %zu bytes, frame size according to the ADTS " + "header is %u bytes.", size, aac_frame_length); + hexdump(adtsHeader, size); + signalError = true; + } else { + adtsHeaderSize = (protectionAbsent ? 7 : 9); + if (aac_frame_length < adtsHeaderSize) { + signalError = true; + } else { + // const_cast because of libAACdec method signature. + inBuffer = + const_cast(adtsHeader + adtsHeaderSize); + inBufferLength = aac_frame_length - adtsHeaderSize; + + offset += adtsHeaderSize; + size -= adtsHeaderSize; + } + } + } + + if (signalError) { + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + } else { + ALOGV("Non ADTS"); + // const_cast because of libAACdec method signature. + inBuffer = const_cast(view.data() + offset); + inBufferLength = size; + } + + signed int prevSampleRate = mSampFreq; + signed int prevNumChannels = mNumChannels; + + /* XAAC decoder expects first frame to be fed via configXAACDecoder API + * which should initialize the codec. Once this state is reached, call the + * decodeXAACStream API with same frame to decode! */ + if (!mIsCodecInitialized) { + IA_ERRORCODE err_code = configXAACDecoder(inBuffer, inBufferLength); + if (IA_NO_ERROR != err_code) { + ALOGE("configXAACDecoder Failed 2 err_code = %d", err_code); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + + if ((mSampFreq != prevSampleRate) || + (mNumChannels != prevNumChannels)) { + ALOGI("Reconfiguring decoder: %d->%d Hz, %d->%d channels", + prevSampleRate, mSampFreq, prevNumChannels, mNumChannels); + + C2StreamSampleRateInfo::output sampleRateInfo(0u, mSampFreq); + C2StreamChannelCountInfo::output channelCountInfo(0u, mNumChannels); + std::vector> failures; + c2_status_t err = mIntf->config( + { &sampleRateInfo, &channelCountInfo }, + C2_MAY_BLOCK, + &failures); + if (err == OK) { + work->worklets.front()->output.configUpdate.push_back( + C2Param::Copy(sampleRateInfo)); + work->worklets.front()->output.configUpdate.push_back( + C2Param::Copy(channelCountInfo)); + } else { + ALOGE("Config Update failed"); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + } + } + + signed int bytesConsumed = 0; + IA_ERRORCODE errorCode = IA_NO_ERROR; + if (mIsCodecInitialized) { + mIsCodecConfigFlushRequired = true; + errorCode = decodeXAACStream(inBuffer, inBufferLength, + &bytesConsumed, &mNumOutBytes); + } else if (!mIsCodecConfigFlushRequired) { + ALOGW("Assumption that first frame after header initializes decoder Failed!"); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + size -= bytesConsumed; + offset += bytesConsumed; + + if (inBufferLength != (uint32_t)bytesConsumed) + ALOGW("All data not consumed"); + + /* In case of error, decoder would have given out empty buffer */ + if ((IA_NO_ERROR != errorCode) && (0 == mNumOutBytes) && mIsCodecInitialized) + mNumOutBytes = mOutputFrameLength * (mPcmWdSz / 8) * mNumChannels; + + if (!bytesConsumed) { + ALOGW("bytesConsumed = 0 should never happen"); + } + + if ((uint32_t)mNumOutBytes > + 
mOutputFrameLength * sizeof(int16_t) * mNumChannels) { + ALOGE("mNumOutBytes > mOutputFrameLength * sizeof(int16_t) * mNumChannels, should never happen"); + mSignalledError = true; + work->result = C2_CORRUPTED; + return; + } + + if (IA_NO_ERROR != errorCode) { + // TODO: check for overflow, ASAN + memset(mOutputBuffer, 0, mNumOutBytes); + + // Discard input buffer. + size = 0; + + // fall through + } + memcpy(tempOutputDrainBuffer, mOutputBuffer, mNumOutBytes); + tempOutputDrainBuffer += mNumOutBytes; + mOutputDrainBufferWritePos += mNumOutBytes; + } + + if (mOutputDrainBufferWritePos) { + finishWork(work, pool); + } else { + fillEmptyWork(work); + } + if (eos) mSignalledOutputEos = true; +} + +c2_status_t C2SoftXaacDec::drain(uint32_t drainMode, + const std::shared_ptr& pool) { + (void)pool; + if (drainMode == NO_DRAIN) { + ALOGW("drain with NO_DRAIN: no-op"); + return C2_OK; + } + if (drainMode == DRAIN_CHAIN) { + ALOGW("DRAIN_CHAIN not supported"); + return C2_OMITTED; + } + + return C2_OK; +} + +IA_ERRORCODE C2SoftXaacDec::configflushDecode() { + IA_ERRORCODE err_code; + uint32_t ui_init_done; + uint32_t inBufferLength = 8203; + + err_code = ixheaacd_dec_api(mXheaacCodecHandle, + IA_API_CMD_INIT, + IA_CMD_TYPE_FLUSH_MEM, + nullptr); + RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_FLUSH_MEM"); + + err_code = ixheaacd_dec_api(mXheaacCodecHandle, + IA_API_CMD_SET_INPUT_BYTES, + 0, + &inBufferLength); + RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_INPUT_BYTES"); + + err_code = ixheaacd_dec_api(mXheaacCodecHandle, + IA_API_CMD_INIT, + IA_CMD_TYPE_FLUSH_MEM, + nullptr); + RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_FLUSH_MEM"); + + err_code = ixheaacd_dec_api(mXheaacCodecHandle, + IA_API_CMD_INIT, + IA_CMD_TYPE_INIT_DONE_QUERY, + &ui_init_done); + RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_DONE_QUERY"); + + if (ui_init_done) { + err_code = getXAACStreamInfo(); + RETURN_IF_FATAL(err_code, "getXAACStreamInfo"); + ALOGV("Found Codec with below config---\nsampFreq %d\nnumChannels %d\npcmWdSz %d\nchannelMask %d\noutputFrameLength %d", + mSampFreq, mNumChannels, mPcmWdSz, mChannelMask, mOutputFrameLength); + mIsCodecInitialized = true; + } + return IA_NO_ERROR; +} + +c2_status_t C2SoftXaacDec::onFlush_sm() { + if (mIsCodecInitialized) { + IA_ERRORCODE err_code = configflushDecode(); + if (err_code != IA_NO_ERROR) { + ALOGE("Error in configflushDecode: Error %d", err_code); + } + } + drainDecoder(); + mSignalledOutputEos = false; + mSignalledError = false; + + return C2_OK; +} + +IA_ERRORCODE C2SoftXaacDec::drainDecoder() { + /* Output delay compensation logic should sit here. 
*/ + /* Nothing to be done as XAAC decoder does not introduce output buffer delay */ + + return 0; +} + +IA_ERRORCODE C2SoftXaacDec::initXAACDecoder() { + /* First part */ + /* Error Handler Init */ + /* Get Library Name, Library Version and API Version */ + /* Initialize API structure + Default config set */ + /* Set config params from user */ + /* Initialize memory tables */ + /* Get memory information and allocate memory */ + + mInputBufferSize = 0; + mInputBuffer = nullptr; + mOutputBuffer = nullptr; + /* Process struct initing end */ + + /* ******************************************************************/ + /* Initialize API structure and set config params to default */ + /* ******************************************************************/ + /* API size */ + uint32_t pui_api_size; + /* Get the API size */ + IA_ERRORCODE err_code = ixheaacd_dec_api(nullptr, + IA_API_CMD_GET_API_SIZE, + 0, + &pui_api_size); + RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_API_SIZE"); + + /* Allocate memory for API */ + mXheaacCodecHandle = memalign(4, pui_api_size); + if (!mXheaacCodecHandle) { + ALOGE("malloc for pui_api_size + 4 >> %d Failed", pui_api_size + 4); + return IA_FATAL_ERROR; + } + mMemoryVec.push(mXheaacCodecHandle); + + /* Set the config params to default values */ + err_code = ixheaacd_dec_api(mXheaacCodecHandle, + IA_API_CMD_INIT, + IA_CMD_TYPE_INIT_API_PRE_CONFIG_PARAMS, + nullptr); + RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_API_PRE_CONFIG_PARAMS"); + + /* Get the API size */ + err_code = ia_drc_dec_api(nullptr, IA_API_CMD_GET_API_SIZE, 0, &pui_api_size); + + RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_API_SIZE"); + + /* Allocate memory for API */ + mMpegDDrcHandle = memalign(4, pui_api_size); + if (!mMpegDDrcHandle) { + ALOGE("malloc for pui_api_size + 4 >> %d Failed", pui_api_size + 4); + return IA_FATAL_ERROR; + } + mMemoryVec.push(mMpegDDrcHandle); + + /* Set the config params to default values */ + err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_INIT, + IA_CMD_TYPE_INIT_API_PRE_CONFIG_PARAMS, nullptr); + + RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_API_PRE_CONFIG_PARAMS"); + + /* ******************************************************************/ + /* Set config parameters */ + /* ******************************************************************/ + uint32_t ui_mp4_flag = 1; + err_code = ixheaacd_dec_api(mXheaacCodecHandle, + IA_API_CMD_SET_CONFIG_PARAM, + IA_ENHAACPLUS_DEC_CONFIG_PARAM_ISMP4, + &ui_mp4_flag); + RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_ISMP4"); + + /* ******************************************************************/ + /* Initialize Memory info tables */ + /* ******************************************************************/ + uint32_t ui_proc_mem_tabs_size; + pVOID pv_alloc_ptr; + /* Get memory info tables size */ + err_code = ixheaacd_dec_api(mXheaacCodecHandle, + IA_API_CMD_GET_MEMTABS_SIZE, + 0, + &ui_proc_mem_tabs_size); + RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_MEMTABS_SIZE"); + + pv_alloc_ptr = memalign(4, ui_proc_mem_tabs_size); + if (!pv_alloc_ptr) { + ALOGE("Malloc for size (ui_proc_mem_tabs_size + 4) = %d failed!", ui_proc_mem_tabs_size + 4); + return IA_FATAL_ERROR; + } + mMemoryVec.push(pv_alloc_ptr); + + /* Set pointer for process memory tables */ + err_code = ixheaacd_dec_api(mXheaacCodecHandle, + IA_API_CMD_SET_MEMTABS_PTR, + 0, + pv_alloc_ptr); + RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_MEMTABS_PTR"); + + /* initialize the API, post config, fill memory tables */ + err_code = ixheaacd_dec_api(mXheaacCodecHandle, + 
IA_API_CMD_INIT, + IA_CMD_TYPE_INIT_API_POST_CONFIG_PARAMS, + nullptr); + RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_API_POST_CONFIG_PARAMS"); + + /* ******************************************************************/ + /* Allocate Memory with info from library */ + /* ******************************************************************/ + /* There are four different types of memories, that needs to be allocated */ + /* persistent,scratch,input and output */ + for (int i = 0; i < 4; i++) { + int ui_size = 0, ui_alignment = 0, ui_type = 0; + + /* Get memory size */ + err_code = ixheaacd_dec_api(mXheaacCodecHandle, + IA_API_CMD_GET_MEM_INFO_SIZE, + i, + &ui_size); + RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_MEM_INFO_SIZE"); + + /* Get memory alignment */ + err_code = ixheaacd_dec_api(mXheaacCodecHandle, + IA_API_CMD_GET_MEM_INFO_ALIGNMENT, + i, + &ui_alignment); + RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_MEM_INFO_ALIGNMENT"); + + /* Get memory type */ + err_code = ixheaacd_dec_api(mXheaacCodecHandle, + IA_API_CMD_GET_MEM_INFO_TYPE, + i, + &ui_type); + RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_MEM_INFO_TYPE"); + + pv_alloc_ptr = memalign(ui_alignment, ui_size); + if (!pv_alloc_ptr) { + ALOGE("Malloc for size (ui_size + ui_alignment) = %d failed!", + ui_size + ui_alignment); + return IA_FATAL_ERROR; + } + mMemoryVec.push(pv_alloc_ptr); + + /* Set the buffer pointer */ + err_code = ixheaacd_dec_api(mXheaacCodecHandle, + IA_API_CMD_SET_MEM_PTR, + i, + pv_alloc_ptr); + RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_MEM_PTR"); + if (ui_type == IA_MEMTYPE_INPUT) { + mInputBuffer = (pWORD8)pv_alloc_ptr; + mInputBufferSize = ui_size; + } + if (ui_type == IA_MEMTYPE_OUTPUT) + mOutputBuffer = (pWORD8)pv_alloc_ptr; + } + /* End first part */ + + return IA_NO_ERROR; +} + +status_t C2SoftXaacDec::initXAACDrc() { + IA_ERRORCODE err_code = IA_NO_ERROR; + unsigned int ui_drc_val; + // DRC_PRES_MODE_WRAP_DESIRED_TARGET + int32_t targetRefLevel = mIntf->getDrcTargetRefLevel(); + ALOGV("AAC decoder using desired DRC target reference level of %d", targetRefLevel); + ui_drc_val = (unsigned int)targetRefLevel; + err_code = ixheaacd_dec_api(mXheaacCodecHandle, + IA_API_CMD_SET_CONFIG_PARAM, + IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_TARGET_LEVEL, + &ui_drc_val); + RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_TARGET_LEVEL"); + + /* Use ui_drc_val from PROP_DRC_OVERRIDE_REF_LEVEL or DRC_DEFAULT_MOBILE_REF_LEVEL + * for IA_ENHAACPLUS_DEC_DRC_TARGET_LOUDNESS too */ + err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_SET_CONFIG_PARAM, + IA_ENHAACPLUS_DEC_DRC_TARGET_LOUDNESS, &ui_drc_val); + + RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_DRC_TARGET_LOUDNESS"); + + int32_t attenuationFactor = mIntf->getDrcAttenuationFactor(); + ALOGV("AAC decoder using desired DRC attenuation factor of %d", attenuationFactor); + ui_drc_val = (unsigned int)attenuationFactor; + err_code = ixheaacd_dec_api(mXheaacCodecHandle, + IA_API_CMD_SET_CONFIG_PARAM, + IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_CUT, + &ui_drc_val); + RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_CUT"); + + // DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR + int32_t boostFactor = mIntf->getDrcBoostFactor(); + ALOGV("AAC decoder using desired DRC boost factor of %d", boostFactor); + ui_drc_val = (unsigned int)boostFactor; + err_code = ixheaacd_dec_api(mXheaacCodecHandle, + IA_API_CMD_SET_CONFIG_PARAM, + IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_BOOST, + &ui_drc_val); + RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_BOOST"); + + // 
DRC_PRES_MODE_WRAP_DESIRED_HEAVY + int32_t compressMode = mIntf->getDrcCompressMode(); + ALOGV("AAC decoder using desried DRC heavy compression switch of %d", compressMode); + ui_drc_val = (unsigned int)compressMode; + + err_code = ixheaacd_dec_api(mXheaacCodecHandle, + IA_API_CMD_SET_CONFIG_PARAM, + IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_HEAVY_COMP, + &ui_drc_val); + RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_HEAVY_COMP"); + + // AAC_UNIDRC_SET_EFFECT + int32_t effectType = mIntf->getDrcEffectType(); + ALOGV("AAC decoder using MPEG-D DRC effect type %d", effectType); + ui_drc_val = (unsigned int)effectType; + err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_SET_CONFIG_PARAM, + IA_ENHAACPLUS_DEC_DRC_EFFECT_TYPE, &ui_drc_val); + + RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_DRC_EFFECT_TYPE"); + + return IA_NO_ERROR; +} + +IA_ERRORCODE C2SoftXaacDec::deInitXAACDecoder() { + ALOGV("deInitXAACDecoder"); + + /* Error code */ + IA_ERRORCODE err_code = IA_NO_ERROR; + + if (mXheaacCodecHandle) { + /* Tell that the input is over in this buffer */ + err_code = ixheaacd_dec_api(mXheaacCodecHandle, + IA_API_CMD_INPUT_OVER, + 0, + nullptr); + } + + /* Irrespective of error returned in IA_API_CMD_INPUT_OVER, free allocated memory */ + for (void* buf : mMemoryVec) { + if (buf) free(buf); + } + mMemoryVec.clear(); + mXheaacCodecHandle = nullptr; + + return err_code; +} + +IA_ERRORCODE C2SoftXaacDec::deInitMPEGDDDrc() { + ALOGV("deInitMPEGDDDrc"); + + for (void* buf : mDrcMemoryVec) { + if (buf) free(buf); + } + mDrcMemoryVec.clear(); + return IA_NO_ERROR; +} + +IA_ERRORCODE C2SoftXaacDec::configXAACDecoder(uint8_t* inBuffer, uint32_t inBufferLength) { + if (mInputBufferSize < inBufferLength) { + ALOGE("Cannot config AAC, input buffer size %d < inBufferLength %d", mInputBufferSize, inBufferLength); + return false; + } + /* Copy the buffer passed by Android plugin to codec input buffer */ + memcpy(mInputBuffer, inBuffer, inBufferLength); + + /* Set number of bytes to be processed */ + IA_ERRORCODE err_code = ixheaacd_dec_api(mXheaacCodecHandle, + IA_API_CMD_SET_INPUT_BYTES, + 0, + &inBufferLength); + RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_INPUT_BYTES"); + + if (mIsCodecConfigFlushRequired) { + /* If codec is already initialized, then GA header is passed again */ + /* Need to call the Flush API instead of INIT_PROCESS */ + mIsCodecInitialized = false; /* Codec needs to be Reinitialized after flush */ + err_code = ixheaacd_dec_api(mXheaacCodecHandle, + IA_API_CMD_INIT, + IA_CMD_TYPE_GA_HDR, + nullptr); + RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_GA_HDR"); + } else { + /* Initialize the process */ + err_code = ixheaacd_dec_api(mXheaacCodecHandle, + IA_API_CMD_INIT, + IA_CMD_TYPE_INIT_PROCESS, + nullptr); + RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_PROCESS"); + } + + uint32_t ui_init_done; + /* Checking for end of initialization */ + err_code = ixheaacd_dec_api(mXheaacCodecHandle, + IA_API_CMD_INIT, + IA_CMD_TYPE_INIT_DONE_QUERY, + &ui_init_done); + RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_DONE_QUERY"); + + /* How much buffer is used in input buffers */ + int32_t i_bytes_consumed; + err_code = ixheaacd_dec_api(mXheaacCodecHandle, + IA_API_CMD_GET_CURIDX_INPUT_BUF, + 0, + &i_bytes_consumed); + RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_CURIDX_INPUT_BUF"); + + if (ui_init_done) { + err_code = getXAACStreamInfo(); + RETURN_IF_FATAL(err_code, "getXAACStreamInfo"); + ALOGI("Found Codec with below config---\nsampFreq %d\nnumChannels %d\npcmWdSz %d\nchannelMask %d\noutputFrameLength %d", 
+ mSampFreq, mNumChannels, mPcmWdSz, mChannelMask, mOutputFrameLength); + mIsCodecInitialized = true; + + err_code = configMPEGDDrc(); + RETURN_IF_FATAL(err_code, "configMPEGDDrc"); + } + + return IA_NO_ERROR; +} +IA_ERRORCODE C2SoftXaacDec::initMPEGDDDrc() { + IA_ERRORCODE err_code = IA_NO_ERROR; + + for (int i = 0; i < (WORD32)2; i++) { + WORD32 ui_size, ui_alignment, ui_type; + pVOID pv_alloc_ptr; + + /* Get memory size */ + err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_GET_MEM_INFO_SIZE, i, &ui_size); + + RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_MEM_INFO_SIZE"); + + /* Get memory alignment */ + err_code = + ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_GET_MEM_INFO_ALIGNMENT, i, &ui_alignment); + + RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_MEM_INFO_ALIGNMENT"); + + /* Get memory type */ + err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_GET_MEM_INFO_TYPE, i, &ui_type); + RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_MEM_INFO_TYPE"); + + pv_alloc_ptr = memalign(4, ui_size); + if (pv_alloc_ptr == nullptr) { + ALOGE(" Cannot create requested memory %d", ui_size); + return IA_FATAL_ERROR; + } + mDrcMemoryVec.push(pv_alloc_ptr); + + /* Set the buffer pointer */ + err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_MEM_PTR, i, pv_alloc_ptr); + + RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_MEM_PTR"); + } + + WORD32 ui_size; + ui_size = 8192 * 2; + + mDrcInBuf = (int8_t*)memalign(4, ui_size); + if (mDrcInBuf == nullptr) { + ALOGE(" Cannot create requested memory %d", ui_size); + return IA_FATAL_ERROR; + } + mDrcMemoryVec.push(mDrcInBuf); + + err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_MEM_PTR, 2, mDrcInBuf); + RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_MEM_PTR"); + + mDrcOutBuf = (int8_t*)memalign(4, ui_size); + if (mDrcOutBuf == nullptr) { + ALOGE(" Cannot create requested memory %d", ui_size); + return IA_FATAL_ERROR; + } + mDrcMemoryVec.push(mDrcOutBuf); + + err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_MEM_PTR, 3, mDrcOutBuf); + RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_MEM_PTR"); + + return IA_NO_ERROR; +} +int C2SoftXaacDec::configMPEGDDrc() { + IA_ERRORCODE err_code = IA_NO_ERROR; + int i_effect_type; + int i_loud_norm; + int i_target_loudness; + unsigned int i_sbr_mode; + + /* Sampling Frequency */ + err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM, + IA_DRC_DEC_CONFIG_PARAM_SAMP_FREQ, &mSampFreq); + RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_PARAM_SAMP_FREQ"); + /* Total Number of Channels */ + err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM, + IA_DRC_DEC_CONFIG_PARAM_NUM_CHANNELS, &mNumChannels); + RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_PARAM_NUM_CHANNELS"); + + /* PCM word size */ + err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM, + IA_DRC_DEC_CONFIG_PARAM_PCM_WDSZ, &mPcmWdSz); + RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_PARAM_PCM_WDSZ"); + + /*Set Effect Type*/ + + err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM, + IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_EFFECT_TYPE, &i_effect_type); + RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_EFFECT_TYPE"); + + err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM, + IA_DRC_DEC_CONFIG_DRC_EFFECT_TYPE, &i_effect_type); + RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_DRC_EFFECT_TYPE"); + + /*Set target loudness */ + err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM, + IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_TARGET_LOUDNESS, + &i_target_loudness); + 
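+    // Note: like the DRC effect type above, the target loudness and the
+    // loudness-normalization flag are read back from the AAC decoder below and
+    // mirrored into the standalone MPEG-D DRC module, so both processing stages
+    // operate on the same configuration.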
RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_TARGET_LOUDNESS"); + + err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM, + IA_DRC_DEC_CONFIG_DRC_TARGET_LOUDNESS, &i_target_loudness); + RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_DRC_TARGET_LOUDNESS"); + + /*Set loud_norm_flag*/ + err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM, + IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_LOUD_NORM, &i_loud_norm); + RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_LOUD_NORM"); + + err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM, + IA_DRC_DEC_CONFIG_DRC_LOUD_NORM, &i_loud_norm); + RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_DRC_LOUD_NORM"); + + err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM, + IA_ENHAACPLUS_DEC_CONFIG_PARAM_SBR_MODE, &i_sbr_mode); + RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_SBR_MODE"); + + err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_INIT, + IA_CMD_TYPE_INIT_API_POST_CONFIG_PARAMS, nullptr); + + RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_API_POST_CONFIG_PARAMS"); + + /* Free any memory that is allocated for MPEG D Drc so far */ + deInitMPEGDDDrc(); + + err_code = initMPEGDDDrc(); + if (err_code != IA_NO_ERROR) { + ALOGE("initMPEGDDDrc failed with error %d", err_code); + deInitMPEGDDDrc(); + return err_code; + } + + /* DRC buffers + buf[0] - contains extension element pay load loudness related + buf[1] - contains extension element pay load*/ + { + VOID* p_array[2][16]; + WORD32 ii; + WORD32 buf_sizes[2][16]; + WORD32 num_elements; + WORD32 num_config_ext; + WORD32 bit_str_fmt = 1; + + WORD32 uo_num_chan; + + memset(buf_sizes, 0, 32 * sizeof(WORD32)); + + err_code = + ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM, + IA_ENHAACPLUS_DEC_CONFIG_EXT_ELE_BUF_SIZES, &buf_sizes[0][0]); + RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_EXT_ELE_BUF_SIZES"); + + err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM, + IA_ENHAACPLUS_DEC_CONFIG_EXT_ELE_PTR, &p_array); + RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_EXT_ELE_PTR"); + + err_code = + ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_INIT, IA_CMD_TYPE_INIT_SET_BUFF_PTR, nullptr); + RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_SET_BUFF_PTR"); + + err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM, + IA_ENHAACPLUS_DEC_CONFIG_NUM_ELE, &num_elements); + RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_NUM_ELE"); + + err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM, + IA_ENHAACPLUS_DEC_CONFIG_NUM_CONFIG_EXT, &num_config_ext); + RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_NUM_CONFIG_EXT"); + + for (ii = 0; ii < num_config_ext; ii++) { + /*copy loudness bitstream*/ + if (buf_sizes[0][ii] > 0) { + memcpy(mDrcInBuf, p_array[0][ii], buf_sizes[0][ii]); + + /*Set bitstream_split_format */ + err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM, + IA_DRC_DEC_CONFIG_PARAM_BITS_FORMAT, &bit_str_fmt); + RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_PARAM_BITS_FORMAT"); + + /* Set number of bytes to be processed */ + err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_INPUT_BYTES_IL_BS, 0, + &buf_sizes[0][ii]); + RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_INPUT_BYTES_IL_BS"); + + /* Execute process */ + err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_INIT, + IA_CMD_TYPE_INIT_CPY_IL_BSF_BUFF, nullptr); + RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_CPY_IL_BSF_BUFF"); + + mDRCFlag = 1; 
+ } + } + + for (ii = 0; ii < num_elements; ii++) { + /*copy config bitstream*/ + if (buf_sizes[1][ii] > 0) { + memcpy(mDrcInBuf, p_array[1][ii], buf_sizes[1][ii]); + /* Set number of bytes to be processed */ + + /*Set bitstream_split_format */ + err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM, + IA_DRC_DEC_CONFIG_PARAM_BITS_FORMAT, &bit_str_fmt); + RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_PARAM_BITS_FORMAT"); + + err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_INPUT_BYTES_IC_BS, 0, + &buf_sizes[1][ii]); + RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_INPUT_BYTES_IC_BS"); + + /* Execute process */ + err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_INIT, + IA_CMD_TYPE_INIT_CPY_IC_BSF_BUFF, nullptr); + + RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_CPY_IC_BSF_BUFF"); + + mDRCFlag = 1; + } + } + + if (mDRCFlag == 1) { + mMpegDDRCPresent = 1; + } else { + mMpegDDRCPresent = 0; + } + + /*Read interface buffer config file bitstream*/ + if (mMpegDDRCPresent == 1) { + WORD32 interface_is_present = 1; + + if (i_sbr_mode != 0) { + if (i_sbr_mode == 1) { + mOutputFrameLength = 2048; + } else if (i_sbr_mode == 3) { + mOutputFrameLength = 4096; + } else { + mOutputFrameLength = 1024; + } + } else { + mOutputFrameLength = 4096; + } + + err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM, + IA_DRC_DEC_CONFIG_PARAM_FRAME_SIZE, (WORD32 *)&mOutputFrameLength); + RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_PARAM_FRAME_SIZE"); + + err_code = + ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM, + IA_DRC_DEC_CONFIG_PARAM_INT_PRESENT, &interface_is_present); + RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_PARAM_INT_PRESENT"); + + /* Execute process */ + err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_INIT, + IA_CMD_TYPE_INIT_CPY_IN_BSF_BUFF, nullptr); + RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_CPY_IN_BSF_BUFF"); + + err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_INIT, + IA_CMD_TYPE_INIT_PROCESS, nullptr); + RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_INIT_PROCESS"); + + err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_GET_CONFIG_PARAM, + IA_DRC_DEC_CONFIG_PARAM_NUM_CHANNELS, &uo_num_chan); + RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_PARAM_NUM_CHANNELS"); + } + } + + return err_code; +} + +IA_ERRORCODE C2SoftXaacDec::decodeXAACStream(uint8_t* inBuffer, + uint32_t inBufferLength, + int32_t* bytesConsumed, + int32_t* outBytes) { + if (mInputBufferSize < inBufferLength) { + ALOGE("Cannot config AAC, input buffer size %d < inBufferLength %d", mInputBufferSize, inBufferLength); + return -1; + } + /* Copy the buffer passed by Android plugin to codec input buffer */ + memcpy(mInputBuffer, inBuffer, inBufferLength); + + /* Set number of bytes to be processed */ + IA_ERRORCODE err_code = ixheaacd_dec_api(mXheaacCodecHandle, + IA_API_CMD_SET_INPUT_BYTES, + 0, + &inBufferLength); + RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_INPUT_BYTES"); + + /* Execute process */ + err_code = ixheaacd_dec_api(mXheaacCodecHandle, + IA_API_CMD_EXECUTE, + IA_CMD_TYPE_DO_EXECUTE, + nullptr); + RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_DO_EXECUTE"); + + /* Checking for end of processing */ + uint32_t ui_exec_done; + err_code = ixheaacd_dec_api(mXheaacCodecHandle, + IA_API_CMD_EXECUTE, + IA_CMD_TYPE_DONE_QUERY, + &ui_exec_done); + RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_DONE_QUERY"); + + if (ui_exec_done != 1) { + VOID* p_array; // ITTIAM:buffer to handle gain payload + WORD32 buf_size = 0; // ITTIAM:gain payload length + WORD32 bit_str_fmt = 1; + WORD32 
gain_stream_flag = 1; + + err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM, + IA_ENHAACPLUS_DEC_CONFIG_GAIN_PAYLOAD_LEN, &buf_size); + RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_GAIN_PAYLOAD_LEN"); + + err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM, + IA_ENHAACPLUS_DEC_CONFIG_GAIN_PAYLOAD_BUF, &p_array); + RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_GAIN_PAYLOAD_BUF"); + + if (buf_size > 0) { + /*Set bitstream_split_format */ + err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM, + IA_DRC_DEC_CONFIG_PARAM_BITS_FORMAT, &bit_str_fmt); + RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_PARAM_BITS_FORMAT"); + + memcpy(mDrcInBuf, p_array, buf_size); + /* Set number of bytes to be processed */ + err_code = + ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_INPUT_BYTES_BS, 0, &buf_size); + RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_PARAM_BITS_FORMAT"); + + err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM, + IA_DRC_DEC_CONFIG_GAIN_STREAM_FLAG, &gain_stream_flag); + RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_PARAM_BITS_FORMAT"); + + /* Execute process */ + err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_INIT, + IA_CMD_TYPE_INIT_CPY_BSF_BUFF, nullptr); + RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_PARAM_BITS_FORMAT"); + + mMpegDDRCPresent = 1; + } + } + + /* How much buffer is used in input buffers */ + err_code = ixheaacd_dec_api(mXheaacCodecHandle, + IA_API_CMD_GET_CURIDX_INPUT_BUF, + 0, + bytesConsumed); + RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_CURIDX_INPUT_BUF"); + + /* Get the output bytes */ + err_code = ixheaacd_dec_api(mXheaacCodecHandle, + IA_API_CMD_GET_OUTPUT_BYTES, + 0, + outBytes); + RETURN_IF_FATAL(err_code, "IA_API_CMD_GET_OUTPUT_BYTES"); + + if (mMpegDDRCPresent == 1) { + memcpy(mDrcInBuf, mOutputBuffer, *outBytes); + err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_INPUT_BYTES, 0, outBytes); + RETURN_IF_FATAL(err_code, "IA_API_CMD_SET_INPUT_BYTES"); + + err_code = + ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_EXECUTE, IA_CMD_TYPE_DO_EXECUTE, nullptr); + RETURN_IF_FATAL(err_code, "IA_CMD_TYPE_DO_EXECUTE"); + + memcpy(mOutputBuffer, mDrcOutBuf, *outBytes); + } + return IA_NO_ERROR; +} + +IA_ERRORCODE C2SoftXaacDec::getXAACStreamInfo() { + IA_ERRORCODE err_code = IA_NO_ERROR; + + /* Sampling frequency */ + err_code = ixheaacd_dec_api(mXheaacCodecHandle, + IA_API_CMD_GET_CONFIG_PARAM, + IA_ENHAACPLUS_DEC_CONFIG_PARAM_SAMP_FREQ, + &mSampFreq); + RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_SAMP_FREQ"); + + /* Total Number of Channels */ + err_code = ixheaacd_dec_api(mXheaacCodecHandle, + IA_API_CMD_GET_CONFIG_PARAM, + IA_ENHAACPLUS_DEC_CONFIG_PARAM_NUM_CHANNELS, + &mNumChannels); + RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_NUM_CHANNELS"); + if (mNumChannels > MAX_CHANNEL_COUNT) { + ALOGE(" No of channels are more than max channels\n"); + return IA_FATAL_ERROR; + } + + /* PCM word size */ + err_code = ixheaacd_dec_api(mXheaacCodecHandle, + IA_API_CMD_GET_CONFIG_PARAM, + IA_ENHAACPLUS_DEC_CONFIG_PARAM_PCM_WDSZ, + &mPcmWdSz); + RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_PCM_WDSZ"); + if ((mPcmWdSz / 8) != 2) { + ALOGE(" No of channels are more than max channels\n"); + return IA_FATAL_ERROR; + } + + /* channel mask to tell the arrangement of channels in bit stream */ + err_code = ixheaacd_dec_api(mXheaacCodecHandle, + IA_API_CMD_GET_CONFIG_PARAM, + IA_ENHAACPLUS_DEC_CONFIG_PARAM_CHANNEL_MASK, + &mChannelMask); + 
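+    // The channel mode and SBR mode queried next are only logged, except that
+    // the SBR mode also determines mOutputFrameLength at the end of this
+    // function (1024 samples per channel for plain AAC, doubled when SBR is
+    // present).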
RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_CHANNEL_MASK"); + + /* Channel mode to tell MONO/STEREO/DUAL-MONO/NONE_OF_THESE */ + uint32_t ui_channel_mode; + err_code = ixheaacd_dec_api(mXheaacCodecHandle, + IA_API_CMD_GET_CONFIG_PARAM, + IA_ENHAACPLUS_DEC_CONFIG_PARAM_CHANNEL_MODE, + &ui_channel_mode); + RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_CHANNEL_MODE"); + if (ui_channel_mode == 0) + ALOGV("Channel Mode: MONO_OR_PS\n"); + else if (ui_channel_mode == 1) + ALOGV("Channel Mode: STEREO\n"); + else if (ui_channel_mode == 2) + ALOGV("Channel Mode: DUAL-MONO\n"); + else + ALOGV("Channel Mode: NONE_OF_THESE or MULTICHANNEL\n"); + + /* Channel mode to tell SBR PRESENT/NOT_PRESENT */ + uint32_t ui_sbr_mode; + err_code = ixheaacd_dec_api(mXheaacCodecHandle, + IA_API_CMD_GET_CONFIG_PARAM, + IA_ENHAACPLUS_DEC_CONFIG_PARAM_SBR_MODE, + &ui_sbr_mode); + RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_SBR_MODE"); + if (ui_sbr_mode == 0) + ALOGV("SBR Mode: NOT_PRESENT\n"); + else if (ui_sbr_mode == 1) + ALOGV("SBR Mode: PRESENT\n"); + else + ALOGV("SBR Mode: ILLEGAL\n"); + + /* mOutputFrameLength = 1024 * (1 + SBR_MODE) for AAC */ + /* For USAC it could be 1024 * 3 , support to query */ + /* not yet added in codec */ + mOutputFrameLength = 1024 * (1 + ui_sbr_mode); + ALOGI("mOutputFrameLength %d ui_sbr_mode %d", mOutputFrameLength, ui_sbr_mode); + + return IA_NO_ERROR; +} + +IA_ERRORCODE C2SoftXaacDec::setXAACDRCInfo(int32_t drcCut, int32_t drcBoost, + int32_t drcRefLevel, + int32_t drcHeavyCompression, + int32_t drEffectType) { + IA_ERRORCODE err_code = IA_NO_ERROR; + + int32_t ui_drc_enable = 1; + err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_SET_CONFIG_PARAM, + IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_ENABLE, + &ui_drc_enable); + RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_ENABLE"); + if (drcCut != -1) { + err_code = + ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_SET_CONFIG_PARAM, + IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_CUT, &drcCut); + RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_CUT"); + } + + if (drcBoost != -1) { + err_code = ixheaacd_dec_api( + mXheaacCodecHandle, IA_API_CMD_SET_CONFIG_PARAM, + IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_BOOST, &drcBoost); + RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_BOOST"); + } + + if (drcRefLevel != -1) { + err_code = ixheaacd_dec_api( + mXheaacCodecHandle, IA_API_CMD_SET_CONFIG_PARAM, + IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_TARGET_LEVEL, &drcRefLevel); + RETURN_IF_FATAL(err_code, + "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_TARGET_LEVEL"); + } + + if (drcRefLevel != -1) { + err_code = ixheaacd_dec_api( + mXheaacCodecHandle, IA_API_CMD_SET_CONFIG_PARAM, + IA_ENHAACPLUS_DEC_DRC_TARGET_LOUDNESS, &drcRefLevel); + RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_DRC_TARGET_LOUDNESS"); + } + + if (drcHeavyCompression != -1) { + err_code = + ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_SET_CONFIG_PARAM, + IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_HEAVY_COMP, + &drcHeavyCompression); + RETURN_IF_FATAL(err_code, + "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_HEAVY_COMP"); + } + + err_code = + ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_SET_CONFIG_PARAM, + IA_ENHAACPLUS_DEC_DRC_EFFECT_TYPE, &drEffectType); + RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_DRC_EFFECT_TYPE"); + + int32_t i_effect_type, i_target_loudness, i_loud_norm; + /*Set Effect Type*/ + err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM, + IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_EFFECT_TYPE, + &i_effect_type); + 
RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_EFFECT_TYPE"); + + err_code = + ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM, + IA_DRC_DEC_CONFIG_DRC_EFFECT_TYPE, &i_effect_type); + + RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_DRC_EFFECT_TYPE"); + + /*Set target loudness */ + err_code = ixheaacd_dec_api( + mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM, + IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_TARGET_LOUDNESS, &i_target_loudness); + RETURN_IF_FATAL(err_code, + "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_TARGET_LOUDNESS"); + + err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM, + IA_DRC_DEC_CONFIG_DRC_TARGET_LOUDNESS, + &i_target_loudness); + RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_DRC_TARGET_LOUDNESS"); + + /*Set loud_norm_flag*/ + err_code = ixheaacd_dec_api(mXheaacCodecHandle, IA_API_CMD_GET_CONFIG_PARAM, + IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_LOUD_NORM, + &i_loud_norm); + RETURN_IF_FATAL(err_code, "IA_ENHAACPLUS_DEC_CONFIG_PARAM_DRC_LOUD_NORM"); + + err_code = ia_drc_dec_api(mMpegDDrcHandle, IA_API_CMD_SET_CONFIG_PARAM, + IA_DRC_DEC_CONFIG_DRC_LOUD_NORM, &i_loud_norm); + + RETURN_IF_FATAL(err_code, "IA_DRC_DEC_CONFIG_DRC_LOUD_NORM"); + + return IA_NO_ERROR; +} + +class C2SoftXaacDecFactory : public C2ComponentFactory { +public: + C2SoftXaacDecFactory() : mHelper(std::static_pointer_cast( + GetCodec2PlatformComponentStore()->getParamReflector())) { + } + + virtual c2_status_t createComponent( + c2_node_id_t id, + std::shared_ptr* const component, + std::function deleter) override { + *component = std::shared_ptr( + new C2SoftXaacDec(COMPONENT_NAME, + id, + std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual c2_status_t createInterface( + c2_node_id_t id, + std::shared_ptr* const interface, + std::function deleter) override { + *interface = std::shared_ptr( + new SimpleInterface( + COMPONENT_NAME, id, std::make_shared(mHelper)), + deleter); + return C2_OK; + } + + virtual ~C2SoftXaacDecFactory() override = default; + +private: + std::shared_ptr mHelper; +}; + +} // namespace android + +extern "C" ::C2ComponentFactory* CreateCodec2Factory() { + ALOGV("in %s", __func__); + return new ::android::C2SoftXaacDecFactory(); +} + +extern "C" void DestroyCodec2Factory(::C2ComponentFactory* factory) { + ALOGV("in %s", __func__); + delete factory; +} diff --git a/media/codec2/components/xaac/C2SoftXaacDec.h b/media/codec2/components/xaac/C2SoftXaacDec.h new file mode 100644 index 0000000000000000000000000000000000000000..5c8567fdc753bfc83dd21c2e0e8d2d05824a24e0 --- /dev/null +++ b/media/codec2/components/xaac/C2SoftXaacDec.h @@ -0,0 +1,131 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_C2_SOFT_XAAC_DEC_H_ +#define ANDROID_C2_SOFT_XAAC_DEC_H_ +#include +#include + +#include "ixheaacd_type_def.h" +#include "ixheaacd_error_standards.h" +#include "ixheaacd_error_handler.h" +#include "ixheaacd_apicmd_standards.h" +#include "ixheaacd_memory_standards.h" +#include "ixheaacd_aac_config.h" + +#include "impd_apicmd_standards.h" +#include "impd_drc_config_params.h" + +#define MAX_CHANNEL_COUNT 8 /* maximum number of audio channels that can be decoded */ +#define MAX_NUM_BLOCKS 8 /* maximum number of audio blocks that can be decoded */ + +extern "C" IA_ERRORCODE ixheaacd_dec_api(pVOID p_ia_module_obj, + WORD32 i_cmd, WORD32 i_idx, pVOID pv_value); +extern "C" IA_ERRORCODE ia_drc_dec_api(pVOID p_ia_module_obj, + WORD32 i_cmd, WORD32 i_idx, pVOID pv_value); +extern "C" IA_ERRORCODE ixheaacd_get_config_param(pVOID p_ia_process_api_obj, + pWORD32 pi_samp_freq, + pWORD32 pi_num_chan, + pWORD32 pi_pcm_wd_sz, + pWORD32 pi_channel_mask); + +namespace android { + +struct C2SoftXaacDec : public SimpleC2Component { + class IntfImpl; + + C2SoftXaacDec(const char* name, c2_node_id_t id, + const std::shared_ptr& intfImpl); + virtual ~C2SoftXaacDec(); + + // From SimpleC2Component + c2_status_t onInit() override; + c2_status_t onStop() override; + void onReset() override; + void onRelease() override; + c2_status_t onFlush_sm() override; + void process( + const std::unique_ptr &work, + const std::shared_ptr &pool) override; + c2_status_t drain( + uint32_t drainMode, + const std::shared_ptr &pool) override; + +private: + enum { + kOutputDrainBufferSize = 2048 * MAX_CHANNEL_COUNT * MAX_NUM_BLOCKS, + }; + + std::shared_ptr mIntf; + void* mXheaacCodecHandle; + void* mMpegDDrcHandle; + uint32_t mInputBufferSize; + uint32_t mOutputFrameLength; + int8_t* mInputBuffer; + int8_t* mOutputBuffer; + int32_t mSampFreq; + int32_t mNumChannels; + int32_t mPcmWdSz; + int32_t mChannelMask; + int32_t mNumOutBytes; + uint64_t mCurFrameIndex; + uint64_t mCurTimestamp; + bool mIsCodecInitialized; + bool mIsCodecConfigFlushRequired; + int8_t* mDrcInBuf; + int8_t* mDrcOutBuf; + int32_t mMpegDDRCPresent; + int32_t mDRCFlag; + + Vector mMemoryVec; + Vector mDrcMemoryVec; + + size_t mInputBufferCount __unused; + size_t mOutputBufferCount __unused; + bool mSignalledOutputEos; + bool mSignalledError; + char* mOutputDrainBuffer; + uint32_t mOutputDrainBufferWritePos; + + IA_ERRORCODE initDecoder(); + IA_ERRORCODE setDrcParameter(); + IA_ERRORCODE configflushDecode(); + IA_ERRORCODE drainDecoder(); + void finishWork(const std::unique_ptr& work, + const std::shared_ptr& pool); + + IA_ERRORCODE initXAACDrc(); + IA_ERRORCODE initXAACDecoder(); + IA_ERRORCODE deInitXAACDecoder(); + IA_ERRORCODE initMPEGDDDrc(); + IA_ERRORCODE deInitMPEGDDDrc(); + IA_ERRORCODE configXAACDecoder(uint8_t* inBuffer, uint32_t inBufferLength); + int configMPEGDDrc(); + IA_ERRORCODE decodeXAACStream(uint8_t* inBuffer, + uint32_t inBufferLength, + int32_t* bytesConsumed, + int32_t* outBytes); + IA_ERRORCODE getXAACStreamInfo(); + IA_ERRORCODE setXAACDRCInfo(int32_t drcCut, int32_t drcBoost, + int32_t drcRefLevel, int32_t drcHeavyCompression, + int32_t drEffectType); + + C2_DO_NOT_COPY(C2SoftXaacDec); +}; + +} // namespace android + +#endif // C2_SOFT_XAAC_H_ diff --git a/media/codec2/docs/doxyfilter.sh b/media/codec2/docs/doxyfilter.sh new file mode 100755 index 0000000000000000000000000000000000000000..d813153a4d496dd1ee10e73c18b3366c15b816d0 --- /dev/null +++ b/media/codec2/docs/doxyfilter.sh @@ -0,0 +1,100 @@ +#!/usr/bin/env 
python3 +import re, sys + +global in_comment, current, indent, hold +in_comment, current, indent, hold = False, None, '', [] + +class ChangeCStyleCommentsToDoxy: + def dump_hold(): + global hold + for h in hold: + print(h, end='') + hold[:] = [] + + def doxy_hold(): + global current, hold + if current == '//': + for h in hold: + print(re.sub(r'^( *//(?!/))', r'\1/', h), end='') + else: + first = True + for h in hold: + if first: + h = re.sub(r'^( */[*](?![*]))', r'\1*', h) + first = False + print(h, end='') + hold[:] = [] + + def process_comment(t, ind, line): + global current, indent, hold + if t != current or ind not in (indent, indent + ' '): + dump_hold() + current, indent = t, ind + hold.append(line) + + def process_line(ind, line): + global current, indent + if ind in (indent, ''): + doxy_hold() + else: + dump_hold() + current, indent = None, None + print(line, end='') + + def process(self, input, path): + for line in input: + ind = re.match(r'^( *)', line).group(1) + if in_comment: + # TODO: this is not quite right, but good enough + m = re.match(r'^ *[*]/', line) + if m: + process_comment('/*', ind, line) + in_comment = False + else: + process_comment('/*', ind, line) + continue + m = re.match(r'^ *//', line) + if m: + # one-line comment + process_comment('//', ind, line) + continue + m = re.match(r'^ */[*]', line) + if m: + # multi-line comment + process_comment('/*', ind, line) + # TODO: this is not quite right, but good enough + in_comment = not re.match(r'^ *[*]/', line) + continue + process_line(ind, line) + +class AutoGroup: + def process(self, input, path): + if '/codec2/include/' in path: + group = 'API Codec2 API' + elif False: + return + elif '/codec2/vndk/' in path: + group = 'VNDK Platform provided glue' + elif '/codec2/tests/' in path: + group = 'Tests Unit tests' + else: + group = 'Random Misc. sandbox' + + print('#undef __APPLE__') + + for line in input: + if re.match(r'^namespace android {', line): + print(line, end='') + print() + print(r'/// \addtogroup {}'.format(group)) + print(r'/// @{') + continue + elif re.match(r'^} +// +namespace', line): + print(r'/// @}') + print() + print(line, end='') + +P = AutoGroup() +for path in sys.argv[1:]: + with open(path, 'rt') as input: + P.process(input, path) diff --git a/media/codec2/docs/doxygen.config b/media/codec2/docs/doxygen.config new file mode 100644 index 0000000000000000000000000000000000000000..5c3bea34bc7bac1df54e7283f0af202d7951e0b1 --- /dev/null +++ b/media/codec2/docs/doxygen.config @@ -0,0 +1,2446 @@ +# Doxyfile 1.8.11 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project. +# +# All text after a double hash (##) is considered a comment and is placed in +# front of the TAG it is preceding. +# +# All text after a single hash (#) is considered a comment and will be ignored. +# The format is: +# TAG = value [value, ...] +# For lists, items can also be appended using: +# TAG += value [value, ...] +# Values that contain spaces should be placed between quotes (\" \"). + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all text +# before the first occurrence of this tag. Doxygen uses libiconv (or the iconv +# built into libc) for the transcoding. 
See http://www.gnu.org/software/libiconv +# for the list of possible encodings. +# The default value is: UTF-8. + +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by +# double-quotes, unless you are using Doxywizard) that should identify the +# project for which the documentation is generated. This name is used in the +# title of most generated pages and in a few other places. +# The default value is: My Project. + +PROJECT_NAME = Codec2 + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. This +# could be handy for archiving the generated documentation or if some version +# control system is used. + +PROJECT_NUMBER = + +# Using the PROJECT_BRIEF tag one can provide an optional one line description +# for a project that appears at the top of each page and should give viewer a +# quick idea about the purpose of the project. Keep the description short. + +PROJECT_BRIEF = + +# With the PROJECT_LOGO tag one can specify a logo or an icon that is included +# in the documentation. The maximum height of the logo should not exceed 55 +# pixels and the maximum width should not exceed 200 pixels. Doxygen will copy +# the logo to the output directory. + +PROJECT_LOGO = + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path +# into which the generated documentation will be written. If a relative path is +# entered, it will be relative to the location where doxygen was started. If +# left blank the current directory will be used. + +OUTPUT_DIRECTORY = out/target/common/docs/codec2/api + +# If the CREATE_SUBDIRS tag is set to YES then doxygen will create 4096 sub- +# directories (in 2 levels) under the output directory of each output format and +# will distribute the generated files over these directories. Enabling this +# option can be useful when feeding doxygen a huge amount of source files, where +# putting all generated files in the same directory would otherwise causes +# performance problems for the file system. +# The default value is: NO. + +CREATE_SUBDIRS = NO + +# If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII +# characters to appear in the names of generated files. If set to NO, non-ASCII +# characters will be escaped, for example _xE3_x81_x84 will be used for Unicode +# U+3044. +# The default value is: NO. + +ALLOW_UNICODE_NAMES = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese, +# Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States), +# Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian, +# Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages), +# Korean, Korean-en (Korean with English messages), Latvian, Lithuanian, +# Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian, +# Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish, +# Ukrainian and Vietnamese. +# The default value is: English. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member +# descriptions after the members that are listed in the file and class +# documentation (similar to Javadoc). Set to NO to disable this. +# The default value is: YES. 
+ +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES, doxygen will prepend the brief +# description of a member or function before the detailed description +# +# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. +# The default value is: YES. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator that is +# used to form the text in various listings. Each string in this list, if found +# as the leading text of the brief description, will be stripped from the text +# and the result, after processing the whole list, is used as the annotated +# text. Otherwise, the brief description is used as-is. If left blank, the +# following values are used ($name is automatically replaced with the name of +# the entity):The $name class, The $name widget, The $name file, is, provides, +# specifies, contains, represents, a, an and the. + +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# doxygen will generate a detailed section even if there is only a brief +# description. +# The default value is: NO. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. +# The default value is: NO. + +INLINE_INHERITED_MEMB = YES + +# If the FULL_PATH_NAMES tag is set to YES, doxygen will prepend the full path +# before files name in the file list and in the header files. If set to NO the +# shortest path that makes the file name unique will be used +# The default value is: YES. + +FULL_PATH_NAMES = YES + +# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path. +# Stripping is only done if one of the specified strings matches the left-hand +# part of the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the path to +# strip. +# +# Note that you can specify absolute paths here, but also relative paths, which +# will be relative from the directory where doxygen is started. +# This tag requires that the tag FULL_PATH_NAMES is set to YES. + +STRIP_FROM_PATH = frameworks/av/media/libstagefright/codec2 + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the +# path mentioned in the documentation of a class, which tells the reader which +# header file to include in order to use a class. If left blank only the name of +# the header file containing the class definition is used. Otherwise one should +# specify the list of include paths that are normally passed to the compiler +# using the -I flag. + +STRIP_FROM_INC_PATH = + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but +# less readable) file names. This can be useful is your file systems doesn't +# support long names like on DOS, Mac, or CD-ROM. +# The default value is: NO. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the +# first line (until the first dot) of a Javadoc-style comment as the brief +# description. 
If set to NO, the Javadoc-style will behave just like regular Qt- +# style comments (thus requiring an explicit @brief command for a brief +# description.) +# The default value is: NO. + +JAVADOC_AUTOBRIEF = YES + +# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first +# line (until the first dot) of a Qt-style comment as the brief description. If +# set to NO, the Qt-style will behave just like regular Qt-style comments (thus +# requiring an explicit \brief command for a brief description.) +# The default value is: NO. + +QT_AUTOBRIEF = YES + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a +# multi-line C++ special comment block (i.e. a block of //! or /// comments) as +# a brief description. This used to be the default behavior. The new default is +# to treat a multi-line C++ comment block as a detailed description. Set this +# tag to YES if you prefer the old behavior instead. +# +# Note that setting this tag to YES also means that rational rose comments are +# not recognized any more. +# The default value is: NO. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the +# documentation from any documented member that it re-implements. +# The default value is: YES. + +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES then doxygen will produce a new +# page for each member. If set to NO, the documentation of a member will be part +# of the file/class/namespace that contains it. +# The default value is: NO. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen +# uses this value to replace tabs by spaces in code fragments. +# Minimum value: 1, maximum value: 16, default value: 4. + +TAB_SIZE = 4 + +# This tag can be used to specify a number of aliases that act as commands in +# the documentation. An alias has the form: +# name=value +# For example adding +# "sideeffect=@par Side Effects:\n" +# will allow you to put the command \sideeffect (or @sideeffect) in the +# documentation, which will result in a user-defined paragraph with heading +# "Side Effects:". You can put \n's in the value part of an alias to insert +# newlines. + +ALIASES = + +# This tag can be used to specify a number of word-keyword mappings (TCL only). +# A mapping has the form "name=value". For example adding "class=itcl::class" +# will allow you to use the command class in the itcl::class meaning. + +TCL_SUBST = + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources +# only. Doxygen will then generate output that is more tailored for C. For +# instance, some of the names that are used will be different. The list of all +# members will be omitted, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or +# Python sources only. Doxygen will then generate output that is more tailored +# for that language. For instance, namespaces will be presented as packages, +# qualified scopes will look different, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources. Doxygen will then generate output that is tailored for Fortran. +# The default value is: NO. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for VHDL. 
+# The default value is: NO. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it +# parses. With this tag you can assign which parser to use for a given +# extension. Doxygen has a built-in mapping, but you can override or extend it +# using this tag. The format is ext=language, where ext is a file extension, and +# language is one of the parsers supported by doxygen: IDL, Java, Javascript, +# C#, C, C++, D, PHP, Objective-C, Python, Fortran (fixed format Fortran: +# FortranFixed, free formatted Fortran: FortranFree, unknown formatted Fortran: +# Fortran. In the later case the parser tries to guess whether the code is fixed +# or free formatted code, this is the default for Fortran type files), VHDL. For +# instance to make doxygen treat .inc files as Fortran files (default is PHP), +# and .f files as C (default is Fortran), use: inc=Fortran f=C. +# +# Note: For files without extension you can use no_extension as a placeholder. +# +# Note that for custom extensions you also need to set FILE_PATTERNS otherwise +# the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments +# according to the Markdown format, which allows for more readable +# documentation. See http://daringfireball.net/projects/markdown/ for details. +# The output of markdown processing is further processed by doxygen, so you can +# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in +# case of backward compatibilities issues. +# The default value is: YES. + +MARKDOWN_SUPPORT = YES + +# When enabled doxygen tries to link words that correspond to documented +# classes, or namespaces to their corresponding documentation. Such a link can +# be prevented in individual cases by putting a % sign in front of the word or +# globally by setting AUTOLINK_SUPPORT to NO. +# The default value is: YES. + +AUTOLINK_SUPPORT = YES + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should set this +# tag to YES in order to let doxygen match functions declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); +# versus func(std::string) {}). This also make the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. +# The default value is: NO. + +BUILTIN_STL_SUPPORT = YES + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. +# The default value is: NO. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip (see: +# http://www.riverbankcomputing.co.uk/software/sip/intro) sources only. Doxygen +# will parse them like normal C++ but will assume all classes use public instead +# of private inheritance when no explicit protection keyword is present. +# The default value is: NO. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate +# getter and setter methods for a property. Setting this option to YES will make +# doxygen to replace the get and set methods by a property in the documentation. +# This will only work if the methods are indeed getting or setting a simple +# type. If this is not the case, or you want to show the methods anyway, you +# should set this option to NO. +# The default value is: YES. 
+ +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. +# The default value is: NO. + +DISTRIBUTE_GROUP_DOC = NO + +# If one adds a struct or class to a group and this option is enabled, then also +# any nested class or struct is added to the same group. By default this option +# is disabled and one has to add nested compounds explicitly via \ingroup. +# The default value is: NO. + +GROUP_NESTED_COMPOUNDS = NO + +# Set the SUBGROUPING tag to YES to allow class member groups of the same type +# (for instance a group of public functions) to be put as a subgroup of that +# type (e.g. under the Public Functions section). Set it to NO to prevent +# subgrouping. Alternatively, this can be done per class using the +# \nosubgrouping command. +# The default value is: YES. + +SUBGROUPING = YES + +# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions +# are shown inside the group in which they are included (e.g. using \ingroup) +# instead of on a separate page (for HTML and Man pages) or section (for LaTeX +# and RTF). +# +# Note that this feature does not work in combination with +# SEPARATE_MEMBER_PAGES. +# The default value is: NO. + +INLINE_GROUPED_CLASSES = NO + +# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions +# with only public data fields or simple typedef fields will be shown inline in +# the documentation of the scope in which they are defined (i.e. file, +# namespace, or group documentation), provided this scope is documented. If set +# to NO, structs, classes, and unions are shown on a separate page (for HTML and +# Man pages) or section (for LaTeX and RTF). +# The default value is: NO. + +INLINE_SIMPLE_STRUCTS = NO + +# When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or +# enum is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT, will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically be +# useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. +# The default value is: NO. + +TYPEDEF_HIDES_STRUCT = YES + +# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This +# cache is used to resolve symbols given their name and scope. Since this can be +# an expensive process and often the same symbol appears multiple times in the +# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small +# doxygen will become slower. If the cache is too large, memory is wasted. The +# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range +# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536 +# symbols. At the end of a run doxygen will report the cache usage and suggest +# the optimal cache size from a speed point of view. +# Minimum value: 0, maximum value: 9, default value: 0. 
+ +LOOKUP_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES, doxygen will assume all entities in +# documentation are documented, even if no documentation was available. Private +# class members and static file members will be hidden unless the +# EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES. +# Note: This will also disable the warnings about undocumented members that are +# normally produced when WARNINGS is set to YES. +# The default value is: NO. + +EXTRACT_ALL = YES + +# If the EXTRACT_PRIVATE tag is set to YES, all private members of a class will +# be included in the documentation. +# The default value is: NO. + +EXTRACT_PRIVATE = NO + +# If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal +# scope will be included in the documentation. +# The default value is: NO. + +EXTRACT_PACKAGE = NO + +# If the EXTRACT_STATIC tag is set to YES, all static members of a file will be +# included in the documentation. +# The default value is: NO. + +EXTRACT_STATIC = NO + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES, classes (and structs) defined +# locally in source files will be included in the documentation. If set to NO, +# only classes defined in header files are included. Does not have any effect +# for Java sources. +# The default value is: YES. + +EXTRACT_LOCAL_CLASSES = YES + +# This flag is only useful for Objective-C code. If set to YES, local methods, +# which are defined in the implementation section but not in the interface are +# included in the documentation. If set to NO, only methods in the interface are +# included. +# The default value is: NO. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base name of +# the file that contains the anonymous namespace. By default anonymous namespace +# are hidden. +# The default value is: NO. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all +# undocumented members inside documented classes or files. If set to NO these +# members will be included in the various overviews, but no documentation +# section is generated. This option has no effect if EXTRACT_ALL is enabled. +# The default value is: NO. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. If set +# to NO, these classes will be included in the various overviews. This option +# has no effect if EXTRACT_ALL is enabled. +# The default value is: NO. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend +# (class|struct|union) declarations. If set to NO, these declarations will be +# included in the documentation. +# The default value is: NO. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any +# documentation blocks found inside the body of a function. If set to NO, these +# blocks will be appended to the function's detailed documentation block. +# The default value is: NO. 
+ +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation that is typed after a +# \internal command is included. If the tag is set to NO then the documentation +# will be excluded. Set it to YES to include the internal documentation. +# The default value is: NO. + +INTERNAL_DOCS = YES + +# If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file +# names in lower-case letters. If set to YES, upper-case letters are also +# allowed. This is useful if you have classes or files whose names only differ +# in case and if your file system supports case sensitive file names. Windows +# and Mac users are advised to set this option to NO. +# The default value is: system dependent. + +CASE_SENSE_NAMES = NO + +# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with +# their full class and namespace scopes in the documentation. If set to YES, the +# scope will be hidden. +# The default value is: NO. + +HIDE_SCOPE_NAMES = YES + +# If the HIDE_COMPOUND_REFERENCE tag is set to NO (default) then doxygen will +# append additional text to a page's title, such as Class Reference. If set to +# YES the compound reference will be hidden. +# The default value is: NO. + +HIDE_COMPOUND_REFERENCE= NO + +# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of +# the files that are included by a file in the documentation of that file. +# The default value is: YES. + +SHOW_INCLUDE_FILES = YES + +# If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each +# grouped member an include statement to the documentation, telling the reader +# which file to include in order to use the member. +# The default value is: NO. + +SHOW_GROUPED_MEMB_INC = NO + +# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include +# files with double quotes in the documentation rather than with sharp brackets. +# The default value is: NO. + +FORCE_LOCAL_INCLUDES = NO + +# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the +# documentation for inline members. +# The default value is: YES. + +INLINE_INFO = YES + +# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the +# (detailed) documentation of file and class members alphabetically by member +# name. If set to NO, the members will appear in declaration order. +# The default value is: YES. + +SORT_MEMBER_DOCS = YES + +# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief +# descriptions of file, namespace and class members alphabetically by member +# name. If set to NO, the members will appear in declaration order. Note that +# this will also influence the order of the classes in the class list. +# The default value is: NO. + +SORT_BRIEF_DOCS = NO + +# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the +# (brief and detailed) documentation of class members so that constructors and +# destructors are listed first. If set to NO the constructors will appear in the +# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS. +# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief +# member documentation. +# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting +# detailed member documentation. +# The default value is: NO. + +SORT_MEMBERS_CTORS_1ST = NO + +# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy +# of group names into alphabetical order. 
If set to NO the group names will +# appear in their defined order. +# The default value is: NO. + +SORT_GROUP_NAMES = NO + +# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by +# fully-qualified names, including namespaces. If set to NO, the class list will +# be sorted only by class name, not including the namespace part. +# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. +# Note: This option applies only to the class list, not to the alphabetical +# list. +# The default value is: NO. + +SORT_BY_SCOPE_NAME = NO + +# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper +# type resolution of all parameters of a function it will reject a match between +# the prototype and the implementation of a member function even if there is +# only one candidate or it is obvious which candidate to choose by doing a +# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still +# accept a match between prototype and implementation in such cases. +# The default value is: NO. + +STRICT_PROTO_MATCHING = NO + +# The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the todo +# list. This list is created by putting \todo commands in the documentation. +# The default value is: YES. + +GENERATE_TODOLIST = YES + +# The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the test +# list. This list is created by putting \test commands in the documentation. +# The default value is: YES. + +GENERATE_TESTLIST = YES + +# The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug +# list. This list is created by putting \bug commands in the documentation. +# The default value is: YES. + +GENERATE_BUGLIST = YES + +# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO) +# the deprecated list. This list is created by putting \deprecated commands in +# the documentation. +# The default value is: YES. + +GENERATE_DEPRECATEDLIST= YES + +# The ENABLED_SECTIONS tag can be used to enable conditional documentation +# sections, marked by \if ... \endif and \cond +# ... \endcond blocks. + +ENABLED_SECTIONS = INTERNAL + +# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the +# initial value of a variable or macro / define can have for it to appear in the +# documentation. If the initializer consists of more lines than specified here +# it will be hidden. Use a value of 0 to hide initializers completely. The +# appearance of the value of individual variables and macros / defines can be +# controlled using \showinitializer or \hideinitializer command in the +# documentation regardless of this setting. +# Minimum value: 0, maximum value: 10000, default value: 30. + +MAX_INITIALIZER_LINES = 30 + +# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at +# the bottom of the documentation of classes and structs. If set to YES, the +# list will mention the files that were used to generate the documentation. +# The default value is: YES. + +SHOW_USED_FILES = YES + +# Set the SHOW_FILES tag to NO to disable the generation of the Files page. This +# will remove the Files entry from the Quick Index and from the Folder Tree View +# (if specified). +# The default value is: YES. + +SHOW_FILES = YES + +# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces +# page. This will remove the Namespaces entry from the Quick Index and from the +# Folder Tree View (if specified). +# The default value is: YES. 
+ +SHOW_NAMESPACES = YES + +# The FILE_VERSION_FILTER tag can be used to specify a program or script that +# doxygen should invoke to get the current version for each file (typically from +# the version control system). Doxygen will invoke the program by executing (via +# popen()) the command command input-file, where command is the value of the +# FILE_VERSION_FILTER tag, and input-file is the name of an input file provided +# by doxygen. Whatever the program writes to standard output is used as the file +# version. For an example see the documentation. + +FILE_VERSION_FILTER = + +# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed +# by doxygen. The layout file controls the global structure of the generated +# output files in an output format independent way. To create the layout file +# that represents doxygen's defaults, run doxygen with the -l option. You can +# optionally specify a file name after the option, if omitted DoxygenLayout.xml +# will be used as the name of the layout file. +# +# Note that if you run doxygen from a directory containing a file called +# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE +# tag is left empty. + +LAYOUT_FILE = + +# The CITE_BIB_FILES tag can be used to specify one or more bib files containing +# the reference definitions. This must be a list of .bib files. The .bib +# extension is automatically appended if omitted. This requires the bibtex tool +# to be installed. See also http://en.wikipedia.org/wiki/BibTeX for more info. +# For LaTeX the style of the bibliography can be controlled using +# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the +# search path. See also \cite for info how to create references. + +CITE_BIB_FILES = + +#--------------------------------------------------------------------------- +# Configuration options related to warning and progress messages +#--------------------------------------------------------------------------- + +# The QUIET tag can be used to turn on/off the messages that are generated to +# standard output by doxygen. If QUIET is set to YES this implies that the +# messages are off. +# The default value is: NO. + +QUIET = NO + +# The WARNINGS tag can be used to turn on/off the warning messages that are +# generated to standard error (stderr) by doxygen. If WARNINGS is set to YES +# this implies that the warnings are on. +# +# Tip: Turn warnings on while writing the documentation. +# The default value is: YES. + +WARNINGS = YES + +# If the WARN_IF_UNDOCUMENTED tag is set to YES then doxygen will generate +# warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag +# will automatically be disabled. +# The default value is: YES. + +WARN_IF_UNDOCUMENTED = YES + +# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for +# potential errors in the documentation, such as not documenting some parameters +# in a documented function, or documenting parameters that don't exist or using +# markup commands wrongly. +# The default value is: YES. + +WARN_IF_DOC_ERROR = YES + +# This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that +# are documented, but have no documentation for their parameters or return +# value. If set to NO, doxygen will only warn about wrong or incomplete +# parameter documentation, but not about the absence of documentation. +# The default value is: NO. 
+ +WARN_NO_PARAMDOC = NO + +# If the WARN_AS_ERROR tag is set to YES then doxygen will immediately stop when +# a warning is encountered. +# The default value is: NO. + +WARN_AS_ERROR = NO + +# The WARN_FORMAT tag determines the format of the warning messages that doxygen +# can produce. The string should contain the $file, $line, and $text tags, which +# will be replaced by the file and line number from which the warning originated +# and the warning text. Optionally the format may contain $version, which will +# be replaced by the version of the file (if it could be obtained via +# FILE_VERSION_FILTER) +# The default value is: $file:$line: $text. + +WARN_FORMAT = "$file:$line: $text" + +# The WARN_LOGFILE tag can be used to specify a file to which warning and error +# messages should be written. If left blank the output is written to standard +# error (stderr). + +WARN_LOGFILE = + +#--------------------------------------------------------------------------- +# Configuration options related to the input files +#--------------------------------------------------------------------------- + +# The INPUT tag is used to specify the files and/or directories that contain +# documented source files. You may enter file names like myfile.cpp or +# directories like /usr/src/myproject. Separate the files or directories with +# spaces. See also FILE_PATTERNS and EXTENSION_MAPPING +# Note: If this tag is empty the current directory is searched. + +INPUT = frameworks/av/media/libstagefright/codec2/ + +# This tag can be used to specify the character encoding of the source files +# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses +# libiconv (or the iconv built into libc) for the transcoding. See the libiconv +# documentation (see: http://www.gnu.org/software/libiconv) for the list of +# possible encodings. +# The default value is: UTF-8. + +INPUT_ENCODING = UTF-8 + +# If the value of the INPUT tag contains directories, you can use the +# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and +# *.h) to filter out the source-files in the directories. +# +# Note that for custom extensions or not directly supported extensions you also +# need to set EXTENSION_MAPPING for the extension otherwise the files are not +# read by doxygen. +# +# If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cpp, +# *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, +# *.hh, *.hxx, *.hpp, *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc, +# *.m, *.markdown, *.md, *.mm, *.dox, *.py, *.pyw, *.f90, *.f, *.for, *.tcl, +# *.vhd, *.vhdl, *.ucf, *.qsf, *.as and *.js. + +FILE_PATTERNS = C2*.c \ + C2*.cpp \ + C2*.h + +# The RECURSIVE tag can be used to specify whether or not subdirectories should +# be searched for input files as well. +# The default value is: NO. + +RECURSIVE = YES + +# The EXCLUDE tag can be used to specify files and/or directories that should be +# excluded from the INPUT source files. This way you can easily exclude a +# subdirectory from a directory tree whose root is specified with the INPUT tag. +# +# Note that relative paths are relative to the directory from which doxygen is +# run. + +EXCLUDE = + +# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or +# directories that are symbolic links (a Unix file system feature) are excluded +# from the input. +# The default value is: NO. 
+ +EXCLUDE_SYMLINKS = NO + +# If the value of the INPUT tag contains directories, you can use the +# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude +# certain files from those directories. +# +# Note that the wildcards are matched against the file with absolute path, so to +# exclude all test directories for example use the pattern */test/* + +EXCLUDE_PATTERNS = ._* + +# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names +# (namespaces, classes, functions, etc.) that should be excluded from the +# output. The symbol name can be a fully qualified name, a word, or if the +# wildcard * is used, a substring. Examples: ANamespace, AClass, +# AClass::ANamespace, ANamespace::*Test +# +# Note that the wildcards are matched against the file with absolute path, so to +# exclude all test directories use the pattern */test/* + +EXCLUDE_SYMBOLS = + +# The EXAMPLE_PATH tag can be used to specify one or more files or directories +# that contain example code fragments that are included (see the \include +# command). + +EXAMPLE_PATH = + +# If the value of the EXAMPLE_PATH tag contains directories, you can use the +# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and +# *.h) to filter out the source-files in the directories. If left blank all +# files are included. + +EXAMPLE_PATTERNS = * + +# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be +# searched for input files to be used with the \include or \dontinclude commands +# irrespective of the value of the RECURSIVE tag. +# The default value is: NO. + +EXAMPLE_RECURSIVE = NO + +# The IMAGE_PATH tag can be used to specify one or more files or directories +# that contain images that are to be included in the documentation (see the +# \image command). + +IMAGE_PATH = + +# The INPUT_FILTER tag can be used to specify a program that doxygen should +# invoke to filter for each input file. Doxygen will invoke the filter program +# by executing (via popen()) the command: +# +# <filter> <input-file> +# +# where <filter> is the value of the INPUT_FILTER tag, and <input-file> is the +# name of an input file. Doxygen will then use the output that the filter +# program writes to standard output. If FILTER_PATTERNS is specified, this tag +# will be ignored. +# +# Note that the filter must not add or remove lines; it is applied before the +# code is scanned, but not when the output code is generated. If lines are added +# or removed, the anchors will not be placed correctly. +# +# Note that for custom extensions or not directly supported extensions you also +# need to set EXTENSION_MAPPING for the extension otherwise the files are not +# properly processed by doxygen. + +INPUT_FILTER = frameworks/av/media/libstagefright/codec2/docs/doxyfilter.sh + +# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern +# basis. Doxygen will compare the file name with each pattern and apply the +# filter if there is a match. The filters are a list of the form: pattern=filter +# (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how +# filters are used. If the FILTER_PATTERNS tag is empty or if none of the +# patterns match the file name, INPUT_FILTER is applied. +# +# Note that for custom extensions or not directly supported extensions you also +# need to set EXTENSION_MAPPING for the extension otherwise the files are not +# properly processed by doxygen.
+ +FILTER_PATTERNS = + +# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using +# INPUT_FILTER) will also be used to filter the input files that are used for +# producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES). +# The default value is: NO. + +FILTER_SOURCE_FILES = YES + +# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file +# pattern. A pattern will override the setting for FILTER_PATTERN (if any) and +# it is also possible to disable source filtering for a specific pattern using +# *.ext= (so without naming a filter). +# This tag requires that the tag FILTER_SOURCE_FILES is set to YES. + +FILTER_SOURCE_PATTERNS = + +# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that +# is part of the input, its contents will be placed on the main page +# (index.html). This can be useful if you have a project on for instance GitHub +# and want to reuse the introduction page also for the doxygen output. + +USE_MDFILE_AS_MAINPAGE = + +#--------------------------------------------------------------------------- +# Configuration options related to source browsing +#--------------------------------------------------------------------------- + +# If the SOURCE_BROWSER tag is set to YES then a list of source files will be +# generated. Documented entities will be cross-referenced with these sources. +# +# Note: To get rid of all source code in the generated output, make sure that +# also VERBATIM_HEADERS is set to NO. +# The default value is: NO. + +SOURCE_BROWSER = YES + +# Setting the INLINE_SOURCES tag to YES will include the body of functions, +# classes and enums directly into the documentation. +# The default value is: NO. + +INLINE_SOURCES = NO + +# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any +# special comment blocks from generated source code fragments. Normal C, C++ and +# Fortran comments will always remain visible. +# The default value is: YES. + +STRIP_CODE_COMMENTS = YES + +# If the REFERENCED_BY_RELATION tag is set to YES then for each documented +# function all documented functions referencing it will be listed. +# The default value is: NO. + +REFERENCED_BY_RELATION = YES + +# If the REFERENCES_RELATION tag is set to YES then for each documented function +# all documented entities called/used by that function will be listed. +# The default value is: NO. + +REFERENCES_RELATION = YES + +# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set +# to YES then the hyperlinks from functions in REFERENCES_RELATION and +# REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will +# link to the documentation. +# The default value is: YES. + +REFERENCES_LINK_SOURCE = YES + +# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the +# source code will show a tooltip with additional information such as prototype, +# brief description and links to the definition and documentation. Since this +# will make the HTML file larger and loading of large files a bit slower, you +# can opt to disable this feature. +# The default value is: YES. +# This tag requires that the tag SOURCE_BROWSER is set to YES. + +SOURCE_TOOLTIPS = YES + +# If the USE_HTAGS tag is set to YES then the references to source code will +# point to the HTML generated by the htags(1) tool instead of doxygen built-in +# source browser. 
The htags tool is part of GNU's global source tagging system +# (see http://www.gnu.org/software/global/global.html). You will need version +# 4.8.6 or higher. +# +# To use it do the following: +# - Install the latest version of global +# - Enable SOURCE_BROWSER and USE_HTAGS in the config file +# - Make sure the INPUT points to the root of the source tree +# - Run doxygen as normal +# +# Doxygen will invoke htags (and that will in turn invoke gtags), so these +# tools must be available from the command line (i.e. in the search path). +# +# The result: instead of the source browser generated by doxygen, the links to +# source code will now point to the output of htags. +# The default value is: NO. +# This tag requires that the tag SOURCE_BROWSER is set to YES. + +USE_HTAGS = NO + +# If the VERBATIM_HEADERS tag is set the YES then doxygen will generate a +# verbatim copy of the header file for each class for which an include is +# specified. Set to NO to disable this. +# See also: Section \class. +# The default value is: YES. + +VERBATIM_HEADERS = YES + +# If the CLANG_ASSISTED_PARSING tag is set to YES then doxygen will use the +# clang parser (see: http://clang.llvm.org/) for more accurate parsing at the +# cost of reduced performance. This can be particularly helpful with template +# rich C++ code for which doxygen's built-in parser lacks the necessary type +# information. +# Note: The availability of this option depends on whether or not doxygen was +# generated with the -Duse-libclang=ON option for CMake. +# The default value is: NO. + +CLANG_ASSISTED_PARSING = YES + +# If clang assisted parsing is enabled you can provide the compiler with command +# line options that you would normally use when invoking the compiler. Note that +# the include paths will already be set by doxygen for the files and directories +# specified with INPUT and INCLUDE_PATH. +# This tag requires that the tag CLANG_ASSISTED_PARSING is set to YES. + +CLANG_OPTIONS = -std=c++14 + +#--------------------------------------------------------------------------- +# Configuration options related to the alphabetical class index +#--------------------------------------------------------------------------- + +# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all +# compounds will be generated. Enable this if the project contains a lot of +# classes, structs, unions or interfaces. +# The default value is: YES. + +ALPHABETICAL_INDEX = YES + +# The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in +# which the alphabetical index list will be split. +# Minimum value: 1, maximum value: 20, default value: 5. +# This tag requires that the tag ALPHABETICAL_INDEX is set to YES. + +COLS_IN_ALPHA_INDEX = 5 + +# In case all classes in a project start with a common prefix, all classes will +# be put under the same header in the alphabetical index. The IGNORE_PREFIX tag +# can be used to specify a prefix (or a list of prefixes) that should be ignored +# while generating the index headers. +# This tag requires that the tag ALPHABETICAL_INDEX is set to YES. + +IGNORE_PREFIX = + +#--------------------------------------------------------------------------- +# Configuration options related to the HTML output +#--------------------------------------------------------------------------- + +# If the GENERATE_HTML tag is set to YES, doxygen will generate HTML output +# The default value is: YES. + +GENERATE_HTML = YES + +# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. 
If a +# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of +# it. +# The default directory is: html. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_OUTPUT = html + +# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each +# generated HTML page (for example: .htm, .php, .asp). +# The default value is: .html. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_FILE_EXTENSION = .html + +# The HTML_HEADER tag can be used to specify a user-defined HTML header file for +# each generated HTML page. If the tag is left blank doxygen will generate a +# standard header. +# +# To get valid HTML the header file that includes any scripts and style sheets +# that doxygen needs, which is dependent on the configuration options used (e.g. +# the setting GENERATE_TREEVIEW). It is highly recommended to start with a +# default header using +# doxygen -w html new_header.html new_footer.html new_stylesheet.css +# YourConfigFile +# and then modify the file new_header.html. See also section "Doxygen usage" +# for information on how to generate the default header that doxygen normally +# uses. +# Note: The header is subject to change so you typically have to regenerate the +# default header when upgrading to a newer version of doxygen. For a description +# of the possible markers and block names see the documentation. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_HEADER = + +# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each +# generated HTML page. If the tag is left blank doxygen will generate a standard +# footer. See HTML_HEADER for more information on how to generate a default +# footer and what special commands can be used inside the footer. See also +# section "Doxygen usage" for information on how to generate the default footer +# that doxygen normally uses. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_FOOTER = + +# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style +# sheet that is used by each HTML page. It can be used to fine-tune the look of +# the HTML output. If left blank doxygen will generate a default style sheet. +# See also section "Doxygen usage" for information on how to generate the style +# sheet that doxygen normally uses. +# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as +# it is more robust and this tag (HTML_STYLESHEET) will in the future become +# obsolete. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_STYLESHEET = + +# The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined +# cascading style sheets that are included after the standard style sheets +# created by doxygen. Using this option one can overrule certain style aspects. +# This is preferred over using HTML_STYLESHEET since it does not replace the +# standard style sheet and is therefore more robust against future updates. +# Doxygen will copy the style sheet files to the output directory. +# Note: The order of the extra style sheet files is of importance (e.g. the last +# style sheet in the list overrules the setting of the previous ones in the +# list). For an example see the documentation. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_EXTRA_STYLESHEET = + +# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or +# other source files which should be copied to the HTML output directory. 
Note +# that these files will be copied to the base HTML output directory. Use the +# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these +# files. In the HTML_STYLESHEET file, use the file name only. Also note that the +# files will be copied as-is; there are no commands or markers available. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_EXTRA_FILES = + +# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen +# will adjust the colors in the style sheet and background images according to +# this color. Hue is specified as an angle on a colorwheel, see +# http://en.wikipedia.org/wiki/Hue for more information. For instance the value +# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300 +# purple, and 360 is red again. +# Minimum value: 0, maximum value: 359, default value: 220. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_HUE = 220 + +# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors +# in the HTML output. For a value of 0 the output will use grayscales only. A +# value of 255 will produce the most vivid colors. +# Minimum value: 0, maximum value: 255, default value: 100. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_SAT = 100 + +# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the +# luminance component of the colors in the HTML output. Values below 100 +# gradually make the output lighter, whereas values above 100 make the output +# darker. The value divided by 100 is the actual gamma applied, so 80 represents +# a gamma of 0.8, The value 220 represents a gamma of 2.2, and 100 does not +# change the gamma. +# Minimum value: 40, maximum value: 240, default value: 80. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_COLORSTYLE_GAMMA = 80 + +# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML +# page will contain the date and time when the page was generated. Setting this +# to YES can help to show when doxygen was last run and thus if the +# documentation is up to date. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_TIMESTAMP = NO + +# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML +# documentation will contain sections that can be hidden and shown after the +# page has loaded. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +HTML_DYNAMIC_SECTIONS = YES + +# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries +# shown in the various tree structured indices initially; the user can expand +# and collapse entries dynamically later on. Doxygen will expand the tree to +# such a level that at most the specified number of entries are visible (unless +# a fully collapsed tree already exceeds this amount). So setting the number of +# entries 1 will produce a full collapsed tree by default. 0 is a special value +# representing an infinite number of entries and will result in a full expanded +# tree by default. +# Minimum value: 0, maximum value: 9999, default value: 100. +# This tag requires that the tag GENERATE_HTML is set to YES. 
+ +HTML_INDEX_NUM_ENTRIES = 100 + +# If the GENERATE_DOCSET tag is set to YES, additional index files will be +# generated that can be used as input for Apple's Xcode 3 integrated development +# environment (see: http://developer.apple.com/tools/xcode/), introduced with +# OSX 10.5 (Leopard). To create a documentation set, doxygen will generate a +# Makefile in the HTML output directory. Running make will produce the docset in +# that directory and running make install will install the docset in +# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at +# startup. See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html +# for more information. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_DOCSET = NO + +# This tag determines the name of the docset feed. A documentation feed provides +# an umbrella under which multiple documentation sets from a single provider +# (such as a company or product suite) can be grouped. +# The default value is: Doxygen generated docs. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_FEEDNAME = "Doxygen generated docs" + +# This tag specifies a string that should uniquely identify the documentation +# set bundle. This should be a reverse domain-name style string, e.g. +# com.mycompany.MyDocSet. Doxygen will append .docset to the name. +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_BUNDLE_ID = org.doxygen.Project + +# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify +# the documentation publisher. This should be a reverse domain-name style +# string, e.g. com.mycompany.MyDocSet.documentation. +# The default value is: org.doxygen.Publisher. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_PUBLISHER_ID = org.doxygen.Publisher + +# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher. +# The default value is: Publisher. +# This tag requires that the tag GENERATE_DOCSET is set to YES. + +DOCSET_PUBLISHER_NAME = Publisher + +# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three +# additional HTML index files: index.hhp, index.hhc, and index.hhk. The +# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop +# (see: http://www.microsoft.com/en-us/download/details.aspx?id=21138) on +# Windows. +# +# The HTML Help Workshop contains a compiler that can convert all HTML output +# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML +# files are now used as the Windows 98 help format, and will replace the old +# Windows help format (.hlp) on all Windows platforms in the future. Compressed +# HTML files also contain an index, a table of contents, and you can search for +# words in the documentation. The HTML workshop also contains a viewer for +# compressed HTML files. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_HTMLHELP = NO + +# The CHM_FILE tag can be used to specify the file name of the resulting .chm +# file. You can add a path in front of the file if the result should not be +# written to the html output directory. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +CHM_FILE = + +# The HHC_LOCATION tag can be used to specify the location (absolute path +# including file name) of the HTML help compiler (hhc.exe). 
If non-empty, +# doxygen will try to run the HTML help compiler on the generated index.hhp. +# The file has to be specified with full path. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +HHC_LOCATION = + +# The GENERATE_CHI flag controls if a separate .chi index file is generated +# (YES) or that it should be included in the master .chm file (NO). +# The default value is: NO. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +GENERATE_CHI = NO + +# The CHM_INDEX_ENCODING is used to encode HtmlHelp index (hhk), content (hhc) +# and project file content. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +CHM_INDEX_ENCODING = + +# The BINARY_TOC flag controls whether a binary table of contents is generated +# (YES) or a normal table of contents (NO) in the .chm file. Furthermore it +# enables the Previous and Next buttons. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +BINARY_TOC = NO + +# The TOC_EXPAND flag can be set to YES to add extra items for group members to +# the table of contents of the HTML help documentation and to the tree view. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTMLHELP is set to YES. + +TOC_EXPAND = NO + +# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and +# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that +# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help +# (.qch) of the generated HTML documentation. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_QHP = NO + +# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify +# the file name of the resulting .qch file. The path specified is relative to +# the HTML output folder. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QCH_FILE = + +# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help +# Project output. For more information please see Qt Help Project / Namespace +# (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#namespace). +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_NAMESPACE = org.doxygen.Project + +# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt +# Help Project output. For more information please see Qt Help Project / Virtual +# Folders (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#virtual- +# folders). +# The default value is: doc. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_VIRTUAL_FOLDER = doc + +# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom +# filter to add. For more information please see Qt Help Project / Custom +# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom- +# filters). +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_CUST_FILTER_NAME = + +# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the +# custom filter to add. For more information please see Qt Help Project / Custom +# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom- +# filters). +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_CUST_FILTER_ATTRS = + +# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this +# project's filter section matches. 
Qt Help Project / Filter Attributes (see: +# http://qt-project.org/doc/qt-4.8/qthelpproject.html#filter-attributes). +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHP_SECT_FILTER_ATTRS = + +# The QHG_LOCATION tag can be used to specify the location of Qt's +# qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the +# generated .qhp file. +# This tag requires that the tag GENERATE_QHP is set to YES. + +QHG_LOCATION = + +# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be +# generated, together with the HTML files, they form an Eclipse help plugin. To +# install this plugin and make it available under the help contents menu in +# Eclipse, the contents of the directory containing the HTML and XML files needs +# to be copied into the plugins directory of eclipse. The name of the directory +# within the plugins directory should be the same as the ECLIPSE_DOC_ID value. +# After copying Eclipse needs to be restarted before the help appears. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_ECLIPSEHELP = NO + +# A unique identifier for the Eclipse help plugin. When installing the plugin +# the directory name containing the HTML and XML files should also have this +# name. Each documentation set should have its own identifier. +# The default value is: org.doxygen.Project. +# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES. + +ECLIPSE_DOC_ID = org.doxygen.Project + +# If you want full control over the layout of the generated HTML pages it might +# be necessary to disable the index and replace it with your own. The +# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top +# of each HTML page. A value of NO enables the index and the value YES disables +# it. Since the tabs in the index contain the same information as the navigation +# tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +DISABLE_INDEX = NO + +# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index +# structure should be generated to display hierarchical information. If the tag +# value is set to YES, a side panel will be generated containing a tree-like +# index structure (just like the one that is generated for HTML Help). For this +# to work a browser that supports JavaScript, DHTML, CSS and frames is required +# (i.e. any modern browser). Windows users are probably better off using the +# HTML help feature. Via custom style sheets (see HTML_EXTRA_STYLESHEET) one can +# further fine-tune the look of the index. As an example, the default style +# sheet generated by doxygen has an example that shows how to put an image at +# the root of the tree instead of the PROJECT_NAME. Since the tree basically has +# the same information as the tab index, you could consider setting +# DISABLE_INDEX to YES when enabling this option. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +GENERATE_TREEVIEW = YES + +# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that +# doxygen will group on one line in the generated HTML documentation. +# +# Note that a value of 0 will completely suppress the enum values from appearing +# in the overview section. +# Minimum value: 0, maximum value: 20, default value: 4. +# This tag requires that the tag GENERATE_HTML is set to YES. 
+ +ENUM_VALUES_PER_LINE = 4 + +# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used +# to set the initial width (in pixels) of the frame in which the tree is shown. +# Minimum value: 0, maximum value: 1500, default value: 250. +# This tag requires that the tag GENERATE_HTML is set to YES. + +TREEVIEW_WIDTH = 250 + +# If the EXT_LINKS_IN_WINDOW option is set to YES, doxygen will open links to +# external symbols imported via tag files in a separate window. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +EXT_LINKS_IN_WINDOW = NO + +# Use this tag to change the font size of LaTeX formulas included as images in +# the HTML documentation. When you change the font size after a successful +# doxygen run you need to manually remove any form_*.png images from the HTML +# output directory to force them to be regenerated. +# Minimum value: 8, maximum value: 50, default value: 10. +# This tag requires that the tag GENERATE_HTML is set to YES. + +FORMULA_FONTSIZE = 10 + +# Use the FORMULA_TRANPARENT tag to determine whether or not the images +# generated for formulas are transparent PNGs. Transparent PNGs are not +# supported properly for IE 6.0, but are supported on all modern browsers. +# +# Note that when changing this option you need to delete any form_*.png files in +# the HTML output directory before the changes have effect. +# The default value is: YES. +# This tag requires that the tag GENERATE_HTML is set to YES. + +FORMULA_TRANSPARENT = YES + +# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see +# http://www.mathjax.org) which uses client side Javascript for the rendering +# instead of using pre-rendered bitmaps. Use this if you do not have LaTeX +# installed or if you want to formulas look prettier in the HTML output. When +# enabled you may also need to install MathJax separately and configure the path +# to it using the MATHJAX_RELPATH option. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +USE_MATHJAX = NO + +# When MathJax is enabled you can set the default output format to be used for +# the MathJax output. See the MathJax site (see: +# http://docs.mathjax.org/en/latest/output.html) for more details. +# Possible values are: HTML-CSS (which is slower, but has the best +# compatibility), NativeMML (i.e. MathML) and SVG. +# The default value is: HTML-CSS. +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_FORMAT = HTML-CSS + +# When MathJax is enabled you need to specify the location relative to the HTML +# output directory using the MATHJAX_RELPATH option. The destination directory +# should contain the MathJax.js script. For instance, if the mathjax directory +# is located at the same level as the HTML output directory, then +# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax +# Content Delivery Network so you can quickly see the result without installing +# MathJax. However, it is strongly recommended to install a local copy of +# MathJax from http://www.mathjax.org before deployment. +# The default value is: http://cdn.mathjax.org/mathjax/latest. +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_RELPATH = http://cdn.mathjax.org/mathjax/latest + +# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax +# extension names that should be enabled during MathJax rendering. 
For example +# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_EXTENSIONS = + +# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces +# of code that will be used on startup of the MathJax code. See the MathJax site +# (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an +# example see the documentation. +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_CODEFILE = + +# When the SEARCHENGINE tag is enabled doxygen will generate a search box for +# the HTML output. The underlying search engine uses javascript and DHTML and +# should work on any modern browser. Note that when using HTML help +# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET) +# there is already a search function so this one should typically be disabled. +# For large projects the javascript based search engine can be slow, then +# enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to +# search using the keyboard; to jump to the search box use <access key> + S +# (what the <access key> is depends on the OS and browser, but it is typically +# <CTRL>, <ALT>/