diff --git a/CleanSpec.mk b/CleanSpec.mk index 3788bc6baa01446c040dc4852688736c0ccbe87e..531e44ec8e66a0d6fac17bda0d213e7c18e97b8f 100644 --- a/CleanSpec.mk +++ b/CleanSpec.mk @@ -60,6 +60,9 @@ $(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/bin/hw/android.hardware.auto $(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib/hw/android.hardware.automotive*) $(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib64/hw/android.hardware.automotive*) $(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/etc/init/android.hardware.automotive*) -$(call add-clean-step, find $(PRODUCT_OUT)/system $(PRODUCT_OUT)/vendor -type f -name "android\.hardware\.configstore\@1\.1*" -print0 | xargs -0 rm -f) $(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib/android.hardware.tests*) $(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib/vndk/android.hardware.tests*) +$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/lib/vndk-sp/android.hardware.graphics.allocator*) +$(call add-clean-step, find $(PRODUCT_OUT)/system $(PRODUCT_OUT)/vendor -type f -name "android\.hardware\.configstore\@1\.1*" -print0 | xargs -0 rm -f) +$(call add-clean-step, find $(PRODUCT_OUT)/system $(PRODUCT_OUT)/vendor -type f -name "android\.hardware\.configstore*" -print0 | xargs -0 rm -f) +$(call add-clean-step, rm -rf $(PRODUCT_OUT)/vendor/etc/seccomp_policy/configstore@1.0.policy) diff --git a/audio/2.0/config/audio_policy_configuration.xsd b/audio/2.0/config/audio_policy_configuration.xsd index eb59152c91735ee8b8ed898a0dd71049f47559be..7647cade7a36fdd581b572cc9c49294e091f0b8c 100644 --- a/audio/2.0/config/audio_policy_configuration.xsd +++ b/audio/2.0/config/audio_policy_configuration.xsd @@ -49,10 +49,6 @@ - - - - @@ -73,14 +69,28 @@ - - - - - - - - + + + + + + + + + + + + + + + + + + @@ -127,13 +137,15 @@ - + + + + + @@ -405,7 +417,7 @@ - + diff --git a/audio/2.0/default/Conversions.cpp b/audio/2.0/default/Conversions.cpp deleted file mode 100644 index e6691851bbaf3175c4003430208a5a763e9f322d..0000000000000000000000000000000000000000 --- a/audio/2.0/default/Conversions.cpp +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright (C) 2016 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#include - -#include "Conversions.h" - -namespace android { -namespace hardware { -namespace audio { -namespace V2_0 { -namespace implementation { - -std::string deviceAddressToHal(const DeviceAddress& address) { - // HAL assumes that the address is NUL-terminated. 
- char halAddress[AUDIO_DEVICE_MAX_ADDRESS_LEN]; - memset(halAddress, 0, sizeof(halAddress)); - uint32_t halDevice = static_cast(address.device); - const bool isInput = (halDevice & AUDIO_DEVICE_BIT_IN) != 0; - if (isInput) halDevice &= ~AUDIO_DEVICE_BIT_IN; - if ((!isInput && (halDevice & AUDIO_DEVICE_OUT_ALL_A2DP) != 0) - || (isInput && (halDevice & AUDIO_DEVICE_IN_BLUETOOTH_A2DP) != 0)) { - snprintf(halAddress, sizeof(halAddress), - "%02X:%02X:%02X:%02X:%02X:%02X", - address.address.mac[0], address.address.mac[1], address.address.mac[2], - address.address.mac[3], address.address.mac[4], address.address.mac[5]); - } else if ((!isInput && (halDevice & AUDIO_DEVICE_OUT_IP) != 0) - || (isInput && (halDevice & AUDIO_DEVICE_IN_IP) != 0)) { - snprintf(halAddress, sizeof(halAddress), - "%d.%d.%d.%d", - address.address.ipv4[0], address.address.ipv4[1], - address.address.ipv4[2], address.address.ipv4[3]); - } else if ((!isInput && (halDevice & AUDIO_DEVICE_OUT_ALL_USB) != 0) - || (isInput && (halDevice & AUDIO_DEVICE_IN_ALL_USB) != 0)) { - snprintf(halAddress, sizeof(halAddress), - "card=%d;device=%d", - address.address.alsa.card, address.address.alsa.device); - } else if ((!isInput && (halDevice & AUDIO_DEVICE_OUT_BUS) != 0) - || (isInput && (halDevice & AUDIO_DEVICE_IN_BUS) != 0)) { - snprintf(halAddress, sizeof(halAddress), - "%s", address.busAddress.c_str()); - } else if ((!isInput && (halDevice & AUDIO_DEVICE_OUT_REMOTE_SUBMIX)) != 0 - || (isInput && (halDevice & AUDIO_DEVICE_IN_REMOTE_SUBMIX) != 0)) { - snprintf(halAddress, sizeof(halAddress), - "%s", address.rSubmixAddress.c_str()); - } - return halAddress; -} - -} // namespace implementation -} // namespace V2_0 -} // namespace audio -} // namespace hardware -} // namespace android diff --git a/audio/2.0/default/Device.h b/audio/2.0/default/Device.h deleted file mode 100644 index 77383610753bd4bb14c65d847adfc806a424b892..0000000000000000000000000000000000000000 --- a/audio/2.0/default/Device.h +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Copyright (C) 2016 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#ifndef ANDROID_HARDWARE_AUDIO_V2_0_DEVICE_H -#define ANDROID_HARDWARE_AUDIO_V2_0_DEVICE_H - -#include - -#include -#include - -#include -#include - -#include - -#include "ParametersUtil.h" - -namespace android { -namespace hardware { -namespace audio { -namespace V2_0 { -namespace implementation { - -using ::android::hardware::audio::common::V2_0::AudioConfig; -using ::android::hardware::audio::common::V2_0::AudioHwSync; -using ::android::hardware::audio::common::V2_0::AudioInputFlag; -using ::android::hardware::audio::common::V2_0::AudioOutputFlag; -using ::android::hardware::audio::common::V2_0::AudioPatchHandle; -using ::android::hardware::audio::common::V2_0::AudioPort; -using ::android::hardware::audio::common::V2_0::AudioPortConfig; -using ::android::hardware::audio::common::V2_0::AudioSource; -using ::android::hardware::audio::V2_0::DeviceAddress; -using ::android::hardware::audio::V2_0::IDevice; -using ::android::hardware::audio::V2_0::IStreamIn; -using ::android::hardware::audio::V2_0::IStreamOut; -using ::android::hardware::audio::V2_0::ParameterValue; -using ::android::hardware::audio::V2_0::Result; -using ::android::hardware::Return; -using ::android::hardware::Void; -using ::android::hardware::hidl_vec; -using ::android::hardware::hidl_string; -using ::android::sp; - -struct Device : public IDevice, public ParametersUtil { - explicit Device(audio_hw_device_t* device); - - // Methods from ::android::hardware::audio::V2_0::IDevice follow. - Return initCheck() override; - Return setMasterVolume(float volume) override; - Return getMasterVolume(getMasterVolume_cb _hidl_cb) override; - Return setMicMute(bool mute) override; - Return getMicMute(getMicMute_cb _hidl_cb) override; - Return setMasterMute(bool mute) override; - Return getMasterMute(getMasterMute_cb _hidl_cb) override; - Return getInputBufferSize( - const AudioConfig& config, getInputBufferSize_cb _hidl_cb) override; - Return openOutputStream( - int32_t ioHandle, - const DeviceAddress& device, - const AudioConfig& config, - AudioOutputFlag flags, - openOutputStream_cb _hidl_cb) override; - Return openInputStream( - int32_t ioHandle, - const DeviceAddress& device, - const AudioConfig& config, - AudioInputFlag flags, - AudioSource source, - openInputStream_cb _hidl_cb) override; - Return supportsAudioPatches() override; - Return createAudioPatch( - const hidl_vec& sources, - const hidl_vec& sinks, - createAudioPatch_cb _hidl_cb) override; - Return releaseAudioPatch(int32_t patch) override; - Return getAudioPort(const AudioPort& port, getAudioPort_cb _hidl_cb) override; - Return setAudioPortConfig(const AudioPortConfig& config) override; - Return getHwAvSync() override; - Return setScreenState(bool turnedOn) override; - Return getParameters( - const hidl_vec& keys, getParameters_cb _hidl_cb) override; - Return setParameters(const hidl_vec& parameters) override; - Return debugDump(const hidl_handle& fd) override; - - // Utility methods for extending interfaces. - Result analyzeStatus(const char* funcName, int status); - void closeInputStream(audio_stream_in_t* stream); - void closeOutputStream(audio_stream_out_t* stream); - audio_hw_device_t* device() const { return mDevice; } - - private: - audio_hw_device_t *mDevice; - - virtual ~Device(); - - // Methods from ParametersUtil. 
- char* halGetParameters(const char* keys) override; - int halSetParameters(const char* keysAndValues) override; - - uint32_t version() const { return mDevice->common.version; } -}; - -} // namespace implementation -} // namespace V2_0 -} // namespace audio -} // namespace hardware -} // namespace android - -#endif // ANDROID_HARDWARE_AUDIO_V2_0_DEVICE_H diff --git a/audio/2.0/default/DevicesFactory.cpp b/audio/2.0/default/DevicesFactory.cpp deleted file mode 100644 index b913bc799d7035ab1836536b34b85046cb5fd191..0000000000000000000000000000000000000000 --- a/audio/2.0/default/DevicesFactory.cpp +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright (C) 2016 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#define LOG_TAG "DevicesFactoryHAL" - -#include - -#include - -#include "Device.h" -#include "DevicesFactory.h" -#include "PrimaryDevice.h" - -namespace android { -namespace hardware { -namespace audio { -namespace V2_0 { -namespace implementation { - -// static -const char* DevicesFactory::deviceToString(IDevicesFactory::Device device) { - switch (device) { - case IDevicesFactory::Device::PRIMARY: return AUDIO_HARDWARE_MODULE_ID_PRIMARY; - case IDevicesFactory::Device::A2DP: return AUDIO_HARDWARE_MODULE_ID_A2DP; - case IDevicesFactory::Device::USB: return AUDIO_HARDWARE_MODULE_ID_USB; - case IDevicesFactory::Device::R_SUBMIX: return AUDIO_HARDWARE_MODULE_ID_REMOTE_SUBMIX; - case IDevicesFactory::Device::STUB: return AUDIO_HARDWARE_MODULE_ID_STUB; - } - return nullptr; -} - -// static -int DevicesFactory::loadAudioInterface(const char *if_name, audio_hw_device_t **dev) -{ - const hw_module_t *mod; - int rc; - - rc = hw_get_module_by_class(AUDIO_HARDWARE_MODULE_ID, if_name, &mod); - if (rc) { - ALOGE("%s couldn't load audio hw module %s.%s (%s)", __func__, - AUDIO_HARDWARE_MODULE_ID, if_name, strerror(-rc)); - goto out; - } - rc = audio_hw_device_open(mod, dev); - if (rc) { - ALOGE("%s couldn't open audio hw device in %s.%s (%s)", __func__, - AUDIO_HARDWARE_MODULE_ID, if_name, strerror(-rc)); - goto out; - } - if ((*dev)->common.version < AUDIO_DEVICE_API_VERSION_MIN) { - ALOGE("%s wrong audio hw device version %04x", __func__, (*dev)->common.version); - rc = -EINVAL; - audio_hw_device_close(*dev); - goto out; - } - return OK; - -out: - *dev = NULL; - return rc; -} - -// Methods from ::android::hardware::audio::V2_0::IDevicesFactory follow. 
-Return DevicesFactory::openDevice(IDevicesFactory::Device device, openDevice_cb _hidl_cb) { - audio_hw_device_t *halDevice; - Result retval(Result::INVALID_ARGUMENTS); - sp result; - const char* moduleName = deviceToString(device); - if (moduleName != nullptr) { - int halStatus = loadAudioInterface(moduleName, &halDevice); - if (halStatus == OK) { - if (device == IDevicesFactory::Device::PRIMARY) { - result = new PrimaryDevice(halDevice); - } else { - result = new ::android::hardware::audio::V2_0::implementation:: - Device(halDevice); - } - retval = Result::OK; - } else if (halStatus == -EINVAL) { - retval = Result::NOT_INITIALIZED; - } - } - _hidl_cb(retval, result); - return Void(); -} - -IDevicesFactory* HIDL_FETCH_IDevicesFactory(const char* /* name */) { - return new DevicesFactory(); -} - -} // namespace implementation -} // namespace V2_0 -} // namespace audio -} // namespace hardware -} // namespace android diff --git a/audio/2.0/default/PrimaryDevice.h b/audio/2.0/default/PrimaryDevice.h deleted file mode 100644 index d95511b7745d509dce5a1754c43d82177b9053df..0000000000000000000000000000000000000000 --- a/audio/2.0/default/PrimaryDevice.h +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Copyright (C) 2016 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef ANDROID_HARDWARE_AUDIO_V2_0_PRIMARYDEVICE_H -#define ANDROID_HARDWARE_AUDIO_V2_0_PRIMARYDEVICE_H - -#include -#include - -#include - -#include "Device.h" - -namespace android { -namespace hardware { -namespace audio { -namespace V2_0 { -namespace implementation { - -using ::android::hardware::audio::common::V2_0::AudioConfig; -using ::android::hardware::audio::common::V2_0::AudioInputFlag; -using ::android::hardware::audio::common::V2_0::AudioMode; -using ::android::hardware::audio::common::V2_0::AudioOutputFlag; -using ::android::hardware::audio::common::V2_0::AudioPort; -using ::android::hardware::audio::common::V2_0::AudioPortConfig; -using ::android::hardware::audio::common::V2_0::AudioSource; -using ::android::hardware::audio::V2_0::DeviceAddress; -using ::android::hardware::audio::V2_0::IDevice; -using ::android::hardware::audio::V2_0::IPrimaryDevice; -using ::android::hardware::audio::V2_0::IStreamIn; -using ::android::hardware::audio::V2_0::IStreamOut; -using ::android::hardware::audio::V2_0::ParameterValue; -using ::android::hardware::audio::V2_0::Result; -using ::android::hardware::Return; -using ::android::hardware::Void; -using ::android::hardware::hidl_vec; -using ::android::hardware::hidl_string; -using ::android::sp; - -struct PrimaryDevice : public IPrimaryDevice { - explicit PrimaryDevice(audio_hw_device_t* device); - - // Methods from ::android::hardware::audio::V2_0::IDevice follow. 
- Return initCheck() override; - Return setMasterVolume(float volume) override; - Return getMasterVolume(getMasterVolume_cb _hidl_cb) override; - Return setMicMute(bool mute) override; - Return getMicMute(getMicMute_cb _hidl_cb) override; - Return setMasterMute(bool mute) override; - Return getMasterMute(getMasterMute_cb _hidl_cb) override; - Return getInputBufferSize( - const AudioConfig& config, getInputBufferSize_cb _hidl_cb) override; - Return openOutputStream( - int32_t ioHandle, - const DeviceAddress& device, - const AudioConfig& config, - AudioOutputFlag flags, - openOutputStream_cb _hidl_cb) override; - Return openInputStream( - int32_t ioHandle, - const DeviceAddress& device, - const AudioConfig& config, - AudioInputFlag flags, - AudioSource source, - openInputStream_cb _hidl_cb) override; - Return supportsAudioPatches() override; - Return createAudioPatch( - const hidl_vec& sources, - const hidl_vec& sinks, - createAudioPatch_cb _hidl_cb) override; - Return releaseAudioPatch(int32_t patch) override; - Return getAudioPort(const AudioPort& port, getAudioPort_cb _hidl_cb) override; - Return setAudioPortConfig(const AudioPortConfig& config) override; - Return getHwAvSync() override; - Return setScreenState(bool turnedOn) override; - Return getParameters( - const hidl_vec& keys, getParameters_cb _hidl_cb) override; - Return setParameters(const hidl_vec& parameters) override; - Return debugDump(const hidl_handle& fd) override; - - // Methods from ::android::hardware::audio::V2_0::IPrimaryDevice follow. - Return setVoiceVolume(float volume) override; - Return setMode(AudioMode mode) override; - Return getBtScoNrecEnabled(getBtScoNrecEnabled_cb _hidl_cb) override; - Return setBtScoNrecEnabled(bool enabled) override; - Return getBtScoWidebandEnabled(getBtScoWidebandEnabled_cb _hidl_cb) override; - Return setBtScoWidebandEnabled(bool enabled) override; - Return getTtyMode(getTtyMode_cb _hidl_cb) override; - Return setTtyMode(IPrimaryDevice::TtyMode mode) override; - Return getHacEnabled(getHacEnabled_cb _hidl_cb) override; - Return setHacEnabled(bool enabled) override; - - private: - sp mDevice; - - virtual ~PrimaryDevice(); -}; - -} // namespace implementation -} // namespace V2_0 -} // namespace audio -} // namespace hardware -} // namespace android - -#endif // ANDROID_HARDWARE_AUDIO_V2_0_PRIMARYDEVICE_H diff --git a/audio/2.0/default/StreamIn.h b/audio/2.0/default/StreamIn.h deleted file mode 100644 index 950d68fc73ed76e685c6f827dd3cd82f4587cb9b..0000000000000000000000000000000000000000 --- a/audio/2.0/default/StreamIn.h +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Copyright (C) 2016 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#ifndef ANDROID_HARDWARE_AUDIO_V2_0_STREAMIN_H -#define ANDROID_HARDWARE_AUDIO_V2_0_STREAMIN_H - -#include -#include - -#include -#include -#include -#include -#include -#include - -#include "Device.h" -#include "Stream.h" - -namespace android { -namespace hardware { -namespace audio { -namespace V2_0 { -namespace implementation { - -using ::android::hardware::audio::common::V2_0::AudioChannelMask; -using ::android::hardware::audio::common::V2_0::AudioDevice; -using ::android::hardware::audio::common::V2_0::AudioFormat; -using ::android::hardware::audio::common::V2_0::AudioSource; -using ::android::hardware::audio::V2_0::DeviceAddress; -using ::android::hardware::audio::V2_0::IStream; -using ::android::hardware::audio::V2_0::IStreamIn; -using ::android::hardware::audio::V2_0::ParameterValue; -using ::android::hardware::audio::V2_0::Result; -using ::android::hardware::Return; -using ::android::hardware::Void; -using ::android::hardware::hidl_vec; -using ::android::hardware::hidl_string; -using ::android::sp; - -struct StreamIn : public IStreamIn { - typedef MessageQueue CommandMQ; - typedef MessageQueue DataMQ; - typedef MessageQueue StatusMQ; - - StreamIn(const sp& device, audio_stream_in_t* stream); - - // Methods from ::android::hardware::audio::V2_0::IStream follow. - Return getFrameSize() override; - Return getFrameCount() override; - Return getBufferSize() override; - Return getSampleRate() override; - Return getSupportedSampleRates(getSupportedSampleRates_cb _hidl_cb) override; - Return setSampleRate(uint32_t sampleRateHz) override; - Return getChannelMask() override; - Return getSupportedChannelMasks(getSupportedChannelMasks_cb _hidl_cb) override; - Return setChannelMask(AudioChannelMask mask) override; - Return getFormat() override; - Return getSupportedFormats(getSupportedFormats_cb _hidl_cb) override; - Return setFormat(AudioFormat format) override; - Return getAudioProperties(getAudioProperties_cb _hidl_cb) override; - Return addEffect(uint64_t effectId) override; - Return removeEffect(uint64_t effectId) override; - Return standby() override; - Return getDevice() override; - Return setDevice(const DeviceAddress& address) override; - Return setConnectedState(const DeviceAddress& address, bool connected) override; - Return setHwAvSync(uint32_t hwAvSync) override; - Return getParameters( - const hidl_vec& keys, getParameters_cb _hidl_cb) override; - Return setParameters(const hidl_vec& parameters) override; - Return debugDump(const hidl_handle& fd) override; - Return close() override; - - // Methods from ::android::hardware::audio::V2_0::IStreamIn follow. 
- Return getAudioSource(getAudioSource_cb _hidl_cb) override; - Return setGain(float gain) override; - Return prepareForReading( - uint32_t frameSize, uint32_t framesCount, prepareForReading_cb _hidl_cb) override; - Return getInputFramesLost() override; - Return getCapturePosition(getCapturePosition_cb _hidl_cb) override; - Return start() override; - Return stop() override; - Return createMmapBuffer(int32_t minSizeFrames, createMmapBuffer_cb _hidl_cb) override; - Return getMmapPosition(getMmapPosition_cb _hidl_cb) override; - - static Result getCapturePositionImpl( - audio_stream_in_t *stream, uint64_t *frames, uint64_t *time); - - private: - bool mIsClosed; - const sp mDevice; - audio_stream_in_t *mStream; - const sp mStreamCommon; - const sp> mStreamMmap; - std::unique_ptr mCommandMQ; - std::unique_ptr mDataMQ; - std::unique_ptr mStatusMQ; - EventFlag* mEfGroup; - std::atomic mStopReadThread; - sp mReadThread; - - virtual ~StreamIn(); -}; - -} // namespace implementation -} // namespace V2_0 -} // namespace audio -} // namespace hardware -} // namespace android - -#endif // ANDROID_HARDWARE_AUDIO_V2_0_STREAMIN_H diff --git a/audio/2.0/default/StreamOut.h b/audio/2.0/default/StreamOut.h deleted file mode 100644 index 99352bc3057986aab994e895b06f455eabf2fa08..0000000000000000000000000000000000000000 --- a/audio/2.0/default/StreamOut.h +++ /dev/null @@ -1,139 +0,0 @@ -/* - * Copyright (C) 2016 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef ANDROID_HARDWARE_AUDIO_V2_0_STREAMOUT_H -#define ANDROID_HARDWARE_AUDIO_V2_0_STREAMOUT_H - -#include -#include - -#include -#include -#include -#include -#include -#include - -#include "Device.h" -#include "Stream.h" - -namespace android { -namespace hardware { -namespace audio { -namespace V2_0 { -namespace implementation { - -using ::android::hardware::audio::common::V2_0::AudioChannelMask; -using ::android::hardware::audio::common::V2_0::AudioDevice; -using ::android::hardware::audio::common::V2_0::AudioFormat; -using ::android::hardware::audio::V2_0::AudioDrain; -using ::android::hardware::audio::V2_0::DeviceAddress; -using ::android::hardware::audio::V2_0::IStream; -using ::android::hardware::audio::V2_0::IStreamOut; -using ::android::hardware::audio::V2_0::IStreamOutCallback; -using ::android::hardware::audio::V2_0::ParameterValue; -using ::android::hardware::audio::V2_0::Result; -using ::android::hardware::audio::V2_0::TimeSpec; -using ::android::hardware::Return; -using ::android::hardware::Void; -using ::android::hardware::hidl_vec; -using ::android::hardware::hidl_string; -using ::android::sp; - -struct StreamOut : public IStreamOut { - typedef MessageQueue CommandMQ; - typedef MessageQueue DataMQ; - typedef MessageQueue StatusMQ; - - StreamOut(const sp& device, audio_stream_out_t* stream); - - // Methods from ::android::hardware::audio::V2_0::IStream follow. 
- Return getFrameSize() override; - Return getFrameCount() override; - Return getBufferSize() override; - Return getSampleRate() override; - Return getSupportedSampleRates(getSupportedSampleRates_cb _hidl_cb) override; - Return setSampleRate(uint32_t sampleRateHz) override; - Return getChannelMask() override; - Return getSupportedChannelMasks(getSupportedChannelMasks_cb _hidl_cb) override; - Return setChannelMask(AudioChannelMask mask) override; - Return getFormat() override; - Return getSupportedFormats(getSupportedFormats_cb _hidl_cb) override; - Return setFormat(AudioFormat format) override; - Return getAudioProperties(getAudioProperties_cb _hidl_cb) override; - Return addEffect(uint64_t effectId) override; - Return removeEffect(uint64_t effectId) override; - Return standby() override; - Return getDevice() override; - Return setDevice(const DeviceAddress& address) override; - Return setConnectedState(const DeviceAddress& address, bool connected) override; - Return setHwAvSync(uint32_t hwAvSync) override; - Return getParameters( - const hidl_vec& keys, getParameters_cb _hidl_cb) override; - Return setParameters(const hidl_vec& parameters) override; - Return debugDump(const hidl_handle& fd) override; - Return close() override; - - // Methods from ::android::hardware::audio::V2_0::IStreamOut follow. - Return getLatency() override; - Return setVolume(float left, float right) override; - Return prepareForWriting( - uint32_t frameSize, uint32_t framesCount, prepareForWriting_cb _hidl_cb) override; - Return getRenderPosition(getRenderPosition_cb _hidl_cb) override; - Return getNextWriteTimestamp(getNextWriteTimestamp_cb _hidl_cb) override; - Return setCallback(const sp& callback) override; - Return clearCallback() override; - Return supportsPauseAndResume(supportsPauseAndResume_cb _hidl_cb) override; - Return pause() override; - Return resume() override; - Return supportsDrain() override; - Return drain(AudioDrain type) override; - Return flush() override; - Return getPresentationPosition(getPresentationPosition_cb _hidl_cb) override; - Return start() override; - Return stop() override; - Return createMmapBuffer(int32_t minSizeFrames, createMmapBuffer_cb _hidl_cb) override; - Return getMmapPosition(getMmapPosition_cb _hidl_cb) override; - - static Result getPresentationPositionImpl( - audio_stream_out_t *stream, uint64_t *frames, TimeSpec *timeStamp); - - private: - bool mIsClosed; - const sp mDevice; - audio_stream_out_t *mStream; - const sp mStreamCommon; - const sp> mStreamMmap; - sp mCallback; - std::unique_ptr mCommandMQ; - std::unique_ptr mDataMQ; - std::unique_ptr mStatusMQ; - EventFlag* mEfGroup; - std::atomic mStopWriteThread; - sp mWriteThread; - - virtual ~StreamOut(); - - static int asyncCallback(stream_callback_event_t event, void *param, void *cookie); -}; - -} // namespace implementation -} // namespace V2_0 -} // namespace audio -} // namespace hardware -} // namespace android - -#endif // ANDROID_HARDWARE_AUDIO_V2_0_STREAMOUT_H diff --git a/audio/2.0/default/service.cpp b/audio/2.0/default/service.cpp deleted file mode 100644 index 29065234f8d50b0bed27478fcdf04dc85e1bc740..0000000000000000000000000000000000000000 --- a/audio/2.0/default/service.cpp +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright (C) 2016 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#define LOG_TAG "audiohalservice" - -#include -#include -#include -#include -#include -#include - -using android::hardware::configureRpcThreadpool; -using android::hardware::joinRpcThreadpool; -using android::hardware::registerPassthroughServiceImplementation; - -using android::hardware::audio::effect::V2_0::IEffectsFactory; -using android::hardware::audio::V2_0::IDevicesFactory; -using android::hardware::soundtrigger::V2_0::ISoundTriggerHw; -using android::hardware::registerPassthroughServiceImplementation; -using android::hardware::bluetooth::a2dp::V1_0::IBluetoothAudioOffload; - -using android::OK; - -int main(int /* argc */, char* /* argv */ []) { - configureRpcThreadpool(16, true /*callerWillJoin*/); - android::status_t status; - status = registerPassthroughServiceImplementation(); - LOG_ALWAYS_FATAL_IF(status != OK, "Error while registering audio service: %d", status); - status = registerPassthroughServiceImplementation(); - LOG_ALWAYS_FATAL_IF(status != OK, "Error while registering audio effects service: %d", status); - // Soundtrigger might be not present. - status = registerPassthroughServiceImplementation(); - ALOGE_IF(status != OK, "Error while registering soundtrigger service: %d", status); - status = registerPassthroughServiceImplementation(); - ALOGE_IF(status != OK, "Error while registering bluetooth_audio service: %d", status); - joinRpcThreadpool(); - return status; -} diff --git a/audio/4.0/Android.bp b/audio/4.0/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..6e217d9572eaa7aad6e7c5284562de289d665a50 --- /dev/null +++ b/audio/4.0/Android.bp @@ -0,0 +1,48 @@ +// This file is autogenerated by hidl-gen -Landroidbp. + +hidl_interface { + name: "android.hardware.audio@4.0", + root: "android.hardware", + vndk: { + enabled: true, + }, + srcs: [ + "types.hal", + "IDevice.hal", + "IDevicesFactory.hal", + "IPrimaryDevice.hal", + "IStream.hal", + "IStreamIn.hal", + "IStreamOut.hal", + "IStreamOutCallback.hal", + ], + interfaces: [ + "android.hardware.audio.common@4.0", + "android.hardware.audio.effect@4.0", + "android.hidl.base@1.0", + ], + types: [ + "AudioDrain", + "AudioFrequencyResponsePoint", + "AudioMicrophoneChannelMapping", + "AudioMicrophoneCoordinate", + "AudioMicrophoneDirectionality", + "AudioMicrophoneLocation", + "DeviceAddress", + "MessageQueueFlagBits", + "MicrophoneInfo", + "MmapBufferFlag", + "MmapBufferInfo", + "MmapPosition", + "ParameterValue", + "PlaybackTrackMetadata", + "RecordTrackMetadata", + "Result", + "SinkMetadata", + "SourceMetadata", + "TimeSpec", + ], + gen_java: false, + gen_java_constants: true, +} + diff --git a/audio/4.0/IDevice.hal b/audio/4.0/IDevice.hal new file mode 100644 index 0000000000000000000000000000000000000000..1bb5abaa61aa9b2ac4360b71f1277ea5972b3f30 --- /dev/null +++ b/audio/4.0/IDevice.hal @@ -0,0 +1,282 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.audio@4.0; + +import android.hardware.audio.common@4.0; +import IStreamIn; +import IStreamOut; + +interface IDevice { + /** + * Returns whether the audio hardware interface has been initialized. + * + * @return retval OK on success, NOT_INITIALIZED on failure. + */ + initCheck() generates (Result retval); + + /** + * Sets the audio volume for all audio activities other than voice call. If + * NOT_SUPPORTED is returned, the software mixer will emulate this + * capability. + * + * @param volume 1.0f means unity, 0.0f is zero. + * @return retval operation completion status. + */ + setMasterVolume(float volume) generates (Result retval); + + /** + * Get the current master volume value for the HAL, if the HAL supports + * master volume control. For example, AudioFlinger will query this value + * from the primary audio HAL when the service starts and use the value for + * setting the initial master volume across all HALs. HALs which do not + * support this method must return NOT_SUPPORTED in 'retval'. + * + * @return retval operation completion status. + * @return volume 1.0f means unity, 0.0f is zero. + */ + getMasterVolume() generates (Result retval, float volume); + + /** + * Sets microphone muting state. + * + * @param mute whether microphone is muted. + * @return retval operation completion status. + */ + setMicMute(bool mute) generates (Result retval); + + /** + * Gets whether microphone is muted. + * + * @return retval operation completion status. + * @return mute whether microphone is muted. + */ + getMicMute() generates (Result retval, bool mute); + + /** + * Set the audio mute status for all audio activities. If the return value + * is NOT_SUPPORTED, the software mixer will emulate this capability. + * + * @param mute whether audio is muted. + * @return retval operation completion status. + */ + setMasterMute(bool mute) generates (Result retval); + + /** + * Get the current master mute status for the HAL, if the HAL supports + * master mute control. AudioFlinger will query this value from the primary + * audio HAL when the service starts and use the value for setting the + * initial master mute across all HALs. HAL must indicate that the feature + * is not supported by returning NOT_SUPPORTED status. + * + * @return retval operation completion status. + * @return mute whether audio is muted. + */ + getMasterMute() generates (Result retval, bool mute); + + /** + * Returns audio input buffer size according to parameters passed or + * INVALID_ARGUMENTS if one of the parameters is not supported. + * + * @param config audio configuration. + * @return retval operation completion status. + * @return bufferSize input buffer size in bytes. + */ + getInputBufferSize(AudioConfig config) + generates (Result retval, uint64_t bufferSize); + + /** + * This method creates and opens the audio hardware output stream. + * If the stream can not be opened with the proposed audio config, + * HAL must provide suggested values for the audio config. + * + * @param ioHandle handle assigned by AudioFlinger. + * @param device device type and (if needed) address. 
+ * @param config stream configuration. + * @param flags additional flags. + * @param sourceMetadata Description of the audio that will be played. + May be used by implementations to configure hardware effects. + * @return retval operation completion status. + * @return outStream created output stream. + * @return suggestedConfig in case of invalid parameters, suggested config. + */ + openOutputStream( + AudioIoHandle ioHandle, + DeviceAddress device, + AudioConfig config, + bitfield flags, + SourceMetadata sourceMetadata) generates ( + Result retval, + IStreamOut outStream, + AudioConfig suggestedConfig); + + /** + * This method creates and opens the audio hardware input stream. + * If the stream can not be opened with the proposed audio config, + * HAL must provide suggested values for the audio config. + * + * @param ioHandle handle assigned by AudioFlinger. + * @param device device type and (if needed) address. + * @param config stream configuration. + * @param flags additional flags. + * @param sinkMetadata Description of the audio that is suggested by the client. + * May be used by implementations to configure hardware effects. + * @return retval operation completion status. + * @return inStream in case of success, created input stream. + * @return suggestedConfig in case of invalid parameters, suggested config. + */ + openInputStream( + AudioIoHandle ioHandle, + DeviceAddress device, + AudioConfig config, + bitfield flags, + SinkMetadata sinkMetadata) generates ( + Result retval, + IStreamIn inStream, + AudioConfig suggestedConfig); + + /** + * Returns whether HAL supports audio patches. + * + * @return supports true if audio patches are supported. + */ + supportsAudioPatches() generates (bool supports); + + /** + * Creates an audio patch between several source and sink ports. The handle + * is allocated by the HAL and must be unique for this audio HAL module. + * + * @param sources patch sources. + * @param sinks patch sinks. + * @return retval operation completion status. + * @return patch created patch handle. + */ + createAudioPatch(vec sources, vec sinks) + generates (Result retval, AudioPatchHandle patch); + + /** + * Release an audio patch. + * + * @param patch patch handle. + * @return retval operation completion status. + */ + releaseAudioPatch(AudioPatchHandle patch) generates (Result retval); + + /** + * Returns the list of supported attributes for a given audio port. + * + * As input, 'port' contains the information (type, role, address etc...) + * needed by the HAL to identify the port. + * + * As output, 'resultPort' contains possible attributes (sampling rates, + * formats, channel masks, gain controllers...) for this port. + * + * @param port port identifier. + * @return retval operation completion status. + * @return resultPort port descriptor with all parameters filled up. + */ + getAudioPort(AudioPort port) + generates (Result retval, AudioPort resultPort); + + /** + * Set audio port configuration. + * + * @param config audio port configuration. + * @return retval operation completion status. + */ + setAudioPortConfig(AudioPortConfig config) generates (Result retval); + + /** + * Gets the HW synchronization source of the device. Calling this method is + * equivalent to getting AUDIO_PARAMETER_HW_AV_SYNC on the legacy HAL. + * Optional method + * + * @return retval operation completion status: OK or NOT_SUPPORTED. 
+ * @return hwAvSync HW synchronization source + */ + getHwAvSync() generates (Result retval, AudioHwSync hwAvSync); + + /** + * Sets whether the screen is on. Calling this method is equivalent to + * setting AUDIO_PARAMETER_KEY_SCREEN_STATE on the legacy HAL. + * Optional method + * + * @param turnedOn whether the screen is turned on. + * @return retval operation completion status. + */ + setScreenState(bool turnedOn) generates (Result retval); + + /** + * Generic method for retrieving vendor-specific parameter values. + * The framework does not interpret the parameters, they are passed + * in an opaque manner between a vendor application and HAL. + * + * Multiple parameters can be retrieved at the same time. + * The implementation should return as many requested parameters + * as possible, even if one or more is not supported + * + * @param context provides more information about the request + * @param keys keys of the requested parameters + * @return retval operation completion status. + * OK must be returned if keys is empty. + * NOT_SUPPORTED must be returned if at least one key is unknown. + * @return parameters parameter key value pairs. + * Must contain the value of all requested keys if retval == OK + */ + getParameters(vec context, vec keys) + generates (Result retval, vec parameters); + + /** + * Generic method for setting vendor-specific parameter values. + * The framework does not interpret the parameters, they are passed + * in an opaque manner between a vendor application and HAL. + * + * Multiple parameters can be set at the same time though this is + * discouraged as it make failure analysis harder. + * + * If possible, a failed setParameters should not impact the platform state. + * + * @param context provides more information about the request + * @param parameters parameter key value pairs. + * @return retval operation completion status. + * All parameters must be successfully set for OK to be returned + */ + setParameters(vec context, vec parameters) + generates (Result retval); + + /** + * Returns an array with available microphones in device. + * + * @return retval INVALID_STATE if the call is not successful, + * OK otherwise. + * + * @return microphones array with microphones info + */ + getMicrophones() + generates(Result retval, vec microphones); + + /** + * Notifies the device module about the connection state of an input/output + * device attached to it. Calling this method is equivalent to setting + * AUDIO_PARAMETER_DEVICE_[DIS]CONNECT on the legacy HAL. + * + * @param address audio device specification. + * @param connected whether the device is connected. + * @return retval operation completion status. + */ + setConnectedState(DeviceAddress address, bool connected) + generates (Result retval); +}; diff --git a/audio/4.0/IDevicesFactory.hal b/audio/4.0/IDevicesFactory.hal new file mode 100644 index 0000000000000000000000000000000000000000..489294e5cda35509be57b73ca14c03e532c46db3 --- /dev/null +++ b/audio/4.0/IDevicesFactory.hal @@ -0,0 +1,70 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.audio@4.0; + +import android.hardware.audio.common@4.0; +import IDevice; +import IPrimaryDevice; + +/** This factory allows a HAL implementation to be split in multiple independent + * devices (called module in the pre-treble API). + * Note that this division is arbitrary and implementation are free + * to only have a Primary. + * The framework will query the devices according to audio_policy_configuration.xml + * + * Each device name is arbitrary, provided by the vendor's audio_policy_configuration.xml + * and only used to identify a device in this factory. + * The framework must not interpret the name, treating it as a vendor opaque data + * with the following exception: + * - the "r_submix" device that must be present to support policyMixes (Eg: Android projected). + * Note that this Device is included by default in a build derived from AOSP. + * + * Note that on AOSP Oreo (including MR1) the "a2dp" module is not using this API + * but is loaded directly from the system partition using the legacy API + * due to limitations with the Bluetooth framework. + */ +interface IDevicesFactory { + + /** + * Opens an audio device. To close the device, it is necessary to release + * references to the returned device object. + * + * @param device device name. + * @return retval operation completion status. Returns INVALID_ARGUMENTS + * if there is no corresponding hardware module found, + * NOT_INITIALIZED if an error occured while opening the hardware + * module. + * @return result the interface for the created device. + */ + openDevice(string device) generates (Result retval, IDevice result); + + /** + * Opens the Primary audio device that must be present. + * This function is not optional and must return successfully the primary device. + * + * This device must have the name "primary". + * + * The telephony stack uses this device to control the audio during a voice call. + * + * @return retval operation completion status. Must be SUCCESS. + * For debuging, return INVALID_ARGUMENTS if there is no corresponding + * hardware module found, NOT_INITIALIZED if an error occurred + * while opening the hardware module. + * @return result the interface for the created device. + */ + openPrimaryDevice() generates (Result retval, IPrimaryDevice result); +}; diff --git a/audio/4.0/IPrimaryDevice.hal b/audio/4.0/IPrimaryDevice.hal new file mode 100644 index 0000000000000000000000000000000000000000..258c28b6930747ae44175229ade9c256fdd29f8a --- /dev/null +++ b/audio/4.0/IPrimaryDevice.hal @@ -0,0 +1,195 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.audio@4.0; + +import android.hardware.audio.common@4.0; +import IDevice; + +interface IPrimaryDevice extends IDevice { + /** + * Sets the audio volume of a voice call. + * + * @param volume 1.0f means unity, 0.0f is zero. 
+ * @return retval operation completion status. + */ + setVoiceVolume(float volume) generates (Result retval); + + /** + * This method is used to notify the HAL about audio mode changes. + * + * @param mode new mode. + * @return retval operation completion status. + */ + setMode(AudioMode mode) generates (Result retval); + + /** + * Sets the name of the current BT SCO headset. Calling this method + * is equivalent to setting legacy "bt_headset_name" parameter. + * The BT SCO headset name must only be used for debugging purposes. + * Optional method + * + * @param name the name of the current BT SCO headset (can be empty). + * @return retval operation completion status. + */ + setBtScoHeadsetDebugName(string name) generates (Result retval); + + /** + * Gets whether BT SCO Noise Reduction and Echo Cancellation are enabled. + * Calling this method is equivalent to getting AUDIO_PARAMETER_KEY_BT_NREC + * on the legacy HAL. + * + * @return retval operation completion status. + * @return enabled whether BT SCO NR + EC are enabled. + */ + getBtScoNrecEnabled() generates (Result retval, bool enabled); + + /** + * Sets whether BT SCO Noise Reduction and Echo Cancellation are enabled. + * Calling this method is equivalent to setting AUDIO_PARAMETER_KEY_BT_NREC + * on the legacy HAL. + * Optional method + * + * @param enabled whether BT SCO NR + EC are enabled. + * @return retval operation completion status. + */ + setBtScoNrecEnabled(bool enabled) generates (Result retval); + + /** + * Gets whether BT SCO Wideband mode is enabled. Calling this method is + * equivalent to getting AUDIO_PARAMETER_KEY_BT_SCO_WB on the legacy HAL. + * + * @return retval operation completion status. + * @return enabled whether BT Wideband is enabled. + */ + getBtScoWidebandEnabled() generates (Result retval, bool enabled); + + /** + * Sets whether BT SCO Wideband mode is enabled. Calling this method is + * equivalent to setting AUDIO_PARAMETER_KEY_BT_SCO_WB on the legacy HAL. + * Optional method + * + * @param enabled whether BT Wideband is enabled. + * @return retval operation completion status. + */ + setBtScoWidebandEnabled(bool enabled) generates (Result retval); + + /** + * Gets whether BT HFP (Hands-Free Profile) is enabled. Calling this method + * is equivalent to getting "hfp_enable" parameter value on the legacy HAL. + * + * @return retval operation completion status. + * @return enabled whether BT HFP is enabled. + */ + getBtHfpEnabled() generates (Result retval, bool enabled); + + /** + * Sets whether BT HFP (Hands-Free Profile) is enabled. Calling this method + * is equivalent to setting "hfp_enable" parameter on the legacy HAL. + * Optional method + * + * @param enabled whether BT HFP is enabled. + * @return retval operation completion status. + */ + setBtHfpEnabled(bool enabled) generates (Result retval); + + /** + * Sets the sampling rate of BT HFP (Hands-Free Profile). Calling this + * method is equivalent to setting "hfp_set_sampling_rate" parameter + * on the legacy HAL. + * Optional method + * + * @param sampleRateHz sample rate in Hz. + * @return retval operation completion status. + */ + setBtHfpSampleRate(uint32_t sampleRateHz) generates (Result retval); + + /** + * Sets the current output volume Hz for BT HFP (Hands-Free Profile). + * Calling this method is equivalent to setting "hfp_volume" parameter value + * on the legacy HAL (except that legacy HAL implementations expect + * an integer value in the range from 0 to 15.) + * Optional method + * + * @param volume 1.0f means unity, 0.0f is zero. 
+ * @return retval operation completion status. + */ + setBtHfpVolume(float volume) generates (Result retval); + + enum TtyMode : int32_t { + OFF, + VCO, + HCO, + FULL + }; + + /** + * Gets current TTY mode selection. Calling this method is equivalent to + * getting AUDIO_PARAMETER_KEY_TTY_MODE on the legacy HAL. + * + * @return retval operation completion status. + * @return mode TTY mode. + */ + getTtyMode() generates (Result retval, TtyMode mode); + + /** + * Sets current TTY mode. Calling this method is equivalent to setting + * AUDIO_PARAMETER_KEY_TTY_MODE on the legacy HAL. + * + * @param mode TTY mode. + * @return retval operation completion status. + */ + setTtyMode(TtyMode mode) generates (Result retval); + + /** + * Gets whether Hearing Aid Compatibility - Telecoil (HAC-T) mode is + * enabled. Calling this method is equivalent to getting + * AUDIO_PARAMETER_KEY_HAC on the legacy HAL. + * + * @return retval operation completion status. + * @return enabled whether HAC mode is enabled. + */ + getHacEnabled() generates (Result retval, bool enabled); + + /** + * Sets whether Hearing Aid Compatibility - Telecoil (HAC-T) mode is + * enabled. Calling this method is equivalent to setting + * AUDIO_PARAMETER_KEY_HAC on the legacy HAL. + * Optional method + * + * @param enabled whether HAC mode is enabled. + * @return retval operation completion status. + */ + setHacEnabled(bool enabled) generates (Result retval); + + enum Rotation : int32_t { + DEG_0, + DEG_90, + DEG_180, + DEG_270 + }; + + /** + * Updates HAL on the current rotation of the device relative to natural + * orientation. Calling this method is equivalent to setting legacy + * parameter "rotation". + * + * @param rotation rotation in degrees relative to natural device + * orientation. + * @return retval operation completion status. + */ + updateRotation(Rotation rotation) generates (Result retval); +}; diff --git a/audio/4.0/IStream.hal b/audio/4.0/IStream.hal new file mode 100644 index 0000000000000000000000000000000000000000..e7a4b7db954093d6b969ffc39164a82da1fa83a3 --- /dev/null +++ b/audio/4.0/IStream.hal @@ -0,0 +1,310 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.audio@4.0; + +import android.hardware.audio.common@4.0; +import android.hardware.audio.effect@4.0::IEffect; + +interface IStream { + /** + * Return the frame size (number of bytes per sample). + * + * @return frameSize frame size in bytes. + */ + getFrameSize() generates (uint64_t frameSize); + + /** + * Return the frame count of the buffer. Calling this method is equivalent + * to getting AUDIO_PARAMETER_STREAM_FRAME_COUNT on the legacy HAL. + * + * @return count frame count. + */ + getFrameCount() generates (uint64_t count); + + /** + * Return the size of input/output buffer in bytes for this stream. + * It must be a multiple of the frame size. + * + * @return buffer buffer size in bytes. 
+ */ + getBufferSize() generates (uint64_t bufferSize); + + /** + * Return the sampling rate in Hz. + * + * @return sampleRateHz sample rate in Hz. + */ + getSampleRate() generates (uint32_t sampleRateHz); + + /** + * Return supported native sampling rates of the stream for a given format. + * A supported native sample rate is a sample rate that can be efficiently + * played by the hardware (typically without sample-rate conversions). + * + * This function is only called for dynamic profile. If called for + * non-dynamic profile is should return NOT_SUPPORTED or the same list + * as in audio_policy_configuration.xml. + * + * Calling this method is equivalent to getting + * AUDIO_PARAMETER_STREAM_SUP_SAMPLING_RATES on the legacy HAL. + * + * + * @param format audio format for which the sample rates are supported. + * @return retval operation completion status. + * Must be OK if the format is supported. + * @return sampleRateHz supported sample rates. + */ + getSupportedSampleRates(AudioFormat format) + generates (Result retval, vec sampleRates); + + /** + * Sets the sampling rate of the stream. Calling this method is equivalent + * to setting AUDIO_PARAMETER_STREAM_SAMPLING_RATE on the legacy HAL. + * Optional method. If implemented, only called on a stopped stream. + * + * @param sampleRateHz sample rate in Hz. + * @return retval operation completion status. + */ + setSampleRate(uint32_t sampleRateHz) generates (Result retval); + + /** + * Return the channel mask of the stream. + * + * @return mask channel mask. + */ + getChannelMask() generates (bitfield mask); + + /** + * Return supported channel masks of the stream. Calling this method is + * equivalent to getting AUDIO_PARAMETER_STREAM_SUP_CHANNELS on the legacy + * HAL. + * + * @param format audio format for which the channel masks are supported. + * @return retval operation completion status. + * Must be OK if the format is supported. + * @return masks supported audio masks. + */ + getSupportedChannelMasks(AudioFormat format) + generates (Result retval, vec> masks); + + /** + * Sets the channel mask of the stream. Calling this method is equivalent to + * setting AUDIO_PARAMETER_STREAM_CHANNELS on the legacy HAL. + * Optional method + * + * @param format audio format. + * @return retval operation completion status. + */ + setChannelMask(bitfield mask) generates (Result retval); + + /** + * Return the audio format of the stream. + * + * @return format audio format. + */ + getFormat() generates (AudioFormat format); + + /** + * Return supported audio formats of the stream. Calling this method is + * equivalent to getting AUDIO_PARAMETER_STREAM_SUP_FORMATS on the legacy + * HAL. + * + * @return formats supported audio formats. + */ + getSupportedFormats() generates (vec formats); + + /** + * Sets the audio format of the stream. Calling this method is equivalent to + * setting AUDIO_PARAMETER_STREAM_FORMAT on the legacy HAL. + * Optional method + * + * @param format audio format. + * @return retval operation completion status. + */ + setFormat(AudioFormat format) generates (Result retval); + + /** + * Convenience method for retrieving several stream parameters in + * one transaction. + * + * @return sampleRateHz sample rate in Hz. + * @return mask channel mask. + * @return format audio format. + */ + getAudioProperties() generates ( + uint32_t sampleRateHz, bitfield mask, AudioFormat format); + + /** + * Applies audio effect to the stream. 
+ * + * @param effectId effect ID (obtained from IEffectsFactory.createEffect) of + * the effect to apply. + * @return retval operation completion status. + */ + addEffect(uint64_t effectId) generates (Result retval); + + /** + * Stops application of the effect to the stream. + * + * @param effectId effect ID (obtained from IEffectsFactory.createEffect) of + * the effect to remove. + * @return retval operation completion status. + */ + removeEffect(uint64_t effectId) generates (Result retval); + + /** + * Put the audio hardware input/output into standby mode. + * Driver must exit from standby mode at the next I/O operation. + * + * @return retval operation completion status. + */ + standby() generates (Result retval); + + /** + * Return the set of devices which this stream is connected to. + * Optional method + * + * @return retval operation completion status: OK or NOT_SUPPORTED. + * @return device set of devices which this stream is connected to. + */ + getDevices() generates (Result retval, vec devices); + + /** + * Connects the stream to one or multiple devices. + * + * This method must only be used for HALs that do not support + * 'IDevice.createAudioPatch' method. Calling this method is + * equivalent to setting AUDIO_PARAMETER_STREAM_ROUTING preceeded + * with a device address in the legacy HAL interface. + * + * @param address device to connect the stream to. + * @return retval operation completion status. + */ + setDevices(vec devices) generates (Result retval); + + /** + * Sets the HW synchronization source. Calling this method is equivalent to + * setting AUDIO_PARAMETER_STREAM_HW_AV_SYNC on the legacy HAL. + * Optional method + * + * @param hwAvSync HW synchronization source + * @return retval operation completion status. + */ + setHwAvSync(AudioHwSync hwAvSync) generates (Result retval); + + /** + * Generic method for retrieving vendor-specific parameter values. + * The framework does not interpret the parameters, they are passed + * in an opaque manner between a vendor application and HAL. + * + * Multiple parameters can be retrieved at the same time. + * The implementation should return as many requested parameters + * as possible, even if one or more is not supported + * + * @param context provides more information about the request + * @param keys keys of the requested parameters + * @return retval operation completion status. + * OK must be returned if keys is empty. + * NOT_SUPPORTED must be returned if at least one key is unknown. + * @return parameters parameter key value pairs. + * Must contain the value of all requested keys if retval == OK + */ + getParameters(vec context, vec keys) + generates (Result retval, vec parameters); + + /** + * Generic method for setting vendor-specific parameter values. + * The framework does not interpret the parameters, they are passed + * in an opaque manner between a vendor application and HAL. + * + * Multiple parameters can be set at the same time though this is + * discouraged as it make failure analysis harder. + * + * If possible, a failed setParameters should not impact the platform state. + * + * @param context provides more information about the request + * @param parameters parameter key value pairs. + * @return retval operation completion status. + * All parameters must be successfully set for OK to be returned + */ + setParameters(vec context, vec parameters) + generates (Result retval); + + /** + * Called by the framework to start a stream operating in mmap mode. 
+ * createMmapBuffer() must be called before calling start(). + * Function only implemented by streams operating in mmap mode. + * + * @return retval OK in case the success. + * NOT_SUPPORTED on non mmap mode streams + * INVALID_STATE if called out of sequence + */ + start() generates (Result retval); + + /** + * Called by the framework to stop a stream operating in mmap mode. + * Function only implemented by streams operating in mmap mode. + * + * @return retval OK in case the succes. + * NOT_SUPPORTED on non mmap mode streams + * INVALID_STATE if called out of sequence + */ + stop() generates (Result retval) ; + + /** + * Called by the framework to retrieve information on the mmap buffer used for audio + * samples transfer. + * Function only implemented by streams operating in mmap mode. + * + * @param minSizeFrames minimum buffer size requested. The actual buffer + * size returned in struct MmapBufferInfo can be larger. + * @return retval OK in case the success. + * NOT_SUPPORTED on non mmap mode streams + * NOT_INITIALIZED in case of memory allocation error + * INVALID_ARGUMENTS if the requested buffer size is too large + * INVALID_STATE if called out of sequence + * @return info a MmapBufferInfo struct containing information on the MMMAP buffer created. + */ + createMmapBuffer(int32_t minSizeFrames) + generates (Result retval, MmapBufferInfo info); + + /** + * Called by the framework to read current read/write position in the mmap buffer + * with associated time stamp. + * Function only implemented by streams operating in mmap mode. + * + * @return retval OK in case the success. + * NOT_SUPPORTED on non mmap mode streams + * INVALID_STATE if called out of sequence + * @return position a MmapPosition struct containing current HW read/write position in frames + * with associated time stamp. + */ + getMmapPosition() + generates (Result retval, MmapPosition position); + + /** + * Called by the framework to deinitialize the stream and free up + * all the currently allocated resources. It is recommended to close + * the stream on the client side as soon as it is becomes unused. + * + * @return retval OK in case the success. + * NOT_SUPPORTED if called on IStream instead of input or + * output stream interface. + * INVALID_STATE if the stream was already closed. + */ + close() generates (Result retval); +}; diff --git a/audio/4.0/IStreamIn.hal b/audio/4.0/IStreamIn.hal new file mode 100644 index 0000000000000000000000000000000000000000..247e826db9434c1d8ae7284d7d123c9bc4b63d40 --- /dev/null +++ b/audio/4.0/IStreamIn.hal @@ -0,0 +1,168 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.audio@4.0; + +import android.hardware.audio.common@4.0; +import IStream; + +interface IStreamIn extends IStream { + /** + * Returns the source descriptor of the input stream. Calling this method is + * equivalent to getting AUDIO_PARAMETER_STREAM_INPUT_SOURCE on the legacy + * HAL. 
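The mmap call order spelled out above (createMmapBuffer before start, position polling while running, stop afterwards) could look roughly like this on the client side, assuming the generated C++ proxy and a stream already opened in mmap mode; buffer handling is omitted:

#include <android/hardware/audio/4.0/IStream.h>
#include <utils/StrongPointer.h>

using ::android::sp;
using ::android::hardware::audio::V4_0::IStream;
using ::android::hardware::audio::V4_0::MmapBufferInfo;
using ::android::hardware::audio::V4_0::MmapPosition;
using ::android::hardware::audio::V4_0::Result;

// Sketch of the documented call order for an mmap-mode stream:
// createMmapBuffer() first, then start(), position polling, stop().
bool runMmapSession(const sp<IStream>& stream) {
    Result res = Result::NOT_SUPPORTED;
    MmapBufferInfo info;
    stream->createMmapBuffer(
        256 /* minSizeFrames; the HAL may return a larger buffer */,
        [&](Result r, const MmapBufferInfo& i) { res = r; info = i; });
    if (res != Result::OK) return false;  // e.g. NOT_SUPPORTED on non-mmap streams

    Result started = stream->start();  // INVALID_STATE here usually means wrong call order
    if (started != Result::OK) return false;

    MmapPosition position;
    stream->getMmapPosition([&](Result r, const MmapPosition& p) { res = r; position = p; });
    // ... map info.sharedMemory and exchange samples using 'position' ...

    stream->stop();
    return res == Result::OK;
}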
+ * Optional method + * + * @return retval operation completion status. + * @return source audio source. + */ + getAudioSource() generates (Result retval, AudioSource source); + + /** + * Set the input gain for the audio driver. + * Optional method + * + * @param gain 1.0f is unity, 0.0f is zero. + * @result retval operation completion status. + */ + setGain(float gain) generates (Result retval); + + /** + * Commands that can be executed on the driver reader thread. + */ + enum ReadCommand : int32_t { + READ, + GET_CAPTURE_POSITION + }; + + /** + * Data structure passed to the driver for executing commands + * on the driver reader thread. + */ + struct ReadParameters { + ReadCommand command; // discriminator + union Params { + uint64_t read; // READ command, amount of bytes to read, >= 0. + // No parameters for GET_CAPTURE_POSITION. + } params; + }; + + /** + * Data structure passed back to the client via status message queue + * of 'read' operation. + * + * Possible values of 'retval' field: + * - OK, read operation was successful; + * - INVALID_ARGUMENTS, stream was not configured properly; + * - INVALID_STATE, stream is in a state that doesn't allow reads. + */ + struct ReadStatus { + Result retval; + ReadCommand replyTo; // discriminator + union Reply { + uint64_t read; // READ command, amount of bytes read, >= 0. + struct CapturePosition { // same as generated by getCapturePosition. + uint64_t frames; + uint64_t time; + } capturePosition; + } reply; + }; + + /** + * Called when the metadata of the stream's sink has been changed. + * @param sinkMetadata Description of the audio that is suggested by the clients. + */ + updateSinkMetadata(SinkMetadata sinkMetadata); + + /** + * Set up required transports for receiving audio buffers from the driver. + * + * The transport consists of three message queues: + * -- command queue is used to instruct the reader thread what operation + * to perform; + * -- data queue is used for passing audio data from the driver + * to the client; + * -- status queue is used for reporting operation status + * (e.g. amount of bytes actually read or error code). + * + * The driver operates on a dedicated thread. The client must ensure that + * the thread is given an appropriate priority and assigned to correct + * scheduler and cgroup. For this purpose, the method returns identifiers + * of the driver thread. + * + * @param frameSize the size of a single frame, in bytes. + * @param framesCount the number of frames in a buffer. + * @param threadPriority priority of the driver thread. + * @return retval OK if both message queues were created successfully. + * INVALID_STATE if the method was already called. + * INVALID_ARGUMENTS if there was a problem setting up + * the queues. + * @return commandMQ a message queue used for passing commands. + * @return dataMQ a message queue used for passing audio data in the format + * specified at the stream opening. + * @return statusMQ a message queue used for passing status from the driver + * using ReadStatus structures. + * @return threadInfo identifiers of the driver's dedicated thread. + */ + prepareForReading(uint32_t frameSize, uint32_t framesCount) + generates ( + Result retval, + fmq_sync commandMQ, + fmq_sync dataMQ, + fmq_sync statusMQ, + ThreadInfo threadInfo); + + /** + * Return the amount of input frames lost in the audio driver since the last + * call of this function. + * + * Audio driver is expected to reset the value to 0 and restart counting + * upon returning the current value by this function call. 
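A hedged client-side sketch of the read transport just described: the command queue carries ReadParameters, the data queue raw bytes, and the status queue ReadStatus, with the NOT_EMPTY/NOT_FULL bits from types.hal used to wake the reader thread. The libfmq wrappers, the wake/wait handshake, and the helper names are assumptions based on the usual FMQ usage pattern, not part of this change.

#include <algorithm>
#include <cstdint>
#include <memory>
#include <sys/types.h>

#include <android/hardware/audio/4.0/IStreamIn.h>
#include <fmq/EventFlag.h>
#include <fmq/MessageQueue.h>
#include <utils/Errors.h>

using ::android::sp;
using ::android::hardware::EventFlag;
using ::android::hardware::kSynchronizedReadWrite;
using ::android::hardware::MessageQueue;
using ::android::hardware::audio::common::V4_0::ThreadInfo;
using ::android::hardware::audio::V4_0::IStreamIn;
using ::android::hardware::audio::V4_0::MessageQueueFlagBits;
using ::android::hardware::audio::V4_0::Result;

using CommandMQ = MessageQueue<IStreamIn::ReadParameters, kSynchronizedReadWrite>;
using DataMQ = MessageQueue<uint8_t, kSynchronizedReadWrite>;
using StatusMQ = MessageQueue<IStreamIn::ReadStatus, kSynchronizedReadWrite>;

// The three queues plus the event flag used to wake the driver's reader thread.
struct ReadTransport {
    std::unique_ptr<CommandMQ> command;
    std::unique_ptr<DataMQ> data;
    std::unique_ptr<StatusMQ> status;
    EventFlag* efGroup = nullptr;
};

// One-time setup: map the descriptors returned by prepareForReading().
bool setUpReadTransport(const sp<IStreamIn>& stream, uint32_t frameSize,
                        uint32_t framesCount, ReadTransport* t) {
    Result retval = Result::NOT_INITIALIZED;
    stream->prepareForReading(
        frameSize, framesCount,
        [&](Result r, const CommandMQ::Descriptor& c, const DataMQ::Descriptor& d,
            const StatusMQ::Descriptor& s, const ThreadInfo& /*threadInfo*/) {
            retval = r;
            if (r != Result::OK) return;
            t->command.reset(new CommandMQ(c));
            t->data.reset(new DataMQ(d));
            t->status.reset(new StatusMQ(s));
        });
    if (retval != Result::OK || !t->data->isValid()) return false;
    return EventFlag::createEventFlag(t->data->getEventFlagWord(), &t->efGroup) == ::android::OK;
}

// One 'read' exchange: command in, wake the reader thread, then data and status out.
ssize_t readOnce(ReadTransport* t, void* buffer, size_t bytes) {
    IStreamIn::ReadParameters cmd;
    cmd.command = IStreamIn::ReadCommand::READ;
    cmd.params.read = bytes;
    if (!t->command->write(&cmd)) return -1;
    t->efGroup->wake(static_cast<uint32_t>(MessageQueueFlagBits::NOT_EMPTY));

    uint32_t efState = 0;
    t->efGroup->wait(static_cast<uint32_t>(MessageQueueFlagBits::NOT_FULL), &efState);
    IStreamIn::ReadStatus status;
    if (!t->status->read(&status) || status.retval != Result::OK) return -1;
    size_t toRead = std::min<size_t>(status.reply.read, bytes);
    return t->data->read(static_cast<uint8_t*>(buffer), toRead) ? static_cast<ssize_t>(toRead) : -1;
}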
Such loss + * typically occurs when the user space process is blocked longer than the + * capacity of audio driver buffers. + * + * @return framesLost the number of input audio frames lost. + */ + getInputFramesLost() generates (uint32_t framesLost); + + /** + * Return a recent count of the number of audio frames received and the + * clock time associated with that frame count. + * + * @return retval INVALID_STATE if the device is not ready/available, + * NOT_SUPPORTED if the command is not supported, + * OK otherwise. + * @return frames the total frame count received. This must be as early in + * the capture pipeline as possible. In general, frames + * must be non-negative and must not go "backwards". + * @return time is the clock monotonic time when frames was measured. In + * general, time must be a positive quantity and must not + * go "backwards". + */ + getCapturePosition() + generates (Result retval, uint64_t frames, uint64_t time); + + /** + * Returns an array with active microphones in the stream. + * + * @return retval INVALID_STATE if the call is not successful, + * OK otherwise. + * + * @return microphones array with microphones info + */ + getActiveMicrophones() + generates(Result retval, vec microphones); +}; diff --git a/audio/4.0/IStreamOut.hal b/audio/4.0/IStreamOut.hal new file mode 100644 index 0000000000000000000000000000000000000000..65eba6028b1ec767e250d321c8026dd0fe05f916 --- /dev/null +++ b/audio/4.0/IStreamOut.hal @@ -0,0 +1,279 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.audio@4.0; + +import android.hardware.audio.common@4.0; +import IStream; +import IStreamOutCallback; + +interface IStreamOut extends IStream { + /** + * Return the audio hardware driver estimated latency in milliseconds. + * + * @return latencyMs latency in milliseconds. + */ + getLatency() generates (uint32_t latencyMs); + + /** + * This method is used in situations where audio mixing is done in the + * hardware. This method serves as a direct interface with hardware, + * allowing to directly set the volume as apposed to via the framework. + * This method might produce multiple PCM outputs or hardware accelerated + * codecs, such as MP3 or AAC. + * Optional method + * + * @param left left channel attenuation, 1.0f is unity, 0.0f is zero. + * @param right right channel attenuation, 1.0f is unity, 0.0f is zero. + * @return retval operation completion status. + * If a volume is outside [0,1], return INVALID_ARGUMENTS + */ + setVolume(float left, float right) generates (Result retval); + + /** + * Commands that can be executed on the driver writer thread. + */ + enum WriteCommand : int32_t { + WRITE, + GET_PRESENTATION_POSITION, + GET_LATENCY + }; + + /** + * Data structure passed back to the client via status message queue + * of 'write' operation. 
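As a small illustration of the capture position query above: a client that keeps its own running count of consumed frames can estimate its backlog relative to the HAL. Sketch only; the bookkeeping variable and helper name are assumptions.

#include <cstdint>

#include <android/hardware/audio/4.0/IStreamIn.h>

using ::android::sp;
using ::android::hardware::audio::V4_0::IStreamIn;
using ::android::hardware::audio::V4_0::Result;

// Sketch: frames already captured by the HAL that the client has not read yet.
// 'framesConsumed' is the client's own count of frames it has read so far.
int64_t captureBacklogFrames(const sp<IStreamIn>& stream, uint64_t framesConsumed) {
    Result res = Result::NOT_SUPPORTED;
    uint64_t halFrames = 0;
    uint64_t halTime = 0;
    stream->getCapturePosition([&](Result r, uint64_t frames, uint64_t time) {
        res = r;
        halFrames = frames;
        halTime = time;  // monotonic clock reading paired with 'frames'
    });
    (void)halTime;
    if (res != Result::OK) return -1;
    return static_cast<int64_t>(halFrames) - static_cast<int64_t>(framesConsumed);
}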
+ * + * Possible values of 'retval' field: + * - OK, write operation was successful; + * - INVALID_ARGUMENTS, stream was not configured properly; + * - INVALID_STATE, stream is in a state that doesn't allow writes; + * - INVALID_OPERATION, retrieving presentation position isn't supported. + */ + struct WriteStatus { + Result retval; + WriteCommand replyTo; // discriminator + union Reply { + uint64_t written; // WRITE command, amount of bytes written, >= 0. + struct PresentationPosition { // same as generated by + uint64_t frames; // getPresentationPosition. + TimeSpec timeStamp; + } presentationPosition; + uint32_t latencyMs; // Same as generated by getLatency. + } reply; + }; + + /** + * Called when the metadata of the stream's source has been changed. + * @param sourceMetadata Description of the audio that is played by the clients. + */ + updateSourceMetadata(SourceMetadata sourceMetadata); + + /** + * Set up required transports for passing audio buffers to the driver. + * + * The transport consists of three message queues: + * -- command queue is used to instruct the writer thread what operation + * to perform; + * -- data queue is used for passing audio data from the client + * to the driver; + * -- status queue is used for reporting operation status + * (e.g. amount of bytes actually written or error code). + * + * The driver operates on a dedicated thread. The client must ensure that + * the thread is given an appropriate priority and assigned to correct + * scheduler and cgroup. For this purpose, the method returns identifiers + * of the driver thread. + * + * @param frameSize the size of a single frame, in bytes. + * @param framesCount the number of frames in a buffer. + * @return retval OK if both message queues were created successfully. + * INVALID_STATE if the method was already called. + * INVALID_ARGUMENTS if there was a problem setting up + * the queues. + * @return commandMQ a message queue used for passing commands. + * @return dataMQ a message queue used for passing audio data in the format + * specified at the stream opening. + * @return statusMQ a message queue used for passing status from the driver + * using WriteStatus structures. + * @return threadInfo identifiers of the driver's dedicated thread. + */ + prepareForWriting(uint32_t frameSize, uint32_t framesCount) + generates ( + Result retval, + fmq_sync commandMQ, + fmq_sync dataMQ, + fmq_sync statusMQ, + ThreadInfo threadInfo); + + /** + * Return the number of audio frames written by the audio DSP to DAC since + * the output has exited standby. + * Optional method + * + * @return retval operation completion status. + * @return dspFrames number of audio frames written. + */ + getRenderPosition() generates (Result retval, uint32_t dspFrames); + + /** + * Get the local time at which the next write to the audio driver will be + * presented. The units are microseconds, where the epoch is decided by the + * local audio HAL. + * Optional method + * + * @return retval operation completion status. + * @return timestampUs time of the next write. + */ + getNextWriteTimestamp() generates (Result retval, int64_t timestampUs); + + /** + * Set the callback interface for notifying completion of non-blocking + * write and drain. + * + * Calling this function implies that all future 'write' and 'drain' + * must be non-blocking and use the callback to signal completion. 
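The write transport mirrors the read transport: audio data goes into the data queue first, then a WriteCommand, and the writer thread answers with a WriteStatus telling how much it consumed. A hedged sketch reusing the same assumptions (libfmq wrappers, event-flag handshake, helper names) as the read example earlier:

#include <cstdint>
#include <memory>
#include <sys/types.h>

#include <android/hardware/audio/4.0/IStreamOut.h>
#include <fmq/EventFlag.h>
#include <fmq/MessageQueue.h>

using ::android::hardware::EventFlag;
using ::android::hardware::kSynchronizedReadWrite;
using ::android::hardware::MessageQueue;
using ::android::hardware::audio::V4_0::IStreamOut;
using ::android::hardware::audio::V4_0::MessageQueueFlagBits;
using ::android::hardware::audio::V4_0::Result;

// Queues mapped from the descriptors returned by prepareForWriting(),
// set up the same way as the read transport sketched for IStreamIn.
struct WriteTransport {
    std::unique_ptr<MessageQueue<IStreamOut::WriteCommand, kSynchronizedReadWrite>> command;
    std::unique_ptr<MessageQueue<uint8_t, kSynchronizedReadWrite>> data;
    std::unique_ptr<MessageQueue<IStreamOut::WriteStatus, kSynchronizedReadWrite>> status;
    EventFlag* efGroup = nullptr;
};

// One 'write' exchange: data first, then the WRITE command, then wait for status.
ssize_t writeOnce(WriteTransport* t, const void* buffer, size_t bytes) {
    if (!t->data->write(static_cast<const uint8_t*>(buffer), bytes)) return -1;
    IStreamOut::WriteCommand cmd = IStreamOut::WriteCommand::WRITE;
    if (!t->command->write(&cmd)) return -1;
    t->efGroup->wake(static_cast<uint32_t>(MessageQueueFlagBits::NOT_EMPTY));

    uint32_t efState = 0;
    t->efGroup->wait(static_cast<uint32_t>(MessageQueueFlagBits::NOT_FULL), &efState);
    IStreamOut::WriteStatus status;
    if (!t->status->read(&status) || status.retval != Result::OK) return -1;
    return static_cast<ssize_t>(status.reply.written);  // bytes the writer thread consumed
}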
+ * + * 'clearCallback' method needs to be called in order to release the local + * callback proxy on the server side and thus dereference the callback + * implementation on the client side. + * + * @return retval operation completion status. + */ + setCallback(IStreamOutCallback callback) generates (Result retval); + + /** + * Clears the callback previously set via 'setCallback' method. + * + * Warning: failure to call this method results in callback implementation + * on the client side being held until the HAL server termination. + * + * If no callback was previously set, the method should be a no-op + * and return OK. + * + * @return retval operation completion status: OK or NOT_SUPPORTED. + */ + clearCallback() generates (Result retval); + + /** + * Returns whether HAL supports pausing and resuming of streams. + * + * @return supportsPause true if pausing is supported. + * @return supportsResume true if resume is supported. + */ + supportsPauseAndResume() + generates (bool supportsPause, bool supportsResume); + + /** + * Notifies to the audio driver to stop playback however the queued buffers + * are retained by the hardware. Useful for implementing pause/resume. Empty + * implementation if not supported however must be implemented for hardware + * with non-trivial latency. In the pause state, some audio hardware may + * still be using power. Client code may consider calling 'suspend' after a + * timeout to prevent that excess power usage. + * + * Implementation of this function is mandatory for offloaded playback. + * + * @return retval operation completion status. + */ + pause() generates (Result retval); + + /** + * Notifies to the audio driver to resume playback following a pause. + * Returns error INVALID_STATE if called without matching pause. + * + * Implementation of this function is mandatory for offloaded playback. + * + * @return retval operation completion status. + */ + resume() generates (Result retval); + + /** + * Returns whether HAL supports draining of streams. + * + * @return supports true if draining is supported. + */ + supportsDrain() generates (bool supports); + + /** + * Requests notification when data buffered by the driver/hardware has been + * played. If 'setCallback' has previously been called to enable + * non-blocking mode, then 'drain' must not block, instead it must return + * quickly and completion of the drain is notified through the callback. If + * 'setCallback' has not been called, then 'drain' must block until + * completion. + * + * If 'type' is 'ALL', the drain completes when all previously written data + * has been played. + * + * If 'type' is 'EARLY_NOTIFY', the drain completes shortly before all data + * for the current track has played to allow time for the framework to + * perform a gapless track switch. + * + * Drain must return immediately on 'stop' and 'flush' calls. + * + * Implementation of this function is mandatory for offloaded playback. + * + * @param type type of drain. + * @return retval operation completion status. + */ + drain(AudioDrain type) generates (Result retval); + + /** + * Notifies to the audio driver to flush the queued data. Stream must + * already be paused before calling 'flush'. + * Optional method + * + * Implementation of this function is mandatory for offloaded playback. + * + * @return retval operation completion status. + */ + flush() generates (Result retval); + + /** + * Return a recent count of the number of audio frames presented to an + * external observer. 
This excludes frames which have been written but are + * still in the pipeline. The count is not reset to zero when output enters + * standby. Also returns the value of CLOCK_MONOTONIC as of this + * presentation count. The returned count is expected to be 'recent', but + * does not need to be the most recent possible value. However, the + * associated time must correspond to whatever count is returned. + * + * Example: assume that N+M frames have been presented, where M is a 'small' + * number. Then it is permissible to return N instead of N+M, and the + * timestamp must correspond to N rather than N+M. The terms 'recent' and + * 'small' are not defined. They reflect the quality of the implementation. + * + * Optional method + * + * @return retval operation completion status. + * @return frames count of presented audio frames. + * @return timeStamp associated clock time. + */ + getPresentationPosition() + generates (Result retval, uint64_t frames, TimeSpec timeStamp); + + /** + * Selects a presentation for decoding from a next generation media stream + * (as defined per ETSI TS 103 190-2) and a program within the presentation. + * Optional method + * + * @param presentationId selected audio presentation. + * @param programId refinement for the presentation. + * @return retval operation completion status. + */ + selectPresentation(int32_t presentationId, int32_t programId) + generates (Result retval); +}; diff --git a/audio/4.0/IStreamOutCallback.hal b/audio/4.0/IStreamOutCallback.hal new file mode 100644 index 0000000000000000000000000000000000000000..9a19d325cfdf766ec25240de9d9a623bc1dd399e --- /dev/null +++ b/audio/4.0/IStreamOutCallback.hal @@ -0,0 +1,37 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.audio@4.0; + +/** + * Asynchronous write callback interface. + */ +interface IStreamOutCallback { + /** + * Non blocking write completed. + */ + oneway onWriteReady(); + + /** + * Drain completed. + */ + oneway onDrainReady(); + + /** + * Stream hit an error. + */ + oneway onError(); +}; diff --git a/audio/4.0/config/audio_policy_configuration.xsd b/audio/4.0/config/audio_policy_configuration.xsd new file mode 100644 index 0000000000000000000000000000000000000000..ee17fc983b4b3073c1997b68622a4b9e3afa4ecc --- /dev/null +++ b/audio/4.0/config/audio_policy_configuration.xsd @@ -0,0 +1,564 @@ + + + + + + + + + + + + + + Version of the interface the hal implements. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + There should be one section per audio HW module present on the platform. + Each contains two mandatory tags: “halVersion” and “name”. + The module "name" is the same as in previous .conf file. + Each module must contain the following sections: + - : a list of device descriptors for all + input and output devices accessible via this module. + This contains both permanently attached devices and removable devices. 
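A minimal sketch of the callback side of the non-blocking mode described above, assuming the generated IStreamOutCallback C++ base class; what each handler does on the client side is illustrative:

#include <android/hardware/audio/4.0/IStreamOut.h>
#include <android/hardware/audio/4.0/IStreamOutCallback.h>
#include <utils/StrongPointer.h>

using ::android::sp;
using ::android::hardware::Return;
using ::android::hardware::Void;
using ::android::hardware::audio::V4_0::IStreamOut;
using ::android::hardware::audio::V4_0::IStreamOutCallback;

// Minimal client-side callback: the HAL invokes these oneway methods when a
// non-blocking write or drain completes, or when the stream hits an error.
struct StreamOutCallback : public IStreamOutCallback {
    Return<void> onWriteReady() override {
        // Room is available again; schedule the next write from the client's own thread.
        return Void();
    }
    Return<void> onDrainReady() override {
        // Drain finished (for EARLY_NOTIFY, shortly before the end): start the next track.
        return Void();
    }
    Return<void> onError() override {
        // Typically tear the stream down and reopen it.
        return Void();
    }
};

// After this call every 'write' and 'drain' must be non-blocking; clearCallback()
// must be called before dropping the stream, as warned above.
bool enableNonBlockingMode(const sp<IStreamOut>& stream) {
    sp<IStreamOutCallback> callback = new StreamOutCallback();
    return stream->setCallback(callback).isOk();
}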
+ - : listing all output and input streams exposed by the audio HAL + - : list of possible connections between input + and output devices or between stream and devices. + A is defined by a set of 3 attributes: + -"type": mux|mix means all sources are mutual exclusive (mux) or can be mixed (mix) + -"sink": the sink involved in this route + -"sources": all the sources than can be connected to the sink via this route + - : permanently attached devices. + The attachedDevices section is a list of devices names. + Their names correspond to device names defined in "devicePorts" section. + - is the device to be used when no policy rule applies + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + "|" separated list of audio_output_flags_t or audio_input_flags_t. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + When choosing the mixPort of an audio track, the audioPolicy + first considers the mixPorts with a preferredUsage including + the track AudioUsage preferred . + If non support the track format, the other mixPorts are considered. + Eg: a will receive + the audio of all apps playing with a MEDIA usage. + It may receive audio from ALARM if there are no audio compatible + . + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Comma (",") separated list of channel flags + from audio_channel_mask_t. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The default device will be used if multiple have the same type + and no explicit route request exists for a specific device of + that type. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + List all available sources for a given sink. + + + + + + + + + + + + + + + + + + + + + + Comma separated pair of number. + The fist one is the framework level (between 0 and 100). + The second one is the volume to send to the HAL. + The framework will interpolate volumes not specified. + Their MUST be at least 2 points specified. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Volume section defines a volume curve for a given use case and device category. + It contains a list of points of this curve expressing the attenuation in Millibels + for a given volume index from 0 to 100. + + 0,-9600 + 100,0 + + + It may also reference a reference/@name to avoid duplicating curves. + + + 0,-9600 + 100,0 + + + + + + + + + + + + + + + + + diff --git a/audio/4.0/types.hal b/audio/4.0/types.hal new file mode 100644 index 0000000000000000000000000000000000000000..6b46b10386eca83932d756f76f47191d4c5b33ca --- /dev/null +++ b/audio/4.0/types.hal @@ -0,0 +1,279 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
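The volume curve points described above (pairs of a framework index from 0 to 100 and a millibel value, with at least two points and the remaining indices interpolated by the framework) can be evaluated with a small helper. Linear interpolation is an assumption here; the schema only states that unspecified volumes are interpolated.

#include <algorithm>
#include <utility>
#include <vector>

// Each point is (volume index 0..100, attenuation in millibels), as in the
// 0,-9600 ... 100,0 example curve above.
using CurvePoint = std::pair<int, int>;

// Sketch: linear interpolation between the two neighbouring curve points for a
// given index; not the actual audio policy implementation.
int attenuationMbFor(const std::vector<CurvePoint>& curve, int index) {
    if (curve.size() < 2) return 0;  // the schema requires at least two points
    if (index <= curve.front().first) return curve.front().second;
    if (index >= curve.back().first) return curve.back().second;
    auto hi = std::lower_bound(curve.begin(), curve.end(), index,
                               [](const CurvePoint& p, int i) { return p.first < i; });
    auto lo = hi - 1;
    const int span = hi->first - lo->first;
    return lo->second + (hi->second - lo->second) * (index - lo->first) / span;
}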
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.audio@4.0; + +import android.hardware.audio.common@4.0; + +enum Result : int32_t { + OK, + NOT_INITIALIZED, + INVALID_ARGUMENTS, + INVALID_STATE, + /** + * Methods marked as "Optional method" must return this result value + * if the operation is not supported by HAL. + */ + NOT_SUPPORTED +}; + +@export(name="audio_drain_type_t", value_prefix="AUDIO_DRAIN_") +enum AudioDrain : int32_t { + /** drain() returns when all data has been played. */ + ALL, + /** + * drain() returns a short time before all data from the current track has + * been played to give time for gapless track switch. + */ + EARLY_NOTIFY +}; + +/** + * A substitute for POSIX timespec. + */ +struct TimeSpec { + uint64_t tvSec; // seconds + uint64_t tvNSec; // nanoseconds +}; + +/** + * IEEE 802 MAC address. + */ +typedef uint8_t[6] MacAddress; + +struct ParameterValue { + string key; + string value; +}; + +/** + * Specifies a device in case when several devices of the same type + * can be connected (e.g. BT A2DP, USB). + */ +struct DeviceAddress { + AudioDevice device; // discriminator + union Address { + MacAddress mac; // used for BLUETOOTH_A2DP_* + uint8_t[4] ipv4; // used for IP + struct Alsa { + int32_t card; + int32_t device; + } alsa; // used for USB_* + } address; + string busAddress; // used for BUS + string rSubmixAddress; // used for REMOTE_SUBMIX +}; + +enum MmapBufferFlag : uint32_t { + NONE = 0x0, + /** + * If the buffer can be securely shared to untrusted applications + * through the AAudio exclusive mode. + * Only set this flag if applications are restricted from accessing the + * memory surrounding the audio data buffer by a kernel mechanism. + * See Linux kernel's dma_buf. + */ + APPLICATION_SHAREABLE = 0x1, +}; + +/** + * Mmap buffer descriptor returned by IStream.createMmapBuffer(). + * Used by streams opened in mmap mode. + */ +struct MmapBufferInfo { + /** Mmap memory buffer */ + memory sharedMemory; + /** Total buffer size in frames */ + uint32_t bufferSizeFrames; + /** Transfer size granularity in frames */ + uint32_t burstSizeFrames; + /** Attributes describing the buffer. */ + bitfield flags; +}; + +/** + * Mmap buffer read/write position returned by IStream.getMmapPosition(). + * Used by streams opened in mmap mode. + */ +struct MmapPosition { + int64_t timeNanoseconds; // time stamp in ns, CLOCK_MONOTONIC + int32_t positionFrames; // increasing 32 bit frame count reset when IStream.stop() is called +}; + +/** + * The message queue flags used to synchronize reads and writes from + * message queues used by StreamIn and StreamOut. + */ +enum MessageQueueFlagBits : uint32_t { + NOT_EMPTY = 1 << 0, + NOT_FULL = 1 << 1 +}; + +/** Metadata of a playback track for a StreamOut. */ +struct PlaybackTrackMetadata { + AudioUsage usage; + AudioContentType contentType; + /** + * Positive linear gain applied to the track samples. 0 being muted and 1 is no attenuation, + * 2 means double amplification... + * Must not be negative. + */ + float gain; +}; + +/** Metadatas of the source of a StreamOut. */ +struct SourceMetadata { + vec tracks; +}; + +/** Metadata of a record track for a StreamIn. */ +struct RecordTrackMetadata { + AudioSource source; + /** + * Positive linear gain applied to the track samples. 0 being muted and 1 is no attenuation, + * 2 means double amplification... + * Must not be negative. + */ + float gain; +}; + +/** Metadatas of the source of a StreamIn. 
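A short sketch of filling in the playback metadata defined above and handing it to an output stream via updateSourceMetadata; the enum members used are assumed from common@4.0, and the single-track scenario is illustrative:

#include <android/hardware/audio/4.0/IStreamOut.h>
#include <android/hardware/audio/common/4.0/types.h>

using ::android::sp;
using ::android::hardware::audio::common::V4_0::AudioContentType;
using ::android::hardware::audio::common::V4_0::AudioUsage;
using ::android::hardware::audio::V4_0::IStreamOut;
using ::android::hardware::audio::V4_0::PlaybackTrackMetadata;
using ::android::hardware::audio::V4_0::SourceMetadata;

// Sketch: report to the HAL that one media track is now playing at unity gain
// on this output stream.
void reportSingleMediaTrack(const sp<IStreamOut>& stream) {
    PlaybackTrackMetadata track;
    track.usage = AudioUsage::MEDIA;
    track.contentType = AudioContentType::MUSIC;
    track.gain = 1.0f;  // linear gain: 0 is muted, 1 is no attenuation

    SourceMetadata metadata;
    metadata.tracks.resize(1);
    metadata.tracks[0] = track;
    stream->updateSourceMetadata(metadata);
}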
*/ +struct SinkMetadata { + vec tracks; +}; + +/* + * Microphone information + * + */ + +/** + * A 3D point used to represent position or orientation of a microphone. + * + * Position: Coordinates of the microphone's capsule, in meters, from the + * bottom-left-back corner of the bounding box of android device in natural + * orientation (PORTRAIT for phones, LANDSCAPE for tablets, tvs, etc). + * The orientation musth match the reported by the api Display.getRotation(). + * + * Orientation: Normalized vector to signal the main orientation of the + * microphone's capsule. Magnitude = sqrt(x^2 + y^2 + z^2) = 1 + */ +struct AudioMicrophoneCoordinate { + float x; + float y; + float z; +}; + +/** + * Enum to identify the type of channel mapping for active microphones. + * Used channels further identify if the microphone has any significative + * process (e.g. High Pass Filtering, dynamic compression) + * Simple processing as constant gain adjustment must be DIRECT. + */ +enum AudioMicrophoneChannelMapping : uint32_t { + UNUSED = 0, /* Channel not used */ + DIRECT = 1, /* Channel used and signal not processed */ + PROCESSED = 2, /* Channel used and signal has some process */ +}; + +/** + * Enum to identify locations of microphones in regards to the body of the + * android device. + */ +enum AudioMicrophoneLocation : uint32_t { + UNKNOWN = 0, + MAINBODY = 1, + MAINBODY_MOVABLE = 2, + PERIPHERAL = 3, +}; + +/** + * Identifier to help group related microphones together + * e.g. microphone arrays should belong to the same group + */ +typedef int32_t AudioMicrophoneGroup; + +/** + * Enum with standard polar patterns of microphones + */ +enum AudioMicrophoneDirectionality : uint32_t { + UNKNOWN = 0, + OMNI = 1, + BI_DIRECTIONAL = 2, + CARDIOID = 3, + HYPER_CARDIOID = 4, + SUPER_CARDIOID = 5, +}; + +/** + * A (frequency, level) pair. Used to represent frequency response. + */ +struct AudioFrequencyResponsePoint { + /** In Hz */ + float frequency; + /** In dB */ + float level; +}; + +/** + * Structure used by the HAL to describe microphone's characteristics + * Used by StreamIn and Device + */ +struct MicrophoneInfo { + /** Unique alphanumeric id for microphone. Guaranteed to be the same + * even after rebooting. + */ + string deviceId; + /** + * Device specific information + */ + DeviceAddress deviceAddress; + /** Each element of the vector must describe the channel with the same + * index. + */ + vec channelMapping; + /** Location of the microphone in regard to the body of the device */ + AudioMicrophoneLocation location; + /** Identifier to help group related microphones together + * e.g. microphone arrays should belong to the same group + */ + AudioMicrophoneGroup group; + /** Index of this microphone within the group. + * (group, index) must be unique within the same device. + */ + uint32_t indexInTheGroup; + /** Level in dBFS produced by a 1000 Hz tone at 94 dB SPL */ + float sensitivity; + /** Level in dB of the max SPL supported at 1000 Hz */ + float maxSpl; + /** Level in dB of the min SPL supported at 1000 Hz */ + float minSpl; + /** Standard polar pattern of the microphone */ + AudioMicrophoneDirectionality directionality; + /** Vector with ordered frequency responses (from low to high frequencies) + * with the frequency response of the microphone. + * Levels are in dB, relative to level at 1000 Hz + */ + vec frequencyResponse; + /** Position of the microphone's capsule in meters, from the + * bottom-left-back corner of the bounding box of device. 
+ */ + AudioMicrophoneCoordinate position; + /** Normalized point to signal the main orientation of the microphone's + * capsule. sqrt(x^2 + y^2 + z^2) = 1 + */ + AudioMicrophoneCoordinate orientation; +}; diff --git a/audio/README b/audio/README new file mode 100644 index 0000000000000000000000000000000000000000..1f1e8e3ad89bdc92a5952a109421779a1ab0675e --- /dev/null +++ b/audio/README @@ -0,0 +1,48 @@ +Directory structure of the audio HIDL related code. + +audio +|-- 2.0 <== HIDL (.hal) can not be moved to fit the directory structure +| because that would create a separate HAL +|-- 4.0 <== Version 4.0 of the core API +| +|-- common <== code common to audio core and effect API +| |-- 2.0 +| | |-- default <== code that wraps the legacy API +| | `-- vts <== vts of 2.0 core and effect API common code +| |-- 4.0 +| | |-- default +| | `-- vts +| |-- ... <== The future versions should continue this structure +| | |-- default +| | `-- vts +| `-- all_versions <== code common to all version of both core and effect API +| |-- default +| `-- vts <== vts of core and effect API common version independent code +| +|-- core <== code relative to the core API +| |-- 2.0 <== 2.0 core API code (except .hal, see audio/2.0) +| | |-- default +| | `-- vts +| |-- 4.0 +| | |-- default <== default implementation of the core 4.0 api +| | `-- vts <== vts code of the 4.0 API +| |-- ... +| | |-- default +| | `-- vts +| `-- all_versions +| |-- default +| `-- vts <== vts of core API common version independent code +| +`-- effect <== idem for the effect API + |-- 2.0 + | |-- default + | `-- vts + |-- 4.0 + | |-- default + | `-- vts + |-- ... + | |-- default + | `-- vts + `-- all_versions + |-- default + `-- vts diff --git a/audio/common/2.0/default/Android.bp b/audio/common/2.0/default/Android.bp index 82b38c0877e43f209a41d3f98fe145298d7f7a95..ac66479c93e7b332e1ce94dfc0ab096383e06ee8 100644 --- a/audio/common/2.0/default/Android.bp +++ b/audio/common/2.0/default/Android.bp @@ -21,18 +21,24 @@ cc_library_shared { enabled: true, }, srcs: [ - "EffectMap.cpp", "HidlUtils.cpp", ], export_include_dirs: ["."], + static_libs: [ + ], + shared_libs: [ "liblog", "libutils", "libhidlbase", + "android.hardware.audio.common-util", "android.hardware.audio.common@2.0", ], + export_shared_lib_headers: [ + "android.hardware.audio.common-util" + ], header_libs: [ "libaudio_system_headers", diff --git a/audio/common/2.0/default/HidlUtils.cpp b/audio/common/2.0/default/HidlUtils.cpp index 79cb37cefc3b765518a25d7722bd151e0d30bf90..9771b7bda2576eed012081d0be6d23553599cfe3 100644 --- a/audio/common/2.0/default/HidlUtils.cpp +++ b/audio/common/2.0/default/HidlUtils.cpp @@ -14,324 +14,8 @@ * limitations under the License. 
*/ -#include - #include "HidlUtils.h" -using ::android::hardware::audio::common::V2_0::AudioChannelMask; -using ::android::hardware::audio::common::V2_0::AudioDevice; -using ::android::hardware::audio::common::V2_0::AudioFormat; -using ::android::hardware::audio::common::V2_0::AudioGainMode; -using ::android::hardware::audio::common::V2_0::AudioMixLatencyClass; -using ::android::hardware::audio::common::V2_0::AudioPortConfigMask; -using ::android::hardware::audio::common::V2_0::AudioPortRole; -using ::android::hardware::audio::common::V2_0::AudioPortType; -using ::android::hardware::audio::common::V2_0::AudioSource; -using ::android::hardware::audio::common::V2_0::AudioStreamType; -using ::android::hardware::audio::common::V2_0::AudioUsage; - -namespace android { - -void HidlUtils::audioConfigFromHal(const audio_config_t& halConfig, AudioConfig* config) { - config->sampleRateHz = halConfig.sample_rate; - config->channelMask = AudioChannelMask(halConfig.channel_mask); - config->format = AudioFormat(halConfig.format); - audioOffloadInfoFromHal(halConfig.offload_info, &config->offloadInfo); - config->frameCount = halConfig.frame_count; -} - -void HidlUtils::audioConfigToHal(const AudioConfig& config, audio_config_t* halConfig) { - memset(halConfig, 0, sizeof(audio_config_t)); - halConfig->sample_rate = config.sampleRateHz; - halConfig->channel_mask = static_cast(config.channelMask); - halConfig->format = static_cast(config.format); - audioOffloadInfoToHal(config.offloadInfo, &halConfig->offload_info); - halConfig->frame_count = config.frameCount; -} - -void HidlUtils::audioGainConfigFromHal( - const struct audio_gain_config& halConfig, AudioGainConfig* config) { - config->index = halConfig.index; - config->mode = AudioGainMode(halConfig.mode); - config->channelMask = AudioChannelMask(halConfig.channel_mask); - for (size_t i = 0; i < sizeof(audio_channel_mask_t) * 8; ++i) { - config->values[i] = halConfig.values[i]; - } - config->rampDurationMs = halConfig.ramp_duration_ms; -} - -void HidlUtils::audioGainConfigToHal( - const AudioGainConfig& config, struct audio_gain_config* halConfig) { - halConfig->index = config.index; - halConfig->mode = static_cast(config.mode); - halConfig->channel_mask = static_cast(config.channelMask); - memset(halConfig->values, 0, sizeof(halConfig->values)); - for (size_t i = 0; i < sizeof(audio_channel_mask_t) * 8; ++i) { - halConfig->values[i] = config.values[i]; - } - halConfig->ramp_duration_ms = config.rampDurationMs; -} - -void HidlUtils::audioGainFromHal(const struct audio_gain& halGain, AudioGain* gain) { - gain->mode = AudioGainMode(halGain.mode); - gain->channelMask = AudioChannelMask(halGain.channel_mask); - gain->minValue = halGain.min_value; - gain->maxValue = halGain.max_value; - gain->defaultValue = halGain.default_value; - gain->stepValue = halGain.step_value; - gain->minRampMs = halGain.min_ramp_ms; - gain->maxRampMs = halGain.max_ramp_ms; -} - -void HidlUtils::audioGainToHal(const AudioGain& gain, struct audio_gain* halGain) { - halGain->mode = static_cast(gain.mode); - halGain->channel_mask = static_cast(gain.channelMask); - halGain->min_value = gain.minValue; - halGain->max_value = gain.maxValue; - halGain->default_value = gain.defaultValue; - halGain->step_value = gain.stepValue; - halGain->min_ramp_ms = gain.minRampMs; - halGain->max_ramp_ms = gain.maxRampMs; -} - -void HidlUtils::audioOffloadInfoFromHal( - const audio_offload_info_t& halOffload, AudioOffloadInfo* offload) { - offload->sampleRateHz = halOffload.sample_rate; - 
offload->channelMask = AudioChannelMask(halOffload.channel_mask); - offload->format = AudioFormat(halOffload.format); - offload->streamType = AudioStreamType(halOffload.stream_type); - offload->bitRatePerSecond = halOffload.bit_rate; - offload->durationMicroseconds = halOffload.duration_us; - offload->hasVideo = halOffload.has_video; - offload->isStreaming = halOffload.is_streaming; - offload->bitWidth = halOffload.bit_width; - offload->bufferSize = halOffload.offload_buffer_size; - offload->usage = static_cast(halOffload.usage); -} - -void HidlUtils::audioOffloadInfoToHal( - const AudioOffloadInfo& offload, audio_offload_info_t* halOffload) { - *halOffload = AUDIO_INFO_INITIALIZER; - halOffload->sample_rate = offload.sampleRateHz; - halOffload->channel_mask = static_cast(offload.channelMask); - halOffload->format = static_cast(offload.format); - halOffload->stream_type = static_cast(offload.streamType); - halOffload->bit_rate = offload.bitRatePerSecond; - halOffload->duration_us = offload.durationMicroseconds; - halOffload->has_video = offload.hasVideo; - halOffload->is_streaming = offload.isStreaming; - halOffload->bit_width = offload.bitWidth; - halOffload->offload_buffer_size = offload.bufferSize; - halOffload->usage = static_cast(offload.usage); -} - -void HidlUtils::audioPortConfigFromHal( - const struct audio_port_config& halConfig, AudioPortConfig* config) { - config->id = halConfig.id; - config->role = AudioPortRole(halConfig.role); - config->type = AudioPortType(halConfig.type); - config->configMask = AudioPortConfigMask(halConfig.config_mask); - config->sampleRateHz = halConfig.sample_rate; - config->channelMask = AudioChannelMask(halConfig.channel_mask); - config->format = AudioFormat(halConfig.format); - audioGainConfigFromHal(halConfig.gain, &config->gain); - switch (halConfig.type) { - case AUDIO_PORT_TYPE_NONE: break; - case AUDIO_PORT_TYPE_DEVICE: { - config->ext.device.hwModule = halConfig.ext.device.hw_module; - config->ext.device.type = AudioDevice(halConfig.ext.device.type); - memcpy(config->ext.device.address.data(), - halConfig.ext.device.address, - AUDIO_DEVICE_MAX_ADDRESS_LEN); - break; - } - case AUDIO_PORT_TYPE_MIX: { - config->ext.mix.hwModule = halConfig.ext.mix.hw_module; - config->ext.mix.ioHandle = halConfig.ext.mix.handle; - if (halConfig.role == AUDIO_PORT_ROLE_SOURCE) { - config->ext.mix.useCase.source = AudioSource(halConfig.ext.mix.usecase.source); - } else if (halConfig.role == AUDIO_PORT_ROLE_SINK) { - config->ext.mix.useCase.stream = AudioStreamType(halConfig.ext.mix.usecase.stream); - } - break; - } - case AUDIO_PORT_TYPE_SESSION: { - config->ext.session.session = halConfig.ext.session.session; - break; - } - } -} - -void HidlUtils::audioPortConfigToHal( - const AudioPortConfig& config, struct audio_port_config* halConfig) { - memset(halConfig, 0, sizeof(audio_port_config)); - halConfig->id = config.id; - halConfig->role = static_cast(config.role); - halConfig->type = static_cast(config.type); - halConfig->config_mask = static_cast(config.configMask); - halConfig->sample_rate = config.sampleRateHz; - halConfig->channel_mask = static_cast(config.channelMask); - halConfig->format = static_cast(config.format); - audioGainConfigToHal(config.gain, &halConfig->gain); - switch (config.type) { - case AudioPortType::NONE: break; - case AudioPortType::DEVICE: { - halConfig->ext.device.hw_module = config.ext.device.hwModule; - halConfig->ext.device.type = static_cast(config.ext.device.type); - memcpy(halConfig->ext.device.address, - 
config.ext.device.address.data(), - AUDIO_DEVICE_MAX_ADDRESS_LEN); - break; - } - case AudioPortType::MIX: { - halConfig->ext.mix.hw_module = config.ext.mix.hwModule; - halConfig->ext.mix.handle = config.ext.mix.ioHandle; - if (config.role == AudioPortRole::SOURCE) { - halConfig->ext.mix.usecase.source = - static_cast(config.ext.mix.useCase.source); - } else if (config.role == AudioPortRole::SINK) { - halConfig->ext.mix.usecase.stream = - static_cast(config.ext.mix.useCase.stream); - } - break; - } - case AudioPortType::SESSION: { - halConfig->ext.session.session = - static_cast(config.ext.session.session); - break; - } - } -} - -void HidlUtils::audioPortConfigsFromHal( - unsigned int numHalConfigs, const struct audio_port_config *halConfigs, - hidl_vec *configs) { - configs->resize(numHalConfigs); - for (unsigned int i = 0; i < numHalConfigs; ++i) { - audioPortConfigFromHal(halConfigs[i], &(*configs)[i]); - } -} - -std::unique_ptr HidlUtils::audioPortConfigsToHal( - const hidl_vec& configs) { - std::unique_ptr halConfigs(new audio_port_config[configs.size()]); - for (size_t i = 0; i < configs.size(); ++i) { - audioPortConfigToHal(configs[i], &halConfigs[i]); - } - return halConfigs; -} - -void HidlUtils::audioPortFromHal(const struct audio_port& halPort, AudioPort* port) { - port->id = halPort.id; - port->role = AudioPortRole(halPort.role); - port->type = AudioPortType(halPort.type); - port->name.setToExternal(halPort.name, strlen(halPort.name)); - port->sampleRates.resize(halPort.num_sample_rates); - for (size_t i = 0; i < halPort.num_sample_rates; ++i) { - port->sampleRates[i] = halPort.sample_rates[i]; - } - port->channelMasks.resize(halPort.num_channel_masks); - for (size_t i = 0; i < halPort.num_channel_masks; ++i) { - port->channelMasks[i] = AudioChannelMask(halPort.channel_masks[i]); - } - port->formats.resize(halPort.num_formats); - for (size_t i = 0; i < halPort.num_formats; ++i) { - port->formats[i] = AudioFormat(halPort.formats[i]); - } - port->gains.resize(halPort.num_gains); - for (size_t i = 0; i < halPort.num_gains; ++i) { - audioGainFromHal(halPort.gains[i], &port->gains[i]); - } - audioPortConfigFromHal(halPort.active_config, &port->activeConfig); - switch (halPort.type) { - case AUDIO_PORT_TYPE_NONE: break; - case AUDIO_PORT_TYPE_DEVICE: { - port->ext.device.hwModule = halPort.ext.device.hw_module; - port->ext.device.type = AudioDevice(halPort.ext.device.type); - memcpy(port->ext.device.address.data(), - halPort.ext.device.address, - AUDIO_DEVICE_MAX_ADDRESS_LEN); - break; - } - case AUDIO_PORT_TYPE_MIX: { - port->ext.mix.hwModule = halPort.ext.mix.hw_module; - port->ext.mix.ioHandle = halPort.ext.mix.handle; - port->ext.mix.latencyClass = AudioMixLatencyClass(halPort.ext.mix.latency_class); - break; - } - case AUDIO_PORT_TYPE_SESSION: { - port->ext.session.session = halPort.ext.session.session; - break; - } - } -} - -void HidlUtils::audioPortToHal(const AudioPort& port, struct audio_port* halPort) { - memset(halPort, 0, sizeof(audio_port)); - halPort->id = port.id; - halPort->role = static_cast(port.role); - halPort->type = static_cast(port.type); - memcpy(halPort->name, - port.name.c_str(), - std::min(port.name.size(), static_cast(AUDIO_PORT_MAX_NAME_LEN))); - halPort->num_sample_rates = - std::min(port.sampleRates.size(), static_cast(AUDIO_PORT_MAX_SAMPLING_RATES)); - for (size_t i = 0; i < halPort->num_sample_rates; ++i) { - halPort->sample_rates[i] = port.sampleRates[i]; - } - halPort->num_channel_masks = - std::min(port.channelMasks.size(), 
static_cast(AUDIO_PORT_MAX_CHANNEL_MASKS)); - for (size_t i = 0; i < halPort->num_channel_masks; ++i) { - halPort->channel_masks[i] = static_cast(port.channelMasks[i]); - } - halPort->num_formats = - std::min(port.formats.size(), static_cast(AUDIO_PORT_MAX_FORMATS)); - for (size_t i = 0; i < halPort->num_formats; ++i) { - halPort->formats[i] = static_cast(port.formats[i]); - } - halPort->num_gains = std::min(port.gains.size(), static_cast(AUDIO_PORT_MAX_GAINS)); - for (size_t i = 0; i < halPort->num_gains; ++i) { - audioGainToHal(port.gains[i], &halPort->gains[i]); - } - audioPortConfigToHal(port.activeConfig, &halPort->active_config); - switch (port.type) { - case AudioPortType::NONE: break; - case AudioPortType::DEVICE: { - halPort->ext.device.hw_module = port.ext.device.hwModule; - halPort->ext.device.type = static_cast(port.ext.device.type); - memcpy(halPort->ext.device.address, - port.ext.device.address.data(), - AUDIO_DEVICE_MAX_ADDRESS_LEN); - break; - } - case AudioPortType::MIX: { - halPort->ext.mix.hw_module = port.ext.mix.hwModule; - halPort->ext.mix.handle = port.ext.mix.ioHandle; - halPort->ext.mix.latency_class = - static_cast(port.ext.mix.latencyClass); - break; - } - case AudioPortType::SESSION: { - halPort->ext.session.session = static_cast(port.ext.session.session); - break; - } - } -} - -void HidlUtils::uuidFromHal(const audio_uuid_t& halUuid, Uuid* uuid) { - uuid->timeLow = halUuid.timeLow; - uuid->timeMid = halUuid.timeMid; - uuid->versionAndTimeHigh = halUuid.timeHiAndVersion; - uuid->variantAndClockSeqHigh = halUuid.clockSeq; - memcpy(uuid->node.data(), halUuid.node, uuid->node.size()); -} - -void HidlUtils::uuidToHal(const Uuid& uuid, audio_uuid_t* halUuid) { - halUuid->timeLow = uuid.timeLow; - halUuid->timeMid = uuid.timeMid; - halUuid->timeHiAndVersion = uuid.versionAndTimeHigh; - halUuid->clockSeq = uuid.variantAndClockSeqHigh; - memcpy(halUuid->node, uuid.node.data(), uuid.node.size()); -} - -} // namespace android +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/common/2.0/default/HidlUtils.h b/audio/common/2.0/default/HidlUtils.h index 3fde4d7c8afb3b89087241cc6c2310b10275d6a9..24543b161743a864669cc5597dafdc2e74bbd7b7 100644 --- a/audio/common/2.0/default/HidlUtils.h +++ b/audio/common/2.0/default/HidlUtils.h @@ -17,51 +17,10 @@ #ifndef android_hardware_audio_V2_0_Hidl_Utils_H_ #define android_hardware_audio_V2_0_Hidl_Utils_H_ -#include - #include -#include - -using ::android::hardware::audio::common::V2_0::AudioConfig; -using ::android::hardware::audio::common::V2_0::AudioGain; -using ::android::hardware::audio::common::V2_0::AudioGainConfig; -using ::android::hardware::audio::common::V2_0::AudioOffloadInfo; -using ::android::hardware::audio::common::V2_0::AudioPort; -using ::android::hardware::audio::common::V2_0::AudioPortConfig; -using ::android::hardware::audio::common::V2_0::Uuid; -using ::android::hardware::hidl_vec; - -namespace android { - -class HidlUtils { - public: - static void audioConfigFromHal(const audio_config_t& halConfig, AudioConfig* config); - static void audioConfigToHal(const AudioConfig& config, audio_config_t* halConfig); - static void audioGainConfigFromHal( - const struct audio_gain_config& halConfig, AudioGainConfig* config); - static void audioGainConfigToHal( - const AudioGainConfig& config, struct audio_gain_config* halConfig); - static void audioGainFromHal(const struct audio_gain& halGain, AudioGain* gain); - static void audioGainToHal(const AudioGain& gain, struct audio_gain* halGain); - 
static void audioOffloadInfoFromHal( - const audio_offload_info_t& halOffload, AudioOffloadInfo* offload); - static void audioOffloadInfoToHal( - const AudioOffloadInfo& offload, audio_offload_info_t* halOffload); - static void audioPortConfigFromHal( - const struct audio_port_config& halConfig, AudioPortConfig* config); - static void audioPortConfigToHal( - const AudioPortConfig& config, struct audio_port_config* halConfig); - static void audioPortConfigsFromHal( - unsigned int numHalConfigs, const struct audio_port_config *halConfigs, - hidl_vec *configs); - static std::unique_ptr audioPortConfigsToHal( - const hidl_vec& configs); - static void audioPortFromHal(const struct audio_port& halPort, AudioPort* port); - static void audioPortToHal(const AudioPort& port, struct audio_port* halPort); - static void uuidFromHal(const audio_uuid_t& halUuid, Uuid* uuid); - static void uuidToHal(const Uuid& uuid, audio_uuid_t* halUuid); -}; -} // namespace android +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION #endif // android_hardware_audio_V2_0_Hidl_Utils_H_ diff --git a/audio/common/2.0/default/VersionUtils.h b/audio/common/2.0/default/VersionUtils.h new file mode 100644 index 0000000000000000000000000000000000000000..60d1f9cb6dd876f763ab1ab94e91ec1a62813051 --- /dev/null +++ b/audio/common/2.0/default/VersionUtils.h @@ -0,0 +1,41 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_AUDIO_EFFECT_VERSION_UTILS_H +#define ANDROID_HARDWARE_AUDIO_EFFECT_VERSION_UTILS_H + +#include + +namespace android { +namespace hardware { +namespace audio { +namespace common { +namespace V2_0 { +namespace implementation { + +typedef common::V2_0::AudioDevice AudioDeviceBitfield; +typedef common::V2_0::AudioChannelMask AudioChannelBitfield; +typedef common::V2_0::AudioOutputFlag AudioOutputFlagBitfield; +typedef common::V2_0::AudioInputFlag AudioInputFlagBitfield; + +} // namespace implementation +} // namespace V2_0 +} // namespace common +} // namespace audio +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_AUDIO_EFFECT_VERSION_UTILS_H diff --git a/audio/common/4.0/Android.bp b/audio/common/4.0/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..9b737dcf55a7b0e3d4436a9dfc99814bcec974c9 --- /dev/null +++ b/audio/common/4.0/Android.bp @@ -0,0 +1,48 @@ +// This file is autogenerated by hidl-gen -Landroidbp. 
+ +hidl_interface { + name: "android.hardware.audio.common@4.0", + root: "android.hardware", + vndk: { + enabled: true, + }, + srcs: [ + "types.hal", + ], + types: [ + "AudioChannelMask", + "AudioConfig", + "AudioContentType", + "AudioDevice", + "AudioFormat", + "AudioGain", + "AudioGainConfig", + "AudioGainMode", + "AudioHandleConsts", + "AudioInputFlag", + "AudioMixLatencyClass", + "AudioMode", + "AudioOffloadInfo", + "AudioOutputFlag", + "AudioPort", + "AudioPortConfig", + "AudioPortConfigDeviceExt", + "AudioPortConfigMask", + "AudioPortConfigSessionExt", + "AudioPortDeviceExt", + "AudioPortMixExt", + "AudioPortRole", + "AudioPortSessionExt", + "AudioPortType", + "AudioSessionConsts", + "AudioSource", + "AudioStreamType", + "AudioUsage", + "FixedChannelCount", + "ThreadInfo", + "Uuid", + ], + gen_java: false, + gen_java_constants: true, +} + diff --git a/audio/common/4.0/default/Android.bp b/audio/common/4.0/default/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..57b2e01f3d2ebb65a8e6ae569062d1137da7d2fc --- /dev/null +++ b/audio/common/4.0/default/Android.bp @@ -0,0 +1,47 @@ +// +// Copyright (C) 2018 The Android Open Source Project +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +cc_library_shared { + name: "android.hardware.audio.common@4.0-util", + defaults: ["hidl_defaults"], + vendor_available: true, + vndk: { + enabled: true, + }, + srcs: [ + "HidlUtils.cpp", + ], + + export_include_dirs: ["."], + + static_libs: [ + ], + + shared_libs: [ + "liblog", + "libutils", + "libhidlbase", + "android.hardware.audio.common-util", + "android.hardware.audio.common@4.0", + ], + export_shared_lib_headers: [ + "android.hardware.audio.common-util" + ], + + header_libs: [ + "libaudio_system_headers", + "libhardware_headers", + ], +} diff --git a/audio/common/4.0/default/HidlUtils.cpp b/audio/common/4.0/default/HidlUtils.cpp new file mode 100644 index 0000000000000000000000000000000000000000..b66eff975a2421feb871858c22ab89463c17e0a1 --- /dev/null +++ b/audio/common/4.0/default/HidlUtils.cpp @@ -0,0 +1,21 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "HidlUtils.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/common/4.0/default/HidlUtils.h b/audio/common/4.0/default/HidlUtils.h new file mode 100644 index 0000000000000000000000000000000000000000..91e6a9e12138daabdcd9f9a5fe37e7fdf76c12c6 --- /dev/null +++ b/audio/common/4.0/default/HidlUtils.h @@ -0,0 +1,26 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef android_hardware_audio_V4_0_Hidl_Utils_H_ +#define android_hardware_audio_V4_0_Hidl_Utils_H_ + +#include + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION + +#endif // android_hardware_audio_V4_0_Hidl_Utils_H_ diff --git a/audio/2.0/default/OWNERS b/audio/common/4.0/default/OWNERS similarity index 100% rename from audio/2.0/default/OWNERS rename to audio/common/4.0/default/OWNERS diff --git a/audio/common/4.0/default/VersionUtils.h b/audio/common/4.0/default/VersionUtils.h new file mode 100644 index 0000000000000000000000000000000000000000..b7f2aec8f9113dd16caea624c33f45728196cfa3 --- /dev/null +++ b/audio/common/4.0/default/VersionUtils.h @@ -0,0 +1,41 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_AUDIO_EFFECT_VERSION_UTILS_H +#define ANDROID_HARDWARE_AUDIO_EFFECT_VERSION_UTILS_H + +#include + +namespace android { +namespace hardware { +namespace audio { +namespace common { +namespace V4_0 { +namespace implementation { + +typedef hidl_bitfield AudioDeviceBitfield; +typedef hidl_bitfield AudioChannelBitfield; +typedef hidl_bitfield AudioOutputFlagBitfield; +typedef hidl_bitfield AudioInputFlagBitfield; + +} // namespace implementation +} // namespace V4_0 +} // namespace common +} // namespace audio +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_AUDIO_EFFECT_VERSION_UTILS_H diff --git a/audio/common/4.0/types.hal b/audio/common/4.0/types.hal new file mode 100644 index 0000000000000000000000000000000000000000..e55491d885cfc563614d119e479469786f6c81fd --- /dev/null +++ b/audio/common/4.0/types.hal @@ -0,0 +1,903 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
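The HidlUtils and VersionUtils changes above collapse the per-version utility code into a single implementation compiled once per HAL version by defining AUDIO_HAL_VERSION before including a shared header (the include targets themselves are stripped in this rendering of the diff). Below is a minimal sketch of how such a version-parameterized header can be structured; apart from AUDIO_HAL_VERSION, the file and helper names are illustrative rather than the actual AOSP ones.

// common_impl.h (illustrative name for a shared implementation header).
// Each versioned HidlUtils.cpp does:
//     #define AUDIO_HAL_VERSION V4_0   (or V2_0)
//     #include "common_impl.h"
//     #undef AUDIO_HAL_VERSION
#ifndef AUDIO_HAL_VERSION
#error "AUDIO_HAL_VERSION must be defined before including this header"
#endif

namespace android {
namespace hardware {
namespace audio {
namespace common {
namespace AUDIO_HAL_VERSION {  // expands to V2_0, V4_0, ...
namespace implementation {

// Version-independent helpers live here and see whichever version's generated
// types are visible in the namespace selected by the macro.
inline constexpr bool kCompiledPerVersion = true;  // placeholder body

}  // namespace implementation
}  // namespace AUDIO_HAL_VERSION
}  // namespace common
}  // namespace audio
}  // namespace hardware
}  // namespace android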
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.audio.common@4.0; + +/* + * + * IDs and Handles + * + */ + +/** + * Handle type for identifying audio sources and sinks. + */ +typedef int32_t AudioIoHandle; + +/** + * Audio hw module handle functions or structures referencing a module. + */ +typedef int32_t AudioModuleHandle; + +/** + * Each port has a unique ID or handle allocated by policy manager. + */ +typedef int32_t AudioPortHandle; + +/** + * Each patch is identified by a handle at the interface used to create that + * patch. For instance, when a patch is created by the audio HAL, the HAL + * allocates and returns a handle. This handle is unique to a given audio HAL + * hardware module. But the same patch receives another system wide unique + * handle allocated by the framework. This unique handle is used for all + * transactions inside the framework. + */ +typedef int32_t AudioPatchHandle; + +/** + * A HW synchronization source returned by the audio HAL. + */ +typedef uint32_t AudioHwSync; + +/** + * Each port has a unique ID or handle allocated by policy manager. + */ +@export(name="") +enum AudioHandleConsts : int32_t { + AUDIO_IO_HANDLE_NONE = 0, + AUDIO_MODULE_HANDLE_NONE = 0, + AUDIO_PORT_HANDLE_NONE = 0, + AUDIO_PATCH_HANDLE_NONE = 0, +}; + +/** + * Commonly used structure for passing unique identifieds (UUID). + * For the definition of UUID, refer to ITU-T X.667 spec. + */ +struct Uuid { + uint32_t timeLow; + uint16_t timeMid; + uint16_t versionAndTimeHigh; + uint16_t variantAndClockSeqHigh; + uint8_t[6] node; +}; + + +/* + * + * Audio streams + * + */ + +/** + * Audio stream type describing the intended use case of a stream. + */ +@export(name="audio_stream_type_t", value_prefix="AUDIO_STREAM_") +enum AudioStreamType : int32_t { + // These values must kept in sync with + // frameworks/base/media/java/android/media/AudioSystem.java + DEFAULT = -1, + MIN = 0, + VOICE_CALL = 0, + SYSTEM = 1, + RING = 2, + MUSIC = 3, + ALARM = 4, + NOTIFICATION = 5, + BLUETOOTH_SCO = 6, + ENFORCED_AUDIBLE = 7, // Sounds that cannot be muted by user and must be + // routed to speaker + DTMF = 8, + TTS = 9, // Transmitted Through Speaker. Plays over speaker + // only, silent on other devices + ACCESSIBILITY = 10, // For accessibility talk back prompts +}; + +@export(name="audio_source_t", value_prefix="AUDIO_SOURCE_") +enum AudioSource : int32_t { + // These values must kept in sync with + // frameworks/base/media/java/android/media/MediaRecorder.java, + // frameworks/av/services/audiopolicy/AudioPolicyService.cpp, + // system/media/audio_effects/include/audio_effects/audio_effects_conf.h + DEFAULT = 0, + MIC = 1, + VOICE_UPLINK = 2, + VOICE_DOWNLINK = 3, + VOICE_CALL = 4, + CAMCORDER = 5, + VOICE_RECOGNITION = 6, + VOICE_COMMUNICATION = 7, + /** + * Source for the mix to be presented remotely. An example of remote + * presentation is Wifi Display where a dongle attached to a TV can be used + * to play the mix captured by this audio source. + */ + REMOTE_SUBMIX = 8, + /** + * Source for unprocessed sound. Usage examples include level measurement + * and raw signal analysis. 
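+     * The capture path typically applies no pre-processing (for example, no
+     * automatic gain control and no noise suppression) to this source.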
+ */ + UNPROCESSED = 9, + + FM_TUNER = 1998, +}; + +typedef int32_t AudioSession; +/** + * Special audio session values. + */ +@export(name="audio_session_t", value_prefix="AUDIO_SESSION_") +enum AudioSessionConsts : int32_t { + /** + * Session for effects attached to a particular output stream + * (value must be less than 0) + */ + OUTPUT_STAGE = -1, + /** + * Session for effects applied to output mix. These effects can + * be moved by audio policy manager to another output stream + * (value must be 0) + */ + OUTPUT_MIX = 0, + /** + * Application does not specify an explicit session ID to be used, and + * requests a new session ID to be allocated. Corresponds to + * AudioManager.AUDIO_SESSION_ID_GENERATE and + * AudioSystem.AUDIO_SESSION_ALLOCATE. + */ + ALLOCATE = 0, + /** + * For use with AudioRecord::start(), this indicates no trigger session. + * It is also used with output tracks and patch tracks, which never have a + * session. + */ + NONE = 0 +}; + +/** + * Audio format is a 32-bit word that consists of: + * main format field (upper 8 bits) + * sub format field (lower 24 bits). + * + * The main format indicates the main codec type. The sub format field indicates + * options and parameters for each format. The sub format is mainly used for + * record to indicate for instance the requested bitrate or profile. It can + * also be used for certain formats to give informations not present in the + * encoded audio stream (e.g. octet alignement for AMR). + */ +@export(name="audio_format_t", value_prefix="AUDIO_FORMAT_") +enum AudioFormat : uint32_t { + INVALID = 0xFFFFFFFFUL, + DEFAULT = 0, + PCM = 0x00000000UL, + MP3 = 0x01000000UL, + AMR_NB = 0x02000000UL, + AMR_WB = 0x03000000UL, + AAC = 0x04000000UL, + /** Deprecated, Use AAC_HE_V1 */ + HE_AAC_V1 = 0x05000000UL, + /** Deprecated, Use AAC_HE_V2 */ + HE_AAC_V2 = 0x06000000UL, + VORBIS = 0x07000000UL, + OPUS = 0x08000000UL, + AC3 = 0x09000000UL, + E_AC3 = 0x0A000000UL, + DTS = 0x0B000000UL, + DTS_HD = 0x0C000000UL, + /** IEC61937 is encoded audio wrapped in 16-bit PCM. 
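+     * (For example, AC-3 or DTS bitstreams carried as S/PDIF data bursts.)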
*/ + IEC61937 = 0x0D000000UL, + DOLBY_TRUEHD = 0x0E000000UL, + EVRC = 0x10000000UL, + EVRCB = 0x11000000UL, + EVRCWB = 0x12000000UL, + EVRCNW = 0x13000000UL, + AAC_ADIF = 0x14000000UL, + WMA = 0x15000000UL, + WMA_PRO = 0x16000000UL, + AMR_WB_PLUS = 0x17000000UL, + MP2 = 0x18000000UL, + QCELP = 0x19000000UL, + DSD = 0x1A000000UL, + FLAC = 0x1B000000UL, + ALAC = 0x1C000000UL, + APE = 0x1D000000UL, + AAC_ADTS = 0x1E000000UL, + SBC = 0x1F000000UL, + APTX = 0x20000000UL, + APTX_HD = 0x21000000UL, + AC4 = 0x22000000UL, + LDAC = 0x23000000UL, + /** Dolby Metadata-enhanced Audio Transmission */ + MAT = 0x24000000UL, + /** Deprecated */ + MAIN_MASK = 0xFF000000UL, + SUB_MASK = 0x00FFFFFFUL, + + /* Subformats */ + PCM_SUB_16_BIT = 0x1, // PCM signed 16 bits + PCM_SUB_8_BIT = 0x2, // PCM unsigned 8 bits + PCM_SUB_32_BIT = 0x3, // PCM signed .31 fixed point + PCM_SUB_8_24_BIT = 0x4, // PCM signed 8.23 fixed point + PCM_SUB_FLOAT = 0x5, // PCM single-precision float pt + PCM_SUB_24_BIT_PACKED = 0x6, // PCM signed .23 fix pt (3 bytes) + + MP3_SUB_NONE = 0x0, + + AMR_SUB_NONE = 0x0, + + AAC_SUB_MAIN = 0x1, + AAC_SUB_LC = 0x2, + AAC_SUB_SSR = 0x4, + AAC_SUB_LTP = 0x8, + AAC_SUB_HE_V1 = 0x10, + AAC_SUB_SCALABLE = 0x20, + AAC_SUB_ERLC = 0x40, + AAC_SUB_LD = 0x80, + AAC_SUB_HE_V2 = 0x100, + AAC_SUB_ELD = 0x200, + AAC_SUB_XHE = 0x300, + + VORBIS_SUB_NONE = 0x0, + + E_AC3_SUB_JOC = 0x1, + + MAT_SUB_1_0 = 0x1, + MAT_SUB_2_0 = 0x2, + MAT_SUB_2_1 = 0x3, + + /* Aliases */ + /** note != AudioFormat.ENCODING_PCM_16BIT */ + PCM_16_BIT = (PCM | PCM_SUB_16_BIT), + /** note != AudioFormat.ENCODING_PCM_8BIT */ + PCM_8_BIT = (PCM | PCM_SUB_8_BIT), + PCM_32_BIT = (PCM | PCM_SUB_32_BIT), + PCM_8_24_BIT = (PCM | PCM_SUB_8_24_BIT), + PCM_FLOAT = (PCM | PCM_SUB_FLOAT), + PCM_24_BIT_PACKED = (PCM | PCM_SUB_24_BIT_PACKED), + AAC_MAIN = (AAC | AAC_SUB_MAIN), + AAC_LC = (AAC | AAC_SUB_LC), + AAC_SSR = (AAC | AAC_SUB_SSR), + AAC_LTP = (AAC | AAC_SUB_LTP), + AAC_HE_V1 = (AAC | AAC_SUB_HE_V1), + AAC_SCALABLE = (AAC | AAC_SUB_SCALABLE), + AAC_ERLC = (AAC | AAC_SUB_ERLC), + AAC_LD = (AAC | AAC_SUB_LD), + AAC_HE_V2 = (AAC | AAC_SUB_HE_V2), + AAC_ELD = (AAC | AAC_SUB_ELD), + AAC_XHE = (AAC | AAC_SUB_XHE), + AAC_ADTS_MAIN = (AAC_ADTS | AAC_SUB_MAIN), + AAC_ADTS_LC = (AAC_ADTS | AAC_SUB_LC), + AAC_ADTS_SSR = (AAC_ADTS | AAC_SUB_SSR), + AAC_ADTS_LTP = (AAC_ADTS | AAC_SUB_LTP), + AAC_ADTS_HE_V1 = (AAC_ADTS | AAC_SUB_HE_V1), + AAC_ADTS_SCALABLE = (AAC_ADTS | AAC_SUB_SCALABLE), + AAC_ADTS_ERLC = (AAC_ADTS | AAC_SUB_ERLC), + AAC_ADTS_LD = (AAC_ADTS | AAC_SUB_LD), + AAC_ADTS_HE_V2 = (AAC_ADTS | AAC_SUB_HE_V2), + AAC_ADTS_ELD = (AAC_ADTS | AAC_SUB_ELD), + AAC_ADTS_XHE = (AAC_ADTS | AAC_SUB_XHE), + E_AC3_JOC = (E_AC3 | E_AC3_SUB_JOC), + MAT_1_0 = (MAT | MAT_SUB_1_0), + MAT_2_0 = (MAT | MAT_SUB_2_0), + MAT_2_1 = (MAT | MAT_SUB_2_1), +}; + +/** + * Usage of these values highlights places in the code that use 2- or 8- channel + * assumptions. + */ +@export(name="") +enum FixedChannelCount : int32_t { + FCC_2 = 2, // This is typically due to legacy implementation of stereo I/O + FCC_8 = 8 // This is typically due to audio mixer and resampler limitations +}; + +/** + * A channel mask per se only defines the presence or absence of a channel, not + * the order. See AUDIO_INTERLEAVE_* for the platform convention of order. + * + * AudioChannelMask is an opaque type and its internal layout should not be + * assumed as it may change in the future. Instead, always use functions + * to examine it. 
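+ * (For instance, do not assume that a particular bit always maps to the same
+ * speaker position: an index-based mask uses the low-order bits differently,
+ * as described below.)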
+ * + * These are the current representations: + * + * REPRESENTATION_POSITION + * is a channel mask representation for position assignment. Each low-order + * bit corresponds to the spatial position of a transducer (output), or + * interpretation of channel (input). The user of a channel mask needs to + * know the context of whether it is for output or input. The constants + * OUT_* or IN_* apply to the bits portion. It is not permitted for no bits + * to be set. + * + * REPRESENTATION_INDEX + * is a channel mask representation for index assignment. Each low-order + * bit corresponds to a selected channel. There is no platform + * interpretation of the various bits. There is no concept of output or + * input. It is not permitted for no bits to be set. + * + * All other representations are reserved for future use. + * + * Warning: current representation distinguishes between input and output, but + * this will not the be case in future revisions of the platform. Wherever there + * is an ambiguity between input and output that is currently resolved by + * checking the channel mask, the implementer should look for ways to fix it + * with additional information outside of the mask. + */ +@export(name="", value_prefix="AUDIO_CHANNEL_") +enum AudioChannelMask : uint32_t { + /** must be 0 for compatibility */ + REPRESENTATION_POSITION = 0, + /** 1 is reserved for future use */ + REPRESENTATION_INDEX = 2, + /* 3 is reserved for future use */ + + /** These can be a complete value of AudioChannelMask */ + NONE = 0x0, + INVALID = 0xC0000000, + + /* + * These can be the bits portion of an AudioChannelMask + * with representation REPRESENTATION_POSITION. + */ + + /** output channels */ + OUT_FRONT_LEFT = 0x1, + OUT_FRONT_RIGHT = 0x2, + OUT_FRONT_CENTER = 0x4, + OUT_LOW_FREQUENCY = 0x8, + OUT_BACK_LEFT = 0x10, + OUT_BACK_RIGHT = 0x20, + OUT_FRONT_LEFT_OF_CENTER = 0x40, + OUT_FRONT_RIGHT_OF_CENTER = 0x80, + OUT_BACK_CENTER = 0x100, + OUT_SIDE_LEFT = 0x200, + OUT_SIDE_RIGHT = 0x400, + OUT_TOP_CENTER = 0x800, + OUT_TOP_FRONT_LEFT = 0x1000, + OUT_TOP_FRONT_CENTER = 0x2000, + OUT_TOP_FRONT_RIGHT = 0x4000, + OUT_TOP_BACK_LEFT = 0x8000, + OUT_TOP_BACK_CENTER = 0x10000, + OUT_TOP_BACK_RIGHT = 0x20000, + OUT_TOP_SIDE_LEFT = 0x40000, + OUT_TOP_SIDE_RIGHT = 0x80000, + + OUT_MONO = OUT_FRONT_LEFT, + OUT_STEREO = (OUT_FRONT_LEFT | OUT_FRONT_RIGHT), + OUT_2POINT1 = (OUT_FRONT_LEFT | OUT_FRONT_RIGHT | OUT_LOW_FREQUENCY), + OUT_2POINT0POINT2 = (OUT_FRONT_LEFT | OUT_FRONT_RIGHT | + OUT_TOP_SIDE_LEFT | OUT_TOP_SIDE_RIGHT), + OUT_2POINT1POINT2 = (OUT_FRONT_LEFT | OUT_FRONT_RIGHT | + OUT_TOP_SIDE_LEFT | OUT_TOP_SIDE_RIGHT | + OUT_LOW_FREQUENCY), + OUT_3POINT0POINT2 = (OUT_FRONT_LEFT | OUT_FRONT_CENTER | OUT_FRONT_RIGHT | + OUT_TOP_SIDE_LEFT | OUT_TOP_SIDE_RIGHT), + OUT_3POINT1POINT2 = (OUT_FRONT_LEFT | OUT_FRONT_CENTER | OUT_FRONT_RIGHT | + OUT_TOP_SIDE_LEFT | OUT_TOP_SIDE_RIGHT | + OUT_LOW_FREQUENCY), + OUT_QUAD = (OUT_FRONT_LEFT | OUT_FRONT_RIGHT | + OUT_BACK_LEFT | OUT_BACK_RIGHT), + OUT_QUAD_BACK = OUT_QUAD, + /** like OUT_QUAD_BACK with *_SIDE_* instead of *_BACK_* */ + OUT_QUAD_SIDE = (OUT_FRONT_LEFT | OUT_FRONT_RIGHT | + OUT_SIDE_LEFT | OUT_SIDE_RIGHT), + OUT_SURROUND = (OUT_FRONT_LEFT | OUT_FRONT_RIGHT | + OUT_FRONT_CENTER | OUT_BACK_CENTER), + OUT_PENTA = (OUT_QUAD | OUT_FRONT_CENTER), + OUT_5POINT1 = (OUT_FRONT_LEFT | OUT_FRONT_RIGHT | + OUT_FRONT_CENTER | OUT_LOW_FREQUENCY | + OUT_BACK_LEFT | OUT_BACK_RIGHT), + OUT_5POINT1_BACK = OUT_5POINT1, + /** like OUT_5POINT1_BACK with *_SIDE_* instead of *_BACK_* */ + 
OUT_5POINT1_SIDE = (OUT_FRONT_LEFT | OUT_FRONT_RIGHT | + OUT_FRONT_CENTER | OUT_LOW_FREQUENCY | + OUT_SIDE_LEFT | OUT_SIDE_RIGHT), + OUT_5POINT1POINT2 = (OUT_5POINT1 | OUT_TOP_SIDE_LEFT | OUT_TOP_SIDE_RIGHT), + OUT_5POINT1POINT4 = (OUT_5POINT1 | + OUT_TOP_FRONT_LEFT | OUT_TOP_FRONT_RIGHT | + OUT_TOP_BACK_LEFT | OUT_TOP_BACK_RIGHT), + OUT_6POINT1 = (OUT_FRONT_LEFT | OUT_FRONT_RIGHT | + OUT_FRONT_CENTER | OUT_LOW_FREQUENCY | + OUT_BACK_LEFT | OUT_BACK_RIGHT | + OUT_BACK_CENTER), + /** matches the correct AudioFormat.CHANNEL_OUT_7POINT1_SURROUND */ + OUT_7POINT1 = (OUT_FRONT_LEFT | OUT_FRONT_RIGHT | + OUT_FRONT_CENTER | OUT_LOW_FREQUENCY | + OUT_BACK_LEFT | OUT_BACK_RIGHT | + OUT_SIDE_LEFT | OUT_SIDE_RIGHT), + OUT_7POINT1POINT2 = (OUT_7POINT1 | OUT_TOP_SIDE_LEFT | OUT_TOP_SIDE_RIGHT), + OUT_7POINT1POINT4 = (OUT_7POINT1 | + OUT_TOP_FRONT_LEFT | OUT_TOP_FRONT_RIGHT | + OUT_TOP_BACK_LEFT | OUT_TOP_BACK_RIGHT), + // Note that the 2.0 OUT_ALL* have been moved to helper functions + + /* These are bits only, not complete values */ + + /** input channels */ + IN_LEFT = 0x4, + IN_RIGHT = 0x8, + IN_FRONT = 0x10, + IN_BACK = 0x20, + IN_LEFT_PROCESSED = 0x40, + IN_RIGHT_PROCESSED = 0x80, + IN_FRONT_PROCESSED = 0x100, + IN_BACK_PROCESSED = 0x200, + IN_PRESSURE = 0x400, + IN_X_AXIS = 0x800, + IN_Y_AXIS = 0x1000, + IN_Z_AXIS = 0x2000, + IN_BACK_LEFT = 0x10000, + IN_BACK_RIGHT = 0x20000, + IN_CENTER = 0x40000, + IN_LOW_FREQUENCY = 0x100000, + IN_TOP_LEFT = 0x200000, + IN_TOP_RIGHT = 0x400000, + + IN_VOICE_UPLINK = 0x4000, + IN_VOICE_DNLINK = 0x8000, + + IN_MONO = IN_FRONT, + IN_STEREO = (IN_LEFT | IN_RIGHT), + IN_FRONT_BACK = (IN_FRONT | IN_BACK), + IN_6 = (IN_LEFT | IN_RIGHT | + IN_FRONT | IN_BACK | + IN_LEFT_PROCESSED | IN_RIGHT_PROCESSED), + IN_2POINT0POINT2 = (IN_LEFT | IN_RIGHT | IN_TOP_LEFT | IN_TOP_RIGHT), + IN_2POINT1POINT2 = (IN_LEFT | IN_RIGHT | IN_TOP_LEFT | IN_TOP_RIGHT | + IN_LOW_FREQUENCY), + IN_3POINT0POINT2 = (IN_LEFT | IN_CENTER | IN_RIGHT | IN_TOP_LEFT | IN_TOP_RIGHT), + IN_3POINT1POINT2 = (IN_LEFT | IN_CENTER | IN_RIGHT | + IN_TOP_LEFT | IN_TOP_RIGHT | IN_LOW_FREQUENCY), + IN_5POINT1 = (IN_LEFT | IN_CENTER | IN_RIGHT | + IN_BACK_LEFT | IN_BACK_RIGHT | IN_LOW_FREQUENCY), + IN_VOICE_UPLINK_MONO = (IN_VOICE_UPLINK | IN_MONO), + IN_VOICE_DNLINK_MONO = (IN_VOICE_DNLINK | IN_MONO), + IN_VOICE_CALL_MONO = (IN_VOICE_UPLINK_MONO | + IN_VOICE_DNLINK_MONO), + // Note that the 2.0 IN_ALL* have been moved to helper functions + + COUNT_MAX = 30, + INDEX_HDR = REPRESENTATION_INDEX << COUNT_MAX, + INDEX_MASK_1 = INDEX_HDR | ((1 << 1) - 1), + INDEX_MASK_2 = INDEX_HDR | ((1 << 2) - 1), + INDEX_MASK_3 = INDEX_HDR | ((1 << 3) - 1), + INDEX_MASK_4 = INDEX_HDR | ((1 << 4) - 1), + INDEX_MASK_5 = INDEX_HDR | ((1 << 5) - 1), + INDEX_MASK_6 = INDEX_HDR | ((1 << 6) - 1), + INDEX_MASK_7 = INDEX_HDR | ((1 << 7) - 1), + INDEX_MASK_8 = INDEX_HDR | ((1 << 8) - 1) +}; + +/** + * Major modes for a mobile device. The current mode setting affects audio + * routing. + */ +@export(name="audio_mode_t", value_prefix="AUDIO_MODE_") +enum AudioMode : int32_t { + NORMAL = 0, + RINGTONE = 1, + /** Calls handled by the telephony stack (Eg: PSTN). */ + IN_CALL = 2, + /** Calls handled by apps (Eg: Hangout). 
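+     * This typically covers VoIP and video-chat use cases, as opposed to
+     * IN_CALL, which is reserved for calls handled by the telephony stack.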
*/ + IN_COMMUNICATION = 3, +}; + +@export(name="", value_prefix="AUDIO_DEVICE_") +enum AudioDevice : uint32_t { + NONE = 0x0, + /** reserved bits */ + BIT_IN = 0x80000000, + BIT_DEFAULT = 0x40000000, + /** output devices */ + OUT_EARPIECE = 0x1, + OUT_SPEAKER = 0x2, + OUT_WIRED_HEADSET = 0x4, + OUT_WIRED_HEADPHONE = 0x8, + OUT_BLUETOOTH_SCO = 0x10, + OUT_BLUETOOTH_SCO_HEADSET = 0x20, + OUT_BLUETOOTH_SCO_CARKIT = 0x40, + OUT_BLUETOOTH_A2DP = 0x80, + OUT_BLUETOOTH_A2DP_HEADPHONES = 0x100, + OUT_BLUETOOTH_A2DP_SPEAKER = 0x200, + OUT_AUX_DIGITAL = 0x400, + OUT_HDMI = OUT_AUX_DIGITAL, + /** uses an analog connection (multiplexed over the USB pins for instance) */ + OUT_ANLG_DOCK_HEADSET = 0x800, + OUT_DGTL_DOCK_HEADSET = 0x1000, + /** USB accessory mode: Android device is USB device and dock is USB host */ + OUT_USB_ACCESSORY = 0x2000, + /** USB host mode: Android device is USB host and dock is USB device */ + OUT_USB_DEVICE = 0x4000, + OUT_REMOTE_SUBMIX = 0x8000, + /** Telephony voice TX path */ + OUT_TELEPHONY_TX = 0x10000, + /** Analog jack with line impedance detected */ + OUT_LINE = 0x20000, + /** HDMI Audio Return Channel */ + OUT_HDMI_ARC = 0x40000, + /** S/PDIF out */ + OUT_SPDIF = 0x80000, + /** FM transmitter out */ + OUT_FM = 0x100000, + /** Line out for av devices */ + OUT_AUX_LINE = 0x200000, + /** limited-output speaker device for acoustic safety */ + OUT_SPEAKER_SAFE = 0x400000, + OUT_IP = 0x800000, + /** audio bus implemented by the audio system (e.g an MOST stereo channel) */ + OUT_BUS = 0x1000000, + OUT_PROXY = 0x2000000, + OUT_USB_HEADSET = 0x4000000, + OUT_HEARING_AID = 0x8000000, + OUT_ECHO_CANCELLER = 0x10000000, + OUT_DEFAULT = BIT_DEFAULT, + // Note that the 2.0 OUT_ALL* have been moved to helper functions + + /** input devices */ + IN_COMMUNICATION = BIT_IN | 0x1, + IN_AMBIENT = BIT_IN | 0x2, + IN_BUILTIN_MIC = BIT_IN | 0x4, + IN_BLUETOOTH_SCO_HEADSET = BIT_IN | 0x8, + IN_WIRED_HEADSET = BIT_IN | 0x10, + IN_AUX_DIGITAL = BIT_IN | 0x20, + IN_HDMI = IN_AUX_DIGITAL, + /** Telephony voice RX path */ + IN_VOICE_CALL = BIT_IN | 0x40, + IN_TELEPHONY_RX = IN_VOICE_CALL, + IN_BACK_MIC = BIT_IN | 0x80, + IN_REMOTE_SUBMIX = BIT_IN | 0x100, + IN_ANLG_DOCK_HEADSET = BIT_IN | 0x200, + IN_DGTL_DOCK_HEADSET = BIT_IN | 0x400, + IN_USB_ACCESSORY = BIT_IN | 0x800, + IN_USB_DEVICE = BIT_IN | 0x1000, + /** FM tuner input */ + IN_FM_TUNER = BIT_IN | 0x2000, + /** TV tuner input */ + IN_TV_TUNER = BIT_IN | 0x4000, + /** Analog jack with line impedance detected */ + IN_LINE = BIT_IN | 0x8000, + /** S/PDIF in */ + IN_SPDIF = BIT_IN | 0x10000, + IN_BLUETOOTH_A2DP = BIT_IN | 0x20000, + IN_LOOPBACK = BIT_IN | 0x40000, + IN_IP = BIT_IN | 0x80000, + /** audio bus implemented by the audio system (e.g an MOST stereo channel) */ + IN_BUS = BIT_IN | 0x100000, + IN_PROXY = BIT_IN | 0x1000000, + IN_USB_HEADSET = BIT_IN | 0x2000000, + IN_BLUETOOTH_BLE = BIT_IN | 0x4000000, + IN_DEFAULT = BIT_IN | BIT_DEFAULT, + + // Note that the 2.0 IN_ALL* have been moved to helper functions +}; + +/** + * The audio output flags serve two purposes: + * + * - when an AudioTrack is created they indicate a "wish" to be connected to an + * output stream with attributes corresponding to the specified flags; + * + * - when present in an output profile descriptor listed for a particular audio + * hardware module, they indicate that an output stream can be opened that + * supports the attributes indicated by the flags. 
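+ * For example, a low-latency client would typically request FAST, while a
+ * player whose compressed stream is decoded by a hardware codec would request
+ * DIRECT | COMPRESS_OFFLOAD (illustrative combinations, not an exhaustive list).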
+ * + * The audio policy manager will try to match the flags in the request + * (when getOuput() is called) to an available output stream. + */ +@export(name="audio_output_flags_t", value_prefix="AUDIO_OUTPUT_FLAG_") +enum AudioOutputFlag : int32_t { + NONE = 0x0, // no attributes + DIRECT = 0x1, // this output directly connects a track + // to one output stream: no software mixer + PRIMARY = 0x2, // this output is the primary output of the device. It is + // unique and must be present. It is opened by default and + // receives routing, audio mode and volume controls related + // to voice calls. + FAST = 0x4, // output supports "fast tracks", defined elsewhere + DEEP_BUFFER = 0x8, // use deep audio buffers + COMPRESS_OFFLOAD = 0x10, // offload playback of compressed streams to + // hardware codec + NON_BLOCKING = 0x20, // use non-blocking write + HW_AV_SYNC = 0x40, // output uses a hardware A/V sync + TTS = 0x80, // output for streams transmitted through speaker at a + // sample rate high enough to accommodate lower-range + // ultrasonic p/b + RAW = 0x100, // minimize signal processing + SYNC = 0x200, // synchronize I/O streams + IEC958_NONAUDIO = 0x400, // Audio stream contains compressed audio in SPDIF + // data bursts, not PCM. + DIRECT_PCM = 0x2000, // Audio stream containing PCM data that needs + // to pass through compress path for DSP post proc. + MMAP_NOIRQ = 0x4000, // output operates in MMAP no IRQ mode. + VOIP_RX = 0x8000, // preferred output for VoIP calls. + /** preferred output for call music */ + INCALL_MUSIC = 0x10000, +}; + +/** + * The audio input flags are analogous to audio output flags. + * Currently they are used only when an AudioRecord is created, + * to indicate a preference to be connected to an input stream with + * attributes corresponding to the specified flags. + */ +@export(name="audio_input_flags_t", value_prefix="AUDIO_INPUT_FLAG_") +enum AudioInputFlag : int32_t { + NONE = 0x0, // no attributes + FAST = 0x1, // prefer an input that supports "fast tracks" + HW_HOTWORD = 0x2, // prefer an input that captures from hw hotword source + RAW = 0x4, // minimize signal processing + SYNC = 0x8, // synchronize I/O streams + MMAP_NOIRQ = 0x10, // input operates in MMAP no IRQ mode. + VOIP_TX = 0x20, // preferred input for VoIP calls. + HW_AV_SYNC = 0x40, // input connected to an output that uses a hardware A/V sync +}; + +@export(name="audio_usage_t", value_prefix="AUDIO_USAGE_") +enum AudioUsage : int32_t { + // These values must kept in sync with + // frameworks/base/media/java/android/media/AudioAttributes.java + // Note that not all framework values are exposed + UNKNOWN = 0, + MEDIA = 1, + VOICE_COMMUNICATION = 2, + VOICE_COMMUNICATION_SIGNALLING = 3, + ALARM = 4, + NOTIFICATION = 5, + NOTIFICATION_TELEPHONY_RINGTONE = 6, + ASSISTANCE_ACCESSIBILITY = 11, + ASSISTANCE_NAVIGATION_GUIDANCE = 12, + ASSISTANCE_SONIFICATION = 13, + GAME = 14, + VIRTUAL_SOURCE = 15, + ASSISTANT = 16, +}; + +/** Type of audio generated by an application. */ +@export(name="audio_content_type_t", value_prefix="AUDIO_CONTENT_TYPE_") +enum AudioContentType : uint32_t { + // Do not change these values without updating their counterparts + // in frameworks/base/media/java/android/media/AudioAttributes.java + UNKNOWN = 0, + SPEECH = 1, + MUSIC = 2, + MOVIE = 3, + SONIFICATION = 4, +}; + +/** + * Additional information about the stream passed to hardware decoders. 
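+ * It is typically supplied as part of AudioConfig (see below) when opening a
+ * stream with the COMPRESS_OFFLOAD output flag.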
+ */ +struct AudioOffloadInfo { + uint32_t sampleRateHz; + bitfield channelMask; + AudioFormat format; + AudioStreamType streamType; + uint32_t bitRatePerSecond; + int64_t durationMicroseconds; // -1 if unknown + bool hasVideo; + bool isStreaming; + uint32_t bitWidth; + uint32_t bufferSize; + AudioUsage usage; +}; + +/** + * Commonly used audio stream configuration parameters. + */ +struct AudioConfig { + uint32_t sampleRateHz; + bitfield channelMask; + AudioFormat format; + AudioOffloadInfo offloadInfo; + uint64_t frameCount; +}; + + +/* + * + * Volume control + * + */ + +/** + * Type of gain control exposed by an audio port. + */ +@export(name="", value_prefix="AUDIO_GAIN_MODE_") +enum AudioGainMode : uint32_t { + JOINT = 0x1, // supports joint channel gain control + CHANNELS = 0x2, // supports separate channel gain control + RAMP = 0x4 // supports gain ramps +}; + +/** + * An audio_gain struct is a representation of a gain stage. + * A gain stage is always attached to an audio port. + */ +struct AudioGain { + bitfield mode; + bitfield channelMask; // channels which gain an be controlled + int32_t minValue; // minimum gain value in millibels + int32_t maxValue; // maximum gain value in millibels + int32_t defaultValue; // default gain value in millibels + uint32_t stepValue; // gain step in millibels + uint32_t minRampMs; // minimum ramp duration in ms + uint32_t maxRampMs; // maximum ramp duration in ms +}; + +/** + * The gain configuration structure is used to get or set the gain values of a + * given port. + */ +struct AudioGainConfig { + int32_t index; // index of the corresponding AudioGain in AudioPort.gains + AudioGainMode mode; + AudioChannelMask channelMask; // channels which gain value follows + /** + * 4 = sizeof(AudioChannelMask), + * 8 is not "FCC_8", so it won't need to be changed for > 8 channels. + * Gain values in millibels for each channel ordered from LSb to MSb in + * channel mask. The number of values is 1 in joint mode or + * popcount(channel_mask). + */ + int32_t[4 * 8] values; + uint32_t rampDurationMs; // ramp duration in ms +}; + + +/* + * + * Routing control + * + */ + +/* + * Types defined here are used to describe an audio source or sink at internal + * framework interfaces (audio policy, patch panel) or at the audio HAL. + * Sink and sources are grouped in a concept of “audio port” representing an + * audio end point at the edge of the system managed by the module exposing + * the interface. + */ + +/** Audio port role: either source or sink */ +@export(name="audio_port_role_t", value_prefix="AUDIO_PORT_ROLE_") +enum AudioPortRole : int32_t { + NONE, + SOURCE, + SINK, +}; + +/** + * Audio port type indicates if it is a session (e.g AudioTrack), a mix (e.g + * PlaybackThread output) or a physical device (e.g OUT_SPEAKER) + */ +@export(name="audio_port_type_t", value_prefix="AUDIO_PORT_TYPE_") +enum AudioPortType : int32_t { + NONE, + DEVICE, + MIX, + SESSION, +}; + +/** + * Extension for audio port configuration structure when the audio port is a + * hardware device. + */ +struct AudioPortConfigDeviceExt { + AudioModuleHandle hwModule; // module the device is attached to + AudioDevice type; // device type (e.g OUT_SPEAKER) + uint8_t[32] address; // device address. "" if N/A +}; + +/** + * Extension for audio port configuration structure when the audio port is an + * audio session. + */ +struct AudioPortConfigSessionExt { + AudioSession session; +}; + +/** + * Flags indicating which fields are to be considered in AudioPortConfig. 
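+ * Only the fields whose corresponding bit is set in AudioPortConfig.configMask
+ * are taken into account; the remaining fields are ignored.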
+ */ +@export(name="", value_prefix="AUDIO_PORT_CONFIG_") +enum AudioPortConfigMask : uint32_t { + SAMPLE_RATE = 0x1, + CHANNEL_MASK = 0x2, + FORMAT = 0x4, + GAIN = 0x8, +}; + +/** + * Audio port configuration structure used to specify a particular configuration + * of an audio port. + */ +struct AudioPortConfig { + AudioPortHandle id; + bitfield configMask; + uint32_t sampleRateHz; + bitfield channelMask; + AudioFormat format; + AudioGainConfig gain; + AudioPortType type; // type is used as a discriminator for Ext union + AudioPortRole role; // role is used as a discriminator for UseCase union + union Ext { + AudioPortConfigDeviceExt device; + struct AudioPortConfigMixExt { + AudioModuleHandle hwModule; // module the stream is attached to + AudioIoHandle ioHandle; // I/O handle of the input/output stream + union UseCase { + AudioStreamType stream; + AudioSource source; + } useCase; + } mix; + AudioPortConfigSessionExt session; + } ext; +}; + +/** + * Extension for audio port structure when the audio port is a hardware device. + */ +struct AudioPortDeviceExt { + AudioModuleHandle hwModule; // module the device is attached to + AudioDevice type; + /** 32 byte string identifying the port. */ + uint8_t[32] address; +}; + +/** + * Latency class of the audio mix. + */ +@export(name="audio_mix_latency_class_t", value_prefix="AUDIO_LATENCY_") +enum AudioMixLatencyClass : int32_t { + LOW, + NORMAL +}; + +struct AudioPortMixExt { + AudioModuleHandle hwModule; // module the stream is attached to + AudioIoHandle ioHandle; // I/O handle of the stream + AudioMixLatencyClass latencyClass; +}; + +/** + * Extension for audio port structure when the audio port is an audio session. + */ +struct AudioPortSessionExt { + AudioSession session; +}; + +struct AudioPort { + AudioPortHandle id; + AudioPortRole role; + string name; + vec sampleRates; + vec> channelMasks; + vec formats; + vec gains; + AudioPortConfig activeConfig; // current audio port configuration + AudioPortType type; // type is used as a discriminator + union Ext { + AudioPortDeviceExt device; + AudioPortMixExt mix; + AudioPortSessionExt session; + } ext; +}; + +struct ThreadInfo { + int64_t pid; + int64_t tid; +}; diff --git a/audio/common/README b/audio/common/README new file mode 100644 index 0000000000000000000000000000000000000000..cd03106ac699e8b49398e8a204b8cc655a9ce1a5 --- /dev/null +++ b/audio/common/README @@ -0,0 +1 @@ +This folder contains code common to audio core and effect API diff --git a/audio/common/test/utility/OWNERS b/audio/common/all-versions/OWNERS similarity index 100% rename from audio/common/test/utility/OWNERS rename to audio/common/all-versions/OWNERS diff --git a/audio/common/all-versions/README b/audio/common/all-versions/README new file mode 100644 index 0000000000000000000000000000000000000000..d8df022906cd74f5255a129792273d606ea8d7c4 --- /dev/null +++ b/audio/common/all-versions/README @@ -0,0 +1 @@ +This folder contains code common to all versions of the audio API diff --git a/audio/common/all-versions/default/Android.bp b/audio/common/all-versions/default/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..4a27bb76cb179cfb55d6df4613b723414184ddce --- /dev/null +++ b/audio/common/all-versions/default/Android.bp @@ -0,0 +1,43 @@ +// +// Copyright (C) 2016 The Android Open Source Project +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +cc_library_shared { + name: "android.hardware.audio.common-util", + defaults: ["hidl_defaults"], + vendor_available: true, + vndk: { + enabled: true, + }, + srcs: [ + "EffectMap.cpp", + ], + + export_include_dirs: ["include"], + + shared_libs: [ + "liblog", + "libutils", + "libhidlbase", + ], + + header_libs: [ + "android.hardware.audio.common.util@all-versions", + "libaudio_system_headers", + "libhardware_headers", + ], + export_header_lib_headers: [ + "android.hardware.audio.common.util@all-versions", + ] +} diff --git a/audio/common/2.0/default/EffectMap.cpp b/audio/common/all-versions/default/EffectMap.cpp similarity index 96% rename from audio/common/2.0/default/EffectMap.cpp rename to audio/common/all-versions/default/EffectMap.cpp index 703b91cfec710d45e157fe6b49f19a04e21f4e96..7f8da1e2ab40bc89902b5a094946b75ed0f968c7 100644 --- a/audio/common/2.0/default/EffectMap.cpp +++ b/audio/common/all-versions/default/EffectMap.cpp @@ -16,7 +16,7 @@ #include -#include "EffectMap.h" +#include "common/all-versions/default/EffectMap.h" namespace android { diff --git a/audio/common/2.0/default/EffectMap.h b/audio/common/all-versions/default/include/common/all-versions/default/EffectMap.h similarity index 86% rename from audio/common/2.0/default/EffectMap.h rename to audio/common/all-versions/default/include/common/all-versions/default/EffectMap.h index 82bbb1fb09a4a7b6cf02a7c56a7c9f61b36abe9d..547c6d5a9f815edcff2c1c5dc1f2f1c8fcdb871e 100644 --- a/audio/common/2.0/default/EffectMap.h +++ b/audio/common/all-versions/default/include/common/all-versions/default/EffectMap.h @@ -14,8 +14,8 @@ * limitations under the License. */ -#ifndef android_hardware_audio_V2_0_EffectMap_H_ -#define android_hardware_audio_V2_0_EffectMap_H_ +#ifndef android_hardware_audio_common_EffectMap_H_ +#define android_hardware_audio_common_EffectMap_H_ #include @@ -27,14 +27,14 @@ namespace android { // This class needs to be in 'android' ns because Singleton macros require that. class EffectMap : public Singleton { - public: + public: static const uint64_t INVALID_ID; uint64_t add(effect_handle_t handle); effect_handle_t get(const uint64_t& id); void remove(effect_handle_t handle); - private: + private: static uint64_t makeUniqueId(); std::mutex mLock; @@ -43,4 +43,4 @@ class EffectMap : public Singleton { } // namespace android -#endif // android_hardware_audio_V2_0_EffectMap_H_ +#endif // android_hardware_audio_common_EffectMap_H_ diff --git a/audio/common/all-versions/default/include/common/all-versions/default/HidlUtils.h b/audio/common/all-versions/default/include/common/all-versions/default/HidlUtils.h new file mode 100644 index 0000000000000000000000000000000000000000..f9a569741849621cf5624ec9a5dbf77cc193b603 --- /dev/null +++ b/audio/common/all-versions/default/include/common/all-versions/default/HidlUtils.h @@ -0,0 +1,75 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef AUDIO_HAL_VERSION +#error "AUDIO_HAL_VERSION must be set before including this file." +#endif + +#include + +#include + +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioConfig; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioGain; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioGainConfig; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioOffloadInfo; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioPort; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioPortConfig; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::Uuid; +using ::android::hardware::hidl_vec; + +namespace android { +namespace hardware { +namespace audio { +namespace common { +namespace AUDIO_HAL_VERSION { + +class HidlUtils { + public: + static void audioConfigFromHal(const audio_config_t& halConfig, AudioConfig* config); + static void audioConfigToHal(const AudioConfig& config, audio_config_t* halConfig); + static void audioGainConfigFromHal(const struct audio_gain_config& halConfig, + AudioGainConfig* config); + static void audioGainConfigToHal(const AudioGainConfig& config, + struct audio_gain_config* halConfig); + static void audioGainFromHal(const struct audio_gain& halGain, AudioGain* gain); + static void audioGainToHal(const AudioGain& gain, struct audio_gain* halGain); + static AudioUsage audioUsageFromHal(const audio_usage_t halUsage); + static audio_usage_t audioUsageToHal(const AudioUsage usage); + static void audioOffloadInfoFromHal(const audio_offload_info_t& halOffload, + AudioOffloadInfo* offload); + static void audioOffloadInfoToHal(const AudioOffloadInfo& offload, + audio_offload_info_t* halOffload); + static void audioPortConfigFromHal(const struct audio_port_config& halConfig, + AudioPortConfig* config); + static void audioPortConfigToHal(const AudioPortConfig& config, + struct audio_port_config* halConfig); + static void audioPortConfigsFromHal(unsigned int numHalConfigs, + const struct audio_port_config* halConfigs, + hidl_vec* configs); + static std::unique_ptr audioPortConfigsToHal( + const hidl_vec& configs); + static void audioPortFromHal(const struct audio_port& halPort, AudioPort* port); + static void audioPortToHal(const AudioPort& port, struct audio_port* halPort); + static void uuidFromHal(const audio_uuid_t& halUuid, Uuid* uuid); + static void uuidToHal(const Uuid& uuid, audio_uuid_t* halUuid); +}; + +} // namespace AUDIO_HAL_VERSION +} // namespace common +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/common/all-versions/default/include/common/all-versions/default/HidlUtils.impl.h b/audio/common/all-versions/default/include/common/all-versions/default/HidlUtils.impl.h new file mode 100644 index 0000000000000000000000000000000000000000..8ab73501bc2a3207739433e0f336e0d5924de3a2 --- /dev/null +++ b/audio/common/all-versions/default/include/common/all-versions/default/HidlUtils.impl.h @@ -0,0 +1,365 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, 
Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef AUDIO_HAL_VERSION +#error "AUDIO_HAL_VERSION must be set before including this file." +#endif + +#include +#include + +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioChannelMask; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioDevice; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioFormat; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioGainMode; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioMixLatencyClass; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioPortConfigMask; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioPortRole; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioPortType; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioSource; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioStreamType; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioUsage; + +using ::android::hardware::audio::common::utils::mkEnumConverter; + +namespace android { +namespace hardware { +namespace audio { +namespace common { +namespace AUDIO_HAL_VERSION { + +void HidlUtils::audioConfigFromHal(const audio_config_t& halConfig, AudioConfig* config) { + config->sampleRateHz = halConfig.sample_rate; + config->channelMask = mkEnumConverter(halConfig.channel_mask); + config->format = AudioFormat(halConfig.format); + audioOffloadInfoFromHal(halConfig.offload_info, &config->offloadInfo); + config->frameCount = halConfig.frame_count; +} + +void HidlUtils::audioConfigToHal(const AudioConfig& config, audio_config_t* halConfig) { + memset(halConfig, 0, sizeof(audio_config_t)); + halConfig->sample_rate = config.sampleRateHz; + halConfig->channel_mask = static_cast(config.channelMask); + halConfig->format = static_cast(config.format); + audioOffloadInfoToHal(config.offloadInfo, &halConfig->offload_info); + halConfig->frame_count = config.frameCount; +} + +void HidlUtils::audioGainConfigFromHal(const struct audio_gain_config& halConfig, + AudioGainConfig* config) { + config->index = halConfig.index; + config->mode = mkEnumConverter(halConfig.mode); + config->channelMask = mkEnumConverter(halConfig.channel_mask); + for (size_t i = 0; i < sizeof(audio_channel_mask_t) * 8; ++i) { + config->values[i] = halConfig.values[i]; + } + config->rampDurationMs = halConfig.ramp_duration_ms; +} + +void HidlUtils::audioGainConfigToHal(const AudioGainConfig& config, + struct audio_gain_config* halConfig) { + halConfig->index = config.index; + halConfig->mode = static_cast(config.mode); + halConfig->channel_mask = static_cast(config.channelMask); + memset(halConfig->values, 0, sizeof(halConfig->values)); + for (size_t i = 0; i < sizeof(audio_channel_mask_t) * 8; ++i) { + halConfig->values[i] = config.values[i]; + } + halConfig->ramp_duration_ms = config.rampDurationMs; +} + +void HidlUtils::audioGainFromHal(const struct audio_gain& halGain, AudioGain* gain) { + gain->mode = mkEnumConverter(halGain.mode); + 
gain->channelMask = mkEnumConverter(halGain.channel_mask); + gain->minValue = halGain.min_value; + gain->maxValue = halGain.max_value; + gain->defaultValue = halGain.default_value; + gain->stepValue = halGain.step_value; + gain->minRampMs = halGain.min_ramp_ms; + gain->maxRampMs = halGain.max_ramp_ms; +} + +void HidlUtils::audioGainToHal(const AudioGain& gain, struct audio_gain* halGain) { + halGain->mode = static_cast(gain.mode); + halGain->channel_mask = static_cast(gain.channelMask); + halGain->min_value = gain.minValue; + halGain->max_value = gain.maxValue; + halGain->default_value = gain.defaultValue; + halGain->step_value = gain.stepValue; + halGain->min_ramp_ms = gain.minRampMs; + halGain->max_ramp_ms = gain.maxRampMs; +} + +AudioUsage HidlUtils::audioUsageFromHal(const audio_usage_t halUsage) { + switch (halUsage) { + case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST: + case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT: + case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED: + case AUDIO_USAGE_NOTIFICATION_EVENT: + return AudioUsage::NOTIFICATION; + default: + return static_cast(halUsage); + } +} + +audio_usage_t HidlUtils::audioUsageToHal(const AudioUsage usage) { + return static_cast(usage); +} + +void HidlUtils::audioOffloadInfoFromHal(const audio_offload_info_t& halOffload, + AudioOffloadInfo* offload) { + offload->sampleRateHz = halOffload.sample_rate; + offload->channelMask = mkEnumConverter(halOffload.channel_mask); + offload->format = AudioFormat(halOffload.format); + offload->streamType = AudioStreamType(halOffload.stream_type); + offload->bitRatePerSecond = halOffload.bit_rate; + offload->durationMicroseconds = halOffload.duration_us; + offload->hasVideo = halOffload.has_video; + offload->isStreaming = halOffload.is_streaming; + offload->bitWidth = halOffload.bit_width; + offload->bufferSize = halOffload.offload_buffer_size; + offload->usage = audioUsageFromHal(halOffload.usage); +} + +void HidlUtils::audioOffloadInfoToHal(const AudioOffloadInfo& offload, + audio_offload_info_t* halOffload) { + *halOffload = AUDIO_INFO_INITIALIZER; + halOffload->sample_rate = offload.sampleRateHz; + halOffload->channel_mask = static_cast(offload.channelMask); + halOffload->format = static_cast(offload.format); + halOffload->stream_type = static_cast(offload.streamType); + halOffload->bit_rate = offload.bitRatePerSecond; + halOffload->duration_us = offload.durationMicroseconds; + halOffload->has_video = offload.hasVideo; + halOffload->is_streaming = offload.isStreaming; + halOffload->bit_width = offload.bitWidth; + halOffload->offload_buffer_size = offload.bufferSize; + halOffload->usage = audioUsageToHal(offload.usage); +} + +void HidlUtils::audioPortConfigFromHal(const struct audio_port_config& halConfig, + AudioPortConfig* config) { + config->id = halConfig.id; + config->role = AudioPortRole(halConfig.role); + config->type = AudioPortType(halConfig.type); + config->configMask = mkEnumConverter(halConfig.config_mask); + config->sampleRateHz = halConfig.sample_rate; + config->channelMask = mkEnumConverter(halConfig.channel_mask); + config->format = AudioFormat(halConfig.format); + audioGainConfigFromHal(halConfig.gain, &config->gain); + switch (halConfig.type) { + case AUDIO_PORT_TYPE_NONE: + break; + case AUDIO_PORT_TYPE_DEVICE: { + config->ext.device.hwModule = halConfig.ext.device.hw_module; + config->ext.device.type = AudioDevice(halConfig.ext.device.type); + memcpy(config->ext.device.address.data(), halConfig.ext.device.address, + AUDIO_DEVICE_MAX_ADDRESS_LEN); + break; + } + case 
AUDIO_PORT_TYPE_MIX: { + config->ext.mix.hwModule = halConfig.ext.mix.hw_module; + config->ext.mix.ioHandle = halConfig.ext.mix.handle; + if (halConfig.role == AUDIO_PORT_ROLE_SOURCE) { + config->ext.mix.useCase.source = AudioSource(halConfig.ext.mix.usecase.source); + } else if (halConfig.role == AUDIO_PORT_ROLE_SINK) { + config->ext.mix.useCase.stream = AudioStreamType(halConfig.ext.mix.usecase.stream); + } + break; + } + case AUDIO_PORT_TYPE_SESSION: { + config->ext.session.session = halConfig.ext.session.session; + break; + } + } +} + +void HidlUtils::audioPortConfigToHal(const AudioPortConfig& config, + struct audio_port_config* halConfig) { + memset(halConfig, 0, sizeof(audio_port_config)); + halConfig->id = config.id; + halConfig->role = static_cast(config.role); + halConfig->type = static_cast(config.type); + halConfig->config_mask = static_cast(config.configMask); + halConfig->sample_rate = config.sampleRateHz; + halConfig->channel_mask = static_cast(config.channelMask); + halConfig->format = static_cast(config.format); + audioGainConfigToHal(config.gain, &halConfig->gain); + switch (config.type) { + case AudioPortType::NONE: + break; + case AudioPortType::DEVICE: { + halConfig->ext.device.hw_module = config.ext.device.hwModule; + halConfig->ext.device.type = static_cast(config.ext.device.type); + memcpy(halConfig->ext.device.address, config.ext.device.address.data(), + AUDIO_DEVICE_MAX_ADDRESS_LEN); + break; + } + case AudioPortType::MIX: { + halConfig->ext.mix.hw_module = config.ext.mix.hwModule; + halConfig->ext.mix.handle = config.ext.mix.ioHandle; + if (config.role == AudioPortRole::SOURCE) { + halConfig->ext.mix.usecase.source = + static_cast(config.ext.mix.useCase.source); + } else if (config.role == AudioPortRole::SINK) { + halConfig->ext.mix.usecase.stream = + static_cast(config.ext.mix.useCase.stream); + } + break; + } + case AudioPortType::SESSION: { + halConfig->ext.session.session = + static_cast(config.ext.session.session); + break; + } + } +} + +void HidlUtils::audioPortConfigsFromHal(unsigned int numHalConfigs, + const struct audio_port_config* halConfigs, + hidl_vec* configs) { + configs->resize(numHalConfigs); + for (unsigned int i = 0; i < numHalConfigs; ++i) { + audioPortConfigFromHal(halConfigs[i], &(*configs)[i]); + } +} + +std::unique_ptr HidlUtils::audioPortConfigsToHal( + const hidl_vec& configs) { + std::unique_ptr halConfigs(new audio_port_config[configs.size()]); + for (size_t i = 0; i < configs.size(); ++i) { + audioPortConfigToHal(configs[i], &halConfigs[i]); + } + return halConfigs; +} + +void HidlUtils::audioPortFromHal(const struct audio_port& halPort, AudioPort* port) { + port->id = halPort.id; + port->role = AudioPortRole(halPort.role); + port->type = AudioPortType(halPort.type); + port->name.setToExternal(halPort.name, strlen(halPort.name)); + port->sampleRates.resize(halPort.num_sample_rates); + for (size_t i = 0; i < halPort.num_sample_rates; ++i) { + port->sampleRates[i] = halPort.sample_rates[i]; + } + port->channelMasks.resize(halPort.num_channel_masks); + for (size_t i = 0; i < halPort.num_channel_masks; ++i) { + port->channelMasks[i] = mkEnumConverter(halPort.channel_masks[i]); + } + port->formats.resize(halPort.num_formats); + for (size_t i = 0; i < halPort.num_formats; ++i) { + port->formats[i] = AudioFormat(halPort.formats[i]); + } + port->gains.resize(halPort.num_gains); + for (size_t i = 0; i < halPort.num_gains; ++i) { + audioGainFromHal(halPort.gains[i], &port->gains[i]); + } + audioPortConfigFromHal(halPort.active_config, 
&port->activeConfig); + switch (halPort.type) { + case AUDIO_PORT_TYPE_NONE: + break; + case AUDIO_PORT_TYPE_DEVICE: { + port->ext.device.hwModule = halPort.ext.device.hw_module; + port->ext.device.type = AudioDevice(halPort.ext.device.type); + memcpy(port->ext.device.address.data(), halPort.ext.device.address, + AUDIO_DEVICE_MAX_ADDRESS_LEN); + break; + } + case AUDIO_PORT_TYPE_MIX: { + port->ext.mix.hwModule = halPort.ext.mix.hw_module; + port->ext.mix.ioHandle = halPort.ext.mix.handle; + port->ext.mix.latencyClass = AudioMixLatencyClass(halPort.ext.mix.latency_class); + break; + } + case AUDIO_PORT_TYPE_SESSION: { + port->ext.session.session = halPort.ext.session.session; + break; + } + } +} + +void HidlUtils::audioPortToHal(const AudioPort& port, struct audio_port* halPort) { + memset(halPort, 0, sizeof(audio_port)); + halPort->id = port.id; + halPort->role = static_cast(port.role); + halPort->type = static_cast(port.type); + strncpy(halPort->name, port.name.c_str(), AUDIO_PORT_MAX_NAME_LEN); + halPort->name[AUDIO_PORT_MAX_NAME_LEN - 1] = '\0'; + halPort->num_sample_rates = + std::min(port.sampleRates.size(), static_cast(AUDIO_PORT_MAX_SAMPLING_RATES)); + for (size_t i = 0; i < halPort->num_sample_rates; ++i) { + halPort->sample_rates[i] = port.sampleRates[i]; + } + halPort->num_channel_masks = + std::min(port.channelMasks.size(), static_cast(AUDIO_PORT_MAX_CHANNEL_MASKS)); + for (size_t i = 0; i < halPort->num_channel_masks; ++i) { + halPort->channel_masks[i] = static_cast(port.channelMasks[i]); + } + halPort->num_formats = + std::min(port.formats.size(), static_cast(AUDIO_PORT_MAX_FORMATS)); + for (size_t i = 0; i < halPort->num_formats; ++i) { + halPort->formats[i] = static_cast(port.formats[i]); + } + halPort->num_gains = std::min(port.gains.size(), static_cast(AUDIO_PORT_MAX_GAINS)); + for (size_t i = 0; i < halPort->num_gains; ++i) { + audioGainToHal(port.gains[i], &halPort->gains[i]); + } + audioPortConfigToHal(port.activeConfig, &halPort->active_config); + switch (port.type) { + case AudioPortType::NONE: + break; + case AudioPortType::DEVICE: { + halPort->ext.device.hw_module = port.ext.device.hwModule; + halPort->ext.device.type = static_cast(port.ext.device.type); + memcpy(halPort->ext.device.address, port.ext.device.address.data(), + AUDIO_DEVICE_MAX_ADDRESS_LEN); + break; + } + case AudioPortType::MIX: { + halPort->ext.mix.hw_module = port.ext.mix.hwModule; + halPort->ext.mix.handle = port.ext.mix.ioHandle; + halPort->ext.mix.latency_class = + static_cast(port.ext.mix.latencyClass); + break; + } + case AudioPortType::SESSION: { + halPort->ext.session.session = static_cast(port.ext.session.session); + break; + } + } +} + +void HidlUtils::uuidFromHal(const audio_uuid_t& halUuid, Uuid* uuid) { + uuid->timeLow = halUuid.timeLow; + uuid->timeMid = halUuid.timeMid; + uuid->versionAndTimeHigh = halUuid.timeHiAndVersion; + uuid->variantAndClockSeqHigh = halUuid.clockSeq; + memcpy(uuid->node.data(), halUuid.node, uuid->node.size()); +} + +void HidlUtils::uuidToHal(const Uuid& uuid, audio_uuid_t* halUuid) { + halUuid->timeLow = uuid.timeLow; + halUuid->timeMid = uuid.timeMid; + halUuid->timeHiAndVersion = uuid.versionAndTimeHigh; + halUuid->clockSeq = uuid.variantAndClockSeqHigh; + memcpy(halUuid->node, uuid.node.data(), uuid.node.size()); +} + +} // namespace AUDIO_HAL_VERSION +} // namespace common +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/2.0/default/Android.mk b/audio/common/all-versions/default/service/Android.mk similarity index 
57% rename from audio/2.0/default/Android.mk rename to audio/common/all-versions/default/service/Android.mk index 3587d60b07ace0628a64cfc6f14c2066a6aa2585..43d7fe19c52ffbc95ec812b0beaa827f28e6503f 100644 --- a/audio/2.0/default/Android.mk +++ b/audio/common/all-versions/default/service/Android.mk @@ -16,45 +16,6 @@ LOCAL_PATH := $(call my-dir) -include $(CLEAR_VARS) -LOCAL_MODULE := android.hardware.audio@2.0-impl -LOCAL_MODULE_RELATIVE_PATH := hw -LOCAL_PROPRIETARY_MODULE := true -LOCAL_SRC_FILES := \ - Conversions.cpp \ - Device.cpp \ - DevicesFactory.cpp \ - ParametersUtil.cpp \ - PrimaryDevice.cpp \ - Stream.cpp \ - StreamIn.cpp \ - StreamOut.cpp \ - -LOCAL_CFLAGS := -Wall -Werror - -LOCAL_SHARED_LIBRARIES := \ - libbase \ - libcutils \ - libfmq \ - libhardware \ - libhidlbase \ - libhidltransport \ - liblog \ - libutils \ - android.hardware.audio@2.0 \ - android.hardware.audio.common@2.0 \ - android.hardware.audio.common@2.0-util \ - -LOCAL_HEADER_LIBRARIES := \ - libaudioclient_headers \ - libaudio_system_headers \ - libhardware_headers \ - libmedia_headers \ - -LOCAL_WHOLE_STATIC_LIBRARIES := libmedia_helper - -include $(BUILD_SHARED_LIBRARY) - # # Service # @@ -70,19 +31,24 @@ LOCAL_SRC_FILES := \ LOCAL_CFLAGS := -Wall -Werror LOCAL_SHARED_LIBRARIES := \ + libbinder \ libhidlbase \ libhidltransport \ liblog \ libutils \ libhardware \ android.hardware.audio@2.0 \ + android.hardware.audio@4.0 \ android.hardware.audio.common@2.0 \ + android.hardware.audio.common@4.0 \ android.hardware.audio.effect@2.0 \ + android.hardware.audio.effect@4.0 \ + android.hardware.bluetooth.a2dp@1.0 \ android.hardware.soundtrigger@2.0 \ - android.hardware.broadcastradio@1.0 \ - android.hardware.broadcastradio@1.1 \ - android.hardware.bluetooth.a2dp@1.0 + android.hardware.soundtrigger@2.1 +# Can not switch to Android.bp until AUDIOSERVER_MULTILIB +# is deprecated as build config variable are not supported ifeq ($(strip $(AUDIOSERVER_MULTILIB)),) LOCAL_MULTILIB := 32 else diff --git a/audio/2.0/default/android.hardware.audio@2.0-service.rc b/audio/common/all-versions/default/service/android.hardware.audio@2.0-service.rc similarity index 70% rename from audio/2.0/default/android.hardware.audio@2.0-service.rc rename to audio/common/all-versions/default/service/android.hardware.audio@2.0-service.rc index eeaf71b26776d91b3b6f4dd04c44b5c13e047cfb..8217b946dc96a3b42e458a5026898f03ca17261c 100644 --- a/audio/2.0/default/android.hardware.audio@2.0-service.rc +++ b/audio/common/all-versions/default/service/android.hardware.audio@2.0-service.rc @@ -1,4 +1,4 @@ -service audio-hal-2-0 /vendor/bin/hw/android.hardware.audio@2.0-service +service vendor.audio-hal-2-0 /vendor/bin/hw/android.hardware.audio@2.0-service class hal user audioserver # media gid needed for /dev/fm (radio) and for /data/misc/media (tee) @@ -9,3 +9,5 @@ service audio-hal-2-0 /vendor/bin/hw/android.hardware.audio@2.0-service # and its .rc file has an "onrestart restart audio-hal" rule, thus # an additional auto-restart from the init process isn't needed. 
oneshot + interface android.hardware.audio@4.0::IDevicesFactory default + interface android.hardware.audio@2.0::IDevicesFactory default diff --git a/audio/common/all-versions/default/service/service.cpp b/audio/common/all-versions/default/service/service.cpp new file mode 100644 index 0000000000000000000000000000000000000000..c7ce638e7fa3b6f9c2e899c99506e81100f0348a --- /dev/null +++ b/audio/common/all-versions/default/service/service.cpp @@ -0,0 +1,57 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "audiohalservice" + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +using namespace android::hardware; +using android::OK; + +int main(int /* argc */, char* /* argv */ []) { + android::ProcessState::initWithDriver("/dev/vndbinder"); + // start a threadpool for vndbinder interactions + android::ProcessState::self()->startThreadPool(); + configureRpcThreadpool(16, true /*callerWillJoin*/); + + bool fail = registerPassthroughServiceImplementation() != OK && + registerPassthroughServiceImplementation() != OK; + LOG_ALWAYS_FATAL_IF(fail, "Could not register audio core API 2.0 nor 4.0"); + + fail = registerPassthroughServiceImplementation() != OK && + registerPassthroughServiceImplementation() != OK, + LOG_ALWAYS_FATAL_IF(fail, "Could not register audio effect API 2.0 nor 4.0"); + + fail = registerPassthroughServiceImplementation() != OK && + registerPassthroughServiceImplementation() != OK, + ALOGW_IF(fail, "Could not register soundtrigger API 2.0 nor 2.1"); + + fail = + registerPassthroughServiceImplementation() != + OK; + ALOGW_IF(fail, "Could not register Bluetooth audio offload 1.0"); + + joinRpcThreadpool(); +} diff --git a/audio/common/all-versions/test/OWNERS b/audio/common/all-versions/test/OWNERS new file mode 100644 index 0000000000000000000000000000000000000000..6a26ae72cd0a522b7dda2a24be43333f4e186157 --- /dev/null +++ b/audio/common/all-versions/test/OWNERS @@ -0,0 +1,2 @@ +yim@google.com +zhuoyao@google.com diff --git a/audio/common/test/utility/Android.bp b/audio/common/all-versions/test/utility/Android.bp similarity index 100% rename from audio/common/test/utility/Android.bp rename to audio/common/all-versions/test/utility/Android.bp diff --git a/audio/common/test/utility/include/utility/AssertOk.h b/audio/common/all-versions/test/utility/include/utility/AssertOk.h similarity index 88% rename from audio/common/test/utility/include/utility/AssertOk.h rename to audio/common/all-versions/test/utility/include/utility/AssertOk.h index d8aa45100531948e61cbf35146ba0721772de032..11e1c24783b858cd8eecf88a1e9ab7b16802de9f 100644 --- a/audio/common/test/utility/include/utility/AssertOk.h +++ b/audio/common/all-versions/test/utility/include/utility/AssertOk.h @@ -17,7 +17,7 @@ #define ANDROID_HARDWARE_AUDIO_COMMON_TEST_UTILITY_ASSERTOK_H #include -#include +#include #include @@ -33,7 +33,6 @@ namespace detail { // This is a detail namespace, thus it 
is OK to import a class as nobody else is // allowed to use it using ::android::hardware::Return; -using ::android::hardware::audio::V2_0::Result; template inline ::testing::AssertionResult assertIsOk(const char* expr, const Return& ret) { @@ -50,6 +49,7 @@ inline ::testing::AssertionResult continueIfIsOk(const char* expr, const Return< } // Expect two equal Results +template inline ::testing::AssertionResult assertResult(const char* e_expr, const char* r_expr, Result expected, Result result) { return ::testing::AssertionResult(expected == result) @@ -58,6 +58,7 @@ inline ::testing::AssertionResult assertResult(const char* e_expr, const char* r } // Expect two equal Results one being wrapped in an OK Return +template inline ::testing::AssertionResult assertResult(const char* e_expr, const char* r_expr, Result expected, const Return& ret) { return continueIfIsOk(r_expr, ret, @@ -65,8 +66,10 @@ inline ::testing::AssertionResult assertResult(const char* e_expr, const char* r } // Expect a Result to be part of a list of Results +template inline ::testing::AssertionResult assertResult(const char* e_expr, const char* r_expr, - const std::vector& expected, Result result) { + const std::initializer_list& expected, + Result result) { if (std::find(expected.begin(), expected.end(), result) != expected.end()) { return ::testing::AssertionSuccess(); // result is in expected } @@ -77,8 +80,9 @@ inline ::testing::AssertionResult assertResult(const char* e_expr, const char* r } // Expect a Result wrapped in an OK Return to be part of a list of Results +template inline ::testing::AssertionResult assertResult(const char* e_expr, const char* r_expr, - const std::vector& expected, + const std::initializer_list& expected, const Return& ret) { return continueIfIsOk(r_expr, ret, [&] { return assertResult(e_expr, r_expr, expected, Result{ret}); }); @@ -88,15 +92,17 @@ inline ::testing::AssertionResult assertOk(const char* expr, const Return& return assertIsOk(expr, ret); } +template inline ::testing::AssertionResult assertOk(const char* expr, Result result) { return ::testing::AssertionResult(result == Result::OK) << "Expected success: " << expr << "\nActual: " << ::testing::PrintToString(result); } +template inline ::testing::AssertionResult assertOk(const char* expr, const Return& ret) { return continueIfIsOk(expr, ret, [&] { return assertOk(expr, Result{ret}); }); } -} +} // namespace detail #define ASSERT_IS_OK(ret) ASSERT_PRED_FORMAT1(detail::assertIsOk, ret) #define EXPECT_IS_OK(ret) EXPECT_PRED_FORMAT1(detail::assertIsOk, ret) @@ -108,11 +114,11 @@ inline ::testing::AssertionResult assertOk(const char* expr, const Return #include +#include #include namespace android { @@ -33,13 +34,13 @@ namespace utility { * Avoid destroying static objects after main return. * Post main return destruction leads to incorrect gtest timing measurements as * well as harder debugging if anything goes wrong during destruction.
*/ -class Environment : public ::testing::Environment { +class Environment : public ::testing::VtsHalHidlTargetTestEnvBase { public: using TearDownFunc = std::function; void registerTearDown(TearDownFunc&& tearDown) { tearDowns.push_back(std::move(tearDown)); } private: - void TearDown() override { + void HidlTearDown() override { // Call the tear downs in reverse order of insertion for (auto& tearDown : tearDowns) { tearDown(); @@ -48,11 +49,11 @@ class Environment : public ::testing::Environment { std::list tearDowns; }; -} // utility -} // test -} // common -} // audio -} // test -} // utility +} // namespace utility +} // namespace test +} // namespace common +} // namespace audio +} // namespace hardware +} // namespace android #endif // ANDROID_HARDWARE_AUDIO_COMMON_TEST_UTILITY_ENVIRONMENT_TEARDOWN_H diff --git a/audio/common/test/utility/include/utility/PrettyPrintAudioTypes.h b/audio/common/all-versions/test/utility/include/utility/PrettyPrintAudioTypes.h similarity index 50% rename from audio/common/test/utility/include/utility/PrettyPrintAudioTypes.h rename to audio/common/all-versions/test/utility/include/utility/PrettyPrintAudioTypes.h index 37059e748a8254b9d8d865e372845c79e0dcf3ff..abc2ff5f82ff639c49d2ac25c313b1bb85bf7900 100644 --- a/audio/common/test/utility/include/utility/PrettyPrintAudioTypes.h +++ b/audio/common/all-versions/test/utility/include/utility/PrettyPrintAudioTypes.h @@ -14,57 +14,49 @@ * limitations under the License. */ +#ifndef AUDIO_HAL_VERSION +#error "AUDIO_HAL_VERSION must be set before including this file." +#endif + #ifndef ANDROID_HARDWARE_AUDIO_COMMON_TEST_UTILITY_PRETTY_PRINT_AUDIO_TYPES_H #define ANDROID_HARDWARE_AUDIO_COMMON_TEST_UTILITY_PRETTY_PRINT_AUDIO_TYPES_H #include -#include - -#include -#include - -/** @file Use HIDL generated toString methods to pretty print gtest errors */ +#include -namespace prettyPrintAudioTypesDetail { - -// Print the value of an enum as hex -template -inline void printUnderlyingValue(Enum value, ::std::ostream* os) { - *os << std::hex << " (0x" << static_cast>(value) << ")"; -} - -} // namespace detail +/** @file Use HIDL generated toString methods to pretty print gtest errors + * Unfortunately Gtest does not offer a template to specialize, only + * overloading PrintTo. + * @note that this overload can NOT be template because + * the fallback is already template, resulting in ambiguity. + * @note that the overload MUST be in the exact namespace + * the type is declared in, as per the ADL rules. 
+ */ namespace android { namespace hardware { namespace audio { -namespace V2_0 { -inline void PrintTo(const Result& result, ::std::ostream* os) { - *os << toString(result); - prettyPrintAudioTypesDetail::printUnderlyingValue(result, os); -} +#define DEFINE_GTEST_PRINT_TO(T) \ + inline void PrintTo(const T& val, ::std::ostream* os) { *os << toString(val); } -} // namespace V2_0 -namespace common { -namespace V2_0 { - -inline void PrintTo(const AudioConfig& config, ::std::ostream* os) { - *os << toString(config); -} +namespace AUDIO_HAL_VERSION { +DEFINE_GTEST_PRINT_TO(IPrimaryDevice::TtyMode) +DEFINE_GTEST_PRINT_TO(Result) +} // namespace AUDIO_HAL_VERSION -inline void PrintTo(const AudioDevice& device, ::std::ostream* os) { - *os << toString(device); - prettyPrintAudioTypesDetail::printUnderlyingValue(device, os); -} +namespace common { +namespace AUDIO_HAL_VERSION { +DEFINE_GTEST_PRINT_TO(AudioConfig) +DEFINE_GTEST_PRINT_TO(AudioMode) +DEFINE_GTEST_PRINT_TO(AudioDevice) +DEFINE_GTEST_PRINT_TO(AudioFormat) +DEFINE_GTEST_PRINT_TO(AudioChannelMask) +} // namespace AUDIO_HAL_VERSION +} // namespace common -inline void PrintTo(const AudioChannelMask& channelMask, ::std::ostream* os) { - *os << toString(channelMask); - prettyPrintAudioTypesDetail::printUnderlyingValue(channelMask, os); -} +#undef DEFINE_GTEST_PRINT_TO -} // namespace V2_0 -} // namespace common } // namespace audio } // namespace hardware } // namespace android diff --git a/audio/common/test/utility/include/utility/ReturnIn.h b/audio/common/all-versions/test/utility/include/utility/ReturnIn.h similarity index 89% rename from audio/common/test/utility/include/utility/ReturnIn.h rename to audio/common/all-versions/test/utility/include/utility/ReturnIn.h index 08d502f2d4bac5ffb7608524342bc5e591fe6213..7fd0d4a4372ceda061aaabdf5c9303dbb23094f0 100644 --- a/audio/common/test/utility/include/utility/ReturnIn.h +++ b/audio/common/all-versions/test/utility/include/utility/ReturnIn.h @@ -45,7 +45,7 @@ class ReturnIn { template void set(Head&& head, Tail&&... tail) { std::get(results) = std::forward(head); - set(tail...); + set(std::forward(tail)...); } // Trivial case void set() {} @@ -56,7 +56,7 @@ class ReturnIn { } // namespace detail // Generate the HIDL synchronous callback with a copy policy -// Input: the variables (lvalue reference) where to save the return values +// Input: the variables (lvalue references) where to copy the return values // Output: the callback to provide to a HIDL call with a synchronous callback // The output parameters *will be copied* do not use this function if you have // a zero copy policy @@ -65,11 +65,11 @@ detail::ReturnIn returnIn(ResultStore&... 
ts) { return {ts...}; } -} // utility -} // test -} // common -} // audio -} // test -} // utility +} // namespace utility +} // namespace test +} // namespace common +} // namespace audio +} // namespace hardware +} // namespace android #endif // ANDROID_HARDWARE_AUDIO_COMMON_TEST_UTILITY_RETURN_IN_H diff --git a/audio/common/all-versions/test/utility/include/utility/ValidateXml.h b/audio/common/all-versions/test/utility/include/utility/ValidateXml.h new file mode 100644 index 0000000000000000000000000000000000000000..95080d1c4e964240d53519090a2921277d996ba7 --- /dev/null +++ b/audio/common/all-versions/test/utility/include/utility/ValidateXml.h @@ -0,0 +1,80 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_AUDIO_COMMON_TEST_UTILITY_VALIDATE_XML_H +#define ANDROID_HARDWARE_AUDIO_COMMON_TEST_UTILITY_VALIDATE_XML_H + +#include + +namespace android { +namespace hardware { +namespace audio { +namespace common { +namespace test { +namespace utility { + +/** Validate the provided XmlFile with the provided xsdFile. + * Intended to use with ASSERT_PRED_FORMAT2 as such: + * ASSERT_PRED_FORMAT2(validateXml, pathToXml, pathToXsd); + * See ASSERT_VALID_XML for a helper macro. + */ +::testing::AssertionResult validateXml(const char* xmlFilePathExpr, const char* xsdFilePathExpr, + const char* xmlFilePath, const char* xsdFilePath); + +/** Helper gtest ASSERT to test XML validity against an XSD. */ +#define ASSERT_VALID_XML(xmlFilePath, xsdFilePath) \ + ASSERT_PRED_FORMAT2(::android::hardware::audio::common::test::utility::validateXml, \ + xmlFilePath, xsdFilePath) + +/** Helper gtest EXPECT to test XML validity against an XSD. */ +#define EXPECT_VALID_XML(xmlFilePath, xsdFilePath) \ + EXPECT_PRED_FORMAT2(::android::hardware::audio::common::test::utility::validateXml, \ + xmlFilePath, xsdFilePath) + +/** Validate an XML according to an xsd. + * The XML file must be in at least one of the provided locations. + * If multiple are found, all are validated. + */ +::testing::AssertionResult validateXmlMultipleLocations( + const char* xmlFileNameExpr, const char* xmlFileLocationsExpr, const char* xsdFilePathExpr, + const char* xmlFileName, std::vector xmlFileLocations, const char* xsdFilePath); + +/** ASSERT that an XML is valid according to an xsd. + * The XML file must be in at least one of the provided locations. + * If multiple are found, all are validated. + */ +#define ASSERT_ONE_VALID_XML_MULTIPLE_LOCATIONS(xmlFileName, xmlFileLocations, xsdFilePath) \ + ASSERT_PRED_FORMAT3( \ + ::android::hardware::audio::common::test::utility::validateXmlMultipleLocations, \ + xmlFileName, xmlFileLocations, xsdFilePath) + +/** EXPECT an XML to be valid according to an xsd. + * The XML file must be in at least one of the provided locations. + * If multiple are found, all are validated. 
+ */ +#define EXPECT_ONE_VALID_XML_MULTIPLE_LOCATIONS(xmlFileName, xmlFileLocations, xsdFilePath) \ + EXPECT_PRED_FORMAT3( \ + ::android::hardware::audio::common::test::utility::validateXmlMultipleLocations, \ + xmlFileName, xmlFileLocations, xsdFilePath) + +} // namespace utility +} // namespace test +} // namespace common +} // namespace audio +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_AUDIO_COMMON_TEST_UTILITY_VALIDATE_XML_H diff --git a/audio/common/test/utility/src/ValidateXml.cpp b/audio/common/all-versions/test/utility/src/ValidateXml.cpp similarity index 67% rename from audio/common/test/utility/src/ValidateXml.cpp rename to audio/common/all-versions/test/utility/src/ValidateXml.cpp index 784f9401ecc7432d1232f493e7128f2a45372e46..5030af5067d0f065ba8021670e35fc53f097e399 100644 --- a/audio/common/test/utility/src/ValidateXml.cpp +++ b/audio/common/all-versions/test/utility/src/ValidateXml.cpp @@ -17,6 +17,8 @@ #define LOG_TAG "ValidateAudioConfig" #include +#include + #define LIBXML_SCHEMAS_ENABLED #include #define LIBXML_XINCLUDE_ENABLED @@ -94,9 +96,9 @@ struct Libxml2Global { Libxml2Global libxml2; auto context = [&]() { - return std::string() + " While validating: " + xmlFilePathExpr + + return std::string() + " While validating: " + xmlFilePathExpr + "\n Which is: " + xmlFilePath + "\nAgainst the schema: " + xsdFilePathExpr + - "\n Which is: " + xsdFilePath + "Libxml2 errors\n" + libxml2.getErrors(); + "\n Which is: " + xsdFilePath + "\nLibxml2 errors:\n" + libxml2.getErrors(); }; auto schemaParserCtxt = make_xmlUnique(xmlSchemaNewParserCtxt(xsdFilePath)); @@ -117,7 +119,7 @@ struct Libxml2Global { auto schemaCtxt = make_xmlUnique(xmlSchemaNewValidCtxt(schema.get())); int ret = xmlSchemaValidateDoc(schemaCtxt.get(), doc.get()); if (ret > 0) { - return ::testing::AssertionFailure() << "xml is not valid according to the xsd.\n" + return ::testing::AssertionFailure() << "XML is not valid according to the xsd\n" << context(); } if (ret < 0) { @@ -127,9 +129,43 @@ struct Libxml2Global { return ::testing::AssertionSuccess(); } -} // utility -} // test -} // common -} // audio -} // test -} // utility +::testing::AssertionResult validateXmlMultipleLocations( + const char* xmlFileNameExpr, const char* xmlFileLocationsExpr, const char* xsdFilePathExpr, + const char* xmlFileName, std::vector xmlFileLocations, const char* xsdFilePath) { + using namespace std::string_literals; + + std::vector errors; + std::vector foundFiles; + + for (const char* location : xmlFileLocations) { + std::string xmlFilePath = location + "/"s + xmlFileName; + if (access(xmlFilePath.c_str(), F_OK) != 0) { + // If the file does not exist ignore this location and fallback on the next one + continue; + } + foundFiles.push_back(" " + xmlFilePath + '\n'); + auto result = validateXml("xmlFilePath", xsdFilePathExpr, xmlFilePath.c_str(), xsdFilePath); + if (!result) { + errors.push_back(result.message()); + } + } + + if (foundFiles.empty()) { + errors.push_back("No xml file found in provided locations.\n"); + } + + return ::testing::AssertionResult(errors.empty()) + << errors.size() << " error" << (errors.size() == 1 ? 
" " : "s ") + << std::accumulate(begin(errors), end(errors), "occurred during xml validation:\n"s) + << " While validating all: " << xmlFileNameExpr + << "\n Which is: " << xmlFileName + << "\n In the following folders: " << xmlFileLocationsExpr + << "\n Which is: " << ::testing::PrintToString(xmlFileLocations); +} + +} // namespace utility +} // namespace test +} // namespace common +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/common/all-versions/util/Android.bp b/audio/common/all-versions/util/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..5d33a3a189e56b63bd52696cbe3a7a84e1c66c45 --- /dev/null +++ b/audio/common/all-versions/util/Android.bp @@ -0,0 +1,10 @@ +cc_library_headers { + name: "android.hardware.audio.common.util@all-versions", + defaults: ["hidl_defaults"], + vendor_available: true, + vndk: { + enabled: true, + }, + + export_include_dirs: ["include"], +} diff --git a/audio/common/all-versions/util/include/common/all-versions/IncludeGuard.h b/audio/common/all-versions/util/include/common/all-versions/IncludeGuard.h new file mode 100644 index 0000000000000000000000000000000000000000..2d5481625cd71b5ba7847d30c0834fd33d2d3322 --- /dev/null +++ b/audio/common/all-versions/util/include/common/all-versions/IncludeGuard.h @@ -0,0 +1,19 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef AUDIO_HAL_VERSION +#error "AUDIO_HAL_VERSION must be set before including this file." +#endif diff --git a/audio/common/all-versions/util/include/common/all-versions/VersionUtils.h b/audio/common/all-versions/util/include/common/all-versions/VersionUtils.h new file mode 100644 index 0000000000000000000000000000000000000000..70c3d56a42ef3da12ec3937abbfceca40cb44b0c --- /dev/null +++ b/audio/common/all-versions/util/include/common/all-versions/VersionUtils.h @@ -0,0 +1,67 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef android_hardware_audio_common_VersionUtils_H_ +#define android_hardware_audio_common_VersionUtils_H_ + +#include +#include + +namespace android { +namespace hardware { +namespace audio { +namespace common { +namespace utils { + +/** Similar to static_cast but also casts to hidl_bitfield depending on + * return type inference (emulated through user-define conversion). 
+ */ +template +class EnumConverter { + public: + static_assert(std::is_enum::value || std::is_enum::value, + "Source or destination should be an enum"); + + explicit EnumConverter(Source source) : mSource(source) {} + + operator Destination() const { return static_cast(mSource); } + + template ::value>> + operator ::android::hardware::hidl_bitfield() { + return static_cast>(mSource); + } + + private: + const Source mSource; +}; +template +auto mkEnumConverter(Source source) { + return EnumConverter{source}; +} + +/** Allows converting an enum to its bitfield or itself. */ +template +EnumConverter mkBitfield(Enum value) { + return EnumConverter{value}; +} + +} // namespace utils +} // namespace common +} // namespace audio +} // namespace hardware +} // namespace android + +#endif // android_hardware_audio_common_VersionUtils_H_ diff --git a/audio/common/test/utility/include/utility/ValidateXml.h b/audio/common/test/utility/include/utility/ValidateXml.h deleted file mode 100644 index fdfa50666e5b0eccaacdeb2489ee65676d708783..0000000000000000000000000000000000000000 --- a/audio/common/test/utility/include/utility/ValidateXml.h +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright (C) 2017 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef ANDROID_HARDWARE_AUDIO_COMMON_TEST_UTILITY_VALIDATE_XML_H -#define ANDROID_HARDWARE_AUDIO_COMMON_TEST_UTILITY_VALIDATE_XML_H - -#include - -namespace android { -namespace hardware { -namespace audio { -namespace common { -namespace test { -namespace utility { - -/** Validate the provided XmlFile with the provided xsdFile. - * Intended to use with ASSERT_PRED_FORMAT2 as such: - * ASSERT_PRED_FORMAT2(validateXml, pathToXml, pathToXsd); - * See ASSERT_VALID_XML for a helper macro. - */ -::testing::AssertionResult validateXml(const char* xmlFilePathExpr, const char* xsdFilePathExpr, - const char* xmlFilePath, const char* xsdPathName); - -/** Helper gtest ASSERT to test xml validity against an xsd. 
*/ -#define ASSERT_VALID_XML(xmlFilePath, xsdFilePath) \ - ASSERT_PRED_FORMAT2(::android::hardware::audio::common::test::utility::validateXml, \ - xmlFilePath, xsdFilePath) - -} // utility -} // test -} // common -} // audio -} // test -} // utility - -#endif // ANDROID_HARDWARE_AUDIO_COMMON_TEST_UTILITY_VALIDATE_XML_H diff --git a/audio/core/2.0/default/Android.bp b/audio/core/2.0/default/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..625df74a88a10e915a137ba5d46082217d4c96fd --- /dev/null +++ b/audio/core/2.0/default/Android.bp @@ -0,0 +1,53 @@ +cc_library_shared { + name: "android.hardware.audio@2.0-impl", + relative_install_path: "hw", + proprietary: true, + vendor: true, + srcs: [ + "Conversions.cpp", + "Device.cpp", + "DevicesFactory.cpp", + "ParametersUtil.cpp", + "PrimaryDevice.cpp", + "Stream.cpp", + "StreamIn.cpp", + "StreamOut.cpp", + ], + + cflags: [ + "-DAUDIO_HAL_VERSION_2_0", + ], + + defaults: ["hidl_defaults"], + + export_include_dirs: ["include"], + + shared_libs: [ + "libbase", + "libcutils", + "libfmq", + "libhardware", + "libhidlbase", + "libhidltransport", + "liblog", + "libutils", + "android.hardware.audio@2.0", + "android.hardware.audio.common@2.0", + "android.hardware.audio.common@2.0-util", + "android.hardware.audio.common-util", + ], + + header_libs: [ + "android.hardware.audio.common.util@all-versions", + "android.hardware.audio.core@all-versions-impl", + "libaudioclient_headers", + "libaudio_system_headers", + "libhardware_headers", + "libmedia_headers", + ], + + whole_static_libs: [ + "libmedia_helper", + ], + +} diff --git a/audio/core/2.0/default/Conversions.cpp b/audio/core/2.0/default/Conversions.cpp new file mode 100644 index 0000000000000000000000000000000000000000..6c32090626df365495c148ff068f68f52716f5cd --- /dev/null +++ b/audio/core/2.0/default/Conversions.cpp @@ -0,0 +1,21 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "core/2.0/default/Conversions.h" + +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/core/2.0/default/Device.cpp b/audio/core/2.0/default/Device.cpp new file mode 100644 index 0000000000000000000000000000000000000000..b67203d50e1393147cc77e58117faf5b03be6bbd --- /dev/null +++ b/audio/core/2.0/default/Device.cpp @@ -0,0 +1,28 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "DeviceHAL" + +#include "core/2.0/default/Device.h" +#include +#include "core/2.0/default/Conversions.h" +#include "core/2.0/default/StreamIn.h" +#include "core/2.0/default/StreamOut.h" +#include "core/2.0/default/Util.h" + +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/core/2.0/default/DevicesFactory.cpp b/audio/core/2.0/default/DevicesFactory.cpp new file mode 100644 index 0000000000000000000000000000000000000000..65a9ccdc4e3de3a9fbfd3330c4d4967f30a46be1 --- /dev/null +++ b/audio/core/2.0/default/DevicesFactory.cpp @@ -0,0 +1,25 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "DevicesFactoryHAL" + +#include "core/2.0/default/DevicesFactory.h" +#include "core/2.0/default/Device.h" +#include "core/2.0/default/PrimaryDevice.h" + +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/core/2.0/default/OWNERS b/audio/core/2.0/default/OWNERS new file mode 100644 index 0000000000000000000000000000000000000000..6fdc97ca298fbbda9cb676f5acb02d7495debcb4 --- /dev/null +++ b/audio/core/2.0/default/OWNERS @@ -0,0 +1,3 @@ +elaurent@google.com +krocard@google.com +mnaganov@google.com diff --git a/audio/core/2.0/default/ParametersUtil.cpp b/audio/core/2.0/default/ParametersUtil.cpp new file mode 100644 index 0000000000000000000000000000000000000000..963e291de0a2c95b92afd9c6bbcd3e1272b7f021 --- /dev/null +++ b/audio/core/2.0/default/ParametersUtil.cpp @@ -0,0 +1,22 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "core/2.0/default/ParametersUtil.h" +#include "core/2.0/default/Util.h" + +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/core/2.0/default/PrimaryDevice.cpp b/audio/core/2.0/default/PrimaryDevice.cpp new file mode 100644 index 0000000000000000000000000000000000000000..decaa14658b94bf15505ef272551bcec0c074b33 --- /dev/null +++ b/audio/core/2.0/default/PrimaryDevice.cpp @@ -0,0 +1,24 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "PrimaryDeviceHAL" + +#include "core/2.0/default/PrimaryDevice.h" +#include "core/2.0/default/Util.h" + +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/core/2.0/default/Stream.cpp b/audio/core/2.0/default/Stream.cpp new file mode 100644 index 0000000000000000000000000000000000000000..0863a7c39964a78f811aacd98f300ca15c2e52bb --- /dev/null +++ b/audio/core/2.0/default/Stream.cpp @@ -0,0 +1,26 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "StreamHAL" + +#include "core/2.0/default/Stream.h" +#include "common/all-versions/default/EffectMap.h" +#include "core/2.0/default/Conversions.h" +#include "core/2.0/default/Util.h" + +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/core/2.0/default/StreamIn.cpp b/audio/core/2.0/default/StreamIn.cpp new file mode 100644 index 0000000000000000000000000000000000000000..2021df1c4996636faf6d2520aa1638545b610a60 --- /dev/null +++ b/audio/core/2.0/default/StreamIn.cpp @@ -0,0 +1,24 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "StreamInHAL" + +#include "core/2.0/default/StreamIn.h" +#include "core/2.0/default/Util.h" + +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/core/2.0/default/StreamOut.cpp b/audio/core/2.0/default/StreamOut.cpp new file mode 100644 index 0000000000000000000000000000000000000000..940a251272fcd5ac4a56f348d3553dc306be81e2 --- /dev/null +++ b/audio/core/2.0/default/StreamOut.cpp @@ -0,0 +1,24 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "StreamOutHAL" + +#include "core/2.0/default/StreamOut.h" +#include "core/2.0/default/Util.h" + +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/core/2.0/default/include/core/2.0/default/Conversions.h b/audio/core/2.0/default/include/core/2.0/default/Conversions.h new file mode 100644 index 0000000000000000000000000000000000000000..b3a6ea886eb0069a6d710ee1723764fcab160c57 --- /dev/null +++ b/audio/core/2.0/default/include/core/2.0/default/Conversions.h @@ -0,0 +1,26 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_AUDIO_V2_0_CONVERSIONS_H_ +#define ANDROID_HARDWARE_AUDIO_V2_0_CONVERSIONS_H_ + +#include + +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION + +#endif // ANDROID_HARDWARE_AUDIO_V2_0_CONVERSIONS_H_ diff --git a/audio/core/2.0/default/include/core/2.0/default/Device.h b/audio/core/2.0/default/include/core/2.0/default/Device.h new file mode 100644 index 0000000000000000000000000000000000000000..3ec74649ecc3616c0fac062490f94415291e4be9 --- /dev/null +++ b/audio/core/2.0/default/include/core/2.0/default/Device.h @@ -0,0 +1,28 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_HARDWARE_AUDIO_V2_0_DEVICE_H +#define ANDROID_HARDWARE_AUDIO_V2_0_DEVICE_H + +#include + +#include "ParametersUtil.h" + +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION + +#endif // ANDROID_HARDWARE_AUDIO_V2_0_DEVICE_H diff --git a/audio/core/2.0/default/include/core/2.0/default/DevicesFactory.h b/audio/core/2.0/default/include/core/2.0/default/DevicesFactory.h new file mode 100644 index 0000000000000000000000000000000000000000..8e8ee88ffa069765b28935f103f2351e962ce82b --- /dev/null +++ b/audio/core/2.0/default/include/core/2.0/default/DevicesFactory.h @@ -0,0 +1,26 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_AUDIO_V2_0_DEVICESFACTORY_H +#define ANDROID_HARDWARE_AUDIO_V2_0_DEVICESFACTORY_H + +#include + +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION + +#endif // ANDROID_HARDWARE_AUDIO_V2_0_DEVICESFACTORY_H diff --git a/audio/core/2.0/default/include/core/2.0/default/ParametersUtil.h b/audio/core/2.0/default/include/core/2.0/default/ParametersUtil.h new file mode 100644 index 0000000000000000000000000000000000000000..a5c1c78f949421e7be6ca620117c7db33e67fd06 --- /dev/null +++ b/audio/core/2.0/default/include/core/2.0/default/ParametersUtil.h @@ -0,0 +1,26 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_AUDIO_V2_0_PARAMETERS_UTIL_H_ +#define ANDROID_HARDWARE_AUDIO_V2_0_PARAMETERS_UTIL_H_ + +#include + +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION + +#endif // ANDROID_HARDWARE_AUDIO_V2_0_PARAMETERS_UTIL_H_ diff --git a/audio/core/2.0/default/include/core/2.0/default/PrimaryDevice.h b/audio/core/2.0/default/include/core/2.0/default/PrimaryDevice.h new file mode 100644 index 0000000000000000000000000000000000000000..f89859700a39c60aed22aa6bb2ff6b782a66e7c4 --- /dev/null +++ b/audio/core/2.0/default/include/core/2.0/default/PrimaryDevice.h @@ -0,0 +1,28 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_AUDIO_V2_0_PRIMARYDEVICE_H +#define ANDROID_HARDWARE_AUDIO_V2_0_PRIMARYDEVICE_H + +#include + +#include "Device.h" + +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION + +#endif // ANDROID_HARDWARE_AUDIO_V2_0_PRIMARYDEVICE_H diff --git a/audio/core/2.0/default/include/core/2.0/default/Stream.h b/audio/core/2.0/default/include/core/2.0/default/Stream.h new file mode 100644 index 0000000000000000000000000000000000000000..a2d845609c19973a4ccc7539912a16ff384364ec --- /dev/null +++ b/audio/core/2.0/default/include/core/2.0/default/Stream.h @@ -0,0 +1,28 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_AUDIO_V2_0_STREAM_H +#define ANDROID_HARDWARE_AUDIO_V2_0_STREAM_H + +#include + +#include "ParametersUtil.h" + +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION + +#endif // ANDROID_HARDWARE_AUDIO_V2_0_STREAM_H diff --git a/audio/core/2.0/default/include/core/2.0/default/StreamIn.h b/audio/core/2.0/default/include/core/2.0/default/StreamIn.h new file mode 100644 index 0000000000000000000000000000000000000000..c36abbd68ec049bf1a031586fe7cc7049ed21667 --- /dev/null +++ b/audio/core/2.0/default/include/core/2.0/default/StreamIn.h @@ -0,0 +1,29 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_HARDWARE_AUDIO_V2_0_STREAMIN_H +#define ANDROID_HARDWARE_AUDIO_V2_0_STREAMIN_H + +#include + +#include "Device.h" +#include "Stream.h" + +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION + +#endif // ANDROID_HARDWARE_AUDIO_V2_0_STREAMIN_H diff --git a/audio/core/2.0/default/include/core/2.0/default/StreamOut.h b/audio/core/2.0/default/include/core/2.0/default/StreamOut.h new file mode 100644 index 0000000000000000000000000000000000000000..ab35687414c46e620966536c014e05cfe70c02dd --- /dev/null +++ b/audio/core/2.0/default/include/core/2.0/default/StreamOut.h @@ -0,0 +1,29 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_AUDIO_V2_0_STREAMOUT_H +#define ANDROID_HARDWARE_AUDIO_V2_0_STREAMOUT_H + +#include + +#include "Device.h" +#include "Stream.h" + +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION + +#endif // ANDROID_HARDWARE_AUDIO_V2_0_STREAMOUT_H diff --git a/audio/core/2.0/default/include/core/2.0/default/Util.h b/audio/core/2.0/default/include/core/2.0/default/Util.h new file mode 100644 index 0000000000000000000000000000000000000000..1f0e284d0aeec602bb6a7cd4a371be83932d7848 --- /dev/null +++ b/audio/core/2.0/default/include/core/2.0/default/Util.h @@ -0,0 +1,26 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_HARDWARE_AUDIO_V2_0_UTIL_H +#define ANDROID_HARDWARE_AUDIO_V2_0_UTIL_H + +#include + +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION + +#endif // ANDROID_HARDWARE_AUDIO_V2_0_UTIL_H diff --git a/audio/2.0/vts/OWNERS b/audio/core/2.0/vts/OWNERS similarity index 100% rename from audio/2.0/vts/OWNERS rename to audio/core/2.0/vts/OWNERS diff --git a/audio/2.0/vts/functional/Android.bp b/audio/core/2.0/vts/functional/Android.bp similarity index 97% rename from audio/2.0/vts/functional/Android.bp rename to audio/core/2.0/vts/functional/Android.bp index f3b2ca72cdf4abc988e27d8278f0a29a1b8c22a8..d1ddaff9481a8d60c89560ce2302e4cd24e41142 100644 --- a/audio/2.0/vts/functional/Android.bp +++ b/audio/core/2.0/vts/functional/Android.bp @@ -25,9 +25,8 @@ cc_test { "android.hardware.audio.common.test.utility", "android.hardware.audio@2.0", "android.hardware.audio.common@2.0", - "libxml2", - ], - shared_libs: [ "libicuuc", + "libicuuc_stubdata", + "libxml2", ], } diff --git a/audio/2.0/vts/functional/AudioPrimaryHidlHalTest.cpp b/audio/core/2.0/vts/functional/AudioPrimaryHidlHalTest.cpp similarity index 76% rename from audio/2.0/vts/functional/AudioPrimaryHidlHalTest.cpp rename to audio/core/2.0/vts/functional/AudioPrimaryHidlHalTest.cpp index 7c213d4e6b9d44df21f77b9183ef9f254e2212c2..bb1d26f9284e0163344ab3e3e1f5129c88f15890 100644 --- a/audio/2.0/vts/functional/AudioPrimaryHidlHalTest.cpp +++ b/audio/core/2.0/vts/functional/AudioPrimaryHidlHalTest.cpp @@ -20,6 +20,7 @@ #include #include #include +#include #include #include #include @@ -40,9 +41,11 @@ #include "utility/AssertOk.h" #include "utility/Documentation.h" #include "utility/EnvironmentTearDown.h" +#define AUDIO_HAL_VERSION V2_0 #include "utility/PrettyPrintAudioTypes.h" #include "utility/ReturnIn.h" +using std::initializer_list; using std::string; using std::to_string; using std::vector; @@ -85,8 +88,13 @@ using ::android::hardware::audio::common::V2_0::ThreadInfo; using namespace ::android::hardware::audio::common::test::utility; +class AudioHidlTestEnvironment : public ::Environment { + public: + virtual void registerTestServices() override { registerTestService(); } +}; + // Instance to register global tearDown -static Environment* environment; +static AudioHidlTestEnvironment* environment; class HidlTest : public ::testing::VtsHalHidlTargetTestBase { protected: @@ -106,8 +114,8 @@ class AudioHidlTest : public HidlTest { if (devicesFactory == nullptr) { environment->registerTearDown([] { devicesFactory.clear(); }); - devicesFactory = ::testing::VtsHalHidlTargetTestBase::getService< - IDevicesFactory>(); + devicesFactory = ::testing::VtsHalHidlTargetTestBase::getService( + environment->getServiceName("default")); } ASSERT_TRUE(devicesFactory != nullptr); } @@ -126,8 +134,7 @@ TEST_F(AudioHidlTest, OpenDeviceInvalidParameter) { doc::test("test passing an invalid parameter to openDevice"); IDevicesFactory::Result result; sp device; - ASSERT_OK(devicesFactory->openDevice(IDevicesFactory::Device(-1), - returnIn(result, device))); + ASSERT_OK(devicesFactory->openDevice(IDevicesFactory::Device(-1), returnIn(result, device))); ASSERT_EQ(IDevicesFactory::Result::INVALID_ARGUMENTS, result); ASSERT_TRUE(device == nullptr); } @@ -146,9 +153,8 @@ class AudioPrimaryHidlTest : public AudioHidlTest { if (device == nullptr) { IDevicesFactory::Result result; sp baseDevice; - ASSERT_OK( - devicesFactory->openDevice(IDevicesFactory::Device::PRIMARY, - returnIn(result, baseDevice))); + 
ASSERT_OK(devicesFactory->openDevice(IDevicesFactory::Device::PRIMARY, + returnIn(result, baseDevice))); ASSERT_OK(result); ASSERT_TRUE(baseDevice != nullptr); @@ -182,10 +188,8 @@ class AccessorPrimaryHidlTest : public AudioPrimaryHidlTest { protected: /** Test a property getter and setter. */ template - void testAccessors(const string& propertyName, - const vector& valuesToTest, Setter setter, - Getter getter, - const vector& invalidValues = {}) { + void testAccessors(const string& propertyName, const vector& valuesToTest, + Setter setter, Getter getter, const vector& invalidValues = {}) { Property initialValue; // Save initial value to restore it at the end // of the test ASSERT_OK((device.get()->*getter)(returnIn(res, initialValue))); @@ -203,21 +207,17 @@ class AccessorPrimaryHidlTest : public AudioPrimaryHidlTest { } for (Property invalidValue : invalidValues) { - SCOPED_TRACE("Try to set " + propertyName + - " with the invalid value " + + SCOPED_TRACE("Try to set " + propertyName + " with the invalid value " + testing::PrintToString(invalidValue)); - EXPECT_RESULT(Result::INVALID_ARGUMENTS, - (device.get()->*setter)(invalidValue)); + EXPECT_RESULT(Result::INVALID_ARGUMENTS, (device.get()->*setter)(invalidValue)); } - ASSERT_OK( - (device.get()->*setter)(initialValue)); // restore initial value + ASSERT_OK((device.get()->*setter)(initialValue)); // restore initial value } /** Test the getter and setter of an optional feature. */ template - void testOptionalAccessors(const string& propertyName, - const vector& valuesToTest, + void testOptionalAccessors(const string& propertyName, const vector& valuesToTest, Setter setter, Getter getter, const vector& invalidValues = {}) { doc::test("Test the optional " + propertyName + " getters and setter"); @@ -232,8 +232,7 @@ class AccessorPrimaryHidlTest : public AudioPrimaryHidlTest { ASSERT_OK(res); // If it is supported it must succeed } // The feature is supported, test it - testAccessors(propertyName, valuesToTest, setter, getter, - invalidValues); + testAccessors(propertyName, valuesToTest, setter, getter, invalidValues); } }; @@ -241,8 +240,7 @@ using BoolAccessorPrimaryHidlTest = AccessorPrimaryHidlTest; TEST_F(BoolAccessorPrimaryHidlTest, MicMuteTest) { doc::test("Check that the mic can be muted and unmuted"); - testAccessors("mic mute", {true, false, true}, &IDevice::setMicMute, - &IDevice::getMicMute); + testAccessors("mic mute", {true, false, true}, &IDevice::setMicMute, &IDevice::getMicMute); // TODO: check that the mic is really muted (all sample are 0) } @@ -250,18 +248,17 @@ TEST_F(BoolAccessorPrimaryHidlTest, MasterMuteTest) { doc::test( "If master mute is supported, try to mute and unmute the master " "output"); - testOptionalAccessors("master mute", {true, false, true}, - &IDevice::setMasterMute, &IDevice::getMasterMute); + testOptionalAccessors("master mute", {true, false, true}, &IDevice::setMasterMute, + &IDevice::getMasterMute); // TODO: check that the master volume is really muted } using FloatAccessorPrimaryHidlTest = AccessorPrimaryHidlTest; TEST_F(FloatAccessorPrimaryHidlTest, MasterVolumeTest) { doc::test("Test the master volume if supported"); - testOptionalAccessors("master volume", {0, 0.5, 1}, - &IDevice::setMasterVolume, &IDevice::getMasterVolume, - {-0.1, 1.1, NAN, INFINITY, -INFINITY, - 1 + std::numeric_limits::epsilon()}); + testOptionalAccessors( + "master volume", {0, 0.5, 1}, &IDevice::setMasterVolume, &IDevice::getMasterVolume, + {-0.1, 1.1, NAN, INFINITY, -INFINITY, 1 + std::numeric_limits::epsilon()}); // 
TODO: check that the master volume is really changed } @@ -300,17 +297,14 @@ class AudioConfigPrimaryTest : public AudioPatchPrimaryHidlTest { public: // Cache result ? static const vector getRequiredSupportPlaybackAudioConfig() { - return combineAudioConfig( - {AudioChannelMask::OUT_STEREO, AudioChannelMask::OUT_MONO}, - {8000, 11025, 16000, 22050, 32000, 44100}, - {AudioFormat::PCM_16_BIT}); + return combineAudioConfig({AudioChannelMask::OUT_STEREO, AudioChannelMask::OUT_MONO}, + {8000, 11025, 16000, 22050, 32000, 44100}, + {AudioFormat::PCM_16_BIT}); } - static const vector - getRecommendedSupportPlaybackAudioConfig() { - return combineAudioConfig( - {AudioChannelMask::OUT_STEREO, AudioChannelMask::OUT_MONO}, - {24000, 48000}, {AudioFormat::PCM_16_BIT}); + static const vector getRecommendedSupportPlaybackAudioConfig() { + return combineAudioConfig({AudioChannelMask::OUT_STEREO, AudioChannelMask::OUT_MONO}, + {24000, 48000}, {AudioFormat::PCM_16_BIT}); } static const vector getSupportedPlaybackAudioConfig() { @@ -320,8 +314,7 @@ class AudioConfigPrimaryTest : public AudioPatchPrimaryHidlTest { } static const vector getRequiredSupportCaptureAudioConfig() { - return combineAudioConfig({AudioChannelMask::IN_MONO}, - {8000, 11025, 16000, 44100}, + return combineAudioConfig({AudioChannelMask::IN_MONO}, {8000, 11025, 16000, 44100}, {AudioFormat::PCM_16_BIT}); } static const vector getRecommendedSupportCaptureAudioConfig() { @@ -335,9 +328,9 @@ class AudioConfigPrimaryTest : public AudioPatchPrimaryHidlTest { } private: - static const vector combineAudioConfig( - vector channelMasks, vector sampleRates, - vector formats) { + static const vector combineAudioConfig(vector channelMasks, + vector sampleRates, + vector formats) { vector configs; for (auto channelMask : channelMasks) { for (auto sampleRate : sampleRates) { @@ -361,8 +354,7 @@ class AudioConfigPrimaryTest : public AudioPatchPrimaryHidlTest { * As the only parameter changing are channel mask and sample rate, * only print those ones in the test name. */ -static string generateTestName( - const testing::TestParamInfo& info) { +static string generateTestName(const testing::TestParamInfo& info) { const AudioConfig& config = info.param; return to_string(info.index) + "__" + to_string(config.sampleRateHz) + "_" + // "MONO" is more clear than "FRONT_LEFT" @@ -380,15 +372,12 @@ static string generateTestName( // android.hardware.microphone // how to get this value ? is it a property ??? 
-class AudioCaptureConfigPrimaryTest - : public AudioConfigPrimaryTest, - public ::testing::WithParamInterface { +class AudioCaptureConfigPrimaryTest : public AudioConfigPrimaryTest, + public ::testing::WithParamInterface { protected: - void inputBufferSizeTest(const AudioConfig& audioConfig, - bool supportRequired) { + void inputBufferSizeTest(const AudioConfig& audioConfig, bool supportRequired) { uint64_t bufferSize; - ASSERT_OK( - device->getInputBufferSize(audioConfig, returnIn(res, bufferSize))); + ASSERT_OK(device->getInputBufferSize(audioConfig, returnIn(res, bufferSize))); switch (res) { case Result::INVALID_ARGUMENTS: @@ -400,8 +389,7 @@ class AudioCaptureConfigPrimaryTest EXPECT_GT(bufferSize, uint64_t(0)); break; default: - FAIL() << "Invalid return status: " - << ::testing::PrintToString(res); + FAIL() << "Invalid return status: " << ::testing::PrintToString(res); } } }; @@ -417,13 +405,11 @@ TEST_P(RequiredInputBufferSizeTest, RequiredInputBufferSizeTest) { } INSTANTIATE_TEST_CASE_P( RequiredInputBufferSize, RequiredInputBufferSizeTest, - ::testing::ValuesIn( - AudioConfigPrimaryTest::getRequiredSupportCaptureAudioConfig()), + ::testing::ValuesIn(AudioConfigPrimaryTest::getRequiredSupportCaptureAudioConfig()), &generateTestName); INSTANTIATE_TEST_CASE_P( SupportedInputBufferSize, RequiredInputBufferSizeTest, - ::testing::ValuesIn( - AudioConfigPrimaryTest::getSupportedCaptureAudioConfig()), + ::testing::ValuesIn(AudioConfigPrimaryTest::getSupportedCaptureAudioConfig()), &generateTestName); // Test that the recommended capture config are supported or lead to a @@ -437,8 +423,7 @@ TEST_P(OptionalInputBufferSizeTest, OptionalInputBufferSizeTest) { } INSTANTIATE_TEST_CASE_P( RecommendedCaptureAudioConfigSupport, OptionalInputBufferSizeTest, - ::testing::ValuesIn( - AudioConfigPrimaryTest::getRecommendedSupportCaptureAudioConfig()), + ::testing::ValuesIn(AudioConfigPrimaryTest::getRecommendedSupportCaptureAudioConfig()), &generateTestName); ////////////////////////////////////////////////////////////////////////////// @@ -530,11 +515,9 @@ class OpenStreamTest : public AudioConfigPrimaryTest, void testOpen(Open openStream, const AudioConfig& config) { // FIXME: Open a stream without an IOHandle // This is not required to be accepted by hal implementations - AudioIoHandle ioHandle = - (AudioIoHandle)AudioHandleConsts::AUDIO_IO_HANDLE_NONE; + AudioIoHandle ioHandle = (AudioIoHandle)AudioHandleConsts::AUDIO_IO_HANDLE_NONE; AudioConfig suggestedConfig{}; - ASSERT_OK(openStream(ioHandle, config, - returnIn(res, stream, suggestedConfig))); + ASSERT_OK(openStream(ioHandle, config, returnIn(res, stream, suggestedConfig))); // TODO: only allow failure for RecommendedPlaybackAudioConfig switch (res) { @@ -547,17 +530,15 @@ class OpenStreamTest : public AudioConfigPrimaryTest, AudioConfig suggestedConfigRetry; // Could not open stream with config, try again with the // suggested one - ASSERT_OK( - openStream(ioHandle, suggestedConfig, - returnIn(res, stream, suggestedConfigRetry))); + ASSERT_OK(openStream(ioHandle, suggestedConfig, + returnIn(res, stream, suggestedConfigRetry))); // This time it must succeed ASSERT_OK(res); ASSERT_TRUE(stream != nullptr); audioConfig = suggestedConfig; break; default: - FAIL() << "Invalid return status: " - << ::testing::PrintToString(res); + FAIL() << "Invalid return status: " << ::testing::PrintToString(res); } open = true; } @@ -588,12 +569,10 @@ class OutputStreamTest : public OpenStreamTest { ASSERT_NO_FATAL_FAILURE(OpenStreamTest::SetUp()); // setup 
base address.device = AudioDevice::OUT_DEFAULT; const AudioConfig& config = GetParam(); - AudioOutputFlag flags = - AudioOutputFlag::NONE; // TODO: test all flag combination + AudioOutputFlag flags = AudioOutputFlag::NONE; // TODO: test all flag combination testOpen( [&](AudioIoHandle handle, AudioConfig config, auto cb) { - return device->openOutputStream(handle, address, config, flags, - cb); + return device->openOutputStream(handle, address, config, flags, cb); }, config); } @@ -606,19 +585,16 @@ TEST_P(OutputStreamTest, OpenOutputStreamTest) { } INSTANTIATE_TEST_CASE_P( RequiredOutputStreamConfigSupport, OutputStreamTest, - ::testing::ValuesIn( - AudioConfigPrimaryTest::getRequiredSupportPlaybackAudioConfig()), + ::testing::ValuesIn(AudioConfigPrimaryTest::getRequiredSupportPlaybackAudioConfig()), &generateTestName); INSTANTIATE_TEST_CASE_P( SupportedOutputStreamConfig, OutputStreamTest, - ::testing::ValuesIn( - AudioConfigPrimaryTest::getSupportedPlaybackAudioConfig()), + ::testing::ValuesIn(AudioConfigPrimaryTest::getSupportedPlaybackAudioConfig()), &generateTestName); INSTANTIATE_TEST_CASE_P( RecommendedOutputStreamConfigSupport, OutputStreamTest, - ::testing::ValuesIn( - AudioConfigPrimaryTest::getRecommendedSupportPlaybackAudioConfig()), + ::testing::ValuesIn(AudioConfigPrimaryTest::getRecommendedSupportPlaybackAudioConfig()), &generateTestName); ////////////////////////////// openInputStream ////////////////////////////// @@ -628,14 +604,11 @@ class InputStreamTest : public OpenStreamTest { ASSERT_NO_FATAL_FAILURE(OpenStreamTest::SetUp()); // setup base address.device = AudioDevice::IN_DEFAULT; const AudioConfig& config = GetParam(); - AudioInputFlag flags = - AudioInputFlag::NONE; // TODO: test all flag combination - AudioSource source = - AudioSource::DEFAULT; // TODO: test all flag combination + AudioInputFlag flags = AudioInputFlag::NONE; // TODO: test all flag combination + AudioSource source = AudioSource::DEFAULT; // TODO: test all flag combination testOpen( [&](AudioIoHandle handle, AudioConfig config, auto cb) { - return device->openInputStream(handle, address, config, flags, - source, cb); + return device->openInputStream(handle, address, config, flags, source, cb); }, config); } @@ -649,19 +622,16 @@ TEST_P(InputStreamTest, OpenInputStreamTest) { } INSTANTIATE_TEST_CASE_P( RequiredInputStreamConfigSupport, InputStreamTest, - ::testing::ValuesIn( - AudioConfigPrimaryTest::getRequiredSupportCaptureAudioConfig()), + ::testing::ValuesIn(AudioConfigPrimaryTest::getRequiredSupportCaptureAudioConfig()), &generateTestName); INSTANTIATE_TEST_CASE_P( SupportedInputStreamConfig, InputStreamTest, - ::testing::ValuesIn( - AudioConfigPrimaryTest::getSupportedCaptureAudioConfig()), + ::testing::ValuesIn(AudioConfigPrimaryTest::getSupportedCaptureAudioConfig()), &generateTestName); INSTANTIATE_TEST_CASE_P( RecommendedInputStreamConfigSupport, InputStreamTest, - ::testing::ValuesIn( - AudioConfigPrimaryTest::getRecommendedSupportCaptureAudioConfig()), + ::testing::ValuesIn(AudioConfigPrimaryTest::getRecommendedSupportCaptureAudioConfig()), &generateTestName); ////////////////////////////////////////////////////////////////////////////// @@ -691,10 +661,8 @@ static R extract(Return ret) { code; \ } -TEST_IO_STREAM( - GetFrameCount, - "Check that the stream frame count == the one it was opened with", - ASSERT_EQ(audioConfig.frameCount, extract(stream->getFrameCount()))) +TEST_IO_STREAM(GetFrameCount, "Check that the stream frame count == the one it was opened with", + 
ASSERT_EQ(audioConfig.frameCount, extract(stream->getFrameCount()))) TEST_IO_STREAM(GetSampleRate, "Check that the stream sample rate == the one it was opened with", ASSERT_EQ(audioConfig.sampleRateHz, extract(stream->getSampleRate()))) @@ -702,25 +670,22 @@ TEST_IO_STREAM(GetSampleRate, "Check that the stream sample rate == the one it w TEST_IO_STREAM(GetChannelMask, "Check that the stream channel mask == the one it was opened with", ASSERT_EQ(audioConfig.channelMask, extract(stream->getChannelMask()))) -TEST_IO_STREAM(GetFormat, - "Check that the stream format == the one it was opened with", +TEST_IO_STREAM(GetFormat, "Check that the stream format == the one it was opened with", ASSERT_EQ(audioConfig.format, extract(stream->getFormat()))) // TODO: for now only check that the framesize is not incoherent -TEST_IO_STREAM(GetFrameSize, - "Check that the stream frame size == the one it was opened with", +TEST_IO_STREAM(GetFrameSize, "Check that the stream frame size == the one it was opened with", ASSERT_GT(extract(stream->getFrameSize()), 0U)) -TEST_IO_STREAM(GetBufferSize, - "Check that the stream buffer size== the one it was opened with", - ASSERT_GE(extract(stream->getBufferSize()), - extract(stream->getFrameSize()))); +TEST_IO_STREAM(GetBufferSize, "Check that the stream buffer size== the one it was opened with", + ASSERT_GE(extract(stream->getBufferSize()), extract(stream->getFrameSize()))); -template +template static void testCapabilityGetter(const string& name, IStream* stream, - Property currentValue, CapabilityGetter capablityGetter, - Getter getter, Setter setter) { + Return (IStream::*getter)(), + Return (IStream::*setter)(Property), + bool currentMustBeSupported = true) { hidl_vec capabilities; ASSERT_OK((stream->*capablityGetter)(returnIn(capabilities))); if (capabilities.size() == 0) { @@ -731,42 +696,46 @@ static void testCapabilityGetter(const string& name, IStream* stream, doc::partialTest(name + " is not supported"); return; }; - // TODO: This code has never been tested on a hal that supports - // getSupportedSampleRates - EXPECT_NE(std::find(capabilities.begin(), capabilities.end(), currentValue), - capabilities.end()) - << "current " << name << " is not in the list of the supported ones " - << toString(capabilities); + + if (currentMustBeSupported) { + Property currentValue = extract((stream->*getter)()); + EXPECT_NE(std::find(capabilities.begin(), capabilities.end(), currentValue), + capabilities.end()) + << "current " << name << " is not in the list of the supported ones " + << toString(capabilities); + } // Check that all declared supported values are indeed supported for (auto capability : capabilities) { - ASSERT_OK((stream->*setter)(capability)); + auto ret = (stream->*setter)(capability); + ASSERT_TRUE(ret.isOk()); + if (ret == Result::NOT_SUPPORTED) { + doc::partialTest("Setter is not supported"); + return; + } + ASSERT_OK(ret); ASSERT_EQ(capability, extract((stream->*getter)())); } } -TEST_IO_STREAM(SupportedSampleRate, - "Check that the stream sample rate is declared as supported", +TEST_IO_STREAM(SupportedSampleRate, "Check that the stream sample rate is declared as supported", testCapabilityGetter("getSupportedSampleRate", stream.get(), - extract(stream->getSampleRate()), - &IStream::getSupportedSampleRates, - &IStream::getSampleRate, - &IStream::setSampleRate)) - -TEST_IO_STREAM(SupportedChannelMask, - "Check that the stream channel mask is declared as supported", + &IStream::getSupportedSampleRates, &IStream::getSampleRate, + &IStream::setSampleRate, + // 
getSupportedSampleRate returns the native sampling rates, + // (the sampling rates that can be played without resampling) + // but other sampling rates can be supported by the HAL. + false)) + +TEST_IO_STREAM(SupportedChannelMask, "Check that the stream channel mask is declared as supported", testCapabilityGetter("getSupportedChannelMask", stream.get(), - extract(stream->getChannelMask()), - &IStream::getSupportedChannelMasks, - &IStream::getChannelMask, + &IStream::getSupportedChannelMasks, &IStream::getChannelMask, &IStream::setChannelMask)) -TEST_IO_STREAM(SupportedFormat, - "Check that the stream format is declared as supported", +TEST_IO_STREAM(SupportedFormat, "Check that the stream format is declared as supported", testCapabilityGetter("getSupportedFormat", stream.get(), - extract(stream->getFormat()), - &IStream::getSupportedFormats, - &IStream::getFormat, &IStream::setFormat)) + &IStream::getSupportedFormats, &IStream::getFormat, + &IStream::setFormat)) static void testGetDevice(IStream* stream, AudioDevice expectedDevice) { // Unfortunately the interface does not allow the implementation to return @@ -780,27 +749,22 @@ static void testGetDevice(IStream* stream, AudioDevice expectedDevice) { << "\n Actual: " << ::testing::PrintToString(device); } -TEST_IO_STREAM(GetDevice, - "Check that the stream device == the one it was opened with", - areAudioPatchesSupported() - ? doc::partialTest("Audio patches are supported") - : testGetDevice(stream.get(), address.device)) +TEST_IO_STREAM(GetDevice, "Check that the stream device == the one it was opened with", + areAudioPatchesSupported() ? doc::partialTest("Audio patches are supported") + : testGetDevice(stream.get(), address.device)) static void testSetDevice(IStream* stream, const DeviceAddress& address) { DeviceAddress otherAddress = address; - otherAddress.device = (address.device & AudioDevice::BIT_IN) == 0 - ? AudioDevice::OUT_SPEAKER - : AudioDevice::IN_BUILTIN_MIC; + otherAddress.device = (address.device & AudioDevice::BIT_IN) == 0 ? AudioDevice::OUT_SPEAKER + : AudioDevice::IN_BUILTIN_MIC; EXPECT_OK(stream->setDevice(otherAddress)); ASSERT_OK(stream->setDevice(address)); // Go back to the original value } -TEST_IO_STREAM( - SetDevice, - "Check that the stream can be rerouted to SPEAKER or BUILTIN_MIC", - areAudioPatchesSupported() ? doc::partialTest("Audio patches are supported") - : testSetDevice(stream.get(), address)) +TEST_IO_STREAM(SetDevice, "Check that the stream can be rerouted to SPEAKER or BUILTIN_MIC", + areAudioPatchesSupported() ? 
doc::partialTest("Audio patches are supported") + : testSetDevice(stream.get(), address)) static void testGetAudioProperties(IStream* stream, AudioConfig expectedConfig) { uint32_t sampleRateHz; @@ -823,8 +787,7 @@ TEST_IO_STREAM(GetAudioProperties, static void testConnectedState(IStream* stream) { DeviceAddress address = {}; using AD = AudioDevice; - for (auto device : - {AD::OUT_HDMI, AD::OUT_WIRED_HEADPHONE, AD::IN_USB_HEADSET}) { + for (auto device : {AD::OUT_HDMI, AD::OUT_WIRED_HEADPHONE, AD::IN_USB_HEADSET}) { address.device = device; ASSERT_OK(stream->setConnectedState(address, true)); @@ -836,17 +799,15 @@ TEST_IO_STREAM(SetConnectedState, "deconnection", testConnectedState(stream.get())) -static auto invalidArgsOrNotSupportedOrOK = {Result::INVALID_ARGUMENTS, - Result::NOT_SUPPORTED, Result::OK}; +static auto invalidArgsOrNotSupportedOrOK = {Result::INVALID_ARGUMENTS, Result::NOT_SUPPORTED, + Result::OK}; TEST_IO_STREAM(SetHwAvSync, "Try to set hardware sync to an invalid value", - ASSERT_RESULT(invalidArgsOrNotSupportedOrOK, - stream->setHwAvSync(666))) + ASSERT_RESULT(invalidArgsOrNotSupportedOrOK, stream->setHwAvSync(666))) -TEST_IO_STREAM(GetHwAvSync, "Get hardware sync can not fail", - ASSERT_IS_OK(device->getHwAvSync())); +TEST_IO_STREAM(GetHwAvSync, "Get hardware sync can not fail", ASSERT_IS_OK(device->getHwAvSync())); static void checkGetNoParameter(IStream* stream, hidl_vec keys, - vector expectedResults) { + initializer_list expectedResults) { hidl_vec parameters; Result res; ASSERT_OK(stream->getParameters(keys, returnIn(res, parameters))); @@ -865,30 +826,23 @@ static void checkGetNoParameter(IStream* stream, hidl_vec keys, TEST_IO_STREAM(getEmptySetParameter, "Retrieve the values of an empty set", checkGetNoParameter(stream.get(), {} /* keys */, {Result::OK})) -TEST_IO_STREAM(getNonExistingParameter, - "Retrieve the values of an non existing parameter", - checkGetNoParameter(stream.get(), - {"Non existing key"} /* keys */, +TEST_IO_STREAM(getNonExistingParameter, "Retrieve the values of an non existing parameter", + checkGetNoParameter(stream.get(), {"Non existing key"} /* keys */, {Result::NOT_SUPPORTED})) -TEST_IO_STREAM(setEmptySetParameter, - "Set the values of an empty set of parameters", +TEST_IO_STREAM(setEmptySetParameter, "Set the values of an empty set of parameters", ASSERT_RESULT(Result::OK, stream->setParameters({}))) -TEST_IO_STREAM( - setNonExistingParameter, "Set the values of an non existing parameter", - // Unfortunately, the set_parameter legacy interface did not return any - // error code when a key is not supported. - // To allow implementation to just wrapped the legacy one, consider OK as a - // valid result for setting a non existing parameter. - ASSERT_RESULT(invalidArgsOrNotSupportedOrOK, - stream->setParameters({{"non existing key", "0"}}))) - -TEST_IO_STREAM(DebugDump, - "Check that a stream can dump its state without error", - testDebugDump([this](const auto& handle) { - return stream->debugDump(handle); - })) +TEST_IO_STREAM(setNonExistingParameter, "Set the values of an non existing parameter", + // Unfortunately, the set_parameter legacy interface did not return any + // error code when a key is not supported. + // To allow implementation to just wrapped the legacy one, consider OK as a + // valid result for setting a non existing parameter. 
+ ASSERT_RESULT(invalidArgsOrNotSupportedOrOK, + stream->setParameters({{"non existing key", "0"}}))) + +TEST_IO_STREAM(DebugDump, "Check that a stream can dump its state without error", + testDebugDump([this](const auto& handle) { return stream->debugDump(handle); })) TEST_IO_STREAM(DebugDumpInvalidArguments, "Check that the stream dump doesn't crash on invalid arguments", @@ -900,10 +854,8 @@ TEST_IO_STREAM(DebugDumpInvalidArguments, TEST_IO_STREAM(AddNonExistingEffect, "Adding a non existing effect should fail", ASSERT_RESULT(Result::INVALID_ARGUMENTS, stream->addEffect(666))) -TEST_IO_STREAM(RemoveNonExistingEffect, - "Removing a non existing effect should fail", - ASSERT_RESULT(Result::INVALID_ARGUMENTS, - stream->removeEffect(666))) +TEST_IO_STREAM(RemoveNonExistingEffect, "Removing a non existing effect should fail", + ASSERT_RESULT(Result::INVALID_ARGUMENTS, stream->removeEffect(666))) // TODO: positive tests @@ -914,29 +866,22 @@ TEST_IO_STREAM(RemoveNonExistingEffect, TEST_IO_STREAM(standby, "Make sure the stream can be put in stanby", ASSERT_OK(stream->standby())) // can not fail -static vector invalidStateOrNotSupported = {Result::INVALID_STATE, - Result::NOT_SUPPORTED}; +static constexpr auto invalidStateOrNotSupported = {Result::INVALID_STATE, Result::NOT_SUPPORTED}; -TEST_IO_STREAM(startNoMmap, - "Starting a mmaped stream before mapping it should fail", +TEST_IO_STREAM(startNoMmap, "Starting a mmaped stream before mapping it should fail", ASSERT_RESULT(invalidStateOrNotSupported, stream->start())) -TEST_IO_STREAM(stopNoMmap, - "Stopping a mmaped stream before mapping it should fail", +TEST_IO_STREAM(stopNoMmap, "Stopping a mmaped stream before mapping it should fail", ASSERT_RESULT(invalidStateOrNotSupported, stream->stop())) -TEST_IO_STREAM(getMmapPositionNoMmap, - "Get a stream Mmap position before mapping it should fail", +TEST_IO_STREAM(getMmapPositionNoMmap, "Get a stream Mmap position before mapping it should fail", ASSERT_RESULT(invalidStateOrNotSupported, stream->stop())) -TEST_IO_STREAM(close, "Make sure a stream can be closed", - ASSERT_OK(closeStream())) -TEST_IO_STREAM(closeTwice, "Make sure a stream can not be closed twice", - ASSERT_OK(closeStream()); +TEST_IO_STREAM(close, "Make sure a stream can be closed", ASSERT_OK(closeStream())) +TEST_IO_STREAM(closeTwice, "Make sure a stream can not be closed twice", ASSERT_OK(closeStream()); ASSERT_RESULT(Result::INVALID_STATE, closeStream())) -static auto invalidArgsOrNotSupported = {Result::INVALID_ARGUMENTS, - Result::NOT_SUPPORTED}; +static auto invalidArgsOrNotSupported = {Result::INVALID_ARGUMENTS, Result::NOT_SUPPORTED}; static void testCreateTooBigMmapBuffer(IStream* stream) { MmapBufferInfo info; Result res; @@ -958,18 +903,16 @@ static void testGetMmapPositionOfNonMmapedStream(IStream* stream) { ASSERT_RESULT(invalidArgsOrNotSupported, res); } -TEST_IO_STREAM( - GetMmapPositionOfNonMmapedStream, - "Retrieving the mmap position of a non mmaped stream should fail", - testGetMmapPositionOfNonMmapedStream(stream.get())) +TEST_IO_STREAM(GetMmapPositionOfNonMmapedStream, + "Retrieving the mmap position of a non mmaped stream should fail", + testGetMmapPositionOfNonMmapedStream(stream.get())) ////////////////////////////////////////////////////////////////////////////// ///////////////////////////////// StreamIn /////////////////////////////////// ////////////////////////////////////////////////////////////////////////////// TEST_P(InputStreamTest, GetAudioSource) { - doc::test( - "Retrieving the audio source of an 
input stream should always succeed"); + doc::test("Retrieving the audio source of an input stream should always succeed"); AudioSource source; ASSERT_OK(stream->getAudioSource(returnIn(res, source))); if (res == Result::NOT_SUPPORTED) { @@ -981,11 +924,9 @@ TEST_P(InputStreamTest, GetAudioSource) { } static void testUnitaryGain(std::function(float)> setGain) { - for (float value : - (float[]){-INFINITY, -1.0, 1.0 + std::numeric_limits::epsilon(), - 2.0, INFINITY, NAN}) { - EXPECT_RESULT(Result::INVALID_ARGUMENTS, setGain(value)) << "value=" - << value; + for (float value : (float[]){-INFINITY, -1.0, 1.0 + std::numeric_limits::epsilon(), 2.0, + INFINITY, NAN}) { + EXPECT_RESULT(Result::INVALID_ARGUMENTS, setGain(value)) << "value=" << value; } // Do not consider -0.0 as an invalid value as it is == with 0.0 for (float value : {-0.0, 0.0, 0.01, 0.5, 0.09, 1.0 /* Restore volume*/}) { @@ -993,8 +934,8 @@ static void testUnitaryGain(std::function(float)> setGain) { } } -static void testOptionalUnitaryGain( - std::function(float)> setGain, string debugName) { +static void testOptionalUnitaryGain(std::function(float)> setGain, + string debugName) { auto result = setGain(1); ASSERT_IS_OK(result); if (result == Result::NOT_SUPPORTED) { @@ -1006,32 +947,26 @@ static void testOptionalUnitaryGain( TEST_P(InputStreamTest, SetGain) { doc::test("The gain of an input stream should only be set between [0,1]"); - testOptionalUnitaryGain( - [this](float volume) { return stream->setGain(volume); }, - "InputStream::setGain"); + testOptionalUnitaryGain([this](float volume) { return stream->setGain(volume); }, + "InputStream::setGain"); } -static void testPrepareForReading(IStreamIn* stream, uint32_t frameSize, - uint32_t framesCount) { +static void testPrepareForReading(IStreamIn* stream, uint32_t frameSize, uint32_t framesCount) { Result res; // Ignore output parameters as the call should fail - ASSERT_OK(stream->prepareForReading( - frameSize, framesCount, - [&res](auto r, auto&, auto&, auto&, auto&) { res = r; })); + ASSERT_OK(stream->prepareForReading(frameSize, framesCount, + [&res](auto r, auto&, auto&, auto&, auto&) { res = r; })); EXPECT_RESULT(Result::INVALID_ARGUMENTS, res); } TEST_P(InputStreamTest, PrepareForReadingWithZeroBuffer) { - doc::test( - "Preparing a stream for reading with a 0 sized buffer should fail"); + doc::test("Preparing a stream for reading with a 0 sized buffer should fail"); testPrepareForReading(stream.get(), 0, 0); } TEST_P(InputStreamTest, PrepareForReadingWithHugeBuffer) { - doc::test( - "Preparing a stream for reading with a 2^32 sized buffer should fail"); - testPrepareForReading(stream.get(), 1, - std::numeric_limits::max()); + doc::test("Preparing a stream for reading with a 2^32 sized buffer should fail"); + testPrepareForReading(stream.get(), 1, std::numeric_limits::max()); } TEST_P(InputStreamTest, PrepareForReadingCheckOverflow) { @@ -1043,8 +978,7 @@ TEST_P(InputStreamTest, PrepareForReadingCheckOverflow) { } TEST_P(InputStreamTest, GetInputFramesLost) { - doc::test( - "The number of frames lost on a never started stream should be 0"); + doc::test("The number of frames lost on a never started stream should be 0"); auto ret = stream->getInputFramesLost(); ASSERT_IS_OK(ret); uint32_t framesLost{ret}; @@ -1074,32 +1008,26 @@ TEST_P(OutputStreamTest, getLatency) { TEST_P(OutputStreamTest, setVolume) { doc::test("Try to set the output volume"); - testOptionalUnitaryGain( - [this](float volume) { return stream->setVolume(volume, volume); }, - "setVolume"); + 
testOptionalUnitaryGain([this](float volume) { return stream->setVolume(volume, volume); }, + "setVolume"); } -static void testPrepareForWriting(IStreamOut* stream, uint32_t frameSize, - uint32_t framesCount) { +static void testPrepareForWriting(IStreamOut* stream, uint32_t frameSize, uint32_t framesCount) { Result res; // Ignore output parameters as the call should fail - ASSERT_OK(stream->prepareForWriting( - frameSize, framesCount, - [&res](auto r, auto&, auto&, auto&, auto&) { res = r; })); + ASSERT_OK(stream->prepareForWriting(frameSize, framesCount, + [&res](auto r, auto&, auto&, auto&, auto&) { res = r; })); EXPECT_RESULT(Result::INVALID_ARGUMENTS, res); } TEST_P(OutputStreamTest, PrepareForWriteWithZeroBuffer) { - doc::test( - "Preparing a stream for writing with a 0 sized buffer should fail"); + doc::test("Preparing a stream for writing with a 0 sized buffer should fail"); testPrepareForWriting(stream.get(), 0, 0); } TEST_P(OutputStreamTest, PrepareForWriteWithHugeBuffer) { - doc::test( - "Preparing a stream for writing with a 2^32 sized buffer should fail"); - testPrepareForWriting(stream.get(), 1, - std::numeric_limits::max()); + doc::test("Preparing a stream for writing with a 2^32 sized buffer should fail"); + testPrepareForWriting(stream.get(), 1, std::numeric_limits::max()); } TEST_P(OutputStreamTest, PrepareForWritingCheckOverflow) { @@ -1125,8 +1053,7 @@ struct Capability { }; TEST_P(OutputStreamTest, SupportsPauseAndResumeAndDrain) { - doc::test( - "Implementation must expose pause, resume and drain capabilities"); + doc::test("Implementation must expose pause, resume and drain capabilities"); Capability(stream.get()); } @@ -1280,13 +1207,10 @@ TEST_P(OutputStreamTest, GetPresentationPositionStop) { struct timespec currentTS; ASSERT_EQ(0, clock_gettime(CLOCK_MONOTONIC, ¤tTS)) << errno; - auto toMicroSec = [](uint64_t sec, auto nsec) { - return sec * 1e+6 + nsec / 1e+3; - }; + auto toMicroSec = [](uint64_t sec, auto nsec) { return sec * 1e+6 + nsec / 1e+3; }; auto currentTime = toMicroSec(currentTS.tv_sec, currentTS.tv_nsec); auto mesureTime = toMicroSec(mesureTS.tvSec, mesureTS.tvNSec); - ASSERT_PRED2([](auto c, auto m) { return c - m < 1e+6; }, currentTime, - mesureTime); + ASSERT_PRED2([](auto c, auto m) { return c - m < 1e+6; }, currentTime, mesureTime); } ////////////////////////////////////////////////////////////////////////////// @@ -1303,15 +1227,13 @@ TEST_F(AudioPrimaryHidlTest, setMode) { "Make sure setMode always succeeds if mode is valid " "and fails otherwise"); // Test Invalid values - for (AudioMode mode : - {AudioMode::INVALID, AudioMode::CURRENT, AudioMode::CNT}) { + for (AudioMode mode : {AudioMode::INVALID, AudioMode::CURRENT, AudioMode::CNT}) { SCOPED_TRACE("mode=" + toString(mode)); ASSERT_RESULT(Result::INVALID_ARGUMENTS, device->setMode(mode)); } // Test valid values - for (AudioMode mode : - {AudioMode::IN_CALL, AudioMode::IN_COMMUNICATION, AudioMode::RINGTONE, - AudioMode::NORMAL /* Make sure to leave the test in normal mode */}) { + for (AudioMode mode : {AudioMode::IN_CALL, AudioMode::IN_COMMUNICATION, AudioMode::RINGTONE, + AudioMode::NORMAL /* Make sure to leave the test in normal mode */}) { SCOPED_TRACE("mode=" + toString(mode)); ASSERT_OK(device->setMode(mode)); } @@ -1334,15 +1256,13 @@ TEST_F(BoolAccessorPrimaryHidlTest, setGetBtScoWidebandEnabled) { using TtyModeAccessorPrimaryHidlTest = AccessorPrimaryHidlTest; TEST_F(TtyModeAccessorPrimaryHidlTest, setGetTtyMode) { doc::test("Query and set the TTY mode state"); - 
testOptionalAccessors( - "TTY mode", {TtyMode::OFF, TtyMode::HCO, TtyMode::VCO, TtyMode::FULL}, - &IPrimaryDevice::setTtyMode, &IPrimaryDevice::getTtyMode); + testOptionalAccessors("TTY mode", {TtyMode::OFF, TtyMode::HCO, TtyMode::VCO, TtyMode::FULL}, + &IPrimaryDevice::setTtyMode, &IPrimaryDevice::getTtyMode); } TEST_F(BoolAccessorPrimaryHidlTest, setGetHac) { doc::test("Query and set the HAC state"); - testOptionalAccessors("HAC", {true, false, true}, - &IPrimaryDevice::setHacEnabled, + testOptionalAccessors("HAC", {true, false, true}, &IPrimaryDevice::setHacEnabled, &IPrimaryDevice::getHacEnabled); } @@ -1351,9 +1271,10 @@ TEST_F(BoolAccessorPrimaryHidlTest, setGetHac) { ////////////////////////////////////////////////////////////////////////////// int main(int argc, char** argv) { - environment = new Environment; + environment = new AudioHidlTestEnvironment; ::testing::AddGlobalTestEnvironment(environment); ::testing::InitGoogleTest(&argc, argv); + environment->init(&argc, argv); int status = RUN_ALL_TESTS(); return status; } diff --git a/audio/2.0/vts/functional/ValidateAudioConfiguration.cpp b/audio/core/2.0/vts/functional/ValidateAudioConfiguration.cpp similarity index 56% rename from audio/2.0/vts/functional/ValidateAudioConfiguration.cpp rename to audio/core/2.0/vts/functional/ValidateAudioConfiguration.cpp index ec3259a1e372361e0820045c8e18f7fe645b08ab..bef0e8276c609aba9e89a13d5cd87ceab6a52d3c 100644 --- a/audio/2.0/vts/functional/ValidateAudioConfiguration.cpp +++ b/audio/core/2.0/vts/functional/ValidateAudioConfiguration.cpp @@ -14,21 +14,17 @@ * limitations under the License. */ -#include #include +#include #include "utility/ValidateXml.h" TEST(CheckConfig, audioPolicyConfigurationValidation) { - const char* configName = "audio_policy_configuration.xml"; - const char* possibleConfigLocations[] = {"/odm/etc", "/vendor/etc", "/system/etc"}; - const char* configSchemaPath = "/data/local/tmp/audio_policy_configuration.xsd"; + RecordProperty("description", + "Verify that the audio policy configuration file " + "is valid according to the schema"); - for (std::string folder : possibleConfigLocations) { - const auto configPath = folder + '/' + configName; - if (access(configPath.c_str(), R_OK) == 0) { - ASSERT_VALID_XML(configPath.c_str(), configSchemaPath); - return; // The framework does not read past the first config file found - } - } + std::vector locations = {"/odm/etc", "/vendor/etc", "/system/etc"}; + EXPECT_ONE_VALID_XML_MULTIPLE_LOCATIONS("audio_policy_configuration.xml", locations, + "/data/local/tmp/audio_policy_configuration.xsd"); } diff --git a/audio/core/4.0/default/Android.bp b/audio/core/4.0/default/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..8e415459bebbb09011af01a39e63198a97ce83a7 --- /dev/null +++ b/audio/core/4.0/default/Android.bp @@ -0,0 +1,53 @@ +cc_library_shared { + name: "android.hardware.audio@4.0-impl", + relative_install_path: "hw", + proprietary: true, + vendor: true, + srcs: [ + "Conversions.cpp", + "Device.cpp", + "DevicesFactory.cpp", + "ParametersUtil.cpp", + "PrimaryDevice.cpp", + "Stream.cpp", + "StreamIn.cpp", + "StreamOut.cpp", + ], + + cflags: [ + "-DAUDIO_HAL_VERSION_4_0", + ], + + defaults: ["hidl_defaults"], + + export_include_dirs: ["include"], + + shared_libs: [ + "libbase", + "libcutils", + "libfmq", + "libhardware", + "libhidlbase", + "libhidltransport", + "liblog", + "libutils", + "android.hardware.audio@4.0", + "android.hardware.audio.common@4.0", + "android.hardware.audio.common@4.0-util", + 
"android.hardware.audio.common-util", + ], + + header_libs: [ + "android.hardware.audio.common.util@all-versions", + "android.hardware.audio.core@all-versions-impl", + "libaudioclient_headers", + "libaudio_system_headers", + "libhardware_headers", + "libmedia_headers", + ], + + whole_static_libs: [ + "libmedia_helper", + ], + +} diff --git a/audio/core/4.0/default/Conversions.cpp b/audio/core/4.0/default/Conversions.cpp new file mode 100644 index 0000000000000000000000000000000000000000..4f1874412c9cda9b717df8f24b93eeeaa2edb698 --- /dev/null +++ b/audio/core/4.0/default/Conversions.cpp @@ -0,0 +1,21 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "core/4.0/default/Conversions.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/core/4.0/default/Device.cpp b/audio/core/4.0/default/Device.cpp new file mode 100644 index 0000000000000000000000000000000000000000..b33434ecfb93cf5152c265d92abdf05dc34f3c53 --- /dev/null +++ b/audio/core/4.0/default/Device.cpp @@ -0,0 +1,28 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "DeviceHAL" + +#include "core/4.0/default/Device.h" +#include +#include "core/4.0/default/Conversions.h" +#include "core/4.0/default/StreamIn.h" +#include "core/4.0/default/StreamOut.h" +#include "core/4.0/default/Util.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/core/4.0/default/DevicesFactory.cpp b/audio/core/4.0/default/DevicesFactory.cpp new file mode 100644 index 0000000000000000000000000000000000000000..cb8a3c3e9756e69830458102b39c8ac5cf6278d8 --- /dev/null +++ b/audio/core/4.0/default/DevicesFactory.cpp @@ -0,0 +1,25 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "DevicesFactoryHAL" + +#include "core/4.0/default/DevicesFactory.h" +#include "core/4.0/default/Device.h" +#include "core/4.0/default/PrimaryDevice.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/core/4.0/default/OWNERS b/audio/core/4.0/default/OWNERS new file mode 100644 index 0000000000000000000000000000000000000000..6fdc97ca298fbbda9cb676f5acb02d7495debcb4 --- /dev/null +++ b/audio/core/4.0/default/OWNERS @@ -0,0 +1,3 @@ +elaurent@google.com +krocard@google.com +mnaganov@google.com diff --git a/audio/core/4.0/default/ParametersUtil.cpp b/audio/core/4.0/default/ParametersUtil.cpp new file mode 100644 index 0000000000000000000000000000000000000000..2cc9fb56a3d50e09963058b5f0a7ed8bc07b2d96 --- /dev/null +++ b/audio/core/4.0/default/ParametersUtil.cpp @@ -0,0 +1,22 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "core/4.0/default/ParametersUtil.h" +#include "core/4.0/default/Util.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/core/4.0/default/PrimaryDevice.cpp b/audio/core/4.0/default/PrimaryDevice.cpp new file mode 100644 index 0000000000000000000000000000000000000000..e3e49768d56f7d21c350b4e17477631444ccd8b7 --- /dev/null +++ b/audio/core/4.0/default/PrimaryDevice.cpp @@ -0,0 +1,24 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "PrimaryDeviceHAL" + +#include "core/4.0/default/PrimaryDevice.h" +#include "core/4.0/default/Util.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/core/4.0/default/Stream.cpp b/audio/core/4.0/default/Stream.cpp new file mode 100644 index 0000000000000000000000000000000000000000..b8c71de7b7fdd74340ca9085207c41d9ed6850b2 --- /dev/null +++ b/audio/core/4.0/default/Stream.cpp @@ -0,0 +1,26 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "StreamHAL" + +#include "core/4.0/default/Stream.h" +#include "common/all-versions/default/EffectMap.h" +#include "core/4.0/default/Conversions.h" +#include "core/4.0/default/Util.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/core/4.0/default/StreamIn.cpp b/audio/core/4.0/default/StreamIn.cpp new file mode 100644 index 0000000000000000000000000000000000000000..718bd25a632763659a9e49bdd7d704a3e53b3a27 --- /dev/null +++ b/audio/core/4.0/default/StreamIn.cpp @@ -0,0 +1,24 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "StreamInHAL" + +#include "core/4.0/default/StreamIn.h" +#include "core/4.0/default/Util.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/core/4.0/default/StreamOut.cpp b/audio/core/4.0/default/StreamOut.cpp new file mode 100644 index 0000000000000000000000000000000000000000..db88e401d2d8ac87c3ac4bff794e24f7ed34d471 --- /dev/null +++ b/audio/core/4.0/default/StreamOut.cpp @@ -0,0 +1,24 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "StreamOutHAL" + +#include "core/4.0/default/StreamOut.h" +#include "core/4.0/default/Util.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/core/4.0/default/include/core/4.0/default/Conversions.h b/audio/core/4.0/default/include/core/4.0/default/Conversions.h new file mode 100644 index 0000000000000000000000000000000000000000..32c2f887ee29e41e3b5d344bf80e1d868cadc291 --- /dev/null +++ b/audio/core/4.0/default/include/core/4.0/default/Conversions.h @@ -0,0 +1,26 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_HARDWARE_AUDIO_V4_0_CONVERSIONS_H_ +#define ANDROID_HARDWARE_AUDIO_V4_0_CONVERSIONS_H_ + +#include + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION + +#endif // ANDROID_HARDWARE_AUDIO_V4_0_CONVERSIONS_H_ diff --git a/audio/core/4.0/default/include/core/4.0/default/Device.h b/audio/core/4.0/default/include/core/4.0/default/Device.h new file mode 100644 index 0000000000000000000000000000000000000000..770d6067201e88e6a1d704248c31ea4dc07ff293 --- /dev/null +++ b/audio/core/4.0/default/include/core/4.0/default/Device.h @@ -0,0 +1,28 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_AUDIO_V4_0_DEVICE_H +#define ANDROID_HARDWARE_AUDIO_V4_0_DEVICE_H + +#include + +#include "ParametersUtil.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION + +#endif // ANDROID_HARDWARE_AUDIO_V4_0_DEVICE_H diff --git a/audio/core/4.0/default/include/core/4.0/default/DevicesFactory.h b/audio/core/4.0/default/include/core/4.0/default/DevicesFactory.h new file mode 100644 index 0000000000000000000000000000000000000000..200e59d96f24cf340b4213fdb2c174390812e523 --- /dev/null +++ b/audio/core/4.0/default/include/core/4.0/default/DevicesFactory.h @@ -0,0 +1,26 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_AUDIO_V4_0_DEVICESFACTORY_H +#define ANDROID_HARDWARE_AUDIO_V4_0_DEVICESFACTORY_H + +#include + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION + +#endif // ANDROID_HARDWARE_AUDIO_V4_0_DEVICESFACTORY_H diff --git a/audio/core/4.0/default/include/core/4.0/default/ParametersUtil.h b/audio/core/4.0/default/include/core/4.0/default/ParametersUtil.h new file mode 100644 index 0000000000000000000000000000000000000000..fa31ee9dd3e2e6914d77f6ad8b85a38c13c09d00 --- /dev/null +++ b/audio/core/4.0/default/include/core/4.0/default/ParametersUtil.h @@ -0,0 +1,26 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_AUDIO_V4_0_PARAMETERS_UTIL_H_ +#define ANDROID_HARDWARE_AUDIO_V4_0_PARAMETERS_UTIL_H_ + +#include + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION + +#endif // ANDROID_HARDWARE_AUDIO_V4_0_PARAMETERS_UTIL_H_ diff --git a/audio/core/4.0/default/include/core/4.0/default/PrimaryDevice.h b/audio/core/4.0/default/include/core/4.0/default/PrimaryDevice.h new file mode 100644 index 0000000000000000000000000000000000000000..e7f846b6221c42eb63d9202da55f435515e04c35 --- /dev/null +++ b/audio/core/4.0/default/include/core/4.0/default/PrimaryDevice.h @@ -0,0 +1,28 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_AUDIO_V4_0_PRIMARYDEVICE_H +#define ANDROID_HARDWARE_AUDIO_V4_0_PRIMARYDEVICE_H + +#include + +#include "Device.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION + +#endif // ANDROID_HARDWARE_AUDIO_V4_0_PRIMARYDEVICE_H diff --git a/audio/core/4.0/default/include/core/4.0/default/Stream.h b/audio/core/4.0/default/include/core/4.0/default/Stream.h new file mode 100644 index 0000000000000000000000000000000000000000..afad80fe53aa9fafcc90771594cdc5de36d8d328 --- /dev/null +++ b/audio/core/4.0/default/include/core/4.0/default/Stream.h @@ -0,0 +1,28 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_HARDWARE_AUDIO_V4_0_STREAM_H +#define ANDROID_HARDWARE_AUDIO_V4_0_STREAM_H + +#include + +#include "ParametersUtil.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION + +#endif // ANDROID_HARDWARE_AUDIO_V4_0_STREAM_H diff --git a/audio/core/4.0/default/include/core/4.0/default/StreamIn.h b/audio/core/4.0/default/include/core/4.0/default/StreamIn.h new file mode 100644 index 0000000000000000000000000000000000000000..151f03fc2b7babb8fcac4bd1ed3daa2bd5181ec5 --- /dev/null +++ b/audio/core/4.0/default/include/core/4.0/default/StreamIn.h @@ -0,0 +1,29 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_AUDIO_V4_0_STREAMIN_H +#define ANDROID_HARDWARE_AUDIO_V4_0_STREAMIN_H + +#include + +#include "Device.h" +#include "Stream.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION + +#endif // ANDROID_HARDWARE_AUDIO_V4_0_STREAMIN_H diff --git a/audio/core/4.0/default/include/core/4.0/default/StreamOut.h b/audio/core/4.0/default/include/core/4.0/default/StreamOut.h new file mode 100644 index 0000000000000000000000000000000000000000..dbf3bd16ce907e4fbdc4eb58d22f5cc3364fc587 --- /dev/null +++ b/audio/core/4.0/default/include/core/4.0/default/StreamOut.h @@ -0,0 +1,29 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_AUDIO_V4_0_STREAMOUT_H +#define ANDROID_HARDWARE_AUDIO_V4_0_STREAMOUT_H + +#include + +#include "Device.h" +#include "Stream.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION + +#endif // ANDROID_HARDWARE_AUDIO_V4_0_STREAMOUT_H diff --git a/audio/core/4.0/default/include/core/4.0/default/Util.h b/audio/core/4.0/default/include/core/4.0/default/Util.h new file mode 100644 index 0000000000000000000000000000000000000000..ce31e6f7f25959ea81a60b610ca80b96097b5a00 --- /dev/null +++ b/audio/core/4.0/default/include/core/4.0/default/Util.h @@ -0,0 +1,26 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_AUDIO_V4_0_UTIL_H +#define ANDROID_HARDWARE_AUDIO_V4_0_UTIL_H + +#include + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION + +#endif // ANDROID_HARDWARE_AUDIO_V4_0_UTIL_H diff --git a/audio/common/test/OWNERS b/audio/core/4.0/vts/OWNERS similarity index 100% rename from audio/common/test/OWNERS rename to audio/core/4.0/vts/OWNERS diff --git a/audio/core/4.0/vts/functional/Android.bp b/audio/core/4.0/vts/functional/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..22c5493dd8b79b680beaf41bb4fac4ddf64fc636 --- /dev/null +++ b/audio/core/4.0/vts/functional/Android.bp @@ -0,0 +1,35 @@ +// +// Copyright (C) 2017 The Android Open Source Project +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +cc_test { + name: "VtsHalAudioV4_0TargetTest", + defaults: ["VtsHalTargetTestDefaults"], + srcs: [ + "AudioPrimaryHidlHalTest.cpp", + "ValidateAudioConfiguration.cpp" + ], + static_libs: [ + "android.hardware.audio.common.test.utility", + "android.hardware.audio@4.0", + "android.hardware.audio.common@4.0", + "libicuuc", + "libicuuc_stubdata", + "libxml2", + ], + header_libs: [ + "android.hardware.audio.common.util@all-versions", + ], +} diff --git a/audio/core/4.0/vts/functional/AudioPrimaryHidlHalTest.cpp b/audio/core/4.0/vts/functional/AudioPrimaryHidlHalTest.cpp new file mode 100644 index 0000000000000000000000000000000000000000..dab9f7c44e4d014a4fa185ee29822d78c887a35b --- /dev/null +++ b/audio/core/4.0/vts/functional/AudioPrimaryHidlHalTest.cpp @@ -0,0 +1,1467 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "VtsHalAudioV4_0TargetTest" + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +#include + +#include + +#include +#include +#include +#include +#include + +#include + +#include "utility/AssertOk.h" +#include "utility/Documentation.h" +#include "utility/EnvironmentTearDown.h" +#define AUDIO_HAL_VERSION V4_0 +#include "utility/PrettyPrintAudioTypes.h" +#include "utility/ReturnIn.h" + +using std::initializer_list; +using std::string; +using std::to_string; +using std::vector; +using std::list; + +using ::android::sp; +using ::android::hardware::Return; +using ::android::hardware::hidl_bitfield; +using ::android::hardware::hidl_enum_range; +using ::android::hardware::hidl_handle; +using ::android::hardware::hidl_string; +using ::android::hardware::hidl_vec; +using ::android::hardware::MQDescriptorSync; +using ::android::hardware::audio::V4_0::AudioDrain; +using ::android::hardware::audio::V4_0::DeviceAddress; +using ::android::hardware::audio::V4_0::IDevice; +using ::android::hardware::audio::V4_0::IPrimaryDevice; +using Rotation = ::android::hardware::audio::V4_0::IPrimaryDevice::Rotation; +using TtyMode = ::android::hardware::audio::V4_0::IPrimaryDevice::TtyMode; +using ::android::hardware::audio::V4_0::IDevicesFactory; +using ::android::hardware::audio::V4_0::IStream; +using ::android::hardware::audio::V4_0::IStreamIn; +using ::android::hardware::audio::V4_0::TimeSpec; +using ReadParameters = ::android::hardware::audio::V4_0::IStreamIn::ReadParameters; +using ReadStatus = ::android::hardware::audio::V4_0::IStreamIn::ReadStatus; +using ::android::hardware::audio::V4_0::IStreamOut; +using ::android::hardware::audio::V4_0::IStreamOutCallback; +using ::android::hardware::audio::V4_0::MicrophoneInfo; +using ::android::hardware::audio::V4_0::MmapBufferInfo; +using ::android::hardware::audio::V4_0::MmapPosition; +using ::android::hardware::audio::V4_0::ParameterValue; +using ::android::hardware::audio::V4_0::Result; +using ::android::hardware::audio::V4_0::SourceMetadata; +using ::android::hardware::audio::V4_0::SinkMetadata; +using ::android::hardware::audio::common::V4_0::AudioChannelMask; +using ::android::hardware::audio::common::V4_0::AudioConfig; +using ::android::hardware::audio::common::V4_0::AudioContentType; +using ::android::hardware::audio::common::V4_0::AudioDevice; +using ::android::hardware::audio::common::V4_0::AudioFormat; +using ::android::hardware::audio::common::V4_0::AudioHandleConsts; +using ::android::hardware::audio::common::V4_0::AudioHwSync; +using ::android::hardware::audio::common::V4_0::AudioInputFlag; +using ::android::hardware::audio::common::V4_0::AudioIoHandle; +using ::android::hardware::audio::common::V4_0::AudioMode; +using ::android::hardware::audio::common::V4_0::AudioOffloadInfo; +using ::android::hardware::audio::common::V4_0::AudioOutputFlag; +using ::android::hardware::audio::common::V4_0::AudioSource; +using ::android::hardware::audio::common::V4_0::AudioUsage; +using ::android::hardware::audio::common::V4_0::ThreadInfo; +using ::android::hardware::audio::common::utils::mkBitfield; + +using namespace ::android::hardware::audio::common::test::utility; + +// Typical accepted results from interface methods +static auto okOrNotSupported = {Result::OK, Result::NOT_SUPPORTED}; +static auto okOrNotSupportedOrInvalidArgs = {Result::OK, Result::NOT_SUPPORTED, + Result::INVALID_ARGUMENTS}; +static auto invalidArgsOrNotSupported = {Result::INVALID_ARGUMENTS, 
Result::NOT_SUPPORTED}; + +class AudioHidlTestEnvironment : public ::Environment { + public: + virtual void registerTestServices() override { registerTestService(); } +}; + +// Instance to register global tearDown +static AudioHidlTestEnvironment* environment; + +class HidlTest : public ::testing::VtsHalHidlTargetTestBase { + protected: + // Convenient member to store results + Result res; +}; + +////////////////////////////////////////////////////////////////////////////// +////////////////////// getService audio_devices_factory ////////////////////// +////////////////////////////////////////////////////////////////////////////// + +// Test all audio devices +class AudioHidlTest : public HidlTest { + public: + void SetUp() override { + ASSERT_NO_FATAL_FAILURE(HidlTest::SetUp()); // setup base + + if (devicesFactory == nullptr) { + environment->registerTearDown([] { devicesFactory.clear(); }); + devicesFactory = ::testing::VtsHalHidlTargetTestBase::getService( + environment->getServiceName("default")); + } + ASSERT_TRUE(devicesFactory != nullptr); + } + + protected: + // Cache the devicesFactory retrieval to speed up each test by ~0.5s + static sp devicesFactory; +}; +sp AudioHidlTest::devicesFactory; + +TEST_F(AudioHidlTest, GetAudioDevicesFactoryService) { + doc::test("Test the getService (called in SetUp)"); +} + +TEST_F(AudioHidlTest, OpenDeviceInvalidParameter) { + doc::test("Test passing an invalid parameter to openDevice"); + Result result; + sp device; + ASSERT_OK(devicesFactory->openDevice("Non existing device", returnIn(result, device))); + ASSERT_EQ(Result::INVALID_ARGUMENTS, result); + ASSERT_TRUE(device == nullptr); +} + +TEST_F(AudioHidlTest, OpenPrimaryDeviceUsingGetDevice) { + doc::test("Calling openDevice(\"primary\") should return the primary device."); + Result result; + sp baseDevice; + ASSERT_OK(devicesFactory->openDevice("primary", returnIn(result, baseDevice))); + ASSERT_OK(result); + ASSERT_TRUE(baseDevice != nullptr); + + Return> primaryDevice = IPrimaryDevice::castFrom(baseDevice); + ASSERT_TRUE(primaryDevice.isOk()); + ASSERT_TRUE(sp(primaryDevice) != nullptr); +} + +////////////////////////////////////////////////////////////////////////////// +/////////////////////////////// openDevice primary /////////////////////////// +////////////////////////////////////////////////////////////////////////////// + +// Test the primary device +class AudioPrimaryHidlTest : public AudioHidlTest { + public: + /** Primary HAL test are NOT thread safe. 
*/ + void SetUp() override { + ASSERT_NO_FATAL_FAILURE(AudioHidlTest::SetUp()); // setup base + + if (device == nullptr) { + Result result; + ASSERT_OK(devicesFactory->openPrimaryDevice(returnIn(result, device))); + ASSERT_OK(result); + ASSERT_TRUE(device != nullptr); + + environment->registerTearDown([] { device.clear(); }); + } + } + + protected: + // Cache the device opening to speed up each test by ~0.5s + static sp device; +}; +sp AudioPrimaryHidlTest::device; + +TEST_F(AudioPrimaryHidlTest, OpenPrimaryDevice) { + doc::test("Test the openDevice (called in SetUp)"); +} + +TEST_F(AudioPrimaryHidlTest, Init) { + doc::test("Test that the audio primary hal initialized correctly"); + ASSERT_OK(device->initCheck()); +} + +////////////////////////////////////////////////////////////////////////////// +///////////////////// {set,get}{Master,Mic}{Mute,Volume} ///////////////////// +////////////////////////////////////////////////////////////////////////////// + +template +class AccessorPrimaryHidlTest : public AudioPrimaryHidlTest { + protected: + enum Optionality { REQUIRED, OPTIONAL }; + struct Initial { // Initial property value + Initial(Property value, Optionality check = REQUIRED) : value(value), check(check) {} + Property value; + Optionality check; // If this initial value should be checked + }; + /** Test a property getter and setter. + * The getter and/or the setter may return NOT_SUPPORTED if optionality == OPTIONAL. + */ + template + void testAccessors(const string& propertyName, const Initial expectedInitial, + list valuesToTest, Setter setter, Getter getter, + const vector& invalidValues = {}) { + const auto expectedResults = {Result::OK, + optionality == OPTIONAL ? Result::NOT_SUPPORTED : Result::OK}; + + Property initialValue = expectedInitial.value; + ASSERT_OK((device.get()->*getter)(returnIn(res, initialValue))); + ASSERT_RESULT(expectedResults, res); + if (res == Result::OK && expectedInitial.check == REQUIRED) { + EXPECT_EQ(expectedInitial.value, initialValue); + } + + valuesToTest.push_front(expectedInitial.value); + valuesToTest.push_back(initialValue); + for (Property setValue : valuesToTest) { + SCOPED_TRACE("Test " + propertyName + " getter and setter for " + + testing::PrintToString(setValue)); + auto ret = (device.get()->*setter)(setValue); + ASSERT_RESULT(expectedResults, ret); + if (ret == Result::NOT_SUPPORTED) { + doc::partialTest(propertyName + " setter is not supported"); + break; + } + Property getValue; + // Make sure the getter returns the same value just set + ASSERT_OK((device.get()->*getter)(returnIn(res, getValue))); + ASSERT_RESULT(expectedResults, res); + if (res == Result::NOT_SUPPORTED) { + doc::partialTest(propertyName + " getter is not supported"); + continue; + } + EXPECT_EQ(setValue, getValue); + } + + for (Property invalidValue : invalidValues) { + SCOPED_TRACE("Try to set " + propertyName + " with the invalid value " + + testing::PrintToString(invalidValue)); + EXPECT_RESULT(invalidArgsOrNotSupported, (device.get()->*setter)(invalidValue)); + } + + // Restore initial value + EXPECT_RESULT(expectedResults, (device.get()->*setter)(initialValue)); + } +}; + +using BoolAccessorPrimaryHidlTest = AccessorPrimaryHidlTest; + +TEST_F(BoolAccessorPrimaryHidlTest, MicMuteTest) { + doc::test("Check that the mic can be muted and unmuted"); + testAccessors("mic mute", Initial{false}, {true}, &IDevice::setMicMute, &IDevice::getMicMute); + // TODO: check that the mic is really muted (all sample are 0) +} + +TEST_F(BoolAccessorPrimaryHidlTest, MasterMuteTest) { 
+ doc::test("If master mute is supported, try to mute and unmute the master output"); + testAccessors("master mute", Initial{false}, {true}, &IDevice::setMasterMute, + &IDevice::getMasterMute); + // TODO: check that the master volume is really muted +} + +using FloatAccessorPrimaryHidlTest = AccessorPrimaryHidlTest; +TEST_F(FloatAccessorPrimaryHidlTest, MasterVolumeTest) { + doc::test("Test the master volume if supported"); + testAccessors( + "master volume", Initial{1}, {0, 0.5}, &IDevice::setMasterVolume, &IDevice::getMasterVolume, + {-0.1, 1.1, NAN, INFINITY, -INFINITY, 1 + std::numeric_limits::epsilon()}); + // TODO: check that the master volume is really changed +} + +////////////////////////////////////////////////////////////////////////////// +//////////////////////////////// AudioPatches //////////////////////////////// +////////////////////////////////////////////////////////////////////////////// + +class AudioPatchPrimaryHidlTest : public AudioPrimaryHidlTest { + protected: + bool areAudioPatchesSupported() { + auto result = device->supportsAudioPatches(); + EXPECT_IS_OK(result); + return result; + } +}; + +TEST_F(AudioPatchPrimaryHidlTest, AudioPatches) { + doc::test("Test if audio patches are supported"); + if (!areAudioPatchesSupported()) { + doc::partialTest("Audio patches are not supported"); + return; + } + // TODO: test audio patches +} + +////////////////////////////////////////////////////////////////////////////// +//////////////// Required and recommended audio format support /////////////// +// From: +// https://source.android.com/compatibility/android-cdd.html#5_4_audio_recording +// From: +// https://source.android.com/compatibility/android-cdd.html#5_5_audio_playback +/////////// TODO: move to the beginning of the file for easier update //////// +////////////////////////////////////////////////////////////////////////////// + +class AudioConfigPrimaryTest : public AudioPatchPrimaryHidlTest { + public: + // Cache result ? 
+ static const vector getRequiredSupportPlaybackAudioConfig() { + return combineAudioConfig({AudioChannelMask::OUT_STEREO, AudioChannelMask::OUT_MONO}, + {8000, 11025, 16000, 22050, 32000, 44100}, + {AudioFormat::PCM_16_BIT}); + } + + static const vector getRecommendedSupportPlaybackAudioConfig() { + return combineAudioConfig({AudioChannelMask::OUT_STEREO, AudioChannelMask::OUT_MONO}, + {24000, 48000}, {AudioFormat::PCM_16_BIT}); + } + + static const vector getSupportedPlaybackAudioConfig() { + // TODO: retrieve audio config supported by the platform + // as declared in the policy configuration + return {}; + } + + static const vector getRequiredSupportCaptureAudioConfig() { + return combineAudioConfig({AudioChannelMask::IN_MONO}, {8000, 11025, 16000, 44100}, + {AudioFormat::PCM_16_BIT}); + } + static const vector getRecommendedSupportCaptureAudioConfig() { + return combineAudioConfig({AudioChannelMask::IN_STEREO}, {22050, 48000}, + {AudioFormat::PCM_16_BIT}); + } + static const vector getSupportedCaptureAudioConfig() { + // TODO: retrieve audio config supported by the platform + // as declared in the policy configuration + return {}; + } + + private: + static const vector combineAudioConfig(vector channelMasks, + vector sampleRates, + vector formats) { + vector configs; + for (auto channelMask : channelMasks) { + for (auto sampleRate : sampleRates) { + for (auto format : formats) { + AudioConfig config{}; + // leave offloadInfo to 0 + config.channelMask = mkBitfield(channelMask); + config.sampleRateHz = sampleRate; + config.format = format; + // FIXME: leave frameCount to 0 ? + configs.push_back(config); + } + } + } + return configs; + } +}; + +/** Generate a test name based on an audio config. + * + * As the only parameter changing are channel mask and sample rate, + * only print those ones in the test name. + */ +static string generateTestName(const testing::TestParamInfo& info) { + const AudioConfig& config = info.param; + return to_string(info.index) + "__" + to_string(config.sampleRateHz) + "_" + + // "MONO" is more clear than "FRONT_LEFT" + ((config.channelMask == mkBitfield(AudioChannelMask::OUT_MONO) || + config.channelMask == mkBitfield(AudioChannelMask::IN_MONO)) + ? "MONO" + : ::testing::PrintToString(config.channelMask)); +} + +////////////////////////////////////////////////////////////////////////////// +///////////////////////////// getInputBufferSize ///////////////////////////// +////////////////////////////////////////////////////////////////////////////// + +// FIXME: execute input test only if platform declares +// android.hardware.microphone +// how to get this value ? is it a property ??? 
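The helpers above build the test matrix as a plain Cartesian product, and generateTestName turns each resulting config into a readable gtest parameter name. A standalone sketch of that expansion (illustrative only, not part of the VTS sources; the mask and rate lists are the ones from getRequiredSupportPlaybackAudioConfig):

    // Mirrors the nested loops of combineAudioConfig and the naming scheme of generateTestName.
    #include <cstdio>
    #include <vector>

    int main() {
        const std::vector<const char*> masks = {"OUT_STEREO", "OUT_MONO"};
        const std::vector<int> rates = {8000, 11025, 16000, 22050, 32000, 44100};
        int index = 0;
        for (const char* mask : masks) {
            for (int rate : rates) {
                // One AudioConfig per (mask, rate, PCM_16_BIT) triple, so the required
                // playback set is 2 x 6 x 1 = 12 configs; a mono entry would be named
                // e.g. "6__8000_MONO".
                std::printf("%d__%d_%s\n", index++, rate, mask);
            }
        }
        return 0;
    }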
+ +class AudioCaptureConfigPrimaryTest : public AudioConfigPrimaryTest, + public ::testing::WithParamInterface { + protected: + void inputBufferSizeTest(const AudioConfig& audioConfig, bool supportRequired) { + uint64_t bufferSize; + ASSERT_OK(device->getInputBufferSize(audioConfig, returnIn(res, bufferSize))); + + switch (res) { + case Result::INVALID_ARGUMENTS: + EXPECT_FALSE(supportRequired); + break; + case Result::OK: + // Check that the buffer is of a sane size + // For now only that it is > 0 + EXPECT_GT(bufferSize, uint64_t(0)); + break; + default: + FAIL() << "Invalid return status: " << ::testing::PrintToString(res); + } + } +}; + +// Test that the required capture config and those declared in the policy are +// indeed supported +class RequiredInputBufferSizeTest : public AudioCaptureConfigPrimaryTest {}; +TEST_P(RequiredInputBufferSizeTest, RequiredInputBufferSizeTest) { + doc::test( + "Input buffer size must be retrievable for a format with required " + "support."); + inputBufferSizeTest(GetParam(), true); +} +INSTANTIATE_TEST_CASE_P( + RequiredInputBufferSize, RequiredInputBufferSizeTest, + ::testing::ValuesIn(AudioConfigPrimaryTest::getRequiredSupportCaptureAudioConfig()), + &generateTestName); +INSTANTIATE_TEST_CASE_P( + SupportedInputBufferSize, RequiredInputBufferSizeTest, + ::testing::ValuesIn(AudioConfigPrimaryTest::getSupportedCaptureAudioConfig()), + &generateTestName); + +// Test that the recommended capture config are supported or lead to a +// INVALID_ARGUMENTS return +class OptionalInputBufferSizeTest : public AudioCaptureConfigPrimaryTest {}; +TEST_P(OptionalInputBufferSizeTest, OptionalInputBufferSizeTest) { + doc::test( + "Input buffer size should be retrievable for a format with recommended " + "support."); + inputBufferSizeTest(GetParam(), false); +} +INSTANTIATE_TEST_CASE_P( + RecommendedCaptureAudioConfigSupport, OptionalInputBufferSizeTest, + ::testing::ValuesIn(AudioConfigPrimaryTest::getRecommendedSupportCaptureAudioConfig()), + &generateTestName); + +////////////////////////////////////////////////////////////////////////////// +/////////////////////////////// setScreenState /////////////////////////////// +////////////////////////////////////////////////////////////////////////////// + +TEST_F(AudioPrimaryHidlTest, setScreenState) { + doc::test("Check that the hal can receive the screen state"); + for (bool turnedOn : {false, true, true, false, false}) { + ASSERT_RESULT(okOrNotSupported, device->setScreenState(turnedOn)); + } +} + +////////////////////////////////////////////////////////////////////////////// +//////////////////////////// {get,set}Parameters ///////////////////////////// +////////////////////////////////////////////////////////////////////////////// + +TEST_F(AudioPrimaryHidlTest, getParameters) { + doc::test("Check that the hal can set and get parameters"); + hidl_vec context; + hidl_vec keys; + hidl_vec values; + ASSERT_OK(device->getParameters(context, keys, returnIn(res, values))); + ASSERT_OK(device->setParameters(context, values)); + values.resize(0); + ASSERT_OK(device->setParameters(context, values)); +} + +////////////////////////////////////////////////////////////////////////////// +/////////////////////////////// getMicrophones /////////////////////////////// +////////////////////////////////////////////////////////////////////////////// + +TEST_F(AudioPrimaryHidlTest, GetMicrophonesTest) { + doc::test("Make sure getMicrophones always succeeds"); + hidl_vec microphones; + ASSERT_OK(device->getMicrophones(returnIn(res, 
microphones))); + ASSERT_OK(res); +} + +////////////////////////////////////////////////////////////////////////////// +//////////////////////////////// debug dump ////////////////////////////////// +////////////////////////////////////////////////////////////////////////////// + +template +static void testDebugDump(DebugDump debugDump) { + // File descriptors to our pipe. fds[0] corresponds to the read end and + // fds[1] to the write end. + int fds[2]; + ASSERT_EQ(0, pipe2(fds, O_NONBLOCK)) << errno; + + // Make sure that the pipe is at least 1 MB in size. The test process runs + // in su domain, so it should be safe to make this call. + fcntl(fds[0], F_SETPIPE_SZ, 1 << 20); + + // Wrap the pipe's write end file descriptor in a native handle + auto* nativeHandle = native_handle_create(1, 0); + ASSERT_NE(nullptr, nativeHandle); + nativeHandle->data[0] = fds[1]; + + // Wrap this native handle in a hidl handle + hidl_handle handle; + handle.setTo(nativeHandle, false /*take ownership*/); + + ASSERT_OK(debugDump(handle)); + + // Check that at least one byte was written by the hal + // TODO: debugDump does not return a Result. + // This means that the hal can not report that it is not implementing the + // function. + char buff; + if (read(fds[0], &buff, 1) != 1) { + doc::note("debugDump does not seem implemented"); + } + EXPECT_EQ(0, close(fds[0])) << errno; + EXPECT_EQ(0, close(fds[1])) << errno; +} + +TEST_F(AudioPrimaryHidlTest, DebugDump) { + doc::test("Check that the hal can dump its state without error"); + testDebugDump([](const auto& handle) { return device->debug(handle, {/* options */}); }); +} + +TEST_F(AudioPrimaryHidlTest, DebugDumpInvalidArguments) { + doc::test("Check that the hal dump doesn't crash on invalid arguments"); + ASSERT_OK(device->debug(hidl_handle(), {/* options */})); +} + +TEST_F(AudioPrimaryHidlTest, SetConnectedState) { + doc::test("Check that the HAL can be notified of device connection and disconnection"); + using AD = AudioDevice; + for (auto deviceType : {AD::OUT_HDMI, AD::OUT_WIRED_HEADPHONE, AD::IN_USB_HEADSET}) { + SCOPED_TRACE("device=" + ::testing::PrintToString(deviceType)); + for (bool state : {true, false}) { + SCOPED_TRACE("state=" + ::testing::PrintToString(state)); + DeviceAddress address = {}; + address.device = deviceType; + auto ret = device->setConnectedState(address, state); + ASSERT_TRUE(ret.isOk()); + if (ret == Result::NOT_SUPPORTED) { + doc::partialTest("setConnectedState is not supported"); + return; + } + ASSERT_OK(ret); + } + } +} + +////////////////////////////////////////////////////////////////////////////// +////////////////////////// open{Output,Input}Stream ////////////////////////// +////////////////////////////////////////////////////////////////////////////// + +template +class OpenStreamTest : public AudioConfigPrimaryTest, + public ::testing::WithParamInterface { + protected: + template + void testOpen(Open openStream, const AudioConfig& config) { + // FIXME: Open a stream without an IOHandle + // This is not required to be accepted by hal implementations + AudioIoHandle ioHandle = (AudioIoHandle)AudioHandleConsts::AUDIO_IO_HANDLE_NONE; + AudioConfig suggestedConfig{}; + ASSERT_OK(openStream(ioHandle, config, returnIn(res, stream, suggestedConfig))); + + // TODO: only allow failure for RecommendedPlaybackAudioConfig + switch (res) { + case Result::OK: + ASSERT_TRUE(stream != nullptr); + audioConfig = config; + break; + case Result::INVALID_ARGUMENTS: + ASSERT_TRUE(stream == nullptr); + AudioConfig suggestedConfigRetry; + // Could
not open stream with config, try again with the + // suggested one + ASSERT_OK(openStream(ioHandle, suggestedConfig, + returnIn(res, stream, suggestedConfigRetry))); + // This time it must succeed + ASSERT_OK(res); + ASSERT_TRUE(stream != nullptr); + audioConfig = suggestedConfig; + break; + default: + FAIL() << "Invalid return status: " << ::testing::PrintToString(res); + } + open = true; + } + + Return closeStream() { + open = false; + return stream->close(); + } + + private: + void TearDown() override { + if (open) { + ASSERT_OK(stream->close()); + } + } + + protected: + AudioConfig audioConfig; + DeviceAddress address = {}; + sp stream; + bool open = false; +}; + +////////////////////////////// openOutputStream ////////////////////////////// + +class OutputStreamTest : public OpenStreamTest { + virtual void SetUp() override { + ASSERT_NO_FATAL_FAILURE(OpenStreamTest::SetUp()); // setup base + address.device = AudioDevice::OUT_DEFAULT; + const AudioConfig& config = GetParam(); + // TODO: test all flag combination + auto flags = hidl_bitfield(AudioOutputFlag::NONE); + testOpen( + [&](AudioIoHandle handle, AudioConfig config, auto cb) { + return device->openOutputStream(handle, address, config, flags, initialMetadata, + cb); + }, + config); + } + + protected: + const SourceMetadata initialMetadata = { + {{AudioUsage::MEDIA, AudioContentType::MUSIC, 1 /* gain */}}}; +}; +TEST_P(OutputStreamTest, OpenOutputStreamTest) { + doc::test( + "Check that output streams can be open with the required and " + "recommended config"); + // Open done in SetUp +} +INSTANTIATE_TEST_CASE_P( + RequiredOutputStreamConfigSupport, OutputStreamTest, + ::testing::ValuesIn(AudioConfigPrimaryTest::getRequiredSupportPlaybackAudioConfig()), + &generateTestName); +INSTANTIATE_TEST_CASE_P( + SupportedOutputStreamConfig, OutputStreamTest, + ::testing::ValuesIn(AudioConfigPrimaryTest::getSupportedPlaybackAudioConfig()), + &generateTestName); + +INSTANTIATE_TEST_CASE_P( + RecommendedOutputStreamConfigSupport, OutputStreamTest, + ::testing::ValuesIn(AudioConfigPrimaryTest::getRecommendedSupportPlaybackAudioConfig()), + &generateTestName); + +////////////////////////////// openInputStream ////////////////////////////// + +class InputStreamTest : public OpenStreamTest { + virtual void SetUp() override { + ASSERT_NO_FATAL_FAILURE(OpenStreamTest::SetUp()); // setup base + address.device = AudioDevice::IN_DEFAULT; + const AudioConfig& config = GetParam(); + // TODO: test all supported flags and source + auto flags = hidl_bitfield(AudioInputFlag::NONE); + testOpen( + [&](AudioIoHandle handle, AudioConfig config, auto cb) { + return device->openInputStream(handle, address, config, flags, initialMetadata, cb); + }, + config); + } + + protected: + const SinkMetadata initialMetadata = {{{AudioSource::DEFAULT, 1 /* gain */}}}; +}; + +TEST_P(InputStreamTest, OpenInputStreamTest) { + doc::test( + "Check that input streams can be open with the required and " + "recommended config"); + // Open done in setup +} +INSTANTIATE_TEST_CASE_P( + RequiredInputStreamConfigSupport, InputStreamTest, + ::testing::ValuesIn(AudioConfigPrimaryTest::getRequiredSupportCaptureAudioConfig()), + &generateTestName); +INSTANTIATE_TEST_CASE_P( + SupportedInputStreamConfig, InputStreamTest, + ::testing::ValuesIn(AudioConfigPrimaryTest::getSupportedCaptureAudioConfig()), + &generateTestName); + +INSTANTIATE_TEST_CASE_P( + RecommendedInputStreamConfigSupport, InputStreamTest, + ::testing::ValuesIn(AudioConfigPrimaryTest::getRecommendedSupportCaptureAudioConfig()), 
+ &generateTestName); + +////////////////////////////////////////////////////////////////////////////// +////////////////////////////// IStream getters /////////////////////////////// +////////////////////////////////////////////////////////////////////////////// + +/** Unpack the provided result. + * If the result is not OK, register a failure and return an undefined value. */ +template +static R extract(Return ret) { + if (!ret.isOk()) { + EXPECT_IS_OK(ret); + return R{}; + } + return ret; +} + +/* Could not find a way to write a test for two parametrized class fixure + * thus use this macro do duplicate tests for Input and Output stream */ +#define TEST_IO_STREAM(test_name, documentation, code) \ + TEST_P(InputStreamTest, test_name) { \ + doc::test(documentation); \ + code; \ + } \ + TEST_P(OutputStreamTest, test_name) { \ + doc::test(documentation); \ + code; \ + } + +TEST_IO_STREAM(GetFrameCount, "Check that the stream frame count == the one it was opened with", + ASSERT_EQ(audioConfig.frameCount, extract(stream->getFrameCount()))) + +TEST_IO_STREAM(GetSampleRate, "Check that the stream sample rate == the one it was opened with", + ASSERT_EQ(audioConfig.sampleRateHz, extract(stream->getSampleRate()))) + +TEST_IO_STREAM(GetChannelMask, "Check that the stream channel mask == the one it was opened with", + ASSERT_EQ(audioConfig.channelMask, extract(stream->getChannelMask()))) + +TEST_IO_STREAM(GetFormat, "Check that the stream format == the one it was opened with", + ASSERT_EQ(audioConfig.format, extract(stream->getFormat()))) + +// TODO: for now only check that the framesize is not incoherent +TEST_IO_STREAM(GetFrameSize, "Check that the stream frame size == the one it was opened with", + ASSERT_GT(extract(stream->getFrameSize()), 0U)) + +TEST_IO_STREAM(GetBufferSize, "Check that the stream buffer size== the one it was opened with", + ASSERT_GE(extract(stream->getBufferSize()), extract(stream->getFrameSize()))); + +template +static void testCapabilityGetter(const string& name, IStream* stream, + CapablityGetter capablityGetter, + Return (IStream::*getter)(), + Return (IStream::*setter)(Property), + bool currentMustBeSupported = true) { + hidl_vec capabilities; + auto ret = capablityGetter(stream, capabilities); + if (ret == Result::NOT_SUPPORTED) { + doc::partialTest(name + " is not supported"); + return; + }; + ASSERT_OK(ret); + + if (currentMustBeSupported) { + ASSERT_NE(0U, capabilities.size()) << name << " must not return an empty list"; + Property currentValue = extract((stream->*getter)()); + EXPECT_TRUE(std::find(capabilities.begin(), capabilities.end(), currentValue) != + capabilities.end()) + << "value returned by " << name << "() = " << testing::PrintToString(currentValue) + << " is not in the list of the supported ones " << toString(capabilities); + } + + // Check that all declared supported values are indeed supported + for (auto capability : capabilities) { + auto ret = (stream->*setter)(capability); + ASSERT_TRUE(ret.isOk()); + if (ret == Result::NOT_SUPPORTED) { + doc::partialTest("Setter is not supported"); + return; + } + ASSERT_OK(ret); + ASSERT_EQ(capability, extract((stream->*getter)())); + } +} + +Result getSupportedSampleRates(IStream* stream, hidl_vec& rates) { + Result res; + EXPECT_OK(stream->getSupportedSampleRates(extract(stream->getFormat()), returnIn(res, rates))); + return res; +} + +Result getSupportedChannelMasks(IStream* stream, + hidl_vec>& channels) { + Result res; + EXPECT_OK( + stream->getSupportedChannelMasks(extract(stream->getFormat()), 
returnIn(res, channels))); + return res; +} + +Result getSupportedFormats(IStream* stream, hidl_vec& capabilities) { + EXPECT_OK(stream->getSupportedFormats(returnIn(capabilities))); + // TODO: this should be an optional function + return Result::OK; +} + +TEST_IO_STREAM(SupportedSampleRate, "Check that the stream sample rate is declared as supported", + testCapabilityGetter("getSupportedSampleRate", stream.get(), + &getSupportedSampleRates, &IStream::getSampleRate, + &IStream::setSampleRate, + // getSupportedSampleRate returns the native sampling rates, + // (the sampling rates that can be played without resampling) + // but other sampling rates can be supported by the HAL. + false)) + +TEST_IO_STREAM(SupportedChannelMask, "Check that the stream channel mask is declared as supported", + testCapabilityGetter("getSupportedChannelMask", stream.get(), + &getSupportedChannelMasks, &IStream::getChannelMask, + &IStream::setChannelMask)) + +TEST_IO_STREAM(SupportedFormat, "Check that the stream format is declared as supported", + testCapabilityGetter("getSupportedFormat", stream.get(), &getSupportedFormats, + &IStream::getFormat, &IStream::setFormat)) + +static void testGetDevices(IStream* stream, AudioDevice expectedDevice) { + hidl_vec devices; + Result res; + ASSERT_OK(stream->getDevices(returnIn(res, devices))); + if (res == Result::NOT_SUPPORTED) { + return doc::partialTest("GetDevices is not supported"); + } + // The stream was constructed with one device, thus getDevices must only return one + ASSERT_EQ(1U, devices.size()); + AudioDevice device = devices[0].device; + ASSERT_TRUE(device == expectedDevice) + << "Expected: " << ::testing::PrintToString(expectedDevice) + << "\n Actual: " << ::testing::PrintToString(device); +} + +TEST_IO_STREAM(GetDevices, "Check that the stream device == the one it was opened with", + areAudioPatchesSupported() ? doc::partialTest("Audio patches are supported") + : testGetDevices(stream.get(), address.device)) + +static void testSetDevices(IStream* stream, const DeviceAddress& address) { + DeviceAddress otherAddress = address; + otherAddress.device = (address.device & AudioDevice::BIT_IN) == 0 ? AudioDevice::OUT_SPEAKER + : AudioDevice::IN_BUILTIN_MIC; + EXPECT_OK(stream->setDevices({otherAddress})); + + ASSERT_OK(stream->setDevices({address})); // Go back to the original value +} + +TEST_IO_STREAM(SetDevices, "Check that the stream can be rerouted to SPEAKER or BUILTIN_MIC", + areAudioPatchesSupported() ? 
doc::partialTest("Audio patches are supported") + : testSetDevices(stream.get(), address)) + +static void testGetAudioProperties(IStream* stream, AudioConfig expectedConfig) { + uint32_t sampleRateHz; + hidl_bitfield mask; + AudioFormat format; + + stream->getAudioProperties(returnIn(sampleRateHz, mask, format)); + + // FIXME: the qcom hal it does not currently negotiate the sampleRate & + // channel mask + EXPECT_EQ(expectedConfig.sampleRateHz, sampleRateHz); + EXPECT_EQ(expectedConfig.channelMask, mask); + EXPECT_EQ(expectedConfig.format, format); +} + +TEST_IO_STREAM(GetAudioProperties, + "Check that the stream audio properties == the ones it was opened with", + testGetAudioProperties(stream.get(), audioConfig)) + +TEST_IO_STREAM(SetHwAvSync, "Try to set hardware sync to an invalid value", + ASSERT_RESULT(okOrNotSupportedOrInvalidArgs, stream->setHwAvSync(666))) + +static void checkGetHwAVSync(IDevice* device) { + Result res; + AudioHwSync sync; + ASSERT_OK(device->getHwAvSync(returnIn(res, sync))); + if (res == Result::NOT_SUPPORTED) { + return doc::partialTest("getHwAvSync is not supported"); + } + ASSERT_OK(res); +} +TEST_IO_STREAM(GetHwAvSync, "Get hardware sync can not fail", checkGetHwAVSync(device.get())); + +static void checkGetNoParameter(IStream* stream, hidl_vec keys, + initializer_list expectedResults) { + hidl_vec context; + hidl_vec parameters; + Result res; + ASSERT_OK(stream->getParameters(context, keys, returnIn(res, parameters))); + ASSERT_RESULT(expectedResults, res); + if (res == Result::OK) { + for (auto& parameter : parameters) { + ASSERT_EQ(0U, parameter.value.size()) << toString(parameter); + } + } +} + +/* Get/Set parameter is intended to be an opaque channel between vendors app and + * their HALs. + * Thus can not be meaningfully tested. + */ +TEST_IO_STREAM(getEmptySetParameter, "Retrieve the values of an empty set", + checkGetNoParameter(stream.get(), {} /* keys */, {Result::OK})) + +TEST_IO_STREAM(getNonExistingParameter, "Retrieve the values of an non existing parameter", + checkGetNoParameter(stream.get(), {"Non existing key"} /* keys */, + {Result::NOT_SUPPORTED})) + +TEST_IO_STREAM(setEmptySetParameter, "Set the values of an empty set of parameters", + ASSERT_RESULT(Result::OK, stream->setParameters({}, {}))) + +TEST_IO_STREAM(setNonExistingParameter, "Set the values of an non existing parameter", + // Unfortunately, the set_parameter legacy interface did not return any + // error code when a key is not supported. + // To allow implementation to just wrapped the legacy one, consider OK as a + // valid result for setting a non existing parameter. 
+ ASSERT_RESULT(okOrNotSupportedOrInvalidArgs, + stream->setParameters({}, {{"non existing key", "0"}}))) + +TEST_IO_STREAM(DebugDump, "Check that a stream can dump its state without error", + testDebugDump([this](const auto& handle) { return stream->debug(handle, {}); })) + +TEST_IO_STREAM(DebugDumpInvalidArguments, + "Check that the stream dump doesn't crash on invalid arguments", + ASSERT_OK(stream->debug(hidl_handle(), {}))) + +////////////////////////////////////////////////////////////////////////////// +////////////////////////////// addRemoveEffect /////////////////////////////// +////////////////////////////////////////////////////////////////////////////// + +TEST_IO_STREAM(AddNonExistingEffect, "Adding a non existing effect should fail", + ASSERT_RESULT(Result::INVALID_ARGUMENTS, stream->addEffect(666))) +TEST_IO_STREAM(RemoveNonExistingEffect, "Removing a non existing effect should fail", + ASSERT_RESULT(Result::INVALID_ARGUMENTS, stream->removeEffect(666))) + +// TODO: positive tests + +////////////////////////////////////////////////////////////////////////////// +/////////////////////////////// Control //////////////////////////////// +////////////////////////////////////////////////////////////////////////////// + +TEST_IO_STREAM(standby, "Make sure the stream can be put in standby", + ASSERT_OK(stream->standby())) // can not fail + +static constexpr auto invalidStateOrNotSupported = {Result::INVALID_STATE, Result::NOT_SUPPORTED}; + +TEST_IO_STREAM(startNoMmap, "Starting an mmaped stream before mapping it should fail", + ASSERT_RESULT(invalidStateOrNotSupported, stream->start())) + +TEST_IO_STREAM(stopNoMmap, "Stopping an mmaped stream before mapping it should fail", + ASSERT_RESULT(invalidStateOrNotSupported, stream->stop())) + +TEST_IO_STREAM(getMmapPositionNoMmap, "Getting the mmap position of a stream before mapping it should fail", + ASSERT_RESULT(invalidStateOrNotSupported, stream->stop())) + +TEST_IO_STREAM(close, "Make sure a stream can be closed", ASSERT_OK(closeStream())) +TEST_IO_STREAM(closeTwice, "Make sure a stream can not be closed twice", ASSERT_OK(closeStream()); + ASSERT_RESULT(Result::INVALID_STATE, closeStream())) + +static void testCreateTooBigMmapBuffer(IStream* stream) { + MmapBufferInfo info; + Result res; + // Assume that int max is a value too big to be allocated + // This is true currently with a 32bit media server, but might not be when it + // runs in 64 bit + auto minSizeFrames = std::numeric_limits::max(); + ASSERT_OK(stream->createMmapBuffer(minSizeFrames, returnIn(res, info))); + ASSERT_RESULT(invalidArgsOrNotSupported, res); +} + +TEST_IO_STREAM(CreateTooBigMmapBuffer, "Creating a too big mmap buffer should fail", + testCreateTooBigMmapBuffer(stream.get())) + +static void testGetMmapPositionOfNonMmapedStream(IStream* stream) { + Result res; + MmapPosition position; + ASSERT_OK(stream->getMmapPosition(returnIn(res, position))); + ASSERT_RESULT(invalidArgsOrNotSupported, res); +} + +TEST_IO_STREAM(GetMmapPositionOfNonMmapedStream, + "Retrieving the mmap position of a non mmaped stream should fail", + testGetMmapPositionOfNonMmapedStream(stream.get())) + +////////////////////////////////////////////////////////////////////////////// +///////////////////////////////// StreamIn /////////////////////////////////// +////////////////////////////////////////////////////////////////////////////// + +TEST_P(InputStreamTest, GetAudioSource) { + doc::test("Retrieving the audio source of an input stream should always succeed"); + AudioSource source; +
ASSERT_OK(stream->getAudioSource(returnIn(res, source))); + if (res == Result::NOT_SUPPORTED) { + doc::partialTest("getAudioSource is not supported"); + return; + } + ASSERT_OK(res); + ASSERT_EQ(AudioSource::DEFAULT, source); +} + +static void testUnitaryGain(std::function(float)> setGain) { + for (float value : (float[]){-INFINITY, -1.0, 1.0 + std::numeric_limits::epsilon(), 2.0, + INFINITY, NAN}) { + EXPECT_RESULT(Result::INVALID_ARGUMENTS, setGain(value)) << "value=" << value; + } + // Do not consider -0.0 as an invalid value as it is == with 0.0 + for (float value : {-0.0, 0.0, 0.01, 0.5, 0.09, 1.0 /* Restore volume*/}) { + EXPECT_OK(setGain(value)) << "value=" << value; + } +} + +static void testOptionalUnitaryGain(std::function(float)> setGain, + string debugName) { + auto result = setGain(1); + ASSERT_IS_OK(result); + if (result == Result::NOT_SUPPORTED) { + doc::partialTest(debugName + " is not supported"); + return; + } + testUnitaryGain(setGain); +} + +TEST_P(InputStreamTest, SetGain) { + doc::test("The gain of an input stream should only be set between [0,1]"); + testOptionalUnitaryGain([this](float volume) { return stream->setGain(volume); }, + "InputStream::setGain"); +} + +static void testPrepareForReading(IStreamIn* stream, uint32_t frameSize, uint32_t framesCount) { + Result res; + // Ignore output parameters as the call should fail + ASSERT_OK(stream->prepareForReading(frameSize, framesCount, + [&res](auto r, auto&, auto&, auto&, auto&) { res = r; })); + EXPECT_RESULT(Result::INVALID_ARGUMENTS, res); +} + +TEST_P(InputStreamTest, PrepareForReadingWithZeroBuffer) { + doc::test("Preparing a stream for reading with a 0 sized buffer should fail"); + testPrepareForReading(stream.get(), 0, 0); +} + +TEST_P(InputStreamTest, PrepareForReadingWithHugeBuffer) { + doc::test("Preparing a stream for reading with a 2^32 sized buffer should fail"); + testPrepareForReading(stream.get(), 1, std::numeric_limits::max()); +} + +TEST_P(InputStreamTest, PrepareForReadingCheckOverflow) { + doc::test( + "Preparing a stream for reading with a overflowing sized buffer should " + "fail"); + auto uintMax = std::numeric_limits::max(); + testPrepareForReading(stream.get(), uintMax, uintMax); +} + +TEST_P(InputStreamTest, GetInputFramesLost) { + doc::test("The number of frames lost on a never started stream should be 0"); + auto ret = stream->getInputFramesLost(); + ASSERT_IS_OK(ret); + uint32_t framesLost{ret}; + ASSERT_EQ(0U, framesLost); +} + +TEST_P(InputStreamTest, getCapturePosition) { + doc::test( + "The capture position of a non prepared stream should not be " + "retrievable"); + uint64_t frames; + uint64_t time; + ASSERT_OK(stream->getCapturePosition(returnIn(res, frames, time))); + ASSERT_RESULT(invalidStateOrNotSupported, res); +} + +TEST_P(InputStreamTest, updateSinkMetadata) { + doc::test("The HAL should not crash on metadata change"); + + hidl_enum_range range; + // Test all possible track configuration + for (AudioSource source : range) { + for (float volume : {0.0, 0.5, 1.0}) { + const SinkMetadata metadata = {{{source, volume}}}; + ASSERT_OK(stream->updateSinkMetadata(metadata)) + << "source=" << toString(source) << ", volume=" << volume; + } + } + + // Do not test concurrent capture as this is not officially supported + + // Set no metadata as if all stream track had stopped + ASSERT_OK(stream->updateSinkMetadata({})); + + // Restore initial + ASSERT_OK(stream->updateSinkMetadata(initialMetadata)); +} + +TEST_P(InputStreamTest, getActiveMicrophones) { + doc::test("Getting active 
microphones should always succeed"); + hidl_vec microphones; + ASSERT_OK(device->getMicrophones(returnIn(res, microphones))); + ASSERT_OK(res); + ASSERT_TRUE(microphones.size() > 0); +} + +////////////////////////////////////////////////////////////////////////////// +///////////////////////////////// StreamOut ////////////////////////////////// +////////////////////////////////////////////////////////////////////////////// + +TEST_P(OutputStreamTest, getLatency) { + doc::test("Make sure latency is over 0"); + auto result = stream->getLatency(); + ASSERT_IS_OK(result); + ASSERT_GT(result, 0U); +} + +TEST_P(OutputStreamTest, setVolume) { + doc::test("Try to set the output volume"); + testOptionalUnitaryGain([this](float volume) { return stream->setVolume(volume, volume); }, + "setVolume"); +} + +static void testPrepareForWriting(IStreamOut* stream, uint32_t frameSize, uint32_t framesCount) { + Result res; + // Ignore output parameters as the call should fail + ASSERT_OK(stream->prepareForWriting(frameSize, framesCount, + [&res](auto r, auto&, auto&, auto&, auto&) { res = r; })); + EXPECT_RESULT(Result::INVALID_ARGUMENTS, res); +} + +TEST_P(OutputStreamTest, PrepareForWriteWithZeroBuffer) { + doc::test("Preparing a stream for writing with a 0 sized buffer should fail"); + testPrepareForWriting(stream.get(), 0, 0); +} + +TEST_P(OutputStreamTest, PrepareForWriteWithHugeBuffer) { + doc::test("Preparing a stream for writing with a 2^32 sized buffer should fail"); + testPrepareForWriting(stream.get(), 1, std::numeric_limits::max()); +} + +TEST_P(OutputStreamTest, PrepareForWritingCheckOverflow) { + doc::test( + "Preparing a stream for writing with a overflowing sized buffer should " + "fail"); + auto uintMax = std::numeric_limits::max(); + testPrepareForWriting(stream.get(), uintMax, uintMax); +} + +struct Capability { + Capability(IStreamOut* stream) { + EXPECT_OK(stream->supportsPauseAndResume(returnIn(pause, resume))); + auto ret = stream->supportsDrain(); + EXPECT_IS_OK(ret); + if (ret.isOk()) { + drain = ret; + } + } + bool pause = false; + bool resume = false; + bool drain = false; +}; + +TEST_P(OutputStreamTest, SupportsPauseAndResumeAndDrain) { + doc::test("Implementation must expose pause, resume and drain capabilities"); + Capability(stream.get()); +} + +template +static void checkInvalidStateOr0(Result res, Value value) { + switch (res) { + case Result::INVALID_STATE: + break; + case Result::OK: + ASSERT_EQ(0U, value); + break; + default: + FAIL() << "Unexpected result " << toString(res); + } +} + +TEST_P(OutputStreamTest, GetRenderPosition) { + doc::test("A new stream render position should be 0 or INVALID_STATE"); + uint32_t dspFrames; + ASSERT_OK(stream->getRenderPosition(returnIn(res, dspFrames))); + if (res == Result::NOT_SUPPORTED) { + doc::partialTest("getRenderPosition is not supported"); + return; + } + checkInvalidStateOr0(res, dspFrames); +} + +TEST_P(OutputStreamTest, GetNextWriteTimestamp) { + doc::test("A new stream next write timestamp should be 0 or INVALID_STATE"); + uint64_t timestampUs; + ASSERT_OK(stream->getNextWriteTimestamp(returnIn(res, timestampUs))); + if (res == Result::NOT_SUPPORTED) { + doc::partialTest("getNextWriteTimestamp is not supported"); + return; + } + checkInvalidStateOr0(res, timestampUs); +} + +/** Stub implementation of out stream callback. 
*/ +class MockOutCallbacks : public IStreamOutCallback { + Return onWriteReady() override { return {}; } + Return onDrainReady() override { return {}; } + Return onError() override { return {}; } +}; + +static bool isAsyncModeSupported(IStreamOut* stream) { + auto res = stream->setCallback(new MockOutCallbacks); + stream->clearCallback(); // try to restore the no callback state, ignore + // any error + EXPECT_RESULT(okOrNotSupported, res); + return res.isOk() ? res == Result::OK : false; +} + +TEST_P(OutputStreamTest, SetCallback) { + doc::test( + "If supported, registering callback for async operation should never " + "fail"); + if (!isAsyncModeSupported(stream.get())) { + doc::partialTest("The stream does not support async operations"); + return; + } + ASSERT_OK(stream->setCallback(new MockOutCallbacks)); + ASSERT_OK(stream->setCallback(new MockOutCallbacks)); +} + +TEST_P(OutputStreamTest, clearCallback) { + doc::test( + "If supported, clearing a callback to go back to sync operation should " + "not fail"); + if (!isAsyncModeSupported(stream.get())) { + doc::partialTest("The stream does not support async operations"); + return; + } + // TODO: Clarify if clearing a non existing callback should fail + ASSERT_OK(stream->setCallback(new MockOutCallbacks)); + ASSERT_OK(stream->clearCallback()); +} + +TEST_P(OutputStreamTest, Resume) { + doc::test( + "If supported, a stream should fail to resume if not previously " + "paused"); + if (!Capability(stream.get()).resume) { + doc::partialTest("The output stream does not support resume"); + return; + } + ASSERT_RESULT(Result::INVALID_STATE, stream->resume()); +} + +TEST_P(OutputStreamTest, Pause) { + doc::test( + "If supported, a stream should fail to pause if not previously " + "started"); + if (!Capability(stream.get()).pause) { + doc::partialTest("The output stream does not support pause"); + return; + } + ASSERT_RESULT(Result::INVALID_STATE, stream->pause()); +} + +static void testDrain(IStreamOut* stream, AudioDrain type) { + if (!Capability(stream).drain) { + doc::partialTest("The output stream does not support drain"); + return; + } + ASSERT_RESULT(Result::OK, stream->drain(type)); +} + +TEST_P(OutputStreamTest, DrainAll) { + doc::test("If supported, a stream should always succeed to drain"); + testDrain(stream.get(), AudioDrain::ALL); +} + +TEST_P(OutputStreamTest, DrainEarlyNotify) { + doc::test("If supported, a stream should always succeed to drain"); + testDrain(stream.get(), AudioDrain::EARLY_NOTIFY); +} + +TEST_P(OutputStreamTest, FlushStop) { + doc::test("If supported, a stream should always succeed to flush"); + auto ret = stream->flush(); + ASSERT_IS_OK(ret); + if (ret == Result::NOT_SUPPORTED) { + doc::partialTest("Flush is not supported"); + return; + } + ASSERT_OK(ret); +} + +TEST_P(OutputStreamTest, GetPresentationPositionStop) { + doc::test( + "If supported, a stream should always succeed to retrieve the " + "presentation position"); + uint64_t frames; + TimeSpec mesureTS; + ASSERT_OK(stream->getPresentationPosition(returnIn(res, frames, mesureTS))); + if (res == Result::NOT_SUPPORTED) { + doc::partialTest("getpresentationPosition is not supported"); + return; + } + ASSERT_EQ(0U, frames); + + if (mesureTS.tvNSec == 0 && mesureTS.tvSec == 0) { + // As the stream has never written a frame yet, + // the timestamp does not really have a meaning, allow to return 0 + return; + } + + // Make sure the return measure is not more than 1s old. 
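// (Both timestamps are converted to microseconds below, sec * 1e6 + nsec / 1e3, so the
// predicate "c - m < 1e+6" bounds the reported timestamp to at most 1 s behind
// CLOCK_MONOTONIC "now".)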
+ struct timespec currentTS; + ASSERT_EQ(0, clock_gettime(CLOCK_MONOTONIC, &currentTS)) << errno; + + auto toMicroSec = [](uint64_t sec, auto nsec) { return sec * 1e+6 + nsec / 1e+3; }; + auto currentTime = toMicroSec(currentTS.tv_sec, currentTS.tv_nsec); + auto mesureTime = toMicroSec(mesureTS.tvSec, mesureTS.tvNSec); + ASSERT_PRED2([](auto c, auto m) { return c - m < 1e+6; }, currentTime, mesureTime); +} + +TEST_P(OutputStreamTest, SelectPresentation) { + doc::test("Verify that presentation selection does not crash"); + ASSERT_RESULT(okOrNotSupported, stream->selectPresentation(0, 0)); +} + +TEST_P(OutputStreamTest, updateSourceMetadata) { + doc::test("The HAL should not crash on metadata change"); + + hidl_enum_range usageRange; + hidl_enum_range contentRange; + // Test all possible track configurations + for (auto usage : usageRange) { + for (auto content : contentRange) { + for (float volume : {0.0, 0.5, 1.0}) { + const SourceMetadata metadata = {{{usage, content, volume}}}; + ASSERT_OK(stream->updateSourceMetadata(metadata)) + << "usage=" << toString(usage) << ", content=" << toString(content) + << ", volume=" << volume; + } + } + } + + // Set many tracks of different configurations + ASSERT_OK(stream->updateSourceMetadata( + {{{AudioUsage::MEDIA, AudioContentType::MUSIC, 0.1}, + {AudioUsage::VOICE_COMMUNICATION, AudioContentType::SPEECH, 1.0}, + {AudioUsage::ALARM, AudioContentType::SONIFICATION, 0.0}, + {AudioUsage::ASSISTANT, AudioContentType::UNKNOWN, 0.3}}})); + + // Set no metadata as if all stream tracks had stopped + ASSERT_OK(stream->updateSourceMetadata({})); + + // Restore initial + ASSERT_OK(stream->updateSourceMetadata(initialMetadata)); +} + +////////////////////////////////////////////////////////////////////////////// +/////////////////////////////// PrimaryDevice //////////////////////////////// +////////////////////////////////////////////////////////////////////////////// + +TEST_F(AudioPrimaryHidlTest, setVoiceVolume) { + doc::test("Make sure setVoiceVolume only succeeds if volume is in [0,1]"); + testUnitaryGain([](float volume) { return device->setVoiceVolume(volume); }); +} + +TEST_F(AudioPrimaryHidlTest, setMode) { + doc::test("Make sure setMode always succeeds if mode is valid and fails otherwise"); + // Test invalid values + for (int mode : {-2, -1, int(AudioMode::IN_COMMUNICATION) + 1}) { + ASSERT_RESULT(Result::INVALID_ARGUMENTS, device->setMode(AudioMode(mode))) + << "mode=" << mode; + } + // Test valid values + for (AudioMode mode : {AudioMode::IN_CALL, AudioMode::IN_COMMUNICATION, AudioMode::RINGTONE, + AudioMode::NORMAL /* Make sure to leave the test in normal mode */}) { + ASSERT_OK(device->setMode(mode)) << "mode=" << toString(mode); + } +} + +TEST_F(AudioPrimaryHidlTest, setBtHfpSampleRate) { + doc::test( + "Make sure setBtHfpSampleRate either succeeds or " + "indicates that it is not supported at all, or that the provided value is invalid"); + for (auto samplingRate : {8000, 16000, 22050, 24000}) { + ASSERT_RESULT(okOrNotSupportedOrInvalidArgs, device->setBtHfpSampleRate(samplingRate)); + } +} + +TEST_F(AudioPrimaryHidlTest, setBtHfpVolume) { + doc::test( + "Make sure setBtHfpVolume is either not supported or " + "only succeeds if volume is in [0,1]"); + auto ret = device->setBtHfpVolume(0.0); + if (ret == Result::NOT_SUPPORTED) { + doc::partialTest("setBtHfpVolume is not supported"); + return; + } + testUnitaryGain([](float volume) { return device->setBtHfpVolume(volume); }); +} + +TEST_F(AudioPrimaryHidlTest, setBtScoHeadsetDebugName) { + doc::test( + "Make
sure setBtScoHeadsetDebugName either succeeds or " + "indicates that it is not supported"); + ASSERT_RESULT(okOrNotSupported, device->setBtScoHeadsetDebugName("test")); +} + +TEST_F(AudioPrimaryHidlTest, updateRotation) { + doc::test("Check that the hal can receive the current rotation"); + for (Rotation rotation : {Rotation::DEG_0, Rotation::DEG_90, Rotation::DEG_180, + Rotation::DEG_270, Rotation::DEG_0}) { + ASSERT_RESULT(okOrNotSupported, device->updateRotation(rotation)); + } +} + +TEST_F(BoolAccessorPrimaryHidlTest, BtScoNrecEnabled) { + doc::test("Query and set the BT SCO NR&EC state"); + testAccessors("BtScoNrecEnabled", Initial{false, OPTIONAL}, {true}, + &IPrimaryDevice::setBtScoNrecEnabled, + &IPrimaryDevice::getBtScoNrecEnabled); +} + +TEST_F(BoolAccessorPrimaryHidlTest, setGetBtScoWidebandEnabled) { + doc::test("Query and set the SCO wideband state"); + testAccessors("BtScoWideband", Initial{false, OPTIONAL}, {true}, + &IPrimaryDevice::setBtScoWidebandEnabled, + &IPrimaryDevice::getBtScoWidebandEnabled); +} + +TEST_F(BoolAccessorPrimaryHidlTest, setGetBtHfpEnabled) { + doc::test("Query and set the BT HFP state"); + testAccessors("BtHfpEnabled", Initial{false, OPTIONAL}, {true}, + &IPrimaryDevice::setBtHfpEnabled, &IPrimaryDevice::getBtHfpEnabled); +} + +using TtyModeAccessorPrimaryHidlTest = AccessorPrimaryHidlTest; +TEST_F(TtyModeAccessorPrimaryHidlTest, setGetTtyMode) { + doc::test("Query and set the TTY mode state"); + testAccessors("TTY mode", Initial{TtyMode::OFF}, + {TtyMode::HCO, TtyMode::VCO, TtyMode::FULL}, + &IPrimaryDevice::setTtyMode, &IPrimaryDevice::getTtyMode); +} + +TEST_F(BoolAccessorPrimaryHidlTest, setGetHac) { + doc::test("Query and set the HAC state"); + testAccessors("HAC", Initial{false}, {true}, &IPrimaryDevice::setHacEnabled, + &IPrimaryDevice::getHacEnabled); +} + +////////////////////////////////////////////////////////////////////////////// +//////////////////// Clean caches on global tear down //////////////////////// +////////////////////////////////////////////////////////////////////////////// + +int main(int argc, char** argv) { + environment = new AudioHidlTestEnvironment; + ::testing::AddGlobalTestEnvironment(environment); + ::testing::InitGoogleTest(&argc, argv); + environment->init(&argc, argv); + int status = RUN_ALL_TESTS(); + return status; +} diff --git a/audio/core/4.0/vts/functional/ValidateAudioConfiguration.cpp b/audio/core/4.0/vts/functional/ValidateAudioConfiguration.cpp new file mode 100644 index 0000000000000000000000000000000000000000..a64513fc815d69d3abc31bbc533c9b1d78752c01 --- /dev/null +++ b/audio/core/4.0/vts/functional/ValidateAudioConfiguration.cpp @@ -0,0 +1,30 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +#include +#include + +#include "utility/ValidateXml.h" + +TEST(CheckConfig, audioPolicyConfigurationValidation) { + RecordProperty("description", + "Verify that the audio policy configuration file " + "is valid according to the schema"); + + std::vector locations = {"/odm/etc", "/vendor/etc", "/system/etc"}; + EXPECT_ONE_VALID_XML_MULTIPLE_LOCATIONS("audio_policy_configuration.xml", locations, + "/data/local/tmp/audio_policy_configuration_V4_0.xsd"); +} diff --git a/audio/core/all-versions/OWNERS b/audio/core/all-versions/OWNERS new file mode 100644 index 0000000000000000000000000000000000000000..6fdc97ca298fbbda9cb676f5acb02d7495debcb4 --- /dev/null +++ b/audio/core/all-versions/OWNERS @@ -0,0 +1,3 @@ +elaurent@google.com +krocard@google.com +mnaganov@google.com diff --git a/audio/core/all-versions/default/Android.bp b/audio/core/all-versions/default/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..bb02863ba694a5e222cf8b84d1940286d78fb0de --- /dev/null +++ b/audio/core/all-versions/default/Android.bp @@ -0,0 +1,29 @@ +cc_library_headers { + name: "android.hardware.audio.core@all-versions-impl", + relative_install_path: "hw", + vendor: true, + + defaults: ["hidl_defaults"], + + export_include_dirs: ["include"], + + shared_libs: [ + "libbase", + "libcutils", + "libfmq", + "libhardware", + "libhidlbase", + "libhidltransport", + "liblog", + "libutils", + "android.hardware.audio.common-util", + ], + + header_libs: [ + "libaudioclient_headers", + "libaudio_system_headers", + "libhardware_headers", + "libmedia_headers", + "android.hardware.audio.common.util@all-versions", + ], +} diff --git a/audio/2.0/default/Conversions.h b/audio/core/all-versions/default/include/core/all-versions/default/Conversions.h similarity index 71% rename from audio/2.0/default/Conversions.h rename to audio/core/all-versions/default/include/core/all-versions/default/Conversions.h index ebda5c572ccc9a75190c102170bd84e327477855..b38eca35a113dc78654fde5a36d467f2dfaefd54 100644 --- a/audio/2.0/default/Conversions.h +++ b/audio/core/all-versions/default/include/core/all-versions/default/Conversions.h @@ -14,28 +14,29 @@ * limitations under the License. 
*/ -#ifndef android_hardware_audio_V2_0_Conversions_H_ -#define android_hardware_audio_V2_0_Conversions_H_ +#include #include -#include #include namespace android { namespace hardware { namespace audio { -namespace V2_0 { +namespace AUDIO_HAL_VERSION { namespace implementation { -using ::android::hardware::audio::V2_0::DeviceAddress; +using ::android::hardware::audio::AUDIO_HAL_VERSION::DeviceAddress; std::string deviceAddressToHal(const DeviceAddress& address); +#ifdef AUDIO_HAL_VERSION_4_0 +bool halToMicrophoneCharacteristics(MicrophoneInfo* pDst, + const struct audio_microphone_characteristic_t& src); +#endif + } // namespace implementation -} // namespace V2_0 +} // namespace AUDIO_HAL_VERSION } // namespace audio } // namespace hardware } // namespace android - -#endif // android_hardware_audio_V2_0_Conversions_H_ diff --git a/audio/core/all-versions/default/include/core/all-versions/default/Conversions.impl.h b/audio/core/all-versions/default/include/core/all-versions/default/Conversions.impl.h new file mode 100644 index 0000000000000000000000000000000000000000..5828c3f3732b63c14aebe2f4aab7db6acdc05405 --- /dev/null +++ b/audio/core/all-versions/default/include/core/all-versions/default/Conversions.impl.h @@ -0,0 +1,194 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include + +#include + +namespace android { +namespace hardware { +namespace audio { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioDevice; + +std::string deviceAddressToHal(const DeviceAddress& address) { + // HAL assumes that the address is NUL-terminated. 
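// (Depending on the device class, the rendered form below is e.g. a Bluetooth MAC
// "00:11:22:AA:BB:CC", an IPv4 dotted quad "192.168.0.10", an ALSA pair
// "card=1;device=0", or the verbatim bus / remote-submix address string.)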
+ char halAddress[AUDIO_DEVICE_MAX_ADDRESS_LEN]; + memset(halAddress, 0, sizeof(halAddress)); + uint32_t halDevice = static_cast(address.device); + const bool isInput = (halDevice & AUDIO_DEVICE_BIT_IN) != 0; + if (isInput) halDevice &= ~AUDIO_DEVICE_BIT_IN; + if ((!isInput && (halDevice & AUDIO_DEVICE_OUT_ALL_A2DP) != 0) || + (isInput && (halDevice & AUDIO_DEVICE_IN_BLUETOOTH_A2DP) != 0)) { + snprintf(halAddress, sizeof(halAddress), "%02X:%02X:%02X:%02X:%02X:%02X", + address.address.mac[0], address.address.mac[1], address.address.mac[2], + address.address.mac[3], address.address.mac[4], address.address.mac[5]); + } else if ((!isInput && (halDevice & AUDIO_DEVICE_OUT_IP) != 0) || + (isInput && (halDevice & AUDIO_DEVICE_IN_IP) != 0)) { + snprintf(halAddress, sizeof(halAddress), "%d.%d.%d.%d", address.address.ipv4[0], + address.address.ipv4[1], address.address.ipv4[2], address.address.ipv4[3]); + } else if ((!isInput && (halDevice & AUDIO_DEVICE_OUT_ALL_USB) != 0) || + (isInput && (halDevice & AUDIO_DEVICE_IN_ALL_USB) != 0)) { + snprintf(halAddress, sizeof(halAddress), "card=%d;device=%d", address.address.alsa.card, + address.address.alsa.device); + } else if ((!isInput && (halDevice & AUDIO_DEVICE_OUT_BUS) != 0) || + (isInput && (halDevice & AUDIO_DEVICE_IN_BUS) != 0)) { + snprintf(halAddress, sizeof(halAddress), "%s", address.busAddress.c_str()); + } else if ((!isInput && (halDevice & AUDIO_DEVICE_OUT_REMOTE_SUBMIX)) != 0 || + (isInput && (halDevice & AUDIO_DEVICE_IN_REMOTE_SUBMIX) != 0)) { + snprintf(halAddress, sizeof(halAddress), "%s", address.rSubmixAddress.c_str()); + } + return halAddress; +} + +#ifdef AUDIO_HAL_VERSION_4_0 +status_t deviceAddressFromHal(audio_devices_t device, const char* halAddress, + DeviceAddress* address) { + if (address == nullptr) { + return BAD_VALUE; + } + address->device = AudioDevice(device); + if (halAddress == nullptr || strnlen(halAddress, AUDIO_DEVICE_MAX_ADDRESS_LEN) == 0) { + return OK; + } + + const bool isInput = (device & AUDIO_DEVICE_BIT_IN) != 0; + if (isInput) device &= ~AUDIO_DEVICE_BIT_IN; + if ((!isInput && (device & AUDIO_DEVICE_OUT_ALL_A2DP) != 0) || + (isInput && (device & AUDIO_DEVICE_IN_BLUETOOTH_A2DP) != 0)) { + int status = + sscanf(halAddress, "%hhX:%hhX:%hhX:%hhX:%hhX:%hhX", &address->address.mac[0], + &address->address.mac[1], &address->address.mac[2], &address->address.mac[3], + &address->address.mac[4], &address->address.mac[5]); + return status == 6 ? OK : BAD_VALUE; + } else if ((!isInput && (device & AUDIO_DEVICE_OUT_IP) != 0) || + (isInput && (device & AUDIO_DEVICE_IN_IP) != 0)) { + int status = + sscanf(halAddress, "%hhu.%hhu.%hhu.%hhu", &address->address.ipv4[0], + &address->address.ipv4[1], &address->address.ipv4[2], &address->address.ipv4[3]); + return status == 4 ? OK : BAD_VALUE; + } else if ((!isInput && (device & AUDIO_DEVICE_OUT_ALL_USB)) != 0 || + (isInput && (device & AUDIO_DEVICE_IN_ALL_USB)) != 0) { + int status = sscanf(halAddress, "card=%d;device=%d", &address->address.alsa.card, + &address->address.alsa.device); + return status == 2 ? 
OK : BAD_VALUE; + } else if ((!isInput && (device & AUDIO_DEVICE_OUT_BUS) != 0) || + (isInput && (device & AUDIO_DEVICE_IN_BUS) != 0)) { + address->busAddress = halAddress; + return OK; + } else if ((!isInput && (device & AUDIO_DEVICE_OUT_REMOTE_SUBMIX)) != 0 || + (isInput && (device & AUDIO_DEVICE_IN_REMOTE_SUBMIX) != 0)) { + address->rSubmixAddress = halAddress; + return OK; + } + address->busAddress = halAddress; + return OK; +} + +AudioMicrophoneChannelMapping halToChannelMapping(audio_microphone_channel_mapping_t mapping) { + switch (mapping) { + case AUDIO_MICROPHONE_CHANNEL_MAPPING_UNUSED: + return AudioMicrophoneChannelMapping::UNUSED; + case AUDIO_MICROPHONE_CHANNEL_MAPPING_DIRECT: + return AudioMicrophoneChannelMapping::DIRECT; + case AUDIO_MICROPHONE_CHANNEL_MAPPING_PROCESSED: + return AudioMicrophoneChannelMapping::PROCESSED; + default: + ALOGE("Invalid channel mapping type: %d", mapping); + return AudioMicrophoneChannelMapping::UNUSED; + } +} + +AudioMicrophoneLocation halToLocation(audio_microphone_location_t location) { + switch (location) { + default: + case AUDIO_MICROPHONE_LOCATION_UNKNOWN: + return AudioMicrophoneLocation::UNKNOWN; + case AUDIO_MICROPHONE_LOCATION_MAINBODY: + return AudioMicrophoneLocation::MAINBODY; + case AUDIO_MICROPHONE_LOCATION_MAINBODY_MOVABLE: + return AudioMicrophoneLocation::MAINBODY_MOVABLE; + case AUDIO_MICROPHONE_LOCATION_PERIPHERAL: + return AudioMicrophoneLocation::PERIPHERAL; + } +} + +AudioMicrophoneDirectionality halToDirectionality(audio_microphone_directionality_t dir) { + switch (dir) { + default: + case AUDIO_MICROPHONE_DIRECTIONALITY_UNKNOWN: + return AudioMicrophoneDirectionality::UNKNOWN; + case AUDIO_MICROPHONE_DIRECTIONALITY_OMNI: + return AudioMicrophoneDirectionality::OMNI; + case AUDIO_MICROPHONE_DIRECTIONALITY_BI_DIRECTIONAL: + return AudioMicrophoneDirectionality::BI_DIRECTIONAL; + case AUDIO_MICROPHONE_DIRECTIONALITY_CARDIOID: + return AudioMicrophoneDirectionality::CARDIOID; + case AUDIO_MICROPHONE_DIRECTIONALITY_HYPER_CARDIOID: + return AudioMicrophoneDirectionality::HYPER_CARDIOID; + case AUDIO_MICROPHONE_DIRECTIONALITY_SUPER_CARDIOID: + return AudioMicrophoneDirectionality::SUPER_CARDIOID; + } +} + +bool halToMicrophoneCharacteristics(MicrophoneInfo* pDst, + const struct audio_microphone_characteristic_t& src) { + bool status = false; + if (pDst != NULL) { + pDst->deviceId = src.device_id; + + if (deviceAddressFromHal(src.device, src.address, &pDst->deviceAddress) != OK) { + return false; + } + pDst->channelMapping.resize(AUDIO_CHANNEL_COUNT_MAX); + for (size_t ch = 0; ch < pDst->channelMapping.size(); ch++) { + pDst->channelMapping[ch] = halToChannelMapping(src.channel_mapping[ch]); + } + pDst->location = halToLocation(src.location); + pDst->group = (AudioMicrophoneGroup)src.group; + pDst->indexInTheGroup = (uint32_t)src.index_in_the_group; + pDst->sensitivity = src.sensitivity; + pDst->maxSpl = src.max_spl; + pDst->minSpl = src.min_spl; + pDst->directionality = halToDirectionality(src.directionality); + pDst->frequencyResponse.resize(src.num_frequency_responses); + for (size_t k = 0; k < src.num_frequency_responses; k++) { + pDst->frequencyResponse[k].frequency = src.frequency_responses[0][k]; + pDst->frequencyResponse[k].level = src.frequency_responses[1][k]; + } + pDst->position.x = src.geometric_location.x; + pDst->position.y = src.geometric_location.y; + pDst->position.z = src.geometric_location.z; + + pDst->orientation.x = src.orientation.x; + pDst->orientation.y = src.orientation.y; + pDst->orientation.z = 
src.orientation.z; + + status = true; + } + return status; +} +#endif + +} // namespace implementation +} // namespace AUDIO_HAL_VERSION +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/core/all-versions/default/include/core/all-versions/default/Device.h b/audio/core/all-versions/default/include/core/all-versions/default/Device.h new file mode 100644 index 0000000000000000000000000000000000000000..eb53b482b28a38b1a8ae641a1cd5b126fa45c117 --- /dev/null +++ b/audio/core/all-versions/default/include/core/all-versions/default/Device.h @@ -0,0 +1,148 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include + +#include +#include + +#include + +#include + +#include + +namespace android { +namespace hardware { +namespace audio { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioConfig; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioHwSync; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioInputFlag; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioOutputFlag; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioPatchHandle; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioPort; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioPortConfig; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioSource; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::implementation::AudioInputFlagBitfield; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::implementation:: + AudioOutputFlagBitfield; +using ::android::hardware::audio::AUDIO_HAL_VERSION::DeviceAddress; +using ::android::hardware::audio::AUDIO_HAL_VERSION::IDevice; +using ::android::hardware::audio::AUDIO_HAL_VERSION::IStreamIn; +using ::android::hardware::audio::AUDIO_HAL_VERSION::IStreamOut; +using ::android::hardware::audio::AUDIO_HAL_VERSION::ParameterValue; +using ::android::hardware::audio::AUDIO_HAL_VERSION::Result; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; + +#ifdef AUDIO_HAL_VERSION_4_0 +using ::android::hardware::audio::AUDIO_HAL_VERSION::SourceMetadata; +using ::android::hardware::audio::AUDIO_HAL_VERSION::SinkMetadata; +#endif + +struct Device : public IDevice, public ParametersUtil { + explicit Device(audio_hw_device_t* device); + + // Methods from ::android::hardware::audio::AUDIO_HAL_VERSION::IDevice follow. 
+ Return initCheck() override; + Return setMasterVolume(float volume) override; + Return getMasterVolume(getMasterVolume_cb _hidl_cb) override; + Return setMicMute(bool mute) override; + Return getMicMute(getMicMute_cb _hidl_cb) override; + Return setMasterMute(bool mute) override; + Return getMasterMute(getMasterMute_cb _hidl_cb) override; + Return getInputBufferSize(const AudioConfig& config, + getInputBufferSize_cb _hidl_cb) override; + + // V2 openInputStream is called by V4 input stream thus present in both versions + Return openInputStream(int32_t ioHandle, const DeviceAddress& device, + const AudioConfig& config, AudioInputFlagBitfield flags, + AudioSource source, openInputStream_cb _hidl_cb); +#ifdef AUDIO_HAL_VERSION_2_0 + Return openOutputStream(int32_t ioHandle, const DeviceAddress& device, + const AudioConfig& config, AudioOutputFlagBitfield flags, + openOutputStream_cb _hidl_cb) override; +#elif defined(AUDIO_HAL_VERSION_4_0) + Return openOutputStream(int32_t ioHandle, const DeviceAddress& device, + const AudioConfig& config, AudioOutputFlagBitfield flags, + const SourceMetadata& sourceMetadata, + openOutputStream_cb _hidl_cb) override; + Return openInputStream(int32_t ioHandle, const DeviceAddress& device, + const AudioConfig& config, AudioInputFlagBitfield flags, + const SinkMetadata& sinkMetadata, + openInputStream_cb _hidl_cb) override; +#endif + + Return supportsAudioPatches() override; + Return createAudioPatch(const hidl_vec& sources, + const hidl_vec& sinks, + createAudioPatch_cb _hidl_cb) override; + Return releaseAudioPatch(int32_t patch) override; + Return getAudioPort(const AudioPort& port, getAudioPort_cb _hidl_cb) override; + Return setAudioPortConfig(const AudioPortConfig& config) override; + + Return setScreenState(bool turnedOn) override; + +#ifdef AUDIO_HAL_VERSION_2_0 + Return getHwAvSync() override; + Return getParameters(const hidl_vec& keys, + getParameters_cb _hidl_cb) override; + Return setParameters(const hidl_vec& parameters) override; + Return debugDump(const hidl_handle& fd) override; +#elif defined(AUDIO_HAL_VERSION_4_0) + Return getHwAvSync(getHwAvSync_cb _hidl_cb) override; + Return getParameters(const hidl_vec& context, + const hidl_vec& keys, + getParameters_cb _hidl_cb) override; + Return setParameters(const hidl_vec& context, + const hidl_vec& parameters) override; + Return getMicrophones(getMicrophones_cb _hidl_cb) override; + Return setConnectedState(const DeviceAddress& address, bool connected) override; +#endif + + Return debug(const hidl_handle& fd, const hidl_vec& options) override; + + // Utility methods for extending interfaces. + Result analyzeStatus(const char* funcName, int status); + void closeInputStream(audio_stream_in_t* stream); + void closeOutputStream(audio_stream_out_t* stream); + audio_hw_device_t* device() const { return mDevice; } + + private: + audio_hw_device_t* mDevice; + + virtual ~Device(); + + // Methods from ParametersUtil. 
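
These declarations are compiled once per HAL version, with the build defining AUDIO_HAL_VERSION_2_0 or AUDIO_HAL_VERSION_4_0 so that the same header yields the matching method shapes. A toy sketch of the idiom, with placeholder names only:

#include <iostream>

#define AUDIO_HAL_VERSION_4_0 1  // in practice the build defines exactly one version macro

struct ExampleDevice {
#if defined(AUDIO_HAL_VERSION_2_0)
    int getHwAvSync() { return 42; }                   // 2.0 style: value returned directly
#elif defined(AUDIO_HAL_VERSION_4_0)
    template <typename Cb>
    void getHwAvSync(Cb cb) { cb(/*ok=*/true, 42); }   // 4.0 style: result + value via callback
#endif
};

int main() {
    ExampleDevice dev;
#if defined(AUDIO_HAL_VERSION_2_0)
    std::cout << dev.getHwAvSync() << "\n";
#else
    dev.getHwAvSync([](bool ok, int sync) { std::cout << ok << " " << sync << "\n"; });
#endif
}
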
+ char* halGetParameters(const char* keys) override; + int halSetParameters(const char* keysAndValues) override; + + uint32_t version() const { return mDevice->common.version; } +}; + +} // namespace implementation +} // namespace AUDIO_HAL_VERSION +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/2.0/default/Device.cpp b/audio/core/all-versions/default/include/core/all-versions/default/Device.impl.h similarity index 51% rename from audio/2.0/default/Device.cpp rename to audio/core/all-versions/default/include/core/all-versions/default/Device.impl.h index 8b83e464412184010dde7b9e37c7931261d957dd..230b8de24351178453040c2889e287ffb4fa8f77 100644 --- a/audio/2.0/default/Device.cpp +++ b/audio/core/all-versions/default/include/core/all-versions/default/Device.impl.h @@ -1,49 +1,42 @@ /* -* Copyright (C) 2016 The Android Open Source Project -* -* Licensed under the Apache License, Version 2.0 (the "License"); -* you may not use this file except in compliance with the License. -* You may obtain a copy of the License at -* -* http://www.apache.org/licenses/LICENSE-2.0 -* -* Unless required by applicable law or agreed to in writing, software -* distributed under the License is distributed on an "AS IS" BASIS, -* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -* See the License for the specific language governing permissions and -* limitations under the License. -*/ - -#define LOG_TAG "DeviceHAL" + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + //#define LOG_NDEBUG 0 -#include #include #include +#include #include -#include "Conversions.h" -#include "Device.h" -#include "HidlUtils.h" -#include "StreamIn.h" -#include "StreamOut.h" -#include "Util.h" +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::HidlUtils; namespace android { namespace hardware { namespace audio { -namespace V2_0 { +namespace AUDIO_HAL_VERSION { namespace implementation { -Device::Device(audio_hw_device_t* device) - : mDevice(device) { -} +Device::Device(audio_hw_device_t* device) : mDevice(device) {} Device::~Device() { int status = audio_hw_device_close(mDevice); - ALOGW_IF(status, "Error closing audio hw device %p: %s", mDevice, - strerror(-status)); + ALOGW_IF(status, "Error closing audio hw device %p: %s", mDevice, strerror(-status)); mDevice = nullptr; } @@ -67,7 +60,7 @@ int Device::halSetParameters(const char* keysAndValues) { return mDevice->set_parameters(mDevice, keysAndValues); } -// Methods from ::android::hardware::audio::V2_0::IDevice follow. +// Methods from ::android::hardware::audio::AUDIO_HAL_VERSION::IDevice follow. 
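
The implementations that follow wrap the legacy audio_hw_device_t function-pointer table, and several of those entry points are optional: when a hook is NULL the wrapper answers NOT_SUPPORTED instead of calling through. A simplified, self-contained sketch of that check (hypothetical types, not the real HAL structs):

#include <iostream>

enum class Result { OK, NOT_SUPPORTED, INVALID_STATE };

struct LegacyDevice {  // stand-in for the audio_hw_device_t function-pointer table
    int (*get_master_volume)(LegacyDevice*, float*) = nullptr;  // optional hook, may be NULL
};

Result getMasterVolume(LegacyDevice* dev, float* volume) {
    if (dev->get_master_volume == nullptr) return Result::NOT_SUPPORTED;
    return dev->get_master_volume(dev, volume) == 0 ? Result::OK : Result::INVALID_STATE;
}

int main() {
    LegacyDevice dev;  // no hook installed by this fake HAL
    float v = 0.f;
    std::cout << (getMasterVolume(&dev, &v) == Result::NOT_SUPPORTED) << "\n";  // prints 1
}
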
Return Device::initCheck() { return analyzeStatus("init_check", mDevice->init_check(mDevice)); } @@ -80,16 +73,14 @@ Return Device::setMasterVolume(float volume) { ALOGW("Can not set a master volume (%f) outside [0,1]", volume); return Result::INVALID_ARGUMENTS; } - return analyzeStatus("set_master_volume", - mDevice->set_master_volume(mDevice, volume)); + return analyzeStatus("set_master_volume", mDevice->set_master_volume(mDevice, volume)); } Return Device::getMasterVolume(getMasterVolume_cb _hidl_cb) { Result retval(Result::NOT_SUPPORTED); float volume = 0; if (mDevice->get_master_volume != NULL) { - retval = analyzeStatus("get_master_volume", - mDevice->get_master_volume(mDevice, &volume)); + retval = analyzeStatus("get_master_volume", mDevice->get_master_volume(mDevice, &volume)); } _hidl_cb(retval, volume); return Void(); @@ -101,8 +92,7 @@ Return Device::setMicMute(bool mute) { Return Device::getMicMute(getMicMute_cb _hidl_cb) { bool mute = false; - Result retval = - analyzeStatus("get_mic_mute", mDevice->get_mic_mute(mDevice, &mute)); + Result retval = analyzeStatus("get_mic_mute", mDevice->get_mic_mute(mDevice, &mute)); _hidl_cb(retval, mute); return Void(); } @@ -110,8 +100,7 @@ Return Device::getMicMute(getMicMute_cb _hidl_cb) { Return Device::setMasterMute(bool mute) { Result retval(Result::NOT_SUPPORTED); if (mDevice->set_master_mute != NULL) { - retval = analyzeStatus("set_master_mute", - mDevice->set_master_mute(mDevice, mute)); + retval = analyzeStatus("set_master_mute", mDevice->set_master_mute(mDevice, mute)); } return retval; } @@ -120,15 +109,13 @@ Return Device::getMasterMute(getMasterMute_cb _hidl_cb) { Result retval(Result::NOT_SUPPORTED); bool mute = false; if (mDevice->get_master_mute != NULL) { - retval = analyzeStatus("get_master_mute", - mDevice->get_master_mute(mDevice, &mute)); + retval = analyzeStatus("get_master_mute", mDevice->get_master_mute(mDevice, &mute)); } _hidl_cb(retval, mute); return Void(); } -Return Device::getInputBufferSize(const AudioConfig& config, - getInputBufferSize_cb _hidl_cb) { +Return Device::getInputBufferSize(const AudioConfig& config, getInputBufferSize_cb _hidl_cb) { audio_config_t halConfig; HidlUtils::audioConfigToHal(config, &halConfig); size_t halBufferSize = mDevice->get_input_buffer_size(mDevice, &halConfig); @@ -142,10 +129,11 @@ Return Device::getInputBufferSize(const AudioConfig& config, return Void(); } -Return Device::openOutputStream(int32_t ioHandle, - const DeviceAddress& device, - const AudioConfig& config, - AudioOutputFlag flags, +Return Device::openOutputStream(int32_t ioHandle, const DeviceAddress& device, + const AudioConfig& config, AudioOutputFlagBitfield flags, +#ifdef AUDIO_HAL_VERSION_4_0 + const SourceMetadata& /* sourceMetadata */, +#endif openOutputStream_cb _hidl_cb) { audio_config_t halConfig; HidlUtils::audioConfigToHal(config, &halConfig); @@ -154,13 +142,12 @@ Return Device::openOutputStream(int32_t ioHandle, "open_output_stream handle: %d devices: %x flags: %#x " "srate: %d format %#x channels %x address %s", ioHandle, static_cast(device.device), - static_cast(flags), halConfig.sample_rate, - halConfig.format, halConfig.channel_mask, - deviceAddressToHal(device).c_str()); - int status = mDevice->open_output_stream( - mDevice, ioHandle, static_cast(device.device), - static_cast(flags), &halConfig, &halStream, - deviceAddressToHal(device).c_str()); + static_cast(flags), halConfig.sample_rate, halConfig.format, + halConfig.channel_mask, deviceAddressToHal(device).c_str()); + int status = + 
mDevice->open_output_stream(mDevice, ioHandle, static_cast(device.device), + static_cast(flags), &halConfig, + &halStream, deviceAddressToHal(device).c_str()); ALOGV("open_output_stream status %d stream %p", status, halStream); sp streamOut; if (status == OK) { @@ -168,16 +155,13 @@ Return Device::openOutputStream(int32_t ioHandle, } AudioConfig suggestedConfig; HidlUtils::audioConfigFromHal(halConfig, &suggestedConfig); - _hidl_cb(analyzeStatus("open_output_stream", status), streamOut, - suggestedConfig); + _hidl_cb(analyzeStatus("open_output_stream", status), streamOut, suggestedConfig); return Void(); } -Return Device::openInputStream(int32_t ioHandle, - const DeviceAddress& device, - const AudioConfig& config, - AudioInputFlag flags, AudioSource source, - openInputStream_cb _hidl_cb) { +Return Device::openInputStream(int32_t ioHandle, const DeviceAddress& device, + const AudioConfig& config, AudioInputFlagBitfield flags, + AudioSource source, openInputStream_cb _hidl_cb) { audio_config_t halConfig; HidlUtils::audioConfigToHal(config, &halConfig); audio_stream_in_t* halStream; @@ -185,14 +169,12 @@ Return Device::openInputStream(int32_t ioHandle, "open_input_stream handle: %d devices: %x flags: %#x " "srate: %d format %#x channels %x address %s source %d", ioHandle, static_cast(device.device), - static_cast(flags), halConfig.sample_rate, - halConfig.format, halConfig.channel_mask, - deviceAddressToHal(device).c_str(), + static_cast(flags), halConfig.sample_rate, halConfig.format, + halConfig.channel_mask, deviceAddressToHal(device).c_str(), static_cast(source)); int status = mDevice->open_input_stream( - mDevice, ioHandle, static_cast(device.device), - &halConfig, &halStream, static_cast(flags), - deviceAddressToHal(device).c_str(), + mDevice, ioHandle, static_cast(device.device), &halConfig, &halStream, + static_cast(flags), deviceAddressToHal(device).c_str(), static_cast(source)); ALOGV("open_input_stream status %d stream %p", status, halStream); sp streamIn; @@ -201,11 +183,28 @@ Return Device::openInputStream(int32_t ioHandle, } AudioConfig suggestedConfig; HidlUtils::audioConfigFromHal(halConfig, &suggestedConfig); - _hidl_cb(analyzeStatus("open_input_stream", status), streamIn, - suggestedConfig); + _hidl_cb(analyzeStatus("open_input_stream", status), streamIn, suggestedConfig); return Void(); } +#ifdef AUDIO_HAL_VERSION_4_0 +Return Device::openInputStream(int32_t ioHandle, const DeviceAddress& device, + const AudioConfig& config, AudioInputFlagBitfield flags, + const SinkMetadata& sinkMetadata, + openInputStream_cb _hidl_cb) { + if (sinkMetadata.tracks.size() == 0) { + // This should never happen, the framework must not create as stream + // if there is no client + ALOGE("openInputStream called without tracks connected"); + _hidl_cb(Result::INVALID_ARGUMENTS, nullptr, AudioConfig()); + return Void(); + } + // Pick the first one as the main until the legacy API is update + AudioSource source = sinkMetadata.tracks[0].source; + return openInputStream(ioHandle, device, config, flags, source, _hidl_cb); +} +#endif + Return Device::supportsAudioPatches() { return version() >= AUDIO_DEVICE_API_VERSION_3_0; } @@ -216,15 +215,12 @@ Return Device::createAudioPatch(const hidl_vec& sources, Result retval(Result::NOT_SUPPORTED); AudioPatchHandle patch = 0; if (version() >= AUDIO_DEVICE_API_VERSION_3_0) { - std::unique_ptr halSources( - HidlUtils::audioPortConfigsToHal(sources)); - std::unique_ptr halSinks( - HidlUtils::audioPortConfigsToHal(sinks)); + std::unique_ptr 
halSources(HidlUtils::audioPortConfigsToHal(sources)); + std::unique_ptr halSinks(HidlUtils::audioPortConfigsToHal(sinks)); audio_patch_handle_t halPatch = AUDIO_PATCH_HANDLE_NONE; - retval = analyzeStatus( - "create_audio_patch", - mDevice->create_audio_patch(mDevice, sources.size(), &halSources[0], - sinks.size(), &halSinks[0], &halPatch)); + retval = analyzeStatus("create_audio_patch", + mDevice->create_audio_patch(mDevice, sources.size(), &halSources[0], + sinks.size(), &halSinks[0], &halPatch)); if (retval == Result::OK) { patch = static_cast(halPatch); } @@ -237,18 +233,15 @@ Return Device::releaseAudioPatch(int32_t patch) { if (version() >= AUDIO_DEVICE_API_VERSION_3_0) { return analyzeStatus( "release_audio_patch", - mDevice->release_audio_patch( - mDevice, static_cast(patch))); + mDevice->release_audio_patch(mDevice, static_cast(patch))); } return Result::NOT_SUPPORTED; } -Return Device::getAudioPort(const AudioPort& port, - getAudioPort_cb _hidl_cb) { +Return Device::getAudioPort(const AudioPort& port, getAudioPort_cb _hidl_cb) { audio_port halPort; HidlUtils::audioPortToHal(port, &halPort); - Result retval = analyzeStatus("get_audio_port", - mDevice->get_audio_port(mDevice, &halPort)); + Result retval = analyzeStatus("get_audio_port", mDevice->get_audio_port(mDevice, &halPort)); AudioPort resultPort = port; if (retval == Result::OK) { HidlUtils::audioPortFromHal(halPort, &resultPort); @@ -261,43 +254,92 @@ Return Device::setAudioPortConfig(const AudioPortConfig& config) { if (version() >= AUDIO_DEVICE_API_VERSION_3_0) { struct audio_port_config halPortConfig; HidlUtils::audioPortConfigToHal(config, &halPortConfig); - return analyzeStatus( - "set_audio_port_config", - mDevice->set_audio_port_config(mDevice, &halPortConfig)); + return analyzeStatus("set_audio_port_config", + mDevice->set_audio_port_config(mDevice, &halPortConfig)); } return Result::NOT_SUPPORTED; } +#ifdef AUDIO_HAL_VERSION_2_0 Return Device::getHwAvSync() { int halHwAvSync; Result retval = getParam(AudioParameter::keyHwAvSync, &halHwAvSync); return retval == Result::OK ? 
halHwAvSync : AUDIO_HW_SYNC_INVALID; } +#elif defined(AUDIO_HAL_VERSION_4_0) +Return Device::getHwAvSync(getHwAvSync_cb _hidl_cb) { + int halHwAvSync; + Result retval = getParam(AudioParameter::keyHwAvSync, &halHwAvSync); + _hidl_cb(retval, halHwAvSync); + return Void(); +} +#endif Return Device::setScreenState(bool turnedOn) { return setParam(AudioParameter::keyScreenState, turnedOn); } -Return Device::getParameters(const hidl_vec& keys, - getParameters_cb _hidl_cb) { - getParametersImpl(keys, _hidl_cb); +#ifdef AUDIO_HAL_VERSION_2_0 +Return Device::getParameters(const hidl_vec& keys, getParameters_cb _hidl_cb) { + getParametersImpl({}, keys, _hidl_cb); return Void(); } -Return Device::setParameters( - const hidl_vec& parameters) { - return setParametersImpl(parameters); +Return Device::setParameters(const hidl_vec& parameters) { + return setParametersImpl({} /* context */, parameters); } +#elif defined(AUDIO_HAL_VERSION_4_0) +Return Device::getParameters(const hidl_vec& context, + const hidl_vec& keys, getParameters_cb _hidl_cb) { + getParametersImpl(context, keys, _hidl_cb); + return Void(); +} +Return Device::setParameters(const hidl_vec& context, + const hidl_vec& parameters) { + return setParametersImpl(context, parameters); +} +#endif +#ifdef AUDIO_HAL_VERSION_2_0 Return Device::debugDump(const hidl_handle& fd) { + return debug(fd, {}); +} +#endif + +Return Device::debug(const hidl_handle& fd, const hidl_vec& /* options */) { if (fd.getNativeHandle() != nullptr && fd->numFds == 1) { analyzeStatus("dump", mDevice->dump(mDevice, fd->data[0])); } return Void(); } +#ifdef AUDIO_HAL_VERSION_4_0 +Return Device::getMicrophones(getMicrophones_cb _hidl_cb) { + Result retval = Result::NOT_SUPPORTED; + size_t actual_mics = AUDIO_MICROPHONE_MAX_COUNT; + audio_microphone_characteristic_t mic_array[AUDIO_MICROPHONE_MAX_COUNT]; + + hidl_vec microphones; + if (mDevice->get_microphones != NULL && + mDevice->get_microphones(mDevice, &mic_array[0], &actual_mics) == 0) { + microphones.resize(actual_mics); + for (size_t i = 0; i < actual_mics; ++i) { + halToMicrophoneCharacteristics(µphones[i], mic_array[i]); + } + retval = Result::OK; + } + _hidl_cb(retval, microphones); + return Void(); +} + +Return Device::setConnectedState(const DeviceAddress& address, bool connected) { + auto key = connected ? AudioParameter::keyStreamConnect : AudioParameter::keyStreamDisconnect; + return setParam(key, address); +} +#endif + } // namespace implementation -} // namespace V2_0 +} // namespace AUDIO_HAL_VERSION } // namespace audio } // namespace hardware } // namespace android diff --git a/audio/2.0/default/DevicesFactory.h b/audio/core/all-versions/default/include/core/all-versions/default/DevicesFactory.h similarity index 59% rename from audio/2.0/default/DevicesFactory.h rename to audio/core/all-versions/default/include/core/all-versions/default/DevicesFactory.h index b046f9ffda1627fa7d4cbec2c937ec17ebdfd44b..1509ad170c34f9b486cb4ec177f46d00a9772efb 100644 --- a/audio/2.0/default/DevicesFactory.h +++ b/audio/core/all-versions/default/include/core/all-versions/default/DevicesFactory.h @@ -14,24 +14,22 @@ * limitations under the License. 
*/ -#ifndef ANDROID_HARDWARE_AUDIO_V2_0_DEVICESFACTORY_H -#define ANDROID_HARDWARE_AUDIO_V2_0_DEVICESFACTORY_H +#include #include -#include #include #include namespace android { namespace hardware { namespace audio { -namespace V2_0 { +namespace AUDIO_HAL_VERSION { namespace implementation { -using ::android::hardware::audio::V2_0::IDevice; -using ::android::hardware::audio::V2_0::IDevicesFactory; -using ::android::hardware::audio::V2_0::Result; +using ::android::hardware::audio::AUDIO_HAL_VERSION::IDevice; +using ::android::hardware::audio::AUDIO_HAL_VERSION::IDevicesFactory; +using ::android::hardware::audio::AUDIO_HAL_VERSION::Result; using ::android::hardware::Return; using ::android::hardware::Void; using ::android::hardware::hidl_vec; @@ -39,21 +37,26 @@ using ::android::hardware::hidl_string; using ::android::sp; struct DevicesFactory : public IDevicesFactory { - // Methods from ::android::hardware::audio::V2_0::IDevicesFactory follow. - Return openDevice(IDevicesFactory::Device device, openDevice_cb _hidl_cb) override; - - private: - static const char* deviceToString(IDevicesFactory::Device device); - static int loadAudioInterface(const char *if_name, audio_hw_device_t **dev); - +#ifdef AUDIO_HAL_VERSION_2_0 + Return openDevice(IDevicesFactory::Device device, openDevice_cb _hidl_cb) override; +#endif +#ifdef AUDIO_HAL_VERSION_4_0 + Return openDevice(const hidl_string& device, openDevice_cb _hidl_cb) override; + Return openPrimaryDevice(openPrimaryDevice_cb _hidl_cb) override; +#endif + + private: + template + Return openDevice(const char* moduleName, Callback _hidl_cb); + Return openDevice(const char* moduleName, openDevice_cb _hidl_cb); + + static int loadAudioInterface(const char* if_name, audio_hw_device_t** dev); }; extern "C" IDevicesFactory* HIDL_FETCH_IDevicesFactory(const char* name); } // namespace implementation -} // namespace V2_0 +} // namespace AUDIO_HAL_VERSION } // namespace audio } // namespace hardware } // namespace android - -#endif // ANDROID_HARDWARE_AUDIO_V2_0_DEVICESFACTORY_H diff --git a/audio/core/all-versions/default/include/core/all-versions/default/DevicesFactory.impl.h b/audio/core/all-versions/default/include/core/all-versions/default/DevicesFactory.impl.h new file mode 100644 index 0000000000000000000000000000000000000000..43e5d6eaedfcf09e14fa0f316dbde051ca11b9f7 --- /dev/null +++ b/audio/core/all-versions/default/include/core/all-versions/default/DevicesFactory.impl.h @@ -0,0 +1,117 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include + +#include + +namespace android { +namespace hardware { +namespace audio { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +#ifdef AUDIO_HAL_VERSION_2_0 +Return DevicesFactory::openDevice(IDevicesFactory::Device device, openDevice_cb _hidl_cb) { + switch (device) { + case IDevicesFactory::Device::PRIMARY: + return openDevice(AUDIO_HARDWARE_MODULE_ID_PRIMARY, _hidl_cb); + case IDevicesFactory::Device::A2DP: + return openDevice(AUDIO_HARDWARE_MODULE_ID_A2DP, _hidl_cb); + case IDevicesFactory::Device::USB: + return openDevice(AUDIO_HARDWARE_MODULE_ID_USB, _hidl_cb); + case IDevicesFactory::Device::R_SUBMIX: + return openDevice(AUDIO_HARDWARE_MODULE_ID_REMOTE_SUBMIX, _hidl_cb); + case IDevicesFactory::Device::STUB: + return openDevice(AUDIO_HARDWARE_MODULE_ID_STUB, _hidl_cb); + } + _hidl_cb(Result::INVALID_ARGUMENTS, nullptr); + return Void(); +} +#endif +#ifdef AUDIO_HAL_VERSION_4_0 +Return DevicesFactory::openDevice(const hidl_string& moduleName, openDevice_cb _hidl_cb) { + if (moduleName == AUDIO_HARDWARE_MODULE_ID_PRIMARY) { + return openDevice(moduleName.c_str(), _hidl_cb); + } + return openDevice(moduleName.c_str(), _hidl_cb); +} +Return DevicesFactory::openPrimaryDevice(openPrimaryDevice_cb _hidl_cb) { + return openDevice(AUDIO_HARDWARE_MODULE_ID_PRIMARY, _hidl_cb); +} +#endif + +Return DevicesFactory::openDevice(const char* moduleName, openDevice_cb _hidl_cb) { + return openDevice(moduleName, _hidl_cb); +} + +template +Return DevicesFactory::openDevice(const char* moduleName, Callback _hidl_cb) { + audio_hw_device_t* halDevice; + Result retval(Result::INVALID_ARGUMENTS); + sp result; + int halStatus = loadAudioInterface(moduleName, &halDevice); + if (halStatus == OK) { + result = new DeviceShim(halDevice); + retval = Result::OK; + } else if (halStatus == -EINVAL) { + retval = Result::NOT_INITIALIZED; + } + _hidl_cb(retval, result); + return Void(); +} + +// static +int DevicesFactory::loadAudioInterface(const char* if_name, audio_hw_device_t** dev) { + const hw_module_t* mod; + int rc; + + rc = hw_get_module_by_class(AUDIO_HARDWARE_MODULE_ID, if_name, &mod); + if (rc) { + ALOGE("%s couldn't load audio hw module %s.%s (%s)", __func__, AUDIO_HARDWARE_MODULE_ID, + if_name, strerror(-rc)); + goto out; + } + rc = audio_hw_device_open(mod, dev); + if (rc) { + ALOGE("%s couldn't open audio hw device in %s.%s (%s)", __func__, AUDIO_HARDWARE_MODULE_ID, + if_name, strerror(-rc)); + goto out; + } + if ((*dev)->common.version < AUDIO_DEVICE_API_VERSION_MIN) { + ALOGE("%s wrong audio hw device version %04x", __func__, (*dev)->common.version); + rc = -EINVAL; + audio_hw_device_close(*dev); + goto out; + } + return OK; + +out: + *dev = NULL; + return rc; +} + +IDevicesFactory* HIDL_FETCH_IDevicesFactory(const char* /* name */) { + return new DevicesFactory(); +} + +} // namespace implementation +} // namespace AUDIO_HAL_VERSION +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/2.0/default/ParametersUtil.h b/audio/core/all-versions/default/include/core/all-versions/default/ParametersUtil.h similarity index 65% rename from audio/2.0/default/ParametersUtil.h rename to audio/core/all-versions/default/include/core/all-versions/default/ParametersUtil.h index 49036dc52135db979ec5ad4858b76ff5cbc90906..35ff1105ea7aa2eff246308372c69cdbbc57f0ad 100644 --- a/audio/2.0/default/ParametersUtil.h +++ b/audio/core/all-versions/default/include/core/all-versions/default/ParametersUtil.h @@ -14,43 +14,45 @@ * limitations under the 
License. */ -#ifndef android_hardware_audio_V2_0_ParametersUtil_H_ -#define android_hardware_audio_V2_0_ParametersUtil_H_ +#include #include #include -#include #include #include namespace android { namespace hardware { namespace audio { -namespace V2_0 { +namespace AUDIO_HAL_VERSION { namespace implementation { -using ::android::hardware::audio::V2_0::ParameterValue; -using ::android::hardware::audio::V2_0::Result; +using ::android::hardware::audio::AUDIO_HAL_VERSION::DeviceAddress; +using ::android::hardware::audio::AUDIO_HAL_VERSION::ParameterValue; +using ::android::hardware::audio::AUDIO_HAL_VERSION::Result; using ::android::hardware::hidl_string; using ::android::hardware::hidl_vec; class ParametersUtil { - public: + public: + Result setParam(const char* name, const char* value); Result getParam(const char* name, bool* value); Result getParam(const char* name, int* value); - Result getParam(const char* name, String8* value); + Result getParam(const char* name, String8* value, AudioParameter context = {}); void getParametersImpl( - const hidl_vec& keys, - std::function& parameters)> cb); + const hidl_vec& context, const hidl_vec& keys, + std::function& parameters)> cb); std::unique_ptr getParams(const AudioParameter& keys); Result setParam(const char* name, bool value); Result setParam(const char* name, int value); - Result setParam(const char* name, const char* value); - Result setParametersImpl(const hidl_vec& parameters); + Result setParam(const char* name, float value); + Result setParametersImpl(const hidl_vec& context, + const hidl_vec& parameters); Result setParams(const AudioParameter& param); + Result setParam(const char* name, const DeviceAddress& address); - protected: + protected: virtual ~ParametersUtil() {} virtual char* halGetParameters(const char* keys) = 0; @@ -58,9 +60,7 @@ class ParametersUtil { }; } // namespace implementation -} // namespace V2_0 +} // namespace AUDIO_HAL_VERSION } // namespace audio } // namespace hardware } // namespace android - -#endif // android_hardware_audio_V2_0_ParametersUtil_H_ diff --git a/audio/2.0/default/ParametersUtil.cpp b/audio/core/all-versions/default/include/core/all-versions/default/ParametersUtil.impl.h similarity index 69% rename from audio/2.0/default/ParametersUtil.cpp rename to audio/core/all-versions/default/include/core/all-versions/default/ParametersUtil.impl.h index 257c8e5f0d88dcc9fac16bded75f0d8ee24523d6..34bc53c7a75f4f8774e4e7a43e650ce6a018eae0 100644 --- a/audio/2.0/default/ParametersUtil.cpp +++ b/audio/core/all-versions/default/include/core/all-versions/default/ParametersUtil.impl.h @@ -14,16 +14,19 @@ * limitations under the License. 
*/ -#include "ParametersUtil.h" -#include "Util.h" +#include +#include +#include namespace android { namespace hardware { namespace audio { -namespace V2_0 { +namespace AUDIO_HAL_VERSION { namespace implementation { -// Static method and not private method to avoid leaking status_t dependency +/** Converts a status_t in Result according to the rules of AudioParameter::get* + * Note: Static method and not private method to avoid leaking status_t dependency + */ static Result getHalStatusToResult(status_t status) { switch (status) { case OK: @@ -61,27 +64,26 @@ Result ParametersUtil::getParam(const char* name, int* value) { return getHalStatusToResult(params->getInt(halName, *value)); } -Result ParametersUtil::getParam(const char* name, String8* value) { +Result ParametersUtil::getParam(const char* name, String8* value, AudioParameter context) { const String8 halName(name); - AudioParameter keys; - keys.addKey(halName); - std::unique_ptr params = getParams(keys); + context.addKey(halName); + std::unique_ptr params = getParams(context); return getHalStatusToResult(params->get(halName, *value)); } void ParametersUtil::getParametersImpl( - const hidl_vec& keys, - std::function& parameters)> - cb) { + const hidl_vec& context, const hidl_vec& keys, + std::function& parameters)> cb) { AudioParameter halKeys; + for (auto& pair : context) { + halKeys.add(String8(pair.key.c_str()), String8(pair.value.c_str())); + } for (size_t i = 0; i < keys.size(); ++i) { halKeys.addKey(String8(keys[i].c_str())); } std::unique_ptr halValues = getParams(halKeys); - Result retval = (keys.size() == 0 || halValues->size() != 0) - ? Result::OK - : Result::NOT_SUPPORTED; + Result retval = + (keys.size() == 0 || halValues->size() != 0) ? Result::OK : Result::NOT_SUPPORTED; hidl_vec result; result.resize(halValues->size()); String8 halKey, halValue; @@ -98,8 +100,7 @@ void ParametersUtil::getParametersImpl( cb(retval, result); } -std::unique_ptr ParametersUtil::getParams( - const AudioParameter& keys) { +std::unique_ptr ParametersUtil::getParams(const AudioParameter& keys) { String8 paramsAndValues; char* halValues = halGetParameters(keys.keysToString().string()); if (halValues != NULL) { @@ -111,10 +112,15 @@ std::unique_ptr ParametersUtil::getParams( return std::unique_ptr(new AudioParameter(paramsAndValues)); } +Result ParametersUtil::setParam(const char* name, const char* value) { + AudioParameter param; + param.add(String8(name), String8(value)); + return setParams(param); +} + Result ParametersUtil::setParam(const char* name, bool value) { AudioParameter param; - param.add(String8(name), String8(value ? AudioParameter::valueOn - : AudioParameter::valueOff)); + param.add(String8(name), String8(value ? 
AudioParameter::valueOn : AudioParameter::valueOff)); return setParams(param); } @@ -124,21 +130,28 @@ Result ParametersUtil::setParam(const char* name, int value) { return setParams(param); } -Result ParametersUtil::setParam(const char* name, const char* value) { +Result ParametersUtil::setParam(const char* name, float value) { AudioParameter param; - param.add(String8(name), String8(value)); + param.addFloat(String8(name), value); return setParams(param); } -Result ParametersUtil::setParametersImpl( - const hidl_vec& parameters) { +Result ParametersUtil::setParametersImpl(const hidl_vec& context, + const hidl_vec& parameters) { AudioParameter params; + for (auto& pair : context) { + params.add(String8(pair.key.c_str()), String8(pair.value.c_str())); + } for (size_t i = 0; i < parameters.size(); ++i) { - params.add(String8(parameters[i].key.c_str()), - String8(parameters[i].value.c_str())); + params.add(String8(parameters[i].key.c_str()), String8(parameters[i].value.c_str())); } return setParams(params); } +Result ParametersUtil::setParam(const char* name, const DeviceAddress& address) { + AudioParameter params(String8(deviceAddressToHal(address).c_str())); + params.addInt(String8(name), int(address.device)); + return setParams(params); +} Result ParametersUtil::setParams(const AudioParameter& param) { int halStatus = halSetParameters(param.toString().string()); @@ -146,7 +159,7 @@ Result ParametersUtil::setParams(const AudioParameter& param) { } } // namespace implementation -} // namespace V2_0 +} // namespace AUDIO_HAL_VERSION } // namespace audio } // namespace hardware } // namespace android diff --git a/audio/core/all-versions/default/include/core/all-versions/default/PrimaryDevice.h b/audio/core/all-versions/default/include/core/all-versions/default/PrimaryDevice.h new file mode 100644 index 0000000000000000000000000000000000000000..42996d703ee881dcb6e9b41c358f2b93f4abe4ab --- /dev/null +++ b/audio/core/all-versions/default/include/core/all-versions/default/PrimaryDevice.h @@ -0,0 +1,140 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include + +#include + +namespace android { +namespace hardware { +namespace audio { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioConfig; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioInputFlag; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioMode; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioOutputFlag; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioPort; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioPortConfig; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioSource; +using ::android::hardware::audio::AUDIO_HAL_VERSION::DeviceAddress; +using ::android::hardware::audio::AUDIO_HAL_VERSION::IDevice; +using ::android::hardware::audio::AUDIO_HAL_VERSION::IPrimaryDevice; +using ::android::hardware::audio::AUDIO_HAL_VERSION::IStreamIn; +using ::android::hardware::audio::AUDIO_HAL_VERSION::IStreamOut; +using ::android::hardware::audio::AUDIO_HAL_VERSION::ParameterValue; +using ::android::hardware::audio::AUDIO_HAL_VERSION::Result; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; + +struct PrimaryDevice : public IPrimaryDevice { + explicit PrimaryDevice(audio_hw_device_t* device); + + // Methods from ::android::hardware::audio::AUDIO_HAL_VERSION::IDevice follow. + Return initCheck() override; + Return setMasterVolume(float volume) override; + Return getMasterVolume(getMasterVolume_cb _hidl_cb) override; + Return setMicMute(bool mute) override; + Return getMicMute(getMicMute_cb _hidl_cb) override; + Return setMasterMute(bool mute) override; + Return getMasterMute(getMasterMute_cb _hidl_cb) override; + Return getInputBufferSize(const AudioConfig& config, + getInputBufferSize_cb _hidl_cb) override; + + Return openOutputStream(int32_t ioHandle, const DeviceAddress& device, + const AudioConfig& config, AudioOutputFlagBitfield flags, +#ifdef AUDIO_HAL_VERSION_4_0 + const SourceMetadata& sourceMetadata, +#endif + openOutputStream_cb _hidl_cb) override; + + Return openInputStream(int32_t ioHandle, const DeviceAddress& device, + const AudioConfig& config, AudioInputFlagBitfield flags, + AudioSource source, openInputStream_cb _hidl_cb); +#ifdef AUDIO_HAL_VERSION_4_0 + Return openInputStream(int32_t ioHandle, const DeviceAddress& device, + const AudioConfig& config, AudioInputFlagBitfield flags, + const SinkMetadata& sinkMetadata, + openInputStream_cb _hidl_cb) override; +#endif + + Return supportsAudioPatches() override; + Return createAudioPatch(const hidl_vec& sources, + const hidl_vec& sinks, + createAudioPatch_cb _hidl_cb) override; + Return releaseAudioPatch(int32_t patch) override; + Return getAudioPort(const AudioPort& port, getAudioPort_cb _hidl_cb) override; + Return setAudioPortConfig(const AudioPortConfig& config) override; + + Return setScreenState(bool turnedOn) override; + +#ifdef AUDIO_HAL_VERSION_2_0 + Return getHwAvSync() override; + Return getParameters(const hidl_vec& keys, + getParameters_cb _hidl_cb) override; + Return setParameters(const hidl_vec& parameters) override; + Return debugDump(const hidl_handle& fd) override; +#elif defined(AUDIO_HAL_VERSION_4_0) + Return getHwAvSync(getHwAvSync_cb _hidl_cb) override; + Return getParameters(const hidl_vec& context, + const hidl_vec& keys, + getParameters_cb _hidl_cb) override; + Return 
setParameters(const hidl_vec& context, + const hidl_vec& parameters) override; + Return getMicrophones(getMicrophones_cb _hidl_cb) override; + Return setConnectedState(const DeviceAddress& address, bool connected) override; +#endif + + Return debug(const hidl_handle& fd, const hidl_vec& options) override; + + // Methods from ::android::hardware::audio::AUDIO_HAL_VERSION::IPrimaryDevice follow. + Return setVoiceVolume(float volume) override; + Return setMode(AudioMode mode) override; + Return getBtScoNrecEnabled(getBtScoNrecEnabled_cb _hidl_cb) override; + Return setBtScoNrecEnabled(bool enabled) override; + Return getBtScoWidebandEnabled(getBtScoWidebandEnabled_cb _hidl_cb) override; + Return setBtScoWidebandEnabled(bool enabled) override; + Return getTtyMode(getTtyMode_cb _hidl_cb) override; + Return setTtyMode(IPrimaryDevice::TtyMode mode) override; + Return getHacEnabled(getHacEnabled_cb _hidl_cb) override; + Return setHacEnabled(bool enabled) override; + +#ifdef AUDIO_HAL_VERSION_4_0 + Return setBtScoHeadsetDebugName(const hidl_string& name) override; + Return getBtHfpEnabled(getBtHfpEnabled_cb _hidl_cb) override; + Return setBtHfpEnabled(bool enabled) override; + Return setBtHfpSampleRate(uint32_t sampleRateHz) override; + Return setBtHfpVolume(float volume) override; + Return updateRotation(IPrimaryDevice::Rotation rotation) override; +#endif + + private: + sp mDevice; + + virtual ~PrimaryDevice(); +}; + +} // namespace implementation +} // namespace AUDIO_HAL_VERSION +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/2.0/default/PrimaryDevice.cpp b/audio/core/all-versions/default/include/core/all-versions/default/PrimaryDevice.impl.h similarity index 55% rename from audio/2.0/default/PrimaryDevice.cpp rename to audio/core/all-versions/default/include/core/all-versions/default/PrimaryDevice.impl.h index aaf899160cd34eed9168670477ad95b24599f1ef..f269dd4f91b6c34fab650e21b60b3b55205136a1 100644 --- a/audio/2.0/default/PrimaryDevice.cpp +++ b/audio/core/all-versions/default/include/core/all-versions/default/PrimaryDevice.impl.h @@ -14,24 +14,23 @@ * limitations under the License. */ -#define LOG_TAG "PrimaryDeviceHAL" +#include -#include "PrimaryDevice.h" -#include "Util.h" +#ifdef AUDIO_HAL_VERSION_4_0 +#include +#endif namespace android { namespace hardware { namespace audio { -namespace V2_0 { +namespace AUDIO_HAL_VERSION { namespace implementation { -PrimaryDevice::PrimaryDevice(audio_hw_device_t* device) - : mDevice(new Device(device)) { -} +PrimaryDevice::PrimaryDevice(audio_hw_device_t* device) : mDevice(new Device(device)) {} PrimaryDevice::~PrimaryDevice() {} -// Methods from ::android::hardware::audio::V2_0::IDevice follow. +// Methods from ::android::hardware::audio::AUDIO_HAL_VERSION::IDevice follow. 
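
As the definitions below show, PrimaryDevice is a thin decorator: it holds an inner Device and forwards every IDevice call to it, implementing only the IPrimaryDevice additions itself. A compact sketch of that forwarding shape, with simplified stand-in classes:

#include <iostream>
#include <memory>

struct GenericDevice {
    bool initCheck() const { return true; }
    void setScreenState(bool on) { std::cout << "screen=" << on << "\n"; }
};

struct PrimaryDeviceSketch {
    explicit PrimaryDeviceSketch(std::shared_ptr<GenericDevice> dev) : mDevice(std::move(dev)) {}

    // IDevice-style calls are pure pass-through to the wrapped device.
    bool initCheck() const { return mDevice->initCheck(); }
    void setScreenState(bool on) { mDevice->setScreenState(on); }

    // IPrimaryDevice-style extras live only in the wrapper.
    void setVoiceVolume(float volume) { std::cout << "voice volume=" << volume << "\n"; }

   private:
    std::shared_ptr<GenericDevice> mDevice;
};

int main() {
    PrimaryDeviceSketch primary(std::make_shared<GenericDevice>());
    primary.setScreenState(true);
    primary.setVoiceVolume(0.5f);
}
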
Return PrimaryDevice::initCheck() { return mDevice->initCheck(); } @@ -65,28 +64,43 @@ Return PrimaryDevice::getInputBufferSize(const AudioConfig& config, return mDevice->getInputBufferSize(config, _hidl_cb); } -Return PrimaryDevice::openOutputStream(int32_t ioHandle, - const DeviceAddress& device, +#ifdef AUDIO_HAL_VERSION_2_0 +Return PrimaryDevice::openOutputStream(int32_t ioHandle, const DeviceAddress& device, const AudioConfig& config, - AudioOutputFlag flags, + AudioOutputFlagBitfield flags, openOutputStream_cb _hidl_cb) { return mDevice->openOutputStream(ioHandle, device, config, flags, _hidl_cb); } -Return PrimaryDevice::openInputStream( - int32_t ioHandle, const DeviceAddress& device, const AudioConfig& config, - AudioInputFlag flags, AudioSource source, openInputStream_cb _hidl_cb) { - return mDevice->openInputStream(ioHandle, device, config, flags, source, - _hidl_cb); +Return PrimaryDevice::openInputStream(int32_t ioHandle, const DeviceAddress& device, + const AudioConfig& config, AudioInputFlagBitfield flags, + AudioSource source, openInputStream_cb _hidl_cb) { + return mDevice->openInputStream(ioHandle, device, config, flags, source, _hidl_cb); +} +#elif defined(AUDIO_HAL_VERSION_4_0) +Return PrimaryDevice::openOutputStream(int32_t ioHandle, const DeviceAddress& device, + const AudioConfig& config, + AudioOutputFlagBitfield flags, + const SourceMetadata& sourceMetadata, + openOutputStream_cb _hidl_cb) { + return mDevice->openOutputStream(ioHandle, device, config, flags, sourceMetadata, _hidl_cb); } +Return PrimaryDevice::openInputStream(int32_t ioHandle, const DeviceAddress& device, + const AudioConfig& config, AudioInputFlagBitfield flags, + const SinkMetadata& sinkMetadata, + openInputStream_cb _hidl_cb) { + return mDevice->openInputStream(ioHandle, device, config, flags, sinkMetadata, _hidl_cb); +} +#endif + Return PrimaryDevice::supportsAudioPatches() { return mDevice->supportsAudioPatches(); } -Return PrimaryDevice::createAudioPatch( - const hidl_vec& sources, - const hidl_vec& sinks, createAudioPatch_cb _hidl_cb) { +Return PrimaryDevice::createAudioPatch(const hidl_vec& sources, + const hidl_vec& sinks, + createAudioPatch_cb _hidl_cb) { return mDevice->createAudioPatch(sources, sinks, _hidl_cb); } @@ -94,47 +108,64 @@ Return PrimaryDevice::releaseAudioPatch(int32_t patch) { return mDevice->releaseAudioPatch(patch); } -Return PrimaryDevice::getAudioPort(const AudioPort& port, - getAudioPort_cb _hidl_cb) { +Return PrimaryDevice::getAudioPort(const AudioPort& port, getAudioPort_cb _hidl_cb) { return mDevice->getAudioPort(port, _hidl_cb); } -Return PrimaryDevice::setAudioPortConfig( - const AudioPortConfig& config) { +Return PrimaryDevice::setAudioPortConfig(const AudioPortConfig& config) { return mDevice->setAudioPortConfig(config); } -Return PrimaryDevice::getHwAvSync() { - return mDevice->getHwAvSync(); -} - Return PrimaryDevice::setScreenState(bool turnedOn) { return mDevice->setScreenState(turnedOn); } +#ifdef AUDIO_HAL_VERSION_2_0 +Return PrimaryDevice::getHwAvSync() { + return mDevice->getHwAvSync(); +} + Return PrimaryDevice::getParameters(const hidl_vec& keys, getParameters_cb _hidl_cb) { return mDevice->getParameters(keys, _hidl_cb); } -Return PrimaryDevice::setParameters( - const hidl_vec& parameters) { +Return PrimaryDevice::setParameters(const hidl_vec& parameters) { return mDevice->setParameters(parameters); } Return PrimaryDevice::debugDump(const hidl_handle& fd) { return mDevice->debugDump(fd); } +#elif defined(AUDIO_HAL_VERSION_4_0) +Return 
PrimaryDevice::getHwAvSync(getHwAvSync_cb _hidl_cb) { + return mDevice->getHwAvSync(_hidl_cb); +} +Return PrimaryDevice::getParameters(const hidl_vec& context, + const hidl_vec& keys, + getParameters_cb _hidl_cb) { + return mDevice->getParameters(context, keys, _hidl_cb); +} +Return PrimaryDevice::setParameters(const hidl_vec& context, + const hidl_vec& parameters) { + return mDevice->setParameters(context, parameters); +} +Return PrimaryDevice::getMicrophones(getMicrophones_cb _hidl_cb) { + return mDevice->getMicrophones(_hidl_cb); +} +Return PrimaryDevice::setConnectedState(const DeviceAddress& address, bool connected) { + return mDevice->setConnectedState(address, connected); +} +#endif -// Methods from ::android::hardware::audio::V2_0::IPrimaryDevice follow. +// Methods from ::android::hardware::audio::AUDIO_HAL_VERSION::IPrimaryDevice follow. Return PrimaryDevice::setVoiceVolume(float volume) { if (!isGainNormalized(volume)) { ALOGW("Can not set a voice volume (%f) outside [0,1]", volume); return Result::INVALID_ARGUMENTS; } - return mDevice->analyzeStatus( - "set_voice_volume", - mDevice->device()->set_voice_volume(mDevice->device(), volume)); + return mDevice->analyzeStatus("set_voice_volume", + mDevice->device()->set_voice_volume(mDevice->device(), volume)); } Return PrimaryDevice::setMode(AudioMode mode) { @@ -151,12 +182,11 @@ Return PrimaryDevice::setMode(AudioMode mode) { }; return mDevice->analyzeStatus( - "set_mode", mDevice->device()->set_mode( - mDevice->device(), static_cast(mode))); + "set_mode", + mDevice->device()->set_mode(mDevice->device(), static_cast(mode))); } -Return PrimaryDevice::getBtScoNrecEnabled( - getBtScoNrecEnabled_cb _hidl_cb) { +Return PrimaryDevice::getBtScoNrecEnabled(getBtScoNrecEnabled_cb _hidl_cb) { bool enabled; Result retval = mDevice->getParam(AudioParameter::keyBtNrec, &enabled); _hidl_cb(retval, enabled); @@ -167,8 +197,7 @@ Return PrimaryDevice::setBtScoNrecEnabled(bool enabled) { return mDevice->setParam(AudioParameter::keyBtNrec, enabled); } -Return PrimaryDevice::getBtScoWidebandEnabled( - getBtScoWidebandEnabled_cb _hidl_cb) { +Return PrimaryDevice::getBtScoWidebandEnabled(getBtScoWidebandEnabled_cb _hidl_cb) { bool enabled; Result retval = mDevice->getParam(AUDIO_PARAMETER_KEY_BT_SCO_WB, &enabled); _hidl_cb(retval, enabled); @@ -242,8 +271,43 @@ Return PrimaryDevice::setHacEnabled(bool enabled) { return mDevice->setParam(AUDIO_PARAMETER_KEY_HAC, enabled); } +#ifdef AUDIO_HAL_VERSION_4_0 +Return PrimaryDevice::setBtScoHeadsetDebugName(const hidl_string& name) { + return mDevice->setParam(AUDIO_PARAMETER_KEY_BT_SCO_HEADSET_NAME, name.c_str()); +} +Return PrimaryDevice::getBtHfpEnabled(getBtHfpEnabled_cb _hidl_cb) { + bool enabled; + Result retval = mDevice->getParam(AUDIO_PARAMETER_KEY_HFP_ENABLE, &enabled); + _hidl_cb(retval, enabled); + return Void(); +} +Return PrimaryDevice::setBtHfpEnabled(bool enabled) { + return mDevice->setParam(AUDIO_PARAMETER_KEY_HFP_ENABLE, enabled); +} +Return PrimaryDevice::setBtHfpSampleRate(uint32_t sampleRateHz) { + return mDevice->setParam(AUDIO_PARAMETER_KEY_HFP_SET_SAMPLING_RATE, int(sampleRateHz)); +} +Return PrimaryDevice::setBtHfpVolume(float volume) { + if (!isGainNormalized(volume)) { + ALOGW("Can not set BT HFP volume (%f) outside [0,1]", volume); + return Result::INVALID_ARGUMENTS; + } + // Map the normalized volume onto the range of [0, 15] + return mDevice->setParam(AUDIO_PARAMETER_KEY_HFP_VOLUME, + static_cast(std::round(volume * 15))); +} +Return 
PrimaryDevice::updateRotation(IPrimaryDevice::Rotation rotation) { + // legacy API expects the rotation in degree + return mDevice->setParam(AUDIO_PARAMETER_KEY_ROTATION, int(rotation) * 90); +} +#endif + +Return PrimaryDevice::debug(const hidl_handle& fd, const hidl_vec& options) { + return mDevice->debug(fd, options); +} + } // namespace implementation -} // namespace V2_0 +} // namespace AUDIO_HAL_VERSION } // namespace audio } // namespace hardware } // namespace android diff --git a/audio/2.0/default/Stream.h b/audio/core/all-versions/default/include/core/all-versions/default/Stream.h similarity index 56% rename from audio/2.0/default/Stream.h rename to audio/core/all-versions/default/include/core/all-versions/default/Stream.h index e29af531844dfc85eccacbe937f201803f62cc44..6f79429c61d6c202a9f55536e3982ba7f07ca932 100644 --- a/audio/2.0/default/Stream.h +++ b/audio/core/all-versions/default/include/core/all-versions/default/Stream.h @@ -14,32 +14,31 @@ * limitations under the License. */ -#ifndef ANDROID_HARDWARE_AUDIO_V2_0_STREAM_H -#define ANDROID_HARDWARE_AUDIO_V2_0_STREAM_H +#include #include -#include #include #include #include -#include "ParametersUtil.h" +#include namespace android { namespace hardware { namespace audio { -namespace V2_0 { +namespace AUDIO_HAL_VERSION { namespace implementation { -using ::android::hardware::audio::common::V2_0::AudioChannelMask; -using ::android::hardware::audio::common::V2_0::AudioDevice; -using ::android::hardware::audio::common::V2_0::AudioFormat; -using ::android::hardware::audio::V2_0::DeviceAddress; -using ::android::hardware::audio::V2_0::IStream; -using ::android::hardware::audio::V2_0::ParameterValue; -using ::android::hardware::audio::V2_0::Result; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioChannelMask; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioDevice; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioFormat; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::implementation::AudioChannelBitfield; +using ::android::hardware::audio::AUDIO_HAL_VERSION::DeviceAddress; +using ::android::hardware::audio::AUDIO_HAL_VERSION::IStream; +using ::android::hardware::audio::AUDIO_HAL_VERSION::ParameterValue; +using ::android::hardware::audio::AUDIO_HAL_VERSION::Result; using ::android::hardware::Return; using ::android::hardware::Void; using ::android::hardware::hidl_vec; @@ -56,36 +55,54 @@ struct Stream : public IStream, public ParametersUtil { */ static constexpr uint32_t MAX_BUFFER_SIZE = 2 << 30 /* == 1GiB */; - // Methods from ::android::hardware::audio::V2_0::IStream follow. 
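
Two of the 4.0-only methods above perform small unit conversions before handing values to the legacy parameter API: setBtHfpVolume rescales a normalized [0,1] volume onto [0,15], and updateRotation multiplies the rotation enum by 90 to get degrees (assuming, as that multiplication implies, that the enum counts quarter turns). A worked example with plain ints and floats:

#include <cmath>
#include <iostream>

int hfpVolumeToLegacy(float normalized) {
    // 0.0 maps to 0 and 1.0 maps to 15, rounded to the nearest step.
    return static_cast<int>(std::round(normalized * 15));
}

int rotationToDegrees(int quarterTurns) {
    // 0..3 quarter turns become 0, 90, 180, 270 degrees.
    return quarterTurns * 90;
}

int main() {
    std::cout << hfpVolumeToLegacy(0.5f) << "\n";  // 8
    std::cout << rotationToDegrees(3) << "\n";     // 270
}
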
- Return getFrameSize() override; - Return getFrameCount() override; - Return getBufferSize() override; - Return getSampleRate() override; - Return getSupportedSampleRates(getSupportedSampleRates_cb _hidl_cb) override; - Return setSampleRate(uint32_t sampleRateHz) override; - Return getChannelMask() override; - Return getSupportedChannelMasks(getSupportedChannelMasks_cb _hidl_cb) override; - Return setChannelMask(AudioChannelMask mask) override; - Return getFormat() override; - Return getSupportedFormats(getSupportedFormats_cb _hidl_cb) override; - Return setFormat(AudioFormat format) override; - Return getAudioProperties(getAudioProperties_cb _hidl_cb) override; - Return addEffect(uint64_t effectId) override; - Return removeEffect(uint64_t effectId) override; - Return standby() override; - Return getDevice() override; - Return setDevice(const DeviceAddress& address) override; - Return setConnectedState(const DeviceAddress& address, bool connected) override; - Return setHwAvSync(uint32_t hwAvSync) override; - Return getParameters( - const hidl_vec& keys, getParameters_cb _hidl_cb) override; - Return setParameters(const hidl_vec& parameters) override; - Return debugDump(const hidl_handle& fd) override; + // Methods from ::android::hardware::audio::AUDIO_HAL_VERSION::IStream follow. + Return getFrameSize() override; + Return getFrameCount() override; + Return getBufferSize() override; + Return getSampleRate() override; +#ifdef AUDIO_HAL_VERSION_2_0 + Return getSupportedSampleRates(getSupportedSampleRates_cb _hidl_cb) override; + Return getSupportedChannelMasks(getSupportedChannelMasks_cb _hidl_cb) override; +#endif + Return getSupportedSampleRates(AudioFormat format, getSupportedSampleRates_cb _hidl_cb); + Return getSupportedChannelMasks(AudioFormat format, getSupportedChannelMasks_cb _hidl_cb); + Return setSampleRate(uint32_t sampleRateHz) override; + Return getChannelMask() override; + Return setChannelMask(AudioChannelBitfield mask) override; + Return getFormat() override; + Return getSupportedFormats(getSupportedFormats_cb _hidl_cb) override; + Return setFormat(AudioFormat format) override; + Return getAudioProperties(getAudioProperties_cb _hidl_cb) override; + Return addEffect(uint64_t effectId) override; + Return removeEffect(uint64_t effectId) override; + Return standby() override; +#ifdef AUDIO_HAL_VERSION_2_0 + Return getDevice() override; + Return setDevice(const DeviceAddress& address) override; + Return getParameters(const hidl_vec& keys, + getParameters_cb _hidl_cb) override; + Return setParameters(const hidl_vec& parameters) override; + Return setConnectedState(const DeviceAddress& address, bool connected) override; +#elif defined(AUDIO_HAL_VERSION_4_0) + Return getDevices(getDevices_cb _hidl_cb) override; + Return setDevices(const hidl_vec& devices) override; + Return getParameters(const hidl_vec& context, + const hidl_vec& keys, + getParameters_cb _hidl_cb) override; + Return setParameters(const hidl_vec& context, + const hidl_vec& parameters) override; +#endif + Return setHwAvSync(uint32_t hwAvSync) override; Return start() override; Return stop() override; Return createMmapBuffer(int32_t minSizeFrames, createMmapBuffer_cb _hidl_cb) override; Return getMmapPosition(getMmapPosition_cb _hidl_cb) override; - Return close() override; + Return close() override; + + Return debug(const hidl_handle& fd, const hidl_vec& options) override; +#ifdef AUDIO_HAL_VERSION_2_0 + Return debugDump(const hidl_handle& fd) override; +#endif // Utility methods for extending interfaces. 
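
Stream, like Device, reuses ParametersUtil for all keyed parameters: the shared helper assembles and parses "key=value" strings, and each wrapper supplies the legacy get/set entry points through the hal*Parameters hooks declared further down. A simplified sketch of that bridge (illustrative names, not the real classes):

#include <iostream>
#include <string>

class ParametersUtilSketch {
   public:
    virtual ~ParametersUtilSketch() = default;
    // The shared helper only ever builds and parses "key=value" strings.
    bool setParam(const std::string& key, int value) {
        return halSetParameters(key + "=" + std::to_string(value)) == 0;
    }

   protected:
    virtual int halSetParameters(const std::string& keysAndValues) = 0;
};

class StreamSketch : public ParametersUtilSketch {
   protected:
    int halSetParameters(const std::string& keysAndValues) override {
        // A real stream would forward to audio_stream_t::set_parameters() here.
        std::cout << "set_parameters(\"" << keysAndValues << "\")\n";
        return 0;
    }
};

int main() {
    StreamSketch stream;
    stream.setParam("sampling_rate", 48000);  // -> set_parameters("sampling_rate=48000")
}
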
static Result analyzeStatus(const char* funcName, int status); @@ -93,7 +110,7 @@ struct Stream : public IStream, public ParametersUtil { const std::vector& ignoreErrors); private: - audio_stream_t *mStream; + audio_stream_t* mStream; virtual ~Stream(); @@ -102,21 +119,20 @@ struct Stream : public IStream, public ParametersUtil { int halSetParameters(const char* keysAndValues) override; }; - template struct StreamMmap : public RefBase { explicit StreamMmap(T* stream) : mStream(stream) {} Return start(); Return stop(); - Return createMmapBuffer( - int32_t minSizeFrames, size_t frameSize, IStream::createMmapBuffer_cb _hidl_cb); + Return createMmapBuffer(int32_t minSizeFrames, size_t frameSize, + IStream::createMmapBuffer_cb _hidl_cb); Return getMmapPosition(IStream::getMmapPosition_cb _hidl_cb); - private: - StreamMmap() {} + private: + StreamMmap() {} - T *mStream; + T* mStream; }; template @@ -143,13 +159,12 @@ Return StreamMmap::createMmapBuffer(int32_t minSizeFrames, size_t frame if (mStream->create_mmap_buffer != NULL) { struct audio_mmap_buffer_info halInfo; retval = Stream::analyzeStatus( - "create_mmap_buffer", - mStream->create_mmap_buffer(mStream, minSizeFrames, &halInfo)); + "create_mmap_buffer", mStream->create_mmap_buffer(mStream, minSizeFrames, &halInfo)); if (retval == Result::OK) { hidlHandle = native_handle_create(1, 0); hidlHandle->data[0] = halInfo.shared_memory_fd; - info.sharedMemory = hidl_memory("audio_buffer", hidlHandle, - frameSize *halInfo.buffer_size_frames); + info.sharedMemory = + hidl_memory("audio_buffer", hidlHandle, frameSize * halInfo.buffer_size_frames); info.bufferSizeFrames = halInfo.buffer_size_frames; info.burstSizeFrames = halInfo.burst_size_frames; } @@ -168,9 +183,8 @@ Return StreamMmap::getMmapPosition(IStream::getMmapPosition_cb _hidl_cb if (mStream->get_mmap_position != NULL) { struct audio_mmap_position halPosition; - retval = Stream::analyzeStatus( - "get_mmap_position", - mStream->get_mmap_position(mStream, &halPosition)); + retval = Stream::analyzeStatus("get_mmap_position", + mStream->get_mmap_position(mStream, &halPosition)); if (retval == Result::OK) { position.timeNanoseconds = halPosition.time_nanoseconds; position.positionFrames = halPosition.position_frames; @@ -181,9 +195,7 @@ Return StreamMmap::getMmapPosition(IStream::getMmapPosition_cb _hidl_cb } } // namespace implementation -} // namespace V2_0 +} // namespace AUDIO_HAL_VERSION } // namespace audio } // namespace hardware } // namespace android - -#endif // ANDROID_HARDWARE_AUDIO_V2_0_STREAM_H diff --git a/audio/2.0/default/Stream.cpp b/audio/core/all-versions/default/include/core/all-versions/default/Stream.impl.h similarity index 50% rename from audio/2.0/default/Stream.cpp rename to audio/core/all-versions/default/include/core/all-versions/default/Stream.impl.h index 55ae6dbd014ceae26a743bbe02e0a9d76fa77df4..72d7a3760a64321f8ed9b4097ff34d72d5760da8 100644 --- a/audio/2.0/default/Stream.cpp +++ b/audio/core/all-versions/default/include/core/all-versions/default/Stream.impl.h @@ -14,31 +14,24 @@ * limitations under the License. 
*/ -#include +#include -#define LOG_TAG "StreamHAL" +#include +#include #include #include #include -#include #include #include -#include "Conversions.h" -#include "EffectMap.h" -#include "Stream.h" -#include "Util.h" - namespace android { namespace hardware { namespace audio { -namespace V2_0 { +namespace AUDIO_HAL_VERSION { namespace implementation { -Stream::Stream(audio_stream_t* stream) - : mStream(stream) { -} +Stream::Stream(audio_stream_t* stream) : mStream(stream) {} Stream::~Stream() { mStream = nullptr; @@ -64,76 +57,110 @@ int Stream::halSetParameters(const char* keysAndValues) { return mStream->set_parameters(mStream, keysAndValues); } -// Methods from ::android::hardware::audio::V2_0::IStream follow. -Return Stream::getFrameSize() { +// Methods from ::android::hardware::audio::AUDIO_HAL_VERSION::IStream follow. +Return Stream::getFrameSize() { // Needs to be implemented by interface subclasses. But can't be declared as pure virtual, // since interface subclasses implementation do not inherit from this class. LOG_ALWAYS_FATAL("Stream::getFrameSize is pure abstract"); - return uint64_t {}; + return uint64_t{}; } -Return Stream::getFrameCount() { +Return Stream::getFrameCount() { int halFrameCount; Result retval = getParam(AudioParameter::keyFrameCount, &halFrameCount); return retval == Result::OK ? halFrameCount : 0; } -Return Stream::getBufferSize() { +Return Stream::getBufferSize() { return mStream->get_buffer_size(mStream); } -Return Stream::getSampleRate() { +Return Stream::getSampleRate() { return mStream->get_sample_rate(mStream); } -Return Stream::getSupportedSampleRates(getSupportedSampleRates_cb _hidl_cb) { +#ifdef AUDIO_HAL_VERSION_2_0 +Return Stream::getSupportedSampleRates(getSupportedSampleRates_cb _hidl_cb) { + return getSupportedSampleRates(getFormat(), _hidl_cb); +} +Return Stream::getSupportedChannelMasks(getSupportedChannelMasks_cb _hidl_cb) { + return getSupportedChannelMasks(getFormat(), _hidl_cb); +} +#endif + +Return Stream::getSupportedSampleRates(AudioFormat format, + getSupportedSampleRates_cb _hidl_cb) { + AudioParameter context; + context.addInt(String8(AUDIO_PARAMETER_STREAM_FORMAT), int(format)); String8 halListValue; - Result result = getParam(AudioParameter::keyStreamSupportedSamplingRates, &halListValue); + Result result = + getParam(AudioParameter::keyStreamSupportedSamplingRates, &halListValue, context); hidl_vec sampleRates; SortedVector halSampleRates; if (result == Result::OK) { - halSampleRates = samplingRatesFromString( - halListValue.string(), AudioParameter::valueListSeparator); + halSampleRates = + samplingRatesFromString(halListValue.string(), AudioParameter::valueListSeparator); sampleRates.setToExternal(halSampleRates.editArray(), halSampleRates.size()); + // Legacy get_parameter does not return a status_t, thus can not advertise of failure. + // Note that this method must succeed (non empty list) if the format is supported. 
+ if (sampleRates.size() == 0) { + result = Result::NOT_SUPPORTED; + } } +#ifdef AUDIO_HAL_VERSION_2_0 _hidl_cb(sampleRates); +#elif AUDIO_HAL_VERSION_4_0 + _hidl_cb(result, sampleRates); +#endif return Void(); } -Return Stream::setSampleRate(uint32_t sampleRateHz) { - return setParam(AudioParameter::keySamplingRate, static_cast(sampleRateHz)); -} - -Return Stream::getChannelMask() { - return AudioChannelMask(mStream->get_channels(mStream)); -} - -Return Stream::getSupportedChannelMasks(getSupportedChannelMasks_cb _hidl_cb) { +Return Stream::getSupportedChannelMasks(AudioFormat format, + getSupportedChannelMasks_cb _hidl_cb) { + AudioParameter context; + context.addInt(String8(AUDIO_PARAMETER_STREAM_FORMAT), int(format)); String8 halListValue; - Result result = getParam(AudioParameter::keyStreamSupportedChannels, &halListValue); - hidl_vec channelMasks; + Result result = getParam(AudioParameter::keyStreamSupportedChannels, &halListValue, context); + hidl_vec channelMasks; SortedVector halChannelMasks; if (result == Result::OK) { - halChannelMasks = channelMasksFromString( - halListValue.string(), AudioParameter::valueListSeparator); + halChannelMasks = + channelMasksFromString(halListValue.string(), AudioParameter::valueListSeparator); channelMasks.resize(halChannelMasks.size()); for (size_t i = 0; i < halChannelMasks.size(); ++i) { - channelMasks[i] = AudioChannelMask(halChannelMasks[i]); + channelMasks[i] = AudioChannelBitfield(halChannelMasks[i]); + } + // Legacy get_parameter does not return a status_t, thus can not advertise of failure. + // Note that this method must succeed (non empty list) if the format is supported. + if (channelMasks.size() == 0) { + result = Result::NOT_SUPPORTED; } } - _hidl_cb(channelMasks); +#ifdef AUDIO_HAL_VERSION_2_0 + _hidl_cb(channelMasks); +#elif defined(AUDIO_HAL_VERSION_4_0) + _hidl_cb(result, channelMasks); +#endif return Void(); } -Return Stream::setChannelMask(AudioChannelMask mask) { +Return Stream::setSampleRate(uint32_t sampleRateHz) { + return setParam(AudioParameter::keySamplingRate, static_cast(sampleRateHz)); +} + +Return Stream::getChannelMask() { + return AudioChannelBitfield(mStream->get_channels(mStream)); +} + +Return Stream::setChannelMask(AudioChannelBitfield mask) { return setParam(AudioParameter::keyChannels, static_cast(mask)); } -Return Stream::getFormat() { +Return Stream::getFormat() { return AudioFormat(mStream->get_format(mStream)); } -Return Stream::getSupportedFormats(getSupportedFormats_cb _hidl_cb) { +Return Stream::getSupportedFormats(getSupportedFormats_cb _hidl_cb) { String8 halListValue; Result result = getParam(AudioParameter::keyStreamSupportedFormats, &halListValue); hidl_vec formats; @@ -145,23 +172,23 @@ Return Stream::getSupportedFormats(getSupportedFormats_cb _hidl_cb) { formats[i] = AudioFormat(halFormats[i]); } } - _hidl_cb(formats); + _hidl_cb(formats); return Void(); } -Return Stream::setFormat(AudioFormat format) { +Return Stream::setFormat(AudioFormat format) { return setParam(AudioParameter::keyFormat, static_cast(format)); } -Return Stream::getAudioProperties(getAudioProperties_cb _hidl_cb) { +Return Stream::getAudioProperties(getAudioProperties_cb _hidl_cb) { uint32_t halSampleRate = mStream->get_sample_rate(mStream); audio_channel_mask_t halMask = mStream->get_channels(mStream); audio_format_t halFormat = mStream->get_format(mStream); - _hidl_cb(halSampleRate, AudioChannelMask(halMask), AudioFormat(halFormat)); + _hidl_cb(halSampleRate, AudioChannelBitfield(halMask), AudioFormat(halFormat)); return 
Void(); } -Return Stream::addEffect(uint64_t effectId) { +Return Stream::addEffect(uint64_t effectId) { effect_handle_t halEffect = EffectMap::getInstance().get(effectId); if (halEffect != NULL) { return analyzeStatus("add_audio_effect", mStream->add_audio_effect(mStream, halEffect)); @@ -171,94 +198,130 @@ Return Stream::addEffect(uint64_t effectId) { } } -Return Stream::removeEffect(uint64_t effectId) { +Return Stream::removeEffect(uint64_t effectId) { effect_handle_t halEffect = EffectMap::getInstance().get(effectId); if (halEffect != NULL) { - return analyzeStatus( - "remove_audio_effect", mStream->remove_audio_effect(mStream, halEffect)); + return analyzeStatus("remove_audio_effect", + mStream->remove_audio_effect(mStream, halEffect)); } else { ALOGW("Invalid effect ID passed from client: %" PRIu64, effectId); return Result::INVALID_ARGUMENTS; } } -Return Stream::standby() { +Return Stream::standby() { return analyzeStatus("standby", mStream->standby(mStream)); } -Return Stream::getDevice() { - int device; +Return Stream::setHwAvSync(uint32_t hwAvSync) { + return setParam(AudioParameter::keyStreamHwAvSync, static_cast(hwAvSync)); +} + +#ifdef AUDIO_HAL_VERSION_2_0 +Return Stream::getDevice() { + int device = 0; Result retval = getParam(AudioParameter::keyRouting, &device); return retval == Result::OK ? static_cast(device) : AudioDevice::NONE; } -Return Stream::setDevice(const DeviceAddress& address) { - char* halDeviceAddress = - audio_device_address_to_parameter( - static_cast(address.device), - deviceAddressToHal(address).c_str()); - AudioParameter params((String8(halDeviceAddress))); - free(halDeviceAddress); - params.addInt( - String8(AudioParameter::keyRouting), static_cast(address.device)); - return setParams(params); +Return Stream::setDevice(const DeviceAddress& address) { + return setParam(AudioParameter::keyRouting, address); } -Return Stream::setConnectedState(const DeviceAddress& address, bool connected) { - return setParam( - connected ? AudioParameter::keyStreamConnect : AudioParameter::keyStreamDisconnect, - deviceAddressToHal(address).c_str()); +Return Stream::getParameters(const hidl_vec& keys, getParameters_cb _hidl_cb) { + getParametersImpl({} /* context */, keys, _hidl_cb); + return Void(); } -Return Stream::setHwAvSync(uint32_t hwAvSync) { - return setParam(AudioParameter::keyStreamHwAvSync, static_cast(hwAvSync)); +Return Stream::setParameters(const hidl_vec& parameters) { + return setParametersImpl({} /* context */, parameters); } -Return Stream::getParameters(const hidl_vec& keys, getParameters_cb _hidl_cb) { - getParametersImpl(keys, _hidl_cb); - return Void(); +Return Stream::setConnectedState(const DeviceAddress& address, bool connected) { + return setParam( + connected ? 
AudioParameter::keyStreamConnect : AudioParameter::keyStreamDisconnect, + address); } - -Return Stream::setParameters(const hidl_vec& parameters) { - return setParametersImpl(parameters); +#elif defined(AUDIO_HAL_VERSION_4_0) +Return Stream::getDevices(getDevices_cb _hidl_cb) { + int device = 0; + Result retval = getParam(AudioParameter::keyRouting, &device); + hidl_vec devices; + if (retval == Result::OK) { + devices.resize(1); + devices[0].device = static_cast(device); + } + _hidl_cb(retval, devices); + return Void(); } -Return Stream::debugDump(const hidl_handle& fd) { - if (fd.getNativeHandle() != nullptr && fd->numFds == 1) { - analyzeStatus("dump", mStream->dump(mStream, fd->data[0])); +Return Stream::setDevices(const hidl_vec& devices) { + // FIXME: can the legacy API set multiple device with address ? + if (devices.size() > 1) { + return Result::NOT_SUPPORTED; + } + DeviceAddress address; + if (devices.size() == 1) { + address = devices[0]; + } else { + address.device = AudioDevice::NONE; } + return setParam(AudioParameter::keyRouting, address); +} +Return Stream::getParameters(const hidl_vec& context, + const hidl_vec& keys, getParameters_cb _hidl_cb) { + getParametersImpl(context, keys, _hidl_cb); return Void(); } -Return Stream::start() { +Return Stream::setParameters(const hidl_vec& context, + const hidl_vec& parameters) { + return setParametersImpl(context, parameters); +} +#endif + +Return Stream::start() { return Result::NOT_SUPPORTED; } -Return Stream::stop() { +Return Stream::stop() { return Result::NOT_SUPPORTED; } -Return Stream::createMmapBuffer(int32_t minSizeFrames __unused, - createMmapBuffer_cb _hidl_cb) { +Return Stream::createMmapBuffer(int32_t minSizeFrames __unused, + createMmapBuffer_cb _hidl_cb) { Result retval(Result::NOT_SUPPORTED); MmapBufferInfo info; _hidl_cb(retval, info); return Void(); } -Return Stream::getMmapPosition(getMmapPosition_cb _hidl_cb) { +Return Stream::getMmapPosition(getMmapPosition_cb _hidl_cb) { Result retval(Result::NOT_SUPPORTED); MmapPosition position; _hidl_cb(retval, position); return Void(); } -Return Stream::close() { +Return Stream::close() { return Result::NOT_SUPPORTED; } -} // namespace implementation -} // namespace V2_0 +Return Stream::debug(const hidl_handle& fd, const hidl_vec& /* options */) { + if (fd.getNativeHandle() != nullptr && fd->numFds == 1) { + analyzeStatus("dump", mStream->dump(mStream, fd->data[0])); + } + return Void(); +} + +#ifdef AUDIO_HAL_VERSION_2_0 +Return Stream::debugDump(const hidl_handle& fd) { + return debug(fd, {} /* options */); +} +#endif + +} // namespace implementation +} // namespace AUDIO_HAL_VERSION } // namespace audio } // namespace hardware } // namespace android diff --git a/audio/core/all-versions/default/include/core/all-versions/default/StreamIn.h b/audio/core/all-versions/default/include/core/all-versions/default/StreamIn.h new file mode 100644 index 0000000000000000000000000000000000000000..f226e63f70aa691fac071e9c0a363fa05e59ffab --- /dev/null +++ b/audio/core/all-versions/default/include/core/all-versions/default/StreamIn.h @@ -0,0 +1,140 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include +#include + +#include +#include +#include +#include +#include + +namespace android { +namespace hardware { +namespace audio { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioChannelMask; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioDevice; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioFormat; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioSource; +using ::android::hardware::audio::AUDIO_HAL_VERSION::DeviceAddress; +using ::android::hardware::audio::AUDIO_HAL_VERSION::IStream; +using ::android::hardware::audio::AUDIO_HAL_VERSION::IStreamIn; +using ::android::hardware::audio::AUDIO_HAL_VERSION::ParameterValue; +using ::android::hardware::audio::AUDIO_HAL_VERSION::Result; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; + +struct StreamIn : public IStreamIn { + typedef MessageQueue CommandMQ; + typedef MessageQueue DataMQ; + typedef MessageQueue StatusMQ; + + StreamIn(const sp& device, audio_stream_in_t* stream); + + // Methods from ::android::hardware::audio::AUDIO_HAL_VERSION::IStream follow. + Return getFrameSize() override; + Return getFrameCount() override; + Return getBufferSize() override; + Return getSampleRate() override; +#ifdef AUDIO_HAL_VERSION_2_0 + Return getSupportedSampleRates(getSupportedSampleRates_cb _hidl_cb) override; + Return getSupportedChannelMasks(getSupportedChannelMasks_cb _hidl_cb) override; +#endif + Return getSupportedSampleRates(AudioFormat format, getSupportedSampleRates_cb _hidl_cb); + Return getSupportedChannelMasks(AudioFormat format, getSupportedChannelMasks_cb _hidl_cb); + Return setSampleRate(uint32_t sampleRateHz) override; + Return getChannelMask() override; + Return setChannelMask(AudioChannelBitfield mask) override; + Return getFormat() override; + Return getSupportedFormats(getSupportedFormats_cb _hidl_cb) override; + Return setFormat(AudioFormat format) override; + Return getAudioProperties(getAudioProperties_cb _hidl_cb) override; + Return addEffect(uint64_t effectId) override; + Return removeEffect(uint64_t effectId) override; + Return standby() override; +#ifdef AUDIO_HAL_VERSION_2_0 + Return getDevice() override; + Return setDevice(const DeviceAddress& address) override; + Return getParameters(const hidl_vec& keys, + getParameters_cb _hidl_cb) override; + Return setParameters(const hidl_vec& parameters) override; + Return setConnectedState(const DeviceAddress& address, bool connected) override; +#elif defined(AUDIO_HAL_VERSION_4_0) + Return getDevices(getDevices_cb _hidl_cb) override; + Return setDevices(const hidl_vec& devices) override; + Return getParameters(const hidl_vec& context, + const hidl_vec& keys, + getParameters_cb _hidl_cb) override; + Return setParameters(const hidl_vec& context, + const hidl_vec& parameters) override; +#endif + Return setHwAvSync(uint32_t hwAvSync) override; + Return close() override; + + Return debug(const 
hidl_handle& fd, const hidl_vec& options) override; +#ifdef AUDIO_HAL_VERSION_2_0 + Return debugDump(const hidl_handle& fd) override; +#endif + + // Methods from ::android::hardware::audio::AUDIO_HAL_VERSION::IStreamIn follow. + Return getAudioSource(getAudioSource_cb _hidl_cb) override; + Return setGain(float gain) override; + Return prepareForReading(uint32_t frameSize, uint32_t framesCount, + prepareForReading_cb _hidl_cb) override; + Return getInputFramesLost() override; + Return getCapturePosition(getCapturePosition_cb _hidl_cb) override; + Return start() override; + Return stop() override; + Return createMmapBuffer(int32_t minSizeFrames, createMmapBuffer_cb _hidl_cb) override; + Return getMmapPosition(getMmapPosition_cb _hidl_cb) override; +#ifdef AUDIO_HAL_VERSION_4_0 + Return updateSinkMetadata(const SinkMetadata& sinkMetadata) override; + Return getActiveMicrophones(getActiveMicrophones_cb _hidl_cb) override; +#endif + + static Result getCapturePositionImpl(audio_stream_in_t* stream, uint64_t* frames, + uint64_t* time); + + private: + bool mIsClosed; + const sp mDevice; + audio_stream_in_t* mStream; + const sp mStreamCommon; + const sp> mStreamMmap; + std::unique_ptr mCommandMQ; + std::unique_ptr mDataMQ; + std::unique_ptr mStatusMQ; + EventFlag* mEfGroup; + std::atomic mStopReadThread; + sp mReadThread; + + virtual ~StreamIn(); +}; + +} // namespace implementation +} // namespace AUDIO_HAL_VERSION +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/2.0/default/StreamIn.cpp b/audio/core/all-versions/default/include/core/all-versions/default/StreamIn.impl.h similarity index 69% rename from audio/2.0/default/StreamIn.cpp rename to audio/core/all-versions/default/include/core/all-versions/default/StreamIn.impl.h index 61d5d8ea4df3397380bbe6f138f9f65ecfb157eb..64c85ab5fc049c9eb5dd6f8e7ef10ea5e73963d4 100644 --- a/audio/2.0/default/StreamIn.cpp +++ b/audio/core/all-versions/default/include/core/all-versions/default/StreamIn.impl.h @@ -14,7 +14,8 @@ * limitations under the License. */ -#define LOG_TAG "StreamInHAL" +#include + //#define LOG_NDEBUG 0 #define ATRACE_TAG ATRACE_TAG_AUDIO @@ -23,27 +24,24 @@ #include #include -#include "StreamIn.h" -#include "Util.h" - -using ::android::hardware::audio::V2_0::MessageQueueFlagBits; +using ::android::hardware::audio::AUDIO_HAL_VERSION::MessageQueueFlagBits; +#include "Conversions.h" namespace android { namespace hardware { namespace audio { -namespace V2_0 { +namespace AUDIO_HAL_VERSION { namespace implementation { -using ::android::hardware::audio::common::V2_0::ThreadInfo; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::ThreadInfo; namespace { class ReadThread : public Thread { public: // ReadThread's lifespan never exceeds StreamIn's lifespan. 
- ReadThread(std::atomic* stop, audio_stream_in_t* stream, - StreamIn::CommandMQ* commandMQ, StreamIn::DataMQ* dataMQ, - StreamIn::StatusMQ* statusMQ, EventFlag* efGroup) + ReadThread(std::atomic* stop, audio_stream_in_t* stream, StreamIn::CommandMQ* commandMQ, + StreamIn::DataMQ* dataMQ, StreamIn::StatusMQ* statusMQ, EventFlag* efGroup) : Thread(false /*canCallJava*/), mStop(stop), mStream(stream), @@ -99,8 +97,7 @@ void ReadThread::doRead() { void ReadThread::doGetCapturePosition() { mStatus.retval = StreamIn::getCapturePositionImpl( - mStream, &mStatus.reply.capturePosition.frames, - &mStatus.reply.capturePosition.time); + mStream, &mStatus.reply.capturePosition.frames, &mStatus.reply.capturePosition.time); } bool ReadThread::threadLoop() { @@ -109,10 +106,8 @@ bool ReadThread::threadLoop() { // as the Thread uses mutexes, and this can lead to priority inversion. while (!std::atomic_load_explicit(mStop, std::memory_order_acquire)) { uint32_t efState = 0; - mEfGroup->wait(static_cast(MessageQueueFlagBits::NOT_FULL), - &efState); - if (!(efState & - static_cast(MessageQueueFlagBits::NOT_FULL))) { + mEfGroup->wait(static_cast(MessageQueueFlagBits::NOT_FULL), &efState); + if (!(efState & static_cast(MessageQueueFlagBits::NOT_FULL))) { continue; // Nothing to do. } if (!mCommandMQ->read(&mParameters)) { @@ -127,8 +122,7 @@ bool ReadThread::threadLoop() { doGetCapturePosition(); break; default: - ALOGE("Unknown read thread command code %d", - mParameters.command); + ALOGE("Unknown read thread command code %d", mParameters.command); mStatus.retval = Result::NOT_SUPPORTED; break; } @@ -162,14 +156,13 @@ StreamIn::~StreamIn() { } if (mEfGroup) { status_t status = EventFlag::deleteEventFlag(&mEfGroup); - ALOGE_IF(status, "read MQ event flag deletion error: %s", - strerror(-status)); + ALOGE_IF(status, "read MQ event flag deletion error: %s", strerror(-status)); } mDevice->closeInputStream(mStream); mStream = nullptr; } -// Methods from ::android::hardware::audio::V2_0::IStream follow. +// Methods from ::android::hardware::audio::AUDIO_HAL_VERSION::IStream follow. 
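// Editorial sketch (not part of the patch): ReadThread above implements the HAL half of the
// fast-message-queue protocol -- it waits on NOT_FULL, pops a command from CommandMQ, fills
// DataMQ, then pushes a status and wakes NOT_EMPTY. A client holding the descriptors returned
// by prepareForReading() would drive it roughly like this. The nested ReadParameters /
// ReadCommand / ReadStatus names are assumed from IStreamIn.hal, error handling is minimal,
// and the usual FMQ/HIDL headers (<fmq/MessageQueue.h>, <fmq/EventFlag.h>) are presumed included.
ssize_t clientRead(StreamIn::CommandMQ* commandMQ, StreamIn::DataMQ* dataMQ,
                   StreamIn::StatusMQ* statusMQ, EventFlag* efGroup, void* buffer, size_t bytes) {
    IStreamIn::ReadParameters params;
    params.command = IStreamIn::ReadCommand::READ;
    params.params.read = bytes;
    if (!commandMQ->write(&params)) return -EAGAIN;  // command queue full
    efGroup->wake(static_cast<uint32_t>(MessageQueueFlagBits::NOT_FULL));  // wake the ReadThread

    uint32_t efState = 0;
    efGroup->wait(static_cast<uint32_t>(MessageQueueFlagBits::NOT_EMPTY), &efState);  // wait for reply
    IStreamIn::ReadStatus status;
    if (!statusMQ->read(&status) || status.retval != Result::OK) return -EIO;
    const size_t avail = std::min<size_t>(dataMQ->availableToRead(), bytes);
    return dataMQ->read(static_cast<uint8_t*>(buffer), avail) ? static_cast<ssize_t>(avail) : -EIO;
}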
Return StreamIn::getFrameSize() { return audio_stream_in_frame_size(mStream); } @@ -186,25 +179,33 @@ Return StreamIn::getSampleRate() { return mStreamCommon->getSampleRate(); } -Return StreamIn::getSupportedSampleRates( - getSupportedSampleRates_cb _hidl_cb) { +#ifdef AUDIO_HAL_VERSION_2_0 +Return StreamIn::getSupportedChannelMasks(getSupportedChannelMasks_cb _hidl_cb) { + return mStreamCommon->getSupportedChannelMasks(_hidl_cb); +} +Return StreamIn::getSupportedSampleRates(getSupportedSampleRates_cb _hidl_cb) { return mStreamCommon->getSupportedSampleRates(_hidl_cb); } +#endif + +Return StreamIn::getSupportedChannelMasks(AudioFormat format, + getSupportedChannelMasks_cb _hidl_cb) { + return mStreamCommon->getSupportedChannelMasks(format, _hidl_cb); +} +Return StreamIn::getSupportedSampleRates(AudioFormat format, + getSupportedSampleRates_cb _hidl_cb) { + return mStreamCommon->getSupportedSampleRates(format, _hidl_cb); +} Return StreamIn::setSampleRate(uint32_t sampleRateHz) { return mStreamCommon->setSampleRate(sampleRateHz); } -Return StreamIn::getChannelMask() { +Return StreamIn::getChannelMask() { return mStreamCommon->getChannelMask(); } -Return StreamIn::getSupportedChannelMasks( - getSupportedChannelMasks_cb _hidl_cb) { - return mStreamCommon->getSupportedChannelMasks(_hidl_cb); -} - -Return StreamIn::setChannelMask(AudioChannelMask mask) { +Return StreamIn::setChannelMask(AudioChannelBitfield mask) { return mStreamCommon->setChannelMask(mask); } @@ -236,36 +237,52 @@ Return StreamIn::standby() { return mStreamCommon->standby(); } -Return StreamIn::getDevice() { - return mStreamCommon->getDevice(); +Return StreamIn::setHwAvSync(uint32_t hwAvSync) { + return mStreamCommon->setHwAvSync(hwAvSync); } -Return StreamIn::setDevice(const DeviceAddress& address) { - return mStreamCommon->setDevice(address); +#ifdef AUDIO_HAL_VERSION_2_0 +Return StreamIn::setConnectedState(const DeviceAddress& address, bool connected) { + return mStreamCommon->setConnectedState(address, connected); } -Return StreamIn::setConnectedState(const DeviceAddress& address, - bool connected) { - return mStreamCommon->setConnectedState(address, connected); +Return StreamIn::getDevice() { + return mStreamCommon->getDevice(); } -Return StreamIn::setHwAvSync(uint32_t hwAvSync) { - return mStreamCommon->setHwAvSync(hwAvSync); +Return StreamIn::setDevice(const DeviceAddress& address) { + return mStreamCommon->setDevice(address); } -Return StreamIn::getParameters(const hidl_vec& keys, - getParameters_cb _hidl_cb) { +Return StreamIn::getParameters(const hidl_vec& keys, getParameters_cb _hidl_cb) { return mStreamCommon->getParameters(keys, _hidl_cb); } -Return StreamIn::setParameters( - const hidl_vec& parameters) { +Return StreamIn::setParameters(const hidl_vec& parameters) { return mStreamCommon->setParameters(parameters); } Return StreamIn::debugDump(const hidl_handle& fd) { return mStreamCommon->debugDump(fd); } +#elif defined(AUDIO_HAL_VERSION_4_0) +Return StreamIn::getDevices(getDevices_cb _hidl_cb) { + return mStreamCommon->getDevices(_hidl_cb); +} + +Return StreamIn::setDevices(const hidl_vec& devices) { + return mStreamCommon->setDevices(devices); +} +Return StreamIn::getParameters(const hidl_vec& context, + const hidl_vec& keys, getParameters_cb _hidl_cb) { + return mStreamCommon->getParameters(context, keys, _hidl_cb); +} + +Return StreamIn::setParameters(const hidl_vec& context, + const hidl_vec& parameters) { + return mStreamCommon->setParameters(context, parameters); +} +#endif Return StreamIn::start() { return 
mStreamMmap->start(); @@ -275,10 +292,9 @@ Return StreamIn::stop() { return mStreamMmap->stop(); } -Return StreamIn::createMmapBuffer(int32_t minSizeFrames, - createMmapBuffer_cb _hidl_cb) { - return mStreamMmap->createMmapBuffer( - minSizeFrames, audio_stream_in_frame_size(mStream), _hidl_cb); +Return StreamIn::createMmapBuffer(int32_t minSizeFrames, createMmapBuffer_cb _hidl_cb) { + return mStreamMmap->createMmapBuffer(minSizeFrames, audio_stream_in_frame_size(mStream), + _hidl_cb); } Return StreamIn::getMmapPosition(getMmapPosition_cb _hidl_cb) { @@ -297,11 +313,10 @@ Return StreamIn::close() { return Result::OK; } -// Methods from ::android::hardware::audio::V2_0::IStreamIn follow. +// Methods from ::android::hardware::audio::AUDIO_HAL_VERSION::IStreamIn follow. Return StreamIn::getAudioSource(getAudioSource_cb _hidl_cb) { int halSource; - Result retval = - mStreamCommon->getParam(AudioParameter::keyInputSource, &halSource); + Result retval = mStreamCommon->getParam(AudioParameter::keyInputSource, &halSource); AudioSource source(AudioSource::DEFAULT); if (retval == Result::OK) { source = AudioSource(halSource); @@ -318,16 +333,15 @@ Return StreamIn::setGain(float gain) { return Stream::analyzeStatus("set_gain", mStream->set_gain(mStream, gain)); } -Return StreamIn::prepareForReading(uint32_t frameSize, - uint32_t framesCount, +Return StreamIn::prepareForReading(uint32_t frameSize, uint32_t framesCount, prepareForReading_cb _hidl_cb) { status_t status; ThreadInfo threadInfo = {0, 0}; // Wrap the _hidl_cb to return an error auto sendError = [&threadInfo, &_hidl_cb](Result result) { - _hidl_cb(result, CommandMQ::Descriptor(), DataMQ::Descriptor(), - StatusMQ::Descriptor(), threadInfo); + _hidl_cb(result, CommandMQ::Descriptor(), DataMQ::Descriptor(), StatusMQ::Descriptor(), + threadInfo); }; @@ -341,8 +355,7 @@ Return StreamIn::prepareForReading(uint32_t frameSize, // Check frameSize and framesCount if (frameSize == 0 || framesCount == 0) { - ALOGE("Null frameSize (%u) or framesCount (%u)", frameSize, - framesCount); + ALOGE("Null frameSize (%u) or framesCount (%u)", frameSize, framesCount); sendError(Result::INVALID_ARGUMENTS); return Void(); } @@ -353,12 +366,10 @@ Return StreamIn::prepareForReading(uint32_t frameSize, sendError(Result::INVALID_ARGUMENTS); return Void(); } - std::unique_ptr tempDataMQ( - new DataMQ(frameSize * framesCount, true /* EventFlag */)); + std::unique_ptr tempDataMQ(new DataMQ(frameSize * framesCount, true /* EventFlag */)); std::unique_ptr tempStatusMQ(new StatusMQ(1)); - if (!tempCommandMQ->isValid() || !tempDataMQ->isValid() || - !tempStatusMQ->isValid()) { + if (!tempCommandMQ->isValid() || !tempDataMQ->isValid() || !tempStatusMQ->isValid()) { ALOGE_IF(!tempCommandMQ->isValid(), "command MQ is invalid"); ALOGE_IF(!tempDataMQ->isValid(), "data MQ is invalid"); ALOGE_IF(!tempStatusMQ->isValid(), "status MQ is invalid"); @@ -366,8 +377,7 @@ Return StreamIn::prepareForReading(uint32_t frameSize, return Void(); } EventFlag* tempRawEfGroup{}; - status = EventFlag::createEventFlag(tempDataMQ->getEventFlagWord(), - &tempRawEfGroup); + status = EventFlag::createEventFlag(tempDataMQ->getEventFlagWord(), &tempRawEfGroup); std::unique_ptr tempElfGroup( tempRawEfGroup, [](auto* ef) { EventFlag::deleteEventFlag(&ef); }); if (status != OK || !tempElfGroup) { @@ -377,9 +387,9 @@ Return StreamIn::prepareForReading(uint32_t frameSize, } // Create and launch the thread. 
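// Editorial note, kept as code: the "std::unique_ptr tempElfGroup(...)" guard a few lines above
// lost its template arguments in extraction. Judging from how it is constructed (a raw EventFlag*
// plus a capture-less lambda calling EventFlag::deleteEventFlag), its declaration is most likely
// of this shape -- an assumption, not text from the patch:
std::unique_ptr<EventFlag, void (*)(EventFlag*)> tempElfGroup(
    tempRawEfGroup, [](EventFlag* ef) { EventFlag::deleteEventFlag(&ef); });
// Ownership is released only on the success path (mEfGroup = tempElfGroup.release()), so every
// early error return still tears the event flag down automatically.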
- auto tempReadThread = std::make_unique( - &mStopReadThread, mStream, tempCommandMQ.get(), tempDataMQ.get(), - tempStatusMQ.get(), tempElfGroup.get()); + auto tempReadThread = + std::make_unique(&mStopReadThread, mStream, tempCommandMQ.get(), + tempDataMQ.get(), tempStatusMQ.get(), tempElfGroup.get()); if (!tempReadThread->init()) { ALOGW("failed to start reader thread: %s", strerror(-status)); sendError(Result::INVALID_ARGUMENTS); @@ -399,8 +409,8 @@ Return StreamIn::prepareForReading(uint32_t frameSize, mEfGroup = tempElfGroup.release(); threadInfo.pid = getpid(); threadInfo.tid = mReadThread->getTid(); - _hidl_cb(Result::OK, *mCommandMQ->getDesc(), *mDataMQ->getDesc(), - *mStatusMQ->getDesc(), threadInfo); + _hidl_cb(Result::OK, *mCommandMQ->getDesc(), *mDataMQ->getDesc(), *mStatusMQ->getDesc(), + threadInfo); return Void(); } @@ -409,8 +419,8 @@ Return StreamIn::getInputFramesLost() { } // static -Result StreamIn::getCapturePositionImpl(audio_stream_in_t* stream, - uint64_t* frames, uint64_t* time) { +Result StreamIn::getCapturePositionImpl(audio_stream_in_t* stream, uint64_t* frames, + uint64_t* time) { // HAL may have a stub function, always returning ENOSYS, don't // spam the log in this case. static const std::vector ignoredErrors{ENOSYS}; @@ -434,8 +444,50 @@ Return StreamIn::getCapturePosition(getCapturePosition_cb _hidl_cb) { return Void(); } +Return StreamIn::debug(const hidl_handle& fd, const hidl_vec& options) { + return mStreamCommon->debug(fd, options); +} + +#ifdef AUDIO_HAL_VERSION_4_0 +Return StreamIn::updateSinkMetadata(const SinkMetadata& sinkMetadata) { + if (mStream->update_sink_metadata == nullptr) { + return Void(); // not supported by the HAL + } + std::vector halTracks; + halTracks.reserve(sinkMetadata.tracks.size()); + for (auto& metadata : sinkMetadata.tracks) { + halTracks.push_back( + {.source = static_cast(metadata.source), .gain = metadata.gain}); + } + const sink_metadata_t halMetadata = { + .track_count = halTracks.size(), .tracks = halTracks.data(), + }; + mStream->update_sink_metadata(mStream, &halMetadata); + return Void(); +} + +Return StreamIn::getActiveMicrophones(getActiveMicrophones_cb _hidl_cb) { + Result retval = Result::NOT_SUPPORTED; + size_t actual_mics = AUDIO_MICROPHONE_MAX_COUNT; + audio_microphone_characteristic_t mic_array[AUDIO_MICROPHONE_MAX_COUNT]; + + hidl_vec microphones; + if (mStream->get_active_microphones != NULL && + mStream->get_active_microphones(mStream, &mic_array[0], &actual_mics) == 0) { + microphones.resize(actual_mics); + for (size_t i = 0; i < actual_mics; ++i) { + halToMicrophoneCharacteristics(µphones[i], mic_array[i]); + } + retval = Result::OK; + } + + _hidl_cb(retval, microphones); + return Void(); +} +#endif + } // namespace implementation -} // namespace V2_0 +} // namespace AUDIO_HAL_VERSION } // namespace audio } // namespace hardware } // namespace android diff --git a/audio/core/all-versions/default/include/core/all-versions/default/StreamOut.h b/audio/core/all-versions/default/include/core/all-versions/default/StreamOut.h new file mode 100644 index 0000000000000000000000000000000000000000..134d7b9bbe7358fef06a64aa1a3c81f8f9f4a98e --- /dev/null +++ b/audio/core/all-versions/default/include/core/all-versions/default/StreamOut.h @@ -0,0 +1,154 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include +#include + +#include +#include +#include +#include +#include + +namespace android { +namespace hardware { +namespace audio { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioChannelMask; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioDevice; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioFormat; +using ::android::hardware::audio::AUDIO_HAL_VERSION::AudioDrain; +using ::android::hardware::audio::AUDIO_HAL_VERSION::DeviceAddress; +using ::android::hardware::audio::AUDIO_HAL_VERSION::IStream; +using ::android::hardware::audio::AUDIO_HAL_VERSION::IStreamOut; +using ::android::hardware::audio::AUDIO_HAL_VERSION::IStreamOutCallback; +using ::android::hardware::audio::AUDIO_HAL_VERSION::ParameterValue; +using ::android::hardware::audio::AUDIO_HAL_VERSION::Result; +using ::android::hardware::audio::AUDIO_HAL_VERSION::TimeSpec; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; + +struct StreamOut : public IStreamOut { + typedef MessageQueue CommandMQ; + typedef MessageQueue DataMQ; + typedef MessageQueue StatusMQ; + + StreamOut(const sp& device, audio_stream_out_t* stream); + + // Methods from ::android::hardware::audio::AUDIO_HAL_VERSION::IStream follow. 
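// Editorial note, kept as code: the extraction that produced this patch text dropped everything
// inside angle brackets, so the typedefs at the top of StreamOut read "typedef MessageQueue
// CommandMQ;" and so on. Based on the parallel upstream headers they most plausibly are
// (WriteCommand/WriteStatus being nested types inherited from IStreamOut, kSynchronizedReadWrite
// coming from the FMQ library) -- a reconstruction, not patch text:
typedef MessageQueue<WriteCommand, kSynchronizedReadWrite> CommandMQ;
typedef MessageQueue<uint8_t, kSynchronizedReadWrite> DataMQ;
typedef MessageQueue<WriteStatus, kSynchronizedReadWrite> StatusMQ;
// StreamIn mirrors this, with ReadParameters and ReadStatus as the command and status elements.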
+ Return getFrameSize() override; + Return getFrameCount() override; + Return getBufferSize() override; + Return getSampleRate() override; +#ifdef AUDIO_HAL_VERSION_2_0 + Return getSupportedSampleRates(getSupportedSampleRates_cb _hidl_cb) override; + Return getSupportedChannelMasks(getSupportedChannelMasks_cb _hidl_cb) override; +#endif + Return getSupportedSampleRates(AudioFormat format, getSupportedSampleRates_cb _hidl_cb); + Return getSupportedChannelMasks(AudioFormat format, getSupportedChannelMasks_cb _hidl_cb); + Return setSampleRate(uint32_t sampleRateHz) override; + Return getChannelMask() override; + Return setChannelMask(AudioChannelBitfield mask) override; + Return getFormat() override; + Return getSupportedFormats(getSupportedFormats_cb _hidl_cb) override; + Return setFormat(AudioFormat format) override; + Return getAudioProperties(getAudioProperties_cb _hidl_cb) override; + Return addEffect(uint64_t effectId) override; + Return removeEffect(uint64_t effectId) override; + Return standby() override; +#ifdef AUDIO_HAL_VERSION_2_0 + Return getDevice() override; + Return setDevice(const DeviceAddress& address) override; + Return getParameters(const hidl_vec& keys, + getParameters_cb _hidl_cb) override; + Return setParameters(const hidl_vec& parameters) override; + Return setConnectedState(const DeviceAddress& address, bool connected) override; +#elif defined(AUDIO_HAL_VERSION_4_0) + Return getDevices(getDevices_cb _hidl_cb) override; + Return setDevices(const hidl_vec& devices) override; + Return getParameters(const hidl_vec& context, + const hidl_vec& keys, + getParameters_cb _hidl_cb) override; + Return setParameters(const hidl_vec& context, + const hidl_vec& parameters) override; +#endif + Return setHwAvSync(uint32_t hwAvSync) override; + Return close() override; + + Return debug(const hidl_handle& fd, const hidl_vec& options) override; +#ifdef AUDIO_HAL_VERSION_2_0 + Return debugDump(const hidl_handle& fd) override; +#endif + + // Methods from ::android::hardware::audio::AUDIO_HAL_VERSION::IStreamOut follow. 
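// Editorial note, kept as code: the bare "Return" tokens in the declarations above and below are
// HIDL Return<T> wrappers whose template arguments were also stripped. With the brackets restored,
// representative overrides most plausibly read as follows (T taken from the 2.0/4.0 .hal
// definitions; this is a reconstruction, not patch text):
Return<uint64_t> getFrameSize() override;
Return<uint32_t> getSampleRate() override;
Return<Result> setSampleRate(uint32_t sampleRateHz) override;
Return<AudioChannelBitfield> getChannelMask() override;
Return<AudioFormat> getFormat() override;
Return<void> getAudioProperties(getAudioProperties_cb _hidl_cb) override;
Return<Result> addEffect(uint64_t effectId) override;
Return<Result> standby() override;
Return<Result> close() override;
Return<void> debug(const hidl_handle& fd, const hidl_vec<hidl_string>& options) override;
Return<uint32_t> getLatency() override;  // IStreamOut methods below follow the same pattern
Return<Result> setVolume(float left, float right) override;
Return<void> prepareForWriting(uint32_t frameSize, uint32_t framesCount,
                               prepareForWriting_cb _hidl_cb) override;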
+ Return getLatency() override; + Return setVolume(float left, float right) override; + Return prepareForWriting(uint32_t frameSize, uint32_t framesCount, + prepareForWriting_cb _hidl_cb) override; + Return getRenderPosition(getRenderPosition_cb _hidl_cb) override; + Return getNextWriteTimestamp(getNextWriteTimestamp_cb _hidl_cb) override; + Return setCallback(const sp& callback) override; + Return clearCallback() override; + Return supportsPauseAndResume(supportsPauseAndResume_cb _hidl_cb) override; + Return pause() override; + Return resume() override; + Return supportsDrain() override; + Return drain(AudioDrain type) override; + Return flush() override; + Return getPresentationPosition(getPresentationPosition_cb _hidl_cb) override; + Return start() override; + Return stop() override; + Return createMmapBuffer(int32_t minSizeFrames, createMmapBuffer_cb _hidl_cb) override; + Return getMmapPosition(getMmapPosition_cb _hidl_cb) override; +#ifdef AUDIO_HAL_VERSION_4_0 + Return updateSourceMetadata(const SourceMetadata& sourceMetadata) override; + Return selectPresentation(int32_t presentationId, int32_t programId) override; +#endif + + static Result getPresentationPositionImpl(audio_stream_out_t* stream, uint64_t* frames, + TimeSpec* timeStamp); + + private: + bool mIsClosed; + const sp mDevice; + audio_stream_out_t* mStream; + const sp mStreamCommon; + const sp> mStreamMmap; + sp mCallback; + std::unique_ptr mCommandMQ; + std::unique_ptr mDataMQ; + std::unique_ptr mStatusMQ; + EventFlag* mEfGroup; + std::atomic mStopWriteThread; + sp mWriteThread; + + virtual ~StreamOut(); + + static int asyncCallback(stream_callback_event_t event, void* param, void* cookie); +}; + +} // namespace implementation +} // namespace AUDIO_HAL_VERSION +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/2.0/default/StreamOut.cpp b/audio/core/all-versions/default/include/core/all-versions/default/StreamOut.impl.h similarity index 72% rename from audio/2.0/default/StreamOut.cpp rename to audio/core/all-versions/default/include/core/all-versions/default/StreamOut.impl.h index 49a6b12d137bdeb05b1a7851cf3bfcef487cc628..6fb157f7dedbc19adb0ee5dd6f95b66df80b7d23 100644 --- a/audio/2.0/default/StreamOut.cpp +++ b/audio/core/all-versions/default/include/core/all-versions/default/StreamOut.impl.h @@ -14,7 +14,8 @@ * limitations under the License. */ -#define LOG_TAG "StreamOutHAL" +#include + //#define LOG_NDEBUG 0 #define ATRACE_TAG ATRACE_TAG_AUDIO @@ -24,16 +25,13 @@ #include #include -#include "StreamOut.h" -#include "Util.h" - namespace android { namespace hardware { namespace audio { -namespace V2_0 { +namespace AUDIO_HAL_VERSION { namespace implementation { -using ::android::hardware::audio::common::V2_0::ThreadInfo; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::ThreadInfo; namespace { @@ -89,9 +87,9 @@ void WriteThread::doWrite() { } void WriteThread::doGetPresentationPosition() { - mStatus.retval = StreamOut::getPresentationPositionImpl( - mStream, &mStatus.reply.presentationPosition.frames, - &mStatus.reply.presentationPosition.timeStamp); + mStatus.retval = + StreamOut::getPresentationPositionImpl(mStream, &mStatus.reply.presentationPosition.frames, + &mStatus.reply.presentationPosition.timeStamp); } void WriteThread::doGetLatency() { @@ -105,10 +103,8 @@ bool WriteThread::threadLoop() { // as the Thread uses mutexes, and this can lead to priority inversion. 
while (!std::atomic_load_explicit(mStop, std::memory_order_acquire)) { uint32_t efState = 0; - mEfGroup->wait(static_cast(MessageQueueFlagBits::NOT_EMPTY), - &efState); - if (!(efState & - static_cast(MessageQueueFlagBits::NOT_EMPTY))) { + mEfGroup->wait(static_cast(MessageQueueFlagBits::NOT_EMPTY), &efState); + if (!(efState & static_cast(MessageQueueFlagBits::NOT_EMPTY))) { continue; // Nothing to do. } if (!mCommandMQ->read(&mStatus.replyTo)) { @@ -159,8 +155,7 @@ StreamOut::~StreamOut() { } if (mEfGroup) { status_t status = EventFlag::deleteEventFlag(&mEfGroup); - ALOGE_IF(status, "write MQ event flag deletion error: %s", - strerror(-status)); + ALOGE_IF(status, "write MQ event flag deletion error: %s", strerror(-status)); } mCallback.clear(); mDevice->closeOutputStream(mStream); @@ -170,7 +165,7 @@ StreamOut::~StreamOut() { mStream = nullptr; } -// Methods from ::android::hardware::audio::V2_0::IStream follow. +// Methods from ::android::hardware::audio::AUDIO_HAL_VERSION::IStream follow. Return StreamOut::getFrameSize() { return audio_stream_out_frame_size(mStream); } @@ -187,25 +182,33 @@ Return StreamOut::getSampleRate() { return mStreamCommon->getSampleRate(); } -Return StreamOut::getSupportedSampleRates( - getSupportedSampleRates_cb _hidl_cb) { +#ifdef AUDIO_HAL_VERSION_2_0 +Return StreamOut::getSupportedChannelMasks(getSupportedChannelMasks_cb _hidl_cb) { + return mStreamCommon->getSupportedChannelMasks(_hidl_cb); +} +Return StreamOut::getSupportedSampleRates(getSupportedSampleRates_cb _hidl_cb) { return mStreamCommon->getSupportedSampleRates(_hidl_cb); } +#endif + +Return StreamOut::getSupportedChannelMasks(AudioFormat format, + getSupportedChannelMasks_cb _hidl_cb) { + return mStreamCommon->getSupportedChannelMasks(format, _hidl_cb); +} +Return StreamOut::getSupportedSampleRates(AudioFormat format, + getSupportedSampleRates_cb _hidl_cb) { + return mStreamCommon->getSupportedSampleRates(format, _hidl_cb); +} Return StreamOut::setSampleRate(uint32_t sampleRateHz) { return mStreamCommon->setSampleRate(sampleRateHz); } -Return StreamOut::getChannelMask() { +Return StreamOut::getChannelMask() { return mStreamCommon->getChannelMask(); } -Return StreamOut::getSupportedChannelMasks( - getSupportedChannelMasks_cb _hidl_cb) { - return mStreamCommon->getSupportedChannelMasks(_hidl_cb); -} - -Return StreamOut::setChannelMask(AudioChannelMask mask) { +Return StreamOut::setChannelMask(AudioChannelBitfield mask) { return mStreamCommon->setChannelMask(mask); } @@ -237,21 +240,21 @@ Return StreamOut::standby() { return mStreamCommon->standby(); } -Return StreamOut::getDevice() { - return mStreamCommon->getDevice(); +Return StreamOut::setHwAvSync(uint32_t hwAvSync) { + return mStreamCommon->setHwAvSync(hwAvSync); } -Return StreamOut::setDevice(const DeviceAddress& address) { - return mStreamCommon->setDevice(address); +#ifdef AUDIO_HAL_VERSION_2_0 +Return StreamOut::setConnectedState(const DeviceAddress& address, bool connected) { + return mStreamCommon->setConnectedState(address, connected); } -Return StreamOut::setConnectedState(const DeviceAddress& address, - bool connected) { - return mStreamCommon->setConnectedState(address, connected); +Return StreamOut::getDevice() { + return mStreamCommon->getDevice(); } -Return StreamOut::setHwAvSync(uint32_t hwAvSync) { - return mStreamCommon->setHwAvSync(hwAvSync); +Return StreamOut::setDevice(const DeviceAddress& address) { + return mStreamCommon->setDevice(address); } Return StreamOut::getParameters(const hidl_vec& keys, @@ -259,14 +262,32 @@ 
Return StreamOut::getParameters(const hidl_vec& keys, return mStreamCommon->getParameters(keys, _hidl_cb); } -Return StreamOut::setParameters( - const hidl_vec& parameters) { +Return StreamOut::setParameters(const hidl_vec& parameters) { return mStreamCommon->setParameters(parameters); } Return StreamOut::debugDump(const hidl_handle& fd) { return mStreamCommon->debugDump(fd); } +#elif defined(AUDIO_HAL_VERSION_4_0) +Return StreamOut::getDevices(getDevices_cb _hidl_cb) { + return mStreamCommon->getDevices(_hidl_cb); +} + +Return StreamOut::setDevices(const hidl_vec& devices) { + return mStreamCommon->setDevices(devices); +} +Return StreamOut::getParameters(const hidl_vec& context, + const hidl_vec& keys, + getParameters_cb _hidl_cb) { + return mStreamCommon->getParameters(context, keys, _hidl_cb); +} + +Return StreamOut::setParameters(const hidl_vec& context, + const hidl_vec& parameters) { + return mStreamCommon->setParameters(context, parameters); +} +#endif Return StreamOut::close() { if (mIsClosed) return Result::INVALID_STATE; @@ -280,7 +301,7 @@ Return StreamOut::close() { return Result::OK; } -// Methods from ::android::hardware::audio::V2_0::IStreamOut follow. +// Methods from ::android::hardware::audio::AUDIO_HAL_VERSION::IStreamOut follow. Return StreamOut::getLatency() { return mStream->get_latency(mStream); } @@ -290,24 +311,21 @@ Return StreamOut::setVolume(float left, float right) { return Result::NOT_SUPPORTED; } if (!isGainNormalized(left)) { - ALOGW("Can not set a stream output volume {%f, %f} outside [0,1]", left, - right); + ALOGW("Can not set a stream output volume {%f, %f} outside [0,1]", left, right); return Result::INVALID_ARGUMENTS; } - return Stream::analyzeStatus("set_volume", - mStream->set_volume(mStream, left, right)); + return Stream::analyzeStatus("set_volume", mStream->set_volume(mStream, left, right)); } -Return StreamOut::prepareForWriting(uint32_t frameSize, - uint32_t framesCount, +Return StreamOut::prepareForWriting(uint32_t frameSize, uint32_t framesCount, prepareForWriting_cb _hidl_cb) { status_t status; ThreadInfo threadInfo = {0, 0}; // Wrap the _hidl_cb to return an error auto sendError = [&threadInfo, &_hidl_cb](Result result) { - _hidl_cb(result, CommandMQ::Descriptor(), DataMQ::Descriptor(), - StatusMQ::Descriptor(), threadInfo); + _hidl_cb(result, CommandMQ::Descriptor(), DataMQ::Descriptor(), StatusMQ::Descriptor(), + threadInfo); }; @@ -321,8 +339,7 @@ Return StreamOut::prepareForWriting(uint32_t frameSize, // Check frameSize and framesCount if (frameSize == 0 || framesCount == 0) { - ALOGE("Null frameSize (%u) or framesCount (%u)", frameSize, - framesCount); + ALOGE("Null frameSize (%u) or framesCount (%u)", frameSize, framesCount); sendError(Result::INVALID_ARGUMENTS); return Void(); } @@ -332,12 +349,10 @@ Return StreamOut::prepareForWriting(uint32_t frameSize, sendError(Result::INVALID_ARGUMENTS); return Void(); } - std::unique_ptr tempDataMQ( - new DataMQ(frameSize * framesCount, true /* EventFlag */)); + std::unique_ptr tempDataMQ(new DataMQ(frameSize * framesCount, true /* EventFlag */)); std::unique_ptr tempStatusMQ(new StatusMQ(1)); - if (!tempCommandMQ->isValid() || !tempDataMQ->isValid() || - !tempStatusMQ->isValid()) { + if (!tempCommandMQ->isValid() || !tempDataMQ->isValid() || !tempStatusMQ->isValid()) { ALOGE_IF(!tempCommandMQ->isValid(), "command MQ is invalid"); ALOGE_IF(!tempDataMQ->isValid(), "data MQ is invalid"); ALOGE_IF(!tempStatusMQ->isValid(), "status MQ is invalid"); @@ -345,8 +360,7 @@ Return 
StreamOut::prepareForWriting(uint32_t frameSize, return Void(); } EventFlag* tempRawEfGroup{}; - status = EventFlag::createEventFlag(tempDataMQ->getEventFlagWord(), - &tempRawEfGroup); + status = EventFlag::createEventFlag(tempDataMQ->getEventFlagWord(), &tempRawEfGroup); std::unique_ptr tempElfGroup( tempRawEfGroup, [](auto* ef) { EventFlag::deleteEventFlag(&ef); }); if (status != OK || !tempElfGroup) { @@ -356,9 +370,9 @@ Return StreamOut::prepareForWriting(uint32_t frameSize, } // Create and launch the thread. - auto tempWriteThread = std::make_unique( - &mStopWriteThread, mStream, tempCommandMQ.get(), tempDataMQ.get(), - tempStatusMQ.get(), tempElfGroup.get()); + auto tempWriteThread = + std::make_unique(&mStopWriteThread, mStream, tempCommandMQ.get(), + tempDataMQ.get(), tempStatusMQ.get(), tempElfGroup.get()); if (!tempWriteThread->init()) { ALOGW("failed to start writer thread: %s", strerror(-status)); sendError(Result::INVALID_ARGUMENTS); @@ -378,28 +392,25 @@ Return StreamOut::prepareForWriting(uint32_t frameSize, mEfGroup = tempElfGroup.release(); threadInfo.pid = getpid(); threadInfo.tid = mWriteThread->getTid(); - _hidl_cb(Result::OK, *mCommandMQ->getDesc(), *mDataMQ->getDesc(), - *mStatusMQ->getDesc(), threadInfo); + _hidl_cb(Result::OK, *mCommandMQ->getDesc(), *mDataMQ->getDesc(), *mStatusMQ->getDesc(), + threadInfo); return Void(); } Return StreamOut::getRenderPosition(getRenderPosition_cb _hidl_cb) { uint32_t halDspFrames; - Result retval = Stream::analyzeStatus( - "get_render_position", - mStream->get_render_position(mStream, &halDspFrames)); + Result retval = Stream::analyzeStatus("get_render_position", + mStream->get_render_position(mStream, &halDspFrames)); _hidl_cb(retval, halDspFrames); return Void(); } -Return StreamOut::getNextWriteTimestamp( - getNextWriteTimestamp_cb _hidl_cb) { +Return StreamOut::getNextWriteTimestamp(getNextWriteTimestamp_cb _hidl_cb) { Result retval(Result::NOT_SUPPORTED); int64_t timestampUs = 0; if (mStream->get_next_write_timestamp != NULL) { - retval = Stream::analyzeStatus( - "get_next_write_timestamp", - mStream->get_next_write_timestamp(mStream, ×tampUs)); + retval = Stream::analyzeStatus("get_next_write_timestamp", + mStream->get_next_write_timestamp(mStream, ×tampUs)); } _hidl_cb(retval, timestampUs); return Void(); @@ -423,14 +434,13 @@ Return StreamOut::clearCallback() { } // static -int StreamOut::asyncCallback(stream_callback_event_t event, void*, - void* cookie) { +int StreamOut::asyncCallback(stream_callback_event_t event, void*, void* cookie) { // It is guaranteed that the callback thread is joined prior // to exiting from StreamOut's destructor. Must *not* use sp // here because it can make this code the last owner of StreamOut, // and an attempt to run the destructor on the callback thread // will cause a deadlock in the legacy HAL code. - StreamOut *self = reinterpret_cast(cookie); + StreamOut* self = reinterpret_cast(cookie); // It's correct to hold an sp<> to callback because the reference // in the StreamOut instance can be cleared in the meantime. There is // no difference on which thread to run IStreamOutCallback's destructor. 
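// Editorial sketch (not part of this hunk): the dispatcher that the comment above describes. The
// cookie carries the raw StreamOut*, and only the IStreamOutCallback proxy is retained through an
// sp<>, so this thread can never become the last owner of the StreamOut. The event constants are
// the legacy stream_callback_event_t values; the exact body shown here is an assumption.
int StreamOut::asyncCallback(stream_callback_event_t event, void*, void* cookie) {
    StreamOut* self = reinterpret_cast<StreamOut*>(cookie);
    sp<IStreamOutCallback> callback = self->mCallback;  // only the callback is ref-counted here
    if (callback.get() == nullptr) return 0;
    switch (event) {
        case STREAM_CBK_EVENT_WRITE_READY:
            callback->onWriteReady();
            break;
        case STREAM_CBK_EVENT_DRAIN_READY:
            callback->onDrainReady();
            break;
        case STREAM_CBK_EVENT_ERROR:
            callback->onError();
            break;
        default:
            ALOGW("asyncCallback() unknown event %d", event);
            break;
    }
    return 0;
}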
@@ -454,22 +464,19 @@ int StreamOut::asyncCallback(stream_callback_event_t event, void*, return 0; } -Return StreamOut::supportsPauseAndResume( - supportsPauseAndResume_cb _hidl_cb) { +Return StreamOut::supportsPauseAndResume(supportsPauseAndResume_cb _hidl_cb) { _hidl_cb(mStream->pause != NULL, mStream->resume != NULL); return Void(); } Return StreamOut::pause() { - return mStream->pause != NULL - ? Stream::analyzeStatus("pause", mStream->pause(mStream)) - : Result::NOT_SUPPORTED; + return mStream->pause != NULL ? Stream::analyzeStatus("pause", mStream->pause(mStream)) + : Result::NOT_SUPPORTED; } Return StreamOut::resume() { - return mStream->resume != NULL - ? Stream::analyzeStatus("resume", mStream->resume(mStream)) - : Result::NOT_SUPPORTED; + return mStream->resume != NULL ? Stream::analyzeStatus("resume", mStream->resume(mStream)) + : Result::NOT_SUPPORTED; } Return StreamOut::supportsDrain() { @@ -479,21 +486,17 @@ Return StreamOut::supportsDrain() { Return StreamOut::drain(AudioDrain type) { return mStream->drain != NULL ? Stream::analyzeStatus( - "drain", - mStream->drain(mStream, - static_cast(type))) + "drain", mStream->drain(mStream, static_cast(type))) : Result::NOT_SUPPORTED; } Return StreamOut::flush() { - return mStream->flush != NULL - ? Stream::analyzeStatus("flush", mStream->flush(mStream)) - : Result::NOT_SUPPORTED; + return mStream->flush != NULL ? Stream::analyzeStatus("flush", mStream->flush(mStream)) + : Result::NOT_SUPPORTED; } // static -Result StreamOut::getPresentationPositionImpl(audio_stream_out_t* stream, - uint64_t* frames, +Result StreamOut::getPresentationPositionImpl(audio_stream_out_t* stream, uint64_t* frames, TimeSpec* timeStamp) { // Don't logspam on EINVAL--it's normal for get_presentation_position // to return it sometimes. 
EAGAIN may be returned by A2DP audio HAL @@ -513,8 +516,7 @@ Result StreamOut::getPresentationPositionImpl(audio_stream_out_t* stream, return retval; } -Return StreamOut::getPresentationPosition( - getPresentationPosition_cb _hidl_cb) { +Return StreamOut::getPresentationPosition(getPresentationPosition_cb _hidl_cb) { uint64_t frames = 0; TimeSpec timeStamp = {0, 0}; Result retval = getPresentationPositionImpl(mStream, &frames, &timeStamp); @@ -530,18 +532,46 @@ Return StreamOut::stop() { return mStreamMmap->stop(); } -Return StreamOut::createMmapBuffer(int32_t minSizeFrames, - createMmapBuffer_cb _hidl_cb) { - return mStreamMmap->createMmapBuffer( - minSizeFrames, audio_stream_out_frame_size(mStream), _hidl_cb); +Return StreamOut::createMmapBuffer(int32_t minSizeFrames, createMmapBuffer_cb _hidl_cb) { + return mStreamMmap->createMmapBuffer(minSizeFrames, audio_stream_out_frame_size(mStream), + _hidl_cb); } Return StreamOut::getMmapPosition(getMmapPosition_cb _hidl_cb) { return mStreamMmap->getMmapPosition(_hidl_cb); } +Return StreamOut::debug(const hidl_handle& fd, const hidl_vec& options) { + return mStreamCommon->debug(fd, options); +} + +#ifdef AUDIO_HAL_VERSION_4_0 +Return StreamOut::updateSourceMetadata(const SourceMetadata& sourceMetadata) { + if (mStream->update_source_metadata == nullptr) { + return Void(); // not supported by the HAL + } + std::vector halTracks; + halTracks.reserve(sourceMetadata.tracks.size()); + for (auto& metadata : sourceMetadata.tracks) { + halTracks.push_back({ + .usage = static_cast(metadata.usage), + .content_type = static_cast(metadata.contentType), + .gain = metadata.gain, + }); + } + const source_metadata_t halMetadata = { + .track_count = halTracks.size(), .tracks = halTracks.data(), + }; + mStream->update_source_metadata(mStream, &halMetadata); + return Void(); +} +Return StreamOut::selectPresentation(int32_t /*presentationId*/, int32_t /*programId*/) { + return Result::NOT_SUPPORTED; // TODO: propagate to legacy +} +#endif + } // namespace implementation -} // namespace V2_0 +} // namespace AUDIO_HAL_VERSION } // namespace audio } // namespace hardware } // namespace android diff --git a/audio/2.0/default/Util.h b/audio/core/all-versions/default/include/core/all-versions/default/Util.h similarity index 92% rename from audio/2.0/default/Util.h rename to audio/core/all-versions/default/include/core/all-versions/default/Util.h index 55019b8798df6fd1fbe0a2ffaf2c2145555587e7..350fd867e6c3456fb37dfbb60e280a8d448409b1 100644 --- a/audio/2.0/default/Util.h +++ b/audio/core/all-versions/default/include/core/all-versions/default/Util.h @@ -14,8 +14,7 @@ * limitations under the License. */ -#ifndef ANDROID_HARDWARE_AUDIO_V2_0_UTIL_H -#define ANDROID_HARDWARE_AUDIO_V2_0_UTIL_H +#include #include #include @@ -25,9 +24,11 @@ namespace android { namespace hardware { namespace audio { -namespace V2_0 { +namespace AUDIO_HAL_VERSION { namespace implementation { +using ::android::hardware::audio::AUDIO_HAL_VERSION::Result; + /** @return true if gain is between 0 and 1 included. 
*/ constexpr bool isGainNormalized(float gain) { return gain >= 0.0 && gain <= 1.0; @@ -67,9 +68,7 @@ static inline Result analyzeStatus(const char* className, const char* funcName, } // namespace util } // namespace implementation -} // namespace V2_0 +} // namespace AUDIO_HAL_VERSION } // namespace audio } // namespace hardware } // namespace android - -#endif // ANDROID_HARDWARE_AUDIO_V2_0_UTIL_H diff --git a/audio/effect/2.0/default/AcousticEchoCancelerEffect.cpp b/audio/effect/2.0/default/AcousticEchoCancelerEffect.cpp index 7b9ca302e628ceb65efffe9784a79bbd2b054f91..cadc2f1b4377f07746cbc8dced905b5aa11e2cd2 100644 --- a/audio/effect/2.0/default/AcousticEchoCancelerEffect.cpp +++ b/audio/effect/2.0/default/AcousticEchoCancelerEffect.cpp @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016 The Android Open Source Project + * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -15,177 +15,9 @@ */ #define LOG_TAG "AEC_Effect_HAL" -#include -#include #include "AcousticEchoCancelerEffect.h" -namespace android { -namespace hardware { -namespace audio { -namespace effect { -namespace V2_0 { -namespace implementation { - -AcousticEchoCancelerEffect::AcousticEchoCancelerEffect(effect_handle_t handle) - : mEffect(new Effect(handle)) { -} - -AcousticEchoCancelerEffect::~AcousticEchoCancelerEffect() {} - -// Methods from ::android::hardware::audio::effect::V2_0::IEffect follow. -Return AcousticEchoCancelerEffect::init() { - return mEffect->init(); -} - -Return AcousticEchoCancelerEffect::setConfig( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) { - return mEffect->setConfig(config, inputBufferProvider, outputBufferProvider); -} - -Return AcousticEchoCancelerEffect::reset() { - return mEffect->reset(); -} - -Return AcousticEchoCancelerEffect::enable() { - return mEffect->enable(); -} - -Return AcousticEchoCancelerEffect::disable() { - return mEffect->disable(); -} - -Return AcousticEchoCancelerEffect::setDevice(AudioDevice device) { - return mEffect->setDevice(device); -} - -Return AcousticEchoCancelerEffect::setAndGetVolume( - const hidl_vec& volumes, setAndGetVolume_cb _hidl_cb) { - return mEffect->setAndGetVolume(volumes, _hidl_cb); -} - -Return AcousticEchoCancelerEffect::volumeChangeNotification( - const hidl_vec& volumes) { - return mEffect->volumeChangeNotification(volumes); -} - -Return AcousticEchoCancelerEffect::setAudioMode(AudioMode mode) { - return mEffect->setAudioMode(mode); -} - -Return AcousticEchoCancelerEffect::setConfigReverse( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) { - return mEffect->setConfigReverse(config, inputBufferProvider, outputBufferProvider); -} - -Return AcousticEchoCancelerEffect::setInputDevice(AudioDevice device) { - return mEffect->setInputDevice(device); -} - -Return AcousticEchoCancelerEffect::getConfig(getConfig_cb _hidl_cb) { - return mEffect->getConfig(_hidl_cb); -} - -Return AcousticEchoCancelerEffect::getConfigReverse(getConfigReverse_cb _hidl_cb) { - return mEffect->getConfigReverse(_hidl_cb); -} - -Return AcousticEchoCancelerEffect::getSupportedAuxChannelsConfigs( - uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) { - return mEffect->getSupportedAuxChannelsConfigs(maxConfigs, _hidl_cb); -} - -Return AcousticEchoCancelerEffect::getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) { - return 
mEffect->getAuxChannelsConfig(_hidl_cb); -} - -Return AcousticEchoCancelerEffect::setAuxChannelsConfig( - const EffectAuxChannelsConfig& config) { - return mEffect->setAuxChannelsConfig(config); -} - -Return AcousticEchoCancelerEffect::setAudioSource(AudioSource source) { - return mEffect->setAudioSource(source); -} - -Return AcousticEchoCancelerEffect::offload(const EffectOffloadParameter& param) { - return mEffect->offload(param); -} - -Return AcousticEchoCancelerEffect::getDescriptor(getDescriptor_cb _hidl_cb) { - return mEffect->getDescriptor(_hidl_cb); -} - -Return AcousticEchoCancelerEffect::prepareForProcessing( - prepareForProcessing_cb _hidl_cb) { - return mEffect->prepareForProcessing(_hidl_cb); -} - -Return AcousticEchoCancelerEffect::setProcessBuffers( - const AudioBuffer& inBuffer, const AudioBuffer& outBuffer) { - return mEffect->setProcessBuffers(inBuffer, outBuffer); -} - -Return AcousticEchoCancelerEffect::command( - uint32_t commandId, - const hidl_vec& data, - uint32_t resultMaxSize, - command_cb _hidl_cb) { - return mEffect->command(commandId, data, resultMaxSize, _hidl_cb); -} - -Return AcousticEchoCancelerEffect::setParameter( - const hidl_vec& parameter, const hidl_vec& value) { - return mEffect->setParameter(parameter, value); -} - -Return AcousticEchoCancelerEffect::getParameter( - const hidl_vec& parameter, - uint32_t valueMaxSize, - getParameter_cb _hidl_cb) { - return mEffect->getParameter(parameter, valueMaxSize, _hidl_cb); -} - -Return AcousticEchoCancelerEffect::getSupportedConfigsForFeature( - uint32_t featureId, - uint32_t maxConfigs, - uint32_t configSize, - getSupportedConfigsForFeature_cb _hidl_cb) { - return mEffect->getSupportedConfigsForFeature(featureId, maxConfigs, configSize, _hidl_cb); -} - -Return AcousticEchoCancelerEffect::getCurrentConfigForFeature( - uint32_t featureId, - uint32_t configSize, - getCurrentConfigForFeature_cb _hidl_cb) { - return mEffect->getCurrentConfigForFeature(featureId, configSize, _hidl_cb); -} - -Return AcousticEchoCancelerEffect::setCurrentConfigForFeature( - uint32_t featureId, const hidl_vec& configData) { - return mEffect->setCurrentConfigForFeature(featureId, configData); -} - -Return AcousticEchoCancelerEffect::close() { - return mEffect->close(); -} - -// Methods from ::android::hardware::audio::effect::V2_0::IAcousticEchoCancelerEffect follow. -Return AcousticEchoCancelerEffect::setEchoDelay(uint32_t echoDelayMs) { - return mEffect->setParam(AEC_PARAM_ECHO_DELAY, echoDelayMs); -} - -Return AcousticEchoCancelerEffect::getEchoDelay(getEchoDelay_cb _hidl_cb) { - return mEffect->getIntegerParam(AEC_PARAM_ECHO_DELAY, _hidl_cb); -} - -} // namespace implementation -} // namespace V2_0 -} // namespace effect -} // namespace audio -} // namespace hardware -} // namespace android +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/effect/2.0/default/AcousticEchoCancelerEffect.h b/audio/effect/2.0/default/AcousticEchoCancelerEffect.h index 1ac925df6ebd9d53b2a980157c736a16f2686a46..d36335c7b393e96a0ede9eb4dd3792967ed492f2 100644 --- a/audio/effect/2.0/default/AcousticEchoCancelerEffect.h +++ b/audio/effect/2.0/default/AcousticEchoCancelerEffect.h @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016 The Android Open Source Project + * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -18,100 +18,11 @@ #define ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_ACOUSTICECHOCANCELEREFFECT_H #include -#include - -#include #include "Effect.h" -namespace android { -namespace hardware { -namespace audio { -namespace effect { -namespace V2_0 { -namespace implementation { - -using ::android::hardware::audio::effect::V2_0::IAcousticEchoCancelerEffect; -using ::android::hardware::audio::effect::V2_0::Result; -using ::android::hardware::Return; -using ::android::hardware::Void; -using ::android::hardware::hidl_vec; -using ::android::hardware::hidl_string; -using ::android::sp; - -struct AcousticEchoCancelerEffect : public IAcousticEchoCancelerEffect { - explicit AcousticEchoCancelerEffect(effect_handle_t handle); - - // Methods from ::android::hardware::audio::effect::V2_0::IEffect follow. - Return init() override; - Return setConfig( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) override; - Return reset() override; - Return enable() override; - Return disable() override; - Return setDevice(AudioDevice device) override; - Return setAndGetVolume( - const hidl_vec& volumes, setAndGetVolume_cb _hidl_cb) override; - Return volumeChangeNotification(const hidl_vec& volumes) override; - Return setAudioMode(AudioMode mode) override; - Return setConfigReverse( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) override; - Return setInputDevice(AudioDevice device) override; - Return getConfig(getConfig_cb _hidl_cb) override; - Return getConfigReverse(getConfigReverse_cb _hidl_cb) override; - Return getSupportedAuxChannelsConfigs( - uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) override; - Return getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) override; - Return setAuxChannelsConfig(const EffectAuxChannelsConfig& config) override; - Return setAudioSource(AudioSource source) override; - Return offload(const EffectOffloadParameter& param) override; - Return getDescriptor(getDescriptor_cb _hidl_cb) override; - Return prepareForProcessing(prepareForProcessing_cb _hidl_cb) override; - Return setProcessBuffers( - const AudioBuffer& inBuffer, const AudioBuffer& outBuffer) override; - Return command( - uint32_t commandId, - const hidl_vec& data, - uint32_t resultMaxSize, - command_cb _hidl_cb) override; - Return setParameter( - const hidl_vec& parameter, const hidl_vec& value) override; - Return getParameter( - const hidl_vec& parameter, - uint32_t valueMaxSize, - getParameter_cb _hidl_cb) override; - Return getSupportedConfigsForFeature( - uint32_t featureId, - uint32_t maxConfigs, - uint32_t configSize, - getSupportedConfigsForFeature_cb _hidl_cb) override; - Return getCurrentConfigForFeature( - uint32_t featureId, - uint32_t configSize, - getCurrentConfigForFeature_cb _hidl_cb) override; - Return setCurrentConfigForFeature( - uint32_t featureId, const hidl_vec& configData) override; - Return close() override; - - // Methods from ::android::hardware::audio::effect::V2_0::IAcousticEchoCancelerEffect follow. 
- Return setEchoDelay(uint32_t echoDelayMs) override; - Return getEchoDelay(getEchoDelay_cb _hidl_cb) override; - - private: - sp mEffect; - - virtual ~AcousticEchoCancelerEffect(); -}; - -} // namespace implementation -} // namespace V2_0 -} // namespace effect -} // namespace audio -} // namespace hardware -} // namespace android +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION #endif // ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_ACOUSTICECHOCANCELEREFFECT_H diff --git a/audio/effect/2.0/default/Android.bp b/audio/effect/2.0/default/Android.bp index e1072b466d48deac63df0610d564e2e24f31c7ea..db0098849c18f7bb5378651602ade74f7e56764c 100644 --- a/audio/effect/2.0/default/Android.bp +++ b/audio/effect/2.0/default/Android.bp @@ -31,6 +31,7 @@ cc_library_shared { "libhidltransport", "liblog", "libutils", + "android.hardware.audio.common-util", "android.hardware.audio.common@2.0", "android.hardware.audio.common@2.0-util", "android.hardware.audio.effect@2.0", @@ -38,6 +39,8 @@ cc_library_shared { ], header_libs: [ + "android.hardware.audio.common.util@all-versions", + "android.hardware.audio.effect@all-versions-impl", "libaudio_system_headers", "libaudioclient_headers", "libeffects_headers", diff --git a/audio/effect/2.0/default/AudioBufferManager.cpp b/audio/effect/2.0/default/AudioBufferManager.cpp index bba0c4adf934f4c4d92a2b66351ebdb34f4a712b..39918dd1c76e6255c7dd2082f09d4b157b9eb4d6 100644 --- a/audio/effect/2.0/default/AudioBufferManager.cpp +++ b/audio/effect/2.0/default/AudioBufferManager.cpp @@ -14,78 +14,8 @@ * limitations under the License. */ -#include - -#include - #include "AudioBufferManager.h" -namespace android { - -ANDROID_SINGLETON_STATIC_INSTANCE(AudioBufferManager); - -bool AudioBufferManager::wrap(const AudioBuffer& buffer, sp* wrapper) { - // Check if we have this buffer already - std::lock_guard lock(mLock); - ssize_t idx = mBuffers.indexOfKey(buffer.id); - if (idx >= 0) { - *wrapper = mBuffers[idx].promote(); - if (*wrapper != nullptr) { - (*wrapper)->getHalBuffer()->frameCount = buffer.frameCount; - return true; - } - mBuffers.removeItemsAt(idx); - } - // Need to create and init a new AudioBufferWrapper. 
- sp tempBuffer(new AudioBufferWrapper(buffer)); - if (!tempBuffer->init()) return false; - *wrapper = tempBuffer; - mBuffers.add(buffer.id, *wrapper); - return true; -} - -void AudioBufferManager::removeEntry(uint64_t id) { - std::lock_guard lock(mLock); - ssize_t idx = mBuffers.indexOfKey(id); - if (idx >= 0) mBuffers.removeItemsAt(idx); -} - -namespace hardware { -namespace audio { -namespace effect { -namespace V2_0 { -namespace implementation { - -AudioBufferWrapper::AudioBufferWrapper(const AudioBuffer& buffer) : - mHidlBuffer(buffer), mHalBuffer{ 0, { nullptr } } { -} - -AudioBufferWrapper::~AudioBufferWrapper() { - AudioBufferManager::getInstance().removeEntry(mHidlBuffer.id); -} - -bool AudioBufferWrapper::init() { - if (mHalBuffer.raw != nullptr) { - ALOGE("An attempt to init AudioBufferWrapper twice"); - return false; - } - mHidlMemory = mapMemory(mHidlBuffer.data); - if (mHidlMemory == nullptr) { - ALOGE("Could not map HIDL memory to IMemory"); - return false; - } - mHalBuffer.raw = static_cast(mHidlMemory->getPointer()); - if (mHalBuffer.raw == nullptr) { - ALOGE("IMemory buffer pointer is null"); - return false; - } - mHalBuffer.frameCount = mHidlBuffer.frameCount; - return true; -} - -} // namespace implementation -} // namespace V2_0 -} // namespace effect -} // namespace audio -} // namespace hardware -} // namespace android +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/effect/2.0/default/AudioBufferManager.h b/audio/effect/2.0/default/AudioBufferManager.h index 6d6599555b159fee0f5ed94159d8ad94b4848adf..789fbd1c8fac2e87f6c111d065c43b16826e06f1 100644 --- a/audio/effect/2.0/default/AudioBufferManager.h +++ b/audio/effect/2.0/default/AudioBufferManager.h @@ -14,69 +14,13 @@ * limitations under the License. */ -#ifndef android_hardware_audio_effect_V2_0_AudioBufferManager_H_ -#define android_hardware_audio_effect_V2_0_AudioBufferManager_H_ - -#include +#ifndef ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_AUDIO_BUFFER_MANAGER_H_ +#define ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_AUDIO_BUFFER_MANAGER_H_ #include -#include -#include -#include -#include -#include - -using ::android::hardware::audio::effect::V2_0::AudioBuffer; -using ::android::hidl::memory::V1_0::IMemory; - -namespace android { -namespace hardware { -namespace audio { -namespace effect { -namespace V2_0 { -namespace implementation { - -class AudioBufferWrapper : public RefBase { - public: - explicit AudioBufferWrapper(const AudioBuffer& buffer); - virtual ~AudioBufferWrapper(); - bool init(); - audio_buffer_t* getHalBuffer() { return &mHalBuffer; } - private: - AudioBufferWrapper(const AudioBufferWrapper&) = delete; - void operator=(AudioBufferWrapper) = delete; - - AudioBuffer mHidlBuffer; - sp mHidlMemory; - audio_buffer_t mHalBuffer; -}; - -} // namespace implementation -} // namespace V2_0 -} // namespace effect -} // namespace audio -} // namespace hardware -} // namespace android - -using ::android::hardware::audio::effect::V2_0::implementation::AudioBufferWrapper; - -namespace android { - -// This class needs to be in 'android' ns because Singleton macros require that. -class AudioBufferManager : public Singleton { - public: - bool wrap(const AudioBuffer& buffer, sp* wrapper); - - private: - friend class hardware::audio::effect::V2_0::implementation::AudioBufferWrapper; - - // Called by AudioBufferWrapper. 
- void removeEntry(uint64_t id); - - std::mutex mLock; - KeyedVector> mBuffers; -}; -} // namespace android +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION -#endif // android_hardware_audio_effect_V2_0_AudioBufferManager_H_ +#endif // ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_AUDIO_BUFFER_MANAGER_H_ diff --git a/audio/effect/2.0/default/AutomaticGainControlEffect.cpp b/audio/effect/2.0/default/AutomaticGainControlEffect.cpp index 62fe5f763fcf8f70e64f094c1e231e708a4e50b5..7e00a8065fde84748b1220b150efabff4d1611fc 100644 --- a/audio/effect/2.0/default/AutomaticGainControlEffect.cpp +++ b/audio/effect/2.0/default/AutomaticGainControlEffect.cpp @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016 The Android Open Source Project + * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -15,223 +15,9 @@ */ #define LOG_TAG "AGC_Effect_HAL" -#include #include "AutomaticGainControlEffect.h" -namespace android { -namespace hardware { -namespace audio { -namespace effect { -namespace V2_0 { -namespace implementation { - -AutomaticGainControlEffect::AutomaticGainControlEffect(effect_handle_t handle) - : mEffect(new Effect(handle)) { -} - -AutomaticGainControlEffect::~AutomaticGainControlEffect() {} - -void AutomaticGainControlEffect::propertiesFromHal( - const t_agc_settings& halProperties, - IAutomaticGainControlEffect::AllProperties* properties) { - properties->targetLevelMb = halProperties.targetLevel; - properties->compGainMb = halProperties.compGain; - properties->limiterEnabled = halProperties.limiterEnabled; -} - -void AutomaticGainControlEffect::propertiesToHal( - const IAutomaticGainControlEffect::AllProperties& properties, - t_agc_settings* halProperties) { - halProperties->targetLevel = properties.targetLevelMb; - halProperties->compGain = properties.compGainMb; - halProperties->limiterEnabled = properties.limiterEnabled; -} - -// Methods from ::android::hardware::audio::effect::V2_0::IEffect follow. 
-Return AutomaticGainControlEffect::init() { - return mEffect->init(); -} - -Return AutomaticGainControlEffect::setConfig( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) { - return mEffect->setConfig(config, inputBufferProvider, outputBufferProvider); -} - -Return AutomaticGainControlEffect::reset() { - return mEffect->reset(); -} - -Return AutomaticGainControlEffect::enable() { - return mEffect->enable(); -} - -Return AutomaticGainControlEffect::disable() { - return mEffect->disable(); -} - -Return AutomaticGainControlEffect::setDevice(AudioDevice device) { - return mEffect->setDevice(device); -} - -Return AutomaticGainControlEffect::setAndGetVolume( - const hidl_vec& volumes, setAndGetVolume_cb _hidl_cb) { - return mEffect->setAndGetVolume(volumes, _hidl_cb); -} - -Return AutomaticGainControlEffect::volumeChangeNotification( - const hidl_vec& volumes) { - return mEffect->volumeChangeNotification(volumes); -} - -Return AutomaticGainControlEffect::setAudioMode(AudioMode mode) { - return mEffect->setAudioMode(mode); -} - -Return AutomaticGainControlEffect::setConfigReverse( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) { - return mEffect->setConfigReverse(config, inputBufferProvider, outputBufferProvider); -} - -Return AutomaticGainControlEffect::setInputDevice(AudioDevice device) { - return mEffect->setInputDevice(device); -} - -Return AutomaticGainControlEffect::getConfig(getConfig_cb _hidl_cb) { - return mEffect->getConfig(_hidl_cb); -} - -Return AutomaticGainControlEffect::getConfigReverse(getConfigReverse_cb _hidl_cb) { - return mEffect->getConfigReverse(_hidl_cb); -} - -Return AutomaticGainControlEffect::getSupportedAuxChannelsConfigs( - uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) { - return mEffect->getSupportedAuxChannelsConfigs(maxConfigs, _hidl_cb); -} - -Return AutomaticGainControlEffect::getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) { - return mEffect->getAuxChannelsConfig(_hidl_cb); -} - -Return AutomaticGainControlEffect::setAuxChannelsConfig( - const EffectAuxChannelsConfig& config) { - return mEffect->setAuxChannelsConfig(config); -} - -Return AutomaticGainControlEffect::setAudioSource(AudioSource source) { - return mEffect->setAudioSource(source); -} - -Return AutomaticGainControlEffect::offload(const EffectOffloadParameter& param) { - return mEffect->offload(param); -} - -Return AutomaticGainControlEffect::getDescriptor(getDescriptor_cb _hidl_cb) { - return mEffect->getDescriptor(_hidl_cb); -} - -Return AutomaticGainControlEffect::prepareForProcessing( - prepareForProcessing_cb _hidl_cb) { - return mEffect->prepareForProcessing(_hidl_cb); -} - -Return AutomaticGainControlEffect::setProcessBuffers( - const AudioBuffer& inBuffer, const AudioBuffer& outBuffer) { - return mEffect->setProcessBuffers(inBuffer, outBuffer); -} - -Return AutomaticGainControlEffect::command( - uint32_t commandId, - const hidl_vec& data, - uint32_t resultMaxSize, - command_cb _hidl_cb) { - return mEffect->command(commandId, data, resultMaxSize, _hidl_cb); -} - -Return AutomaticGainControlEffect::setParameter( - const hidl_vec& parameter, const hidl_vec& value) { - return mEffect->setParameter(parameter, value); -} - -Return AutomaticGainControlEffect::getParameter( - const hidl_vec& parameter, - uint32_t valueMaxSize, - getParameter_cb _hidl_cb) { - return mEffect->getParameter(parameter, valueMaxSize, _hidl_cb); -} - -Return 
AutomaticGainControlEffect::getSupportedConfigsForFeature( - uint32_t featureId, - uint32_t maxConfigs, - uint32_t configSize, - getSupportedConfigsForFeature_cb _hidl_cb) { - return mEffect->getSupportedConfigsForFeature(featureId, maxConfigs, configSize, _hidl_cb); -} - -Return AutomaticGainControlEffect::getCurrentConfigForFeature( - uint32_t featureId, - uint32_t configSize, - getCurrentConfigForFeature_cb _hidl_cb) { - return mEffect->getCurrentConfigForFeature(featureId, configSize, _hidl_cb); -} - -Return AutomaticGainControlEffect::setCurrentConfigForFeature( - uint32_t featureId, const hidl_vec& configData) { - return mEffect->setCurrentConfigForFeature(featureId, configData); -} - -Return AutomaticGainControlEffect::close() { - return mEffect->close(); -} - -// Methods from ::android::hardware::audio::effect::V2_0::IAutomaticGainControlEffect follow. -Return AutomaticGainControlEffect::setTargetLevel(int16_t targetLevelMb) { - return mEffect->setParam(AGC_PARAM_TARGET_LEVEL, targetLevelMb); -} - -Return AutomaticGainControlEffect::getTargetLevel(getTargetLevel_cb _hidl_cb) { - return mEffect->getIntegerParam(AGC_PARAM_TARGET_LEVEL, _hidl_cb); -} - -Return AutomaticGainControlEffect::setCompGain(int16_t compGainMb) { - return mEffect->setParam(AGC_PARAM_COMP_GAIN, compGainMb); -} - -Return AutomaticGainControlEffect::getCompGain(getCompGain_cb _hidl_cb) { - return mEffect->getIntegerParam(AGC_PARAM_COMP_GAIN, _hidl_cb); -} - -Return AutomaticGainControlEffect::setLimiterEnabled(bool enabled) { - return mEffect->setParam(AGC_PARAM_LIMITER_ENA, enabled); -} - -Return AutomaticGainControlEffect::isLimiterEnabled(isLimiterEnabled_cb _hidl_cb) { - return mEffect->getIntegerParam(AGC_PARAM_LIMITER_ENA, _hidl_cb); -} - -Return AutomaticGainControlEffect::setAllProperties(const IAutomaticGainControlEffect::AllProperties& properties) { - t_agc_settings halProperties; - propertiesToHal(properties, &halProperties); - return mEffect->setParam(AGC_PARAM_PROPERTIES, halProperties); -} - -Return AutomaticGainControlEffect::getAllProperties(getAllProperties_cb _hidl_cb) { - t_agc_settings halProperties; - Result retval = mEffect->getParam(AGC_PARAM_PROPERTIES, halProperties); - AllProperties properties; - propertiesFromHal(halProperties, &properties); - _hidl_cb(retval, properties); - return Void(); -} - -} // namespace implementation -} // namespace V2_0 -} // namespace effect -} // namespace audio -} // namespace hardware -} // namespace android +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/effect/2.0/default/AutomaticGainControlEffect.h b/audio/effect/2.0/default/AutomaticGainControlEffect.h index 5e1f2796ad384be865e85a3250611533dda6c4da..ef440d2e408a5df1b4d1d25d1bf991262f40db69 100644 --- a/audio/effect/2.0/default/AutomaticGainControlEffect.h +++ b/audio/effect/2.0/default/AutomaticGainControlEffect.h @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016 The Android Open Source Project + * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -17,117 +17,12 @@ #ifndef ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_AUTOMATICGAINCONTROLEFFECT_H #define ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_AUTOMATICGAINCONTROLEFFECT_H -#include - #include -#include - -#include #include "Effect.h" -namespace android { -namespace hardware { -namespace audio { -namespace effect { -namespace V2_0 { -namespace implementation { - -using ::android::hardware::audio::effect::V2_0::IAutomaticGainControlEffect; -using ::android::hardware::audio::effect::V2_0::Result; -using ::android::hardware::Return; -using ::android::hardware::Void; -using ::android::hardware::hidl_vec; -using ::android::hardware::hidl_string; -using ::android::sp; - -struct AutomaticGainControlEffect : public IAutomaticGainControlEffect { - explicit AutomaticGainControlEffect(effect_handle_t handle); - - // Methods from ::android::hardware::audio::effect::V2_0::IEffect follow. - Return init() override; - Return setConfig( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) override; - Return reset() override; - Return enable() override; - Return disable() override; - Return setDevice(AudioDevice device) override; - Return setAndGetVolume( - const hidl_vec& volumes, setAndGetVolume_cb _hidl_cb) override; - Return volumeChangeNotification(const hidl_vec& volumes) override; - Return setAudioMode(AudioMode mode) override; - Return setConfigReverse( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) override; - Return setInputDevice(AudioDevice device) override; - Return getConfig(getConfig_cb _hidl_cb) override; - Return getConfigReverse(getConfigReverse_cb _hidl_cb) override; - Return getSupportedAuxChannelsConfigs( - uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) override; - Return getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) override; - Return setAuxChannelsConfig(const EffectAuxChannelsConfig& config) override; - Return setAudioSource(AudioSource source) override; - Return offload(const EffectOffloadParameter& param) override; - Return getDescriptor(getDescriptor_cb _hidl_cb) override; - Return prepareForProcessing(prepareForProcessing_cb _hidl_cb) override; - Return setProcessBuffers( - const AudioBuffer& inBuffer, const AudioBuffer& outBuffer) override; - Return command( - uint32_t commandId, - const hidl_vec& data, - uint32_t resultMaxSize, - command_cb _hidl_cb) override; - Return setParameter( - const hidl_vec& parameter, const hidl_vec& value) override; - Return getParameter( - const hidl_vec& parameter, - uint32_t valueMaxSize, - getParameter_cb _hidl_cb) override; - Return getSupportedConfigsForFeature( - uint32_t featureId, - uint32_t maxConfigs, - uint32_t configSize, - getSupportedConfigsForFeature_cb _hidl_cb) override; - Return getCurrentConfigForFeature( - uint32_t featureId, - uint32_t configSize, - getCurrentConfigForFeature_cb _hidl_cb) override; - Return setCurrentConfigForFeature( - uint32_t featureId, const hidl_vec& configData) override; - Return close() override; - - // Methods from ::android::hardware::audio::effect::V2_0::IAutomaticGainControlEffect follow. 
- Return setTargetLevel(int16_t targetLevelMb) override; - Return getTargetLevel(getTargetLevel_cb _hidl_cb) override; - Return setCompGain(int16_t compGainMb) override; - Return getCompGain(getCompGain_cb _hidl_cb) override; - Return setLimiterEnabled(bool enabled) override; - Return isLimiterEnabled(isLimiterEnabled_cb _hidl_cb) override; - Return setAllProperties( - const IAutomaticGainControlEffect::AllProperties& properties) override; - Return getAllProperties(getAllProperties_cb _hidl_cb) override; - - private: - sp mEffect; - - virtual ~AutomaticGainControlEffect(); - - void propertiesFromHal( - const t_agc_settings& halProperties, - IAutomaticGainControlEffect::AllProperties* properties); - void propertiesToHal( - const IAutomaticGainControlEffect::AllProperties& properties, - t_agc_settings* halProperties); -}; - -} // namespace implementation -} // namespace V2_0 -} // namespace effect -} // namespace audio -} // namespace hardware -} // namespace android +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION #endif // ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_AUTOMATICGAINCONTROLEFFECT_H diff --git a/audio/effect/2.0/default/BassBoostEffect.cpp b/audio/effect/2.0/default/BassBoostEffect.cpp index 8f35e5f0d7c317003066b90abbd7ccf77940fe8a..df9e892d6099c76ad88dee1076e6571122472a10 100644 --- a/audio/effect/2.0/default/BassBoostEffect.cpp +++ b/audio/effect/2.0/default/BassBoostEffect.cpp @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016 The Android Open Source Project + * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -15,181 +15,9 @@ */ #define LOG_TAG "BassBoost_HAL" -#include -#include #include "BassBoostEffect.h" -namespace android { -namespace hardware { -namespace audio { -namespace effect { -namespace V2_0 { -namespace implementation { - -BassBoostEffect::BassBoostEffect(effect_handle_t handle) - : mEffect(new Effect(handle)) { -} - -BassBoostEffect::~BassBoostEffect() {} - -// Methods from ::android::hardware::audio::effect::V2_0::IEffect follow. 
-Return BassBoostEffect::init() { - return mEffect->init(); -} - -Return BassBoostEffect::setConfig( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) { - return mEffect->setConfig(config, inputBufferProvider, outputBufferProvider); -} - -Return BassBoostEffect::reset() { - return mEffect->reset(); -} - -Return BassBoostEffect::enable() { - return mEffect->enable(); -} - -Return BassBoostEffect::disable() { - return mEffect->disable(); -} - -Return BassBoostEffect::setDevice(AudioDevice device) { - return mEffect->setDevice(device); -} - -Return BassBoostEffect::setAndGetVolume( - const hidl_vec& volumes, setAndGetVolume_cb _hidl_cb) { - return mEffect->setAndGetVolume(volumes, _hidl_cb); -} - -Return BassBoostEffect::volumeChangeNotification( - const hidl_vec& volumes) { - return mEffect->volumeChangeNotification(volumes); -} - -Return BassBoostEffect::setAudioMode(AudioMode mode) { - return mEffect->setAudioMode(mode); -} - -Return BassBoostEffect::setConfigReverse( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) { - return mEffect->setConfigReverse(config, inputBufferProvider, outputBufferProvider); -} - -Return BassBoostEffect::setInputDevice(AudioDevice device) { - return mEffect->setInputDevice(device); -} - -Return BassBoostEffect::getConfig(getConfig_cb _hidl_cb) { - return mEffect->getConfig(_hidl_cb); -} - -Return BassBoostEffect::getConfigReverse(getConfigReverse_cb _hidl_cb) { - return mEffect->getConfigReverse(_hidl_cb); -} - -Return BassBoostEffect::getSupportedAuxChannelsConfigs( - uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) { - return mEffect->getSupportedAuxChannelsConfigs(maxConfigs, _hidl_cb); -} - -Return BassBoostEffect::getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) { - return mEffect->getAuxChannelsConfig(_hidl_cb); -} - -Return BassBoostEffect::setAuxChannelsConfig( - const EffectAuxChannelsConfig& config) { - return mEffect->setAuxChannelsConfig(config); -} - -Return BassBoostEffect::setAudioSource(AudioSource source) { - return mEffect->setAudioSource(source); -} - -Return BassBoostEffect::offload(const EffectOffloadParameter& param) { - return mEffect->offload(param); -} - -Return BassBoostEffect::getDescriptor(getDescriptor_cb _hidl_cb) { - return mEffect->getDescriptor(_hidl_cb); -} - -Return BassBoostEffect::prepareForProcessing( - prepareForProcessing_cb _hidl_cb) { - return mEffect->prepareForProcessing(_hidl_cb); -} - -Return BassBoostEffect::setProcessBuffers( - const AudioBuffer& inBuffer, const AudioBuffer& outBuffer) { - return mEffect->setProcessBuffers(inBuffer, outBuffer); -} - -Return BassBoostEffect::command( - uint32_t commandId, - const hidl_vec& data, - uint32_t resultMaxSize, - command_cb _hidl_cb) { - return mEffect->command(commandId, data, resultMaxSize, _hidl_cb); -} - -Return BassBoostEffect::setParameter( - const hidl_vec& parameter, const hidl_vec& value) { - return mEffect->setParameter(parameter, value); -} - -Return BassBoostEffect::getParameter( - const hidl_vec& parameter, - uint32_t valueMaxSize, - getParameter_cb _hidl_cb) { - return mEffect->getParameter(parameter, valueMaxSize, _hidl_cb); -} - -Return BassBoostEffect::getSupportedConfigsForFeature( - uint32_t featureId, - uint32_t maxConfigs, - uint32_t configSize, - getSupportedConfigsForFeature_cb _hidl_cb) { - return mEffect->getSupportedConfigsForFeature(featureId, maxConfigs, configSize, _hidl_cb); -} - -Return 
BassBoostEffect::getCurrentConfigForFeature( - uint32_t featureId, - uint32_t configSize, - getCurrentConfigForFeature_cb _hidl_cb) { - return mEffect->getCurrentConfigForFeature(featureId, configSize, _hidl_cb); -} - -Return BassBoostEffect::setCurrentConfigForFeature( - uint32_t featureId, const hidl_vec& configData) { - return mEffect->setCurrentConfigForFeature(featureId, configData); -} - -Return BassBoostEffect::close() { - return mEffect->close(); -} - -// Methods from ::android::hardware::audio::effect::V2_0::IBassBoostEffect follow. -Return BassBoostEffect::isStrengthSupported(isStrengthSupported_cb _hidl_cb) { - return mEffect->getIntegerParam(BASSBOOST_PARAM_STRENGTH_SUPPORTED, _hidl_cb); -} - -Return BassBoostEffect::setStrength(uint16_t strength) { - return mEffect->setParam(BASSBOOST_PARAM_STRENGTH, strength); -} - -Return BassBoostEffect::getStrength(getStrength_cb _hidl_cb) { - return mEffect->getIntegerParam(BASSBOOST_PARAM_STRENGTH, _hidl_cb); -} - -} // namespace implementation -} // namespace V2_0 -} // namespace effect -} // namespace audio -} // namespace hardware -} // namespace android +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/effect/2.0/default/BassBoostEffect.h b/audio/effect/2.0/default/BassBoostEffect.h index 1e5053b3e4bc7d399ea28bde65eb12bb8a746ccc..83179e28efabf3a77a606f580613d7977836bc93 100644 --- a/audio/effect/2.0/default/BassBoostEffect.h +++ b/audio/effect/2.0/default/BassBoostEffect.h @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016 The Android Open Source Project + * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -18,101 +18,13 @@ #define ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_BASSBOOSTEFFECT_H #include -#include #include #include "Effect.h" -namespace android { -namespace hardware { -namespace audio { -namespace effect { -namespace V2_0 { -namespace implementation { - -using ::android::hardware::audio::effect::V2_0::IBassBoostEffect; -using ::android::hardware::audio::effect::V2_0::Result; -using ::android::hardware::Return; -using ::android::hardware::Void; -using ::android::hardware::hidl_vec; -using ::android::hardware::hidl_string; -using ::android::sp; - -struct BassBoostEffect : public IBassBoostEffect { - explicit BassBoostEffect(effect_handle_t handle); - - // Methods from ::android::hardware::audio::effect::V2_0::IEffect follow. 
- Return init() override; - Return setConfig( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) override; - Return reset() override; - Return enable() override; - Return disable() override; - Return setDevice(AudioDevice device) override; - Return setAndGetVolume( - const hidl_vec& volumes, setAndGetVolume_cb _hidl_cb) override; - Return volumeChangeNotification(const hidl_vec& volumes) override; - Return setAudioMode(AudioMode mode) override; - Return setConfigReverse( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) override; - Return setInputDevice(AudioDevice device) override; - Return getConfig(getConfig_cb _hidl_cb) override; - Return getConfigReverse(getConfigReverse_cb _hidl_cb) override; - Return getSupportedAuxChannelsConfigs( - uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) override; - Return getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) override; - Return setAuxChannelsConfig(const EffectAuxChannelsConfig& config) override; - Return setAudioSource(AudioSource source) override; - Return offload(const EffectOffloadParameter& param) override; - Return getDescriptor(getDescriptor_cb _hidl_cb) override; - Return prepareForProcessing(prepareForProcessing_cb _hidl_cb) override; - Return setProcessBuffers( - const AudioBuffer& inBuffer, const AudioBuffer& outBuffer) override; - Return command( - uint32_t commandId, - const hidl_vec& data, - uint32_t resultMaxSize, - command_cb _hidl_cb) override; - Return setParameter( - const hidl_vec& parameter, const hidl_vec& value) override; - Return getParameter( - const hidl_vec& parameter, - uint32_t valueMaxSize, - getParameter_cb _hidl_cb) override; - Return getSupportedConfigsForFeature( - uint32_t featureId, - uint32_t maxConfigs, - uint32_t configSize, - getSupportedConfigsForFeature_cb _hidl_cb) override; - Return getCurrentConfigForFeature( - uint32_t featureId, - uint32_t configSize, - getCurrentConfigForFeature_cb _hidl_cb) override; - Return setCurrentConfigForFeature( - uint32_t featureId, const hidl_vec& configData) override; - Return close() override; - - // Methods from ::android::hardware::audio::effect::V2_0::IBassBoostEffect follow. - Return isStrengthSupported(isStrengthSupported_cb _hidl_cb) override; - Return setStrength(uint16_t strength) override; - Return getStrength(getStrength_cb _hidl_cb) override; - - private: - sp mEffect; - - virtual ~BassBoostEffect(); -}; - -} // namespace implementation -} // namespace V2_0 -} // namespace effect -} // namespace audio -} // namespace hardware -} // namespace android +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION #endif // ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_BASSBOOSTEFFECT_H diff --git a/audio/effect/2.0/default/Conversions.cpp b/audio/effect/2.0/default/Conversions.cpp index e7d4c463880bdedf27c4f786d5934b9ddca6db29..b59752c982f45fae33e3e6261e08399caf66973c 100644 --- a/audio/effect/2.0/default/Conversions.cpp +++ b/audio/effect/2.0/default/Conversions.cpp @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016 The Android Open Source Project + * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,50 +14,11 @@ * limitations under the License. 
*/ -#include -#include - #include "Conversions.h" #include "HidlUtils.h" -namespace android { -namespace hardware { -namespace audio { -namespace effect { -namespace V2_0 { -namespace implementation { - -void effectDescriptorFromHal( - const effect_descriptor_t& halDescriptor, EffectDescriptor* descriptor) { - HidlUtils::uuidFromHal(halDescriptor.type, &descriptor->type); - HidlUtils::uuidFromHal(halDescriptor.uuid, &descriptor->uuid); - descriptor->flags = EffectFlags(halDescriptor.flags); - descriptor->cpuLoad = halDescriptor.cpuLoad; - descriptor->memoryUsage = halDescriptor.memoryUsage; - memcpy(descriptor->name.data(), halDescriptor.name, descriptor->name.size()); - memcpy(descriptor->implementor.data(), - halDescriptor.implementor, descriptor->implementor.size()); -} - -std::string uuidToString(const effect_uuid_t& halUuid) { - char str[64]; - snprintf(str, sizeof(str), "%08x-%04x-%04x-%04x-%02x%02x%02x%02x%02x%02x", - halUuid.timeLow, - halUuid.timeMid, - halUuid.timeHiAndVersion, - halUuid.clockSeq, - halUuid.node[0], - halUuid.node[1], - halUuid.node[2], - halUuid.node[3], - halUuid.node[4], - halUuid.node[5]); - return str; -} +using ::android::hardware::audio::common::V2_0::HidlUtils; -} // namespace implementation -} // namespace V2_0 -} // namespace effect -} // namespace audio -} // namespace hardware -} // namespace android +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/effect/2.0/default/Conversions.h b/audio/effect/2.0/default/Conversions.h index 7cef362c35d3807b82fda38f93fdda86e3c6ef85..94c7f66ea685b54a37c1feb9c6c949a13247e7ff 100644 --- a/audio/effect/2.0/default/Conversions.h +++ b/audio/effect/2.0/default/Conversions.h @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016 The Android Open Source Project + * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,32 +14,13 @@ * limitations under the License. */ -#ifndef android_hardware_audio_effect_V2_0_Conversions_H_ -#define android_hardware_audio_effect_V2_0_Conversions_H_ - -#include +#ifndef ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_CONVERSIONS_H_ +#define ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_CONVERSIONS_H_ #include -#include - -namespace android { -namespace hardware { -namespace audio { -namespace effect { -namespace V2_0 { -namespace implementation { - -using ::android::hardware::audio::effect::V2_0::EffectDescriptor; - -void effectDescriptorFromHal( - const effect_descriptor_t& halDescriptor, EffectDescriptor* descriptor); -std::string uuidToString(const effect_uuid_t& halUuid); -} // namespace implementation -} // namespace V2_0 -} // namespace effect -} // namespace audio -} // namespace hardware -} // namespace android +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION -#endif // android_hardware_audio_effect_V2_0_Conversions_H_ +#endif // ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_CONVERSIONS_H_ diff --git a/audio/effect/2.0/default/DownmixEffect.cpp b/audio/effect/2.0/default/DownmixEffect.cpp index 92f15bd0d1d1eb85f472580c967a5217523a3d60..1a51e13641011dc39ff4449b162ba962567121e9 100644 --- a/audio/effect/2.0/default/DownmixEffect.cpp +++ b/audio/effect/2.0/default/DownmixEffect.cpp @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016 The Android Open Source Project + * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -15,180 +15,9 @@ */ #define LOG_TAG "Downmix_HAL" -#include -#include #include "DownmixEffect.h" -namespace android { -namespace hardware { -namespace audio { -namespace effect { -namespace V2_0 { -namespace implementation { - -DownmixEffect::DownmixEffect(effect_handle_t handle) - : mEffect(new Effect(handle)) { -} - -DownmixEffect::~DownmixEffect() {} - -// Methods from ::android::hardware::audio::effect::V2_0::IEffect follow. -Return DownmixEffect::init() { - return mEffect->init(); -} - -Return DownmixEffect::setConfig( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) { - return mEffect->setConfig(config, inputBufferProvider, outputBufferProvider); -} - -Return DownmixEffect::reset() { - return mEffect->reset(); -} - -Return DownmixEffect::enable() { - return mEffect->enable(); -} - -Return DownmixEffect::disable() { - return mEffect->disable(); -} - -Return DownmixEffect::setDevice(AudioDevice device) { - return mEffect->setDevice(device); -} - -Return DownmixEffect::setAndGetVolume( - const hidl_vec& volumes, setAndGetVolume_cb _hidl_cb) { - return mEffect->setAndGetVolume(volumes, _hidl_cb); -} - -Return DownmixEffect::volumeChangeNotification( - const hidl_vec& volumes) { - return mEffect->volumeChangeNotification(volumes); -} - -Return DownmixEffect::setAudioMode(AudioMode mode) { - return mEffect->setAudioMode(mode); -} - -Return DownmixEffect::setConfigReverse( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) { - return mEffect->setConfigReverse(config, inputBufferProvider, outputBufferProvider); -} - -Return DownmixEffect::setInputDevice(AudioDevice device) { - return mEffect->setInputDevice(device); -} - -Return DownmixEffect::getConfig(getConfig_cb _hidl_cb) { - return mEffect->getConfig(_hidl_cb); -} - -Return DownmixEffect::getConfigReverse(getConfigReverse_cb _hidl_cb) { - return mEffect->getConfigReverse(_hidl_cb); -} - -Return DownmixEffect::getSupportedAuxChannelsConfigs( - uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) { - return mEffect->getSupportedAuxChannelsConfigs(maxConfigs, _hidl_cb); -} - -Return DownmixEffect::getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) { - return mEffect->getAuxChannelsConfig(_hidl_cb); -} - -Return DownmixEffect::setAuxChannelsConfig( - const EffectAuxChannelsConfig& config) { - return mEffect->setAuxChannelsConfig(config); -} - -Return DownmixEffect::setAudioSource(AudioSource source) { - return mEffect->setAudioSource(source); -} - -Return DownmixEffect::offload(const EffectOffloadParameter& param) { - return mEffect->offload(param); -} - -Return DownmixEffect::getDescriptor(getDescriptor_cb _hidl_cb) { - return mEffect->getDescriptor(_hidl_cb); -} - -Return DownmixEffect::prepareForProcessing( - prepareForProcessing_cb _hidl_cb) { - return mEffect->prepareForProcessing(_hidl_cb); -} - -Return DownmixEffect::setProcessBuffers( - const AudioBuffer& inBuffer, const AudioBuffer& outBuffer) { - return mEffect->setProcessBuffers(inBuffer, outBuffer); -} - -Return DownmixEffect::command( - uint32_t commandId, - const hidl_vec& data, - uint32_t resultMaxSize, - command_cb _hidl_cb) { - return mEffect->command(commandId, data, resultMaxSize, _hidl_cb); -} - -Return DownmixEffect::setParameter( - const hidl_vec& parameter, const hidl_vec& value) { - return mEffect->setParameter(parameter, value); -} - -Return DownmixEffect::getParameter( - const hidl_vec& parameter, - uint32_t valueMaxSize, - getParameter_cb _hidl_cb) { - 
return mEffect->getParameter(parameter, valueMaxSize, _hidl_cb); -} - -Return DownmixEffect::getSupportedConfigsForFeature( - uint32_t featureId, - uint32_t maxConfigs, - uint32_t configSize, - getSupportedConfigsForFeature_cb _hidl_cb) { - return mEffect->getSupportedConfigsForFeature(featureId, maxConfigs, configSize, _hidl_cb); -} - -Return DownmixEffect::getCurrentConfigForFeature( - uint32_t featureId, - uint32_t configSize, - getCurrentConfigForFeature_cb _hidl_cb) { - return mEffect->getCurrentConfigForFeature(featureId, configSize, _hidl_cb); -} - -Return DownmixEffect::setCurrentConfigForFeature( - uint32_t featureId, const hidl_vec& configData) { - return mEffect->setCurrentConfigForFeature(featureId, configData); -} - -Return DownmixEffect::close() { - return mEffect->close(); -} - -// Methods from ::android::hardware::audio::effect::V2_0::IDownmixEffect follow. -Return DownmixEffect::setType(IDownmixEffect::Type preset) { - return mEffect->setParam(DOWNMIX_PARAM_TYPE, static_cast(preset)); -} - -Return DownmixEffect::getType(getType_cb _hidl_cb) { - downmix_type_t halPreset = DOWNMIX_TYPE_INVALID; - Result retval = mEffect->getParam(DOWNMIX_PARAM_TYPE, halPreset); - _hidl_cb(retval, Type(halPreset)); - return Void(); -} - -} // namespace implementation -} // namespace V2_0 -} // namespace effect -} // namespace audio -} // namespace hardware -} // namespace android +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/effect/2.0/default/DownmixEffect.h b/audio/effect/2.0/default/DownmixEffect.h index 125f34deca80d632c9a485b982bf6e98ae231324..6dbbb32836c9d86480bd34dcaaed134b1bac2fde 100644 --- a/audio/effect/2.0/default/DownmixEffect.h +++ b/audio/effect/2.0/default/DownmixEffect.h @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016 The Android Open Source Project + * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -18,100 +18,11 @@ #define ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_DOWNMIXEFFECT_H #include -#include - -#include #include "Effect.h" -namespace android { -namespace hardware { -namespace audio { -namespace effect { -namespace V2_0 { -namespace implementation { - -using ::android::hardware::audio::effect::V2_0::IDownmixEffect; -using ::android::hardware::audio::effect::V2_0::Result; -using ::android::hardware::Return; -using ::android::hardware::Void; -using ::android::hardware::hidl_vec; -using ::android::hardware::hidl_string; -using ::android::sp; - -struct DownmixEffect : public IDownmixEffect { - explicit DownmixEffect(effect_handle_t handle); - - // Methods from ::android::hardware::audio::effect::V2_0::IEffect follow. 
- Return init() override; - Return setConfig( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) override; - Return reset() override; - Return enable() override; - Return disable() override; - Return setDevice(AudioDevice device) override; - Return setAndGetVolume( - const hidl_vec& volumes, setAndGetVolume_cb _hidl_cb) override; - Return volumeChangeNotification(const hidl_vec& volumes) override; - Return setAudioMode(AudioMode mode) override; - Return setConfigReverse( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) override; - Return setInputDevice(AudioDevice device) override; - Return getConfig(getConfig_cb _hidl_cb) override; - Return getConfigReverse(getConfigReverse_cb _hidl_cb) override; - Return getSupportedAuxChannelsConfigs( - uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) override; - Return getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) override; - Return setAuxChannelsConfig(const EffectAuxChannelsConfig& config) override; - Return setAudioSource(AudioSource source) override; - Return offload(const EffectOffloadParameter& param) override; - Return getDescriptor(getDescriptor_cb _hidl_cb) override; - Return prepareForProcessing(prepareForProcessing_cb _hidl_cb) override; - Return setProcessBuffers( - const AudioBuffer& inBuffer, const AudioBuffer& outBuffer) override; - Return command( - uint32_t commandId, - const hidl_vec& data, - uint32_t resultMaxSize, - command_cb _hidl_cb) override; - Return setParameter( - const hidl_vec& parameter, const hidl_vec& value) override; - Return getParameter( - const hidl_vec& parameter, - uint32_t valueMaxSize, - getParameter_cb _hidl_cb) override; - Return getSupportedConfigsForFeature( - uint32_t featureId, - uint32_t maxConfigs, - uint32_t configSize, - getSupportedConfigsForFeature_cb _hidl_cb) override; - Return getCurrentConfigForFeature( - uint32_t featureId, - uint32_t configSize, - getCurrentConfigForFeature_cb _hidl_cb) override; - Return setCurrentConfigForFeature( - uint32_t featureId, const hidl_vec& configData) override; - Return close() override; - - // Methods from ::android::hardware::audio::effect::V2_0::IDownmixEffect follow. - Return setType(IDownmixEffect::Type preset) override; - Return getType(getType_cb _hidl_cb) override; - - private: - sp mEffect; - - virtual ~DownmixEffect(); -}; - -} // namespace implementation -} // namespace V2_0 -} // namespace effect -} // namespace audio -} // namespace hardware -} // namespace android +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION #endif // ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_DOWNMIXEFFECT_H diff --git a/audio/effect/2.0/default/Effect.cpp b/audio/effect/2.0/default/Effect.cpp index 184607e5dfabbed2268b266409d2cbc935dd69c7..e234e520b8dc5cbe07ca5649abe4fb19d9df74ca 100644 --- a/audio/effect/2.0/default/Effect.cpp +++ b/audio/effect/2.0/default/Effect.cpp @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016 The Android Open Source Project + * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -19,746 +19,10 @@ #define LOG_TAG "EffectHAL" #define ATRACE_TAG ATRACE_TAG_AUDIO -#include -#include -#include - #include "Conversions.h" #include "Effect.h" -#include "EffectMap.h" - -namespace android { -namespace hardware { -namespace audio { -namespace effect { -namespace V2_0 { -namespace implementation { - -using ::android::hardware::audio::common::V2_0::AudioChannelMask; -using ::android::hardware::audio::common::V2_0::AudioFormat; -using ::android::hardware::audio::effect::V2_0::MessageQueueFlagBits; - -namespace { - -class ProcessThread : public Thread { - public: - // ProcessThread's lifespan never exceeds Effect's lifespan. - ProcessThread(std::atomic* stop, - effect_handle_t effect, - std::atomic* inBuffer, - std::atomic* outBuffer, - Effect::StatusMQ* statusMQ, - EventFlag* efGroup) - : Thread(false /*canCallJava*/), - mStop(stop), - mEffect(effect), - mHasProcessReverse((*mEffect)->process_reverse != NULL), - mInBuffer(inBuffer), - mOutBuffer(outBuffer), - mStatusMQ(statusMQ), - mEfGroup(efGroup) { - } - virtual ~ProcessThread() {} - - private: - std::atomic* mStop; - effect_handle_t mEffect; - bool mHasProcessReverse; - std::atomic* mInBuffer; - std::atomic* mOutBuffer; - Effect::StatusMQ* mStatusMQ; - EventFlag* mEfGroup; - - bool threadLoop() override; -}; - -bool ProcessThread::threadLoop() { - // This implementation doesn't return control back to the Thread until it decides to stop, - // as the Thread uses mutexes, and this can lead to priority inversion. - while(!std::atomic_load_explicit(mStop, std::memory_order_acquire)) { - uint32_t efState = 0; - mEfGroup->wait(static_cast(MessageQueueFlagBits::REQUEST_PROCESS_ALL), &efState); - if (!(efState & static_cast(MessageQueueFlagBits::REQUEST_PROCESS_ALL)) - || (efState & static_cast(MessageQueueFlagBits::REQUEST_QUIT))) { - continue; // Nothing to do or time to quit. - } - Result retval = Result::OK; - if (efState & static_cast(MessageQueueFlagBits::REQUEST_PROCESS_REVERSE) - && !mHasProcessReverse) { - retval = Result::NOT_SUPPORTED; - } - - if (retval == Result::OK) { - // affects both buffer pointers and their contents. 
- std::atomic_thread_fence(std::memory_order_acquire); - int32_t processResult; - audio_buffer_t* inBuffer = - std::atomic_load_explicit(mInBuffer, std::memory_order_relaxed); - audio_buffer_t* outBuffer = - std::atomic_load_explicit(mOutBuffer, std::memory_order_relaxed); - if (inBuffer != nullptr && outBuffer != nullptr) { - if (efState & static_cast(MessageQueueFlagBits::REQUEST_PROCESS)) { - processResult = (*mEffect)->process(mEffect, inBuffer, outBuffer); - } else { - processResult = (*mEffect)->process_reverse(mEffect, inBuffer, outBuffer); - } - std::atomic_thread_fence(std::memory_order_release); - } else { - ALOGE("processing buffers were not set before calling 'process'"); - processResult = -ENODEV; - } - switch(processResult) { - case 0: retval = Result::OK; break; - case -ENODATA: retval = Result::INVALID_STATE; break; - case -EINVAL: retval = Result::INVALID_ARGUMENTS; break; - default: retval = Result::NOT_INITIALIZED; - } - } - if (!mStatusMQ->write(&retval)) { - ALOGW("status message queue write failed"); - } - mEfGroup->wake(static_cast(MessageQueueFlagBits::DONE_PROCESSING)); - } - - return false; -} - -} // namespace - -// static -const char *Effect::sContextResultOfCommand = "returned status"; -const char *Effect::sContextCallToCommand = "error"; -const char *Effect::sContextCallFunction = sContextCallToCommand; - -Effect::Effect(effect_handle_t handle) - : mIsClosed(false), mHandle(handle), mEfGroup(nullptr), mStopProcessThread(false) { -} - -Effect::~Effect() { - ATRACE_CALL(); - close(); - if (mProcessThread.get()) { - ATRACE_NAME("mProcessThread->join"); - status_t status = mProcessThread->join(); - ALOGE_IF(status, "processing thread exit error: %s", strerror(-status)); - } - if (mEfGroup) { - status_t status = EventFlag::deleteEventFlag(&mEfGroup); - ALOGE_IF(status, "processing MQ event flag deletion error: %s", strerror(-status)); - } - mInBuffer.clear(); - mOutBuffer.clear(); - int status = EffectRelease(mHandle); - ALOGW_IF(status, "Error releasing effect %p: %s", mHandle, strerror(-status)); - EffectMap::getInstance().remove(mHandle); - mHandle = 0; -} - -// static -template size_t Effect::alignedSizeIn(size_t s) { - return (s + sizeof(T) - 1) / sizeof(T); -} - -// static -template std::unique_ptr Effect::hidlVecToHal( - const hidl_vec& vec, uint32_t* halDataSize) { - // Due to bugs in HAL, they may attempt to write into the provided - // input buffer. The original binder buffer is r/o, thus it is needed - // to create a r/w version. 
- *halDataSize = vec.size() * sizeof(T); - std::unique_ptr halData(new uint8_t[*halDataSize]); - memcpy(&halData[0], &vec[0], *halDataSize); - return halData; -} - -// static -void Effect::effectAuxChannelsConfigFromHal( - const channel_config_t& halConfig, EffectAuxChannelsConfig* config) { - config->mainChannels = AudioChannelMask(halConfig.main_channels); - config->auxChannels = AudioChannelMask(halConfig.aux_channels); -} - -// static -void Effect::effectAuxChannelsConfigToHal( - const EffectAuxChannelsConfig& config, channel_config_t* halConfig) { - halConfig->main_channels = static_cast(config.mainChannels); - halConfig->aux_channels = static_cast(config.auxChannels); -} - -// static -void Effect::effectBufferConfigFromHal( - const buffer_config_t& halConfig, EffectBufferConfig* config) { - config->buffer.id = 0; - config->buffer.frameCount = 0; - config->samplingRateHz = halConfig.samplingRate; - config->channels = AudioChannelMask(halConfig.channels); - config->format = AudioFormat(halConfig.format); - config->accessMode = EffectBufferAccess(halConfig.accessMode); - config->mask = EffectConfigParameters(halConfig.mask); -} - -// static -void Effect::effectBufferConfigToHal(const EffectBufferConfig& config, buffer_config_t* halConfig) { - // Note: setting the buffers directly is considered obsolete. They need to be set - // using 'setProcessBuffers'. - halConfig->buffer.frameCount = 0; - halConfig->buffer.raw = NULL; - halConfig->samplingRate = config.samplingRateHz; - halConfig->channels = static_cast(config.channels); - // Note: The framework code does not use BP. - halConfig->bufferProvider.cookie = NULL; - halConfig->bufferProvider.getBuffer = NULL; - halConfig->bufferProvider.releaseBuffer = NULL; - halConfig->format = static_cast(config.format); - halConfig->accessMode = static_cast(config.accessMode); - halConfig->mask = static_cast(config.mask); -} - -// static -void Effect::effectConfigFromHal(const effect_config_t& halConfig, EffectConfig* config) { - effectBufferConfigFromHal(halConfig.inputCfg, &config->inputCfg); - effectBufferConfigFromHal(halConfig.outputCfg, &config->outputCfg); -} - -// static -void Effect::effectConfigToHal(const EffectConfig& config, effect_config_t* halConfig) { - effectBufferConfigToHal(config.inputCfg, &halConfig->inputCfg); - effectBufferConfigToHal(config.outputCfg, &halConfig->outputCfg); -} - -// static -void Effect::effectOffloadParamToHal( - const EffectOffloadParameter& offload, effect_offload_param_t* halOffload) { - halOffload->isOffload = offload.isOffload; - halOffload->ioHandle = offload.ioHandle; -} - -// static -std::vector Effect::parameterToHal( - uint32_t paramSize, - const void* paramData, - uint32_t valueSize, - const void** valueData) { - size_t valueOffsetFromData = alignedSizeIn(paramSize) * sizeof(uint32_t); - size_t halParamBufferSize = sizeof(effect_param_t) + valueOffsetFromData + valueSize; - std::vector halParamBuffer(halParamBufferSize, 0); - effect_param_t *halParam = reinterpret_cast(&halParamBuffer[0]); - halParam->psize = paramSize; - halParam->vsize = valueSize; - memcpy(halParam->data, paramData, paramSize); - if (valueData) { - if (*valueData) { - // Value data is provided. - memcpy(halParam->data + valueOffsetFromData, *valueData, valueSize); - } else { - // The caller needs the pointer to the value data location. 
- *valueData = halParam->data + valueOffsetFromData; - } - } - return halParamBuffer; -} - -Result Effect::analyzeCommandStatus(const char* commandName, const char* context, status_t status) { - return analyzeStatus("command", commandName, context, status); -} - -Result Effect::analyzeStatus( - const char* funcName, - const char* subFuncName, - const char* contextDescription, - status_t status) { - if (status != OK) { - ALOGW("Effect %p %s %s %s: %s", - mHandle, funcName, subFuncName, contextDescription, strerror(-status)); - } - switch (status) { - case OK: return Result::OK; - case -EINVAL: return Result::INVALID_ARGUMENTS; - case -ENODATA: return Result::INVALID_STATE; - case -ENODEV: return Result::NOT_INITIALIZED; - case -ENOMEM: return Result::RESULT_TOO_BIG; - case -ENOSYS: return Result::NOT_SUPPORTED; - default: return Result::INVALID_STATE; - } -} - -void Effect::getConfigImpl(int commandCode, const char* commandName, GetConfigCallback cb) { - uint32_t halResultSize = sizeof(effect_config_t); - effect_config_t halConfig{}; - status_t status = (*mHandle)->command( - mHandle, commandCode, 0, NULL, &halResultSize, &halConfig); - EffectConfig config; - if (status == OK) { - effectConfigFromHal(halConfig, &config); - } - cb(analyzeCommandStatus(commandName, sContextCallToCommand, status), config); -} - -Result Effect::getCurrentConfigImpl( - uint32_t featureId, uint32_t configSize, GetCurrentConfigSuccessCallback onSuccess) { - uint32_t halCmd = featureId; - uint32_t halResult[alignedSizeIn(sizeof(uint32_t) + configSize)]; - memset(halResult, 0, sizeof(halResult)); - uint32_t halResultSize = 0; - return sendCommandReturningStatusAndData( - EFFECT_CMD_GET_FEATURE_CONFIG, "GET_FEATURE_CONFIG", - sizeof(uint32_t), &halCmd, - &halResultSize, halResult, - sizeof(uint32_t), - [&]{ onSuccess(&halResult[1]); }); -} - -Result Effect::getParameterImpl( - uint32_t paramSize, - const void* paramData, - uint32_t requestValueSize, - uint32_t replyValueSize, - GetParameterSuccessCallback onSuccess) { - // As it is unknown what method HAL uses for copying the provided parameter data, - // it is safer to make sure that input and output buffers do not overlap. 
- std::vector halCmdBuffer = - parameterToHal(paramSize, paramData, requestValueSize, nullptr); - const void *valueData = nullptr; - std::vector halParamBuffer = - parameterToHal(paramSize, paramData, replyValueSize, &valueData); - uint32_t halParamBufferSize = halParamBuffer.size(); - - return sendCommandReturningStatusAndData( - EFFECT_CMD_GET_PARAM, "GET_PARAM", - halCmdBuffer.size(), &halCmdBuffer[0], - &halParamBufferSize, &halParamBuffer[0], - sizeof(effect_param_t), - [&]{ - effect_param_t *halParam = reinterpret_cast(&halParamBuffer[0]); - onSuccess(halParam->vsize, valueData); - }); -} - -Result Effect::getSupportedConfigsImpl( - uint32_t featureId, - uint32_t maxConfigs, - uint32_t configSize, - GetSupportedConfigsSuccessCallback onSuccess) { - uint32_t halCmd[2] = { featureId, maxConfigs }; - uint32_t halResultSize = 2 * sizeof(uint32_t) + maxConfigs * sizeof(configSize); - uint8_t halResult[halResultSize]; - memset(&halResult[0], 0, halResultSize); - return sendCommandReturningStatusAndData( - EFFECT_CMD_GET_FEATURE_SUPPORTED_CONFIGS, "GET_FEATURE_SUPPORTED_CONFIGS", - sizeof(halCmd), halCmd, - &halResultSize, &halResult[0], - 2 * sizeof(uint32_t), - [&]{ - uint32_t *halResult32 = reinterpret_cast(&halResult[0]); - uint32_t supportedConfigs = *(++halResult32); // skip status field - if (supportedConfigs > maxConfigs) supportedConfigs = maxConfigs; - onSuccess(supportedConfigs, ++halResult32); - }); -} - -Return Effect::prepareForProcessing(prepareForProcessing_cb _hidl_cb) { - status_t status; - // Create message queue. - if (mStatusMQ) { - ALOGE("the client attempts to call prepareForProcessing_cb twice"); - _hidl_cb(Result::INVALID_STATE, StatusMQ::Descriptor()); - return Void(); - } - std::unique_ptr tempStatusMQ(new StatusMQ(1, true /*EventFlag*/)); - if (!tempStatusMQ->isValid()) { - ALOGE_IF(!tempStatusMQ->isValid(), "status MQ is invalid"); - _hidl_cb(Result::INVALID_ARGUMENTS, StatusMQ::Descriptor()); - return Void(); - } - status = EventFlag::createEventFlag(tempStatusMQ->getEventFlagWord(), &mEfGroup); - if (status != OK || !mEfGroup) { - ALOGE("failed creating event flag for status MQ: %s", strerror(-status)); - _hidl_cb(Result::INVALID_ARGUMENTS, StatusMQ::Descriptor()); - return Void(); - } - - // Create and launch the thread. - mProcessThread = new ProcessThread( - &mStopProcessThread, - mHandle, - &mHalInBufferPtr, - &mHalOutBufferPtr, - tempStatusMQ.get(), - mEfGroup); - status = mProcessThread->run("effect", PRIORITY_URGENT_AUDIO); - if (status != OK) { - ALOGW("failed to start effect processing thread: %s", strerror(-status)); - _hidl_cb(Result::INVALID_ARGUMENTS, MQDescriptorSync()); - return Void(); - } - - mStatusMQ = std::move(tempStatusMQ); - _hidl_cb(Result::OK, *mStatusMQ->getDesc()); - return Void(); -} - -Return Effect::setProcessBuffers( - const AudioBuffer& inBuffer, const AudioBuffer& outBuffer) { - AudioBufferManager& manager = AudioBufferManager::getInstance(); - sp tempInBuffer, tempOutBuffer; - if (!manager.wrap(inBuffer, &tempInBuffer)) { - ALOGE("Could not map memory of the input buffer"); - return Result::INVALID_ARGUMENTS; - } - if (!manager.wrap(outBuffer, &tempOutBuffer)) { - ALOGE("Could not map memory of the output buffer"); - return Result::INVALID_ARGUMENTS; - } - mInBuffer = tempInBuffer; - mOutBuffer = tempOutBuffer; - // The processing thread only reads these pointers after waking up by an event flag, - // so it's OK to update the pair non-atomically. 
- mHalInBufferPtr.store(mInBuffer->getHalBuffer(), std::memory_order_release); - mHalOutBufferPtr.store(mOutBuffer->getHalBuffer(), std::memory_order_release); - return Result::OK; -} - -Result Effect::sendCommand(int commandCode, const char* commandName) { - return sendCommand(commandCode, commandName, 0, NULL); -} - -Result Effect::sendCommand( - int commandCode, const char* commandName, uint32_t size, void* data) { - status_t status = (*mHandle)->command(mHandle, commandCode, size, data, 0, NULL); - return analyzeCommandStatus(commandName, sContextCallToCommand, status); -} - -Result Effect::sendCommandReturningData( - int commandCode, const char* commandName, - uint32_t* replySize, void* replyData) { - return sendCommandReturningData(commandCode, commandName, 0, NULL, replySize, replyData); -} - -Result Effect::sendCommandReturningData( - int commandCode, const char* commandName, - uint32_t size, void* data, - uint32_t* replySize, void* replyData) { - uint32_t expectedReplySize = *replySize; - status_t status = (*mHandle)->command(mHandle, commandCode, size, data, replySize, replyData); - if (status == OK && *replySize != expectedReplySize) { - status = -ENODATA; - } - return analyzeCommandStatus(commandName, sContextCallToCommand, status); -} - -Result Effect::sendCommandReturningStatus(int commandCode, const char* commandName) { - return sendCommandReturningStatus(commandCode, commandName, 0, NULL); -} - -Result Effect::sendCommandReturningStatus( - int commandCode, const char* commandName, uint32_t size, void* data) { - uint32_t replyCmdStatus; - uint32_t replySize = sizeof(uint32_t); - return sendCommandReturningStatusAndData( - commandCode, commandName, size, data, &replySize, &replyCmdStatus, replySize, []{}); -} - -Result Effect::sendCommandReturningStatusAndData( - int commandCode, const char* commandName, - uint32_t size, void* data, - uint32_t* replySize, void* replyData, - uint32_t minReplySize, - CommandSuccessCallback onSuccess) { - status_t status = - (*mHandle)->command(mHandle, commandCode, size, data, replySize, replyData); - Result retval; - if (status == OK && minReplySize >= sizeof(uint32_t) && *replySize >= minReplySize) { - uint32_t commandStatus = *reinterpret_cast(replyData); - retval = analyzeCommandStatus(commandName, sContextResultOfCommand, commandStatus); - if (commandStatus == OK) { - onSuccess(); - } - } else { - retval = analyzeCommandStatus(commandName, sContextCallToCommand, status); - } - return retval; -} - -Result Effect::setConfigImpl( - int commandCode, const char* commandName, - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) { - effect_config_t halConfig; - effectConfigToHal(config, &halConfig); - if (inputBufferProvider != 0) { - LOG_FATAL("Using input buffer provider is not supported"); - } - if (outputBufferProvider != 0) { - LOG_FATAL("Using output buffer provider is not supported"); - } - return sendCommandReturningStatus( - commandCode, commandName, sizeof(effect_config_t), &halConfig); -} - - -Result Effect::setParameterImpl( - uint32_t paramSize, const void* paramData, uint32_t valueSize, const void* valueData) { - std::vector halParamBuffer = parameterToHal( - paramSize, paramData, valueSize, &valueData); - return sendCommandReturningStatus( - EFFECT_CMD_SET_PARAM, "SET_PARAM", halParamBuffer.size(), &halParamBuffer[0]); -} - -// Methods from ::android::hardware::audio::effect::V2_0::IEffect follow. 
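setParameterImpl above ultimately serializes everything through parameterToHal. As a rough, self-contained sketch of that blob layout (written for this note, not the HAL helper itself, and assuming the conventional effect_param_t framing of status, psize, vsize, padded parameter data, then value data):

#include <cstdint>
#include <cstring>
#include <vector>

// Illustrative only: pack one parameter id and its value using the framing the
// deleted parameterToHal helper is built around: a 32-bit status word that the
// effect library fills in, psize, vsize, the parameter data padded to a 32-bit
// boundary, and finally the value data.
static std::vector<uint8_t> packParameter(uint32_t paramId, const void* value,
                                          uint32_t valueSize) {
    const uint32_t psize = sizeof(paramId);
    const uint32_t paddedPsize =
            ((psize + sizeof(uint32_t) - 1) / sizeof(uint32_t)) * sizeof(uint32_t);
    std::vector<uint8_t> blob(3 * sizeof(uint32_t) + paddedPsize + valueSize, 0);
    uint8_t* p = blob.data();
    // p[0..3] stays zero: the status field is written back by the effect library.
    std::memcpy(p + 1 * sizeof(uint32_t), &psize, sizeof(psize));
    std::memcpy(p + 2 * sizeof(uint32_t), &valueSize, sizeof(valueSize));
    std::memcpy(p + 3 * sizeof(uint32_t), &paramId, psize);
    std::memcpy(p + 3 * sizeof(uint32_t) + paddedPsize, value, valueSize);
    return blob;
}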
-Return Effect::init() { - return sendCommandReturningStatus(EFFECT_CMD_INIT, "INIT"); -} - -Return Effect::setConfig( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) { - return setConfigImpl( - EFFECT_CMD_SET_CONFIG, "SET_CONFIG", config, inputBufferProvider, outputBufferProvider); -} - -Return Effect::reset() { - return sendCommand(EFFECT_CMD_RESET, "RESET"); -} - -Return Effect::enable() { - return sendCommandReturningStatus(EFFECT_CMD_ENABLE, "ENABLE"); -} - -Return Effect::disable() { - return sendCommandReturningStatus(EFFECT_CMD_DISABLE, "DISABLE"); -} - -Return Effect::setDevice(AudioDevice device) { - uint32_t halDevice = static_cast(device); - return sendCommand(EFFECT_CMD_SET_DEVICE, "SET_DEVICE", sizeof(uint32_t), &halDevice); -} - -Return Effect::setAndGetVolume( - const hidl_vec& volumes, setAndGetVolume_cb _hidl_cb) { - uint32_t halDataSize; - std::unique_ptr halData = hidlVecToHal(volumes, &halDataSize); - uint32_t halResultSize = halDataSize; - uint32_t halResult[volumes.size()]; - Result retval = sendCommandReturningData( - EFFECT_CMD_SET_VOLUME, "SET_VOLUME", - halDataSize, &halData[0], - &halResultSize, halResult); - hidl_vec result; - if (retval == Result::OK) { - result.setToExternal(&halResult[0], halResultSize); - } - _hidl_cb(retval, result); - return Void(); -} - -Return Effect::volumeChangeNotification(const hidl_vec& volumes) { - uint32_t halDataSize; - std::unique_ptr halData = hidlVecToHal(volumes, &halDataSize); - return sendCommand( - EFFECT_CMD_SET_VOLUME, "SET_VOLUME", - halDataSize, &halData[0]); -} - -Return Effect::setAudioMode(AudioMode mode) { - uint32_t halMode = static_cast(mode); - return sendCommand( - EFFECT_CMD_SET_AUDIO_MODE, "SET_AUDIO_MODE", sizeof(uint32_t), &halMode); -} - -Return Effect::setConfigReverse( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) { - return setConfigImpl(EFFECT_CMD_SET_CONFIG_REVERSE, "SET_CONFIG_REVERSE", - config, inputBufferProvider, outputBufferProvider); -} - -Return Effect::setInputDevice(AudioDevice device) { - uint32_t halDevice = static_cast(device); - return sendCommand( - EFFECT_CMD_SET_INPUT_DEVICE, "SET_INPUT_DEVICE", sizeof(uint32_t), &halDevice); -} - -Return Effect::getConfig(getConfig_cb _hidl_cb) { - getConfigImpl(EFFECT_CMD_GET_CONFIG, "GET_CONFIG", _hidl_cb); - return Void(); -} - -Return Effect::getConfigReverse(getConfigReverse_cb _hidl_cb) { - getConfigImpl(EFFECT_CMD_GET_CONFIG_REVERSE, "GET_CONFIG_REVERSE", _hidl_cb); - return Void(); -} - -Return Effect::getSupportedAuxChannelsConfigs( - uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) { - hidl_vec result; - Result retval = getSupportedConfigsImpl( - EFFECT_FEATURE_AUX_CHANNELS, - maxConfigs, - sizeof(channel_config_t), - [&] (uint32_t supportedConfigs, void* configsData) { - result.resize(supportedConfigs); - channel_config_t *config = reinterpret_cast(configsData); - for (size_t i = 0; i < result.size(); ++i) { - effectAuxChannelsConfigFromHal(*config++, &result[i]); - } - }); - _hidl_cb(retval, result); - return Void(); -} - -Return Effect::getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) { - uint32_t halResult[alignedSizeIn(sizeof(uint32_t) + sizeof(channel_config_t))]; - memset(halResult, 0, sizeof(halResult)); - EffectAuxChannelsConfig result; - Result retval = getCurrentConfigImpl( - EFFECT_FEATURE_AUX_CHANNELS, - sizeof(channel_config_t), - [&] (void* configData) { - effectAuxChannelsConfigFromHal( - 
-                        *reinterpret_cast<channel_config_t*>(configData), &result);
-            });
-    _hidl_cb(retval, result);
-    return Void();
-}
-
-Return<Result> Effect::setAuxChannelsConfig(const EffectAuxChannelsConfig& config) {
-    uint32_t halCmd[alignedSizeIn<uint32_t>(sizeof(uint32_t) + sizeof(channel_config_t))];
-    halCmd[0] = EFFECT_FEATURE_AUX_CHANNELS;
-    effectAuxChannelsConfigToHal(config, reinterpret_cast<channel_config_t*>(&halCmd[1]));
-    return sendCommandReturningStatus(EFFECT_CMD_SET_FEATURE_CONFIG,
-            "SET_FEATURE_CONFIG AUX_CHANNELS", sizeof(halCmd), halCmd);
-}
-
-Return<Result> Effect::setAudioSource(AudioSource source) {
-    uint32_t halSource = static_cast<uint32_t>(source);
-    return sendCommand(
-            EFFECT_CMD_SET_AUDIO_SOURCE, "SET_AUDIO_SOURCE", sizeof(uint32_t), &halSource);
-}
-
-Return<Result> Effect::offload(const EffectOffloadParameter& param) {
-    effect_offload_param_t halParam;
-    effectOffloadParamToHal(param, &halParam);
-    return sendCommandReturningStatus(
-            EFFECT_CMD_OFFLOAD, "OFFLOAD", sizeof(effect_offload_param_t), &halParam);
-}
-
-Return<void> Effect::getDescriptor(getDescriptor_cb _hidl_cb) {
-    effect_descriptor_t halDescriptor;
-    memset(&halDescriptor, 0, sizeof(effect_descriptor_t));
-    status_t status = (*mHandle)->get_descriptor(mHandle, &halDescriptor);
-    EffectDescriptor descriptor;
-    if (status == OK) {
-        effectDescriptorFromHal(halDescriptor, &descriptor);
-    }
-    _hidl_cb(analyzeStatus("get_descriptor", "", sContextCallFunction, status), descriptor);
-    return Void();
-}
-
-Return<void> Effect::command(
-        uint32_t commandId,
-        const hidl_vec<uint8_t>& data,
-        uint32_t resultMaxSize,
-        command_cb _hidl_cb) {
-    uint32_t halDataSize;
-    std::unique_ptr<uint8_t[]> halData = hidlVecToHal(data, &halDataSize);
-    uint32_t halResultSize = resultMaxSize;
-    std::unique_ptr<uint8_t[]> halResult(new uint8_t[halResultSize]);
-    memset(&halResult[0], 0, halResultSize);
-
-    void* dataPtr = halDataSize > 0 ? &halData[0] : NULL;
-    void* resultPtr = halResultSize > 0 ?
&halResult[0] : NULL; - status_t status = (*mHandle)->command( - mHandle, commandId, halDataSize, dataPtr, &halResultSize, resultPtr); - hidl_vec result; - if (status == OK && resultPtr != NULL) { - result.setToExternal(&halResult[0], halResultSize); - } - _hidl_cb(status, result); - return Void(); -} - -Return Effect::setParameter( - const hidl_vec& parameter, const hidl_vec& value) { - return setParameterImpl(parameter.size(), ¶meter[0], value.size(), &value[0]); -} - -Return Effect::getParameter( - const hidl_vec& parameter, uint32_t valueMaxSize, getParameter_cb _hidl_cb) { - hidl_vec value; - Result retval = getParameterImpl( - parameter.size(), - ¶meter[0], - valueMaxSize, - [&] (uint32_t valueSize, const void* valueData) { - value.setToExternal( - reinterpret_cast(const_cast(valueData)), valueSize); - }); - _hidl_cb(retval, value); - return Void(); -} - -Return Effect::getSupportedConfigsForFeature( - uint32_t featureId, - uint32_t maxConfigs, - uint32_t configSize, - getSupportedConfigsForFeature_cb _hidl_cb) { - uint32_t configCount = 0; - hidl_vec result; - Result retval = getSupportedConfigsImpl( - featureId, - maxConfigs, - configSize, - [&] (uint32_t supportedConfigs, void* configsData) { - configCount = supportedConfigs; - result.resize(configCount * configSize); - memcpy(&result[0], configsData, result.size()); - }); - _hidl_cb(retval, configCount, result); - return Void(); -} - -Return Effect::getCurrentConfigForFeature( - uint32_t featureId, uint32_t configSize, getCurrentConfigForFeature_cb _hidl_cb) { - hidl_vec result; - Result retval = getCurrentConfigImpl( - featureId, - configSize, - [&] (void* configData) { - result.resize(configSize); - memcpy(&result[0], configData, result.size()); - }); - _hidl_cb(retval, result); - return Void(); -} - -Return Effect::setCurrentConfigForFeature( - uint32_t featureId, const hidl_vec& configData) { - uint32_t halCmd[alignedSizeIn(sizeof(uint32_t) + configData.size())]; - memset(halCmd, 0, sizeof(halCmd)); - halCmd[0] = featureId; - memcpy(&halCmd[1], &configData[0], configData.size()); - return sendCommandReturningStatus( - EFFECT_CMD_SET_FEATURE_CONFIG, "SET_FEATURE_CONFIG", sizeof(halCmd), halCmd); -} - -Return Effect::close() { - if (mIsClosed) return Result::INVALID_STATE; - mIsClosed = true; - if (mProcessThread.get()) { - mStopProcessThread.store(true, std::memory_order_release); - } - if (mEfGroup) { - mEfGroup->wake(static_cast(MessageQueueFlagBits::REQUEST_QUIT)); - } - return Result::OK; -} +#include "common/all-versions/default/EffectMap.h" -} // namespace implementation -} // namespace V2_0 -} // namespace effect -} // namespace audio -} // namespace hardware -} // namespace android +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/effect/2.0/default/Effect.h b/audio/effect/2.0/default/Effect.h index 0918cd8792ae8ccb20c08c9e03fae73bf84de968..a4d194dab912138e6ba602cded891f3aa9593314 100644 --- a/audio/effect/2.0/default/Effect.h +++ b/audio/effect/2.0/default/Effect.h @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016 The Android Open Source Project + * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
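Effect.cpp above now reduces to defining AUDIO_HAL_VERSION, including a shared implementation header, and undefining the macro; Effect.h below gets the same treatment. A minimal sketch of how such a shared header can be written (the file name and namespace layout here are assumptions for illustration, not the actual all-versions headers in this tree):

// SharedEffect.impl.h (hypothetical): included once per HAL version, after the
// includer has defined AUDIO_HAL_VERSION (for example to V2_0).
namespace android {
namespace hardware {
namespace audio {
namespace effect {
namespace AUDIO_HAL_VERSION {  // the macro expands to the versioned namespace
namespace implementation {

// ...shared method definitions that only name types from this namespace...

}  // namespace implementation
}  // namespace AUDIO_HAL_VERSION
}  // namespace effect
}  // namespace audio
}  // namespace hardware
}  // namespace android

Each versioned wrapper file then compiles the same body into its own namespace, which is what the define/include/undef triples added throughout this change do.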
@@ -17,259 +17,12 @@ #ifndef ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_EFFECT_H #define ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_EFFECT_H -#include -#include -#include - #include -#include -#include -#include -#include -#include - -#include #include "AudioBufferManager.h" -namespace android { -namespace hardware { -namespace audio { -namespace effect { -namespace V2_0 { -namespace implementation { - -using ::android::hardware::audio::common::V2_0::AudioDevice; -using ::android::hardware::audio::common::V2_0::AudioMode; -using ::android::hardware::audio::common::V2_0::AudioSource; -using ::android::hardware::audio::common::V2_0::Uuid; -using ::android::hardware::audio::effect::V2_0::AudioBuffer; -using ::android::hardware::audio::effect::V2_0::EffectAuxChannelsConfig; -using ::android::hardware::audio::effect::V2_0::EffectConfig; -using ::android::hardware::audio::effect::V2_0::EffectDescriptor; -using ::android::hardware::audio::effect::V2_0::EffectFeature; -using ::android::hardware::audio::effect::V2_0::EffectOffloadParameter; -using ::android::hardware::audio::effect::V2_0::IEffect; -using ::android::hardware::audio::effect::V2_0::IEffectBufferProviderCallback; -using ::android::hardware::audio::effect::V2_0::Result; -using ::android::hardware::Return; -using ::android::hardware::Void; -using ::android::hardware::hidl_vec; -using ::android::hardware::hidl_string; -using ::android::sp; - -struct Effect : public IEffect { - typedef MessageQueue StatusMQ; - using GetParameterSuccessCallback = - std::function; - - explicit Effect(effect_handle_t handle); - - // Methods from ::android::hardware::audio::effect::V2_0::IEffect follow. - Return init() override; - Return setConfig( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) override; - Return reset() override; - Return enable() override; - Return disable() override; - Return setDevice(AudioDevice device) override; - Return setAndGetVolume( - const hidl_vec& volumes, setAndGetVolume_cb _hidl_cb) override; - Return volumeChangeNotification(const hidl_vec& volumes) override; - Return setAudioMode(AudioMode mode) override; - Return setConfigReverse( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) override; - Return setInputDevice(AudioDevice device) override; - Return getConfig(getConfig_cb _hidl_cb) override; - Return getConfigReverse(getConfigReverse_cb _hidl_cb) override; - Return getSupportedAuxChannelsConfigs( - uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) override; - Return getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) override; - Return setAuxChannelsConfig(const EffectAuxChannelsConfig& config) override; - Return setAudioSource(AudioSource source) override; - Return offload(const EffectOffloadParameter& param) override; - Return getDescriptor(getDescriptor_cb _hidl_cb) override; - Return prepareForProcessing(prepareForProcessing_cb _hidl_cb) override; - Return setProcessBuffers( - const AudioBuffer& inBuffer, const AudioBuffer& outBuffer) override; - Return command( - uint32_t commandId, - const hidl_vec& data, - uint32_t resultMaxSize, - command_cb _hidl_cb) override; - Return setParameter( - const hidl_vec& parameter, const hidl_vec& value) override; - Return getParameter( - const hidl_vec& parameter, - uint32_t valueMaxSize, - getParameter_cb _hidl_cb) override; - Return getSupportedConfigsForFeature( - uint32_t featureId, - uint32_t maxConfigs, - uint32_t configSize, - getSupportedConfigsForFeature_cb _hidl_cb) override; 
- Return getCurrentConfigForFeature( - uint32_t featureId, - uint32_t configSize, - getCurrentConfigForFeature_cb _hidl_cb) override; - Return setCurrentConfigForFeature( - uint32_t featureId, const hidl_vec& configData) override; - Return close() override; - - // Utility methods for extending interfaces. - template Return getIntegerParam( - uint32_t paramId, std::function cb) { - T value; - Result retval = getParameterImpl( - sizeof(uint32_t), ¶mId, - sizeof(T), - [&] (uint32_t valueSize, const void* valueData) { - if (valueSize > sizeof(T)) valueSize = sizeof(T); - memcpy(&value, valueData, valueSize); - }); - cb(retval, value); - return Void(); - } - - template Result getParam(uint32_t paramId, T& paramValue) { - return getParameterImpl( - sizeof(uint32_t), ¶mId, - sizeof(T), - [&] (uint32_t valueSize, const void* valueData) { - if (valueSize > sizeof(T)) valueSize = sizeof(T); - memcpy(¶mValue, valueData, valueSize); - }); - } - - template Result getParam(uint32_t paramId, uint32_t paramArg, T& paramValue) { - uint32_t params[2] = { paramId, paramArg }; - return getParameterImpl( - sizeof(params), params, - sizeof(T), - [&] (uint32_t valueSize, const void* valueData) { - if (valueSize > sizeof(T)) valueSize = sizeof(T); - memcpy(¶mValue, valueData, valueSize); - }); - } - - template Result setParam(uint32_t paramId, const T& paramValue) { - return setParameterImpl(sizeof(uint32_t), ¶mId, sizeof(T), ¶mValue); - } - - template Result setParam(uint32_t paramId, uint32_t paramArg, const T& paramValue) { - uint32_t params[2] = { paramId, paramArg }; - return setParameterImpl(sizeof(params), params, sizeof(T), ¶mValue); - } - - Result getParameterImpl( - uint32_t paramSize, - const void* paramData, - uint32_t valueSize, - GetParameterSuccessCallback onSuccess) { - return getParameterImpl(paramSize, paramData, valueSize, valueSize, onSuccess); - } - Result getParameterImpl( - uint32_t paramSize, - const void* paramData, - uint32_t requestValueSize, - uint32_t replyValueSize, - GetParameterSuccessCallback onSuccess); - Result setParameterImpl( - uint32_t paramSize, const void* paramData, uint32_t valueSize, const void* valueData); - - private: - friend struct VirtualizerEffect; // for getParameterImpl - friend struct VisualizerEffect; // to allow executing commands - - using CommandSuccessCallback = std::function; - using GetConfigCallback = std::function; - using GetCurrentConfigSuccessCallback = std::function; - using GetSupportedConfigsSuccessCallback = - std::function; - - static const char *sContextResultOfCommand; - static const char *sContextCallToCommand; - static const char *sContextCallFunction; - - bool mIsClosed; - effect_handle_t mHandle; - sp mInBuffer; - sp mOutBuffer; - std::atomic mHalInBufferPtr; - std::atomic mHalOutBufferPtr; - std::unique_ptr mStatusMQ; - EventFlag* mEfGroup; - std::atomic mStopProcessThread; - sp mProcessThread; - - virtual ~Effect(); - - template static size_t alignedSizeIn(size_t s); - template std::unique_ptr hidlVecToHal( - const hidl_vec& vec, uint32_t* halDataSize); - static void effectAuxChannelsConfigFromHal( - const channel_config_t& halConfig, EffectAuxChannelsConfig* config); - static void effectAuxChannelsConfigToHal( - const EffectAuxChannelsConfig& config, channel_config_t* halConfig); - static void effectBufferConfigFromHal( - const buffer_config_t& halConfig, EffectBufferConfig* config); - static void effectBufferConfigToHal( - const EffectBufferConfig& config, buffer_config_t* halConfig); - static void effectConfigFromHal(const 
effect_config_t& halConfig, EffectConfig* config); - static void effectConfigToHal(const EffectConfig& config, effect_config_t* halConfig); - static void effectOffloadParamToHal( - const EffectOffloadParameter& offload, effect_offload_param_t* halOffload); - static std::vector parameterToHal( - uint32_t paramSize, const void* paramData, uint32_t valueSize, const void** valueData); - - Result analyzeCommandStatus( - const char* commandName, const char* context, status_t status); - Result analyzeStatus( - const char* funcName, - const char* subFuncName, - const char* contextDescription, - status_t status); - void getConfigImpl(int commandCode, const char* commandName, GetConfigCallback cb); - Result getCurrentConfigImpl( - uint32_t featureId, uint32_t configSize, GetCurrentConfigSuccessCallback onSuccess); - Result getSupportedConfigsImpl( - uint32_t featureId, - uint32_t maxConfigs, - uint32_t configSize, - GetSupportedConfigsSuccessCallback onSuccess); - Result sendCommand(int commandCode, const char* commandName); - Result sendCommand(int commandCode, const char* commandName, uint32_t size, void* data); - Result sendCommandReturningData( - int commandCode, const char* commandName, uint32_t* replySize, void* replyData); - Result sendCommandReturningData( - int commandCode, const char* commandName, - uint32_t size, void* data, - uint32_t* replySize, void* replyData); - Result sendCommandReturningStatus(int commandCode, const char* commandName); - Result sendCommandReturningStatus( - int commandCode, const char* commandName, uint32_t size, void* data); - Result sendCommandReturningStatusAndData( - int commandCode, const char* commandName, - uint32_t size, void* data, - uint32_t* replySize, void* replyData, - uint32_t minReplySize, - CommandSuccessCallback onSuccess); - Result setConfigImpl( - int commandCode, const char* commandName, - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider); -}; - -} // namespace implementation -} // namespace V2_0 -} // namespace effect -} // namespace audio -} // namespace hardware -} // namespace android +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION #endif // ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_EFFECT_H diff --git a/audio/effect/2.0/default/EffectsFactory.cpp b/audio/effect/2.0/default/EffectsFactory.cpp index 922a922182ebf9791214827384efaa2d6c76b480..a48a85f7c258bc20490a8140c4100fa2748586e4 100644 --- a/audio/effect/2.0/default/EffectsFactory.cpp +++ b/audio/effect/2.0/default/EffectsFactory.cpp @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016 The Android Open Source Project + * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
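The templated getParam, setParam, and getIntegerParam helpers removed from Effect.h above are what the specialized effect wrappers later in this change call. A short usage sketch, assuming a wrapper that holds sp<Effect> as in the deleted declarations; MY_PARAM_ID is hypothetical (real wrappers pass ids such as EQ_PARAM_BAND_LEVEL):

// Illustrative only: read a 16-bit parameter, then write a modified value back.
Result nudgeParameter(const sp<Effect>& effect) {
    int16_t level = 0;
    Result retval = effect->getParam(MY_PARAM_ID, level);  // copies at most sizeof(int16_t)
    if (retval == Result::OK) {
        retval = effect->setParam(MY_PARAM_ID, static_cast<int16_t>(level + 1));
    }
    return retval;
}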
@@ -15,191 +15,25 @@ */ #define LOG_TAG "EffectFactoryHAL" -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - +#include "EffectsFactory.h" #include "AcousticEchoCancelerEffect.h" #include "AutomaticGainControlEffect.h" #include "BassBoostEffect.h" #include "Conversions.h" #include "DownmixEffect.h" -#include "EffectsFactory.h" -#include "HidlUtils.h" #include "Effect.h" -#include "EffectMap.h" #include "EnvironmentalReverbEffect.h" #include "EqualizerEffect.h" +#include "HidlUtils.h" #include "LoudnessEnhancerEffect.h" #include "NoiseSuppressionEffect.h" #include "PresetReverbEffect.h" #include "VirtualizerEffect.h" #include "VisualizerEffect.h" +#include "common/all-versions/default/EffectMap.h" -namespace android { -namespace hardware { -namespace audio { -namespace effect { -namespace V2_0 { -namespace implementation { - -// static -sp EffectsFactory::dispatchEffectInstanceCreation( - const effect_descriptor_t& halDescriptor, effect_handle_t handle) { - const effect_uuid_t *halUuid = &halDescriptor.type; - if (memcmp(halUuid, FX_IID_AEC, sizeof(effect_uuid_t)) == 0) { - return new AcousticEchoCancelerEffect(handle); - } else if (memcmp(halUuid, FX_IID_AGC, sizeof(effect_uuid_t)) == 0) { - return new AutomaticGainControlEffect(handle); - } else if (memcmp(halUuid, SL_IID_BASSBOOST, sizeof(effect_uuid_t)) == 0) { - return new BassBoostEffect(handle); - } else if (memcmp(halUuid, EFFECT_UIID_DOWNMIX, sizeof(effect_uuid_t)) == 0) { - return new DownmixEffect(handle); - } else if (memcmp(halUuid, SL_IID_ENVIRONMENTALREVERB, sizeof(effect_uuid_t)) == 0) { - return new EnvironmentalReverbEffect(handle); - } else if (memcmp(halUuid, SL_IID_EQUALIZER, sizeof(effect_uuid_t)) == 0) { - return new EqualizerEffect(handle); - } else if (memcmp(halUuid, FX_IID_LOUDNESS_ENHANCER, sizeof(effect_uuid_t)) == 0) { - return new LoudnessEnhancerEffect(handle); - } else if (memcmp(halUuid, FX_IID_NS, sizeof(effect_uuid_t)) == 0) { - return new NoiseSuppressionEffect(handle); - } else if (memcmp(halUuid, SL_IID_PRESETREVERB, sizeof(effect_uuid_t)) == 0) { - return new PresetReverbEffect(handle); - } else if (memcmp(halUuid, SL_IID_VIRTUALIZER, sizeof(effect_uuid_t)) == 0) { - return new VirtualizerEffect(handle); - } else if (memcmp(halUuid, SL_IID_VISUALIZATION, sizeof(effect_uuid_t)) == 0) { - return new VisualizerEffect(handle); - } - return new Effect(handle); -} - -// Methods from ::android::hardware::audio::effect::V2_0::IEffectsFactory follow. -Return EffectsFactory::getAllDescriptors(getAllDescriptors_cb _hidl_cb) { - Result retval(Result::OK); - hidl_vec result; - uint32_t numEffects; - status_t status; - -restart: - numEffects = 0; - status = EffectQueryNumberEffects(&numEffects); - if (status != OK) { - retval = Result::NOT_INITIALIZED; - ALOGE("Error querying number of effects: %s", strerror(-status)); - goto exit; - } - result.resize(numEffects); - for (uint32_t i = 0; i < numEffects; ++i) { - effect_descriptor_t halDescriptor; - status = EffectQueryEffect(i, &halDescriptor); - if (status == OK) { - effectDescriptorFromHal(halDescriptor, &result[i]); - } else { - ALOGE("Error querying effect at position %d / %d: %s", - i, numEffects, strerror(-status)); - switch (status) { - case -ENOSYS: { - // Effect list has changed. - goto restart; - } - case -ENOENT: { - // No more effects available. 
- result.resize(i); - } - default: { - result.resize(0); - retval = Result::NOT_INITIALIZED; - } - } - break; - } - } - -exit: - _hidl_cb(retval, result); - return Void(); -} - -Return EffectsFactory::getDescriptor(const Uuid& uid, getDescriptor_cb _hidl_cb) { - effect_uuid_t halUuid; - HidlUtils::uuidToHal(uid, &halUuid); - effect_descriptor_t halDescriptor; - status_t status = EffectGetDescriptor(&halUuid, &halDescriptor); - EffectDescriptor descriptor; - effectDescriptorFromHal(halDescriptor, &descriptor); - Result retval(Result::OK); - if (status != OK) { - ALOGE("Error querying effect descriptor for %s: %s", - uuidToString(halUuid).c_str(), strerror(-status)); - if (status == -ENOENT) { - retval = Result::INVALID_ARGUMENTS; - } else { - retval = Result::NOT_INITIALIZED; - } - } - _hidl_cb(retval, descriptor); - return Void(); -} - -Return EffectsFactory::createEffect( - const Uuid& uid, int32_t session, int32_t ioHandle, createEffect_cb _hidl_cb) { - effect_uuid_t halUuid; - HidlUtils::uuidToHal(uid, &halUuid); - effect_handle_t handle; - Result retval(Result::OK); - status_t status = EffectCreate(&halUuid, session, ioHandle, &handle); - sp effect; - uint64_t effectId = EffectMap::INVALID_ID; - if (status == OK) { - effect_descriptor_t halDescriptor; - memset(&halDescriptor, 0, sizeof(effect_descriptor_t)); - status = (*handle)->get_descriptor(handle, &halDescriptor); - if (status == OK) { - effect = dispatchEffectInstanceCreation(halDescriptor, handle); - effectId = EffectMap::getInstance().add(handle); - } else { - ALOGE("Error querying effect descriptor for %s: %s", - uuidToString(halUuid).c_str(), strerror(-status)); - EffectRelease(handle); - } - } - if (status != OK) { - ALOGE("Error creating effect %s: %s", uuidToString(halUuid).c_str(), strerror(-status)); - if (status == -ENOENT) { - retval = Result::INVALID_ARGUMENTS; - } else { - retval = Result::NOT_INITIALIZED; - } - } - _hidl_cb(retval, effect, effectId); - return Void(); -} - -Return EffectsFactory::debugDump(const hidl_handle& fd) { - if (fd.getNativeHandle() != nullptr && fd->numFds == 1) { - EffectDumpEffects(fd->data[0]); - } - return Void(); -} - - -IEffectsFactory* HIDL_FETCH_IEffectsFactory(const char* /* name */) { - return new EffectsFactory(); -} +using ::android::hardware::audio::common::V2_0::HidlUtils; -} // namespace implementation -} // namespace V2_0 -} // namespace effect -} // namespace audio -} // namespace hardware -} // namespace android +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/effect/2.0/default/EffectsFactory.h b/audio/effect/2.0/default/EffectsFactory.h index 829a534c90ba11d5d50d1c07d088c0a7f1edfcbe..f1bfbcff4c3434318b3e0adbf4eab8cbb47efdab 100644 --- a/audio/effect/2.0/default/EffectsFactory.h +++ b/audio/effect/2.0/default/EffectsFactory.h @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016 The Android Open Source Project + * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
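createEffect in EffectsFactory.cpp above registers the raw effect_handle_t with EffectMap and returns the resulting id to the client; the EffectMap.h now included from common/all-versions is not shown in this change. A minimal sketch of what such a registry needs to do (names and details here are assumptions, not the real EffectMap):

#include <cstdint>
#include <map>
#include <mutex>

// Hypothetical sketch: hand out stable 64-bit ids for effect handles so HIDL
// clients never see raw pointers.
class EffectRegistrySketch {
  public:
    static constexpr uint64_t INVALID_ID = 0;
    uint64_t add(void* handle) {
        std::lock_guard<std::mutex> lock(mLock);
        const uint64_t id = mNextId++;
        mHandles[id] = handle;
        return id;
    }
    void* get(uint64_t id) const {
        std::lock_guard<std::mutex> lock(mLock);
        auto it = mHandles.find(id);
        return it == mHandles.end() ? nullptr : it->second;
    }
  private:
    mutable std::mutex mLock;
    uint64_t mNextId = 1;
    std::map<uint64_t, void*> mHandles;
};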
@@ -20,47 +20,10 @@ #include #include -#include #include -namespace android { -namespace hardware { -namespace audio { -namespace effect { -namespace V2_0 { -namespace implementation { - -using ::android::hardware::audio::common::V2_0::Uuid; -using ::android::hardware::audio::effect::V2_0::EffectDescriptor; -using ::android::hardware::audio::effect::V2_0::IEffect; -using ::android::hardware::audio::effect::V2_0::IEffectsFactory; -using ::android::hardware::audio::effect::V2_0::Result; -using ::android::hardware::Return; -using ::android::hardware::Void; -using ::android::hardware::hidl_vec; -using ::android::hardware::hidl_string; -using ::android::sp; - -struct EffectsFactory : public IEffectsFactory { - // Methods from ::android::hardware::audio::effect::V2_0::IEffectsFactory follow. - Return getAllDescriptors(getAllDescriptors_cb _hidl_cb) override; - Return getDescriptor(const Uuid& uid, getDescriptor_cb _hidl_cb) override; - Return createEffect( - const Uuid& uid, int32_t session, int32_t ioHandle, createEffect_cb _hidl_cb) override; - Return debugDump(const hidl_handle& fd) override; - - private: - static sp dispatchEffectInstanceCreation( - const effect_descriptor_t& halDescriptor, effect_handle_t handle); -}; - -extern "C" IEffectsFactory* HIDL_FETCH_IEffectsFactory(const char* name); - -} // namespace implementation -} // namespace V2_0 -} // namespace effect -} // namespace audio -} // namespace hardware -} // namespace android +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION #endif // ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_EFFECTSFACTORY_H diff --git a/audio/effect/2.0/default/EnvironmentalReverbEffect.cpp b/audio/effect/2.0/default/EnvironmentalReverbEffect.cpp index 86ff36833d3343e7d6ba1ae94471f5353be4f232..017dd1f4cba1018b186f340593547090d194b800 100644 --- a/audio/effect/2.0/default/EnvironmentalReverbEffect.cpp +++ b/audio/effect/2.0/default/EnvironmentalReverbEffect.cpp @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016 The Android Open Source Project + * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -19,298 +19,6 @@ #include "EnvironmentalReverbEffect.h" -namespace android { -namespace hardware { -namespace audio { -namespace effect { -namespace V2_0 { -namespace implementation { - -EnvironmentalReverbEffect::EnvironmentalReverbEffect(effect_handle_t handle) - : mEffect(new Effect(handle)) { -} - -EnvironmentalReverbEffect::~EnvironmentalReverbEffect() {} - -void EnvironmentalReverbEffect::propertiesFromHal( - const t_reverb_settings& halProperties, - IEnvironmentalReverbEffect::AllProperties* properties) { - properties->roomLevel = halProperties.roomLevel; - properties->roomHfLevel = halProperties.roomHFLevel; - properties->decayTime = halProperties.decayTime; - properties->decayHfRatio = halProperties.decayHFRatio; - properties->reflectionsLevel = halProperties.reflectionsLevel; - properties->reflectionsDelay = halProperties.reflectionsDelay; - properties->reverbLevel = halProperties.reverbLevel; - properties->reverbDelay = halProperties.reverbDelay; - properties->diffusion = halProperties.diffusion; - properties->density = halProperties.density; -} - -void EnvironmentalReverbEffect::propertiesToHal( - const IEnvironmentalReverbEffect::AllProperties& properties, - t_reverb_settings* halProperties) { - halProperties->roomLevel = properties.roomLevel; - halProperties->roomHFLevel = properties.roomHfLevel; - halProperties->decayTime = properties.decayTime; - halProperties->decayHFRatio = properties.decayHfRatio; - halProperties->reflectionsLevel = properties.reflectionsLevel; - halProperties->reflectionsDelay = properties.reflectionsDelay; - halProperties->reverbLevel = properties.reverbLevel; - halProperties->reverbDelay = properties.reverbDelay; - halProperties->diffusion = properties.diffusion; - halProperties->density = properties.density; -} - -// Methods from ::android::hardware::audio::effect::V2_0::IEffect follow. 
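The comment above introduces a long run of one-line forwarders: EnvironmentalReverbEffect, like the other specialized effects in this change, holds an sp<Effect> and delegates every IEffect method to it, adding typed methods only for its own parameters. In miniature, and with simplified types taken from the surrounding code, the pattern is:

// Simplified sketch of the delegation used by the specialized effects; the real
// classes forward every IEffect method the same way.
struct WrapperSketch {
    explicit WrapperSketch(const sp<Effect>& effect) : mEffect(effect) {}
    Return<Result> enable() { return mEffect->enable(); }    // plain forwarding
    Return<Result> disable() { return mEffect->disable(); }
  private:
    sp<Effect> mEffect;
};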
-Return EnvironmentalReverbEffect::init() { - return mEffect->init(); -} - -Return EnvironmentalReverbEffect::setConfig( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) { - return mEffect->setConfig(config, inputBufferProvider, outputBufferProvider); -} - -Return EnvironmentalReverbEffect::reset() { - return mEffect->reset(); -} - -Return EnvironmentalReverbEffect::enable() { - return mEffect->enable(); -} - -Return EnvironmentalReverbEffect::disable() { - return mEffect->disable(); -} - -Return EnvironmentalReverbEffect::setDevice(AudioDevice device) { - return mEffect->setDevice(device); -} - -Return EnvironmentalReverbEffect::setAndGetVolume( - const hidl_vec& volumes, setAndGetVolume_cb _hidl_cb) { - return mEffect->setAndGetVolume(volumes, _hidl_cb); -} - -Return EnvironmentalReverbEffect::volumeChangeNotification( - const hidl_vec& volumes) { - return mEffect->volumeChangeNotification(volumes); -} - -Return EnvironmentalReverbEffect::setAudioMode(AudioMode mode) { - return mEffect->setAudioMode(mode); -} - -Return EnvironmentalReverbEffect::setConfigReverse( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) { - return mEffect->setConfigReverse(config, inputBufferProvider, outputBufferProvider); -} - -Return EnvironmentalReverbEffect::setInputDevice(AudioDevice device) { - return mEffect->setInputDevice(device); -} - -Return EnvironmentalReverbEffect::getConfig(getConfig_cb _hidl_cb) { - return mEffect->getConfig(_hidl_cb); -} - -Return EnvironmentalReverbEffect::getConfigReverse(getConfigReverse_cb _hidl_cb) { - return mEffect->getConfigReverse(_hidl_cb); -} - -Return EnvironmentalReverbEffect::getSupportedAuxChannelsConfigs( - uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) { - return mEffect->getSupportedAuxChannelsConfigs(maxConfigs, _hidl_cb); -} - -Return EnvironmentalReverbEffect::getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) { - return mEffect->getAuxChannelsConfig(_hidl_cb); -} - -Return EnvironmentalReverbEffect::setAuxChannelsConfig( - const EffectAuxChannelsConfig& config) { - return mEffect->setAuxChannelsConfig(config); -} - -Return EnvironmentalReverbEffect::setAudioSource(AudioSource source) { - return mEffect->setAudioSource(source); -} - -Return EnvironmentalReverbEffect::offload(const EffectOffloadParameter& param) { - return mEffect->offload(param); -} - -Return EnvironmentalReverbEffect::getDescriptor(getDescriptor_cb _hidl_cb) { - return mEffect->getDescriptor(_hidl_cb); -} - -Return EnvironmentalReverbEffect::prepareForProcessing( - prepareForProcessing_cb _hidl_cb) { - return mEffect->prepareForProcessing(_hidl_cb); -} - -Return EnvironmentalReverbEffect::setProcessBuffers( - const AudioBuffer& inBuffer, const AudioBuffer& outBuffer) { - return mEffect->setProcessBuffers(inBuffer, outBuffer); -} - -Return EnvironmentalReverbEffect::command( - uint32_t commandId, - const hidl_vec& data, - uint32_t resultMaxSize, - command_cb _hidl_cb) { - return mEffect->command(commandId, data, resultMaxSize, _hidl_cb); -} - -Return EnvironmentalReverbEffect::setParameter( - const hidl_vec& parameter, const hidl_vec& value) { - return mEffect->setParameter(parameter, value); -} - -Return EnvironmentalReverbEffect::getParameter( - const hidl_vec& parameter, - uint32_t valueMaxSize, - getParameter_cb _hidl_cb) { - return mEffect->getParameter(parameter, valueMaxSize, _hidl_cb); -} - -Return EnvironmentalReverbEffect::getSupportedConfigsForFeature( - uint32_t 
featureId, - uint32_t maxConfigs, - uint32_t configSize, - getSupportedConfigsForFeature_cb _hidl_cb) { - return mEffect->getSupportedConfigsForFeature(featureId, maxConfigs, configSize, _hidl_cb); -} - -Return EnvironmentalReverbEffect::getCurrentConfigForFeature( - uint32_t featureId, - uint32_t configSize, - getCurrentConfigForFeature_cb _hidl_cb) { - return mEffect->getCurrentConfigForFeature(featureId, configSize, _hidl_cb); -} - -Return EnvironmentalReverbEffect::setCurrentConfigForFeature( - uint32_t featureId, const hidl_vec& configData) { - return mEffect->setCurrentConfigForFeature(featureId, configData); -} - -Return EnvironmentalReverbEffect::close() { - return mEffect->close(); -} - -// Methods from ::android::hardware::audio::effect::V2_0::IEnvironmentalReverbEffect follow. -Return EnvironmentalReverbEffect::setBypass(bool bypass) { - return mEffect->setParam(REVERB_PARAM_BYPASS, bypass); -} - -Return EnvironmentalReverbEffect::getBypass(getBypass_cb _hidl_cb) { - return mEffect->getIntegerParam(REVERB_PARAM_BYPASS, _hidl_cb); -} - -Return EnvironmentalReverbEffect::setRoomLevel(int16_t roomLevel) { - return mEffect->setParam(REVERB_PARAM_ROOM_LEVEL, roomLevel); -} - -Return EnvironmentalReverbEffect::getRoomLevel(getRoomLevel_cb _hidl_cb) { - return mEffect->getIntegerParam(REVERB_PARAM_ROOM_LEVEL, _hidl_cb); -} - -Return EnvironmentalReverbEffect::setRoomHfLevel(int16_t roomHfLevel) { - return mEffect->setParam(REVERB_PARAM_ROOM_HF_LEVEL, roomHfLevel); -} - -Return EnvironmentalReverbEffect::getRoomHfLevel(getRoomHfLevel_cb _hidl_cb) { - return mEffect->getIntegerParam(REVERB_PARAM_ROOM_HF_LEVEL, _hidl_cb); -} - -Return EnvironmentalReverbEffect::setDecayTime(uint32_t decayTime) { - return mEffect->setParam(REVERB_PARAM_DECAY_TIME, decayTime); -} - -Return EnvironmentalReverbEffect::getDecayTime(getDecayTime_cb _hidl_cb) { - return mEffect->getIntegerParam(REVERB_PARAM_DECAY_TIME, _hidl_cb); -} - -Return EnvironmentalReverbEffect::setDecayHfRatio(int16_t decayHfRatio) { - return mEffect->setParam(REVERB_PARAM_DECAY_HF_RATIO, decayHfRatio); -} - -Return EnvironmentalReverbEffect::getDecayHfRatio(getDecayHfRatio_cb _hidl_cb) { - return mEffect->getIntegerParam(REVERB_PARAM_DECAY_HF_RATIO, _hidl_cb); -} - -Return EnvironmentalReverbEffect::setReflectionsLevel(int16_t reflectionsLevel) { - return mEffect->setParam(REVERB_PARAM_REFLECTIONS_LEVEL, reflectionsLevel); -} - -Return EnvironmentalReverbEffect::getReflectionsLevel(getReflectionsLevel_cb _hidl_cb) { - return mEffect->getIntegerParam(REVERB_PARAM_REFLECTIONS_LEVEL, _hidl_cb); -} - -Return EnvironmentalReverbEffect::setReflectionsDelay(uint32_t reflectionsDelay) { - return mEffect->setParam(REVERB_PARAM_REFLECTIONS_DELAY, reflectionsDelay); -} - -Return EnvironmentalReverbEffect::getReflectionsDelay(getReflectionsDelay_cb _hidl_cb) { - return mEffect->getIntegerParam(REVERB_PARAM_REFLECTIONS_DELAY, _hidl_cb); -} - -Return EnvironmentalReverbEffect::setReverbLevel(int16_t reverbLevel) { - return mEffect->setParam(REVERB_PARAM_REVERB_LEVEL, reverbLevel); -} - -Return EnvironmentalReverbEffect::getReverbLevel(getReverbLevel_cb _hidl_cb) { - return mEffect->getIntegerParam(REVERB_PARAM_REVERB_LEVEL, _hidl_cb); -} - -Return EnvironmentalReverbEffect::setReverbDelay(uint32_t reverbDelay) { - return mEffect->setParam(REVERB_PARAM_REVERB_DELAY, reverbDelay); -} - -Return EnvironmentalReverbEffect::getReverbDelay(getReverbDelay_cb _hidl_cb) { - return mEffect->getIntegerParam(REVERB_PARAM_REVERB_DELAY, _hidl_cb); -} - -Return 
EnvironmentalReverbEffect::setDiffusion(int16_t diffusion) { - return mEffect->setParam(REVERB_PARAM_DIFFUSION, diffusion); -} - -Return EnvironmentalReverbEffect::getDiffusion(getDiffusion_cb _hidl_cb) { - return mEffect->getIntegerParam(REVERB_PARAM_DIFFUSION, _hidl_cb); -} - -Return EnvironmentalReverbEffect::setDensity(int16_t density) { - return mEffect->setParam(REVERB_PARAM_DENSITY, density); -} - -Return EnvironmentalReverbEffect::getDensity(getDensity_cb _hidl_cb) { - return mEffect->getIntegerParam(REVERB_PARAM_DENSITY, _hidl_cb); -} - -Return EnvironmentalReverbEffect::setAllProperties( - const IEnvironmentalReverbEffect::AllProperties& properties) { - t_reverb_settings halProperties; - propertiesToHal(properties, &halProperties); - return mEffect->setParam(REVERB_PARAM_PROPERTIES, halProperties); -} - -Return EnvironmentalReverbEffect::getAllProperties(getAllProperties_cb _hidl_cb) { - t_reverb_settings halProperties; - Result retval = mEffect->getParam(REVERB_PARAM_PROPERTIES, halProperties); - AllProperties properties; - propertiesFromHal(halProperties, &properties); - _hidl_cb(retval, properties); - return Void(); -} - -} // namespace implementation -} // namespace V2_0 -} // namespace effect -} // namespace audio -} // namespace hardware -} // namespace android +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/effect/2.0/default/EnvironmentalReverbEffect.h b/audio/effect/2.0/default/EnvironmentalReverbEffect.h index 794caac5cfe0975aa5f476992c6dbff18dea5bff..d93a53f42f4498ba46dab72b79bfddc845e85cb8 100644 --- a/audio/effect/2.0/default/EnvironmentalReverbEffect.h +++ b/audio/effect/2.0/default/EnvironmentalReverbEffect.h @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016 The Android Open Source Project + * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -20,140 +20,11 @@ #include #include -#include - -#include #include "Effect.h" -namespace android { -namespace hardware { -namespace audio { -namespace effect { -namespace V2_0 { -namespace implementation { - -using ::android::hardware::audio::common::V2_0::AudioDevice; -using ::android::hardware::audio::common::V2_0::AudioMode; -using ::android::hardware::audio::common::V2_0::AudioSource; -using ::android::hardware::audio::effect::V2_0::AudioBuffer; -using ::android::hardware::audio::effect::V2_0::EffectAuxChannelsConfig; -using ::android::hardware::audio::effect::V2_0::EffectConfig; -using ::android::hardware::audio::effect::V2_0::EffectDescriptor; -using ::android::hardware::audio::effect::V2_0::EffectOffloadParameter; -using ::android::hardware::audio::effect::V2_0::IEffect; -using ::android::hardware::audio::effect::V2_0::IEffectBufferProviderCallback; -using ::android::hardware::audio::effect::V2_0::IEnvironmentalReverbEffect; -using ::android::hardware::audio::effect::V2_0::Result; -using ::android::hardware::Return; -using ::android::hardware::Void; -using ::android::hardware::hidl_vec; -using ::android::hardware::hidl_string; -using ::android::sp; - -struct EnvironmentalReverbEffect : public IEnvironmentalReverbEffect { - explicit EnvironmentalReverbEffect(effect_handle_t handle); - - // Methods from ::android::hardware::audio::effect::V2_0::IEffect follow. 
- Return init() override; - Return setConfig( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) override; - Return reset() override; - Return enable() override; - Return disable() override; - Return setDevice(AudioDevice device) override; - Return setAndGetVolume( - const hidl_vec& volumes, setAndGetVolume_cb _hidl_cb) override; - Return volumeChangeNotification(const hidl_vec& volumes) override; - Return setAudioMode(AudioMode mode) override; - Return setConfigReverse( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) override; - Return setInputDevice(AudioDevice device) override; - Return getConfig(getConfig_cb _hidl_cb) override; - Return getConfigReverse(getConfigReverse_cb _hidl_cb) override; - Return getSupportedAuxChannelsConfigs( - uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) override; - Return getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) override; - Return setAuxChannelsConfig(const EffectAuxChannelsConfig& config) override; - Return setAudioSource(AudioSource source) override; - Return offload(const EffectOffloadParameter& param) override; - Return getDescriptor(getDescriptor_cb _hidl_cb) override; - Return prepareForProcessing(prepareForProcessing_cb _hidl_cb) override; - Return setProcessBuffers( - const AudioBuffer& inBuffer, const AudioBuffer& outBuffer) override; - Return command( - uint32_t commandId, - const hidl_vec& data, - uint32_t resultMaxSize, - command_cb _hidl_cb) override; - Return setParameter( - const hidl_vec& parameter, const hidl_vec& value) override; - Return getParameter( - const hidl_vec& parameter, - uint32_t valueMaxSize, - getParameter_cb _hidl_cb) override; - Return getSupportedConfigsForFeature( - uint32_t featureId, - uint32_t maxConfigs, - uint32_t configSize, - getSupportedConfigsForFeature_cb _hidl_cb) override; - Return getCurrentConfigForFeature( - uint32_t featureId, - uint32_t configSize, - getCurrentConfigForFeature_cb _hidl_cb) override; - Return setCurrentConfigForFeature( - uint32_t featureId, const hidl_vec& configData) override; - Return close() override; - - // Methods from ::android::hardware::audio::effect::V2_0::IEnvironmentalReverbEffect follow. 
- Return setBypass(bool bypass) override; - Return getBypass(getBypass_cb _hidl_cb) override; - Return setRoomLevel(int16_t roomLevel) override; - Return getRoomLevel(getRoomLevel_cb _hidl_cb) override; - Return setRoomHfLevel(int16_t roomHfLevel) override; - Return getRoomHfLevel(getRoomHfLevel_cb _hidl_cb) override; - Return setDecayTime(uint32_t decayTime) override; - Return getDecayTime(getDecayTime_cb _hidl_cb) override; - Return setDecayHfRatio(int16_t decayHfRatio) override; - Return getDecayHfRatio(getDecayHfRatio_cb _hidl_cb) override; - Return setReflectionsLevel(int16_t reflectionsLevel) override; - Return getReflectionsLevel(getReflectionsLevel_cb _hidl_cb) override; - Return setReflectionsDelay(uint32_t reflectionsDelay) override; - Return getReflectionsDelay(getReflectionsDelay_cb _hidl_cb) override; - Return setReverbLevel(int16_t reverbLevel) override; - Return getReverbLevel(getReverbLevel_cb _hidl_cb) override; - Return setReverbDelay(uint32_t reverbDelay) override; - Return getReverbDelay(getReverbDelay_cb _hidl_cb) override; - Return setDiffusion(int16_t diffusion) override; - Return getDiffusion(getDiffusion_cb _hidl_cb) override; - Return setDensity(int16_t density) override; - Return getDensity(getDensity_cb _hidl_cb) override; - Return setAllProperties( - const IEnvironmentalReverbEffect::AllProperties& properties) override; - Return getAllProperties(getAllProperties_cb _hidl_cb) override; - - private: - sp mEffect; - - virtual ~EnvironmentalReverbEffect(); - - void propertiesFromHal( - const t_reverb_settings& halProperties, - IEnvironmentalReverbEffect::AllProperties* properties); - void propertiesToHal( - const IEnvironmentalReverbEffect::AllProperties& properties, - t_reverb_settings* halProperties); -}; - -} // namespace implementation -} // namespace V2_0 -} // namespace effect -} // namespace audio -} // namespace hardware -} // namespace android +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION #endif // ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_ENVIRONMENTALREVERBEFFECT_H diff --git a/audio/effect/2.0/default/EqualizerEffect.cpp b/audio/effect/2.0/default/EqualizerEffect.cpp index 808d8eb23efae7b406ffaa2bbdc919634ecd3849..d6e056c421af89fa4d1c4ae5ca3452acb5cbc8b6 100644 --- a/audio/effect/2.0/default/EqualizerEffect.cpp +++ b/audio/effect/2.0/default/EqualizerEffect.cpp @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016 The Android Open Source Project + * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,298 +14,10 @@ * limitations under the License. */ -#include - #define LOG_TAG "Equalizer_HAL" -#include #include "EqualizerEffect.h" -namespace android { -namespace hardware { -namespace audio { -namespace effect { -namespace V2_0 { -namespace implementation { - -EqualizerEffect::EqualizerEffect(effect_handle_t handle) - : mEffect(new Effect(handle)) { -} - -EqualizerEffect::~EqualizerEffect() {} - -void EqualizerEffect::propertiesFromHal( - const t_equalizer_settings& halProperties, - IEqualizerEffect::AllProperties* properties) { - properties->curPreset = halProperties.curPreset; - // t_equalizer_settings incorrectly defines bandLevels as uint16_t, - // whereas the actual type of values used by effects is int16_t. 
- const int16_t* signedBandLevels = - reinterpret_cast(&halProperties.bandLevels[0]); - properties->bandLevels.setToExternal( - const_cast(signedBandLevels), halProperties.numBands); -} - -std::vector EqualizerEffect::propertiesToHal( - const IEqualizerEffect::AllProperties& properties, - t_equalizer_settings** halProperties) { - size_t bandsSize = properties.bandLevels.size() * sizeof(uint16_t); - std::vector halBuffer(sizeof(t_equalizer_settings) + bandsSize, 0); - *halProperties = reinterpret_cast(&halBuffer[0]); - (*halProperties)->curPreset = properties.curPreset; - (*halProperties)->numBands = properties.bandLevels.size(); - memcpy((*halProperties)->bandLevels, &properties.bandLevels[0], bandsSize); - return halBuffer; -} - -// Methods from ::android::hardware::audio::effect::V2_0::IEffect follow. -Return EqualizerEffect::init() { - return mEffect->init(); -} - -Return EqualizerEffect::setConfig( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) { - return mEffect->setConfig(config, inputBufferProvider, outputBufferProvider); -} - -Return EqualizerEffect::reset() { - return mEffect->reset(); -} - -Return EqualizerEffect::enable() { - return mEffect->enable(); -} - -Return EqualizerEffect::disable() { - return mEffect->disable(); -} - -Return EqualizerEffect::setDevice(AudioDevice device) { - return mEffect->setDevice(device); -} - -Return EqualizerEffect::setAndGetVolume( - const hidl_vec& volumes, setAndGetVolume_cb _hidl_cb) { - return mEffect->setAndGetVolume(volumes, _hidl_cb); -} - -Return EqualizerEffect::volumeChangeNotification( - const hidl_vec& volumes) { - return mEffect->volumeChangeNotification(volumes); -} - -Return EqualizerEffect::setAudioMode(AudioMode mode) { - return mEffect->setAudioMode(mode); -} - -Return EqualizerEffect::setConfigReverse( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) { - return mEffect->setConfigReverse(config, inputBufferProvider, outputBufferProvider); -} - -Return EqualizerEffect::setInputDevice(AudioDevice device) { - return mEffect->setInputDevice(device); -} - -Return EqualizerEffect::getConfig(getConfig_cb _hidl_cb) { - return mEffect->getConfig(_hidl_cb); -} - -Return EqualizerEffect::getConfigReverse(getConfigReverse_cb _hidl_cb) { - return mEffect->getConfigReverse(_hidl_cb); -} - -Return EqualizerEffect::getSupportedAuxChannelsConfigs( - uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) { - return mEffect->getSupportedAuxChannelsConfigs(maxConfigs, _hidl_cb); -} - -Return EqualizerEffect::getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) { - return mEffect->getAuxChannelsConfig(_hidl_cb); -} - -Return EqualizerEffect::setAuxChannelsConfig( - const EffectAuxChannelsConfig& config) { - return mEffect->setAuxChannelsConfig(config); -} - -Return EqualizerEffect::setAudioSource(AudioSource source) { - return mEffect->setAudioSource(source); -} - -Return EqualizerEffect::offload(const EffectOffloadParameter& param) { - return mEffect->offload(param); -} - -Return EqualizerEffect::getDescriptor(getDescriptor_cb _hidl_cb) { - return mEffect->getDescriptor(_hidl_cb); -} - -Return EqualizerEffect::prepareForProcessing( - prepareForProcessing_cb _hidl_cb) { - return mEffect->prepareForProcessing(_hidl_cb); -} - -Return EqualizerEffect::setProcessBuffers( - const AudioBuffer& inBuffer, const AudioBuffer& outBuffer) { - return mEffect->setProcessBuffers(inBuffer, outBuffer); -} - -Return EqualizerEffect::command( - uint32_t 
commandId, - const hidl_vec& data, - uint32_t resultMaxSize, - command_cb _hidl_cb) { - return mEffect->command(commandId, data, resultMaxSize, _hidl_cb); -} - -Return EqualizerEffect::setParameter( - const hidl_vec& parameter, const hidl_vec& value) { - return mEffect->setParameter(parameter, value); -} - -Return EqualizerEffect::getParameter( - const hidl_vec& parameter, - uint32_t valueMaxSize, - getParameter_cb _hidl_cb) { - return mEffect->getParameter(parameter, valueMaxSize, _hidl_cb); -} - -Return EqualizerEffect::getSupportedConfigsForFeature( - uint32_t featureId, - uint32_t maxConfigs, - uint32_t configSize, - getSupportedConfigsForFeature_cb _hidl_cb) { - return mEffect->getSupportedConfigsForFeature(featureId, maxConfigs, configSize, _hidl_cb); -} - -Return EqualizerEffect::getCurrentConfigForFeature( - uint32_t featureId, - uint32_t configSize, - getCurrentConfigForFeature_cb _hidl_cb) { - return mEffect->getCurrentConfigForFeature(featureId, configSize, _hidl_cb); -} - -Return EqualizerEffect::setCurrentConfigForFeature( - uint32_t featureId, const hidl_vec& configData) { - return mEffect->setCurrentConfigForFeature(featureId, configData); -} - -Return EqualizerEffect::close() { - return mEffect->close(); -} - -// Methods from ::android::hardware::audio::effect::V2_0::IEqualizerEffect follow. -Return EqualizerEffect::getNumBands(getNumBands_cb _hidl_cb) { - return mEffect->getIntegerParam(EQ_PARAM_NUM_BANDS, _hidl_cb); -} - -Return EqualizerEffect::getLevelRange(getLevelRange_cb _hidl_cb) { - int16_t halLevels[2] = { 0, 0 }; - Result retval = mEffect->getParam(EQ_PARAM_LEVEL_RANGE, halLevels); - _hidl_cb(retval, halLevels[0], halLevels[1]); - return Void(); -} - -Return EqualizerEffect::setBandLevel(uint16_t band, int16_t level) { - return mEffect->setParam(EQ_PARAM_BAND_LEVEL, band, level); -} - -Return EqualizerEffect::getBandLevel(uint16_t band, getBandLevel_cb _hidl_cb) { - int16_t halLevel = 0; - Result retval = mEffect->getParam(EQ_PARAM_BAND_LEVEL, band, halLevel); - _hidl_cb(retval, halLevel); - return Void(); -} - -Return EqualizerEffect::getBandCenterFrequency( - uint16_t band, getBandCenterFrequency_cb _hidl_cb) { - uint32_t halFreq = 0; - Result retval = mEffect->getParam(EQ_PARAM_CENTER_FREQ, band, halFreq); - _hidl_cb(retval, halFreq); - return Void(); -} - -Return EqualizerEffect::getBandFrequencyRange( - uint16_t band, getBandFrequencyRange_cb _hidl_cb) { - uint32_t halFreqs[2] = { 0, 0 }; - Result retval = mEffect->getParam(EQ_PARAM_BAND_FREQ_RANGE, band, halFreqs); - _hidl_cb(retval, halFreqs[0], halFreqs[1]); - return Void(); -} - -Return EqualizerEffect::getBandForFrequency(uint32_t freq, getBandForFrequency_cb _hidl_cb) { - uint16_t halBand = 0; - Result retval = mEffect->getParam(EQ_PARAM_GET_BAND, freq, halBand); - _hidl_cb(retval, halBand); - return Void(); -} - -Return EqualizerEffect::getPresetNames(getPresetNames_cb _hidl_cb) { - uint16_t halPresetCount = 0; - Result retval = mEffect->getParam(EQ_PARAM_GET_NUM_OF_PRESETS, halPresetCount); - hidl_vec presetNames; - if (retval == Result::OK) { - presetNames.resize(halPresetCount); - for (uint16_t i = 0; i < halPresetCount; ++i) { - char halPresetName[EFFECT_STRING_LEN_MAX]; - retval = mEffect->getParam(EQ_PARAM_GET_PRESET_NAME, i, halPresetName); - if (retval == Result::OK) { - presetNames[i] = halPresetName; - } else { - presetNames.resize(i); - } - } - } - _hidl_cb(retval, presetNames); - return Void(); -} - -Return EqualizerEffect::setCurrentPreset(uint16_t preset) { - return 
mEffect->setParam(EQ_PARAM_CUR_PRESET, preset); -} - -Return EqualizerEffect::getCurrentPreset(getCurrentPreset_cb _hidl_cb) { - return mEffect->getIntegerParam(EQ_PARAM_CUR_PRESET, _hidl_cb); -} - -Return EqualizerEffect::setAllProperties( - const IEqualizerEffect::AllProperties& properties) { - t_equalizer_settings *halPropertiesPtr = nullptr; - std::vector halBuffer = propertiesToHal(properties, &halPropertiesPtr); - uint32_t paramId = EQ_PARAM_PROPERTIES; - return mEffect->setParameterImpl( - sizeof(paramId), ¶mId, halBuffer.size(), halPropertiesPtr); -} - -Return EqualizerEffect::getAllProperties(getAllProperties_cb _hidl_cb) { - uint16_t numBands = 0; - Result retval = mEffect->getParam(EQ_PARAM_NUM_BANDS, numBands); - AllProperties properties; - if (retval != Result::OK) { - _hidl_cb(retval, properties); - return Void(); - } - size_t valueSize = sizeof(t_equalizer_settings) + sizeof(int16_t) * numBands; - uint32_t paramId = EQ_PARAM_PROPERTIES; - retval = mEffect->getParameterImpl( - sizeof(paramId), ¶mId, valueSize, - [&] (uint32_t, const void* valueData) { - const t_equalizer_settings* halProperties = - reinterpret_cast(valueData); - propertiesFromHal(*halProperties, &properties); - }); - _hidl_cb(retval, properties); - return Void(); -} - -} // namespace implementation -} // namespace V2_0 -} // namespace effect -} // namespace audio -} // namespace hardware -} // namespace android +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/effect/2.0/default/EqualizerEffect.h b/audio/effect/2.0/default/EqualizerEffect.h index 9e8d75b4f8e3f44f71889a402fee1cd93f95cab6..54cdd50e1315a87cb55544d81dea927f1ca0e18d 100644 --- a/audio/effect/2.0/default/EqualizerEffect.h +++ b/audio/effect/2.0/default/EqualizerEffect.h @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016 The Android Open Source Project + * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -17,133 +17,12 @@ #ifndef ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_EQUALIZEREFFECT_H #define ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_EQUALIZEREFFECT_H -#include - -#include - #include -#include - -#include #include "Effect.h" -namespace android { -namespace hardware { -namespace audio { -namespace effect { -namespace V2_0 { -namespace implementation { - -using ::android::hardware::audio::common::V2_0::AudioDevice; -using ::android::hardware::audio::common::V2_0::AudioMode; -using ::android::hardware::audio::common::V2_0::AudioSource; -using ::android::hardware::audio::effect::V2_0::AudioBuffer; -using ::android::hardware::audio::effect::V2_0::EffectAuxChannelsConfig; -using ::android::hardware::audio::effect::V2_0::EffectConfig; -using ::android::hardware::audio::effect::V2_0::EffectDescriptor; -using ::android::hardware::audio::effect::V2_0::EffectOffloadParameter; -using ::android::hardware::audio::effect::V2_0::IEffect; -using ::android::hardware::audio::effect::V2_0::IEffectBufferProviderCallback; -using ::android::hardware::audio::effect::V2_0::IEqualizerEffect; -using ::android::hardware::audio::effect::V2_0::Result; -using ::android::hardware::Return; -using ::android::hardware::Void; -using ::android::hardware::hidl_vec; -using ::android::hardware::hidl_string; -using ::android::sp; - -struct EqualizerEffect : public IEqualizerEffect { - explicit EqualizerEffect(effect_handle_t handle); - - // Methods from ::android::hardware::audio::effect::V2_0::IEffect follow. 
- Return init() override; - Return setConfig( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) override; - Return reset() override; - Return enable() override; - Return disable() override; - Return setDevice(AudioDevice device) override; - Return setAndGetVolume( - const hidl_vec& volumes, setAndGetVolume_cb _hidl_cb) override; - Return volumeChangeNotification(const hidl_vec& volumes) override; - Return setAudioMode(AudioMode mode) override; - Return setConfigReverse( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) override; - Return setInputDevice(AudioDevice device) override; - Return getConfig(getConfig_cb _hidl_cb) override; - Return getConfigReverse(getConfigReverse_cb _hidl_cb) override; - Return getSupportedAuxChannelsConfigs( - uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) override; - Return getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) override; - Return setAuxChannelsConfig(const EffectAuxChannelsConfig& config) override; - Return setAudioSource(AudioSource source) override; - Return offload(const EffectOffloadParameter& param) override; - Return getDescriptor(getDescriptor_cb _hidl_cb) override; - Return prepareForProcessing(prepareForProcessing_cb _hidl_cb) override; - Return setProcessBuffers( - const AudioBuffer& inBuffer, const AudioBuffer& outBuffer) override; - Return command( - uint32_t commandId, - const hidl_vec& data, - uint32_t resultMaxSize, - command_cb _hidl_cb) override; - Return setParameter( - const hidl_vec& parameter, const hidl_vec& value) override; - Return getParameter( - const hidl_vec& parameter, - uint32_t valueMaxSize, - getParameter_cb _hidl_cb) override; - Return getSupportedConfigsForFeature( - uint32_t featureId, - uint32_t maxConfigs, - uint32_t configSize, - getSupportedConfigsForFeature_cb _hidl_cb) override; - Return getCurrentConfigForFeature( - uint32_t featureId, - uint32_t configSize, - getCurrentConfigForFeature_cb _hidl_cb) override; - Return setCurrentConfigForFeature( - uint32_t featureId, const hidl_vec& configData) override; - Return close() override; - - // Methods from ::android::hardware::audio::effect::V2_0::IEqualizerEffect follow. 
- Return getNumBands(getNumBands_cb _hidl_cb) override; - Return getLevelRange(getLevelRange_cb _hidl_cb) override; - Return setBandLevel(uint16_t band, int16_t level) override; - Return getBandLevel(uint16_t band, getBandLevel_cb _hidl_cb) override; - Return getBandCenterFrequency( - uint16_t band, getBandCenterFrequency_cb _hidl_cb) override; - Return getBandFrequencyRange(uint16_t band, getBandFrequencyRange_cb _hidl_cb) override; - Return getBandForFrequency(uint32_t freq, getBandForFrequency_cb _hidl_cb) override; - Return getPresetNames(getPresetNames_cb _hidl_cb) override; - Return setCurrentPreset(uint16_t preset) override; - Return getCurrentPreset(getCurrentPreset_cb _hidl_cb) override; - Return setAllProperties(const IEqualizerEffect::AllProperties& properties) override; - Return getAllProperties(getAllProperties_cb _hidl_cb) override; - - private: - sp mEffect; - - virtual ~EqualizerEffect(); - - void propertiesFromHal( - const t_equalizer_settings& halProperties, - IEqualizerEffect::AllProperties* properties); - std::vector propertiesToHal( - const IEqualizerEffect::AllProperties& properties, - t_equalizer_settings** halProperties); -}; - -} // namespace implementation -} // namespace V2_0 -} // namespace effect -} // namespace audio -} // namespace hardware -} // namespace android +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION #endif // ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_EQUALIZEREFFECT_H diff --git a/audio/effect/2.0/default/LoudnessEnhancerEffect.cpp b/audio/effect/2.0/default/LoudnessEnhancerEffect.cpp index fda5eb098a5f5c62586294cdfee0b93b121f90fb..2dca0f4c3983d1974a4208fd015a17ae6228247e 100644 --- a/audio/effect/2.0/default/LoudnessEnhancerEffect.cpp +++ b/audio/effect/2.0/default/LoudnessEnhancerEffect.cpp @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016 The Android Open Source Project + * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,191 +14,10 @@ * limitations under the License. */ -#include - #define LOG_TAG "LoudnessEnhancer_HAL" -#include -#include #include "LoudnessEnhancerEffect.h" -namespace android { -namespace hardware { -namespace audio { -namespace effect { -namespace V2_0 { -namespace implementation { - -LoudnessEnhancerEffect::LoudnessEnhancerEffect(effect_handle_t handle) - : mEffect(new Effect(handle)) { -} - -LoudnessEnhancerEffect::~LoudnessEnhancerEffect() {} - -// Methods from ::android::hardware::audio::effect::V2_0::IEffect follow. 
-Return LoudnessEnhancerEffect::init() { - return mEffect->init(); -} - -Return LoudnessEnhancerEffect::setConfig( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) { - return mEffect->setConfig(config, inputBufferProvider, outputBufferProvider); -} - -Return LoudnessEnhancerEffect::reset() { - return mEffect->reset(); -} - -Return LoudnessEnhancerEffect::enable() { - return mEffect->enable(); -} - -Return LoudnessEnhancerEffect::disable() { - return mEffect->disable(); -} - -Return LoudnessEnhancerEffect::setDevice(AudioDevice device) { - return mEffect->setDevice(device); -} - -Return LoudnessEnhancerEffect::setAndGetVolume( - const hidl_vec& volumes, setAndGetVolume_cb _hidl_cb) { - return mEffect->setAndGetVolume(volumes, _hidl_cb); -} - -Return LoudnessEnhancerEffect::volumeChangeNotification( - const hidl_vec& volumes) { - return mEffect->volumeChangeNotification(volumes); -} - -Return LoudnessEnhancerEffect::setAudioMode(AudioMode mode) { - return mEffect->setAudioMode(mode); -} - -Return LoudnessEnhancerEffect::setConfigReverse( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) { - return mEffect->setConfigReverse(config, inputBufferProvider, outputBufferProvider); -} - -Return LoudnessEnhancerEffect::setInputDevice(AudioDevice device) { - return mEffect->setInputDevice(device); -} - -Return LoudnessEnhancerEffect::getConfig(getConfig_cb _hidl_cb) { - return mEffect->getConfig(_hidl_cb); -} - -Return LoudnessEnhancerEffect::getConfigReverse(getConfigReverse_cb _hidl_cb) { - return mEffect->getConfigReverse(_hidl_cb); -} - -Return LoudnessEnhancerEffect::getSupportedAuxChannelsConfigs( - uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) { - return mEffect->getSupportedAuxChannelsConfigs(maxConfigs, _hidl_cb); -} - -Return LoudnessEnhancerEffect::getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) { - return mEffect->getAuxChannelsConfig(_hidl_cb); -} - -Return LoudnessEnhancerEffect::setAuxChannelsConfig( - const EffectAuxChannelsConfig& config) { - return mEffect->setAuxChannelsConfig(config); -} - -Return LoudnessEnhancerEffect::setAudioSource(AudioSource source) { - return mEffect->setAudioSource(source); -} - -Return LoudnessEnhancerEffect::offload(const EffectOffloadParameter& param) { - return mEffect->offload(param); -} - -Return LoudnessEnhancerEffect::getDescriptor(getDescriptor_cb _hidl_cb) { - return mEffect->getDescriptor(_hidl_cb); -} - -Return LoudnessEnhancerEffect::prepareForProcessing( - prepareForProcessing_cb _hidl_cb) { - return mEffect->prepareForProcessing(_hidl_cb); -} - -Return LoudnessEnhancerEffect::setProcessBuffers( - const AudioBuffer& inBuffer, const AudioBuffer& outBuffer) { - return mEffect->setProcessBuffers(inBuffer, outBuffer); -} - -Return LoudnessEnhancerEffect::command( - uint32_t commandId, - const hidl_vec& data, - uint32_t resultMaxSize, - command_cb _hidl_cb) { - return mEffect->command(commandId, data, resultMaxSize, _hidl_cb); -} - -Return LoudnessEnhancerEffect::setParameter( - const hidl_vec& parameter, const hidl_vec& value) { - return mEffect->setParameter(parameter, value); -} - -Return LoudnessEnhancerEffect::getParameter( - const hidl_vec& parameter, - uint32_t valueMaxSize, - getParameter_cb _hidl_cb) { - return mEffect->getParameter(parameter, valueMaxSize, _hidl_cb); -} - -Return LoudnessEnhancerEffect::getSupportedConfigsForFeature( - uint32_t featureId, - uint32_t maxConfigs, - uint32_t configSize, - 
getSupportedConfigsForFeature_cb _hidl_cb) { - return mEffect->getSupportedConfigsForFeature(featureId, maxConfigs, configSize, _hidl_cb); -} - -Return LoudnessEnhancerEffect::getCurrentConfigForFeature( - uint32_t featureId, - uint32_t configSize, - getCurrentConfigForFeature_cb _hidl_cb) { - return mEffect->getCurrentConfigForFeature(featureId, configSize, _hidl_cb); -} - -Return LoudnessEnhancerEffect::setCurrentConfigForFeature( - uint32_t featureId, const hidl_vec& configData) { - return mEffect->setCurrentConfigForFeature(featureId, configData); -} - -Return LoudnessEnhancerEffect::close() { - return mEffect->close(); -} - -// Methods from ::android::hardware::audio::effect::V2_0::ILoudnessEnhancerEffect follow. -Return LoudnessEnhancerEffect::setTargetGain(int32_t targetGainMb) { - return mEffect->setParam(LOUDNESS_ENHANCER_DEFAULT_TARGET_GAIN_MB, targetGainMb); -} - -Return LoudnessEnhancerEffect::getTargetGain(getTargetGain_cb _hidl_cb) { - // AOSP Loudness Enhancer expects the size of the request to not include the - // size of the parameter. - uint32_t paramId = LOUDNESS_ENHANCER_DEFAULT_TARGET_GAIN_MB; - uint32_t targetGainMb = 0; - Result retval = mEffect->getParameterImpl( - sizeof(paramId), ¶mId, - 0, sizeof(targetGainMb), - [&] (uint32_t, const void* valueData) { - memcpy(&targetGainMb, valueData, sizeof(targetGainMb)); - }); - _hidl_cb(retval, targetGainMb); - return Void(); -} - -} // namespace implementation -} // namespace V2_0 -} // namespace effect -} // namespace audio -} // namespace hardware -} // namespace android +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/effect/2.0/default/LoudnessEnhancerEffect.h b/audio/effect/2.0/default/LoudnessEnhancerEffect.h index 039b8d6a88e835ed2e4db559d86a1908e2e06bd3..992e238ef1781137c7da78faad0a180a944b8f23 100644 --- a/audio/effect/2.0/default/LoudnessEnhancerEffect.h +++ b/audio/effect/2.0/default/LoudnessEnhancerEffect.h @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016 The Android Open Source Project + * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -18,110 +18,11 @@ #define ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_LOUDNESSENHANCEREFFECT_H #include -#include - -#include #include "Effect.h" -namespace android { -namespace hardware { -namespace audio { -namespace effect { -namespace V2_0 { -namespace implementation { - -using ::android::hardware::audio::common::V2_0::AudioDevice; -using ::android::hardware::audio::common::V2_0::AudioMode; -using ::android::hardware::audio::common::V2_0::AudioSource; -using ::android::hardware::audio::effect::V2_0::AudioBuffer; -using ::android::hardware::audio::effect::V2_0::EffectAuxChannelsConfig; -using ::android::hardware::audio::effect::V2_0::EffectConfig; -using ::android::hardware::audio::effect::V2_0::EffectDescriptor; -using ::android::hardware::audio::effect::V2_0::EffectOffloadParameter; -using ::android::hardware::audio::effect::V2_0::IEffect; -using ::android::hardware::audio::effect::V2_0::IEffectBufferProviderCallback; -using ::android::hardware::audio::effect::V2_0::ILoudnessEnhancerEffect; -using ::android::hardware::audio::effect::V2_0::Result; -using ::android::hardware::Return; -using ::android::hardware::Void; -using ::android::hardware::hidl_vec; -using ::android::hardware::hidl_string; -using ::android::sp; - -struct LoudnessEnhancerEffect : public ILoudnessEnhancerEffect { - explicit LoudnessEnhancerEffect(effect_handle_t handle); - - // Methods from ::android::hardware::audio::effect::V2_0::IEffect follow. - Return init() override; - Return setConfig( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) override; - Return reset() override; - Return enable() override; - Return disable() override; - Return setDevice(AudioDevice device) override; - Return setAndGetVolume( - const hidl_vec& volumes, setAndGetVolume_cb _hidl_cb) override; - Return volumeChangeNotification(const hidl_vec& volumes) override; - Return setAudioMode(AudioMode mode) override; - Return setConfigReverse( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) override; - Return setInputDevice(AudioDevice device) override; - Return getConfig(getConfig_cb _hidl_cb) override; - Return getConfigReverse(getConfigReverse_cb _hidl_cb) override; - Return getSupportedAuxChannelsConfigs( - uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) override; - Return getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) override; - Return setAuxChannelsConfig(const EffectAuxChannelsConfig& config) override; - Return setAudioSource(AudioSource source) override; - Return offload(const EffectOffloadParameter& param) override; - Return getDescriptor(getDescriptor_cb _hidl_cb) override; - Return prepareForProcessing(prepareForProcessing_cb _hidl_cb) override; - Return setProcessBuffers( - const AudioBuffer& inBuffer, const AudioBuffer& outBuffer) override; - Return command( - uint32_t commandId, - const hidl_vec& data, - uint32_t resultMaxSize, - command_cb _hidl_cb) override; - Return setParameter( - const hidl_vec& parameter, const hidl_vec& value) override; - Return getParameter( - const hidl_vec& parameter, - uint32_t valueMaxSize, - getParameter_cb _hidl_cb) override; - Return getSupportedConfigsForFeature( - uint32_t featureId, - uint32_t maxConfigs, - uint32_t configSize, - getSupportedConfigsForFeature_cb _hidl_cb) override; - Return getCurrentConfigForFeature( - uint32_t featureId, - uint32_t configSize, - getCurrentConfigForFeature_cb _hidl_cb) override; - Return setCurrentConfigForFeature( - uint32_t featureId, const 
hidl_vec& configData) override; - Return close() override; - - // Methods from ::android::hardware::audio::effect::V2_0::ILoudnessEnhancerEffect follow. - Return setTargetGain(int32_t targetGainMb) override; - Return getTargetGain(getTargetGain_cb _hidl_cb) override; - - private: - sp mEffect; - - virtual ~LoudnessEnhancerEffect(); -}; - -} // namespace implementation -} // namespace V2_0 -} // namespace effect -} // namespace audio -} // namespace hardware -} // namespace android +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION #endif // ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_LOUDNESSENHANCEREFFECT_H diff --git a/audio/effect/2.0/default/NoiseSuppressionEffect.cpp b/audio/effect/2.0/default/NoiseSuppressionEffect.cpp index 7c4e06da7604f3e3b6877d358e306fe7ca7d3f02..089e811e096bc42f2a3c179df91eb7b96b8a24cb 100644 --- a/audio/effect/2.0/default/NoiseSuppressionEffect.cpp +++ b/audio/effect/2.0/default/NoiseSuppressionEffect.cpp @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016 The Android Open Source Project + * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -15,220 +15,9 @@ */ #define LOG_TAG "NS_Effect_HAL" -#include #include "NoiseSuppressionEffect.h" -namespace android { -namespace hardware { -namespace audio { -namespace effect { -namespace V2_0 { -namespace implementation { - -NoiseSuppressionEffect::NoiseSuppressionEffect(effect_handle_t handle) - : mEffect(new Effect(handle)) { -} - -NoiseSuppressionEffect::~NoiseSuppressionEffect() {} - -void NoiseSuppressionEffect::propertiesFromHal( - const t_ns_settings& halProperties, - INoiseSuppressionEffect::AllProperties* properties) { - properties->level = Level(halProperties.level); - properties->type = Type(halProperties.type); -} - -void NoiseSuppressionEffect::propertiesToHal( - const INoiseSuppressionEffect::AllProperties& properties, - t_ns_settings* halProperties) { - halProperties->level = static_cast(properties.level); - halProperties->type = static_cast(properties.type); -} - -// Methods from ::android::hardware::audio::effect::V2_0::IEffect follow. 
-Return NoiseSuppressionEffect::init() { - return mEffect->init(); -} - -Return NoiseSuppressionEffect::setConfig( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) { - return mEffect->setConfig(config, inputBufferProvider, outputBufferProvider); -} - -Return NoiseSuppressionEffect::reset() { - return mEffect->reset(); -} - -Return NoiseSuppressionEffect::enable() { - return mEffect->enable(); -} - -Return NoiseSuppressionEffect::disable() { - return mEffect->disable(); -} - -Return NoiseSuppressionEffect::setDevice(AudioDevice device) { - return mEffect->setDevice(device); -} - -Return NoiseSuppressionEffect::setAndGetVolume( - const hidl_vec& volumes, setAndGetVolume_cb _hidl_cb) { - return mEffect->setAndGetVolume(volumes, _hidl_cb); -} - -Return NoiseSuppressionEffect::volumeChangeNotification( - const hidl_vec& volumes) { - return mEffect->volumeChangeNotification(volumes); -} - -Return NoiseSuppressionEffect::setAudioMode(AudioMode mode) { - return mEffect->setAudioMode(mode); -} - -Return NoiseSuppressionEffect::setConfigReverse( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) { - return mEffect->setConfigReverse(config, inputBufferProvider, outputBufferProvider); -} - -Return NoiseSuppressionEffect::setInputDevice(AudioDevice device) { - return mEffect->setInputDevice(device); -} - -Return NoiseSuppressionEffect::getConfig(getConfig_cb _hidl_cb) { - return mEffect->getConfig(_hidl_cb); -} - -Return NoiseSuppressionEffect::getConfigReverse(getConfigReverse_cb _hidl_cb) { - return mEffect->getConfigReverse(_hidl_cb); -} - -Return NoiseSuppressionEffect::getSupportedAuxChannelsConfigs( - uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) { - return mEffect->getSupportedAuxChannelsConfigs(maxConfigs, _hidl_cb); -} - -Return NoiseSuppressionEffect::getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) { - return mEffect->getAuxChannelsConfig(_hidl_cb); -} - -Return NoiseSuppressionEffect::setAuxChannelsConfig( - const EffectAuxChannelsConfig& config) { - return mEffect->setAuxChannelsConfig(config); -} - -Return NoiseSuppressionEffect::setAudioSource(AudioSource source) { - return mEffect->setAudioSource(source); -} - -Return NoiseSuppressionEffect::offload(const EffectOffloadParameter& param) { - return mEffect->offload(param); -} - -Return NoiseSuppressionEffect::getDescriptor(getDescriptor_cb _hidl_cb) { - return mEffect->getDescriptor(_hidl_cb); -} - -Return NoiseSuppressionEffect::prepareForProcessing( - prepareForProcessing_cb _hidl_cb) { - return mEffect->prepareForProcessing(_hidl_cb); -} - -Return NoiseSuppressionEffect::setProcessBuffers( - const AudioBuffer& inBuffer, const AudioBuffer& outBuffer) { - return mEffect->setProcessBuffers(inBuffer, outBuffer); -} - -Return NoiseSuppressionEffect::command( - uint32_t commandId, - const hidl_vec& data, - uint32_t resultMaxSize, - command_cb _hidl_cb) { - return mEffect->command(commandId, data, resultMaxSize, _hidl_cb); -} - -Return NoiseSuppressionEffect::setParameter( - const hidl_vec& parameter, const hidl_vec& value) { - return mEffect->setParameter(parameter, value); -} - -Return NoiseSuppressionEffect::getParameter( - const hidl_vec& parameter, - uint32_t valueMaxSize, - getParameter_cb _hidl_cb) { - return mEffect->getParameter(parameter, valueMaxSize, _hidl_cb); -} - -Return NoiseSuppressionEffect::getSupportedConfigsForFeature( - uint32_t featureId, - uint32_t maxConfigs, - uint32_t configSize, - 
getSupportedConfigsForFeature_cb _hidl_cb) { - return mEffect->getSupportedConfigsForFeature(featureId, maxConfigs, configSize, _hidl_cb); -} - -Return NoiseSuppressionEffect::getCurrentConfigForFeature( - uint32_t featureId, - uint32_t configSize, - getCurrentConfigForFeature_cb _hidl_cb) { - return mEffect->getCurrentConfigForFeature(featureId, configSize, _hidl_cb); -} - -Return NoiseSuppressionEffect::setCurrentConfigForFeature( - uint32_t featureId, const hidl_vec& configData) { - return mEffect->setCurrentConfigForFeature(featureId, configData); -} - -Return NoiseSuppressionEffect::close() { - return mEffect->close(); -} - -// Methods from ::android::hardware::audio::effect::V2_0::INoiseSuppressionEffect follow. -Return NoiseSuppressionEffect::setSuppressionLevel(INoiseSuppressionEffect::Level level) { - return mEffect->setParam(NS_PARAM_LEVEL, static_cast(level)); -} - -Return NoiseSuppressionEffect::getSuppressionLevel(getSuppressionLevel_cb _hidl_cb) { - int32_t halLevel = 0; - Result retval = mEffect->getParam(NS_PARAM_LEVEL, halLevel); - _hidl_cb(retval, Level(halLevel)); - return Void(); -} - -Return NoiseSuppressionEffect::setSuppressionType(INoiseSuppressionEffect::Type type) { - return mEffect->setParam(NS_PARAM_TYPE, static_cast(type)); -} - -Return NoiseSuppressionEffect::getSuppressionType(getSuppressionType_cb _hidl_cb) { - int32_t halType = 0; - Result retval = mEffect->getParam(NS_PARAM_TYPE, halType); - _hidl_cb(retval, Type(halType)); - return Void(); -} - -Return NoiseSuppressionEffect::setAllProperties( - const INoiseSuppressionEffect::AllProperties& properties) { - t_ns_settings halProperties; - propertiesToHal(properties, &halProperties); - return mEffect->setParam(NS_PARAM_PROPERTIES, halProperties); -} - -Return NoiseSuppressionEffect::getAllProperties(getAllProperties_cb _hidl_cb) { - t_ns_settings halProperties; - Result retval = mEffect->getParam(NS_PARAM_PROPERTIES, halProperties); - AllProperties properties; - propertiesFromHal(halProperties, &properties); - _hidl_cb(retval, properties); - return Void(); -} - -} // namespace implementation -} // namespace V2_0 -} // namespace effect -} // namespace audio -} // namespace hardware -} // namespace android +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/effect/2.0/default/NoiseSuppressionEffect.h b/audio/effect/2.0/default/NoiseSuppressionEffect.h index 5491201e980d155eab7fe54773df662e63978181..0eee4b51b22bd2f05c89bed097c5af05f7d810c0 100644 --- a/audio/effect/2.0/default/NoiseSuppressionEffect.h +++ b/audio/effect/2.0/default/NoiseSuppressionEffect.h @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016 The Android Open Source Project + * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -17,125 +17,12 @@ #ifndef ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_NOISESUPPRESSIONEFFECT_H #define ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_NOISESUPPRESSIONEFFECT_H -#include - #include -#include - -#include #include "Effect.h" -namespace android { -namespace hardware { -namespace audio { -namespace effect { -namespace V2_0 { -namespace implementation { - -using ::android::hardware::audio::common::V2_0::AudioDevice; -using ::android::hardware::audio::common::V2_0::AudioMode; -using ::android::hardware::audio::common::V2_0::AudioSource; -using ::android::hardware::audio::effect::V2_0::AudioBuffer; -using ::android::hardware::audio::effect::V2_0::EffectAuxChannelsConfig; -using ::android::hardware::audio::effect::V2_0::EffectConfig; -using ::android::hardware::audio::effect::V2_0::EffectDescriptor; -using ::android::hardware::audio::effect::V2_0::EffectOffloadParameter; -using ::android::hardware::audio::effect::V2_0::IEffect; -using ::android::hardware::audio::effect::V2_0::IEffectBufferProviderCallback; -using ::android::hardware::audio::effect::V2_0::INoiseSuppressionEffect; -using ::android::hardware::audio::effect::V2_0::Result; -using ::android::hardware::Return; -using ::android::hardware::Void; -using ::android::hardware::hidl_vec; -using ::android::hardware::hidl_string; -using ::android::sp; - -struct NoiseSuppressionEffect : public INoiseSuppressionEffect { - explicit NoiseSuppressionEffect(effect_handle_t handle); - - // Methods from ::android::hardware::audio::effect::V2_0::IEffect follow. - Return init() override; - Return setConfig( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) override; - Return reset() override; - Return enable() override; - Return disable() override; - Return setDevice(AudioDevice device) override; - Return setAndGetVolume( - const hidl_vec& volumes, setAndGetVolume_cb _hidl_cb) override; - Return volumeChangeNotification(const hidl_vec& volumes) override; - Return setAudioMode(AudioMode mode) override; - Return setConfigReverse( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) override; - Return setInputDevice(AudioDevice device) override; - Return getConfig(getConfig_cb _hidl_cb) override; - Return getConfigReverse(getConfigReverse_cb _hidl_cb) override; - Return getSupportedAuxChannelsConfigs( - uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) override; - Return getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) override; - Return setAuxChannelsConfig(const EffectAuxChannelsConfig& config) override; - Return setAudioSource(AudioSource source) override; - Return offload(const EffectOffloadParameter& param) override; - Return getDescriptor(getDescriptor_cb _hidl_cb) override; - Return prepareForProcessing(prepareForProcessing_cb _hidl_cb) override; - Return setProcessBuffers( - const AudioBuffer& inBuffer, const AudioBuffer& outBuffer) override; - Return command( - uint32_t commandId, - const hidl_vec& data, - uint32_t resultMaxSize, - command_cb _hidl_cb) override; - Return setParameter( - const hidl_vec& parameter, const hidl_vec& value) override; - Return getParameter( - const hidl_vec& parameter, - uint32_t valueMaxSize, - getParameter_cb _hidl_cb) override; - Return getSupportedConfigsForFeature( - uint32_t featureId, - uint32_t maxConfigs, - uint32_t configSize, - getSupportedConfigsForFeature_cb _hidl_cb) override; - Return getCurrentConfigForFeature( - uint32_t featureId, - uint32_t configSize, - getCurrentConfigForFeature_cb _hidl_cb) 
override; - Return setCurrentConfigForFeature( - uint32_t featureId, const hidl_vec& configData) override; - Return close() override; - - // Methods from ::android::hardware::audio::effect::V2_0::INoiseSuppressionEffect follow. - Return setSuppressionLevel(INoiseSuppressionEffect::Level level) override; - Return getSuppressionLevel(getSuppressionLevel_cb _hidl_cb) override; - Return setSuppressionType(INoiseSuppressionEffect::Type type) override; - Return getSuppressionType(getSuppressionType_cb _hidl_cb) override; - Return setAllProperties( - const INoiseSuppressionEffect::AllProperties& properties) override; - Return getAllProperties(getAllProperties_cb _hidl_cb) override; - - private: - sp mEffect; - - virtual ~NoiseSuppressionEffect(); - - void propertiesFromHal( - const t_ns_settings& halProperties, - INoiseSuppressionEffect::AllProperties* properties); - void propertiesToHal( - const INoiseSuppressionEffect::AllProperties& properties, - t_ns_settings* halProperties); -}; - -} // namespace implementation -} // namespace V2_0 -} // namespace effect -} // namespace audio -} // namespace hardware -} // namespace android +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION #endif // ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_NOISESUPPRESSIONEFFECT_H diff --git a/audio/effect/2.0/default/PresetReverbEffect.cpp b/audio/effect/2.0/default/PresetReverbEffect.cpp index 5f17791d77611a010a00945ee43d7442d9497fd0..0648f6a8eba012eb4d89b86796929af11a179eb7 100644 --- a/audio/effect/2.0/default/PresetReverbEffect.cpp +++ b/audio/effect/2.0/default/PresetReverbEffect.cpp @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016 The Android Open Source Project + * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -15,180 +15,9 @@ */ #define LOG_TAG "PresetReverb_HAL" -#include -#include #include "PresetReverbEffect.h" -namespace android { -namespace hardware { -namespace audio { -namespace effect { -namespace V2_0 { -namespace implementation { - -PresetReverbEffect::PresetReverbEffect(effect_handle_t handle) - : mEffect(new Effect(handle)) { -} - -PresetReverbEffect::~PresetReverbEffect() {} - -// Methods from ::android::hardware::audio::effect::V2_0::IEffect follow. 
-Return PresetReverbEffect::init() { - return mEffect->init(); -} - -Return PresetReverbEffect::setConfig( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) { - return mEffect->setConfig(config, inputBufferProvider, outputBufferProvider); -} - -Return PresetReverbEffect::reset() { - return mEffect->reset(); -} - -Return PresetReverbEffect::enable() { - return mEffect->enable(); -} - -Return PresetReverbEffect::disable() { - return mEffect->disable(); -} - -Return PresetReverbEffect::setDevice(AudioDevice device) { - return mEffect->setDevice(device); -} - -Return PresetReverbEffect::setAndGetVolume( - const hidl_vec& volumes, setAndGetVolume_cb _hidl_cb) { - return mEffect->setAndGetVolume(volumes, _hidl_cb); -} - -Return PresetReverbEffect::volumeChangeNotification( - const hidl_vec& volumes) { - return mEffect->volumeChangeNotification(volumes); -} - -Return PresetReverbEffect::setAudioMode(AudioMode mode) { - return mEffect->setAudioMode(mode); -} - -Return PresetReverbEffect::setConfigReverse( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) { - return mEffect->setConfigReverse(config, inputBufferProvider, outputBufferProvider); -} - -Return PresetReverbEffect::setInputDevice(AudioDevice device) { - return mEffect->setInputDevice(device); -} - -Return PresetReverbEffect::getConfig(getConfig_cb _hidl_cb) { - return mEffect->getConfig(_hidl_cb); -} - -Return PresetReverbEffect::getConfigReverse(getConfigReverse_cb _hidl_cb) { - return mEffect->getConfigReverse(_hidl_cb); -} - -Return PresetReverbEffect::getSupportedAuxChannelsConfigs( - uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) { - return mEffect->getSupportedAuxChannelsConfigs(maxConfigs, _hidl_cb); -} - -Return PresetReverbEffect::getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) { - return mEffect->getAuxChannelsConfig(_hidl_cb); -} - -Return PresetReverbEffect::setAuxChannelsConfig( - const EffectAuxChannelsConfig& config) { - return mEffect->setAuxChannelsConfig(config); -} - -Return PresetReverbEffect::setAudioSource(AudioSource source) { - return mEffect->setAudioSource(source); -} - -Return PresetReverbEffect::offload(const EffectOffloadParameter& param) { - return mEffect->offload(param); -} - -Return PresetReverbEffect::getDescriptor(getDescriptor_cb _hidl_cb) { - return mEffect->getDescriptor(_hidl_cb); -} - -Return PresetReverbEffect::prepareForProcessing( - prepareForProcessing_cb _hidl_cb) { - return mEffect->prepareForProcessing(_hidl_cb); -} - -Return PresetReverbEffect::setProcessBuffers( - const AudioBuffer& inBuffer, const AudioBuffer& outBuffer) { - return mEffect->setProcessBuffers(inBuffer, outBuffer); -} - -Return PresetReverbEffect::command( - uint32_t commandId, - const hidl_vec& data, - uint32_t resultMaxSize, - command_cb _hidl_cb) { - return mEffect->command(commandId, data, resultMaxSize, _hidl_cb); -} - -Return PresetReverbEffect::setParameter( - const hidl_vec& parameter, const hidl_vec& value) { - return mEffect->setParameter(parameter, value); -} - -Return PresetReverbEffect::getParameter( - const hidl_vec& parameter, - uint32_t valueMaxSize, - getParameter_cb _hidl_cb) { - return mEffect->getParameter(parameter, valueMaxSize, _hidl_cb); -} - -Return PresetReverbEffect::getSupportedConfigsForFeature( - uint32_t featureId, - uint32_t maxConfigs, - uint32_t configSize, - getSupportedConfigsForFeature_cb _hidl_cb) { - return mEffect->getSupportedConfigsForFeature(featureId, maxConfigs, 
configSize, _hidl_cb); -} - -Return PresetReverbEffect::getCurrentConfigForFeature( - uint32_t featureId, - uint32_t configSize, - getCurrentConfigForFeature_cb _hidl_cb) { - return mEffect->getCurrentConfigForFeature(featureId, configSize, _hidl_cb); -} - -Return PresetReverbEffect::setCurrentConfigForFeature( - uint32_t featureId, const hidl_vec& configData) { - return mEffect->setCurrentConfigForFeature(featureId, configData); -} - -Return PresetReverbEffect::close() { - return mEffect->close(); -} - -// Methods from ::android::hardware::audio::effect::V2_0::IPresetReverbEffect follow. -Return PresetReverbEffect::setPreset(IPresetReverbEffect::Preset preset) { - return mEffect->setParam(REVERB_PARAM_PRESET, static_cast(preset)); -} - -Return PresetReverbEffect::getPreset(getPreset_cb _hidl_cb) { - t_reverb_presets halPreset = REVERB_PRESET_NONE; - Result retval = mEffect->getParam(REVERB_PARAM_PRESET, halPreset); - _hidl_cb(retval, Preset(halPreset)); - return Void(); -} - -} // namespace implementation -} // namespace V2_0 -} // namespace effect -} // namespace audio -} // namespace hardware -} // namespace android +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/effect/2.0/default/PresetReverbEffect.h b/audio/effect/2.0/default/PresetReverbEffect.h index 4eb074a828dba4afe233e1eeb1e6e3306ce35bb9..1ea1626ffa901680ded426315ce1fda7e355ce7f 100644 --- a/audio/effect/2.0/default/PresetReverbEffect.h +++ b/audio/effect/2.0/default/PresetReverbEffect.h @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016 The Android Open Source Project + * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -18,110 +18,11 @@ #define ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_PRESETREVERBEFFECT_H #include -#include - -#include #include "Effect.h" -namespace android { -namespace hardware { -namespace audio { -namespace effect { -namespace V2_0 { -namespace implementation { - -using ::android::hardware::audio::common::V2_0::AudioDevice; -using ::android::hardware::audio::common::V2_0::AudioMode; -using ::android::hardware::audio::common::V2_0::AudioSource; -using ::android::hardware::audio::effect::V2_0::AudioBuffer; -using ::android::hardware::audio::effect::V2_0::EffectAuxChannelsConfig; -using ::android::hardware::audio::effect::V2_0::EffectConfig; -using ::android::hardware::audio::effect::V2_0::EffectDescriptor; -using ::android::hardware::audio::effect::V2_0::EffectOffloadParameter; -using ::android::hardware::audio::effect::V2_0::IEffect; -using ::android::hardware::audio::effect::V2_0::IEffectBufferProviderCallback; -using ::android::hardware::audio::effect::V2_0::IPresetReverbEffect; -using ::android::hardware::audio::effect::V2_0::Result; -using ::android::hardware::Return; -using ::android::hardware::Void; -using ::android::hardware::hidl_vec; -using ::android::hardware::hidl_string; -using ::android::sp; - -struct PresetReverbEffect : public IPresetReverbEffect { - explicit PresetReverbEffect(effect_handle_t handle); - - // Methods from ::android::hardware::audio::effect::V2_0::IEffect follow. 
- Return init() override; - Return setConfig( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) override; - Return reset() override; - Return enable() override; - Return disable() override; - Return setDevice(AudioDevice device) override; - Return setAndGetVolume( - const hidl_vec& volumes, setAndGetVolume_cb _hidl_cb) override; - Return volumeChangeNotification(const hidl_vec& volumes) override; - Return setAudioMode(AudioMode mode) override; - Return setConfigReverse( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) override; - Return setInputDevice(AudioDevice device) override; - Return getConfig(getConfig_cb _hidl_cb) override; - Return getConfigReverse(getConfigReverse_cb _hidl_cb) override; - Return getSupportedAuxChannelsConfigs( - uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) override; - Return getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) override; - Return setAuxChannelsConfig(const EffectAuxChannelsConfig& config) override; - Return setAudioSource(AudioSource source) override; - Return offload(const EffectOffloadParameter& param) override; - Return getDescriptor(getDescriptor_cb _hidl_cb) override; - Return prepareForProcessing(prepareForProcessing_cb _hidl_cb) override; - Return setProcessBuffers( - const AudioBuffer& inBuffer, const AudioBuffer& outBuffer) override; - Return command( - uint32_t commandId, - const hidl_vec& data, - uint32_t resultMaxSize, - command_cb _hidl_cb) override; - Return setParameter( - const hidl_vec& parameter, const hidl_vec& value) override; - Return getParameter( - const hidl_vec& parameter, - uint32_t valueMaxSize, - getParameter_cb _hidl_cb) override; - Return getSupportedConfigsForFeature( - uint32_t featureId, - uint32_t maxConfigs, - uint32_t configSize, - getSupportedConfigsForFeature_cb _hidl_cb) override; - Return getCurrentConfigForFeature( - uint32_t featureId, - uint32_t configSize, - getCurrentConfigForFeature_cb _hidl_cb) override; - Return setCurrentConfigForFeature( - uint32_t featureId, const hidl_vec& configData) override; - Return close() override; - - // Methods from ::android::hardware::audio::effect::V2_0::IPresetReverbEffect follow. - Return setPreset(IPresetReverbEffect::Preset preset) override; - Return getPreset(getPreset_cb _hidl_cb) override; - - private: - sp mEffect; - - virtual ~PresetReverbEffect(); -}; - -} // namespace implementation -} // namespace V2_0 -} // namespace effect -} // namespace audio -} // namespace hardware -} // namespace android +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION #endif // ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_PRESETREVERBEFFECT_H diff --git a/audio/effect/2.0/default/VirtualizerEffect.cpp b/audio/effect/2.0/default/VirtualizerEffect.cpp index c1fe52fbe9993f8e5cae272ade63ae72be584721..63d3eb925f9cc999588b8f5a17ba07a44ed6640d 100644 --- a/audio/effect/2.0/default/VirtualizerEffect.cpp +++ b/audio/effect/2.0/default/VirtualizerEffect.cpp @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016 The Android Open Source Project + * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,235 +14,10 @@ * limitations under the License. 
*/ -#include - #define LOG_TAG "Virtualizer_HAL" -#include -#include #include "VirtualizerEffect.h" -namespace android { -namespace hardware { -namespace audio { -namespace effect { -namespace V2_0 { -namespace implementation { - -VirtualizerEffect::VirtualizerEffect(effect_handle_t handle) - : mEffect(new Effect(handle)) { -} - -VirtualizerEffect::~VirtualizerEffect() {} - -void VirtualizerEffect::speakerAnglesFromHal( - const int32_t* halAngles, uint32_t channelCount, hidl_vec& speakerAngles) { - speakerAngles.resize(channelCount); - for (uint32_t i = 0; i < channelCount; ++i) { - speakerAngles[i].mask = AudioChannelMask(*halAngles++); - speakerAngles[i].azimuth = *halAngles++; - speakerAngles[i].elevation = *halAngles++; - } -} - -// Methods from ::android::hardware::audio::effect::V2_0::IEffect follow. -Return VirtualizerEffect::init() { - return mEffect->init(); -} - -Return VirtualizerEffect::setConfig( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) { - return mEffect->setConfig(config, inputBufferProvider, outputBufferProvider); -} - -Return VirtualizerEffect::reset() { - return mEffect->reset(); -} - -Return VirtualizerEffect::enable() { - return mEffect->enable(); -} - -Return VirtualizerEffect::disable() { - return mEffect->disable(); -} - -Return VirtualizerEffect::setDevice(AudioDevice device) { - return mEffect->setDevice(device); -} - -Return VirtualizerEffect::setAndGetVolume( - const hidl_vec& volumes, setAndGetVolume_cb _hidl_cb) { - return mEffect->setAndGetVolume(volumes, _hidl_cb); -} - -Return VirtualizerEffect::volumeChangeNotification( - const hidl_vec& volumes) { - return mEffect->volumeChangeNotification(volumes); -} - -Return VirtualizerEffect::setAudioMode(AudioMode mode) { - return mEffect->setAudioMode(mode); -} - -Return VirtualizerEffect::setConfigReverse( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) { - return mEffect->setConfigReverse(config, inputBufferProvider, outputBufferProvider); -} - -Return VirtualizerEffect::setInputDevice(AudioDevice device) { - return mEffect->setInputDevice(device); -} - -Return VirtualizerEffect::getConfig(getConfig_cb _hidl_cb) { - return mEffect->getConfig(_hidl_cb); -} - -Return VirtualizerEffect::getConfigReverse(getConfigReverse_cb _hidl_cb) { - return mEffect->getConfigReverse(_hidl_cb); -} - -Return VirtualizerEffect::getSupportedAuxChannelsConfigs( - uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) { - return mEffect->getSupportedAuxChannelsConfigs(maxConfigs, _hidl_cb); -} - -Return VirtualizerEffect::getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) { - return mEffect->getAuxChannelsConfig(_hidl_cb); -} - -Return VirtualizerEffect::setAuxChannelsConfig( - const EffectAuxChannelsConfig& config) { - return mEffect->setAuxChannelsConfig(config); -} - -Return VirtualizerEffect::setAudioSource(AudioSource source) { - return mEffect->setAudioSource(source); -} - -Return VirtualizerEffect::offload(const EffectOffloadParameter& param) { - return mEffect->offload(param); -} - -Return VirtualizerEffect::getDescriptor(getDescriptor_cb _hidl_cb) { - return mEffect->getDescriptor(_hidl_cb); -} - -Return VirtualizerEffect::prepareForProcessing( - prepareForProcessing_cb _hidl_cb) { - return mEffect->prepareForProcessing(_hidl_cb); -} - -Return VirtualizerEffect::setProcessBuffers( - const AudioBuffer& inBuffer, const AudioBuffer& outBuffer) { - return mEffect->setProcessBuffers(inBuffer, outBuffer); -} - 
-Return VirtualizerEffect::command( - uint32_t commandId, - const hidl_vec& data, - uint32_t resultMaxSize, - command_cb _hidl_cb) { - return mEffect->command(commandId, data, resultMaxSize, _hidl_cb); -} - -Return VirtualizerEffect::setParameter( - const hidl_vec& parameter, const hidl_vec& value) { - return mEffect->setParameter(parameter, value); -} - -Return VirtualizerEffect::getParameter( - const hidl_vec& parameter, - uint32_t valueMaxSize, - getParameter_cb _hidl_cb) { - return mEffect->getParameter(parameter, valueMaxSize, _hidl_cb); -} - -Return VirtualizerEffect::getSupportedConfigsForFeature( - uint32_t featureId, - uint32_t maxConfigs, - uint32_t configSize, - getSupportedConfigsForFeature_cb _hidl_cb) { - return mEffect->getSupportedConfigsForFeature(featureId, maxConfigs, configSize, _hidl_cb); -} - -Return VirtualizerEffect::getCurrentConfigForFeature( - uint32_t featureId, - uint32_t configSize, - getCurrentConfigForFeature_cb _hidl_cb) { - return mEffect->getCurrentConfigForFeature(featureId, configSize, _hidl_cb); -} - -Return VirtualizerEffect::setCurrentConfigForFeature( - uint32_t featureId, const hidl_vec& configData) { - return mEffect->setCurrentConfigForFeature(featureId, configData); -} - -Return VirtualizerEffect::close() { - return mEffect->close(); -} - -// Methods from ::android::hardware::audio::effect::V2_0::IVirtualizerEffect follow. -Return VirtualizerEffect::isStrengthSupported() { - bool halSupported = false; - mEffect->getParam(VIRTUALIZER_PARAM_STRENGTH_SUPPORTED, halSupported); - return halSupported; -} - -Return VirtualizerEffect::setStrength(uint16_t strength) { - return mEffect->setParam(VIRTUALIZER_PARAM_STRENGTH, strength); -} - -Return VirtualizerEffect::getStrength(getStrength_cb _hidl_cb) { - return mEffect->getIntegerParam(VIRTUALIZER_PARAM_STRENGTH, _hidl_cb); -} - -Return VirtualizerEffect::getVirtualSpeakerAngles( - AudioChannelMask mask, AudioDevice device, getVirtualSpeakerAngles_cb _hidl_cb) { - uint32_t channelCount = audio_channel_count_from_out_mask( - static_cast(mask)); - size_t halSpeakerAnglesSize = sizeof(int32_t) * 3 * channelCount; - uint32_t halParam[3] = { - VIRTUALIZER_PARAM_VIRTUAL_SPEAKER_ANGLES, - static_cast(mask), - static_cast(device) - }; - hidl_vec speakerAngles; - Result retval = mEffect->getParameterImpl( - sizeof(halParam), halParam, - halSpeakerAnglesSize, - [&] (uint32_t valueSize, const void* valueData) { - if (valueSize > halSpeakerAnglesSize) { - valueSize = halSpeakerAnglesSize; - } else if (valueSize < halSpeakerAnglesSize) { - channelCount = valueSize / (sizeof(int32_t) * 3); - } - speakerAnglesFromHal( - reinterpret_cast(valueData), channelCount, speakerAngles); - }); - _hidl_cb(retval, speakerAngles); - return Void(); -} - -Return VirtualizerEffect::forceVirtualizationMode(AudioDevice device) { - return mEffect->setParam( - VIRTUALIZER_PARAM_FORCE_VIRTUALIZATION_MODE, static_cast(device)); -} - -Return VirtualizerEffect::getVirtualizationMode(getVirtualizationMode_cb _hidl_cb) { - uint32_t halMode = 0; - Result retval = mEffect->getParam(VIRTUALIZER_PARAM_FORCE_VIRTUALIZATION_MODE, halMode); - _hidl_cb(retval, AudioDevice(halMode)); - return Void(); -} - -} // namespace implementation -} // namespace V2_0 -} // namespace effect -} // namespace audio -} // namespace hardware -} // namespace android +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/effect/2.0/default/VirtualizerEffect.h b/audio/effect/2.0/default/VirtualizerEffect.h index 
536775f414cb3e82d721fd6ced9b524fc4514814..04f93c4c72a7ffe531b6fe7968b522f2cedb000c 100644 --- a/audio/effect/2.0/default/VirtualizerEffect.h +++ b/audio/effect/2.0/default/VirtualizerEffect.h @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016 The Android Open Source Project + * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -18,121 +18,11 @@ #define ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_VIRTUALIZEREFFECT_H #include -#include - -#include #include "Effect.h" -namespace android { -namespace hardware { -namespace audio { -namespace effect { -namespace V2_0 { -namespace implementation { - -using ::android::hardware::audio::common::V2_0::AudioChannelMask; -using ::android::hardware::audio::common::V2_0::AudioDevice; -using ::android::hardware::audio::common::V2_0::AudioMode; -using ::android::hardware::audio::common::V2_0::AudioSource; -using ::android::hardware::audio::effect::V2_0::AudioBuffer; -using ::android::hardware::audio::effect::V2_0::EffectAuxChannelsConfig; -using ::android::hardware::audio::effect::V2_0::EffectConfig; -using ::android::hardware::audio::effect::V2_0::EffectDescriptor; -using ::android::hardware::audio::effect::V2_0::EffectOffloadParameter; -using ::android::hardware::audio::effect::V2_0::IEffect; -using ::android::hardware::audio::effect::V2_0::IEffectBufferProviderCallback; -using ::android::hardware::audio::effect::V2_0::IVirtualizerEffect; -using ::android::hardware::audio::effect::V2_0::Result; -using ::android::hardware::Return; -using ::android::hardware::Void; -using ::android::hardware::hidl_vec; -using ::android::hardware::hidl_string; -using ::android::sp; - -struct VirtualizerEffect : public IVirtualizerEffect { - explicit VirtualizerEffect(effect_handle_t handle); - - // Methods from ::android::hardware::audio::effect::V2_0::IEffect follow. 
- Return init() override; - Return setConfig( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) override; - Return reset() override; - Return enable() override; - Return disable() override; - Return setDevice(AudioDevice device) override; - Return setAndGetVolume( - const hidl_vec& volumes, setAndGetVolume_cb _hidl_cb) override; - Return volumeChangeNotification(const hidl_vec& volumes) override; - Return setAudioMode(AudioMode mode) override; - Return setConfigReverse( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) override; - Return setInputDevice(AudioDevice device) override; - Return getConfig(getConfig_cb _hidl_cb) override; - Return getConfigReverse(getConfigReverse_cb _hidl_cb) override; - Return getSupportedAuxChannelsConfigs( - uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) override; - Return getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) override; - Return setAuxChannelsConfig(const EffectAuxChannelsConfig& config) override; - Return setAudioSource(AudioSource source) override; - Return offload(const EffectOffloadParameter& param) override; - Return getDescriptor(getDescriptor_cb _hidl_cb) override; - Return prepareForProcessing(prepareForProcessing_cb _hidl_cb) override; - Return setProcessBuffers( - const AudioBuffer& inBuffer, const AudioBuffer& outBuffer) override; - Return command( - uint32_t commandId, - const hidl_vec& data, - uint32_t resultMaxSize, - command_cb _hidl_cb) override; - Return setParameter( - const hidl_vec& parameter, const hidl_vec& value) override; - Return getParameter( - const hidl_vec& parameter, - uint32_t valueMaxSize, - getParameter_cb _hidl_cb) override; - Return getSupportedConfigsForFeature( - uint32_t featureId, - uint32_t maxConfigs, - uint32_t configSize, - getSupportedConfigsForFeature_cb _hidl_cb) override; - Return getCurrentConfigForFeature( - uint32_t featureId, - uint32_t configSize, - getCurrentConfigForFeature_cb _hidl_cb) override; - Return setCurrentConfigForFeature( - uint32_t featureId, const hidl_vec& configData) override; - Return close() override; - - // Methods from ::android::hardware::audio::effect::V2_0::IVirtualizerEffect follow. 
- Return isStrengthSupported() override; - Return setStrength(uint16_t strength) override; - Return getStrength(getStrength_cb _hidl_cb) override; - Return getVirtualSpeakerAngles( - AudioChannelMask mask, - AudioDevice device, - getVirtualSpeakerAngles_cb _hidl_cb) override; - Return forceVirtualizationMode(AudioDevice device) override; - Return getVirtualizationMode(getVirtualizationMode_cb _hidl_cb) override; - - private: - sp mEffect; - - virtual ~VirtualizerEffect(); - - void speakerAnglesFromHal( - const int32_t* halAngles, uint32_t channelCount, hidl_vec& speakerAngles); -}; - -} // namespace implementation -} // namespace V2_0 -} // namespace effect -} // namespace audio -} // namespace hardware -} // namespace android +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION #endif // ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_VIRTUALIZEREFFECT_H diff --git a/audio/effect/2.0/default/VisualizerEffect.cpp b/audio/effect/2.0/default/VisualizerEffect.cpp index 2cd32405191ea7fda2c812a573c64c8ba0861e75..523552466d22e4f4a60459a9b9e0ca8ef72d9957 100644 --- a/audio/effect/2.0/default/VisualizerEffect.cpp +++ b/audio/effect/2.0/default/VisualizerEffect.cpp @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016 The Android Open Source Project + * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -15,254 +15,9 @@ */ #define LOG_TAG "Visualizer_HAL" -#include -#include #include "VisualizerEffect.h" -namespace android { -namespace hardware { -namespace audio { -namespace effect { -namespace V2_0 { -namespace implementation { - -VisualizerEffect::VisualizerEffect(effect_handle_t handle) - : mEffect(new Effect(handle)), mCaptureSize(0), mMeasurementMode(MeasurementMode::NONE) { -} - -VisualizerEffect::~VisualizerEffect() {} - -// Methods from ::android::hardware::audio::effect::V2_0::IEffect follow. 
-Return VisualizerEffect::init() { - return mEffect->init(); -} - -Return VisualizerEffect::setConfig( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) { - return mEffect->setConfig(config, inputBufferProvider, outputBufferProvider); -} - -Return VisualizerEffect::reset() { - return mEffect->reset(); -} - -Return VisualizerEffect::enable() { - return mEffect->enable(); -} - -Return VisualizerEffect::disable() { - return mEffect->disable(); -} - -Return VisualizerEffect::setDevice(AudioDevice device) { - return mEffect->setDevice(device); -} - -Return VisualizerEffect::setAndGetVolume( - const hidl_vec& volumes, setAndGetVolume_cb _hidl_cb) { - return mEffect->setAndGetVolume(volumes, _hidl_cb); -} - -Return VisualizerEffect::volumeChangeNotification( - const hidl_vec& volumes) { - return mEffect->volumeChangeNotification(volumes); -} - -Return VisualizerEffect::setAudioMode(AudioMode mode) { - return mEffect->setAudioMode(mode); -} - -Return VisualizerEffect::setConfigReverse( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) { - return mEffect->setConfigReverse(config, inputBufferProvider, outputBufferProvider); -} - -Return VisualizerEffect::setInputDevice(AudioDevice device) { - return mEffect->setInputDevice(device); -} - -Return VisualizerEffect::getConfig(getConfig_cb _hidl_cb) { - return mEffect->getConfig(_hidl_cb); -} - -Return VisualizerEffect::getConfigReverse(getConfigReverse_cb _hidl_cb) { - return mEffect->getConfigReverse(_hidl_cb); -} - -Return VisualizerEffect::getSupportedAuxChannelsConfigs( - uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) { - return mEffect->getSupportedAuxChannelsConfigs(maxConfigs, _hidl_cb); -} - -Return VisualizerEffect::getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) { - return mEffect->getAuxChannelsConfig(_hidl_cb); -} - -Return VisualizerEffect::setAuxChannelsConfig( - const EffectAuxChannelsConfig& config) { - return mEffect->setAuxChannelsConfig(config); -} - -Return VisualizerEffect::setAudioSource(AudioSource source) { - return mEffect->setAudioSource(source); -} - -Return VisualizerEffect::offload(const EffectOffloadParameter& param) { - return mEffect->offload(param); -} - -Return VisualizerEffect::getDescriptor(getDescriptor_cb _hidl_cb) { - return mEffect->getDescriptor(_hidl_cb); -} - -Return VisualizerEffect::prepareForProcessing( - prepareForProcessing_cb _hidl_cb) { - return mEffect->prepareForProcessing(_hidl_cb); -} - -Return VisualizerEffect::setProcessBuffers( - const AudioBuffer& inBuffer, const AudioBuffer& outBuffer) { - return mEffect->setProcessBuffers(inBuffer, outBuffer); -} - -Return VisualizerEffect::command( - uint32_t commandId, - const hidl_vec& data, - uint32_t resultMaxSize, - command_cb _hidl_cb) { - return mEffect->command(commandId, data, resultMaxSize, _hidl_cb); -} - -Return VisualizerEffect::setParameter( - const hidl_vec& parameter, const hidl_vec& value) { - return mEffect->setParameter(parameter, value); -} - -Return VisualizerEffect::getParameter( - const hidl_vec& parameter, - uint32_t valueMaxSize, - getParameter_cb _hidl_cb) { - return mEffect->getParameter(parameter, valueMaxSize, _hidl_cb); -} - -Return VisualizerEffect::getSupportedConfigsForFeature( - uint32_t featureId, - uint32_t maxConfigs, - uint32_t configSize, - getSupportedConfigsForFeature_cb _hidl_cb) { - return mEffect->getSupportedConfigsForFeature(featureId, maxConfigs, configSize, _hidl_cb); -} - -Return 
VisualizerEffect::getCurrentConfigForFeature( - uint32_t featureId, - uint32_t configSize, - getCurrentConfigForFeature_cb _hidl_cb) { - return mEffect->getCurrentConfigForFeature(featureId, configSize, _hidl_cb); -} - -Return VisualizerEffect::setCurrentConfigForFeature( - uint32_t featureId, const hidl_vec& configData) { - return mEffect->setCurrentConfigForFeature(featureId, configData); -} - -Return VisualizerEffect::close() { - return mEffect->close(); -} - -// Methods from ::android::hardware::audio::effect::V2_0::IVisualizerEffect follow. -Return VisualizerEffect::setCaptureSize(uint16_t captureSize) { - Result retval = mEffect->setParam(VISUALIZER_PARAM_CAPTURE_SIZE, captureSize); - if (retval == Result::OK) { - mCaptureSize = captureSize; - } - return retval; -} - -Return VisualizerEffect::getCaptureSize(getCaptureSize_cb _hidl_cb) { - return mEffect->getIntegerParam(VISUALIZER_PARAM_CAPTURE_SIZE, _hidl_cb); -} - -Return VisualizerEffect::setScalingMode(IVisualizerEffect::ScalingMode scalingMode) { - return mEffect->setParam(VISUALIZER_PARAM_SCALING_MODE, static_cast(scalingMode)); -} - -Return VisualizerEffect::getScalingMode(getScalingMode_cb _hidl_cb) { - int32_t halMode; - Result retval = mEffect->getParam(VISUALIZER_PARAM_SCALING_MODE, halMode); - _hidl_cb(retval, ScalingMode(halMode)); - return Void(); -} - -Return VisualizerEffect::setLatency(uint32_t latencyMs) { - return mEffect->setParam(VISUALIZER_PARAM_LATENCY, latencyMs); -} - -Return VisualizerEffect::getLatency(getLatency_cb _hidl_cb) { - return mEffect->getIntegerParam(VISUALIZER_PARAM_LATENCY, _hidl_cb); -} - -Return VisualizerEffect::setMeasurementMode( - IVisualizerEffect::MeasurementMode measurementMode) { - Result retval = mEffect->setParam( - VISUALIZER_PARAM_MEASUREMENT_MODE, static_cast(measurementMode)); - if (retval == Result::OK) { - mMeasurementMode = measurementMode; - } - return retval; -} - -Return VisualizerEffect::getMeasurementMode(getMeasurementMode_cb _hidl_cb) { - int32_t halMode; - Result retval = mEffect->getParam(VISUALIZER_PARAM_MEASUREMENT_MODE, halMode); - _hidl_cb(retval, MeasurementMode(halMode)); - return Void(); -} - -Return VisualizerEffect::capture(capture_cb _hidl_cb) { - if (mCaptureSize == 0) { - _hidl_cb(Result::NOT_INITIALIZED, hidl_vec()); - return Void(); - } - uint32_t halCaptureSize = mCaptureSize; - uint8_t halCapture[mCaptureSize]; - Result retval = mEffect->sendCommandReturningData( - VISUALIZER_CMD_CAPTURE, "VISUALIZER_CAPTURE", &halCaptureSize, halCapture); - hidl_vec capture; - if (retval == Result::OK) { - capture.setToExternal(&halCapture[0], halCaptureSize); - } - _hidl_cb(retval, capture); - return Void(); -} - -Return VisualizerEffect::measure(measure_cb _hidl_cb) { - if (mMeasurementMode == MeasurementMode::NONE) { - _hidl_cb(Result::NOT_INITIALIZED, Measurement()); - return Void(); - } - int32_t halMeasurement[MEASUREMENT_COUNT]; - uint32_t halMeasurementSize = sizeof(halMeasurement); - Result retval = mEffect->sendCommandReturningData( - VISUALIZER_CMD_MEASURE, "VISUALIZER_MEASURE", &halMeasurementSize, halMeasurement); - Measurement measurement = { .mode = MeasurementMode::PEAK_RMS }; - measurement.value.peakAndRms.peakMb = 0; - measurement.value.peakAndRms.rmsMb = 0; - if (retval == Result::OK) { - measurement.value.peakAndRms.peakMb = halMeasurement[MEASUREMENT_IDX_PEAK]; - measurement.value.peakAndRms.rmsMb = halMeasurement[MEASUREMENT_IDX_RMS]; - } - _hidl_cb(retval, measurement); - return Void(); -} - -} // namespace implementation -} // namespace V2_0 
-} // namespace effect -} // namespace audio -} // namespace hardware -} // namespace android +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/effect/2.0/default/VisualizerEffect.h b/audio/effect/2.0/default/VisualizerEffect.h index fd40ca88e619c2fc5e690f793c8d6403e606bb50..940f15de9bb81c2574bca5fdcd0c157bf965ecb1 100644 --- a/audio/effect/2.0/default/VisualizerEffect.h +++ b/audio/effect/2.0/default/VisualizerEffect.h @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016 The Android Open Source Project + * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -18,120 +18,11 @@ #define ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_VISUALIZEREFFECT_H #include -#include - -#include #include "Effect.h" -namespace android { -namespace hardware { -namespace audio { -namespace effect { -namespace V2_0 { -namespace implementation { - -using ::android::hardware::audio::common::V2_0::AudioDevice; -using ::android::hardware::audio::common::V2_0::AudioMode; -using ::android::hardware::audio::common::V2_0::AudioSource; -using ::android::hardware::audio::effect::V2_0::AudioBuffer; -using ::android::hardware::audio::effect::V2_0::EffectAuxChannelsConfig; -using ::android::hardware::audio::effect::V2_0::EffectConfig; -using ::android::hardware::audio::effect::V2_0::EffectDescriptor; -using ::android::hardware::audio::effect::V2_0::EffectOffloadParameter; -using ::android::hardware::audio::effect::V2_0::IEffect; -using ::android::hardware::audio::effect::V2_0::IEffectBufferProviderCallback; -using ::android::hardware::audio::effect::V2_0::IVisualizerEffect; -using ::android::hardware::audio::effect::V2_0::Result; -using ::android::hardware::Return; -using ::android::hardware::Void; -using ::android::hardware::hidl_vec; -using ::android::hardware::hidl_string; -using ::android::sp; - -struct VisualizerEffect : public IVisualizerEffect { - explicit VisualizerEffect(effect_handle_t handle); - - // Methods from ::android::hardware::audio::effect::V2_0::IEffect follow. 
- Return init() override; - Return setConfig( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) override; - Return reset() override; - Return enable() override; - Return disable() override; - Return setDevice(AudioDevice device) override; - Return setAndGetVolume( - const hidl_vec& volumes, setAndGetVolume_cb _hidl_cb) override; - Return volumeChangeNotification(const hidl_vec& volumes) override; - Return setAudioMode(AudioMode mode) override; - Return setConfigReverse( - const EffectConfig& config, - const sp& inputBufferProvider, - const sp& outputBufferProvider) override; - Return setInputDevice(AudioDevice device) override; - Return getConfig(getConfig_cb _hidl_cb) override; - Return getConfigReverse(getConfigReverse_cb _hidl_cb) override; - Return getSupportedAuxChannelsConfigs( - uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) override; - Return getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) override; - Return setAuxChannelsConfig(const EffectAuxChannelsConfig& config) override; - Return setAudioSource(AudioSource source) override; - Return offload(const EffectOffloadParameter& param) override; - Return getDescriptor(getDescriptor_cb _hidl_cb) override; - Return prepareForProcessing(prepareForProcessing_cb _hidl_cb) override; - Return setProcessBuffers( - const AudioBuffer& inBuffer, const AudioBuffer& outBuffer) override; - Return command( - uint32_t commandId, - const hidl_vec& data, - uint32_t resultMaxSize, - command_cb _hidl_cb) override; - Return setParameter( - const hidl_vec& parameter, const hidl_vec& value) override; - Return getParameter( - const hidl_vec& parameter, - uint32_t valueMaxSize, - getParameter_cb _hidl_cb) override; - Return getSupportedConfigsForFeature( - uint32_t featureId, - uint32_t maxConfigs, - uint32_t configSize, - getSupportedConfigsForFeature_cb _hidl_cb) override; - Return getCurrentConfigForFeature( - uint32_t featureId, - uint32_t configSize, - getCurrentConfigForFeature_cb _hidl_cb) override; - Return setCurrentConfigForFeature( - uint32_t featureId, const hidl_vec& configData) override; - Return close() override; - - // Methods from ::android::hardware::audio::effect::V2_0::IVisualizerEffect follow. 
- Return setCaptureSize(uint16_t captureSize) override; - Return getCaptureSize(getCaptureSize_cb _hidl_cb) override; - Return setScalingMode(IVisualizerEffect::ScalingMode scalingMode) override; - Return getScalingMode(getScalingMode_cb _hidl_cb) override; - Return setLatency(uint32_t latencyMs) override; - Return getLatency(getLatency_cb _hidl_cb) override; - Return setMeasurementMode(IVisualizerEffect::MeasurementMode measurementMode) override; - Return getMeasurementMode(getMeasurementMode_cb _hidl_cb) override; - Return capture(capture_cb _hidl_cb) override; - Return measure(measure_cb _hidl_cb) override; - - private: - sp mEffect; - uint16_t mCaptureSize; - MeasurementMode mMeasurementMode; - - virtual ~VisualizerEffect(); -}; - -} // namespace implementation -} // namespace V2_0 -} // namespace effect -} // namespace audio -} // namespace hardware -} // namespace android +#define AUDIO_HAL_VERSION V2_0 +#include +#undef AUDIO_HAL_VERSION #endif // ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_VISUALIZEREFFECT_H diff --git a/audio/effect/2.0/vts/functional/Android.bp b/audio/effect/2.0/vts/functional/Android.bp index 7b421cb7e77b86243b288de147d8e49815956d00..51d2e11a199b65ab1f1b5f39603bdd192cfb6747 100644 --- a/audio/effect/2.0/vts/functional/Android.bp +++ b/audio/effect/2.0/vts/functional/Android.bp @@ -27,9 +27,9 @@ cc_test { "android.hardware.audio.effect@2.0", "android.hidl.allocator@1.0", "android.hidl.memory@1.0", - "libxml2", - ], - shared_libs: [ + "libeffectsconfig", "libicuuc", + "libicuuc_stubdata", + "libxml2", ], } diff --git a/audio/effect/2.0/vts/functional/ValidateAudioEffectsConfiguration.cpp b/audio/effect/2.0/vts/functional/ValidateAudioEffectsConfiguration.cpp index fdc1347497562ed6f73de993f1dcf3859526c313..d0bc6908d6308ef32515cd1dddb57402183d0342 100644 --- a/audio/effect/2.0/vts/functional/ValidateAudioEffectsConfiguration.cpp +++ b/audio/effect/2.0/vts/functional/ValidateAudioEffectsConfiguration.cpp @@ -15,16 +15,18 @@ */ #include +#include + +#include #include "utility/ValidateXml.h" TEST(CheckConfig, audioEffectsConfigurationValidation) { RecordProperty("description", "Verify that the effects configuration file is valid according to the schema"); - const char* xmlConfigFile = "/vendor/etc/audio_effects.xml"; - // Not every device uses XML configuration, so only validate - // if the XML configuration actually exists. - if (access(xmlConfigFile, F_OK) == 0) { - ASSERT_VALID_XML(xmlConfigFile, "/data/local/tmp/audio_effects_conf_V2_0.xsd"); - } + using namespace android::effectsConfig; + + std::vector locations(std::begin(DEFAULT_LOCATIONS), std::end(DEFAULT_LOCATIONS)); + EXPECT_ONE_VALID_XML_MULTIPLE_LOCATIONS(DEFAULT_NAME, locations, + "/data/local/tmp/audio_effects_conf_V2_0.xsd"); } diff --git a/audio/effect/2.0/vts/functional/VtsHalAudioEffectV2_0TargetTest.cpp b/audio/effect/2.0/vts/functional/VtsHalAudioEffectV2_0TargetTest.cpp index f069c467c79024e6d79188de5b6f79289236c42c..c90c4fab2e6298ff61077d3cb07b1ce7b5af9517 100644 --- a/audio/effect/2.0/vts/functional/VtsHalAudioEffectV2_0TargetTest.cpp +++ b/audio/effect/2.0/vts/functional/VtsHalAudioEffectV2_0TargetTest.cpp @@ -27,6 +27,7 @@ #include #include +#include using android::hardware::audio::common::V2_0::AudioDevice; using android::hardware::audio::common::V2_0::AudioHandleConsts; @@ -59,13 +60,26 @@ using android::sp; #define ARRAY_SIZE(a) (sizeof(a) / sizeof(*(a))) #endif +// Test environment for Audio Effects Factory HIDL HAL. 
+class AudioEffectsFactoryHidlEnvironment : public ::testing::VtsHalHidlTargetTestEnvBase { + public: + // get the test environment singleton + static AudioEffectsFactoryHidlEnvironment* Instance() { + static AudioEffectsFactoryHidlEnvironment* instance = + new AudioEffectsFactoryHidlEnvironment; + return instance; + } + + virtual void registerTestServices() override { registerTestService(); } +}; + // The main test class for Audio Effects Factory HIDL HAL. class AudioEffectsFactoryHidlTest : public ::testing::VtsHalHidlTargetTestBase { public: void SetUp() override { - effectsFactory = - ::testing::VtsHalHidlTargetTestBase::getService(); - ASSERT_NE(effectsFactory, nullptr); + effectsFactory = ::testing::VtsHalHidlTargetTestBase::getService( + AudioEffectsFactoryHidlEnvironment::Instance()->getServiceName()); + ASSERT_NE(effectsFactory, nullptr); } void TearDown() override { effectsFactory.clear(); } @@ -824,3 +838,12 @@ TEST_F(LoudnessEnhancerAudioEffectHidlTest, GetSetTargetGain) { EXPECT_EQ(Result::OK, retval); EXPECT_EQ(gain, actualGain); } + +int main(int argc, char** argv) { + ::testing::AddGlobalTestEnvironment(AudioEffectsFactoryHidlEnvironment::Instance()); + ::testing::InitGoogleTest(&argc, argv); + AudioEffectsFactoryHidlEnvironment::Instance()->init(&argc, argv); + int status = RUN_ALL_TESTS(); + LOG(INFO) << "Test result = " << status; + return status; +} diff --git a/audio/effect/4.0/Android.bp b/audio/effect/4.0/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..e7676a91da4f4f2d974b2431263a1779e287e55f --- /dev/null +++ b/audio/effect/4.0/Android.bp @@ -0,0 +1,47 @@ +// This file is autogenerated by hidl-gen -Landroidbp. + +hidl_interface { + name: "android.hardware.audio.effect@4.0", + root: "android.hardware", + vndk: { + enabled: true, + }, + srcs: [ + "types.hal", + "IAcousticEchoCancelerEffect.hal", + "IAutomaticGainControlEffect.hal", + "IBassBoostEffect.hal", + "IDownmixEffect.hal", + "IEffect.hal", + "IEffectBufferProviderCallback.hal", + "IEffectsFactory.hal", + "IEnvironmentalReverbEffect.hal", + "IEqualizerEffect.hal", + "ILoudnessEnhancerEffect.hal", + "INoiseSuppressionEffect.hal", + "IPresetReverbEffect.hal", + "IVirtualizerEffect.hal", + "IVisualizerEffect.hal", + ], + interfaces: [ + "android.hardware.audio.common@4.0", + "android.hidl.base@1.0", + ], + types: [ + "AudioBuffer", + "EffectAuxChannelsConfig", + "EffectBufferAccess", + "EffectBufferConfig", + "EffectConfig", + "EffectConfigParameters", + "EffectDescriptor", + "EffectFeature", + "EffectFlags", + "EffectOffloadParameter", + "MessageQueueFlagBits", + "Result", + ], + gen_java: false, + gen_java_constants: true, +} + diff --git a/audio/effect/4.0/IAcousticEchoCancelerEffect.hal b/audio/effect/4.0/IAcousticEchoCancelerEffect.hal new file mode 100644 index 0000000000000000000000000000000000000000..f495e6f8f639b81cab86a76ed69ac1038be3b9fa --- /dev/null +++ b/audio/effect/4.0/IAcousticEchoCancelerEffect.hal @@ -0,0 +1,32 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.audio.effect@4.0; + +import android.hardware.audio.common@4.0; +import IEffect; + +interface IAcousticEchoCancelerEffect extends IEffect { + /** + * Sets echo delay value in milliseconds. + */ + setEchoDelay(uint32_t echoDelayMs) generates (Result retval); + + /** + * Gets echo delay value in milliseconds. + */ + getEchoDelay() generates (Result retval, uint32_t echoDelayMs); +}; diff --git a/audio/effect/4.0/IAutomaticGainControlEffect.hal b/audio/effect/4.0/IAutomaticGainControlEffect.hal new file mode 100644 index 0000000000000000000000000000000000000000..d7fa04c88bbcdf0036ff88fb39e47e613982196c --- /dev/null +++ b/audio/effect/4.0/IAutomaticGainControlEffect.hal @@ -0,0 +1,68 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.audio.effect@4.0; + +import android.hardware.audio.common@4.0; +import IEffect; + +interface IAutomaticGainControlEffect extends IEffect { + /** + * Sets target level in millibels. + */ + setTargetLevel(int16_t targetLevelMb) generates (Result retval); + + /** + * Gets target level. + */ + getTargetLevel() generates (Result retval, int16_t targetLevelMb); + + /** + * Sets gain in the compression range in millibels. + */ + setCompGain(int16_t compGainMb) generates (Result retval); + + /** + * Gets gain in the compression range. + */ + getCompGain() generates (Result retval, int16_t compGainMb); + + /** + * Enables or disables limiter. + */ + setLimiterEnabled(bool enabled) generates (Result retval); + + /** + * Returns whether limiter is enabled. + */ + isLimiterEnabled() generates (Result retval, bool enabled); + + struct AllProperties { + int16_t targetLevelMb; + int16_t compGainMb; + bool limiterEnabled; + }; + + /** + * Sets all properties at once. + */ + setAllProperties(AllProperties properties) generates (Result retval); + + /** + * Gets all properties at once. + */ + getAllProperties() generates (Result retval, AllProperties properties); +}; diff --git a/audio/effect/4.0/IBassBoostEffect.hal b/audio/effect/4.0/IBassBoostEffect.hal new file mode 100644 index 0000000000000000000000000000000000000000..bd302f69ca87b00b3f108926793666608393084c --- /dev/null +++ b/audio/effect/4.0/IBassBoostEffect.hal @@ -0,0 +1,48 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.audio.effect@4.0; + +import android.hardware.audio.common@4.0; +import IEffect; + +interface IBassBoostEffect extends IEffect { + /** + * Returns whether setting bass boost strength is supported. + */ + isStrengthSupported() generates (Result retval, bool strengthSupported); + + enum StrengthRange : uint16_t { + MIN = 0, + MAX = 1000 + }; + + /** + * Sets bass boost strength. + * + * @param strength strength of the effect. The valid range for strength + * is [0, 1000], where 0 per mille designates the + * mildest effect and 1000 per mille designates the + * strongest. + * @return retval operation completion status. + */ + setStrength(uint16_t strength) generates (Result retval); + + /** + * Gets bass boost strength. + */ + getStrength() generates (Result retval, uint16_t strength); +}; diff --git a/audio/effect/4.0/IDownmixEffect.hal b/audio/effect/4.0/IDownmixEffect.hal new file mode 100644 index 0000000000000000000000000000000000000000..3ce3a79d57dfe27c3699b62615e6049982b5527d --- /dev/null +++ b/audio/effect/4.0/IDownmixEffect.hal @@ -0,0 +1,37 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.audio.effect@4.0; + +import android.hardware.audio.common@4.0; +import IEffect; + +interface IDownmixEffect extends IEffect { + enum Type : int32_t { + STRIP, // throw away the extra channels + FOLD // mix the extra channels with FL/FR + }; + + /** + * Sets the current downmix preset. + */ + setType(Type preset) generates (Result retval); + + /** + * Gets the current downmix preset. + */ + getType() generates (Result retval, Type preset); +}; diff --git a/audio/effect/4.0/IEffect.hal b/audio/effect/4.0/IEffect.hal new file mode 100644 index 0000000000000000000000000000000000000000..d1d949614feaa517e1b62b5552311d789e33de85 --- /dev/null +++ b/audio/effect/4.0/IEffect.hal @@ -0,0 +1,418 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.audio.effect@4.0; + +import android.hardware.audio.common@4.0; +import IEffectBufferProviderCallback; + +interface IEffect { + /** + * Initialize effect engine--all configurations return to default. + * + * @return retval operation completion status.
+ */ + @entry + init() generates (Result retval); + + /** + * Apply new audio parameters configurations for input and output buffers. + * The provider callbacks may be empty, but in this case the buffer + * must be provided in the EffectConfig structure. + * + * @param config configuration descriptor. + * @param inputBufferProvider optional buffer provider reference. + * @param outputBufferProvider optional buffer provider reference. + * @return retval operation completion status. + */ + setConfig(EffectConfig config, + IEffectBufferProviderCallback inputBufferProvider, + IEffectBufferProviderCallback outputBufferProvider) + generates (Result retval); + + /** + * Reset the effect engine. Keep configuration but resets state and buffer + * content. + * + * @return retval operation completion status. + */ + reset() generates (Result retval); + + /** + * Enable processing. + * + * @return retval operation completion status. + */ + @callflow(next={"prepareForProcessing"}) + enable() generates (Result retval); + + /** + * Disable processing. + * + * @return retval operation completion status. + */ + @callflow(next={"close"}) + disable() generates (Result retval); + + /** + * Set the rendering device the audio output path is connected to. The + * effect implementation must set EFFECT_FLAG_DEVICE_IND flag in its + * descriptor to receive this command when the device changes. + * + * Note: this method is only supported for effects inserted into + * the output chain. + * + * @param device output device specification. + * @return retval operation completion status. + */ + setDevice(bitfield device) generates (Result retval); + + /** + * Set and get volume. Used by audio framework to delegate volume control to + * effect engine. The effect implementation must set EFFECT_FLAG_VOLUME_CTRL + * flag in its descriptor to receive this command. The effect engine must + * return the volume that should be applied before the effect is + * processed. The overall volume (the volume actually applied by the effect + * engine multiplied by the returned value) should match the value indicated + * in the command. + * + * @param volumes vector containing volume for each channel defined in + * EffectConfig for output buffer expressed in 8.24 fixed + * point format. + * @return result updated volume values. + * @return retval operation completion status. + */ + setAndGetVolume(vec volumes) + generates (Result retval, vec result); + + /** + * Notify the effect of the volume change. The effect implementation must + * set EFFECT_FLAG_VOLUME_IND flag in its descriptor to receive this + * command. + * + * @param volumes vector containing volume for each channel defined in + * EffectConfig for output buffer expressed in 8.24 fixed + * point format. + * @return retval operation completion status. + */ + volumeChangeNotification(vec volumes) + generates (Result retval); + + /** + * Set the audio mode. The effect implementation must set + * EFFECT_FLAG_AUDIO_MODE_IND flag in its descriptor to receive this command + * when the audio mode changes. + * + * @param mode desired audio mode. + * @return retval operation completion status. + */ + setAudioMode(AudioMode mode) generates (Result retval); + + /** + * Apply new audio parameters configurations for input and output buffers of + * reverse stream. An example of reverse stream is the echo reference + * supplied to an Acoustic Echo Canceler. + * + * @param config configuration descriptor. + * @param inputBufferProvider optional buffer provider reference. 
+ * @param outputBufferProvider optional buffer provider reference. + * @return retval operation completion status. + */ + setConfigReverse(EffectConfig config, + IEffectBufferProviderCallback inputBufferProvider, + IEffectBufferProviderCallback outputBufferProvider) + generates (Result retval); + + /** + * Set the capture device the audio input path is connected to. The effect + * implementation must set EFFECT_FLAG_DEVICE_IND flag in its descriptor to + * receive this command when the device changes. + * + * Note: this method is only supported for effects inserted into + * the input chain. + * + * @param device input device specification. + * @return retval operation completion status. + */ + setInputDevice(bitfield device) generates (Result retval); + + /** + * Read audio parameters configurations for input and output buffers. + * + * @return retval operation completion status. + * @return config configuration descriptor. + */ + getConfig() generates (Result retval, EffectConfig config); + + /** + * Read audio parameters configurations for input and output buffers of + * reverse stream. + * + * @return retval operation completion status. + * @return config configuration descriptor. + */ + getConfigReverse() generates (Result retval, EffectConfig config); + + /** + * Queries for supported combinations of main and auxiliary channels + * (e.g. for a multi-microphone noise suppressor). + * + * @param maxConfigs maximum number of the combinations to return. + * @return retval absence of the feature support is indicated using + * NOT_SUPPORTED code. RESULT_TOO_BIG is returned if + * the number of supported combinations exceeds 'maxConfigs'. + * @return result list of configuration descriptors. + */ + getSupportedAuxChannelsConfigs(uint32_t maxConfigs) + generates (Result retval, vec result); + + /** + * Retrieves the current configuration of main and auxiliary channels. + * + * @return retval absence of the feature support is indicated using + * NOT_SUPPORTED code. + * @return result configuration descriptor. + */ + getAuxChannelsConfig() + generates (Result retval, EffectAuxChannelsConfig result); + + /** + * Sets the current configuration of main and auxiliary channels. + * + * @return retval operation completion status; absence of the feature + * support is indicated using NOT_SUPPORTED code. + */ + setAuxChannelsConfig(EffectAuxChannelsConfig config) + generates (Result retval); + + /** + * Set the audio source the capture path is configured for (Camcorder, voice + * recognition...). + * + * Note: this method is only supported for effects inserted into + * the input chain. + * + * @param source source descriptor. + * @return retval operation completion status. + */ + setAudioSource(AudioSource source) generates (Result retval); + + /** + * This command indicates if the playback thread the effect is attached to + * is offloaded or not, and updates the I/O handle of the playback thread + * the effect is attached to. + * + * @param param effect offload descriptor. + * @return retval operation completion status. + */ + offload(EffectOffloadParameter param) generates (Result retval); + + /** + * Returns the effect descriptor. + * + * @return retval operation completion status. + * @return descriptor effect descriptor. + */ + getDescriptor() generates (Result retval, EffectDescriptor descriptor); + + /** + * Set up required transports for passing audio buffers to the effect. + * + * The transport consists of shared memory and a message queue for reporting + * effect processing operation status. 
The shared memory is set up + * separately using 'setProcessBuffers' method. + * + * Processing is requested by setting 'REQUEST_PROCESS' or + * 'REQUEST_PROCESS_REVERSE' EventFlags associated with the status message + * queue. The result of processing may be one of the following: + * OK if there were no errors during processing; + * INVALID_ARGUMENTS if audio buffers are invalid; + * INVALID_STATE if the engine has finished the disable phase; + * NOT_INITIALIZED if the audio buffers were not set; + * NOT_SUPPORTED if the requested processing type is not supported by + * the effect. + * + * @return retval OK if both message queues were created successfully. + * INVALID_STATE if the method was already called. + * INVALID_ARGUMENTS if there was a problem setting up + * the queue. + * @return statusMQ a message queue used for passing status from the effect. + */ + @callflow(next={"setProcessBuffers"}) + prepareForProcessing() generates (Result retval, fmq_sync statusMQ); + + /** + * Set up input and output buffers for processing audio data. The effect + * may modify both the input and the output buffer during the operation. + * Buffers may be set multiple times during effect lifetime. + * + * The input and the output buffer may be reused between different effects, + * and the input buffer may be used as an output buffer. Buffers are + * distinguished using 'AudioBuffer.id' field. + * + * @param inBuffer input audio buffer. + * @param outBuffer output audio buffer. + * @return retval OK if both buffers were mapped successfully. + * INVALID_ARGUMENTS if there was a problem with mapping + * any of the buffers. + */ + setProcessBuffers(AudioBuffer inBuffer, AudioBuffer outBuffer) + generates (Result retval); + + /** + * Execute a vendor specific command on the effect. The command code + * and data, as well as result data are not interpreted by Android + * Framework and are passed as-is between the application and the effect. + * + * The effect must use standard POSIX.1-2001 error codes for the operation + * completion status. + * + * Use this method only if the effect is provided by a third party, and + * there is no interface defined for it. This method only works for effects + * implemented in software. + * + * @param commandId the ID of the command. + * @param data command data. + * @param resultMaxSize maximum size in bytes of the result; can be 0. + * @return status command completion status. + * @return result result data. + */ + command(uint32_t commandId, vec data, uint32_t resultMaxSize) + generates (int32_t status, vec result); + + /** + * Set a vendor-specific parameter and apply it immediately. The parameter + * code and data are not interpreted by Android Framework and are passed + * as-is between the application and the effect. + * + * The effect must use INVALID_ARGUMENTS return code if the parameter ID is + * unknown or if provided parameter data is invalid. If the effect does not + * support setting vendor-specific parameters, it must return NOT_SUPPORTED. + * + * Use this method only if the effect is provided by a third party, and + * there is no interface defined for it. This method only works for effects + * implemented in software. + * + * @param parameter identifying data of the parameter. + * @param value the value of the parameter. + * @return retval operation completion status. + */ + setParameter(vec parameter, vec value) + generates (Result retval); + + /** + * Get a vendor-specific parameter value. 
The parameter code and returned + * data are not interpreted by Android Framework and are passed as-is + * between the application and the effect. + * + * The effect must use INVALID_ARGUMENTS return code if the parameter ID is + * unknown. If the effect does not support getting vendor-specific + * parameters, it must return NOT_SUPPORTED. + * + * Use this method only if the effect is provided by a third party, and + * there is no interface defined for it. This method only works for effects + * implemented in software. + * + * @param parameter identifying data of the parameter. + * @param valueMaxSize maximum size in bytes of the value. + * @return retval operation completion status. + * @return result the value of the parameter. + */ + getParameter(vec parameter, uint32_t valueMaxSize) + generates (Result retval, vec value); + + /** + * Get supported configs for a vendor-specific feature. The configs returned + * are not interpreted by Android Framework and are passed as-is between the + * application and the effect. + * + * The effect must use INVALID_ARGUMENTS return code if the feature ID is + * unknown. If the effect does not support getting vendor-specific feature + * configs, it must return NOT_SUPPORTED. If the feature is supported but + * the total number of supported configurations exceeds the maximum number + * indicated by the caller, the method must return RESULT_TOO_BIG. + * + * Use this method only if the effect is provided by a third party, and + * there is no interface defined for it. This method only works for effects + * implemented in software. + * + * @param featureId feature identifier. + * @param maxConfigs maximum number of configs to return. + * @param configSize size of each config in bytes. + * @return retval operation completion status. + * @return configsCount number of configs returned. + * @return configsData data for all the configs returned. + */ + getSupportedConfigsForFeature( + uint32_t featureId, + uint32_t maxConfigs, + uint32_t configSize) generates ( + Result retval, + uint32_t configsCount, + vec configsData); + + /** + * Get the current config for a vendor-specific feature. The config returned + * is not interpreted by Android Framework and is passed as-is between the + * application and the effect. + * + * The effect must use INVALID_ARGUMENTS return code if the feature ID is + * unknown. If the effect does not support getting vendor-specific + * feature configs, it must return NOT_SUPPORTED. + * + * Use this method only if the effect is provided by a third party, and + * there is no interface defined for it. This method only works for effects + * implemented in software. + * + * @param featureId feature identifier. + * @param configSize size of the config in bytes. + * @return retval operation completion status. + * @return configData config data. + */ + getCurrentConfigForFeature(uint32_t featureId, uint32_t configSize) + generates (Result retval, vec configData); + + /** + * Set the current config for a vendor-specific feature. The config data + * is not interpreted by Android Framework and is passed as-is between the + * application and the effect. + * + * The effect must use INVALID_ARGUMENTS return code if the feature ID is + * unknown. If the effect does not support setting vendor-specific + * feature configs, it must return NOT_SUPPORTED. + * + * Use this method only if the effect is provided by a third party, and + * there is no interface defined for it. This method only works for effects + * implemented in software.
+ * + * @param featureId feature identifier. + * @param configData config data. + * @return retval operation completion status. + */ + setCurrentConfigForFeature(uint32_t featureId, vec configData) + generates (Result retval); + + /** + * Called by the framework to deinitialize the effect and free up + * all the currently allocated resources. It is recommended to close + * the effect on the client side as soon as it becomes unused. + * + * @return retval OK in case of success. + * INVALID_STATE if the effect was already closed. + */ + @exit + close() generates (Result retval); +}; diff --git a/audio/effect/4.0/IEffectBufferProviderCallback.hal b/audio/effect/4.0/IEffectBufferProviderCallback.hal new file mode 100644 index 0000000000000000000000000000000000000000..439383bcefc5f9987db69ae31d5777be89cb2f78 --- /dev/null +++ b/audio/effect/4.0/IEffectBufferProviderCallback.hal @@ -0,0 +1,38 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.audio.effect@4.0; + +/** + * This callback interface contains functions that can be used by the effect + * engine 'process' function to exchange input and output audio buffers. + */ +interface IEffectBufferProviderCallback { + /** + * Called to retrieve a buffer where data should be read from by the 'process' + * function. + * + * @return buffer audio buffer for processing + */ + getBuffer() generates (AudioBuffer buffer); + + /** + * Called to provide a buffer with the data written by the 'process' function. + * + * @param buffer audio buffer for processing + */ + putBuffer(AudioBuffer buffer); +}; diff --git a/audio/effect/4.0/IEffectsFactory.hal b/audio/effect/4.0/IEffectsFactory.hal new file mode 100644 index 0000000000000000000000000000000000000000..034af584afb7ec554054835be3a432e3799728b4 --- /dev/null +++ b/audio/effect/4.0/IEffectsFactory.hal @@ -0,0 +1,58 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.audio.effect@4.0; + +import android.hardware.audio.common@4.0; +import IEffect; + +interface IEffectsFactory { + /** + * Returns descriptors of different effects in all loaded libraries. + * + * @return retval operation completion status. + * @return result list of effect descriptors. + */ + getAllDescriptors() generates(Result retval, vec result); + + /** + * Returns a descriptor of a particular effect. + * + * @return retval operation completion status.
+ * @return result effect descriptor. + */ + getDescriptor(Uuid uid) generates(Result retval, EffectDescriptor result); + + /** + * Creates an effect engine of the specified type. To release the effect + * engine, it is necessary to release references to the returned effect + * object. + * + * @param uid effect uuid. + * @param session audio session to which this effect instance will be + * attached. All effects created with the same session ID + * are connected in series and process the same signal + * stream. + * @param ioHandle identifies the output or input stream this effect is + * directed to in audio HAL. + * @return retval operation completion status. + * @return result the interface for the created effect. + * @return effectId the unique ID of the effect to be used with + * IStream::addEffect and IStream::removeEffect methods. + */ + createEffect(Uuid uid, AudioSession session, AudioIoHandle ioHandle) + generates (Result retval, IEffect result, uint64_t effectId); +}; diff --git a/audio/effect/4.0/IEnvironmentalReverbEffect.hal b/audio/effect/4.0/IEnvironmentalReverbEffect.hal new file mode 100644 index 0000000000000000000000000000000000000000..21264d262bb64a71da4c759fd149001679bf66d4 --- /dev/null +++ b/audio/effect/4.0/IEnvironmentalReverbEffect.hal @@ -0,0 +1,178 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.audio.effect@4.0; + +import android.hardware.audio.common@4.0; +import IEffect; + +interface IEnvironmentalReverbEffect extends IEffect { + /** + * Sets whether the effect should be bypassed. + */ + setBypass(bool bypass) generates (Result retval); + + /** + * Gets whether the effect should be bypassed. + */ + getBypass() generates (Result retval, bool bypass); + + enum ParamRange : int16_t { + ROOM_LEVEL_MIN = -6000, + ROOM_LEVEL_MAX = 0, + ROOM_HF_LEVEL_MIN = -4000, + ROOM_HF_LEVEL_MAX = 0, + DECAY_TIME_MIN = 100, + DECAY_TIME_MAX = 20000, + DECAY_HF_RATIO_MIN = 100, + DECAY_HF_RATIO_MAX = 1000, + REFLECTIONS_LEVEL_MIN = -6000, + REFLECTIONS_LEVEL_MAX = 0, + REFLECTIONS_DELAY_MIN = 0, + REFLECTIONS_DELAY_MAX = 65, + REVERB_LEVEL_MIN = -6000, + REVERB_LEVEL_MAX = 0, + REVERB_DELAY_MIN = 0, + REVERB_DELAY_MAX = 65, + DIFFUSION_MIN = 0, + DIFFUSION_MAX = 1000, + DENSITY_MIN = 0, + DENSITY_MAX = 1000 + }; + + /** + * Sets the room level. + */ + setRoomLevel(int16_t roomLevel) generates (Result retval); + + /** + * Gets the room level. + */ + getRoomLevel() generates (Result retval, int16_t roomLevel); + + /** + * Sets the room high frequencies level. + */ + setRoomHfLevel(int16_t roomHfLevel) generates (Result retval); + + /** + * Gets the room high frequencies level. + */ + getRoomHfLevel() generates (Result retval, int16_t roomHfLevel); + + /** + * Sets the room decay time. + */ + setDecayTime(uint32_t decayTime) generates (Result retval); + + /** + * Gets the room decay time. 
+ */ + getDecayTime() generates (Result retval, uint32_t decayTime); + + /** + * Sets the ratio of high frequencies decay. + */ + setDecayHfRatio(int16_t decayHfRatio) generates (Result retval); + + /** + * Gets the ratio of high frequencies decay. + */ + getDecayHfRatio() generates (Result retval, int16_t decayHfRatio); + + /** + * Sets the level of reflections in the room. + */ + setReflectionsLevel(int16_t reflectionsLevel) generates (Result retval); + + /** + * Gets the level of reflections in the room. + */ + getReflectionsLevel() generates (Result retval, int16_t reflectionsLevel); + + /** + * Sets the reflections delay in the room. + */ + setReflectionsDelay(uint32_t reflectionsDelay) generates (Result retval); + + /** + * Gets the reflections delay in the room. + */ + getReflectionsDelay() generates (Result retval, uint32_t reflectionsDelay); + + /** + * Sets the reverb level of the room. + */ + setReverbLevel(int16_t reverbLevel) generates (Result retval); + + /** + * Gets the reverb level of the room. + */ + getReverbLevel() generates (Result retval, int16_t reverbLevel); + + /** + * Sets the reverb delay of the room. + */ + setReverbDelay(uint32_t reverDelay) generates (Result retval); + + /** + * Gets the reverb delay of the room. + */ + getReverbDelay() generates (Result retval, uint32_t reverbDelay); + + /** + * Sets room diffusion. + */ + setDiffusion(int16_t diffusion) generates (Result retval); + + /** + * Gets room diffusion. + */ + getDiffusion() generates (Result retval, int16_t diffusion); + + /** + * Sets room wall density. + */ + setDensity(int16_t density) generates (Result retval); + + /** + * Gets room wall density. + */ + getDensity() generates (Result retval, int16_t density); + + struct AllProperties { + int16_t roomLevel; // in millibels, range -6000 to 0 + int16_t roomHfLevel; // in millibels, range -4000 to 0 + uint32_t decayTime; // in milliseconds, range 100 to 20000 + int16_t decayHfRatio; // in permilles, range 100 to 1000 + int16_t reflectionsLevel; // in millibels, range -6000 to 0 + uint32_t reflectionsDelay; // in milliseconds, range 0 to 65 + int16_t reverbLevel; // in millibels, range -6000 to 0 + uint32_t reverbDelay; // in milliseconds, range 0 to 65 + int16_t diffusion; // in permilles, range 0 to 1000 + int16_t density; // in permilles, range 0 to 1000 + }; + + /** + * Sets all properties at once. + */ + setAllProperties(AllProperties properties) generates (Result retval); + + /** + * Gets all properties at once. + */ + getAllProperties() generates (Result retval, AllProperties properties); +}; diff --git a/audio/effect/4.0/IEqualizerEffect.hal b/audio/effect/4.0/IEqualizerEffect.hal new file mode 100644 index 0000000000000000000000000000000000000000..58f2b738311a1cc6083f866ace8a19d94aae3bcc --- /dev/null +++ b/audio/effect/4.0/IEqualizerEffect.hal @@ -0,0 +1,93 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package android.hardware.audio.effect@4.0; + +import android.hardware.audio.common@4.0; +import IEffect; + +interface IEqualizerEffect extends IEffect { + /** + * Gets the number of frequency bands that the equalizer supports. + */ + getNumBands() generates (Result retval, uint16_t numBands); + + /** + * Returns the minimum and maximum band levels supported. + */ + getLevelRange() + generates (Result retval, int16_t minLevel, int16_t maxLevel); + + /** + * Sets the gain for the given equalizer band. + */ + setBandLevel(uint16_t band, int16_t level) generates (Result retval); + + /** + * Gets the gain for the given equalizer band. + */ + getBandLevel(uint16_t band) generates (Result retval, int16_t level); + + /** + * Gets the center frequency of the given band, in milliHertz. + */ + getBandCenterFrequency(uint16_t band) + generates (Result retval, uint32_t centerFreqmHz); + + /** + * Gets the frequency range of the given frequency band, in milliHertz. + */ + getBandFrequencyRange(uint16_t band) + generates (Result retval, uint32_t minFreqmHz, uint32_t maxFreqmHz); + + /** + * Gets the band that has the most effect on the given frequency + * in milliHertz. + */ + getBandForFrequency(uint32_t freqmHz) + generates (Result retval, uint16_t band); + + /** + * Gets the names of all presets the equalizer supports. + */ + getPresetNames() generates (Result retval, vec names); + + /** + * Sets the current preset using the index of the preset in the names + * vector returned via 'getPresetNames'. + */ + setCurrentPreset(uint16_t preset) generates (Result retval); + + /** + * Gets the current preset. + */ + getCurrentPreset() generates (Result retval, uint16_t preset); + + struct AllProperties { + uint16_t curPreset; + vec bandLevels; + }; + + /** + * Sets all properties at once. + */ + setAllProperties(AllProperties properties) generates (Result retval); + + /** + * Gets all properties at once. + */ + getAllProperties() generates (Result retval, AllProperties properties); +}; diff --git a/audio/effect/4.0/ILoudnessEnhancerEffect.hal b/audio/effect/4.0/ILoudnessEnhancerEffect.hal new file mode 100644 index 0000000000000000000000000000000000000000..2421834c7092457dd025a8161b89a4c689ebb4a2 --- /dev/null +++ b/audio/effect/4.0/ILoudnessEnhancerEffect.hal @@ -0,0 +1,32 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.audio.effect@4.0; + +import android.hardware.audio.common@4.0; +import IEffect; + +interface ILoudnessEnhancerEffect extends IEffect { + /** + * Sets target gain expressed in millibels. + */ + setTargetGain(int32_t targetGainMb) generates (Result retval); + + /** + * Gets target gain expressed in millibels. 
+ */ + getTargetGain() generates (Result retval, int32_t targetGainMb); +}; diff --git a/audio/effect/4.0/INoiseSuppressionEffect.hal b/audio/effect/4.0/INoiseSuppressionEffect.hal new file mode 100644 index 0000000000000000000000000000000000000000..ce593a03894d127fcc701a3718d4d79fe9072ac2 --- /dev/null +++ b/audio/effect/4.0/INoiseSuppressionEffect.hal @@ -0,0 +1,68 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.audio.effect@4.0; + +import android.hardware.audio.common@4.0; +import IEffect; + +interface INoiseSuppressionEffect extends IEffect { + enum Level : int32_t { + LOW, + MEDIUM, + HIGH + }; + + /** + * Sets suppression level. + */ + setSuppressionLevel(Level level) generates (Result retval); + + /** + * Gets suppression level. + */ + getSuppressionLevel() generates (Result retval, Level level); + + enum Type : int32_t { + SINGLE_CHANNEL, + MULTI_CHANNEL + }; + + /** + * Set suppression type. + */ + setSuppressionType(Type type) generates (Result retval); + + /** + * Get suppression type. + */ + getSuppressionType() generates (Result retval, Type type); + + struct AllProperties { + Level level; + Type type; + }; + + /** + * Sets all properties at once. + */ + setAllProperties(AllProperties properties) generates (Result retval); + + /** + * Gets all properties at once. + */ + getAllProperties() generates (Result retval, AllProperties properties); +}; diff --git a/audio/effect/4.0/IPresetReverbEffect.hal b/audio/effect/4.0/IPresetReverbEffect.hal new file mode 100644 index 0000000000000000000000000000000000000000..241723590f11be4fc86f21fc4d05d034696f57f9 --- /dev/null +++ b/audio/effect/4.0/IPresetReverbEffect.hal @@ -0,0 +1,43 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.audio.effect@4.0; + +import android.hardware.audio.common@4.0; +import IEffect; + +interface IPresetReverbEffect extends IEffect { + enum Preset : int32_t { + NONE, // no reverb or reflections + SMALLROOM, // a small room less than five meters in length + MEDIUMROOM, // a medium room with a length of ten meters or less + LARGEROOM, // a large-sized room suitable for live performances + MEDIUMHALL, // a medium-sized hall + LARGEHALL, // a large-sized hall suitable for a full orchestra + PLATE, // synthesis of the traditional plate reverb + LAST = PLATE + }; + + /** + * Sets the current preset. 
+ */ + setPreset(Preset preset) generates (Result retval); + + /** + * Gets the current preset. + */ + getPreset() generates (Result retval, Preset preset); +}; diff --git a/audio/effect/4.0/IVirtualizerEffect.hal b/audio/effect/4.0/IVirtualizerEffect.hal new file mode 100644 index 0000000000000000000000000000000000000000..52038ca08cbd4fe05e651df6f43db748fae69142 --- /dev/null +++ b/audio/effect/4.0/IVirtualizerEffect.hal @@ -0,0 +1,77 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.audio.effect@4.0; + +import android.hardware.audio.common@4.0; +import IEffect; + +interface IVirtualizerEffect extends IEffect { + /** + * Returns whether setting virtualization strength is supported. + */ + isStrengthSupported() generates (bool strengthSupported); + + enum StrengthRange : uint16_t { + MIN = 0, + MAX = 1000 + }; + + /** + * Sets virtualization strength. + * + * @param strength strength of the effect. The valid range for strength + * is [0, 1000], where 0 per mille designates the + * mildest effect and 1000 per mille designates the + * strongest. + * @return retval operation completion status. + */ + setStrength(uint16_t strength) generates (Result retval); + + /** + * Gets virtualization strength. + */ + getStrength() generates (Result retval, uint16_t strength); + + struct SpeakerAngle { + /** Speaker channel mask */ + bitfield mask; + // all angles are expressed in degrees and + // are relative to the listener. + int16_t azimuth; // 0 is the direction the listener faces + // 180 is behind the listener + // -90 is to their left + int16_t elevation; // 0 is the horizontal plane + // +90 is above the listener, -90 is below + }; + /** + * Retrieves virtual speaker angles for the given channel mask on the + * specified device. + */ + getVirtualSpeakerAngles(bitfield mask, AudioDevice device) + generates (Result retval, vec speakerAngles); + + /** + * Forces the virtualizer effect for the given output device. + */ + forceVirtualizationMode(AudioDevice device) generates (Result retval); + + /** + * Returns audio device reflecting the current virtualization mode, + * AUDIO_DEVICE_NONE when not virtualizing. + */ + getVirtualizationMode() generates (Result retval, AudioDevice device); +}; diff --git a/audio/effect/4.0/IVisualizerEffect.hal b/audio/effect/4.0/IVisualizerEffect.hal new file mode 100644 index 0000000000000000000000000000000000000000..2fee9801e729e6eae8ebb820b7e809b4839231fd --- /dev/null +++ b/audio/effect/4.0/IVisualizerEffect.hal @@ -0,0 +1,110 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.audio.effect@4.0; + +import android.hardware.audio.common@4.0; +import IEffect; + +interface IVisualizerEffect extends IEffect { + enum CaptureSizeRange : int32_t { + MAX = 1024, // maximum capture size in samples + MIN = 128 // minimum capture size in samples + }; + + /** + * Sets the number of PCM samples in the capture. + */ + setCaptureSize(uint16_t captureSize) generates (Result retval); + + /** + * Gets the number of PCM samples in the capture. + */ + getCaptureSize() generates (Result retval, uint16_t captureSize); + + enum ScalingMode : int32_t { + // Keep in sync with SCALING_MODE_... in + // frameworks/base/media/java/android/media/audiofx/Visualizer.java + NORMALIZED = 0, + AS_PLAYED = 1 + }; + + /** + * Specifies the way the captured data is scaled. + */ + setScalingMode(ScalingMode scalingMode) generates (Result retval); + + /** + * Retrieves the way the captured data is scaled. + */ + getScalingMode() generates (Result retval, ScalingMode scalingMode); + + /** + * Informs the visualizer about the downstream latency. + */ + setLatency(uint32_t latencyMs) generates (Result retval); + + /** + * Gets the downstream latency. + */ + getLatency() generates (Result retval, uint32_t latencyMs); + + enum MeasurementMode : int32_t { + // Keep in sync with MEASUREMENT_MODE_... in + // frameworks/base/media/java/android/media/audiofx/Visualizer.java + NONE = 0x0, + PEAK_RMS = 0x1 + }; + + /** + * Specifies which measurements are to be made. + */ + setMeasurementMode(MeasurementMode measurementMode) + generates (Result retval); + + /** + * Retrieves which measurements are to be made. + */ + getMeasurementMode() generates ( + Result retval, MeasurementMode measurementMode); + + /** + * Retrieves the latest PCM snapshot captured by the visualizer engine. The + * number of samples to capture is specified by the 'setCaptureSize' parameter. + * + * @return retval operation completion status. + * @return samples samples in 8 bit unsigned format (0 = 0x80) + */ + capture() generates (Result retval, vec samples); + + struct Measurement { + MeasurementMode mode; // discriminator + union Values { + struct PeakAndRms { + int32_t peakMb; // millibels + int32_t rmsMb; // millibels + } peakAndRms; + } value; + }; + /** + * Retrieves the latest measurements. The measurements to be made + * are specified by the 'setMeasurementMode' parameter. + * + * @return retval operation completion status. + * @return result measurement. + */ + measure() generates (Result retval, Measurement result); +}; diff --git a/audio/effect/4.0/default/AcousticEchoCancelerEffect.cpp b/audio/effect/4.0/default/AcousticEchoCancelerEffect.cpp new file mode 100644 index 0000000000000000000000000000000000000000..242740e5822bf9c2d53e29250cbe808924949b1f --- /dev/null +++ b/audio/effect/4.0/default/AcousticEchoCancelerEffect.cpp @@ -0,0 +1,23 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "AEC_Effect_HAL" + +#include "AcousticEchoCancelerEffect.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/effect/4.0/default/AcousticEchoCancelerEffect.h b/audio/effect/4.0/default/AcousticEchoCancelerEffect.h new file mode 100644 index 0000000000000000000000000000000000000000..0ac0a1e0dfd0c71d9b0e2b3698439012202838ae --- /dev/null +++ b/audio/effect/4.0/default/AcousticEchoCancelerEffect.h @@ -0,0 +1,28 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_ACOUSTICECHOCANCELEREFFECT_H +#define ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_ACOUSTICECHOCANCELEREFFECT_H + +#include + +#include "Effect.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION + +#endif // ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_ACOUSTICECHOCANCELEREFFECT_H diff --git a/audio/effect/4.0/default/Android.bp b/audio/effect/4.0/default/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..dcb2269a9baaefcdbeb760f4aa8b0e9c7a6a5c4a --- /dev/null +++ b/audio/effect/4.0/default/Android.bp @@ -0,0 +1,50 @@ +cc_library_shared { + name: "android.hardware.audio.effect@4.0-impl", + defaults: ["hidl_defaults"], + vendor: true, + relative_install_path: "hw", + srcs: [ + "AcousticEchoCancelerEffect.cpp", + "AudioBufferManager.cpp", + "AutomaticGainControlEffect.cpp", + "BassBoostEffect.cpp", + "Conversions.cpp", + "DownmixEffect.cpp", + "Effect.cpp", + "EffectsFactory.cpp", + "EnvironmentalReverbEffect.cpp", + "EqualizerEffect.cpp", + "LoudnessEnhancerEffect.cpp", + "NoiseSuppressionEffect.cpp", + "PresetReverbEffect.cpp", + "VirtualizerEffect.cpp", + "VisualizerEffect.cpp", + ], + + shared_libs: [ + "libbase", + "libcutils", + "libeffects", + "libfmq", + "libhidlbase", + "libhidlmemory", + "libhidltransport", + "liblog", + "libutils", + "android.hardware.audio.common-util", + "android.hardware.audio.common@4.0", + "android.hardware.audio.common@4.0-util", + "android.hardware.audio.effect@4.0", + "android.hidl.memory@1.0", + ], + + header_libs: [ + "android.hardware.audio.common.util@all-versions", + "android.hardware.audio.effect@all-versions-impl", + "libaudio_system_headers", + "libaudioclient_headers", + "libeffects_headers", + "libhardware_headers", + "libmedia_headers", + ], +} diff --git a/audio/effect/4.0/default/AudioBufferManager.cpp b/audio/effect/4.0/default/AudioBufferManager.cpp new file mode 100644 index 0000000000000000000000000000000000000000..2d75f3fdbb1224b1de9480b012d8dcc8417d12e0 --- 
/dev/null +++ b/audio/effect/4.0/default/AudioBufferManager.cpp @@ -0,0 +1,21 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "AudioBufferManager.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/effect/4.0/default/AudioBufferManager.h b/audio/effect/4.0/default/AudioBufferManager.h new file mode 100644 index 0000000000000000000000000000000000000000..1f151e6b99d95c0b3b6195c42cbede070516c2ac --- /dev/null +++ b/audio/effect/4.0/default/AudioBufferManager.h @@ -0,0 +1,26 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_AUDIO_BUFFER_MANAGER_H_ +#define ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_AUDIO_BUFFER_MANAGER_H_ + +#include + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION + +#endif // ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_AUDIO_BUFFER_MANAGER_H_ diff --git a/audio/effect/4.0/default/AutomaticGainControlEffect.cpp b/audio/effect/4.0/default/AutomaticGainControlEffect.cpp new file mode 100644 index 0000000000000000000000000000000000000000..9d21c8ae6bcd8da7f699722ce9f846e6e022f8a5 --- /dev/null +++ b/audio/effect/4.0/default/AutomaticGainControlEffect.cpp @@ -0,0 +1,23 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
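AudioBufferManager wraps the hidl_memory handles that arrive in AudioBuffer structures and hands locally mapped pointers to the effect engine. As a point of reference, here is a hedged sketch of the libhidlmemory call such a wrapper is assumed to build on; the helper below is illustrative and not a copy of the actual implementation.

#include <android/hidl/memory/1.0/IMemory.h>
#include <hidlmemory/mapping.h>

using ::android::sp;
using ::android::hardware::hidl_memory;
using ::android::hidl::memory::V1_0::IMemory;

// Maps a hidl_memory region (e.g. AudioBuffer.data) into this process.
// The returned pointer stays valid only while '*keepAlive' is held.
static uint8_t* mapAudioBufferData(const hidl_memory& mem, sp<IMemory>* keepAlive) {
    sp<IMemory> mapped = ::android::hardware::mapMemory(mem);
    if (mapped == nullptr) {
        return nullptr;
    }
    *keepAlive = mapped;
    void* raw = static_cast<void*>(mapped->getPointer());
    return static_cast<uint8_t*>(raw);
}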
+ */ + +#define LOG_TAG "AGC_Effect_HAL" + +#include "AutomaticGainControlEffect.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/effect/4.0/default/AutomaticGainControlEffect.h b/audio/effect/4.0/default/AutomaticGainControlEffect.h new file mode 100644 index 0000000000000000000000000000000000000000..7f12007f8f391cdaf3c669f40180df694c67aeae --- /dev/null +++ b/audio/effect/4.0/default/AutomaticGainControlEffect.h @@ -0,0 +1,28 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_AUTOMATICGAINCONTROLEFFECT_H +#define ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_AUTOMATICGAINCONTROLEFFECT_H + +#include + +#include "Effect.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION + +#endif // ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_AUTOMATICGAINCONTROLEFFECT_H diff --git a/audio/effect/4.0/default/BassBoostEffect.cpp b/audio/effect/4.0/default/BassBoostEffect.cpp new file mode 100644 index 0000000000000000000000000000000000000000..74a626b79e1baa1c09d109a05aa858af04e3a426 --- /dev/null +++ b/audio/effect/4.0/default/BassBoostEffect.cpp @@ -0,0 +1,23 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "BassBoost_HAL" + +#include "BassBoostEffect.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/effect/4.0/default/BassBoostEffect.h b/audio/effect/4.0/default/BassBoostEffect.h new file mode 100644 index 0000000000000000000000000000000000000000..206a75fab473fb836bf75b5c18bd6cd581ad36e9 --- /dev/null +++ b/audio/effect/4.0/default/BassBoostEffect.h @@ -0,0 +1,30 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_BASSBOOSTEFFECT_H +#define ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_BASSBOOSTEFFECT_H + +#include + +#include + +#include "Effect.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION + +#endif // ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_BASSBOOSTEFFECT_H diff --git a/audio/effect/4.0/default/Conversions.cpp b/audio/effect/4.0/default/Conversions.cpp new file mode 100644 index 0000000000000000000000000000000000000000..91285ae6b846107028829ec880329458e2816080 --- /dev/null +++ b/audio/effect/4.0/default/Conversions.cpp @@ -0,0 +1,24 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "Conversions.h" +#include "HidlUtils.h" + +using ::android::hardware::audio::common::V4_0::HidlUtils; + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/effect/4.0/default/Conversions.h b/audio/effect/4.0/default/Conversions.h new file mode 100644 index 0000000000000000000000000000000000000000..50e380fe2e34d0a826776c67a44da51e65abc358 --- /dev/null +++ b/audio/effect/4.0/default/Conversions.h @@ -0,0 +1,26 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_CONVERSIONS_H_ +#define ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_CONVERSIONS_H_ + +#include + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION + +#endif // ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_CONVERSIONS_H_ diff --git a/audio/effect/4.0/default/DownmixEffect.cpp b/audio/effect/4.0/default/DownmixEffect.cpp new file mode 100644 index 0000000000000000000000000000000000000000..07fcab2f1c8957acecafd4171b3ae4d438d602a9 --- /dev/null +++ b/audio/effect/4.0/default/DownmixEffect.cpp @@ -0,0 +1,23 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "Downmix_HAL" + +#include "DownmixEffect.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/effect/4.0/default/DownmixEffect.h b/audio/effect/4.0/default/DownmixEffect.h new file mode 100644 index 0000000000000000000000000000000000000000..5ae820b76fa06574a466e0818ed23d99268d0379 --- /dev/null +++ b/audio/effect/4.0/default/DownmixEffect.h @@ -0,0 +1,28 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_DOWNMIXEFFECT_H +#define ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_DOWNMIXEFFECT_H + +#include + +#include "Effect.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION + +#endif // ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_DOWNMIXEFFECT_H diff --git a/audio/effect/4.0/default/Effect.cpp b/audio/effect/4.0/default/Effect.cpp new file mode 100644 index 0000000000000000000000000000000000000000..707044bff015a9d852c773e760eebcd3fcb1b052 --- /dev/null +++ b/audio/effect/4.0/default/Effect.cpp @@ -0,0 +1,28 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#define LOG_TAG "EffectHAL" +#define ATRACE_TAG ATRACE_TAG_AUDIO + +#include "Conversions.h" +#include "Effect.h" +#include "common/all-versions/default/EffectMap.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/effect/4.0/default/Effect.h b/audio/effect/4.0/default/Effect.h new file mode 100644 index 0000000000000000000000000000000000000000..9ca79c4596485b9f0c190f6685e19d550a595e37 --- /dev/null +++ b/audio/effect/4.0/default/Effect.h @@ -0,0 +1,28 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
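Each of these per-version sources follows the same idiom: the translation unit pins AUDIO_HAL_VERSION and then includes a shared, version-independent implementation header from the all-versions header library listed in Android.bp. The sketch below shows how such a shared header can key off the macro; the namespace layout is illustrative, not a copy of the actual AOSP headers.

// ---- shared implementation header, included once per HAL version ----
// The including .cpp defines AUDIO_HAL_VERSION (V2_0, V4_0, ...) first, so
// the same version-independent code lands in the right HIDL namespace.
namespace android {
namespace hardware {
namespace audio {
namespace effect {
namespace AUDIO_HAL_VERSION {
namespace implementation {

// Version-independent effect code goes here; HIDL types such as Result and
// EffectConfig resolve against the namespace selected by the macro.

}  // namespace implementation
}  // namespace AUDIO_HAL_VERSION
}  // namespace effect
}  // namespace audio
}  // namespace hardware
}  // namespace android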
+ */ + +#ifndef ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_EFFECT_H +#define ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_EFFECT_H + +#include + +#include "AudioBufferManager.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION + +#endif // ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_EFFECT_H diff --git a/audio/effect/4.0/default/EffectsFactory.cpp b/audio/effect/4.0/default/EffectsFactory.cpp new file mode 100644 index 0000000000000000000000000000000000000000..ee0413df8f6700e2bcfae55ac6181e8c14452ba4 --- /dev/null +++ b/audio/effect/4.0/default/EffectsFactory.cpp @@ -0,0 +1,39 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "EffectFactoryHAL" +#include "EffectsFactory.h" +#include "AcousticEchoCancelerEffect.h" +#include "AutomaticGainControlEffect.h" +#include "BassBoostEffect.h" +#include "Conversions.h" +#include "DownmixEffect.h" +#include "Effect.h" +#include "EnvironmentalReverbEffect.h" +#include "EqualizerEffect.h" +#include "HidlUtils.h" +#include "LoudnessEnhancerEffect.h" +#include "NoiseSuppressionEffect.h" +#include "PresetReverbEffect.h" +#include "VirtualizerEffect.h" +#include "VisualizerEffect.h" +#include "common/all-versions/default/EffectMap.h" + +using ::android::hardware::audio::common::V4_0::HidlUtils; + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/effect/4.0/default/EffectsFactory.h b/audio/effect/4.0/default/EffectsFactory.h new file mode 100644 index 0000000000000000000000000000000000000000..48e4b4cb9f2e08be3d546819edce07a89d3823c3 --- /dev/null +++ b/audio/effect/4.0/default/EffectsFactory.h @@ -0,0 +1,29 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_EFFECTSFACTORY_H +#define ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_EFFECTSFACTORY_H + +#include + +#include + +#include +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION + +#endif // ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_EFFECTSFACTORY_H diff --git a/audio/effect/4.0/default/EnvironmentalReverbEffect.cpp b/audio/effect/4.0/default/EnvironmentalReverbEffect.cpp new file mode 100644 index 0000000000000000000000000000000000000000..cc3102d1f8c6df402061b6be8789fbafb7706433 --- /dev/null +++ b/audio/effect/4.0/default/EnvironmentalReverbEffect.cpp @@ -0,0 +1,24 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "EnvReverb_HAL" +#include + +#include "EnvironmentalReverbEffect.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/effect/4.0/default/EnvironmentalReverbEffect.h b/audio/effect/4.0/default/EnvironmentalReverbEffect.h new file mode 100644 index 0000000000000000000000000000000000000000..c0fb25c02cc50a01ce8514f4589259a12fe63013 --- /dev/null +++ b/audio/effect/4.0/default/EnvironmentalReverbEffect.h @@ -0,0 +1,30 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_ENVIRONMENTALREVERBEFFECT_H +#define ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_ENVIRONMENTALREVERBEFFECT_H + +#include + +#include + +#include "Effect.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION + +#endif // ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_ENVIRONMENTALREVERBEFFECT_H diff --git a/audio/effect/4.0/default/EqualizerEffect.cpp b/audio/effect/4.0/default/EqualizerEffect.cpp new file mode 100644 index 0000000000000000000000000000000000000000..d0a40bc3cdfd0350e2409184d093bd3d23af7198 --- /dev/null +++ b/audio/effect/4.0/default/EqualizerEffect.cpp @@ -0,0 +1,23 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "Equalizer_HAL" + +#include "EqualizerEffect.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/effect/4.0/default/EqualizerEffect.h b/audio/effect/4.0/default/EqualizerEffect.h new file mode 100644 index 0000000000000000000000000000000000000000..7c9463b01e2ad23a2384ee7ca8c71b4afb07a357 --- /dev/null +++ b/audio/effect/4.0/default/EqualizerEffect.h @@ -0,0 +1,28 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_EQUALIZEREFFECT_H +#define ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_EQUALIZEREFFECT_H + +#include + +#include "Effect.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION + +#endif // ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_EQUALIZEREFFECT_H diff --git a/audio/effect/4.0/default/LoudnessEnhancerEffect.cpp b/audio/effect/4.0/default/LoudnessEnhancerEffect.cpp new file mode 100644 index 0000000000000000000000000000000000000000..e3c518422562d744fd153cf55669c2bd5a140fc5 --- /dev/null +++ b/audio/effect/4.0/default/LoudnessEnhancerEffect.cpp @@ -0,0 +1,23 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "LoudnessEnhancer_HAL" + +#include "LoudnessEnhancerEffect.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/effect/4.0/default/LoudnessEnhancerEffect.h b/audio/effect/4.0/default/LoudnessEnhancerEffect.h new file mode 100644 index 0000000000000000000000000000000000000000..64fa26add8f394dd73b5f1e5975bbbb79e9522be --- /dev/null +++ b/audio/effect/4.0/default/LoudnessEnhancerEffect.h @@ -0,0 +1,28 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_LOUDNESSENHANCEREFFECT_H +#define ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_LOUDNESSENHANCEREFFECT_H + +#include + +#include "Effect.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION + +#endif // ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_LOUDNESSENHANCEREFFECT_H diff --git a/audio/effect/4.0/default/NoiseSuppressionEffect.cpp b/audio/effect/4.0/default/NoiseSuppressionEffect.cpp new file mode 100644 index 0000000000000000000000000000000000000000..e83a8e3373c05bddb5c278bcdc44560aaf46f1d3 --- /dev/null +++ b/audio/effect/4.0/default/NoiseSuppressionEffect.cpp @@ -0,0 +1,23 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "NS_Effect_HAL" + +#include "NoiseSuppressionEffect.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/effect/4.0/default/NoiseSuppressionEffect.h b/audio/effect/4.0/default/NoiseSuppressionEffect.h new file mode 100644 index 0000000000000000000000000000000000000000..36d45afaf7c17455e3c3837ada8e727b240180ee --- /dev/null +++ b/audio/effect/4.0/default/NoiseSuppressionEffect.h @@ -0,0 +1,28 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_NOISESUPPRESSIONEFFECT_H +#define ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_NOISESUPPRESSIONEFFECT_H + +#include + +#include "Effect.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION + +#endif // ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_NOISESUPPRESSIONEFFECT_H diff --git a/audio/effect/4.0/default/OWNERS b/audio/effect/4.0/default/OWNERS new file mode 100644 index 0000000000000000000000000000000000000000..6fdc97ca298fbbda9cb676f5acb02d7495debcb4 --- /dev/null +++ b/audio/effect/4.0/default/OWNERS @@ -0,0 +1,3 @@ +elaurent@google.com +krocard@google.com +mnaganov@google.com diff --git a/audio/effect/4.0/default/PresetReverbEffect.cpp b/audio/effect/4.0/default/PresetReverbEffect.cpp new file mode 100644 index 0000000000000000000000000000000000000000..0c23be73af2bafe2acb2e2b2fcc992b45cb2efe5 --- /dev/null +++ b/audio/effect/4.0/default/PresetReverbEffect.cpp @@ -0,0 +1,23 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "PresetReverb_HAL" + +#include "PresetReverbEffect.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/effect/4.0/default/PresetReverbEffect.h b/audio/effect/4.0/default/PresetReverbEffect.h new file mode 100644 index 0000000000000000000000000000000000000000..3eeae0a04ba458aac49f55559aedb8ff77f03ca5 --- /dev/null +++ b/audio/effect/4.0/default/PresetReverbEffect.h @@ -0,0 +1,28 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_PRESETREVERBEFFECT_H +#define ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_PRESETREVERBEFFECT_H + +#include + +#include "Effect.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION + +#endif // ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_PRESETREVERBEFFECT_H diff --git a/audio/effect/4.0/default/VirtualizerEffect.cpp b/audio/effect/4.0/default/VirtualizerEffect.cpp new file mode 100644 index 0000000000000000000000000000000000000000..f50e8adb7b891c33ad9a2a47119f5191c2e4cf41 --- /dev/null +++ b/audio/effect/4.0/default/VirtualizerEffect.cpp @@ -0,0 +1,23 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "Virtualizer_HAL" + +#include "VirtualizerEffect.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/effect/4.0/default/VirtualizerEffect.h b/audio/effect/4.0/default/VirtualizerEffect.h new file mode 100644 index 0000000000000000000000000000000000000000..8e7114e5696bab5b90cec45dfb6f9204340dfa80 --- /dev/null +++ b/audio/effect/4.0/default/VirtualizerEffect.h @@ -0,0 +1,28 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_VIRTUALIZEREFFECT_H +#define ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_VIRTUALIZEREFFECT_H + +#include + +#include "Effect.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION + +#endif // ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_VIRTUALIZEREFFECT_H diff --git a/audio/effect/4.0/default/VisualizerEffect.cpp b/audio/effect/4.0/default/VisualizerEffect.cpp new file mode 100644 index 0000000000000000000000000000000000000000..8d4f100ced5a0774ed7f5e11ffc676fca8258e0e --- /dev/null +++ b/audio/effect/4.0/default/VisualizerEffect.cpp @@ -0,0 +1,23 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "Visualizer_HAL" + +#include "VisualizerEffect.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION diff --git a/audio/effect/4.0/default/VisualizerEffect.h b/audio/effect/4.0/default/VisualizerEffect.h new file mode 100644 index 0000000000000000000000000000000000000000..6b5ab9c393acc832e8aa50ea6aa771969bb08930 --- /dev/null +++ b/audio/effect/4.0/default/VisualizerEffect.h @@ -0,0 +1,28 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_VISUALIZEREFFECT_H +#define ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_VISUALIZEREFFECT_H + +#include + +#include "Effect.h" + +#define AUDIO_HAL_VERSION V4_0 +#include +#undef AUDIO_HAL_VERSION + +#endif // ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_VISUALIZEREFFECT_H diff --git a/audio/effect/4.0/types.hal b/audio/effect/4.0/types.hal new file mode 100644 index 0000000000000000000000000000000000000000..2a8f4b86bc6055a25bd0c3aa326cfdd7b66ea97f --- /dev/null +++ b/audio/effect/4.0/types.hal @@ -0,0 +1,299 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.audio.effect@4.0; + +import android.hardware.audio.common@4.0; + +enum Result : int32_t { + OK, + NOT_INITIALIZED, + INVALID_ARGUMENTS, + INVALID_STATE, + NOT_SUPPORTED, + RESULT_TOO_BIG +}; + +/** + * Effect engine capabilities/requirements flags. + * + * Definitions for flags field of effect descriptor. + * + * +----------------+--------+-------------------------------------------------- + * | description | bits | values + * +----------------+--------+-------------------------------------------------- + * | connection | 0..2 | 0 insert: after track process + * | mode | | 1 auxiliary: connect to track auxiliary + * | | | output and use send level + * | | | 2 replace: replaces track process function; + * | | | must implement SRC, volume and mono to stereo. + * | | | 3 pre processing: applied below audio HAL on in + * | | | 4 post processing: applied below audio HAL on out + * | | | 5 - 7 reserved + * +----------------+--------+-------------------------------------------------- + * | insertion | 3..5 | 0 none + * | preference | | 1 first of the chain + * | | | 2 last of the chain + * | | | 3 exclusive (only effect in the insert chain) + * | | | 4..7 reserved + * +----------------+--------+-------------------------------------------------- + * | Volume | 6..8 | 0 none + * | management | | 1 implements volume control + * | | | 2 requires volume indication + * | | | 4 reserved + * +----------------+--------+-------------------------------------------------- + * | Device | 9..11 | 0 none + * | indication | | 1 requires device updates + * | | | 2, 4 reserved + * +----------------+--------+-------------------------------------------------- + * | Sample input | 12..13 | 1 direct: process() function or + * | mode | | EFFECT_CMD_SET_CONFIG command must specify + * | | | a buffer descriptor + * | | | 2 provider: process() function uses the + * | | | bufferProvider indicated by the + * | | | EFFECT_CMD_SET_CONFIG command to request input. + * | | | buffers. + * | | | 3 both: both input modes are supported + * +----------------+--------+-------------------------------------------------- + * | Sample output | 14..15 | 1 direct: process() function or + * | mode | | EFFECT_CMD_SET_CONFIG command must specify + * | | | a buffer descriptor + * | | | 2 provider: process() function uses the + * | | | bufferProvider indicated by the + * | | | EFFECT_CMD_SET_CONFIG command to request output + * | | | buffers. + * | | | 3 both: both output modes are supported + * +----------------+--------+-------------------------------------------------- + * | Hardware | 16..17 | 0 No hardware acceleration + * | acceleration | | 1 non tunneled hw acceleration: the process() + * | | | function reads the samples, send them to HW + * | | | accelerated effect processor, reads back + * | | | the processed samples and returns them + * | | | to the output buffer. + * | | | 2 tunneled hw acceleration: the process() + * | | | function is transparent. The effect interface + * | | | is only used to control the effect engine. 
+ * | | | This mode is relevant for global effects + * | | | actually applied by the audio hardware on + * | | | the output stream. + * +----------------+--------+-------------------------------------------------- + * | Audio Mode | 18..19 | 0 none + * | indication | | 1 requires audio mode updates + * | | | 2..3 reserved + * +----------------+--------+-------------------------------------------------- + * | Audio source | 20..21 | 0 none + * | indication | | 1 requires audio source updates + * | | | 2..3 reserved + * +----------------+--------+-------------------------------------------------- + * | Effect offload | 22 | 0 The effect cannot be offloaded to an audio DSP + * | supported | | 1 The effect can be offloaded to an audio DSP + * +----------------+--------+-------------------------------------------------- + * | Process | 23 | 0 The effect implements a process function. + * | function | | 1 The effect does not implement a process + * | not | | function: enabling the effect has no impact + * | implemented | | on latency or CPU load. + * | | | Effect implementations setting this flag do not + * | | | have to implement a process function. + * +----------------+--------+-------------------------------------------------- + */ +@export(name="", value_prefix="EFFECT_FLAG_") +enum EffectFlags : int32_t { + // Insert mode + TYPE_SHIFT = 0, + TYPE_SIZE = 3, + TYPE_MASK = ((1 << TYPE_SIZE) -1) << TYPE_SHIFT, + TYPE_INSERT = 0 << TYPE_SHIFT, + TYPE_AUXILIARY = 1 << TYPE_SHIFT, + TYPE_REPLACE = 2 << TYPE_SHIFT, + TYPE_PRE_PROC = 3 << TYPE_SHIFT, + TYPE_POST_PROC = 4 << TYPE_SHIFT, + + // Insert preference + INSERT_SHIFT = TYPE_SHIFT + TYPE_SIZE, + INSERT_SIZE = 3, + INSERT_MASK = ((1 << INSERT_SIZE) -1) << INSERT_SHIFT, + INSERT_ANY = 0 << INSERT_SHIFT, + INSERT_FIRST = 1 << INSERT_SHIFT, + INSERT_LAST = 2 << INSERT_SHIFT, + INSERT_EXCLUSIVE = 3 << INSERT_SHIFT, + + // Volume control + VOLUME_SHIFT = INSERT_SHIFT + INSERT_SIZE, + VOLUME_SIZE = 3, + VOLUME_MASK = ((1 << VOLUME_SIZE) -1) << VOLUME_SHIFT, + VOLUME_CTRL = 1 << VOLUME_SHIFT, + VOLUME_IND = 2 << VOLUME_SHIFT, + VOLUME_NONE = 0 << VOLUME_SHIFT, + + // Device indication + DEVICE_SHIFT = VOLUME_SHIFT + VOLUME_SIZE, + DEVICE_SIZE = 3, + DEVICE_MASK = ((1 << DEVICE_SIZE) -1) << DEVICE_SHIFT, + DEVICE_IND = 1 << DEVICE_SHIFT, + DEVICE_NONE = 0 << DEVICE_SHIFT, + + // Sample input modes + INPUT_SHIFT = DEVICE_SHIFT + DEVICE_SIZE, + INPUT_SIZE = 2, + INPUT_MASK = ((1 << INPUT_SIZE) -1) << INPUT_SHIFT, + INPUT_DIRECT = 1 << INPUT_SHIFT, + INPUT_PROVIDER = 2 << INPUT_SHIFT, + INPUT_BOTH = 3 << INPUT_SHIFT, + + // Sample output modes + OUTPUT_SHIFT = INPUT_SHIFT + INPUT_SIZE, + OUTPUT_SIZE = 2, + OUTPUT_MASK = ((1 << OUTPUT_SIZE) -1) << OUTPUT_SHIFT, + OUTPUT_DIRECT = 1 << OUTPUT_SHIFT, + OUTPUT_PROVIDER = 2 << OUTPUT_SHIFT, + OUTPUT_BOTH = 3 << OUTPUT_SHIFT, + + // Hardware acceleration mode + HW_ACC_SHIFT = OUTPUT_SHIFT + OUTPUT_SIZE, + HW_ACC_SIZE = 2, + HW_ACC_MASK = ((1 << HW_ACC_SIZE) -1) << HW_ACC_SHIFT, + HW_ACC_SIMPLE = 1 << HW_ACC_SHIFT, + HW_ACC_TUNNEL = 2 << HW_ACC_SHIFT, + + // Audio mode indication + AUDIO_MODE_SHIFT = HW_ACC_SHIFT + HW_ACC_SIZE, + AUDIO_MODE_SIZE = 2, + AUDIO_MODE_MASK = ((1 << AUDIO_MODE_SIZE) -1) << AUDIO_MODE_SHIFT, + AUDIO_MODE_IND = 1 << AUDIO_MODE_SHIFT, + AUDIO_MODE_NONE = 0 << AUDIO_MODE_SHIFT, + + // Audio source indication + AUDIO_SOURCE_SHIFT = AUDIO_MODE_SHIFT + AUDIO_MODE_SIZE, + AUDIO_SOURCE_SIZE = 2, + AUDIO_SOURCE_MASK = ((1 << AUDIO_SOURCE_SIZE) -1) << AUDIO_SOURCE_SHIFT, + AUDIO_SOURCE_IND = 1 
<< AUDIO_SOURCE_SHIFT, + AUDIO_SOURCE_NONE = 0 << AUDIO_SOURCE_SHIFT, + + // Effect offload indication + OFFLOAD_SHIFT = AUDIO_SOURCE_SHIFT + AUDIO_SOURCE_SIZE, + OFFLOAD_SIZE = 1, + OFFLOAD_MASK = ((1 << OFFLOAD_SIZE) -1) << OFFLOAD_SHIFT, + OFFLOAD_SUPPORTED = 1 << OFFLOAD_SHIFT, + + // Effect has no process indication + NO_PROCESS_SHIFT = OFFLOAD_SHIFT + OFFLOAD_SIZE, + NO_PROCESS_SIZE = 1, + NO_PROCESS_MASK = ((1 << NO_PROCESS_SIZE) -1) << NO_PROCESS_SHIFT, + NO_PROCESS = 1 << NO_PROCESS_SHIFT +}; + +/** + * The effect descriptor contains necessary information to facilitate the + * enumeration of the effect engines present in a library. + */ +struct EffectDescriptor { + Uuid type; // UUID of to the OpenSL ES interface implemented + // by this effect + Uuid uuid; // UUID for this particular implementation + bitfield flags; // effect engine capabilities/requirements flags + uint16_t cpuLoad; // CPU load indication expressed in 0.1 MIPS units + // as estimated on an ARM9E core (ARMv5TE) with 0 WS + uint16_t memoryUsage; // data memory usage expressed in KB and includes + // only dynamically allocated memory + uint8_t[64] name; // human readable effect name + uint8_t[64] implementor; // human readable effect implementor name +}; + +/** + * A buffer is a chunk of audio data for processing. Multi-channel audio is + * always interleaved. The channel order is from LSB to MSB with regard to the + * channel mask definition in audio.h, audio_channel_mask_t, e.g.: + * Stereo: L, R; 5.1: FL, FR, FC, LFE, BL, BR. + * + * The buffer size is expressed in frame count, a frame being composed of + * samples for all channels at a given time. Frame size for unspecified format + * (AUDIO_FORMAT_OTHER) is 8 bit by definition. + */ +struct AudioBuffer { + uint64_t id; + uint32_t frameCount; + memory data; +}; + +@export(name="effect_buffer_access_e", value_prefix="EFFECT_BUFFER_") +enum EffectBufferAccess : int32_t { + ACCESS_WRITE, + ACCESS_READ, + ACCESS_ACCUMULATE +}; + +/** + * Determines what fields of EffectBufferConfig need to be considered. + */ +@export(name="", value_prefix="EFFECT_CONFIG_") +enum EffectConfigParameters : int32_t { + BUFFER = 0x0001, // buffer field + SMP_RATE = 0x0002, // samplingRate + CHANNELS = 0x0004, // channels + FORMAT = 0x0008, // format + ACC_MODE = 0x0010, // accessMode + // Note that the 2.0 ALL have been moved to an helper function +}; + +/** + * The buffer config structure specifies the input or output audio format + * to be used by the effect engine. + */ +struct EffectBufferConfig { + AudioBuffer buffer; + uint32_t samplingRateHz; + bitfield channels; + AudioFormat format; + EffectBufferAccess accessMode; + bitfield mask; +}; + +struct EffectConfig { + EffectBufferConfig inputCfg; + EffectBufferConfig outputCfg; +}; + +@export(name="effect_feature_e", value_prefix="EFFECT_FEATURE_") +enum EffectFeature : int32_t { + AUX_CHANNELS, // supports auxiliary channels + // (e.g. dual mic noise suppressor) + CNT +}; + +struct EffectAuxChannelsConfig { + bitfield mainChannels; // channel mask for main channels + bitfield auxChannels; // channel mask for auxiliary channels +}; + +struct EffectOffloadParameter { + bool isOffload; // true if the playback thread the effect + // is attached to is offloaded + AudioIoHandle ioHandle; // io handle of the playback thread + // the effect is attached to +}; + +/** + * The message queue flags used to synchronize reads and writes from + * the status message queue used by effects. 
+ */ +enum MessageQueueFlagBits : uint32_t { + DONE_PROCESSING = 1 << 0, + REQUEST_PROCESS = 1 << 1, + REQUEST_PROCESS_REVERSE = 1 << 2, + REQUEST_QUIT = 1 << 3, + REQUEST_PROCESS_ALL = + REQUEST_PROCESS | REQUEST_PROCESS_REVERSE | REQUEST_QUIT +}; diff --git a/audio/effect/4.0/vts/OWNERS b/audio/effect/4.0/vts/OWNERS new file mode 100644 index 0000000000000000000000000000000000000000..8711a9ff6a1af70374377a271fa3229f471bc067 --- /dev/null +++ b/audio/effect/4.0/vts/OWNERS @@ -0,0 +1,5 @@ +elaurent@google.com +krocard@google.com +mnaganov@google.com +yim@google.com +zhuoyao@google.com \ No newline at end of file diff --git a/audio/effect/4.0/vts/functional/Android.bp b/audio/effect/4.0/vts/functional/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..96ded69d22b43b092f82ae5bb05ac0e0d5ba1cef --- /dev/null +++ b/audio/effect/4.0/vts/functional/Android.bp @@ -0,0 +1,38 @@ +// +// Copyright (C) 2016 The Android Open Source Project +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +cc_test { + name: "VtsHalAudioEffectV4_0TargetTest", + defaults: ["VtsHalTargetTestDefaults"], + srcs: [ + "VtsHalAudioEffectV4_0TargetTest.cpp", + "ValidateAudioEffectsConfiguration.cpp" + ], + static_libs: [ + "android.hardware.audio.common.test.utility", + "android.hardware.audio.common@4.0", + "android.hardware.audio.effect@4.0", + "android.hidl.allocator@1.0", + "android.hidl.memory@1.0", + "libeffectsconfig", + "libicuuc", + "libicuuc_stubdata", + "libxml2", + ], + header_libs: [ + "android.hardware.audio.common.util@all-versions", + ], +} diff --git a/audio/effect/4.0/vts/functional/ValidateAudioEffectsConfiguration.cpp b/audio/effect/4.0/vts/functional/ValidateAudioEffectsConfiguration.cpp new file mode 100644 index 0000000000000000000000000000000000000000..6338563c2ee396d33141755ae4fe6277042cb005 --- /dev/null +++ b/audio/effect/4.0/vts/functional/ValidateAudioEffectsConfiguration.cpp @@ -0,0 +1,32 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
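The EffectFlags enum in types.hal above packs several independent fields into one word, following the bit layout documented in the flags table. Below is a hedged sketch of decoding EffectDescriptor.flags with those SHIFT/MASK constants; the helper name is illustrative, and the flags field is assumed to carry EffectFlags bits.

#include <cstdint>

#include <android/hardware/audio/effect/4.0/types.h>

using ::android::hardware::audio::effect::V4_0::EffectDescriptor;
using ::android::hardware::audio::effect::V4_0::EffectFlags;

// Returns true if the effect is an auxiliary (send) effect that can also be
// offloaded to an audio DSP, per the connection-mode and offload bit fields.
bool isAuxiliaryAndOffloadable(const EffectDescriptor& desc) {
    const uint32_t flags = static_cast<uint32_t>(desc.flags);
    const uint32_t type = flags & static_cast<uint32_t>(EffectFlags::TYPE_MASK);
    const bool isAux = (type == static_cast<uint32_t>(EffectFlags::TYPE_AUXILIARY));
    const bool offloadable =
        (flags & static_cast<uint32_t>(EffectFlags::OFFLOAD_SUPPORTED)) != 0;
    return isAux && offloadable;
}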
+ */ + +#include +#include + +#include + +#include "utility/ValidateXml.h" + +TEST(CheckConfig, audioEffectsConfigurationValidation) { + RecordProperty("description", + "Verify that the effects configuration file is valid according to the schema"); + using namespace android::effectsConfig; + + std::vector locations(std::begin(DEFAULT_LOCATIONS), std::end(DEFAULT_LOCATIONS)); + EXPECT_ONE_VALID_XML_MULTIPLE_LOCATIONS(DEFAULT_NAME, locations, + "/data/local/tmp/audio_effects_conf_V4_0.xsd"); +} diff --git a/audio/effect/4.0/vts/functional/VtsHalAudioEffectV4_0TargetTest.cpp b/audio/effect/4.0/vts/functional/VtsHalAudioEffectV4_0TargetTest.cpp new file mode 100644 index 0000000000000000000000000000000000000000..ec783c4bfa369376a64e407844fc9dad827c11ad --- /dev/null +++ b/audio/effect/4.0/vts/functional/VtsHalAudioEffectV4_0TargetTest.cpp @@ -0,0 +1,852 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "AudioEffectHidlHalTest" +#include +#include + +#include +#include +#include +#include +#include +#include +#include + +#include + +#include +#include + +using android::hardware::audio::common::V4_0::AudioDevice; +using android::hardware::audio::common::V4_0::AudioHandleConsts; +using android::hardware::audio::common::V4_0::AudioMode; +using android::hardware::audio::common::V4_0::AudioSource; +using android::hardware::audio::common::V4_0::Uuid; +using android::hardware::audio::common::utils::mkBitfield; +using android::hardware::audio::effect::V4_0::AudioBuffer; +using android::hardware::audio::effect::V4_0::EffectAuxChannelsConfig; +using android::hardware::audio::effect::V4_0::EffectBufferConfig; +using android::hardware::audio::effect::V4_0::EffectConfig; +using android::hardware::audio::effect::V4_0::EffectDescriptor; +using android::hardware::audio::effect::V4_0::EffectOffloadParameter; +using android::hardware::audio::effect::V4_0::IEffect; +using android::hardware::audio::effect::V4_0::IEffectsFactory; +using android::hardware::audio::effect::V4_0::IEqualizerEffect; +using android::hardware::audio::effect::V4_0::ILoudnessEnhancerEffect; +using android::hardware::audio::effect::V4_0::Result; +using android::hardware::MQDescriptorSync; +using android::hardware::Return; +using android::hardware::Void; +using android::hardware::hidl_handle; +using android::hardware::hidl_memory; +using android::hardware::hidl_string; +using android::hardware::hidl_vec; +using android::hidl::allocator::V1_0::IAllocator; +using android::hidl::memory::V1_0::IMemory; +using android::sp; + +#ifndef ARRAY_SIZE +#define ARRAY_SIZE(a) (sizeof(a) / sizeof(*(a))) +#endif + +// Test environment for Audio Effects Factory HIDL HAL. 
+class AudioEffectsFactoryHidlEnvironment : public ::testing::VtsHalHidlTargetTestEnvBase { + public: + // get the test environment singleton + static AudioEffectsFactoryHidlEnvironment* Instance() { + static AudioEffectsFactoryHidlEnvironment* instance = + new AudioEffectsFactoryHidlEnvironment; + return instance; + } + + virtual void registerTestServices() override { registerTestService(); } +}; + +// The main test class for Audio Effects Factory HIDL HAL. +class AudioEffectsFactoryHidlTest : public ::testing::VtsHalHidlTargetTestBase { + public: + void SetUp() override { + effectsFactory = ::testing::VtsHalHidlTargetTestBase::getService( + AudioEffectsFactoryHidlEnvironment::Instance()->getServiceName()); + ASSERT_NE(effectsFactory, nullptr); + } + + void TearDown() override { effectsFactory.clear(); } + + protected: + static void description(const std::string& description) { + RecordProperty("description", description); + } + + sp effectsFactory; +}; + +TEST_F(AudioEffectsFactoryHidlTest, EnumerateEffects) { + description("Verify that EnumerateEffects returns at least one effect"); + Result retval = Result::NOT_INITIALIZED; + size_t effectCount = 0; + Return ret = effectsFactory->getAllDescriptors( + [&](Result r, const hidl_vec& result) { + retval = r; + effectCount = result.size(); + }); + EXPECT_TRUE(ret.isOk()); + EXPECT_EQ(Result::OK, retval); + EXPECT_GT(effectCount, 0u); +} + +TEST_F(AudioEffectsFactoryHidlTest, CreateEffect) { + description("Verify that an effect can be created via CreateEffect"); + bool gotEffect = false; + Uuid effectUuid; + Return ret = effectsFactory->getAllDescriptors( + [&](Result r, const hidl_vec& result) { + if (r == Result::OK && result.size() > 0) { + gotEffect = true; + effectUuid = result[0].uuid; + } + }); + ASSERT_TRUE(ret.isOk()); + ASSERT_TRUE(gotEffect); + Result retval = Result::NOT_INITIALIZED; + sp effect; + ret = effectsFactory->createEffect( + effectUuid, 1 /*session*/, 1 /*ioHandle*/, + [&](Result r, const sp& result, uint64_t /*effectId*/) { + retval = r; + if (r == Result::OK) { + effect = result; + } + }); + EXPECT_TRUE(ret.isOk()); + EXPECT_EQ(Result::OK, retval); + EXPECT_NE(nullptr, effect.get()); +} + +TEST_F(AudioEffectsFactoryHidlTest, GetDescriptor) { + description( + "Verify that effects factory can provide an effect descriptor via " + "GetDescriptor"); + hidl_vec allDescriptors; + Return ret = effectsFactory->getAllDescriptors( + [&](Result r, const hidl_vec& result) { + if (r == Result::OK) { + allDescriptors = result; + } + }); + ASSERT_TRUE(ret.isOk()); + ASSERT_GT(allDescriptors.size(), 0u); + for (size_t i = 0; i < allDescriptors.size(); ++i) { + ret = effectsFactory->getDescriptor( + allDescriptors[i].uuid, [&](Result r, const EffectDescriptor& result) { + EXPECT_EQ(r, Result::OK); + EXPECT_EQ(result, allDescriptors[i]); + }); + } + EXPECT_TRUE(ret.isOk()); +} + +TEST_F(AudioEffectsFactoryHidlTest, DebugDumpInvalidArgument) { + description("Verify that debugDump doesn't crash on invalid arguments"); + Return ret = effectsFactory->debug(hidl_handle(), {}); + ASSERT_TRUE(ret.isOk()); +} + +// Equalizer effect is required by CDD, but only the type is fixed. +// This is the same UUID as AudioEffect.EFFECT_TYPE_EQUALIZER in Java. +static const Uuid EQUALIZER_EFFECT_TYPE = { + 0x0bed4300, 0xddd6, 0x11db, 0x8f34, + std::array{{0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}}; +// Loudness Enhancer effect is required by CDD, but only the type is fixed. +// This is the same UUID as AudioEffect.EFFECT_TYPE_LOUDNESS_ENHANCER in Java. 
+static const Uuid LOUDNESS_ENHANCER_EFFECT_TYPE = { + 0xfe3199be, 0xaed0, 0x413f, 0x87bb, + std::array{{0x11, 0x26, 0x0e, 0xb6, 0x3c, 0xf1}}}; + +// The main test class for Audio Effect HIDL HAL. +class AudioEffectHidlTest : public ::testing::VtsHalHidlTargetTestBase { + public: + void SetUp() override { + effectsFactory = + ::testing::VtsHalHidlTargetTestBase::getService(); + ASSERT_NE(nullptr, effectsFactory.get()); + + findAndCreateEffect(getEffectType()); + ASSERT_NE(nullptr, effect.get()); + + Return ret = effect->init(); + ASSERT_TRUE(ret.isOk()); + ASSERT_EQ(Result::OK, ret); + } + + void TearDown() override { + effect.clear(); + effectsFactory.clear(); + } + + protected: + static void description(const std::string& description) { + RecordProperty("description", description); + } + + virtual Uuid getEffectType() { return EQUALIZER_EFFECT_TYPE; } + + void findAndCreateEffect(const Uuid& type); + void findEffectInstance(const Uuid& type, Uuid* uuid); + void getChannelCount(uint32_t* channelCount); + + sp effectsFactory; + sp effect; +}; + +void AudioEffectHidlTest::findAndCreateEffect(const Uuid& type) { + Uuid effectUuid; + findEffectInstance(type, &effectUuid); + Return ret = effectsFactory->createEffect( + effectUuid, 1 /*session*/, 1 /*ioHandle*/, + [&](Result r, const sp& result, uint64_t /*effectId*/) { + if (r == Result::OK) { + effect = result; + } + }); + ASSERT_TRUE(ret.isOk()); +} + +void AudioEffectHidlTest::findEffectInstance(const Uuid& type, Uuid* uuid) { + bool effectFound = false; + Return ret = effectsFactory->getAllDescriptors( + [&](Result r, const hidl_vec& result) { + if (r == Result::OK) { + for (const auto& desc : result) { + if (desc.type == type) { + effectFound = true; + *uuid = desc.uuid; + break; + } + } + } + }); + ASSERT_TRUE(ret.isOk()); + ASSERT_TRUE(effectFound); +} + +void AudioEffectHidlTest::getChannelCount(uint32_t* channelCount) { + Result retval; + EffectConfig currentConfig; + Return ret = effect->getConfig([&](Result r, const EffectConfig& conf) { + retval = r; + if (r == Result::OK) { + currentConfig = conf; + } + }); + ASSERT_TRUE(ret.isOk()); + ASSERT_EQ(Result::OK, retval); + ASSERT_TRUE(audio_channel_mask_is_valid( + static_cast(currentConfig.outputCfg.channels))); + *channelCount = audio_channel_count_from_out_mask( + static_cast(currentConfig.outputCfg.channels)); +} + +TEST_F(AudioEffectHidlTest, Close) { + description("Verify that an effect can be closed"); + Return ret = effect->close(); + EXPECT_TRUE(ret.isOk()); + EXPECT_EQ(Result::OK, ret); +} + +TEST_F(AudioEffectHidlTest, GetDescriptor) { + description( + "Verify that an effect can return its own descriptor via GetDescriptor"); + Result retval = Result::NOT_INITIALIZED; + Uuid actualType; + Return ret = + effect->getDescriptor([&](Result r, const EffectDescriptor& desc) { + retval = r; + if (r == Result::OK) { + actualType = desc.type; + } + }); + EXPECT_TRUE(ret.isOk()); + EXPECT_EQ(Result::OK, retval); + EXPECT_EQ(getEffectType(), actualType); +} + +TEST_F(AudioEffectHidlTest, GetSetConfig) { + description( + "Verify that it is possible to manipulate effect config via Get / " + "SetConfig"); + Result retval = Result::NOT_INITIALIZED; + EffectConfig currentConfig; + Return ret = effect->getConfig([&](Result r, const EffectConfig& conf) { + retval = r; + if (r == Result::OK) { + currentConfig = conf; + } + }); + EXPECT_TRUE(ret.isOk()); + EXPECT_EQ(Result::OK, retval); + Return ret2 = effect->setConfig(currentConfig, nullptr, nullptr); + EXPECT_TRUE(ret2.isOk()); + 
EXPECT_EQ(Result::OK, ret2); +} + +TEST_F(AudioEffectHidlTest, GetConfigReverse) { + description("Verify that GetConfigReverse does not crash"); + Return ret = + effect->getConfigReverse([&](Result, const EffectConfig&) {}); + EXPECT_TRUE(ret.isOk()); +} + +TEST_F(AudioEffectHidlTest, GetSupportedAuxChannelsConfigs) { + description("Verify that GetSupportedAuxChannelsConfigs does not crash"); + Return ret = effect->getSupportedAuxChannelsConfigs( + 0, [&](Result, const hidl_vec&) {}); + EXPECT_TRUE(ret.isOk()); +} + +TEST_F(AudioEffectHidlTest, GetAuxChannelsConfig) { + description("Verify that GetAuxChannelsConfig does not crash"); + Return ret = effect->getAuxChannelsConfig( + [&](Result, const EffectAuxChannelsConfig&) {}); + EXPECT_TRUE(ret.isOk()); +} + +TEST_F(AudioEffectHidlTest, SetAuxChannelsConfig) { + description("Verify that SetAuxChannelsConfig does not crash"); + Return ret = effect->setAuxChannelsConfig(EffectAuxChannelsConfig()); + EXPECT_TRUE(ret.isOk()); +} + +// Not generated automatically because AudioBuffer contains +// instances of hidl_memory which can't be compared properly +// in general case due to presence of handles. +// +// However, in this particular case, handles must not present +// thus comparison is possible. +// +// operator== must be defined in the same namespace as the structures. +namespace android { +namespace hardware { +namespace audio { +namespace effect { +namespace V4_0 { +inline bool operator==(const AudioBuffer& lhs, const AudioBuffer& rhs) { + return lhs.id == rhs.id && lhs.frameCount == rhs.frameCount && + lhs.data.handle() == nullptr && rhs.data.handle() == nullptr; +} + +inline bool operator==(const EffectBufferConfig& lhs, + const EffectBufferConfig& rhs) { + return lhs.buffer == rhs.buffer && lhs.samplingRateHz == rhs.samplingRateHz && + lhs.channels == rhs.channels && lhs.format == rhs.format && + lhs.accessMode == rhs.accessMode && lhs.mask == rhs.mask; +} + +inline bool operator==(const EffectConfig& lhs, const EffectConfig& rhs) { + return lhs.inputCfg == rhs.inputCfg && lhs.outputCfg == rhs.outputCfg; +} +} // namespace V4_0 +} // namespace effect +} // namespace audio +} // namespace hardware +} // namespace android + +TEST_F(AudioEffectHidlTest, Reset) { + description("Verify that Reset preserves effect configuration"); + Result retval = Result::NOT_INITIALIZED; + EffectConfig originalConfig; + Return ret = effect->getConfig([&](Result r, const EffectConfig& conf) { + retval = r; + if (r == Result::OK) { + originalConfig = conf; + } + }); + ASSERT_TRUE(ret.isOk()); + ASSERT_EQ(Result::OK, retval); + Return ret2 = effect->reset(); + EXPECT_TRUE(ret2.isOk()); + EXPECT_EQ(Result::OK, ret2); + EffectConfig configAfterReset; + ret = effect->getConfig([&](Result r, const EffectConfig& conf) { + retval = r; + if (r == Result::OK) { + configAfterReset = conf; + } + }); + EXPECT_EQ(originalConfig, configAfterReset); +} + +TEST_F(AudioEffectHidlTest, DisableEnableDisable) { + description("Verify Disable -> Enable -> Disable sequence for an effect"); + Return ret = effect->disable(); + EXPECT_TRUE(ret.isOk()); + EXPECT_EQ(Result::INVALID_ARGUMENTS, ret); + ret = effect->enable(); + EXPECT_TRUE(ret.isOk()); + EXPECT_EQ(Result::OK, ret); + ret = effect->disable(); + EXPECT_TRUE(ret.isOk()); + EXPECT_EQ(Result::OK, ret); +} + +TEST_F(AudioEffectHidlTest, SetDevice) { + description("Verify that SetDevice works for an output chain effect"); + Return ret = effect->setDevice(mkBitfield(AudioDevice::OUT_SPEAKER)); + EXPECT_TRUE(ret.isOk()); + 
EXPECT_EQ(Result::OK, ret); +} + +TEST_F(AudioEffectHidlTest, SetAndGetVolume) { + description("Verify that SetAndGetVolume method works for an effect"); + uint32_t channelCount; + getChannelCount(&channelCount); + hidl_vec volumes; + volumes.resize(channelCount); + for (uint32_t i = 0; i < channelCount; ++i) { + volumes[i] = 0; + } + Result retval = Result::NOT_INITIALIZED; + Return ret = effect->setAndGetVolume( + volumes, [&](Result r, const hidl_vec&) { retval = r; }); + EXPECT_TRUE(ret.isOk()); + EXPECT_EQ(Result::OK, retval); +} + +TEST_F(AudioEffectHidlTest, VolumeChangeNotification) { + description("Verify that effect accepts VolumeChangeNotification"); + uint32_t channelCount; + getChannelCount(&channelCount); + hidl_vec volumes; + volumes.resize(channelCount); + for (uint32_t i = 0; i < channelCount; ++i) { + volumes[i] = 0; + } + Return ret = effect->volumeChangeNotification(volumes); + EXPECT_TRUE(ret.isOk()); + EXPECT_EQ(Result::OK, ret); +} + +TEST_F(AudioEffectHidlTest, SetAudioMode) { + description("Verify that SetAudioMode works for an effect"); + Return ret = effect->setAudioMode(AudioMode::NORMAL); + EXPECT_TRUE(ret.isOk()); + EXPECT_EQ(Result::OK, ret); +} + +TEST_F(AudioEffectHidlTest, SetConfigReverse) { + description("Verify that SetConfigReverse does not crash"); + Return ret = + effect->setConfigReverse(EffectConfig(), nullptr, nullptr); + EXPECT_TRUE(ret.isOk()); +} + +TEST_F(AudioEffectHidlTest, SetInputDevice) { + description("Verify that SetInputDevice does not crash"); + Return ret = effect->setInputDevice(mkBitfield(AudioDevice::IN_BUILTIN_MIC)); + EXPECT_TRUE(ret.isOk()); +} + +TEST_F(AudioEffectHidlTest, SetAudioSource) { + description("Verify that SetAudioSource does not crash"); + Return ret = effect->setAudioSource(AudioSource::MIC); + EXPECT_TRUE(ret.isOk()); +} + +TEST_F(AudioEffectHidlTest, Offload) { + description("Verify that calling Offload method does not crash"); + EffectOffloadParameter offloadParam; + offloadParam.isOffload = false; + offloadParam.ioHandle = + static_cast(AudioHandleConsts::AUDIO_IO_HANDLE_NONE); + Return ret = effect->offload(offloadParam); + EXPECT_TRUE(ret.isOk()); +} + +TEST_F(AudioEffectHidlTest, PrepareForProcessing) { + description("Verify that PrepareForProcessing method works for an effect"); + Result retval = Result::NOT_INITIALIZED; + Return ret = effect->prepareForProcessing( + [&](Result r, const MQDescriptorSync&) { retval = r; }); + EXPECT_TRUE(ret.isOk()); + EXPECT_EQ(Result::OK, retval); +} + +TEST_F(AudioEffectHidlTest, SetProcessBuffers) { + description("Verify that SetProcessBuffers works for an effect"); + sp ashmem = IAllocator::getService("ashmem"); + ASSERT_NE(nullptr, ashmem.get()); + bool success = false; + AudioBuffer buffer; + Return ret = + ashmem->allocate(1024, [&](bool s, const hidl_memory& memory) { + success = s; + if (s) { + buffer.data = memory; + } + }); + ASSERT_TRUE(ret.isOk()); + ASSERT_TRUE(success); + Return ret2 = effect->setProcessBuffers(buffer, buffer); + EXPECT_TRUE(ret2.isOk()); + EXPECT_EQ(Result::OK, ret2); +} + +TEST_F(AudioEffectHidlTest, Command) { + description("Verify that Command does not crash"); + Return ret = effect->command(0, hidl_vec(), 0, + [&](int32_t, const hidl_vec&) {}); + EXPECT_TRUE(ret.isOk()); +} + +TEST_F(AudioEffectHidlTest, SetParameter) { + description("Verify that SetParameter does not crash"); + Return ret = + effect->setParameter(hidl_vec(), hidl_vec()); + EXPECT_TRUE(ret.isOk()); +} + +TEST_F(AudioEffectHidlTest, GetParameter) { + description("Verify 
that GetParameter does not crash"); + Return ret = effect->getParameter( + hidl_vec(), 0, [&](Result, const hidl_vec&) {}); + EXPECT_TRUE(ret.isOk()); +} + +TEST_F(AudioEffectHidlTest, GetSupportedConfigsForFeature) { + description("Verify that GetSupportedConfigsForFeature does not crash"); + Return ret = effect->getSupportedConfigsForFeature( + 0, 0, 0, [&](Result, uint32_t, const hidl_vec&) {}); + EXPECT_TRUE(ret.isOk()); +} + +TEST_F(AudioEffectHidlTest, GetCurrentConfigForFeature) { + description("Verify that GetCurrentConfigForFeature does not crash"); + Return ret = effect->getCurrentConfigForFeature( + 0, 0, [&](Result, const hidl_vec&) {}); + EXPECT_TRUE(ret.isOk()); +} + +TEST_F(AudioEffectHidlTest, SetCurrentConfigForFeature) { + description("Verify that SetCurrentConfigForFeature does not crash"); + Return ret = + effect->setCurrentConfigForFeature(0, hidl_vec()); + EXPECT_TRUE(ret.isOk()); +} + + +// The main test class for Equalizer Audio Effect HIDL HAL. +class EqualizerAudioEffectHidlTest : public AudioEffectHidlTest { + public: + void SetUp() override { + AudioEffectHidlTest::SetUp(); + equalizer = IEqualizerEffect::castFrom(effect); + ASSERT_NE(nullptr, equalizer.get()); + } + + protected: + Uuid getEffectType() override { return EQUALIZER_EFFECT_TYPE; } + void getNumBands(uint16_t* numBands); + void getLevelRange(int16_t* minLevel, int16_t* maxLevel); + void getBandFrequencyRange(uint16_t band, uint32_t* minFreq, + uint32_t* centerFreq, uint32_t* maxFreq); + void getPresetCount(size_t* count); + + sp equalizer; +}; + +void EqualizerAudioEffectHidlTest::getNumBands(uint16_t* numBands) { + Result retval = Result::NOT_INITIALIZED; + Return ret = equalizer->getNumBands([&](Result r, uint16_t b) { + retval = r; + if (retval == Result::OK) { + *numBands = b; + } + }); + ASSERT_TRUE(ret.isOk()); + ASSERT_EQ(Result::OK, retval); +} + +void EqualizerAudioEffectHidlTest::getLevelRange(int16_t* minLevel, + int16_t* maxLevel) { + Result retval = Result::NOT_INITIALIZED; + Return ret = + equalizer->getLevelRange([&](Result r, int16_t min, int16_t max) { + retval = r; + if (retval == Result::OK) { + *minLevel = min; + *maxLevel = max; + } + }); + ASSERT_TRUE(ret.isOk()); + ASSERT_EQ(Result::OK, retval); +} + +void EqualizerAudioEffectHidlTest::getBandFrequencyRange(uint16_t band, + uint32_t* minFreq, + uint32_t* centerFreq, + uint32_t* maxFreq) { + Result retval = Result::NOT_INITIALIZED; + Return ret = equalizer->getBandFrequencyRange( + band, [&](Result r, uint32_t min, uint32_t max) { + retval = r; + if (retval == Result::OK) { + *minFreq = min; + *maxFreq = max; + } + }); + ASSERT_TRUE(ret.isOk()); + ASSERT_EQ(Result::OK, retval); + ret = equalizer->getBandCenterFrequency(band, [&](Result r, uint32_t center) { + retval = r; + if (retval == Result::OK) { + *centerFreq = center; + } + }); + ASSERT_TRUE(ret.isOk()); + ASSERT_EQ(Result::OK, retval); +} + +void EqualizerAudioEffectHidlTest::getPresetCount(size_t* count) { + Result retval = Result::NOT_INITIALIZED; + Return ret = equalizer->getPresetNames( + [&](Result r, const hidl_vec& names) { + retval = r; + if (retval == Result::OK) { + *count = names.size(); + } + }); + ASSERT_TRUE(ret.isOk()); + ASSERT_EQ(Result::OK, retval); +} + +TEST_F(EqualizerAudioEffectHidlTest, GetNumBands) { + description("Verify that Equalizer effect reports at least one band"); + uint16_t numBands = 0; + getNumBands(&numBands); + EXPECT_GT(numBands, 0); +} + +TEST_F(EqualizerAudioEffectHidlTest, GetLevelRange) { + description("Verify that Equalizer 
effect reports adequate band level range");
+  int16_t minLevel = 0x7fff, maxLevel = 0;
+  getLevelRange(&minLevel, &maxLevel);
+  EXPECT_GT(maxLevel, minLevel);
+}
+
+TEST_F(EqualizerAudioEffectHidlTest, GetSetBandLevel) {
+  description(
+      "Verify that manipulating band levels works for Equalizer effect");
+  uint16_t numBands = 0;
+  getNumBands(&numBands);
+  ASSERT_GT(numBands, 0);
+  int16_t levels[3]{0x7fff, 0, 0};
+  getLevelRange(&levels[0], &levels[2]);
+  ASSERT_GT(levels[2], levels[0]);
+  levels[1] = (levels[2] + levels[0]) / 2;
+  for (uint16_t i = 0; i < numBands; ++i) {
+    for (size_t j = 0; j < ARRAY_SIZE(levels); ++j) {
+      Return<Result> ret = equalizer->setBandLevel(i, levels[j]);
+      EXPECT_TRUE(ret.isOk());
+      EXPECT_EQ(Result::OK, ret);
+      Result retval = Result::NOT_INITIALIZED;
+      int16_t actualLevel;
+      Return<void> ret2 = equalizer->getBandLevel(i, [&](Result r, int16_t l) {
+        retval = r;
+        if (retval == Result::OK) {
+          actualLevel = l;
+        }
+      });
+      EXPECT_TRUE(ret2.isOk());
+      EXPECT_EQ(Result::OK, retval);
+      EXPECT_EQ(levels[j], actualLevel);
+    }
+  }
+}
+
+TEST_F(EqualizerAudioEffectHidlTest, GetBandCenterFrequencyAndRange) {
+  description(
+      "Verify that Equalizer effect reports adequate band frequency range");
+  uint16_t numBands = 0;
+  getNumBands(&numBands);
+  ASSERT_GT(numBands, 0);
+  for (uint16_t i = 0; i < numBands; ++i) {
+    uint32_t minFreq = 0xffffffff, centerFreq = 0xffffffff,
+             maxFreq = 0xffffffff;
+    getBandFrequencyRange(i, &minFreq, &centerFreq, &maxFreq);
+    // Note: NXP legacy implementation reports "1" as upper bound for last band,
+    // so this check fails.
+    EXPECT_GE(maxFreq, centerFreq);
+    EXPECT_GE(centerFreq, minFreq);
+  }
+}
+
+TEST_F(EqualizerAudioEffectHidlTest, GetBandForFrequency) {
+  description(
+      "Verify that Equalizer effect supports GetBandForFrequency correctly");
+  uint16_t numBands = 0;
+  getNumBands(&numBands);
+  ASSERT_GT(numBands, 0);
+  for (uint16_t i = 0; i < numBands; ++i) {
+    uint32_t freqs[3]{0, 0, 0};
+    getBandFrequencyRange(i, &freqs[0], &freqs[1], &freqs[2]);
+    // NXP legacy implementation reports "1" as upper bound for last band, some
+    // of the checks fail.
+    for (size_t j = 0; j < ARRAY_SIZE(freqs); ++j) {
+      if (j == 0) {
+        freqs[j]++;
+      }  // Min frequency is an open interval.
+ Result retval = Result::NOT_INITIALIZED; + uint16_t actualBand = numBands + 1; + Return ret = + equalizer->getBandForFrequency(freqs[j], [&](Result r, uint16_t b) { + retval = r; + if (retval == Result::OK) { + actualBand = b; + } + }); + EXPECT_TRUE(ret.isOk()); + EXPECT_EQ(Result::OK, retval); + EXPECT_EQ(i, actualBand) << "Frequency: " << freqs[j]; + } + } +} + +TEST_F(EqualizerAudioEffectHidlTest, GetPresetNames) { + description("Verify that Equalizer effect reports at least one preset"); + size_t presetCount; + getPresetCount(&presetCount); + EXPECT_GT(presetCount, 0u); +} + +TEST_F(EqualizerAudioEffectHidlTest, GetSetCurrentPreset) { + description( + "Verify that manipulating the current preset for Equalizer effect"); + size_t presetCount; + getPresetCount(&presetCount); + ASSERT_GT(presetCount, 0u); + for (uint16_t i = 0; i < presetCount; ++i) { + Return ret = equalizer->setCurrentPreset(i); + EXPECT_TRUE(ret.isOk()); + EXPECT_EQ(Result::OK, ret); + Result retval = Result::NOT_INITIALIZED; + uint16_t actualPreset = 0xffff; + Return ret2 = equalizer->getCurrentPreset([&](Result r, uint16_t p) { + retval = r; + if (retval == Result::OK) { + actualPreset = p; + } + }); + EXPECT_TRUE(ret2.isOk()); + EXPECT_EQ(Result::OK, retval); + EXPECT_EQ(i, actualPreset); + } +} + +TEST_F(EqualizerAudioEffectHidlTest, GetSetAllProperties) { + description( + "Verify that setting band levels and presets works via Get / " + "SetAllProperties for Equalizer effect"); + using AllProperties = + android::hardware::audio::effect::V4_0::IEqualizerEffect::AllProperties; + uint16_t numBands = 0; + getNumBands(&numBands); + ASSERT_GT(numBands, 0); + AllProperties props; + props.bandLevels.resize(numBands); + for (size_t i = 0; i < numBands; ++i) { + props.bandLevels[i] = 0; + } + + AllProperties actualProps; + Result retval = Result::NOT_INITIALIZED; + + // Verify setting of the band levels via properties. + props.curPreset = -1; + Return ret = equalizer->setAllProperties(props); + EXPECT_TRUE(ret.isOk()); + EXPECT_EQ(Result::OK, ret); + Return ret2 = + equalizer->getAllProperties([&](Result r, AllProperties p) { + retval = r; + if (retval == Result::OK) { + actualProps = p; + } + }); + EXPECT_TRUE(ret2.isOk()); + EXPECT_EQ(Result::OK, retval); + EXPECT_EQ(props.bandLevels, actualProps.bandLevels); + + // Verify setting of the current preset via properties. + props.curPreset = 0; // Assuming there is at least one preset. + ret = equalizer->setAllProperties(props); + EXPECT_TRUE(ret.isOk()); + EXPECT_EQ(Result::OK, ret); + ret2 = equalizer->getAllProperties([&](Result r, AllProperties p) { + retval = r; + if (retval == Result::OK) { + actualProps = p; + } + }); + EXPECT_TRUE(ret2.isOk()); + EXPECT_EQ(Result::OK, retval); + EXPECT_EQ(props.curPreset, actualProps.curPreset); +} + +// The main test class for Equalizer Audio Effect HIDL HAL. 
+class LoudnessEnhancerAudioEffectHidlTest : public AudioEffectHidlTest { + public: + void SetUp() override { + AudioEffectHidlTest::SetUp(); + enhancer = ILoudnessEnhancerEffect::castFrom(effect); + ASSERT_NE(nullptr, enhancer.get()); + } + + protected: + Uuid getEffectType() override { return LOUDNESS_ENHANCER_EFFECT_TYPE; } + + sp enhancer; +}; + +TEST_F(LoudnessEnhancerAudioEffectHidlTest, GetSetTargetGain) { + description( + "Verify that manipulating the target gain works for Loudness Enhancer " + "effect"); + const int32_t gain = 100; + Return ret = enhancer->setTargetGain(gain); + EXPECT_TRUE(ret.isOk()); + EXPECT_EQ(Result::OK, ret); + int32_t actualGain = 0; + Result retval; + Return ret2 = enhancer->getTargetGain([&](Result r, int32_t g) { + retval = r; + if (retval == Result::OK) { + actualGain = g; + } + }); + EXPECT_TRUE(ret2.isOk()); + EXPECT_EQ(Result::OK, retval); + EXPECT_EQ(gain, actualGain); +} + +int main(int argc, char** argv) { + ::testing::AddGlobalTestEnvironment(AudioEffectsFactoryHidlEnvironment::Instance()); + ::testing::InitGoogleTest(&argc, argv); + AudioEffectsFactoryHidlEnvironment::Instance()->init(&argc, argv); + int status = RUN_ALL_TESTS(); + LOG(INFO) << "Test result = " << status; + return status; +} diff --git a/audio/effect/4.0/xml/audio_effects_conf_V4_0.xsd b/audio/effect/4.0/xml/audio_effects_conf_V4_0.xsd new file mode 120000 index 0000000000000000000000000000000000000000..82d569a78376d884f0b401c4419746bfa6104949 --- /dev/null +++ b/audio/effect/4.0/xml/audio_effects_conf_V4_0.xsd @@ -0,0 +1 @@ +../../2.0/xml/audio_effects_conf_V2_0.xsd \ No newline at end of file diff --git a/audio/effect/all-versions/OWNERS b/audio/effect/all-versions/OWNERS new file mode 100644 index 0000000000000000000000000000000000000000..6fdc97ca298fbbda9cb676f5acb02d7495debcb4 --- /dev/null +++ b/audio/effect/all-versions/OWNERS @@ -0,0 +1,3 @@ +elaurent@google.com +krocard@google.com +mnaganov@google.com diff --git a/audio/effect/all-versions/default/Android.bp b/audio/effect/all-versions/default/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..ed2a093050c2c067920d30f8fc1e600324a78880 --- /dev/null +++ b/audio/effect/all-versions/default/Android.bp @@ -0,0 +1,31 @@ +cc_library_headers { + name: "android.hardware.audio.effect@all-versions-impl", + defaults: ["hidl_defaults"], + vendor: true, + relative_install_path: "hw", + + export_include_dirs: ["include"], + + shared_libs: [ + "libbase", + "libcutils", + "libeffects", + "libfmq", + "libhidlbase", + "libhidlmemory", + "libhidltransport", + "liblog", + "libutils", + "android.hardware.audio.common-util", + "android.hidl.memory@1.0", + ], + + header_libs: [ + "libaudio_system_headers", + "libaudioclient_headers", + "libeffects_headers", + "libhardware_headers", + "libmedia_headers", + "android.hardware.audio.common.util@all-versions", + ], +} diff --git a/audio/effect/all-versions/default/include/effect/all-versions/default/AcousticEchoCancelerEffect.h b/audio/effect/all-versions/default/include/effect/all-versions/default/AcousticEchoCancelerEffect.h new file mode 100644 index 0000000000000000000000000000000000000000..852cb3fd152d189d9f373b4f81b0eecc4fd32c18 --- /dev/null +++ b/audio/effect/all-versions/default/include/effect/all-versions/default/AcousticEchoCancelerEffect.h @@ -0,0 +1,103 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include + +#include + +#include "VersionUtils.h" + +namespace android { +namespace hardware { +namespace audio { +namespace effect { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IAcousticEchoCancelerEffect; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::Result; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; + +struct AcousticEchoCancelerEffect : public IAcousticEchoCancelerEffect { + explicit AcousticEchoCancelerEffect(effect_handle_t handle); + + // Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffect follow. + Return init() override; + Return setConfig( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) override; + Return reset() override; + Return enable() override; + Return disable() override; + Return setDevice(AudioDeviceBitfield device) override; + Return setAndGetVolume(const hidl_vec& volumes, + setAndGetVolume_cb _hidl_cb) override; + Return volumeChangeNotification(const hidl_vec& volumes) override; + Return setAudioMode(AudioMode mode) override; + Return setConfigReverse( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) override; + Return setInputDevice(AudioDeviceBitfield device) override; + Return getConfig(getConfig_cb _hidl_cb) override; + Return getConfigReverse(getConfigReverse_cb _hidl_cb) override; + Return getSupportedAuxChannelsConfigs( + uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) override; + Return getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) override; + Return setAuxChannelsConfig(const EffectAuxChannelsConfig& config) override; + Return setAudioSource(AudioSource source) override; + Return offload(const EffectOffloadParameter& param) override; + Return getDescriptor(getDescriptor_cb _hidl_cb) override; + Return prepareForProcessing(prepareForProcessing_cb _hidl_cb) override; + Return setProcessBuffers(const AudioBuffer& inBuffer, + const AudioBuffer& outBuffer) override; + Return command(uint32_t commandId, const hidl_vec& data, uint32_t resultMaxSize, + command_cb _hidl_cb) override; + Return setParameter(const hidl_vec& parameter, + const hidl_vec& value) override; + Return getParameter(const hidl_vec& parameter, uint32_t valueMaxSize, + getParameter_cb _hidl_cb) override; + Return getSupportedConfigsForFeature(uint32_t featureId, uint32_t maxConfigs, + uint32_t configSize, + getSupportedConfigsForFeature_cb _hidl_cb) override; + Return getCurrentConfigForFeature(uint32_t featureId, uint32_t configSize, + getCurrentConfigForFeature_cb _hidl_cb) override; + Return setCurrentConfigForFeature(uint32_t featureId, + const hidl_vec& configData) override; + Return close() override; + + // Methods from + // ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IAcousticEchoCancelerEffect follow. 
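For context (not taken from this change): the headers in this directory are parameterized on the AUDIO_HAL_VERSION macro rather than on a concrete version namespace. A per-version default implementation would presumably bind the macro and then include the matching .impl.h, roughly as sketched here; the include path and version token below are assumptions for illustration only.

// Hypothetical translation unit in a versioned default implementation:
#define AUDIO_HAL_VERSION V4_0
#include <effect/all-versions/default/AcousticEchoCancelerEffect.impl.h>
#undef AUDIO_HAL_VERSION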
+ Return setEchoDelay(uint32_t echoDelayMs) override; + Return getEchoDelay(getEchoDelay_cb _hidl_cb) override; + + private: + sp mEffect; + + virtual ~AcousticEchoCancelerEffect(); +}; + +} // namespace implementation +} // namespace AUDIO_HAL_VERSION +} // namespace effect +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/effect/all-versions/default/include/effect/all-versions/default/AcousticEchoCancelerEffect.impl.h b/audio/effect/all-versions/default/include/effect/all-versions/default/AcousticEchoCancelerEffect.impl.h new file mode 100644 index 0000000000000000000000000000000000000000..8ad80a22a071d47d5530e4610bbe7296ae95f7d4 --- /dev/null +++ b/audio/effect/all-versions/default/include/effect/all-versions/default/AcousticEchoCancelerEffect.impl.h @@ -0,0 +1,181 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include +#include + +#include "VersionUtils.h" + +namespace android { +namespace hardware { +namespace audio { +namespace effect { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +AcousticEchoCancelerEffect::AcousticEchoCancelerEffect(effect_handle_t handle) + : mEffect(new Effect(handle)) {} + +AcousticEchoCancelerEffect::~AcousticEchoCancelerEffect() {} + +// Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffect follow. 
+Return AcousticEchoCancelerEffect::init() { + return mEffect->init(); +} + +Return AcousticEchoCancelerEffect::setConfig( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) { + return mEffect->setConfig(config, inputBufferProvider, outputBufferProvider); +} + +Return AcousticEchoCancelerEffect::reset() { + return mEffect->reset(); +} + +Return AcousticEchoCancelerEffect::enable() { + return mEffect->enable(); +} + +Return AcousticEchoCancelerEffect::disable() { + return mEffect->disable(); +} + +Return AcousticEchoCancelerEffect::setDevice(AudioDeviceBitfield device) { + return mEffect->setDevice(device); +} + +Return AcousticEchoCancelerEffect::setAndGetVolume(const hidl_vec& volumes, + setAndGetVolume_cb _hidl_cb) { + return mEffect->setAndGetVolume(volumes, _hidl_cb); +} + +Return AcousticEchoCancelerEffect::volumeChangeNotification( + const hidl_vec& volumes) { + return mEffect->volumeChangeNotification(volumes); +} + +Return AcousticEchoCancelerEffect::setAudioMode(AudioMode mode) { + return mEffect->setAudioMode(mode); +} + +Return AcousticEchoCancelerEffect::setConfigReverse( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) { + return mEffect->setConfigReverse(config, inputBufferProvider, outputBufferProvider); +} + +Return AcousticEchoCancelerEffect::setInputDevice(AudioDeviceBitfield device) { + return mEffect->setInputDevice(device); +} + +Return AcousticEchoCancelerEffect::getConfig(getConfig_cb _hidl_cb) { + return mEffect->getConfig(_hidl_cb); +} + +Return AcousticEchoCancelerEffect::getConfigReverse(getConfigReverse_cb _hidl_cb) { + return mEffect->getConfigReverse(_hidl_cb); +} + +Return AcousticEchoCancelerEffect::getSupportedAuxChannelsConfigs( + uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) { + return mEffect->getSupportedAuxChannelsConfigs(maxConfigs, _hidl_cb); +} + +Return AcousticEchoCancelerEffect::getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) { + return mEffect->getAuxChannelsConfig(_hidl_cb); +} + +Return AcousticEchoCancelerEffect::setAuxChannelsConfig( + const EffectAuxChannelsConfig& config) { + return mEffect->setAuxChannelsConfig(config); +} + +Return AcousticEchoCancelerEffect::setAudioSource(AudioSource source) { + return mEffect->setAudioSource(source); +} + +Return AcousticEchoCancelerEffect::offload(const EffectOffloadParameter& param) { + return mEffect->offload(param); +} + +Return AcousticEchoCancelerEffect::getDescriptor(getDescriptor_cb _hidl_cb) { + return mEffect->getDescriptor(_hidl_cb); +} + +Return AcousticEchoCancelerEffect::prepareForProcessing(prepareForProcessing_cb _hidl_cb) { + return mEffect->prepareForProcessing(_hidl_cb); +} + +Return AcousticEchoCancelerEffect::setProcessBuffers(const AudioBuffer& inBuffer, + const AudioBuffer& outBuffer) { + return mEffect->setProcessBuffers(inBuffer, outBuffer); +} + +Return AcousticEchoCancelerEffect::command(uint32_t commandId, const hidl_vec& data, + uint32_t resultMaxSize, command_cb _hidl_cb) { + return mEffect->command(commandId, data, resultMaxSize, _hidl_cb); +} + +Return AcousticEchoCancelerEffect::setParameter(const hidl_vec& parameter, + const hidl_vec& value) { + return mEffect->setParameter(parameter, value); +} + +Return AcousticEchoCancelerEffect::getParameter(const hidl_vec& parameter, + uint32_t valueMaxSize, + getParameter_cb _hidl_cb) { + return mEffect->getParameter(parameter, valueMaxSize, _hidl_cb); +} + +Return 
AcousticEchoCancelerEffect::getSupportedConfigsForFeature( + uint32_t featureId, uint32_t maxConfigs, uint32_t configSize, + getSupportedConfigsForFeature_cb _hidl_cb) { + return mEffect->getSupportedConfigsForFeature(featureId, maxConfigs, configSize, _hidl_cb); +} + +Return AcousticEchoCancelerEffect::getCurrentConfigForFeature( + uint32_t featureId, uint32_t configSize, getCurrentConfigForFeature_cb _hidl_cb) { + return mEffect->getCurrentConfigForFeature(featureId, configSize, _hidl_cb); +} + +Return AcousticEchoCancelerEffect::setCurrentConfigForFeature( + uint32_t featureId, const hidl_vec& configData) { + return mEffect->setCurrentConfigForFeature(featureId, configData); +} + +Return AcousticEchoCancelerEffect::close() { + return mEffect->close(); +} + +// Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IAcousticEchoCancelerEffect +// follow. +Return AcousticEchoCancelerEffect::setEchoDelay(uint32_t echoDelayMs) { + return mEffect->setParam(AEC_PARAM_ECHO_DELAY, echoDelayMs); +} + +Return AcousticEchoCancelerEffect::getEchoDelay(getEchoDelay_cb _hidl_cb) { + return mEffect->getIntegerParam(AEC_PARAM_ECHO_DELAY, _hidl_cb); +} + +} // namespace implementation +} // namespace AUDIO_HAL_VERSION +} // namespace effect +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/effect/all-versions/default/include/effect/all-versions/default/AudioBufferManager.h b/audio/effect/all-versions/default/include/effect/all-versions/default/AudioBufferManager.h new file mode 100644 index 0000000000000000000000000000000000000000..34dea2d03b57c916b1382365576b37a0f91ceea7 --- /dev/null +++ b/audio/effect/all-versions/default/include/effect/all-versions/default/AudioBufferManager.h @@ -0,0 +1,79 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include + +#include +#include +#include +#include +#include + +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::AudioBuffer; +using ::android::hidl::memory::V1_0::IMemory; + +namespace android { +namespace hardware { +namespace audio { +namespace effect { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +class AudioBufferWrapper : public RefBase { + public: + explicit AudioBufferWrapper(const AudioBuffer& buffer); + virtual ~AudioBufferWrapper(); + bool init(); + audio_buffer_t* getHalBuffer() { return &mHalBuffer; } + + private: + AudioBufferWrapper(const AudioBufferWrapper&) = delete; + void operator=(AudioBufferWrapper) = delete; + + AudioBuffer mHidlBuffer; + sp mHidlMemory; + audio_buffer_t mHalBuffer; +}; + +} // namespace implementation +} // namespace AUDIO_HAL_VERSION +} // namespace effect +} // namespace audio +} // namespace hardware +} // namespace android + +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::implementation::AudioBufferWrapper; + +namespace android { + +// This class needs to be in 'android' ns because Singleton macros require that. 
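As a sketch of how the manager declared below is meant to be reached (illustrative only, not part of the patch; the caller variable names and error handling are assumptions): the libutils Singleton machinery exposes it through getInstance(), which is why the class has to live directly in the android namespace.

// Hypothetical caller in the effect wrapper code:
sp<AudioBufferWrapper> wrapper;
if (!android::AudioBufferManager::getInstance().wrap(hidlBuffer, &wrapper)) {
    return Result::INVALID_ARGUMENTS;  // assumed error handling, not from the patch
}
audio_buffer_t* halBuffer = wrapper->getHalBuffer();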
+class AudioBufferManager : public Singleton<AudioBufferManager> {
+   public:
+    bool wrap(const AudioBuffer& buffer, sp<AudioBufferWrapper>* wrapper);
+
+   private:
+    friend class hardware::audio::effect::AUDIO_HAL_VERSION::implementation::AudioBufferWrapper;
+
+    // Called by AudioBufferWrapper.
+    void removeEntry(uint64_t id);
+
+    std::mutex mLock;
+    KeyedVector<uint64_t, wp<AudioBufferWrapper>> mBuffers;
+};
+
+}  // namespace android
diff --git a/audio/effect/all-versions/default/include/effect/all-versions/default/AudioBufferManager.impl.h b/audio/effect/all-versions/default/include/effect/all-versions/default/AudioBufferManager.impl.h
new file mode 100644
index 0000000000000000000000000000000000000000..71ccd2d52011f9054210d02b5daa37a1af2b6bad
--- /dev/null
+++ b/audio/effect/all-versions/default/include/effect/all-versions/default/AudioBufferManager.impl.h
@@ -0,0 +1,90 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include
+
+#include
+
+#include
+
+namespace android {
+
+ANDROID_SINGLETON_STATIC_INSTANCE(AudioBufferManager);
+
+bool AudioBufferManager::wrap(const AudioBuffer& buffer, sp<AudioBufferWrapper>* wrapper) {
+    // Check if we have this buffer already
+    std::lock_guard<std::mutex> lock(mLock);
+    ssize_t idx = mBuffers.indexOfKey(buffer.id);
+    if (idx >= 0) {
+        *wrapper = mBuffers[idx].promote();
+        if (*wrapper != nullptr) {
+            (*wrapper)->getHalBuffer()->frameCount = buffer.frameCount;
+            return true;
+        }
+        mBuffers.removeItemsAt(idx);
+    }
+    // Need to create and init a new AudioBufferWrapper.
+ sp tempBuffer(new AudioBufferWrapper(buffer)); + if (!tempBuffer->init()) return false; + *wrapper = tempBuffer; + mBuffers.add(buffer.id, *wrapper); + return true; +} + +void AudioBufferManager::removeEntry(uint64_t id) { + std::lock_guard lock(mLock); + ssize_t idx = mBuffers.indexOfKey(id); + if (idx >= 0) mBuffers.removeItemsAt(idx); +} + +namespace hardware { +namespace audio { +namespace effect { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +AudioBufferWrapper::AudioBufferWrapper(const AudioBuffer& buffer) + : mHidlBuffer(buffer), mHalBuffer{0, {nullptr}} {} + +AudioBufferWrapper::~AudioBufferWrapper() { + AudioBufferManager::getInstance().removeEntry(mHidlBuffer.id); +} + +bool AudioBufferWrapper::init() { + if (mHalBuffer.raw != nullptr) { + ALOGE("An attempt to init AudioBufferWrapper twice"); + return false; + } + mHidlMemory = mapMemory(mHidlBuffer.data); + if (mHidlMemory == nullptr) { + ALOGE("Could not map HIDL memory to IMemory"); + return false; + } + mHalBuffer.raw = static_cast(mHidlMemory->getPointer()); + if (mHalBuffer.raw == nullptr) { + ALOGE("IMemory buffer pointer is null"); + return false; + } + mHalBuffer.frameCount = mHidlBuffer.frameCount; + return true; +} + +} // namespace implementation +} // namespace AUDIO_HAL_VERSION +} // namespace effect +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/effect/all-versions/default/include/effect/all-versions/default/AutomaticGainControlEffect.h b/audio/effect/all-versions/default/include/effect/all-versions/default/AutomaticGainControlEffect.h new file mode 100644 index 0000000000000000000000000000000000000000..5ac43eb9bc846fb58a9ab9050496c22e9e4baba9 --- /dev/null +++ b/audio/effect/all-versions/default/include/effect/all-versions/default/AutomaticGainControlEffect.h @@ -0,0 +1,117 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include + +#include + +#include + +#include "VersionUtils.h" + +namespace android { +namespace hardware { +namespace audio { +namespace effect { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IAutomaticGainControlEffect; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::Result; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; + +struct AutomaticGainControlEffect : public IAutomaticGainControlEffect { + explicit AutomaticGainControlEffect(effect_handle_t handle); + + // Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffect follow. 
+    Return<Result> init() override;
+    Return<Result> setConfig(
+        const EffectConfig& config, const sp<IEffectBufferProviderCallback>& inputBufferProvider,
+        const sp<IEffectBufferProviderCallback>& outputBufferProvider) override;
+    Return<Result> reset() override;
+    Return<Result> enable() override;
+    Return<Result> disable() override;
+    Return<Result> setDevice(AudioDeviceBitfield device) override;
+    Return<void> setAndGetVolume(const hidl_vec<uint32_t>& volumes,
+                                 setAndGetVolume_cb _hidl_cb) override;
+    Return<Result> volumeChangeNotification(const hidl_vec<uint32_t>& volumes) override;
+    Return<Result> setAudioMode(AudioMode mode) override;
+    Return<Result> setConfigReverse(
+        const EffectConfig& config, const sp<IEffectBufferProviderCallback>& inputBufferProvider,
+        const sp<IEffectBufferProviderCallback>& outputBufferProvider) override;
+    Return<Result> setInputDevice(AudioDeviceBitfield device) override;
+    Return<void> getConfig(getConfig_cb _hidl_cb) override;
+    Return<void> getConfigReverse(getConfigReverse_cb _hidl_cb) override;
+    Return<void> getSupportedAuxChannelsConfigs(
+        uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) override;
+    Return<void> getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) override;
+    Return<Result> setAuxChannelsConfig(const EffectAuxChannelsConfig& config) override;
+    Return<Result> setAudioSource(AudioSource source) override;
+    Return<Result> offload(const EffectOffloadParameter& param) override;
+    Return<void> getDescriptor(getDescriptor_cb _hidl_cb) override;
+    Return<void> prepareForProcessing(prepareForProcessing_cb _hidl_cb) override;
+    Return<Result> setProcessBuffers(const AudioBuffer& inBuffer,
+                                     const AudioBuffer& outBuffer) override;
+    Return<void> command(uint32_t commandId, const hidl_vec<uint8_t>& data, uint32_t resultMaxSize,
+                         command_cb _hidl_cb) override;
+    Return<Result> setParameter(const hidl_vec<uint8_t>& parameter,
+                                const hidl_vec<uint8_t>& value) override;
+    Return<void> getParameter(const hidl_vec<uint8_t>& parameter, uint32_t valueMaxSize,
+                              getParameter_cb _hidl_cb) override;
+    Return<void> getSupportedConfigsForFeature(uint32_t featureId, uint32_t maxConfigs,
+                                               uint32_t configSize,
+                                               getSupportedConfigsForFeature_cb _hidl_cb) override;
+    Return<void> getCurrentConfigForFeature(uint32_t featureId, uint32_t configSize,
+                                            getCurrentConfigForFeature_cb _hidl_cb) override;
+    Return<Result> setCurrentConfigForFeature(uint32_t featureId,
+                                              const hidl_vec<uint8_t>& configData) override;
+    Return<Result> close() override;
+
+    // Methods from
+    // ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IAutomaticGainControlEffect follow.
+ Return setTargetLevel(int16_t targetLevelMb) override; + Return getTargetLevel(getTargetLevel_cb _hidl_cb) override; + Return setCompGain(int16_t compGainMb) override; + Return getCompGain(getCompGain_cb _hidl_cb) override; + Return setLimiterEnabled(bool enabled) override; + Return isLimiterEnabled(isLimiterEnabled_cb _hidl_cb) override; + Return setAllProperties( + const IAutomaticGainControlEffect::AllProperties& properties) override; + Return getAllProperties(getAllProperties_cb _hidl_cb) override; + + private: + sp mEffect; + + virtual ~AutomaticGainControlEffect(); + + void propertiesFromHal(const t_agc_settings& halProperties, + IAutomaticGainControlEffect::AllProperties* properties); + void propertiesToHal(const IAutomaticGainControlEffect::AllProperties& properties, + t_agc_settings* halProperties); +}; + +} // namespace implementation +} // namespace AUDIO_HAL_VERSION +} // namespace effect +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/effect/all-versions/default/include/effect/all-versions/default/AutomaticGainControlEffect.impl.h b/audio/effect/all-versions/default/include/effect/all-versions/default/AutomaticGainControlEffect.impl.h new file mode 100644 index 0000000000000000000000000000000000000000..e2e751e86b060bf9f9d1cb24195d9c2aa0828420 --- /dev/null +++ b/audio/effect/all-versions/default/include/effect/all-versions/default/AutomaticGainControlEffect.impl.h @@ -0,0 +1,226 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include + +#include "VersionUtils.h" + +namespace android { +namespace hardware { +namespace audio { +namespace effect { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +AutomaticGainControlEffect::AutomaticGainControlEffect(effect_handle_t handle) + : mEffect(new Effect(handle)) {} + +AutomaticGainControlEffect::~AutomaticGainControlEffect() {} + +void AutomaticGainControlEffect::propertiesFromHal( + const t_agc_settings& halProperties, IAutomaticGainControlEffect::AllProperties* properties) { + properties->targetLevelMb = halProperties.targetLevel; + properties->compGainMb = halProperties.compGain; + properties->limiterEnabled = halProperties.limiterEnabled; +} + +void AutomaticGainControlEffect::propertiesToHal( + const IAutomaticGainControlEffect::AllProperties& properties, t_agc_settings* halProperties) { + halProperties->targetLevel = properties.targetLevelMb; + halProperties->compGain = properties.compGainMb; + halProperties->limiterEnabled = properties.limiterEnabled; +} + +// Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffect follow. 
+Return AutomaticGainControlEffect::init() { + return mEffect->init(); +} + +Return AutomaticGainControlEffect::setConfig( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) { + return mEffect->setConfig(config, inputBufferProvider, outputBufferProvider); +} + +Return AutomaticGainControlEffect::reset() { + return mEffect->reset(); +} + +Return AutomaticGainControlEffect::enable() { + return mEffect->enable(); +} + +Return AutomaticGainControlEffect::disable() { + return mEffect->disable(); +} + +Return AutomaticGainControlEffect::setDevice(AudioDeviceBitfield device) { + return mEffect->setDevice(device); +} + +Return AutomaticGainControlEffect::setAndGetVolume(const hidl_vec& volumes, + setAndGetVolume_cb _hidl_cb) { + return mEffect->setAndGetVolume(volumes, _hidl_cb); +} + +Return AutomaticGainControlEffect::volumeChangeNotification( + const hidl_vec& volumes) { + return mEffect->volumeChangeNotification(volumes); +} + +Return AutomaticGainControlEffect::setAudioMode(AudioMode mode) { + return mEffect->setAudioMode(mode); +} + +Return AutomaticGainControlEffect::setConfigReverse( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) { + return mEffect->setConfigReverse(config, inputBufferProvider, outputBufferProvider); +} + +Return AutomaticGainControlEffect::setInputDevice(AudioDeviceBitfield device) { + return mEffect->setInputDevice(device); +} + +Return AutomaticGainControlEffect::getConfig(getConfig_cb _hidl_cb) { + return mEffect->getConfig(_hidl_cb); +} + +Return AutomaticGainControlEffect::getConfigReverse(getConfigReverse_cb _hidl_cb) { + return mEffect->getConfigReverse(_hidl_cb); +} + +Return AutomaticGainControlEffect::getSupportedAuxChannelsConfigs( + uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) { + return mEffect->getSupportedAuxChannelsConfigs(maxConfigs, _hidl_cb); +} + +Return AutomaticGainControlEffect::getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) { + return mEffect->getAuxChannelsConfig(_hidl_cb); +} + +Return AutomaticGainControlEffect::setAuxChannelsConfig( + const EffectAuxChannelsConfig& config) { + return mEffect->setAuxChannelsConfig(config); +} + +Return AutomaticGainControlEffect::setAudioSource(AudioSource source) { + return mEffect->setAudioSource(source); +} + +Return AutomaticGainControlEffect::offload(const EffectOffloadParameter& param) { + return mEffect->offload(param); +} + +Return AutomaticGainControlEffect::getDescriptor(getDescriptor_cb _hidl_cb) { + return mEffect->getDescriptor(_hidl_cb); +} + +Return AutomaticGainControlEffect::prepareForProcessing(prepareForProcessing_cb _hidl_cb) { + return mEffect->prepareForProcessing(_hidl_cb); +} + +Return AutomaticGainControlEffect::setProcessBuffers(const AudioBuffer& inBuffer, + const AudioBuffer& outBuffer) { + return mEffect->setProcessBuffers(inBuffer, outBuffer); +} + +Return AutomaticGainControlEffect::command(uint32_t commandId, const hidl_vec& data, + uint32_t resultMaxSize, command_cb _hidl_cb) { + return mEffect->command(commandId, data, resultMaxSize, _hidl_cb); +} + +Return AutomaticGainControlEffect::setParameter(const hidl_vec& parameter, + const hidl_vec& value) { + return mEffect->setParameter(parameter, value); +} + +Return AutomaticGainControlEffect::getParameter(const hidl_vec& parameter, + uint32_t valueMaxSize, + getParameter_cb _hidl_cb) { + return mEffect->getParameter(parameter, valueMaxSize, _hidl_cb); +} + +Return 
AutomaticGainControlEffect::getSupportedConfigsForFeature( + uint32_t featureId, uint32_t maxConfigs, uint32_t configSize, + getSupportedConfigsForFeature_cb _hidl_cb) { + return mEffect->getSupportedConfigsForFeature(featureId, maxConfigs, configSize, _hidl_cb); +} + +Return AutomaticGainControlEffect::getCurrentConfigForFeature( + uint32_t featureId, uint32_t configSize, getCurrentConfigForFeature_cb _hidl_cb) { + return mEffect->getCurrentConfigForFeature(featureId, configSize, _hidl_cb); +} + +Return AutomaticGainControlEffect::setCurrentConfigForFeature( + uint32_t featureId, const hidl_vec& configData) { + return mEffect->setCurrentConfigForFeature(featureId, configData); +} + +Return AutomaticGainControlEffect::close() { + return mEffect->close(); +} + +// Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IAutomaticGainControlEffect +// follow. +Return AutomaticGainControlEffect::setTargetLevel(int16_t targetLevelMb) { + return mEffect->setParam(AGC_PARAM_TARGET_LEVEL, targetLevelMb); +} + +Return AutomaticGainControlEffect::getTargetLevel(getTargetLevel_cb _hidl_cb) { + return mEffect->getIntegerParam(AGC_PARAM_TARGET_LEVEL, _hidl_cb); +} + +Return AutomaticGainControlEffect::setCompGain(int16_t compGainMb) { + return mEffect->setParam(AGC_PARAM_COMP_GAIN, compGainMb); +} + +Return AutomaticGainControlEffect::getCompGain(getCompGain_cb _hidl_cb) { + return mEffect->getIntegerParam(AGC_PARAM_COMP_GAIN, _hidl_cb); +} + +Return AutomaticGainControlEffect::setLimiterEnabled(bool enabled) { + return mEffect->setParam(AGC_PARAM_LIMITER_ENA, enabled); +} + +Return AutomaticGainControlEffect::isLimiterEnabled(isLimiterEnabled_cb _hidl_cb) { + return mEffect->getIntegerParam(AGC_PARAM_LIMITER_ENA, _hidl_cb); +} + +Return AutomaticGainControlEffect::setAllProperties( + const IAutomaticGainControlEffect::AllProperties& properties) { + t_agc_settings halProperties; + propertiesToHal(properties, &halProperties); + return mEffect->setParam(AGC_PARAM_PROPERTIES, halProperties); +} + +Return AutomaticGainControlEffect::getAllProperties(getAllProperties_cb _hidl_cb) { + t_agc_settings halProperties; + Result retval = mEffect->getParam(AGC_PARAM_PROPERTIES, halProperties); + AllProperties properties; + propertiesFromHal(halProperties, &properties); + _hidl_cb(retval, properties); + return Void(); +} + +} // namespace implementation +} // namespace AUDIO_HAL_VERSION +} // namespace effect +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/effect/all-versions/default/include/effect/all-versions/default/BassBoostEffect.h b/audio/effect/all-versions/default/include/effect/all-versions/default/BassBoostEffect.h new file mode 100644 index 0000000000000000000000000000000000000000..29173ddebeb481b8b5db699f6aa967a38d5599be --- /dev/null +++ b/audio/effect/all-versions/default/include/effect/all-versions/default/BassBoostEffect.h @@ -0,0 +1,103 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include + +#include + +#include "VersionUtils.h" + +namespace android { +namespace hardware { +namespace audio { +namespace effect { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IBassBoostEffect; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::Result; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; + +struct BassBoostEffect : public IBassBoostEffect { + explicit BassBoostEffect(effect_handle_t handle); + + // Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffect follow. + Return init() override; + Return setConfig( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) override; + Return reset() override; + Return enable() override; + Return disable() override; + Return setDevice(AudioDeviceBitfield device) override; + Return setAndGetVolume(const hidl_vec& volumes, + setAndGetVolume_cb _hidl_cb) override; + Return volumeChangeNotification(const hidl_vec& volumes) override; + Return setAudioMode(AudioMode mode) override; + Return setConfigReverse( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) override; + Return setInputDevice(AudioDeviceBitfield device) override; + Return getConfig(getConfig_cb _hidl_cb) override; + Return getConfigReverse(getConfigReverse_cb _hidl_cb) override; + Return getSupportedAuxChannelsConfigs( + uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) override; + Return getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) override; + Return setAuxChannelsConfig(const EffectAuxChannelsConfig& config) override; + Return setAudioSource(AudioSource source) override; + Return offload(const EffectOffloadParameter& param) override; + Return getDescriptor(getDescriptor_cb _hidl_cb) override; + Return prepareForProcessing(prepareForProcessing_cb _hidl_cb) override; + Return setProcessBuffers(const AudioBuffer& inBuffer, + const AudioBuffer& outBuffer) override; + Return command(uint32_t commandId, const hidl_vec& data, uint32_t resultMaxSize, + command_cb _hidl_cb) override; + Return setParameter(const hidl_vec& parameter, + const hidl_vec& value) override; + Return getParameter(const hidl_vec& parameter, uint32_t valueMaxSize, + getParameter_cb _hidl_cb) override; + Return getSupportedConfigsForFeature(uint32_t featureId, uint32_t maxConfigs, + uint32_t configSize, + getSupportedConfigsForFeature_cb _hidl_cb) override; + Return getCurrentConfigForFeature(uint32_t featureId, uint32_t configSize, + getCurrentConfigForFeature_cb _hidl_cb) override; + Return setCurrentConfigForFeature(uint32_t featureId, + const hidl_vec& configData) override; + Return close() override; + + // Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IBassBoostEffect follow. 
+ Return isStrengthSupported(isStrengthSupported_cb _hidl_cb) override; + Return setStrength(uint16_t strength) override; + Return getStrength(getStrength_cb _hidl_cb) override; + + private: + sp mEffect; + + virtual ~BassBoostEffect(); +}; + +} // namespace implementation +} // namespace AUDIO_HAL_VERSION +} // namespace effect +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/effect/all-versions/default/include/effect/all-versions/default/BassBoostEffect.impl.h b/audio/effect/all-versions/default/include/effect/all-versions/default/BassBoostEffect.impl.h new file mode 100644 index 0000000000000000000000000000000000000000..7bcb4a34970ab90ff1f1a9d4024a56e2c63ec04e --- /dev/null +++ b/audio/effect/all-versions/default/include/effect/all-versions/default/BassBoostEffect.impl.h @@ -0,0 +1,180 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include +#include + +#include "VersionUtils.h" + +namespace android { +namespace hardware { +namespace audio { +namespace effect { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +BassBoostEffect::BassBoostEffect(effect_handle_t handle) : mEffect(new Effect(handle)) {} + +BassBoostEffect::~BassBoostEffect() {} + +// Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffect follow. 
+Return BassBoostEffect::init() { + return mEffect->init(); +} + +Return BassBoostEffect::setConfig( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) { + return mEffect->setConfig(config, inputBufferProvider, outputBufferProvider); +} + +Return BassBoostEffect::reset() { + return mEffect->reset(); +} + +Return BassBoostEffect::enable() { + return mEffect->enable(); +} + +Return BassBoostEffect::disable() { + return mEffect->disable(); +} + +Return BassBoostEffect::setDevice(AudioDeviceBitfield device) { + return mEffect->setDevice(device); +} + +Return BassBoostEffect::setAndGetVolume(const hidl_vec& volumes, + setAndGetVolume_cb _hidl_cb) { + return mEffect->setAndGetVolume(volumes, _hidl_cb); +} + +Return BassBoostEffect::volumeChangeNotification(const hidl_vec& volumes) { + return mEffect->volumeChangeNotification(volumes); +} + +Return BassBoostEffect::setAudioMode(AudioMode mode) { + return mEffect->setAudioMode(mode); +} + +Return BassBoostEffect::setConfigReverse( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) { + return mEffect->setConfigReverse(config, inputBufferProvider, outputBufferProvider); +} + +Return BassBoostEffect::setInputDevice(AudioDeviceBitfield device) { + return mEffect->setInputDevice(device); +} + +Return BassBoostEffect::getConfig(getConfig_cb _hidl_cb) { + return mEffect->getConfig(_hidl_cb); +} + +Return BassBoostEffect::getConfigReverse(getConfigReverse_cb _hidl_cb) { + return mEffect->getConfigReverse(_hidl_cb); +} + +Return BassBoostEffect::getSupportedAuxChannelsConfigs( + uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) { + return mEffect->getSupportedAuxChannelsConfigs(maxConfigs, _hidl_cb); +} + +Return BassBoostEffect::getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) { + return mEffect->getAuxChannelsConfig(_hidl_cb); +} + +Return BassBoostEffect::setAuxChannelsConfig(const EffectAuxChannelsConfig& config) { + return mEffect->setAuxChannelsConfig(config); +} + +Return BassBoostEffect::setAudioSource(AudioSource source) { + return mEffect->setAudioSource(source); +} + +Return BassBoostEffect::offload(const EffectOffloadParameter& param) { + return mEffect->offload(param); +} + +Return BassBoostEffect::getDescriptor(getDescriptor_cb _hidl_cb) { + return mEffect->getDescriptor(_hidl_cb); +} + +Return BassBoostEffect::prepareForProcessing(prepareForProcessing_cb _hidl_cb) { + return mEffect->prepareForProcessing(_hidl_cb); +} + +Return BassBoostEffect::setProcessBuffers(const AudioBuffer& inBuffer, + const AudioBuffer& outBuffer) { + return mEffect->setProcessBuffers(inBuffer, outBuffer); +} + +Return BassBoostEffect::command(uint32_t commandId, const hidl_vec& data, + uint32_t resultMaxSize, command_cb _hidl_cb) { + return mEffect->command(commandId, data, resultMaxSize, _hidl_cb); +} + +Return BassBoostEffect::setParameter(const hidl_vec& parameter, + const hidl_vec& value) { + return mEffect->setParameter(parameter, value); +} + +Return BassBoostEffect::getParameter(const hidl_vec& parameter, + uint32_t valueMaxSize, getParameter_cb _hidl_cb) { + return mEffect->getParameter(parameter, valueMaxSize, _hidl_cb); +} + +Return BassBoostEffect::getSupportedConfigsForFeature( + uint32_t featureId, uint32_t maxConfigs, uint32_t configSize, + getSupportedConfigsForFeature_cb _hidl_cb) { + return mEffect->getSupportedConfigsForFeature(featureId, maxConfigs, configSize, _hidl_cb); +} + +Return BassBoostEffect::getCurrentConfigForFeature(uint32_t 
featureId, uint32_t configSize, + getCurrentConfigForFeature_cb _hidl_cb) { + return mEffect->getCurrentConfigForFeature(featureId, configSize, _hidl_cb); +} + +Return BassBoostEffect::setCurrentConfigForFeature(uint32_t featureId, + const hidl_vec& configData) { + return mEffect->setCurrentConfigForFeature(featureId, configData); +} + +Return BassBoostEffect::close() { + return mEffect->close(); +} + +// Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IBassBoostEffect follow. +Return BassBoostEffect::isStrengthSupported(isStrengthSupported_cb _hidl_cb) { + return mEffect->getIntegerParam(BASSBOOST_PARAM_STRENGTH_SUPPORTED, _hidl_cb); +} + +Return BassBoostEffect::setStrength(uint16_t strength) { + return mEffect->setParam(BASSBOOST_PARAM_STRENGTH, strength); +} + +Return BassBoostEffect::getStrength(getStrength_cb _hidl_cb) { + return mEffect->getIntegerParam(BASSBOOST_PARAM_STRENGTH, _hidl_cb); +} + +} // namespace implementation +} // namespace AUDIO_HAL_VERSION +} // namespace effect +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/effect/all-versions/default/include/effect/all-versions/default/Conversions.h b/audio/effect/all-versions/default/include/effect/all-versions/default/Conversions.h new file mode 100644 index 0000000000000000000000000000000000000000..3f9317f763520aca9347124174313fb637a3ac54 --- /dev/null +++ b/audio/effect/all-versions/default/include/effect/all-versions/default/Conversions.h @@ -0,0 +1,41 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include + +#include + +namespace android { +namespace hardware { +namespace audio { +namespace effect { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::EffectDescriptor; + +void effectDescriptorFromHal(const effect_descriptor_t& halDescriptor, + EffectDescriptor* descriptor); +std::string uuidToString(const effect_uuid_t& halUuid); + +} // namespace implementation +} // namespace AUDIO_HAL_VERSION +} // namespace effect +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/effect/all-versions/default/include/effect/all-versions/default/Conversions.impl.h b/audio/effect/all-versions/default/include/effect/all-versions/default/Conversions.impl.h new file mode 100644 index 0000000000000000000000000000000000000000..de67d89dda773d22f3ecb22a7263dcae2db8a3bf --- /dev/null +++ b/audio/effect/all-versions/default/include/effect/all-versions/default/Conversions.impl.h @@ -0,0 +1,59 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include +#include + +#include + +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::HidlUtils; +using ::android::hardware::audio::common::utils::mkEnumConverter; + +namespace android { +namespace hardware { +namespace audio { +namespace effect { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +void effectDescriptorFromHal(const effect_descriptor_t& halDescriptor, + EffectDescriptor* descriptor) { + HidlUtils::uuidFromHal(halDescriptor.type, &descriptor->type); + HidlUtils::uuidFromHal(halDescriptor.uuid, &descriptor->uuid); + descriptor->flags = mkEnumConverter(halDescriptor.flags); + descriptor->cpuLoad = halDescriptor.cpuLoad; + descriptor->memoryUsage = halDescriptor.memoryUsage; + memcpy(descriptor->name.data(), halDescriptor.name, descriptor->name.size()); + memcpy(descriptor->implementor.data(), halDescriptor.implementor, + descriptor->implementor.size()); +} + +std::string uuidToString(const effect_uuid_t& halUuid) { + char str[64]; + snprintf(str, sizeof(str), "%08x-%04x-%04x-%04x-%02x%02x%02x%02x%02x%02x", halUuid.timeLow, + halUuid.timeMid, halUuid.timeHiAndVersion, halUuid.clockSeq, halUuid.node[0], + halUuid.node[1], halUuid.node[2], halUuid.node[3], halUuid.node[4], halUuid.node[5]); + return str; +} + +} // namespace implementation +} // namespace AUDIO_HAL_VERSION +} // namespace effect +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/effect/all-versions/default/include/effect/all-versions/default/DownmixEffect.h b/audio/effect/all-versions/default/include/effect/all-versions/default/DownmixEffect.h new file mode 100644 index 0000000000000000000000000000000000000000..3e3aa78477c5c66bd73947c8cd418cb68d923089 --- /dev/null +++ b/audio/effect/all-versions/default/include/effect/all-versions/default/DownmixEffect.h @@ -0,0 +1,102 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include + +#include + +#include "VersionUtils.h" + +namespace android { +namespace hardware { +namespace audio { +namespace effect { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IDownmixEffect; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::Result; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; + +struct DownmixEffect : public IDownmixEffect { + explicit DownmixEffect(effect_handle_t handle); + + // Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffect follow. + Return init() override; + Return setConfig( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) override; + Return reset() override; + Return enable() override; + Return disable() override; + Return setDevice(AudioDeviceBitfield device) override; + Return setAndGetVolume(const hidl_vec& volumes, + setAndGetVolume_cb _hidl_cb) override; + Return volumeChangeNotification(const hidl_vec& volumes) override; + Return setAudioMode(AudioMode mode) override; + Return setConfigReverse( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) override; + Return setInputDevice(AudioDeviceBitfield device) override; + Return getConfig(getConfig_cb _hidl_cb) override; + Return getConfigReverse(getConfigReverse_cb _hidl_cb) override; + Return getSupportedAuxChannelsConfigs( + uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) override; + Return getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) override; + Return setAuxChannelsConfig(const EffectAuxChannelsConfig& config) override; + Return setAudioSource(AudioSource source) override; + Return offload(const EffectOffloadParameter& param) override; + Return getDescriptor(getDescriptor_cb _hidl_cb) override; + Return prepareForProcessing(prepareForProcessing_cb _hidl_cb) override; + Return setProcessBuffers(const AudioBuffer& inBuffer, + const AudioBuffer& outBuffer) override; + Return command(uint32_t commandId, const hidl_vec& data, uint32_t resultMaxSize, + command_cb _hidl_cb) override; + Return setParameter(const hidl_vec& parameter, + const hidl_vec& value) override; + Return getParameter(const hidl_vec& parameter, uint32_t valueMaxSize, + getParameter_cb _hidl_cb) override; + Return getSupportedConfigsForFeature(uint32_t featureId, uint32_t maxConfigs, + uint32_t configSize, + getSupportedConfigsForFeature_cb _hidl_cb) override; + Return getCurrentConfigForFeature(uint32_t featureId, uint32_t configSize, + getCurrentConfigForFeature_cb _hidl_cb) override; + Return setCurrentConfigForFeature(uint32_t featureId, + const hidl_vec& configData) override; + Return close() override; + + // Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IDownmixEffect follow. 
+ Return setType(IDownmixEffect::Type preset) override; + Return getType(getType_cb _hidl_cb) override; + + private: + sp mEffect; + + virtual ~DownmixEffect(); +}; + +} // namespace implementation +} // namespace AUDIO_HAL_VERSION +} // namespace effect +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/effect/all-versions/default/include/effect/all-versions/default/DownmixEffect.impl.h b/audio/effect/all-versions/default/include/effect/all-versions/default/DownmixEffect.impl.h new file mode 100644 index 0000000000000000000000000000000000000000..abef10ea09bb2f662f92c0059558f2bfefc1a24d --- /dev/null +++ b/audio/effect/all-versions/default/include/effect/all-versions/default/DownmixEffect.impl.h @@ -0,0 +1,179 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include +#include + +#include "VersionUtils.h" + +namespace android { +namespace hardware { +namespace audio { +namespace effect { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +DownmixEffect::DownmixEffect(effect_handle_t handle) : mEffect(new Effect(handle)) {} + +DownmixEffect::~DownmixEffect() {} + +// Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffect follow. 
+Return DownmixEffect::init() { + return mEffect->init(); +} + +Return DownmixEffect::setConfig( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) { + return mEffect->setConfig(config, inputBufferProvider, outputBufferProvider); +} + +Return DownmixEffect::reset() { + return mEffect->reset(); +} + +Return DownmixEffect::enable() { + return mEffect->enable(); +} + +Return DownmixEffect::disable() { + return mEffect->disable(); +} + +Return DownmixEffect::setDevice(AudioDeviceBitfield device) { + return mEffect->setDevice(device); +} + +Return DownmixEffect::setAndGetVolume(const hidl_vec& volumes, + setAndGetVolume_cb _hidl_cb) { + return mEffect->setAndGetVolume(volumes, _hidl_cb); +} + +Return DownmixEffect::volumeChangeNotification(const hidl_vec& volumes) { + return mEffect->volumeChangeNotification(volumes); +} + +Return DownmixEffect::setAudioMode(AudioMode mode) { + return mEffect->setAudioMode(mode); +} + +Return DownmixEffect::setConfigReverse( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) { + return mEffect->setConfigReverse(config, inputBufferProvider, outputBufferProvider); +} + +Return DownmixEffect::setInputDevice(AudioDeviceBitfield device) { + return mEffect->setInputDevice(device); +} + +Return DownmixEffect::getConfig(getConfig_cb _hidl_cb) { + return mEffect->getConfig(_hidl_cb); +} + +Return DownmixEffect::getConfigReverse(getConfigReverse_cb _hidl_cb) { + return mEffect->getConfigReverse(_hidl_cb); +} + +Return DownmixEffect::getSupportedAuxChannelsConfigs( + uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) { + return mEffect->getSupportedAuxChannelsConfigs(maxConfigs, _hidl_cb); +} + +Return DownmixEffect::getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) { + return mEffect->getAuxChannelsConfig(_hidl_cb); +} + +Return DownmixEffect::setAuxChannelsConfig(const EffectAuxChannelsConfig& config) { + return mEffect->setAuxChannelsConfig(config); +} + +Return DownmixEffect::setAudioSource(AudioSource source) { + return mEffect->setAudioSource(source); +} + +Return DownmixEffect::offload(const EffectOffloadParameter& param) { + return mEffect->offload(param); +} + +Return DownmixEffect::getDescriptor(getDescriptor_cb _hidl_cb) { + return mEffect->getDescriptor(_hidl_cb); +} + +Return DownmixEffect::prepareForProcessing(prepareForProcessing_cb _hidl_cb) { + return mEffect->prepareForProcessing(_hidl_cb); +} + +Return DownmixEffect::setProcessBuffers(const AudioBuffer& inBuffer, + const AudioBuffer& outBuffer) { + return mEffect->setProcessBuffers(inBuffer, outBuffer); +} + +Return DownmixEffect::command(uint32_t commandId, const hidl_vec& data, + uint32_t resultMaxSize, command_cb _hidl_cb) { + return mEffect->command(commandId, data, resultMaxSize, _hidl_cb); +} + +Return DownmixEffect::setParameter(const hidl_vec& parameter, + const hidl_vec& value) { + return mEffect->setParameter(parameter, value); +} + +Return DownmixEffect::getParameter(const hidl_vec& parameter, uint32_t valueMaxSize, + getParameter_cb _hidl_cb) { + return mEffect->getParameter(parameter, valueMaxSize, _hidl_cb); +} + +Return DownmixEffect::getSupportedConfigsForFeature( + uint32_t featureId, uint32_t maxConfigs, uint32_t configSize, + getSupportedConfigsForFeature_cb _hidl_cb) { + return mEffect->getSupportedConfigsForFeature(featureId, maxConfigs, configSize, _hidl_cb); +} + +Return DownmixEffect::getCurrentConfigForFeature(uint32_t featureId, uint32_t configSize, + 
getCurrentConfigForFeature_cb _hidl_cb) { + return mEffect->getCurrentConfigForFeature(featureId, configSize, _hidl_cb); +} + +Return DownmixEffect::setCurrentConfigForFeature(uint32_t featureId, + const hidl_vec& configData) { + return mEffect->setCurrentConfigForFeature(featureId, configData); +} + +Return DownmixEffect::close() { + return mEffect->close(); +} + +// Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IDownmixEffect follow. +Return DownmixEffect::setType(IDownmixEffect::Type preset) { + return mEffect->setParam(DOWNMIX_PARAM_TYPE, static_cast(preset)); +} + +Return DownmixEffect::getType(getType_cb _hidl_cb) { + downmix_type_t halPreset = DOWNMIX_TYPE_INVALID; + Result retval = mEffect->getParam(DOWNMIX_PARAM_TYPE, halPreset); + _hidl_cb(retval, Type(halPreset)); + return Void(); +} + +} // namespace implementation +} // namespace AUDIO_HAL_VERSION +} // namespace effect +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/effect/all-versions/default/include/effect/all-versions/default/Effect.h b/audio/effect/all-versions/default/include/effect/all-versions/default/Effect.h new file mode 100644 index 0000000000000000000000000000000000000000..b546e0eb3234de2123907f9c24fe55e8b1c5ef75 --- /dev/null +++ b/audio/effect/all-versions/default/include/effect/all-versions/default/Effect.h @@ -0,0 +1,241 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include +#include +#include + +#include +#include +#include +#include +#include + +#include + +#include "VersionUtils.h" + +namespace android { +namespace hardware { +namespace audio { +namespace effect { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioDevice; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioMode; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioSource; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::Uuid; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::implementation::AudioDeviceBitfield; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::AudioBuffer; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::EffectAuxChannelsConfig; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::EffectConfig; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::EffectDescriptor; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::EffectFeature; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::EffectOffloadParameter; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffect; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffectBufferProviderCallback; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::Result; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; + +struct Effect : public IEffect { + typedef MessageQueue StatusMQ; + using GetParameterSuccessCallback = + std::function; + + explicit Effect(effect_handle_t handle); + + // Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffect follow. 
+ Return init() override; + Return setConfig( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) override; + Return reset() override; + Return enable() override; + Return disable() override; + Return setDevice(AudioDeviceBitfield device) override; + Return setAndGetVolume(const hidl_vec& volumes, + setAndGetVolume_cb _hidl_cb) override; + Return volumeChangeNotification(const hidl_vec& volumes) override; + Return setAudioMode(AudioMode mode) override; + Return setConfigReverse( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) override; + Return setInputDevice(AudioDeviceBitfield device) override; + Return getConfig(getConfig_cb _hidl_cb) override; + Return getConfigReverse(getConfigReverse_cb _hidl_cb) override; + Return getSupportedAuxChannelsConfigs( + uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) override; + Return getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) override; + Return setAuxChannelsConfig(const EffectAuxChannelsConfig& config) override; + Return setAudioSource(AudioSource source) override; + Return offload(const EffectOffloadParameter& param) override; + Return getDescriptor(getDescriptor_cb _hidl_cb) override; + Return prepareForProcessing(prepareForProcessing_cb _hidl_cb) override; + Return setProcessBuffers(const AudioBuffer& inBuffer, + const AudioBuffer& outBuffer) override; + Return command(uint32_t commandId, const hidl_vec& data, uint32_t resultMaxSize, + command_cb _hidl_cb) override; + Return setParameter(const hidl_vec& parameter, + const hidl_vec& value) override; + Return getParameter(const hidl_vec& parameter, uint32_t valueMaxSize, + getParameter_cb _hidl_cb) override; + Return getSupportedConfigsForFeature(uint32_t featureId, uint32_t maxConfigs, + uint32_t configSize, + getSupportedConfigsForFeature_cb _hidl_cb) override; + Return getCurrentConfigForFeature(uint32_t featureId, uint32_t configSize, + getCurrentConfigForFeature_cb _hidl_cb) override; + Return setCurrentConfigForFeature(uint32_t featureId, + const hidl_vec& configData) override; + Return close() override; + + // Utility methods for extending interfaces. 
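The utility methods below funnel every typed get/set into the legacy effect_param_t protocol: the 32-bit parameter ID(s) are written first, the value follows after padding to a 32-bit boundary, and the resulting blob is sent via EFFECT_CMD_SET_PARAM / EFFECT_CMD_GET_PARAM. A simplified sketch of the buffer layout that parameterToHal() assembles, assuming a single uint32_t parameter ID; makeParamBuffer is an illustrative name, not part of the patch.

    #include <cstring>
    #include <vector>

    #include <hardware/audio_effect.h>  // effect_param_t

    std::vector<uint8_t> makeParamBuffer(uint32_t paramId, uint16_t value) {
        const uint32_t psize = sizeof(paramId);
        const uint32_t vsize = sizeof(value);
        // The value area starts after the parameter area, rounded up to a 32-bit boundary.
        const size_t valueOffset =
            ((psize + sizeof(uint32_t) - 1) / sizeof(uint32_t)) * sizeof(uint32_t);
        std::vector<uint8_t> buf(sizeof(effect_param_t) + valueOffset + vsize, 0);
        effect_param_t* p = reinterpret_cast<effect_param_t*>(buf.data());
        p->psize = psize;
        p->vsize = vsize;
        std::memcpy(p->data, &paramId, psize);
        std::memcpy(p->data + valueOffset, &value, vsize);
        return buf;
    }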
+    template <typename T>
+    Return<void> getIntegerParam(uint32_t paramId,
+                                 std::function<void(Result retval, T paramValue)> cb) {
+        T value;
+        Result retval = getParameterImpl(sizeof(uint32_t), &paramId, sizeof(T),
+                                         [&](uint32_t valueSize, const void* valueData) {
+                                             if (valueSize > sizeof(T)) valueSize = sizeof(T);
+                                             memcpy(&value, valueData, valueSize);
+                                         });
+        cb(retval, value);
+        return Void();
+    }
+
+    template <typename T>
+    Result getParam(uint32_t paramId, T& paramValue) {
+        return getParameterImpl(sizeof(uint32_t), &paramId, sizeof(T),
+                                [&](uint32_t valueSize, const void* valueData) {
+                                    if (valueSize > sizeof(T)) valueSize = sizeof(T);
+                                    memcpy(&paramValue, valueData, valueSize);
+                                });
+    }
+
+    template <typename T>
+    Result getParam(uint32_t paramId, uint32_t paramArg, T& paramValue) {
+        uint32_t params[2] = {paramId, paramArg};
+        return getParameterImpl(sizeof(params), params, sizeof(T),
+                                [&](uint32_t valueSize, const void* valueData) {
+                                    if (valueSize > sizeof(T)) valueSize = sizeof(T);
+                                    memcpy(&paramValue, valueData, valueSize);
+                                });
+    }
+
+    template <typename T>
+    Result setParam(uint32_t paramId, const T& paramValue) {
+        return setParameterImpl(sizeof(uint32_t), &paramId, sizeof(T), &paramValue);
+    }
+
+    template <typename T>
+    Result setParam(uint32_t paramId, uint32_t paramArg, const T& paramValue) {
+        uint32_t params[2] = {paramId, paramArg};
+        return setParameterImpl(sizeof(params), params, sizeof(T), &paramValue);
+    }
+
+    Result getParameterImpl(uint32_t paramSize, const void* paramData, uint32_t valueSize,
+                            GetParameterSuccessCallback onSuccess) {
+        return getParameterImpl(paramSize, paramData, valueSize, valueSize, onSuccess);
+    }
+    Result getParameterImpl(uint32_t paramSize, const void* paramData, uint32_t requestValueSize,
+                            uint32_t replyValueSize, GetParameterSuccessCallback onSuccess);
+    Result setParameterImpl(uint32_t paramSize, const void* paramData, uint32_t valueSize,
+                            const void* valueData);
+
+   private:
+    friend struct VirtualizerEffect;  // for getParameterImpl
+    friend struct VisualizerEffect;   // to allow executing commands
+
+    using CommandSuccessCallback = std::function<void()>;
+    using GetConfigCallback = std::function<void(Result retval, const EffectConfig& config)>;
+    using GetCurrentConfigSuccessCallback = std::function<void(void* configData)>;
+    using GetSupportedConfigsSuccessCallback =
+        std::function<void(uint32_t supportedConfigs, void* configsData)>;
+
+    static const char* sContextResultOfCommand;
+    static const char* sContextCallToCommand;
+    static const char* sContextCallFunction;
+
+    bool mIsClosed;
+    effect_handle_t mHandle;
+    sp<AudioBufferWrapper> mInBuffer;
+    sp<AudioBufferWrapper> mOutBuffer;
+    std::atomic<audio_buffer_t*> mHalInBufferPtr;
+    std::atomic<audio_buffer_t*> mHalOutBufferPtr;
+    std::unique_ptr<StatusMQ> mStatusMQ;
+    EventFlag* mEfGroup;
+    std::atomic<bool> mStopProcessThread;
+    sp<Thread> mProcessThread;
+
+    virtual ~Effect();
+
+    template <typename T>
+    static size_t alignedSizeIn(size_t s);
+    template <typename T>
+    std::unique_ptr<uint8_t[]> hidlVecToHal(const hidl_vec<T>& vec, uint32_t* halDataSize);
+    static void effectAuxChannelsConfigFromHal(const channel_config_t& halConfig,
+                                               EffectAuxChannelsConfig* config);
+    static void effectAuxChannelsConfigToHal(const EffectAuxChannelsConfig& config,
+                                             channel_config_t* halConfig);
+    static void effectBufferConfigFromHal(const buffer_config_t& halConfig,
+                                          EffectBufferConfig* config);
+    static void effectBufferConfigToHal(const EffectBufferConfig& config,
+                                        buffer_config_t* halConfig);
+    static void effectConfigFromHal(const effect_config_t& halConfig, EffectConfig* config);
+    static void effectConfigToHal(const EffectConfig& config, effect_config_t* halConfig);
+    static void effectOffloadParamToHal(const EffectOffloadParameter& offload,
+                                        effect_offload_param_t* halOffload);
+    static std::vector<uint8_t> parameterToHal(uint32_t paramSize, const void* paramData,
+                                               uint32_t
valueSize, const void** valueData); + + Result analyzeCommandStatus(const char* commandName, const char* context, status_t status); + Result analyzeStatus(const char* funcName, const char* subFuncName, + const char* contextDescription, status_t status); + void getConfigImpl(int commandCode, const char* commandName, GetConfigCallback cb); + Result getCurrentConfigImpl(uint32_t featureId, uint32_t configSize, + GetCurrentConfigSuccessCallback onSuccess); + Result getSupportedConfigsImpl(uint32_t featureId, uint32_t maxConfigs, uint32_t configSize, + GetSupportedConfigsSuccessCallback onSuccess); + Result sendCommand(int commandCode, const char* commandName); + Result sendCommand(int commandCode, const char* commandName, uint32_t size, void* data); + Result sendCommandReturningData(int commandCode, const char* commandName, uint32_t* replySize, + void* replyData); + Result sendCommandReturningData(int commandCode, const char* commandName, uint32_t size, + void* data, uint32_t* replySize, void* replyData); + Result sendCommandReturningStatus(int commandCode, const char* commandName); + Result sendCommandReturningStatus(int commandCode, const char* commandName, uint32_t size, + void* data); + Result sendCommandReturningStatusAndData(int commandCode, const char* commandName, + uint32_t size, void* data, uint32_t* replySize, + void* replyData, uint32_t minReplySize, + CommandSuccessCallback onSuccess); + Result setConfigImpl(int commandCode, const char* commandName, const EffectConfig& config, + const sp& inputBufferProvider, + const sp& outputBufferProvider); +}; + +} // namespace implementation +} // namespace AUDIO_HAL_VERSION +} // namespace effect +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/effect/all-versions/default/include/effect/all-versions/default/Effect.impl.h b/audio/effect/all-versions/default/include/effect/all-versions/default/Effect.impl.h new file mode 100644 index 0000000000000000000000000000000000000000..61c9805663b78bd2b45a6fb8d9f440f24b536195 --- /dev/null +++ b/audio/effect/all-versions/default/include/effect/all-versions/default/Effect.impl.h @@ -0,0 +1,714 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include + +#define ATRACE_TAG ATRACE_TAG_AUDIO + +#include +#include +#include + +#include "VersionUtils.h" + +namespace android { +namespace hardware { +namespace audio { +namespace effect { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioChannelMask; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioFormat; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::implementation::AudioChannelBitfield; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::MessageQueueFlagBits; + +namespace { + +class ProcessThread : public Thread { + public: + // ProcessThread's lifespan never exceeds Effect's lifespan. 
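The ProcessThread below implements the HAL side of the handshake established by prepareForProcessing(): the client maps the shared audio buffers with setProcessBuffers(), raises REQUEST_PROCESS (or REQUEST_PROCESS_REVERSE) on the event flag, waits for DONE_PROCESSING, and then reads one Result from the single-slot status queue. A rough client-side sketch of one processing cycle, assuming the StatusMQ and EventFlag obtained from prepareForProcessing(); this is an illustration, not code from the patch.

    Result processOnce(Effect::StatusMQ* statusMQ, EventFlag* efGroup) {
        // Ask the effect's ProcessThread to run process() on the buffers that were
        // previously shared through setProcessBuffers().
        efGroup->wake(static_cast<uint32_t>(MessageQueueFlagBits::REQUEST_PROCESS));

        // Wait until the thread reports completion...
        uint32_t state = 0;
        efGroup->wait(static_cast<uint32_t>(MessageQueueFlagBits::DONE_PROCESSING), &state);

        // ...then pick up the status it pushed into the one-element message queue.
        Result retval = Result::NOT_INITIALIZED;
        statusMQ->read(&retval);
        return retval;
    }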
+ ProcessThread(std::atomic* stop, effect_handle_t effect, + std::atomic* inBuffer, std::atomic* outBuffer, + Effect::StatusMQ* statusMQ, EventFlag* efGroup) + : Thread(false /*canCallJava*/), + mStop(stop), + mEffect(effect), + mHasProcessReverse((*mEffect)->process_reverse != NULL), + mInBuffer(inBuffer), + mOutBuffer(outBuffer), + mStatusMQ(statusMQ), + mEfGroup(efGroup) {} + virtual ~ProcessThread() {} + + private: + std::atomic* mStop; + effect_handle_t mEffect; + bool mHasProcessReverse; + std::atomic* mInBuffer; + std::atomic* mOutBuffer; + Effect::StatusMQ* mStatusMQ; + EventFlag* mEfGroup; + + bool threadLoop() override; +}; + +bool ProcessThread::threadLoop() { + // This implementation doesn't return control back to the Thread until it decides to stop, + // as the Thread uses mutexes, and this can lead to priority inversion. + while (!std::atomic_load_explicit(mStop, std::memory_order_acquire)) { + uint32_t efState = 0; + mEfGroup->wait(static_cast(MessageQueueFlagBits::REQUEST_PROCESS_ALL), &efState); + if (!(efState & static_cast(MessageQueueFlagBits::REQUEST_PROCESS_ALL)) || + (efState & static_cast(MessageQueueFlagBits::REQUEST_QUIT))) { + continue; // Nothing to do or time to quit. + } + Result retval = Result::OK; + if (efState & static_cast(MessageQueueFlagBits::REQUEST_PROCESS_REVERSE) && + !mHasProcessReverse) { + retval = Result::NOT_SUPPORTED; + } + + if (retval == Result::OK) { + // affects both buffer pointers and their contents. + std::atomic_thread_fence(std::memory_order_acquire); + int32_t processResult; + audio_buffer_t* inBuffer = + std::atomic_load_explicit(mInBuffer, std::memory_order_relaxed); + audio_buffer_t* outBuffer = + std::atomic_load_explicit(mOutBuffer, std::memory_order_relaxed); + if (inBuffer != nullptr && outBuffer != nullptr) { + if (efState & static_cast(MessageQueueFlagBits::REQUEST_PROCESS)) { + processResult = (*mEffect)->process(mEffect, inBuffer, outBuffer); + } else { + processResult = (*mEffect)->process_reverse(mEffect, inBuffer, outBuffer); + } + std::atomic_thread_fence(std::memory_order_release); + } else { + ALOGE("processing buffers were not set before calling 'process'"); + processResult = -ENODEV; + } + switch (processResult) { + case 0: + retval = Result::OK; + break; + case -ENODATA: + retval = Result::INVALID_STATE; + break; + case -EINVAL: + retval = Result::INVALID_ARGUMENTS; + break; + default: + retval = Result::NOT_INITIALIZED; + } + } + if (!mStatusMQ->write(&retval)) { + ALOGW("status message queue write failed"); + } + mEfGroup->wake(static_cast(MessageQueueFlagBits::DONE_PROCESSING)); + } + + return false; +} + +} // namespace + +// static +const char* Effect::sContextResultOfCommand = "returned status"; +const char* Effect::sContextCallToCommand = "error"; +const char* Effect::sContextCallFunction = sContextCallToCommand; + +Effect::Effect(effect_handle_t handle) + : mIsClosed(false), mHandle(handle), mEfGroup(nullptr), mStopProcessThread(false) {} + +Effect::~Effect() { + ATRACE_CALL(); + close(); + if (mProcessThread.get()) { + ATRACE_NAME("mProcessThread->join"); + status_t status = mProcessThread->join(); + ALOGE_IF(status, "processing thread exit error: %s", strerror(-status)); + } + if (mEfGroup) { + status_t status = EventFlag::deleteEventFlag(&mEfGroup); + ALOGE_IF(status, "processing MQ event flag deletion error: %s", strerror(-status)); + } + mInBuffer.clear(); + mOutBuffer.clear(); + int status = EffectRelease(mHandle); + ALOGW_IF(status, "Error releasing effect %p: %s", mHandle, strerror(-status)); + 
EffectMap::getInstance().remove(mHandle);
+    mHandle = 0;
+}
+
+// static
+template <typename T>
+size_t Effect::alignedSizeIn(size_t s) {
+    return (s + sizeof(T) - 1) / sizeof(T);
+}
+
+// static
+template <typename T>
+std::unique_ptr<uint8_t[]> Effect::hidlVecToHal(const hidl_vec<T>& vec, uint32_t* halDataSize) {
+    // Due to bugs in HAL, they may attempt to write into the provided
+    // input buffer. The original binder buffer is r/o, thus it is needed
+    // to create a r/w version.
+    *halDataSize = vec.size() * sizeof(T);
+    std::unique_ptr<uint8_t[]> halData(new uint8_t[*halDataSize]);
+    memcpy(&halData[0], &vec[0], *halDataSize);
+    return halData;
+}
+
+// static
+void Effect::effectAuxChannelsConfigFromHal(const channel_config_t& halConfig,
+                                            EffectAuxChannelsConfig* config) {
+    config->mainChannels = AudioChannelBitfield(halConfig.main_channels);
+    config->auxChannels = AudioChannelBitfield(halConfig.aux_channels);
+}
+
+// static
+void Effect::effectAuxChannelsConfigToHal(const EffectAuxChannelsConfig& config,
+                                          channel_config_t* halConfig) {
+    halConfig->main_channels = static_cast<audio_channel_mask_t>(config.mainChannels);
+    halConfig->aux_channels = static_cast<audio_channel_mask_t>(config.auxChannels);
+}
+
+// static
+void Effect::effectBufferConfigFromHal(const buffer_config_t& halConfig,
+                                       EffectBufferConfig* config) {
+    config->buffer.id = 0;
+    config->buffer.frameCount = 0;
+    config->samplingRateHz = halConfig.samplingRate;
+    config->channels = AudioChannelBitfield(halConfig.channels);
+    config->format = AudioFormat(halConfig.format);
+    config->accessMode = EffectBufferAccess(halConfig.accessMode);
+    config->mask = static_cast<decltype(config->mask)>(halConfig.mask);
+}
+
+// static
+void Effect::effectBufferConfigToHal(const EffectBufferConfig& config, buffer_config_t* halConfig) {
+    // Note: setting the buffers directly is considered obsolete. They need to be set
+    // using 'setProcessBuffers'.
+    halConfig->buffer.frameCount = 0;
+    halConfig->buffer.raw = NULL;
+    halConfig->samplingRate = config.samplingRateHz;
+    halConfig->channels = static_cast<uint32_t>(config.channels);
+    // Note: The framework code does not use BP.
+ halConfig->bufferProvider.cookie = NULL; + halConfig->bufferProvider.getBuffer = NULL; + halConfig->bufferProvider.releaseBuffer = NULL; + halConfig->format = static_cast(config.format); + halConfig->accessMode = static_cast(config.accessMode); + halConfig->mask = static_cast(config.mask); +} + +// static +void Effect::effectConfigFromHal(const effect_config_t& halConfig, EffectConfig* config) { + effectBufferConfigFromHal(halConfig.inputCfg, &config->inputCfg); + effectBufferConfigFromHal(halConfig.outputCfg, &config->outputCfg); +} + +// static +void Effect::effectConfigToHal(const EffectConfig& config, effect_config_t* halConfig) { + effectBufferConfigToHal(config.inputCfg, &halConfig->inputCfg); + effectBufferConfigToHal(config.outputCfg, &halConfig->outputCfg); +} + +// static +void Effect::effectOffloadParamToHal(const EffectOffloadParameter& offload, + effect_offload_param_t* halOffload) { + halOffload->isOffload = offload.isOffload; + halOffload->ioHandle = offload.ioHandle; +} + +// static +std::vector Effect::parameterToHal(uint32_t paramSize, const void* paramData, + uint32_t valueSize, const void** valueData) { + size_t valueOffsetFromData = alignedSizeIn(paramSize) * sizeof(uint32_t); + size_t halParamBufferSize = sizeof(effect_param_t) + valueOffsetFromData + valueSize; + std::vector halParamBuffer(halParamBufferSize, 0); + effect_param_t* halParam = reinterpret_cast(&halParamBuffer[0]); + halParam->psize = paramSize; + halParam->vsize = valueSize; + memcpy(halParam->data, paramData, paramSize); + if (valueData) { + if (*valueData) { + // Value data is provided. + memcpy(halParam->data + valueOffsetFromData, *valueData, valueSize); + } else { + // The caller needs the pointer to the value data location. + *valueData = halParam->data + valueOffsetFromData; + } + } + return halParamBuffer; +} + +Result Effect::analyzeCommandStatus(const char* commandName, const char* context, status_t status) { + return analyzeStatus("command", commandName, context, status); +} + +Result Effect::analyzeStatus(const char* funcName, const char* subFuncName, + const char* contextDescription, status_t status) { + if (status != OK) { + ALOGW("Effect %p %s %s %s: %s", mHandle, funcName, subFuncName, contextDescription, + strerror(-status)); + } + switch (status) { + case OK: + return Result::OK; + case -EINVAL: + return Result::INVALID_ARGUMENTS; + case -ENODATA: + return Result::INVALID_STATE; + case -ENODEV: + return Result::NOT_INITIALIZED; + case -ENOMEM: + return Result::RESULT_TOO_BIG; + case -ENOSYS: + return Result::NOT_SUPPORTED; + default: + return Result::INVALID_STATE; + } +} + +void Effect::getConfigImpl(int commandCode, const char* commandName, GetConfigCallback cb) { + uint32_t halResultSize = sizeof(effect_config_t); + effect_config_t halConfig{}; + status_t status = + (*mHandle)->command(mHandle, commandCode, 0, NULL, &halResultSize, &halConfig); + EffectConfig config; + if (status == OK) { + effectConfigFromHal(halConfig, &config); + } + cb(analyzeCommandStatus(commandName, sContextCallToCommand, status), config); +} + +Result Effect::getCurrentConfigImpl(uint32_t featureId, uint32_t configSize, + GetCurrentConfigSuccessCallback onSuccess) { + uint32_t halCmd = featureId; + uint32_t halResult[alignedSizeIn(sizeof(uint32_t) + configSize)]; + memset(halResult, 0, sizeof(halResult)); + uint32_t halResultSize = 0; + return sendCommandReturningStatusAndData(EFFECT_CMD_GET_FEATURE_CONFIG, "GET_FEATURE_CONFIG", + sizeof(uint32_t), &halCmd, &halResultSize, halResult, + sizeof(uint32_t), 
[&] { onSuccess(&halResult[1]); }); +} + +Result Effect::getParameterImpl(uint32_t paramSize, const void* paramData, + uint32_t requestValueSize, uint32_t replyValueSize, + GetParameterSuccessCallback onSuccess) { + // As it is unknown what method HAL uses for copying the provided parameter data, + // it is safer to make sure that input and output buffers do not overlap. + std::vector halCmdBuffer = + parameterToHal(paramSize, paramData, requestValueSize, nullptr); + const void* valueData = nullptr; + std::vector halParamBuffer = + parameterToHal(paramSize, paramData, replyValueSize, &valueData); + uint32_t halParamBufferSize = halParamBuffer.size(); + + return sendCommandReturningStatusAndData( + EFFECT_CMD_GET_PARAM, "GET_PARAM", halCmdBuffer.size(), &halCmdBuffer[0], + &halParamBufferSize, &halParamBuffer[0], sizeof(effect_param_t), [&] { + effect_param_t* halParam = reinterpret_cast(&halParamBuffer[0]); + onSuccess(halParam->vsize, valueData); + }); +} + +Result Effect::getSupportedConfigsImpl(uint32_t featureId, uint32_t maxConfigs, uint32_t configSize, + GetSupportedConfigsSuccessCallback onSuccess) { + uint32_t halCmd[2] = {featureId, maxConfigs}; + uint32_t halResultSize = 2 * sizeof(uint32_t) + maxConfigs * sizeof(configSize); + uint8_t halResult[halResultSize]; + memset(&halResult[0], 0, halResultSize); + return sendCommandReturningStatusAndData( + EFFECT_CMD_GET_FEATURE_SUPPORTED_CONFIGS, "GET_FEATURE_SUPPORTED_CONFIGS", sizeof(halCmd), + halCmd, &halResultSize, &halResult[0], 2 * sizeof(uint32_t), [&] { + uint32_t* halResult32 = reinterpret_cast(&halResult[0]); + uint32_t supportedConfigs = *(++halResult32); // skip status field + if (supportedConfigs > maxConfigs) supportedConfigs = maxConfigs; + onSuccess(supportedConfigs, ++halResult32); + }); +} + +Return Effect::prepareForProcessing(prepareForProcessing_cb _hidl_cb) { + status_t status; + // Create message queue. + if (mStatusMQ) { + ALOGE("the client attempts to call prepareForProcessing_cb twice"); + _hidl_cb(Result::INVALID_STATE, StatusMQ::Descriptor()); + return Void(); + } + std::unique_ptr tempStatusMQ(new StatusMQ(1, true /*EventFlag*/)); + if (!tempStatusMQ->isValid()) { + ALOGE_IF(!tempStatusMQ->isValid(), "status MQ is invalid"); + _hidl_cb(Result::INVALID_ARGUMENTS, StatusMQ::Descriptor()); + return Void(); + } + status = EventFlag::createEventFlag(tempStatusMQ->getEventFlagWord(), &mEfGroup); + if (status != OK || !mEfGroup) { + ALOGE("failed creating event flag for status MQ: %s", strerror(-status)); + _hidl_cb(Result::INVALID_ARGUMENTS, StatusMQ::Descriptor()); + return Void(); + } + + // Create and launch the thread. 
+ mProcessThread = new ProcessThread(&mStopProcessThread, mHandle, &mHalInBufferPtr, + &mHalOutBufferPtr, tempStatusMQ.get(), mEfGroup); + status = mProcessThread->run("effect", PRIORITY_URGENT_AUDIO); + if (status != OK) { + ALOGW("failed to start effect processing thread: %s", strerror(-status)); + _hidl_cb(Result::INVALID_ARGUMENTS, MQDescriptorSync()); + return Void(); + } + + mStatusMQ = std::move(tempStatusMQ); + _hidl_cb(Result::OK, *mStatusMQ->getDesc()); + return Void(); +} + +Return Effect::setProcessBuffers(const AudioBuffer& inBuffer, + const AudioBuffer& outBuffer) { + AudioBufferManager& manager = AudioBufferManager::getInstance(); + sp tempInBuffer, tempOutBuffer; + if (!manager.wrap(inBuffer, &tempInBuffer)) { + ALOGE("Could not map memory of the input buffer"); + return Result::INVALID_ARGUMENTS; + } + if (!manager.wrap(outBuffer, &tempOutBuffer)) { + ALOGE("Could not map memory of the output buffer"); + return Result::INVALID_ARGUMENTS; + } + mInBuffer = tempInBuffer; + mOutBuffer = tempOutBuffer; + // The processing thread only reads these pointers after waking up by an event flag, + // so it's OK to update the pair non-atomically. + mHalInBufferPtr.store(mInBuffer->getHalBuffer(), std::memory_order_release); + mHalOutBufferPtr.store(mOutBuffer->getHalBuffer(), std::memory_order_release); + return Result::OK; +} + +Result Effect::sendCommand(int commandCode, const char* commandName) { + return sendCommand(commandCode, commandName, 0, NULL); +} + +Result Effect::sendCommand(int commandCode, const char* commandName, uint32_t size, void* data) { + status_t status = (*mHandle)->command(mHandle, commandCode, size, data, 0, NULL); + return analyzeCommandStatus(commandName, sContextCallToCommand, status); +} + +Result Effect::sendCommandReturningData(int commandCode, const char* commandName, + uint32_t* replySize, void* replyData) { + return sendCommandReturningData(commandCode, commandName, 0, NULL, replySize, replyData); +} + +Result Effect::sendCommandReturningData(int commandCode, const char* commandName, uint32_t size, + void* data, uint32_t* replySize, void* replyData) { + uint32_t expectedReplySize = *replySize; + status_t status = (*mHandle)->command(mHandle, commandCode, size, data, replySize, replyData); + if (status == OK && *replySize != expectedReplySize) { + status = -ENODATA; + } + return analyzeCommandStatus(commandName, sContextCallToCommand, status); +} + +Result Effect::sendCommandReturningStatus(int commandCode, const char* commandName) { + return sendCommandReturningStatus(commandCode, commandName, 0, NULL); +} + +Result Effect::sendCommandReturningStatus(int commandCode, const char* commandName, uint32_t size, + void* data) { + uint32_t replyCmdStatus; + uint32_t replySize = sizeof(uint32_t); + return sendCommandReturningStatusAndData(commandCode, commandName, size, data, &replySize, + &replyCmdStatus, replySize, [] {}); +} + +Result Effect::sendCommandReturningStatusAndData(int commandCode, const char* commandName, + uint32_t size, void* data, uint32_t* replySize, + void* replyData, uint32_t minReplySize, + CommandSuccessCallback onSuccess) { + status_t status = (*mHandle)->command(mHandle, commandCode, size, data, replySize, replyData); + Result retval; + if (status == OK && minReplySize >= sizeof(uint32_t) && *replySize >= minReplySize) { + uint32_t commandStatus = *reinterpret_cast(replyData); + retval = analyzeCommandStatus(commandName, sContextResultOfCommand, commandStatus); + if (commandStatus == OK) { + onSuccess(); + } + } else { + retval = 
analyzeCommandStatus(commandName, sContextCallToCommand, status); + } + return retval; +} + +Result Effect::setConfigImpl(int commandCode, const char* commandName, const EffectConfig& config, + const sp& inputBufferProvider, + const sp& outputBufferProvider) { + effect_config_t halConfig; + effectConfigToHal(config, &halConfig); + if (inputBufferProvider != 0) { + LOG_FATAL("Using input buffer provider is not supported"); + } + if (outputBufferProvider != 0) { + LOG_FATAL("Using output buffer provider is not supported"); + } + return sendCommandReturningStatus(commandCode, commandName, sizeof(effect_config_t), + &halConfig); +} + +Result Effect::setParameterImpl(uint32_t paramSize, const void* paramData, uint32_t valueSize, + const void* valueData) { + std::vector halParamBuffer = + parameterToHal(paramSize, paramData, valueSize, &valueData); + return sendCommandReturningStatus(EFFECT_CMD_SET_PARAM, "SET_PARAM", halParamBuffer.size(), + &halParamBuffer[0]); +} + +// Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffect follow. +Return Effect::init() { + return sendCommandReturningStatus(EFFECT_CMD_INIT, "INIT"); +} + +Return Effect::setConfig(const EffectConfig& config, + const sp& inputBufferProvider, + const sp& outputBufferProvider) { + return setConfigImpl(EFFECT_CMD_SET_CONFIG, "SET_CONFIG", config, inputBufferProvider, + outputBufferProvider); +} + +Return Effect::reset() { + return sendCommand(EFFECT_CMD_RESET, "RESET"); +} + +Return Effect::enable() { + return sendCommandReturningStatus(EFFECT_CMD_ENABLE, "ENABLE"); +} + +Return Effect::disable() { + return sendCommandReturningStatus(EFFECT_CMD_DISABLE, "DISABLE"); +} + +Return Effect::setDevice(AudioDeviceBitfield device) { + uint32_t halDevice = static_cast(device); + return sendCommand(EFFECT_CMD_SET_DEVICE, "SET_DEVICE", sizeof(uint32_t), &halDevice); +} + +Return Effect::setAndGetVolume(const hidl_vec& volumes, + setAndGetVolume_cb _hidl_cb) { + uint32_t halDataSize; + std::unique_ptr halData = hidlVecToHal(volumes, &halDataSize); + uint32_t halResultSize = halDataSize; + uint32_t halResult[volumes.size()]; + Result retval = sendCommandReturningData(EFFECT_CMD_SET_VOLUME, "SET_VOLUME", halDataSize, + &halData[0], &halResultSize, halResult); + hidl_vec result; + if (retval == Result::OK) { + result.setToExternal(&halResult[0], halResultSize); + } + _hidl_cb(retval, result); + return Void(); +} + +Return Effect::volumeChangeNotification(const hidl_vec& volumes) { + uint32_t halDataSize; + std::unique_ptr halData = hidlVecToHal(volumes, &halDataSize); + return sendCommand(EFFECT_CMD_SET_VOLUME, "SET_VOLUME", halDataSize, &halData[0]); +} + +Return Effect::setAudioMode(AudioMode mode) { + uint32_t halMode = static_cast(mode); + return sendCommand(EFFECT_CMD_SET_AUDIO_MODE, "SET_AUDIO_MODE", sizeof(uint32_t), &halMode); +} + +Return Effect::setConfigReverse( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) { + return setConfigImpl(EFFECT_CMD_SET_CONFIG_REVERSE, "SET_CONFIG_REVERSE", config, + inputBufferProvider, outputBufferProvider); +} + +Return Effect::setInputDevice(AudioDeviceBitfield device) { + uint32_t halDevice = static_cast(device); + return sendCommand(EFFECT_CMD_SET_INPUT_DEVICE, "SET_INPUT_DEVICE", sizeof(uint32_t), + &halDevice); +} + +Return Effect::getConfig(getConfig_cb _hidl_cb) { + getConfigImpl(EFFECT_CMD_GET_CONFIG, "GET_CONFIG", _hidl_cb); + return Void(); +} + +Return Effect::getConfigReverse(getConfigReverse_cb _hidl_cb) { + 
getConfigImpl(EFFECT_CMD_GET_CONFIG_REVERSE, "GET_CONFIG_REVERSE", _hidl_cb); + return Void(); +} + +Return Effect::getSupportedAuxChannelsConfigs(uint32_t maxConfigs, + getSupportedAuxChannelsConfigs_cb _hidl_cb) { + hidl_vec result; + Result retval = getSupportedConfigsImpl( + EFFECT_FEATURE_AUX_CHANNELS, maxConfigs, sizeof(channel_config_t), + [&](uint32_t supportedConfigs, void* configsData) { + result.resize(supportedConfigs); + channel_config_t* config = reinterpret_cast(configsData); + for (size_t i = 0; i < result.size(); ++i) { + effectAuxChannelsConfigFromHal(*config++, &result[i]); + } + }); + _hidl_cb(retval, result); + return Void(); +} + +Return Effect::getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) { + uint32_t halResult[alignedSizeIn(sizeof(uint32_t) + sizeof(channel_config_t))]; + memset(halResult, 0, sizeof(halResult)); + EffectAuxChannelsConfig result; + Result retval = getCurrentConfigImpl( + EFFECT_FEATURE_AUX_CHANNELS, sizeof(channel_config_t), [&](void* configData) { + effectAuxChannelsConfigFromHal(*reinterpret_cast(configData), + &result); + }); + _hidl_cb(retval, result); + return Void(); +} + +Return Effect::setAuxChannelsConfig(const EffectAuxChannelsConfig& config) { + uint32_t halCmd[alignedSizeIn(sizeof(uint32_t) + sizeof(channel_config_t))]; + halCmd[0] = EFFECT_FEATURE_AUX_CHANNELS; + effectAuxChannelsConfigToHal(config, reinterpret_cast(&halCmd[1])); + return sendCommandReturningStatus(EFFECT_CMD_SET_FEATURE_CONFIG, + "SET_FEATURE_CONFIG AUX_CHANNELS", sizeof(halCmd), halCmd); +} + +Return Effect::setAudioSource(AudioSource source) { + uint32_t halSource = static_cast(source); + return sendCommand(EFFECT_CMD_SET_AUDIO_SOURCE, "SET_AUDIO_SOURCE", sizeof(uint32_t), + &halSource); +} + +Return Effect::offload(const EffectOffloadParameter& param) { + effect_offload_param_t halParam; + effectOffloadParamToHal(param, &halParam); + return sendCommandReturningStatus(EFFECT_CMD_OFFLOAD, "OFFLOAD", sizeof(effect_offload_param_t), + &halParam); +} + +Return Effect::getDescriptor(getDescriptor_cb _hidl_cb) { + effect_descriptor_t halDescriptor; + memset(&halDescriptor, 0, sizeof(effect_descriptor_t)); + status_t status = (*mHandle)->get_descriptor(mHandle, &halDescriptor); + EffectDescriptor descriptor; + if (status == OK) { + effectDescriptorFromHal(halDescriptor, &descriptor); + } + _hidl_cb(analyzeStatus("get_descriptor", "", sContextCallFunction, status), descriptor); + return Void(); +} + +Return Effect::command(uint32_t commandId, const hidl_vec& data, + uint32_t resultMaxSize, command_cb _hidl_cb) { + uint32_t halDataSize; + std::unique_ptr halData = hidlVecToHal(data, &halDataSize); + uint32_t halResultSize = resultMaxSize; + std::unique_ptr halResult(new uint8_t[halResultSize]); + memset(&halResult[0], 0, halResultSize); + + void* dataPtr = halDataSize > 0 ? &halData[0] : NULL; + void* resultPtr = halResultSize > 0 ? 
&halResult[0] : NULL;
+    status_t status =
+        (*mHandle)->command(mHandle, commandId, halDataSize, dataPtr, &halResultSize, resultPtr);
+    hidl_vec<uint8_t> result;
+    if (status == OK && resultPtr != NULL) {
+        result.setToExternal(&halResult[0], halResultSize);
+    }
+    _hidl_cb(status, result);
+    return Void();
+}
+
+Return<Result> Effect::setParameter(const hidl_vec<uint8_t>& parameter,
+                                    const hidl_vec<uint8_t>& value) {
+    return setParameterImpl(parameter.size(), &parameter[0], value.size(), &value[0]);
+}
+
+Return<void> Effect::getParameter(const hidl_vec<uint8_t>& parameter, uint32_t valueMaxSize,
+                                  getParameter_cb _hidl_cb) {
+    hidl_vec<uint8_t> value;
+    Result retval = getParameterImpl(
+        parameter.size(), &parameter[0], valueMaxSize,
+        [&](uint32_t valueSize, const void* valueData) {
+            value.setToExternal(reinterpret_cast<uint8_t*>(const_cast<void*>(valueData)),
+                                valueSize);
+        });
+    _hidl_cb(retval, value);
+    return Void();
+}
+
+Return<void> Effect::getSupportedConfigsForFeature(uint32_t featureId, uint32_t maxConfigs,
+                                                   uint32_t configSize,
+                                                   getSupportedConfigsForFeature_cb _hidl_cb) {
+    uint32_t configCount = 0;
+    hidl_vec<uint8_t> result;
+    Result retval = getSupportedConfigsImpl(featureId, maxConfigs, configSize,
+                                            [&](uint32_t supportedConfigs, void* configsData) {
+                                                configCount = supportedConfigs;
+                                                result.resize(configCount * configSize);
+                                                memcpy(&result[0], configsData, result.size());
+                                            });
+    _hidl_cb(retval, configCount, result);
+    return Void();
+}
+
+Return<void> Effect::getCurrentConfigForFeature(uint32_t featureId, uint32_t configSize,
+                                                getCurrentConfigForFeature_cb _hidl_cb) {
+    hidl_vec<uint8_t> result;
+    Result retval = getCurrentConfigImpl(featureId, configSize, [&](void* configData) {
+        result.resize(configSize);
+        memcpy(&result[0], configData, result.size());
+    });
+    _hidl_cb(retval, result);
+    return Void();
+}
+
+Return<Result> Effect::setCurrentConfigForFeature(uint32_t featureId,
+                                                  const hidl_vec<uint8_t>& configData) {
+    uint32_t halCmd[alignedSizeIn<uint32_t>(sizeof(uint32_t) + configData.size())];
+    memset(halCmd, 0, sizeof(halCmd));
+    halCmd[0] = featureId;
+    memcpy(&halCmd[1], &configData[0], configData.size());
+    return sendCommandReturningStatus(EFFECT_CMD_SET_FEATURE_CONFIG, "SET_FEATURE_CONFIG",
+                                      sizeof(halCmd), halCmd);
+}
+
+Return<Result> Effect::close() {
+    if (mIsClosed) return Result::INVALID_STATE;
+    mIsClosed = true;
+    if (mProcessThread.get()) {
+        mStopProcessThread.store(true, std::memory_order_release);
+    }
+    if (mEfGroup) {
+        mEfGroup->wake(static_cast<uint32_t>(MessageQueueFlagBits::REQUEST_QUIT));
+    }
+    return Result::OK;
+}
+
+} // namespace implementation
+} // namespace AUDIO_HAL_VERSION
+} // namespace effect
+} // namespace audio
+} // namespace hardware
+} // namespace android
diff --git a/audio/effect/all-versions/default/include/effect/all-versions/default/EffectsFactory.h b/audio/effect/all-versions/default/include/effect/all-versions/default/EffectsFactory.h
new file mode 100644
index 0000000000000000000000000000000000000000..526abbb89ab2fc3876383acaff2116cb5ce315b4
--- /dev/null
+++ b/audio/effect/all-versions/default/include/effect/all-versions/default/EffectsFactory.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include +#include + +#include + +#include +namespace android { +namespace hardware { +namespace audio { +namespace effect { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::Uuid; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::EffectDescriptor; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffect; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffectsFactory; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::Result; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; + +struct EffectsFactory : public IEffectsFactory { + // Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffectsFactory follow. + Return getAllDescriptors(getAllDescriptors_cb _hidl_cb) override; + Return getDescriptor(const Uuid& uid, getDescriptor_cb _hidl_cb) override; + Return createEffect(const Uuid& uid, int32_t session, int32_t ioHandle, + createEffect_cb _hidl_cb) override; + Return debugDump(const hidl_handle& fd); //< in V2_0::IEffectsFactory only, alias of debug + Return debug(const hidl_handle& fd, const hidl_vec& options) override; + + private: + static sp dispatchEffectInstanceCreation(const effect_descriptor_t& halDescriptor, + effect_handle_t handle); +}; + +extern "C" IEffectsFactory* HIDL_FETCH_IEffectsFactory(const char* name); + +} // namespace implementation +} // namespace AUDIO_HAL_VERSION +} // namespace effect +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/effect/all-versions/default/include/effect/all-versions/default/EffectsFactory.impl.h b/audio/effect/all-versions/default/include/effect/all-versions/default/EffectsFactory.impl.h new file mode 100644 index 0000000000000000000000000000000000000000..1882a2c4905146ece973394070570942fa39a02e --- /dev/null +++ b/audio/effect/all-versions/default/include/effect/all-versions/default/EffectsFactory.impl.h @@ -0,0 +1,195 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::HidlUtils; + +namespace android { +namespace hardware { +namespace audio { +namespace effect { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +// static +sp EffectsFactory::dispatchEffectInstanceCreation(const effect_descriptor_t& halDescriptor, + effect_handle_t handle) { + const effect_uuid_t* halUuid = &halDescriptor.type; + if (memcmp(halUuid, FX_IID_AEC, sizeof(effect_uuid_t)) == 0) { + return new AcousticEchoCancelerEffect(handle); + } else if (memcmp(halUuid, FX_IID_AGC, sizeof(effect_uuid_t)) == 0) { + return new AutomaticGainControlEffect(handle); + } else if (memcmp(halUuid, SL_IID_BASSBOOST, sizeof(effect_uuid_t)) == 0) { + return new BassBoostEffect(handle); + } else if (memcmp(halUuid, EFFECT_UIID_DOWNMIX, sizeof(effect_uuid_t)) == 0) { + return new DownmixEffect(handle); + } else if (memcmp(halUuid, SL_IID_ENVIRONMENTALREVERB, sizeof(effect_uuid_t)) == 0) { + return new EnvironmentalReverbEffect(handle); + } else if (memcmp(halUuid, SL_IID_EQUALIZER, sizeof(effect_uuid_t)) == 0) { + return new EqualizerEffect(handle); + } else if (memcmp(halUuid, FX_IID_LOUDNESS_ENHANCER, sizeof(effect_uuid_t)) == 0) { + return new LoudnessEnhancerEffect(handle); + } else if (memcmp(halUuid, FX_IID_NS, sizeof(effect_uuid_t)) == 0) { + return new NoiseSuppressionEffect(handle); + } else if (memcmp(halUuid, SL_IID_PRESETREVERB, sizeof(effect_uuid_t)) == 0) { + return new PresetReverbEffect(handle); + } else if (memcmp(halUuid, SL_IID_VIRTUALIZER, sizeof(effect_uuid_t)) == 0) { + return new VirtualizerEffect(handle); + } else if (memcmp(halUuid, SL_IID_VISUALIZATION, sizeof(effect_uuid_t)) == 0) { + return new VisualizerEffect(handle); + } + return new Effect(handle); +} + +// Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffectsFactory follow. +Return EffectsFactory::getAllDescriptors(getAllDescriptors_cb _hidl_cb) { + Result retval(Result::OK); + hidl_vec result; + uint32_t numEffects; + status_t status; + +restart: + numEffects = 0; + status = EffectQueryNumberEffects(&numEffects); + if (status != OK) { + retval = Result::NOT_INITIALIZED; + ALOGE("Error querying number of effects: %s", strerror(-status)); + goto exit; + } + result.resize(numEffects); + for (uint32_t i = 0; i < numEffects; ++i) { + effect_descriptor_t halDescriptor; + status = EffectQueryEffect(i, &halDescriptor); + if (status == OK) { + effectDescriptorFromHal(halDescriptor, &result[i]); + } else { + ALOGE("Error querying effect at position %d / %d: %s", i, numEffects, + strerror(-status)); + switch (status) { + case -ENOSYS: { + // Effect list has changed. + goto restart; + } + case -ENOENT: { + // No more effects available. 
+ result.resize(i); + } + default: { + result.resize(0); + retval = Result::NOT_INITIALIZED; + } + } + break; + } + } + +exit: + _hidl_cb(retval, result); + return Void(); +} + +Return EffectsFactory::getDescriptor(const Uuid& uid, getDescriptor_cb _hidl_cb) { + effect_uuid_t halUuid; + HidlUtils::uuidToHal(uid, &halUuid); + effect_descriptor_t halDescriptor; + status_t status = EffectGetDescriptor(&halUuid, &halDescriptor); + EffectDescriptor descriptor; + effectDescriptorFromHal(halDescriptor, &descriptor); + Result retval(Result::OK); + if (status != OK) { + ALOGE("Error querying effect descriptor for %s: %s", uuidToString(halUuid).c_str(), + strerror(-status)); + if (status == -ENOENT) { + retval = Result::INVALID_ARGUMENTS; + } else { + retval = Result::NOT_INITIALIZED; + } + } + _hidl_cb(retval, descriptor); + return Void(); +} + +Return EffectsFactory::createEffect(const Uuid& uid, int32_t session, int32_t ioHandle, + createEffect_cb _hidl_cb) { + effect_uuid_t halUuid; + HidlUtils::uuidToHal(uid, &halUuid); + effect_handle_t handle; + Result retval(Result::OK); + status_t status = EffectCreate(&halUuid, session, ioHandle, &handle); + sp effect; + uint64_t effectId = EffectMap::INVALID_ID; + if (status == OK) { + effect_descriptor_t halDescriptor; + memset(&halDescriptor, 0, sizeof(effect_descriptor_t)); + status = (*handle)->get_descriptor(handle, &halDescriptor); + if (status == OK) { + effect = dispatchEffectInstanceCreation(halDescriptor, handle); + effectId = EffectMap::getInstance().add(handle); + } else { + ALOGE("Error querying effect descriptor for %s: %s", uuidToString(halUuid).c_str(), + strerror(-status)); + EffectRelease(handle); + } + } + if (status != OK) { + ALOGE("Error creating effect %s: %s", uuidToString(halUuid).c_str(), strerror(-status)); + if (status == -ENOENT) { + retval = Result::INVALID_ARGUMENTS; + } else { + retval = Result::NOT_INITIALIZED; + } + } + _hidl_cb(retval, effect, effectId); + return Void(); +} + +Return EffectsFactory::debugDump(const hidl_handle& fd) { + return debug(fd, {} /* options */); +} + +Return EffectsFactory::debug(const hidl_handle& fd, + const hidl_vec& /* options */) { + if (fd.getNativeHandle() != nullptr && fd->numFds == 1) { + EffectDumpEffects(fd->data[0]); + } + return Void(); +} + +IEffectsFactory* HIDL_FETCH_IEffectsFactory(const char* /* name */) { + return new EffectsFactory(); +} + +} // namespace implementation +} // namespace AUDIO_HAL_VERSION +} // namespace effect +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/effect/all-versions/default/include/effect/all-versions/default/EnvironmentalReverbEffect.h b/audio/effect/all-versions/default/include/effect/all-versions/default/EnvironmentalReverbEffect.h new file mode 100644 index 0000000000000000000000000000000000000000..d2f8cc3a84260684a5842892a395f74571d04e89 --- /dev/null +++ b/audio/effect/all-versions/default/include/effect/all-versions/default/EnvironmentalReverbEffect.h @@ -0,0 +1,143 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include + +#include + +#include + +#include "VersionUtils.h" + +namespace android { +namespace hardware { +namespace audio { +namespace effect { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioDevice; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioMode; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioSource; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::AudioBuffer; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::EffectAuxChannelsConfig; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::EffectConfig; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::EffectDescriptor; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::EffectOffloadParameter; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffect; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffectBufferProviderCallback; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEnvironmentalReverbEffect; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::Result; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; + +struct EnvironmentalReverbEffect : public IEnvironmentalReverbEffect { + explicit EnvironmentalReverbEffect(effect_handle_t handle); + + // Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffect follow. + Return init() override; + Return setConfig( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) override; + Return reset() override; + Return enable() override; + Return disable() override; + Return setDevice(AudioDeviceBitfield device) override; + Return setAndGetVolume(const hidl_vec& volumes, + setAndGetVolume_cb _hidl_cb) override; + Return volumeChangeNotification(const hidl_vec& volumes) override; + Return setAudioMode(AudioMode mode) override; + Return setConfigReverse( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) override; + Return setInputDevice(AudioDeviceBitfield device) override; + Return getConfig(getConfig_cb _hidl_cb) override; + Return getConfigReverse(getConfigReverse_cb _hidl_cb) override; + Return getSupportedAuxChannelsConfigs( + uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) override; + Return getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) override; + Return setAuxChannelsConfig(const EffectAuxChannelsConfig& config) override; + Return setAudioSource(AudioSource source) override; + Return offload(const EffectOffloadParameter& param) override; + Return getDescriptor(getDescriptor_cb _hidl_cb) override; + Return prepareForProcessing(prepareForProcessing_cb _hidl_cb) override; + Return setProcessBuffers(const AudioBuffer& inBuffer, + const AudioBuffer& outBuffer) override; + Return command(uint32_t commandId, const hidl_vec& data, uint32_t resultMaxSize, + command_cb _hidl_cb) override; + Return setParameter(const hidl_vec& parameter, + const hidl_vec& value) override; + Return getParameter(const hidl_vec& parameter, uint32_t valueMaxSize, + getParameter_cb _hidl_cb) override; + Return getSupportedConfigsForFeature(uint32_t featureId, uint32_t maxConfigs, + uint32_t configSize, + 
getSupportedConfigsForFeature_cb _hidl_cb) override; + Return getCurrentConfigForFeature(uint32_t featureId, uint32_t configSize, + getCurrentConfigForFeature_cb _hidl_cb) override; + Return setCurrentConfigForFeature(uint32_t featureId, + const hidl_vec& configData) override; + Return close() override; + + // Methods from + // ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEnvironmentalReverbEffect follow. + Return setBypass(bool bypass) override; + Return getBypass(getBypass_cb _hidl_cb) override; + Return setRoomLevel(int16_t roomLevel) override; + Return getRoomLevel(getRoomLevel_cb _hidl_cb) override; + Return setRoomHfLevel(int16_t roomHfLevel) override; + Return getRoomHfLevel(getRoomHfLevel_cb _hidl_cb) override; + Return setDecayTime(uint32_t decayTime) override; + Return getDecayTime(getDecayTime_cb _hidl_cb) override; + Return setDecayHfRatio(int16_t decayHfRatio) override; + Return getDecayHfRatio(getDecayHfRatio_cb _hidl_cb) override; + Return setReflectionsLevel(int16_t reflectionsLevel) override; + Return getReflectionsLevel(getReflectionsLevel_cb _hidl_cb) override; + Return setReflectionsDelay(uint32_t reflectionsDelay) override; + Return getReflectionsDelay(getReflectionsDelay_cb _hidl_cb) override; + Return setReverbLevel(int16_t reverbLevel) override; + Return getReverbLevel(getReverbLevel_cb _hidl_cb) override; + Return setReverbDelay(uint32_t reverbDelay) override; + Return getReverbDelay(getReverbDelay_cb _hidl_cb) override; + Return setDiffusion(int16_t diffusion) override; + Return getDiffusion(getDiffusion_cb _hidl_cb) override; + Return setDensity(int16_t density) override; + Return getDensity(getDensity_cb _hidl_cb) override; + Return setAllProperties( + const IEnvironmentalReverbEffect::AllProperties& properties) override; + Return getAllProperties(getAllProperties_cb _hidl_cb) override; + + private: + sp mEffect; + + virtual ~EnvironmentalReverbEffect(); + + void propertiesFromHal(const t_reverb_settings& halProperties, + IEnvironmentalReverbEffect::AllProperties* properties); + void propertiesToHal(const IEnvironmentalReverbEffect::AllProperties& properties, + t_reverb_settings* halProperties); +}; + +} // namespace implementation +} // namespace AUDIO_HAL_VERSION +} // namespace effect +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/effect/all-versions/default/include/effect/all-versions/default/EnvironmentalReverbEffect.impl.h b/audio/effect/all-versions/default/include/effect/all-versions/default/EnvironmentalReverbEffect.impl.h new file mode 100644 index 0000000000000000000000000000000000000000..39a4092e8cceca2fd2bdebf5dc1088a9ed28d42e --- /dev/null +++ b/audio/effect/all-versions/default/include/effect/all-versions/default/EnvironmentalReverbEffect.impl.h @@ -0,0 +1,304 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include + +#include "VersionUtils.h" + +namespace android { +namespace hardware { +namespace audio { +namespace effect { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +EnvironmentalReverbEffect::EnvironmentalReverbEffect(effect_handle_t handle) + : mEffect(new Effect(handle)) {} + +EnvironmentalReverbEffect::~EnvironmentalReverbEffect() {} + +void EnvironmentalReverbEffect::propertiesFromHal( + const t_reverb_settings& halProperties, IEnvironmentalReverbEffect::AllProperties* properties) { + properties->roomLevel = halProperties.roomLevel; + properties->roomHfLevel = halProperties.roomHFLevel; + properties->decayTime = halProperties.decayTime; + properties->decayHfRatio = halProperties.decayHFRatio; + properties->reflectionsLevel = halProperties.reflectionsLevel; + properties->reflectionsDelay = halProperties.reflectionsDelay; + properties->reverbLevel = halProperties.reverbLevel; + properties->reverbDelay = halProperties.reverbDelay; + properties->diffusion = halProperties.diffusion; + properties->density = halProperties.density; +} + +void EnvironmentalReverbEffect::propertiesToHal( + const IEnvironmentalReverbEffect::AllProperties& properties, t_reverb_settings* halProperties) { + halProperties->roomLevel = properties.roomLevel; + halProperties->roomHFLevel = properties.roomHfLevel; + halProperties->decayTime = properties.decayTime; + halProperties->decayHFRatio = properties.decayHfRatio; + halProperties->reflectionsLevel = properties.reflectionsLevel; + halProperties->reflectionsDelay = properties.reflectionsDelay; + halProperties->reverbLevel = properties.reverbLevel; + halProperties->reverbDelay = properties.reverbDelay; + halProperties->diffusion = properties.diffusion; + halProperties->density = properties.density; +} + +// Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffect follow. 
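+// Every generic IEffect method below is a thin forwarder to the wrapped Effect
+// helper (mEffect); only the IEnvironmentalReverbEffect-specific accessors near
+// the end of this file add behavior, translating HIDL calls into REVERB_PARAM_*
+// parameter reads and writes on the underlying effect handle.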
+Return EnvironmentalReverbEffect::init() { + return mEffect->init(); +} + +Return EnvironmentalReverbEffect::setConfig( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) { + return mEffect->setConfig(config, inputBufferProvider, outputBufferProvider); +} + +Return EnvironmentalReverbEffect::reset() { + return mEffect->reset(); +} + +Return EnvironmentalReverbEffect::enable() { + return mEffect->enable(); +} + +Return EnvironmentalReverbEffect::disable() { + return mEffect->disable(); +} + +Return EnvironmentalReverbEffect::setDevice(AudioDeviceBitfield device) { + return mEffect->setDevice(device); +} + +Return EnvironmentalReverbEffect::setAndGetVolume(const hidl_vec& volumes, + setAndGetVolume_cb _hidl_cb) { + return mEffect->setAndGetVolume(volumes, _hidl_cb); +} + +Return EnvironmentalReverbEffect::volumeChangeNotification( + const hidl_vec& volumes) { + return mEffect->volumeChangeNotification(volumes); +} + +Return EnvironmentalReverbEffect::setAudioMode(AudioMode mode) { + return mEffect->setAudioMode(mode); +} + +Return EnvironmentalReverbEffect::setConfigReverse( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) { + return mEffect->setConfigReverse(config, inputBufferProvider, outputBufferProvider); +} + +Return EnvironmentalReverbEffect::setInputDevice(AudioDeviceBitfield device) { + return mEffect->setInputDevice(device); +} + +Return EnvironmentalReverbEffect::getConfig(getConfig_cb _hidl_cb) { + return mEffect->getConfig(_hidl_cb); +} + +Return EnvironmentalReverbEffect::getConfigReverse(getConfigReverse_cb _hidl_cb) { + return mEffect->getConfigReverse(_hidl_cb); +} + +Return EnvironmentalReverbEffect::getSupportedAuxChannelsConfigs( + uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) { + return mEffect->getSupportedAuxChannelsConfigs(maxConfigs, _hidl_cb); +} + +Return EnvironmentalReverbEffect::getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) { + return mEffect->getAuxChannelsConfig(_hidl_cb); +} + +Return EnvironmentalReverbEffect::setAuxChannelsConfig( + const EffectAuxChannelsConfig& config) { + return mEffect->setAuxChannelsConfig(config); +} + +Return EnvironmentalReverbEffect::setAudioSource(AudioSource source) { + return mEffect->setAudioSource(source); +} + +Return EnvironmentalReverbEffect::offload(const EffectOffloadParameter& param) { + return mEffect->offload(param); +} + +Return EnvironmentalReverbEffect::getDescriptor(getDescriptor_cb _hidl_cb) { + return mEffect->getDescriptor(_hidl_cb); +} + +Return EnvironmentalReverbEffect::prepareForProcessing(prepareForProcessing_cb _hidl_cb) { + return mEffect->prepareForProcessing(_hidl_cb); +} + +Return EnvironmentalReverbEffect::setProcessBuffers(const AudioBuffer& inBuffer, + const AudioBuffer& outBuffer) { + return mEffect->setProcessBuffers(inBuffer, outBuffer); +} + +Return EnvironmentalReverbEffect::command(uint32_t commandId, const hidl_vec& data, + uint32_t resultMaxSize, command_cb _hidl_cb) { + return mEffect->command(commandId, data, resultMaxSize, _hidl_cb); +} + +Return EnvironmentalReverbEffect::setParameter(const hidl_vec& parameter, + const hidl_vec& value) { + return mEffect->setParameter(parameter, value); +} + +Return EnvironmentalReverbEffect::getParameter(const hidl_vec& parameter, + uint32_t valueMaxSize, + getParameter_cb _hidl_cb) { + return mEffect->getParameter(parameter, valueMaxSize, _hidl_cb); +} + +Return EnvironmentalReverbEffect::getSupportedConfigsForFeature( + uint32_t 
featureId, uint32_t maxConfigs, uint32_t configSize, + getSupportedConfigsForFeature_cb _hidl_cb) { + return mEffect->getSupportedConfigsForFeature(featureId, maxConfigs, configSize, _hidl_cb); +} + +Return EnvironmentalReverbEffect::getCurrentConfigForFeature( + uint32_t featureId, uint32_t configSize, getCurrentConfigForFeature_cb _hidl_cb) { + return mEffect->getCurrentConfigForFeature(featureId, configSize, _hidl_cb); +} + +Return EnvironmentalReverbEffect::setCurrentConfigForFeature( + uint32_t featureId, const hidl_vec& configData) { + return mEffect->setCurrentConfigForFeature(featureId, configData); +} + +Return EnvironmentalReverbEffect::close() { + return mEffect->close(); +} + +// Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEnvironmentalReverbEffect +// follow. +Return EnvironmentalReverbEffect::setBypass(bool bypass) { + return mEffect->setParam(REVERB_PARAM_BYPASS, bypass); +} + +Return EnvironmentalReverbEffect::getBypass(getBypass_cb _hidl_cb) { + return mEffect->getIntegerParam(REVERB_PARAM_BYPASS, _hidl_cb); +} + +Return EnvironmentalReverbEffect::setRoomLevel(int16_t roomLevel) { + return mEffect->setParam(REVERB_PARAM_ROOM_LEVEL, roomLevel); +} + +Return EnvironmentalReverbEffect::getRoomLevel(getRoomLevel_cb _hidl_cb) { + return mEffect->getIntegerParam(REVERB_PARAM_ROOM_LEVEL, _hidl_cb); +} + +Return EnvironmentalReverbEffect::setRoomHfLevel(int16_t roomHfLevel) { + return mEffect->setParam(REVERB_PARAM_ROOM_HF_LEVEL, roomHfLevel); +} + +Return EnvironmentalReverbEffect::getRoomHfLevel(getRoomHfLevel_cb _hidl_cb) { + return mEffect->getIntegerParam(REVERB_PARAM_ROOM_HF_LEVEL, _hidl_cb); +} + +Return EnvironmentalReverbEffect::setDecayTime(uint32_t decayTime) { + return mEffect->setParam(REVERB_PARAM_DECAY_TIME, decayTime); +} + +Return EnvironmentalReverbEffect::getDecayTime(getDecayTime_cb _hidl_cb) { + return mEffect->getIntegerParam(REVERB_PARAM_DECAY_TIME, _hidl_cb); +} + +Return EnvironmentalReverbEffect::setDecayHfRatio(int16_t decayHfRatio) { + return mEffect->setParam(REVERB_PARAM_DECAY_HF_RATIO, decayHfRatio); +} + +Return EnvironmentalReverbEffect::getDecayHfRatio(getDecayHfRatio_cb _hidl_cb) { + return mEffect->getIntegerParam(REVERB_PARAM_DECAY_HF_RATIO, _hidl_cb); +} + +Return EnvironmentalReverbEffect::setReflectionsLevel(int16_t reflectionsLevel) { + return mEffect->setParam(REVERB_PARAM_REFLECTIONS_LEVEL, reflectionsLevel); +} + +Return EnvironmentalReverbEffect::getReflectionsLevel(getReflectionsLevel_cb _hidl_cb) { + return mEffect->getIntegerParam(REVERB_PARAM_REFLECTIONS_LEVEL, _hidl_cb); +} + +Return EnvironmentalReverbEffect::setReflectionsDelay(uint32_t reflectionsDelay) { + return mEffect->setParam(REVERB_PARAM_REFLECTIONS_DELAY, reflectionsDelay); +} + +Return EnvironmentalReverbEffect::getReflectionsDelay(getReflectionsDelay_cb _hidl_cb) { + return mEffect->getIntegerParam(REVERB_PARAM_REFLECTIONS_DELAY, _hidl_cb); +} + +Return EnvironmentalReverbEffect::setReverbLevel(int16_t reverbLevel) { + return mEffect->setParam(REVERB_PARAM_REVERB_LEVEL, reverbLevel); +} + +Return EnvironmentalReverbEffect::getReverbLevel(getReverbLevel_cb _hidl_cb) { + return mEffect->getIntegerParam(REVERB_PARAM_REVERB_LEVEL, _hidl_cb); +} + +Return EnvironmentalReverbEffect::setReverbDelay(uint32_t reverbDelay) { + return mEffect->setParam(REVERB_PARAM_REVERB_DELAY, reverbDelay); +} + +Return EnvironmentalReverbEffect::getReverbDelay(getReverbDelay_cb _hidl_cb) { + return mEffect->getIntegerParam(REVERB_PARAM_REVERB_DELAY, _hidl_cb); +} + 
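+// Client-side sketch (illustrative only; the proxy variable and the way it is
+// obtained are not part of this file): the accessors below let a client set a
+// reverb parameter and read it back through the HIDL callback.
+//
+//   sp<IEnvironmentalReverbEffect> reverb = /* from IEffectsFactory::createEffect */;
+//   reverb->setDiffusion(500);
+//   reverb->getDiffusion([](Result r, int16_t diffusion) {
+//       if (r == Result::OK) { /* use diffusion */ }
+//   });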
+Return EnvironmentalReverbEffect::setDiffusion(int16_t diffusion) { + return mEffect->setParam(REVERB_PARAM_DIFFUSION, diffusion); +} + +Return EnvironmentalReverbEffect::getDiffusion(getDiffusion_cb _hidl_cb) { + return mEffect->getIntegerParam(REVERB_PARAM_DIFFUSION, _hidl_cb); +} + +Return EnvironmentalReverbEffect::setDensity(int16_t density) { + return mEffect->setParam(REVERB_PARAM_DENSITY, density); +} + +Return EnvironmentalReverbEffect::getDensity(getDensity_cb _hidl_cb) { + return mEffect->getIntegerParam(REVERB_PARAM_DENSITY, _hidl_cb); +} + +Return EnvironmentalReverbEffect::setAllProperties( + const IEnvironmentalReverbEffect::AllProperties& properties) { + t_reverb_settings halProperties; + propertiesToHal(properties, &halProperties); + return mEffect->setParam(REVERB_PARAM_PROPERTIES, halProperties); +} + +Return EnvironmentalReverbEffect::getAllProperties(getAllProperties_cb _hidl_cb) { + t_reverb_settings halProperties; + Result retval = mEffect->getParam(REVERB_PARAM_PROPERTIES, halProperties); + AllProperties properties; + propertiesFromHal(halProperties, &properties); + _hidl_cb(retval, properties); + return Void(); +} + +} // namespace implementation +} // namespace AUDIO_HAL_VERSION +} // namespace effect +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/effect/all-versions/default/include/effect/all-versions/default/EqualizerEffect.h b/audio/effect/all-versions/default/include/effect/all-versions/default/EqualizerEffect.h new file mode 100644 index 0000000000000000000000000000000000000000..de520521fa1933b9d3ddb1b08b606db38fde8fa8 --- /dev/null +++ b/audio/effect/all-versions/default/include/effect/all-versions/default/EqualizerEffect.h @@ -0,0 +1,131 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include + +#include + +#include + +#include + +#include "VersionUtils.h" + +namespace android { +namespace hardware { +namespace audio { +namespace effect { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioDevice; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioMode; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioSource; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::AudioBuffer; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::EffectAuxChannelsConfig; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::EffectConfig; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::EffectDescriptor; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::EffectOffloadParameter; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffect; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffectBufferProviderCallback; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEqualizerEffect; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::Result; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; + +struct EqualizerEffect : public IEqualizerEffect { + explicit EqualizerEffect(effect_handle_t handle); + + // Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffect follow. + Return init() override; + Return setConfig( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) override; + Return reset() override; + Return enable() override; + Return disable() override; + Return setDevice(AudioDeviceBitfield device) override; + Return setAndGetVolume(const hidl_vec& volumes, + setAndGetVolume_cb _hidl_cb) override; + Return volumeChangeNotification(const hidl_vec& volumes) override; + Return setAudioMode(AudioMode mode) override; + Return setConfigReverse( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) override; + Return setInputDevice(AudioDeviceBitfield device) override; + Return getConfig(getConfig_cb _hidl_cb) override; + Return getConfigReverse(getConfigReverse_cb _hidl_cb) override; + Return getSupportedAuxChannelsConfigs( + uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) override; + Return getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) override; + Return setAuxChannelsConfig(const EffectAuxChannelsConfig& config) override; + Return setAudioSource(AudioSource source) override; + Return offload(const EffectOffloadParameter& param) override; + Return getDescriptor(getDescriptor_cb _hidl_cb) override; + Return prepareForProcessing(prepareForProcessing_cb _hidl_cb) override; + Return setProcessBuffers(const AudioBuffer& inBuffer, + const AudioBuffer& outBuffer) override; + Return command(uint32_t commandId, const hidl_vec& data, uint32_t resultMaxSize, + command_cb _hidl_cb) override; + Return setParameter(const hidl_vec& parameter, + const hidl_vec& value) override; + Return getParameter(const hidl_vec& parameter, uint32_t valueMaxSize, + getParameter_cb _hidl_cb) override; + Return getSupportedConfigsForFeature(uint32_t featureId, uint32_t maxConfigs, + uint32_t configSize, + getSupportedConfigsForFeature_cb _hidl_cb) override; + Return getCurrentConfigForFeature(uint32_t featureId, uint32_t configSize, + 
getCurrentConfigForFeature_cb _hidl_cb) override; + Return setCurrentConfigForFeature(uint32_t featureId, + const hidl_vec& configData) override; + Return close() override; + + // Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEqualizerEffect follow. + Return getNumBands(getNumBands_cb _hidl_cb) override; + Return getLevelRange(getLevelRange_cb _hidl_cb) override; + Return setBandLevel(uint16_t band, int16_t level) override; + Return getBandLevel(uint16_t band, getBandLevel_cb _hidl_cb) override; + Return getBandCenterFrequency(uint16_t band, getBandCenterFrequency_cb _hidl_cb) override; + Return getBandFrequencyRange(uint16_t band, getBandFrequencyRange_cb _hidl_cb) override; + Return getBandForFrequency(uint32_t freq, getBandForFrequency_cb _hidl_cb) override; + Return getPresetNames(getPresetNames_cb _hidl_cb) override; + Return setCurrentPreset(uint16_t preset) override; + Return getCurrentPreset(getCurrentPreset_cb _hidl_cb) override; + Return setAllProperties(const IEqualizerEffect::AllProperties& properties) override; + Return getAllProperties(getAllProperties_cb _hidl_cb) override; + + private: + sp mEffect; + + virtual ~EqualizerEffect(); + + void propertiesFromHal(const t_equalizer_settings& halProperties, + IEqualizerEffect::AllProperties* properties); + std::vector propertiesToHal(const IEqualizerEffect::AllProperties& properties, + t_equalizer_settings** halProperties); +}; + +} // namespace implementation +} // namespace AUDIO_HAL_VERSION +} // namespace effect +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/effect/all-versions/default/include/effect/all-versions/default/EqualizerEffect.impl.h b/audio/effect/all-versions/default/include/effect/all-versions/default/EqualizerEffect.impl.h new file mode 100644 index 0000000000000000000000000000000000000000..db6bed8d2e1ee09e5db98c64cc0b1c9d517e79dc --- /dev/null +++ b/audio/effect/all-versions/default/include/effect/all-versions/default/EqualizerEffect.impl.h @@ -0,0 +1,292 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include + +#include + +#include "VersionUtils.h" + +namespace android { +namespace hardware { +namespace audio { +namespace effect { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +EqualizerEffect::EqualizerEffect(effect_handle_t handle) : mEffect(new Effect(handle)) {} + +EqualizerEffect::~EqualizerEffect() {} + +void EqualizerEffect::propertiesFromHal(const t_equalizer_settings& halProperties, + IEqualizerEffect::AllProperties* properties) { + properties->curPreset = halProperties.curPreset; + // t_equalizer_settings incorrectly defines bandLevels as uint16_t, + // whereas the actual type of values used by effects is int16_t. 
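+    // Reinterpreting the storage as int16_t below keeps the bit pattern intact;
+    // band levels are signed millibel values, so reading them as uint16_t would
+    // corrupt negative levels.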
+ const int16_t* signedBandLevels = + reinterpret_cast(&halProperties.bandLevels[0]); + properties->bandLevels.setToExternal(const_cast(signedBandLevels), + halProperties.numBands); +} + +std::vector EqualizerEffect::propertiesToHal( + const IEqualizerEffect::AllProperties& properties, t_equalizer_settings** halProperties) { + size_t bandsSize = properties.bandLevels.size() * sizeof(uint16_t); + std::vector halBuffer(sizeof(t_equalizer_settings) + bandsSize, 0); + *halProperties = reinterpret_cast(&halBuffer[0]); + (*halProperties)->curPreset = properties.curPreset; + (*halProperties)->numBands = properties.bandLevels.size(); + memcpy((*halProperties)->bandLevels, &properties.bandLevels[0], bandsSize); + return halBuffer; +} + +// Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffect follow. +Return EqualizerEffect::init() { + return mEffect->init(); +} + +Return EqualizerEffect::setConfig( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) { + return mEffect->setConfig(config, inputBufferProvider, outputBufferProvider); +} + +Return EqualizerEffect::reset() { + return mEffect->reset(); +} + +Return EqualizerEffect::enable() { + return mEffect->enable(); +} + +Return EqualizerEffect::disable() { + return mEffect->disable(); +} + +Return EqualizerEffect::setDevice(AudioDeviceBitfield device) { + return mEffect->setDevice(device); +} + +Return EqualizerEffect::setAndGetVolume(const hidl_vec& volumes, + setAndGetVolume_cb _hidl_cb) { + return mEffect->setAndGetVolume(volumes, _hidl_cb); +} + +Return EqualizerEffect::volumeChangeNotification(const hidl_vec& volumes) { + return mEffect->volumeChangeNotification(volumes); +} + +Return EqualizerEffect::setAudioMode(AudioMode mode) { + return mEffect->setAudioMode(mode); +} + +Return EqualizerEffect::setConfigReverse( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) { + return mEffect->setConfigReverse(config, inputBufferProvider, outputBufferProvider); +} + +Return EqualizerEffect::setInputDevice(AudioDeviceBitfield device) { + return mEffect->setInputDevice(device); +} + +Return EqualizerEffect::getConfig(getConfig_cb _hidl_cb) { + return mEffect->getConfig(_hidl_cb); +} + +Return EqualizerEffect::getConfigReverse(getConfigReverse_cb _hidl_cb) { + return mEffect->getConfigReverse(_hidl_cb); +} + +Return EqualizerEffect::getSupportedAuxChannelsConfigs( + uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) { + return mEffect->getSupportedAuxChannelsConfigs(maxConfigs, _hidl_cb); +} + +Return EqualizerEffect::getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) { + return mEffect->getAuxChannelsConfig(_hidl_cb); +} + +Return EqualizerEffect::setAuxChannelsConfig(const EffectAuxChannelsConfig& config) { + return mEffect->setAuxChannelsConfig(config); +} + +Return EqualizerEffect::setAudioSource(AudioSource source) { + return mEffect->setAudioSource(source); +} + +Return EqualizerEffect::offload(const EffectOffloadParameter& param) { + return mEffect->offload(param); +} + +Return EqualizerEffect::getDescriptor(getDescriptor_cb _hidl_cb) { + return mEffect->getDescriptor(_hidl_cb); +} + +Return EqualizerEffect::prepareForProcessing(prepareForProcessing_cb _hidl_cb) { + return mEffect->prepareForProcessing(_hidl_cb); +} + +Return EqualizerEffect::setProcessBuffers(const AudioBuffer& inBuffer, + const AudioBuffer& outBuffer) { + return mEffect->setProcessBuffers(inBuffer, outBuffer); +} + +Return 
EqualizerEffect::command(uint32_t commandId, const hidl_vec& data, + uint32_t resultMaxSize, command_cb _hidl_cb) { + return mEffect->command(commandId, data, resultMaxSize, _hidl_cb); +} + +Return EqualizerEffect::setParameter(const hidl_vec& parameter, + const hidl_vec& value) { + return mEffect->setParameter(parameter, value); +} + +Return EqualizerEffect::getParameter(const hidl_vec& parameter, + uint32_t valueMaxSize, getParameter_cb _hidl_cb) { + return mEffect->getParameter(parameter, valueMaxSize, _hidl_cb); +} + +Return EqualizerEffect::getSupportedConfigsForFeature( + uint32_t featureId, uint32_t maxConfigs, uint32_t configSize, + getSupportedConfigsForFeature_cb _hidl_cb) { + return mEffect->getSupportedConfigsForFeature(featureId, maxConfigs, configSize, _hidl_cb); +} + +Return EqualizerEffect::getCurrentConfigForFeature(uint32_t featureId, uint32_t configSize, + getCurrentConfigForFeature_cb _hidl_cb) { + return mEffect->getCurrentConfigForFeature(featureId, configSize, _hidl_cb); +} + +Return EqualizerEffect::setCurrentConfigForFeature(uint32_t featureId, + const hidl_vec& configData) { + return mEffect->setCurrentConfigForFeature(featureId, configData); +} + +Return EqualizerEffect::close() { + return mEffect->close(); +} + +// Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEqualizerEffect follow. +Return EqualizerEffect::getNumBands(getNumBands_cb _hidl_cb) { + return mEffect->getIntegerParam(EQ_PARAM_NUM_BANDS, _hidl_cb); +} + +Return EqualizerEffect::getLevelRange(getLevelRange_cb _hidl_cb) { + int16_t halLevels[2] = {0, 0}; + Result retval = mEffect->getParam(EQ_PARAM_LEVEL_RANGE, halLevels); + _hidl_cb(retval, halLevels[0], halLevels[1]); + return Void(); +} + +Return EqualizerEffect::setBandLevel(uint16_t band, int16_t level) { + return mEffect->setParam(EQ_PARAM_BAND_LEVEL, band, level); +} + +Return EqualizerEffect::getBandLevel(uint16_t band, getBandLevel_cb _hidl_cb) { + int16_t halLevel = 0; + Result retval = mEffect->getParam(EQ_PARAM_BAND_LEVEL, band, halLevel); + _hidl_cb(retval, halLevel); + return Void(); +} + +Return EqualizerEffect::getBandCenterFrequency(uint16_t band, + getBandCenterFrequency_cb _hidl_cb) { + uint32_t halFreq = 0; + Result retval = mEffect->getParam(EQ_PARAM_CENTER_FREQ, band, halFreq); + _hidl_cb(retval, halFreq); + return Void(); +} + +Return EqualizerEffect::getBandFrequencyRange(uint16_t band, + getBandFrequencyRange_cb _hidl_cb) { + uint32_t halFreqs[2] = {0, 0}; + Result retval = mEffect->getParam(EQ_PARAM_BAND_FREQ_RANGE, band, halFreqs); + _hidl_cb(retval, halFreqs[0], halFreqs[1]); + return Void(); +} + +Return EqualizerEffect::getBandForFrequency(uint32_t freq, getBandForFrequency_cb _hidl_cb) { + uint16_t halBand = 0; + Result retval = mEffect->getParam(EQ_PARAM_GET_BAND, freq, halBand); + _hidl_cb(retval, halBand); + return Void(); +} + +Return EqualizerEffect::getPresetNames(getPresetNames_cb _hidl_cb) { + uint16_t halPresetCount = 0; + Result retval = mEffect->getParam(EQ_PARAM_GET_NUM_OF_PRESETS, halPresetCount); + hidl_vec presetNames; + if (retval == Result::OK) { + presetNames.resize(halPresetCount); + for (uint16_t i = 0; i < halPresetCount; ++i) { + char halPresetName[EFFECT_STRING_LEN_MAX]; + retval = mEffect->getParam(EQ_PARAM_GET_PRESET_NAME, i, halPresetName); + if (retval == Result::OK) { + presetNames[i] = halPresetName; + } else { + presetNames.resize(i); + } + } + } + _hidl_cb(retval, presetNames); + return Void(); +} + +Return EqualizerEffect::setCurrentPreset(uint16_t preset) { + return 
mEffect->setParam(EQ_PARAM_CUR_PRESET, preset); +} + +Return EqualizerEffect::getCurrentPreset(getCurrentPreset_cb _hidl_cb) { + return mEffect->getIntegerParam(EQ_PARAM_CUR_PRESET, _hidl_cb); +} + +Return EqualizerEffect::setAllProperties( + const IEqualizerEffect::AllProperties& properties) { + t_equalizer_settings* halPropertiesPtr = nullptr; + std::vector halBuffer = propertiesToHal(properties, &halPropertiesPtr); + uint32_t paramId = EQ_PARAM_PROPERTIES; + return mEffect->setParameterImpl(sizeof(paramId), ¶mId, halBuffer.size(), halPropertiesPtr); +} + +Return EqualizerEffect::getAllProperties(getAllProperties_cb _hidl_cb) { + uint16_t numBands = 0; + Result retval = mEffect->getParam(EQ_PARAM_NUM_BANDS, numBands); + AllProperties properties; + if (retval != Result::OK) { + _hidl_cb(retval, properties); + return Void(); + } + size_t valueSize = sizeof(t_equalizer_settings) + sizeof(int16_t) * numBands; + uint32_t paramId = EQ_PARAM_PROPERTIES; + retval = mEffect->getParameterImpl( + sizeof(paramId), ¶mId, valueSize, [&](uint32_t, const void* valueData) { + const t_equalizer_settings* halProperties = + reinterpret_cast(valueData); + propertiesFromHal(*halProperties, &properties); + }); + _hidl_cb(retval, properties); + return Void(); +} + +} // namespace implementation +} // namespace AUDIO_HAL_VERSION +} // namespace effect +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/effect/all-versions/default/include/effect/all-versions/default/LoudnessEnhancerEffect.h b/audio/effect/all-versions/default/include/effect/all-versions/default/LoudnessEnhancerEffect.h new file mode 100644 index 0000000000000000000000000000000000000000..b59b077e765f7b6af2524bd15f79b360347c956f --- /dev/null +++ b/audio/effect/all-versions/default/include/effect/all-versions/default/LoudnessEnhancerEffect.h @@ -0,0 +1,113 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include + +#include + +#include "VersionUtils.h" + +namespace android { +namespace hardware { +namespace audio { +namespace effect { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioDevice; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioMode; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioSource; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::AudioBuffer; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::EffectAuxChannelsConfig; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::EffectConfig; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::EffectDescriptor; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::EffectOffloadParameter; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffect; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffectBufferProviderCallback; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::ILoudnessEnhancerEffect; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::Result; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; + +struct LoudnessEnhancerEffect : public ILoudnessEnhancerEffect { + explicit LoudnessEnhancerEffect(effect_handle_t handle); + + // Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffect follow. + Return init() override; + Return setConfig( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) override; + Return reset() override; + Return enable() override; + Return disable() override; + Return setDevice(AudioDeviceBitfield device) override; + Return setAndGetVolume(const hidl_vec& volumes, + setAndGetVolume_cb _hidl_cb) override; + Return volumeChangeNotification(const hidl_vec& volumes) override; + Return setAudioMode(AudioMode mode) override; + Return setConfigReverse( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) override; + Return setInputDevice(AudioDeviceBitfield device) override; + Return getConfig(getConfig_cb _hidl_cb) override; + Return getConfigReverse(getConfigReverse_cb _hidl_cb) override; + Return getSupportedAuxChannelsConfigs( + uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) override; + Return getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) override; + Return setAuxChannelsConfig(const EffectAuxChannelsConfig& config) override; + Return setAudioSource(AudioSource source) override; + Return offload(const EffectOffloadParameter& param) override; + Return getDescriptor(getDescriptor_cb _hidl_cb) override; + Return prepareForProcessing(prepareForProcessing_cb _hidl_cb) override; + Return setProcessBuffers(const AudioBuffer& inBuffer, + const AudioBuffer& outBuffer) override; + Return command(uint32_t commandId, const hidl_vec& data, uint32_t resultMaxSize, + command_cb _hidl_cb) override; + Return setParameter(const hidl_vec& parameter, + const hidl_vec& value) override; + Return getParameter(const hidl_vec& parameter, uint32_t valueMaxSize, + getParameter_cb _hidl_cb) override; + Return getSupportedConfigsForFeature(uint32_t featureId, uint32_t maxConfigs, + uint32_t configSize, + getSupportedConfigsForFeature_cb _hidl_cb) override; + Return getCurrentConfigForFeature(uint32_t featureId, uint32_t configSize, + 
getCurrentConfigForFeature_cb _hidl_cb) override; + Return setCurrentConfigForFeature(uint32_t featureId, + const hidl_vec& configData) override; + Return close() override; + + // Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::ILoudnessEnhancerEffect + // follow. + Return setTargetGain(int32_t targetGainMb) override; + Return getTargetGain(getTargetGain_cb _hidl_cb) override; + + private: + sp mEffect; + + virtual ~LoudnessEnhancerEffect(); +}; + +} // namespace implementation +} // namespace AUDIO_HAL_VERSION +} // namespace effect +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/effect/all-versions/default/include/effect/all-versions/default/LoudnessEnhancerEffect.impl.h b/audio/effect/all-versions/default/include/effect/all-versions/default/LoudnessEnhancerEffect.impl.h new file mode 100644 index 0000000000000000000000000000000000000000..88210e954bfbb5b46a75ad30b43a2c2ccb7fc0bd --- /dev/null +++ b/audio/effect/all-versions/default/include/effect/all-versions/default/LoudnessEnhancerEffect.impl.h @@ -0,0 +1,189 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include + +#include +#include + +#include "VersionUtils.h" + +namespace android { +namespace hardware { +namespace audio { +namespace effect { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +LoudnessEnhancerEffect::LoudnessEnhancerEffect(effect_handle_t handle) + : mEffect(new Effect(handle)) {} + +LoudnessEnhancerEffect::~LoudnessEnhancerEffect() {} + +// Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffect follow. 
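+// Client-side sketch (illustrative; proxy acquisition elided): besides the
+// forwarded IEffect methods below, the only effect-specific calls are the
+// target-gain accessors at the end of this file.
+//
+//   sp<ILoudnessEnhancerEffect> le = /* from IEffectsFactory::createEffect */;
+//   le->setTargetGain(300);  // gain in millibels
+//   le->getTargetGain([](Result r, int32_t gainMb) { /* use gainMb */ });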
+Return LoudnessEnhancerEffect::init() { + return mEffect->init(); +} + +Return LoudnessEnhancerEffect::setConfig( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) { + return mEffect->setConfig(config, inputBufferProvider, outputBufferProvider); +} + +Return LoudnessEnhancerEffect::reset() { + return mEffect->reset(); +} + +Return LoudnessEnhancerEffect::enable() { + return mEffect->enable(); +} + +Return LoudnessEnhancerEffect::disable() { + return mEffect->disable(); +} + +Return LoudnessEnhancerEffect::setDevice(AudioDeviceBitfield device) { + return mEffect->setDevice(device); +} + +Return LoudnessEnhancerEffect::setAndGetVolume(const hidl_vec& volumes, + setAndGetVolume_cb _hidl_cb) { + return mEffect->setAndGetVolume(volumes, _hidl_cb); +} + +Return LoudnessEnhancerEffect::volumeChangeNotification(const hidl_vec& volumes) { + return mEffect->volumeChangeNotification(volumes); +} + +Return LoudnessEnhancerEffect::setAudioMode(AudioMode mode) { + return mEffect->setAudioMode(mode); +} + +Return LoudnessEnhancerEffect::setConfigReverse( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) { + return mEffect->setConfigReverse(config, inputBufferProvider, outputBufferProvider); +} + +Return LoudnessEnhancerEffect::setInputDevice(AudioDeviceBitfield device) { + return mEffect->setInputDevice(device); +} + +Return LoudnessEnhancerEffect::getConfig(getConfig_cb _hidl_cb) { + return mEffect->getConfig(_hidl_cb); +} + +Return LoudnessEnhancerEffect::getConfigReverse(getConfigReverse_cb _hidl_cb) { + return mEffect->getConfigReverse(_hidl_cb); +} + +Return LoudnessEnhancerEffect::getSupportedAuxChannelsConfigs( + uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) { + return mEffect->getSupportedAuxChannelsConfigs(maxConfigs, _hidl_cb); +} + +Return LoudnessEnhancerEffect::getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) { + return mEffect->getAuxChannelsConfig(_hidl_cb); +} + +Return LoudnessEnhancerEffect::setAuxChannelsConfig(const EffectAuxChannelsConfig& config) { + return mEffect->setAuxChannelsConfig(config); +} + +Return LoudnessEnhancerEffect::setAudioSource(AudioSource source) { + return mEffect->setAudioSource(source); +} + +Return LoudnessEnhancerEffect::offload(const EffectOffloadParameter& param) { + return mEffect->offload(param); +} + +Return LoudnessEnhancerEffect::getDescriptor(getDescriptor_cb _hidl_cb) { + return mEffect->getDescriptor(_hidl_cb); +} + +Return LoudnessEnhancerEffect::prepareForProcessing(prepareForProcessing_cb _hidl_cb) { + return mEffect->prepareForProcessing(_hidl_cb); +} + +Return LoudnessEnhancerEffect::setProcessBuffers(const AudioBuffer& inBuffer, + const AudioBuffer& outBuffer) { + return mEffect->setProcessBuffers(inBuffer, outBuffer); +} + +Return LoudnessEnhancerEffect::command(uint32_t commandId, const hidl_vec& data, + uint32_t resultMaxSize, command_cb _hidl_cb) { + return mEffect->command(commandId, data, resultMaxSize, _hidl_cb); +} + +Return LoudnessEnhancerEffect::setParameter(const hidl_vec& parameter, + const hidl_vec& value) { + return mEffect->setParameter(parameter, value); +} + +Return LoudnessEnhancerEffect::getParameter(const hidl_vec& parameter, + uint32_t valueMaxSize, getParameter_cb _hidl_cb) { + return mEffect->getParameter(parameter, valueMaxSize, _hidl_cb); +} + +Return LoudnessEnhancerEffect::getSupportedConfigsForFeature( + uint32_t featureId, uint32_t maxConfigs, uint32_t configSize, + getSupportedConfigsForFeature_cb 
_hidl_cb) { + return mEffect->getSupportedConfigsForFeature(featureId, maxConfigs, configSize, _hidl_cb); +} + +Return LoudnessEnhancerEffect::getCurrentConfigForFeature( + uint32_t featureId, uint32_t configSize, getCurrentConfigForFeature_cb _hidl_cb) { + return mEffect->getCurrentConfigForFeature(featureId, configSize, _hidl_cb); +} + +Return LoudnessEnhancerEffect::setCurrentConfigForFeature( + uint32_t featureId, const hidl_vec& configData) { + return mEffect->setCurrentConfigForFeature(featureId, configData); +} + +Return LoudnessEnhancerEffect::close() { + return mEffect->close(); +} + +// Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::ILoudnessEnhancerEffect +// follow. +Return LoudnessEnhancerEffect::setTargetGain(int32_t targetGainMb) { + return mEffect->setParam(LOUDNESS_ENHANCER_DEFAULT_TARGET_GAIN_MB, targetGainMb); +} + +Return LoudnessEnhancerEffect::getTargetGain(getTargetGain_cb _hidl_cb) { + // AOSP Loudness Enhancer expects the size of the request to not include the + // size of the parameter. + uint32_t paramId = LOUDNESS_ENHANCER_DEFAULT_TARGET_GAIN_MB; + uint32_t targetGainMb = 0; + Result retval = mEffect->getParameterImpl( + sizeof(paramId), ¶mId, 0, sizeof(targetGainMb), [&](uint32_t, const void* valueData) { + memcpy(&targetGainMb, valueData, sizeof(targetGainMb)); + }); + _hidl_cb(retval, targetGainMb); + return Void(); +} + +} // namespace implementation +} // namespace AUDIO_HAL_VERSION +} // namespace effect +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/effect/all-versions/default/include/effect/all-versions/default/NoiseSuppressionEffect.h b/audio/effect/all-versions/default/include/effect/all-versions/default/NoiseSuppressionEffect.h new file mode 100644 index 0000000000000000000000000000000000000000..af1635b717ffecf875ee2a7212dbe7e2cd8d7a6c --- /dev/null +++ b/audio/effect/all-versions/default/include/effect/all-versions/default/NoiseSuppressionEffect.h @@ -0,0 +1,125 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include + +#include + +#include + +#include "VersionUtils.h" + +namespace android { +namespace hardware { +namespace audio { +namespace effect { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioDevice; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioMode; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioSource; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::AudioBuffer; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::EffectAuxChannelsConfig; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::EffectConfig; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::EffectDescriptor; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::EffectOffloadParameter; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffect; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffectBufferProviderCallback; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::INoiseSuppressionEffect; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::Result; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; + +struct NoiseSuppressionEffect : public INoiseSuppressionEffect { + explicit NoiseSuppressionEffect(effect_handle_t handle); + + // Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffect follow. + Return init() override; + Return setConfig( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) override; + Return reset() override; + Return enable() override; + Return disable() override; + Return setDevice(AudioDeviceBitfield device) override; + Return setAndGetVolume(const hidl_vec& volumes, + setAndGetVolume_cb _hidl_cb) override; + Return volumeChangeNotification(const hidl_vec& volumes) override; + Return setAudioMode(AudioMode mode) override; + Return setConfigReverse( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) override; + Return setInputDevice(AudioDeviceBitfield device) override; + Return getConfig(getConfig_cb _hidl_cb) override; + Return getConfigReverse(getConfigReverse_cb _hidl_cb) override; + Return getSupportedAuxChannelsConfigs( + uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) override; + Return getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) override; + Return setAuxChannelsConfig(const EffectAuxChannelsConfig& config) override; + Return setAudioSource(AudioSource source) override; + Return offload(const EffectOffloadParameter& param) override; + Return getDescriptor(getDescriptor_cb _hidl_cb) override; + Return prepareForProcessing(prepareForProcessing_cb _hidl_cb) override; + Return setProcessBuffers(const AudioBuffer& inBuffer, + const AudioBuffer& outBuffer) override; + Return command(uint32_t commandId, const hidl_vec& data, uint32_t resultMaxSize, + command_cb _hidl_cb) override; + Return setParameter(const hidl_vec& parameter, + const hidl_vec& value) override; + Return getParameter(const hidl_vec& parameter, uint32_t valueMaxSize, + getParameter_cb _hidl_cb) override; + Return getSupportedConfigsForFeature(uint32_t featureId, uint32_t maxConfigs, + uint32_t configSize, + getSupportedConfigsForFeature_cb _hidl_cb) override; + Return getCurrentConfigForFeature(uint32_t featureId, uint32_t configSize, + 
getCurrentConfigForFeature_cb _hidl_cb) override; + Return setCurrentConfigForFeature(uint32_t featureId, + const hidl_vec& configData) override; + Return close() override; + + // Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::INoiseSuppressionEffect + // follow. + Return setSuppressionLevel(INoiseSuppressionEffect::Level level) override; + Return getSuppressionLevel(getSuppressionLevel_cb _hidl_cb) override; + Return setSuppressionType(INoiseSuppressionEffect::Type type) override; + Return getSuppressionType(getSuppressionType_cb _hidl_cb) override; + Return setAllProperties( + const INoiseSuppressionEffect::AllProperties& properties) override; + Return getAllProperties(getAllProperties_cb _hidl_cb) override; + + private: + sp mEffect; + + virtual ~NoiseSuppressionEffect(); + + void propertiesFromHal(const t_ns_settings& halProperties, + INoiseSuppressionEffect::AllProperties* properties); + void propertiesToHal(const INoiseSuppressionEffect::AllProperties& properties, + t_ns_settings* halProperties); +}; + +} // namespace implementation +} // namespace AUDIO_HAL_VERSION +} // namespace effect +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/effect/all-versions/default/include/effect/all-versions/default/NoiseSuppressionEffect.impl.h b/audio/effect/all-versions/default/include/effect/all-versions/default/NoiseSuppressionEffect.impl.h new file mode 100644 index 0000000000000000000000000000000000000000..f32399c51cb76f086fb511dba9ee07cbf9badc89 --- /dev/null +++ b/audio/effect/all-versions/default/include/effect/all-versions/default/NoiseSuppressionEffect.impl.h @@ -0,0 +1,219 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include + +#include "VersionUtils.h" + +namespace android { +namespace hardware { +namespace audio { +namespace effect { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +NoiseSuppressionEffect::NoiseSuppressionEffect(effect_handle_t handle) + : mEffect(new Effect(handle)) {} + +NoiseSuppressionEffect::~NoiseSuppressionEffect() {} + +void NoiseSuppressionEffect::propertiesFromHal(const t_ns_settings& halProperties, + INoiseSuppressionEffect::AllProperties* properties) { + properties->level = Level(halProperties.level); + properties->type = Type(halProperties.type); +} + +void NoiseSuppressionEffect::propertiesToHal( + const INoiseSuppressionEffect::AllProperties& properties, t_ns_settings* halProperties) { + halProperties->level = static_cast(properties.level); + halProperties->type = static_cast(properties.type); +} + +// Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffect follow. 
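+// The conversion helpers above translate between the HIDL AllProperties struct
+// and the HAL's t_ns_settings by casting the Level/Type enums; the IEffect
+// methods that follow are plain forwarders to the wrapped Effect instance.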
+Return NoiseSuppressionEffect::init() { + return mEffect->init(); +} + +Return NoiseSuppressionEffect::setConfig( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) { + return mEffect->setConfig(config, inputBufferProvider, outputBufferProvider); +} + +Return NoiseSuppressionEffect::reset() { + return mEffect->reset(); +} + +Return NoiseSuppressionEffect::enable() { + return mEffect->enable(); +} + +Return NoiseSuppressionEffect::disable() { + return mEffect->disable(); +} + +Return NoiseSuppressionEffect::setDevice(AudioDeviceBitfield device) { + return mEffect->setDevice(device); +} + +Return NoiseSuppressionEffect::setAndGetVolume(const hidl_vec& volumes, + setAndGetVolume_cb _hidl_cb) { + return mEffect->setAndGetVolume(volumes, _hidl_cb); +} + +Return NoiseSuppressionEffect::volumeChangeNotification(const hidl_vec& volumes) { + return mEffect->volumeChangeNotification(volumes); +} + +Return NoiseSuppressionEffect::setAudioMode(AudioMode mode) { + return mEffect->setAudioMode(mode); +} + +Return NoiseSuppressionEffect::setConfigReverse( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) { + return mEffect->setConfigReverse(config, inputBufferProvider, outputBufferProvider); +} + +Return NoiseSuppressionEffect::setInputDevice(AudioDeviceBitfield device) { + return mEffect->setInputDevice(device); +} + +Return NoiseSuppressionEffect::getConfig(getConfig_cb _hidl_cb) { + return mEffect->getConfig(_hidl_cb); +} + +Return NoiseSuppressionEffect::getConfigReverse(getConfigReverse_cb _hidl_cb) { + return mEffect->getConfigReverse(_hidl_cb); +} + +Return NoiseSuppressionEffect::getSupportedAuxChannelsConfigs( + uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) { + return mEffect->getSupportedAuxChannelsConfigs(maxConfigs, _hidl_cb); +} + +Return NoiseSuppressionEffect::getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) { + return mEffect->getAuxChannelsConfig(_hidl_cb); +} + +Return NoiseSuppressionEffect::setAuxChannelsConfig(const EffectAuxChannelsConfig& config) { + return mEffect->setAuxChannelsConfig(config); +} + +Return NoiseSuppressionEffect::setAudioSource(AudioSource source) { + return mEffect->setAudioSource(source); +} + +Return NoiseSuppressionEffect::offload(const EffectOffloadParameter& param) { + return mEffect->offload(param); +} + +Return NoiseSuppressionEffect::getDescriptor(getDescriptor_cb _hidl_cb) { + return mEffect->getDescriptor(_hidl_cb); +} + +Return NoiseSuppressionEffect::prepareForProcessing(prepareForProcessing_cb _hidl_cb) { + return mEffect->prepareForProcessing(_hidl_cb); +} + +Return NoiseSuppressionEffect::setProcessBuffers(const AudioBuffer& inBuffer, + const AudioBuffer& outBuffer) { + return mEffect->setProcessBuffers(inBuffer, outBuffer); +} + +Return NoiseSuppressionEffect::command(uint32_t commandId, const hidl_vec& data, + uint32_t resultMaxSize, command_cb _hidl_cb) { + return mEffect->command(commandId, data, resultMaxSize, _hidl_cb); +} + +Return NoiseSuppressionEffect::setParameter(const hidl_vec& parameter, + const hidl_vec& value) { + return mEffect->setParameter(parameter, value); +} + +Return NoiseSuppressionEffect::getParameter(const hidl_vec& parameter, + uint32_t valueMaxSize, getParameter_cb _hidl_cb) { + return mEffect->getParameter(parameter, valueMaxSize, _hidl_cb); +} + +Return NoiseSuppressionEffect::getSupportedConfigsForFeature( + uint32_t featureId, uint32_t maxConfigs, uint32_t configSize, + getSupportedConfigsForFeature_cb 
_hidl_cb) { + return mEffect->getSupportedConfigsForFeature(featureId, maxConfigs, configSize, _hidl_cb); +} + +Return NoiseSuppressionEffect::getCurrentConfigForFeature( + uint32_t featureId, uint32_t configSize, getCurrentConfigForFeature_cb _hidl_cb) { + return mEffect->getCurrentConfigForFeature(featureId, configSize, _hidl_cb); +} + +Return NoiseSuppressionEffect::setCurrentConfigForFeature( + uint32_t featureId, const hidl_vec& configData) { + return mEffect->setCurrentConfigForFeature(featureId, configData); +} + +Return NoiseSuppressionEffect::close() { + return mEffect->close(); +} + +// Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::INoiseSuppressionEffect +// follow. +Return NoiseSuppressionEffect::setSuppressionLevel(INoiseSuppressionEffect::Level level) { + return mEffect->setParam(NS_PARAM_LEVEL, static_cast(level)); +} + +Return NoiseSuppressionEffect::getSuppressionLevel(getSuppressionLevel_cb _hidl_cb) { + int32_t halLevel = 0; + Result retval = mEffect->getParam(NS_PARAM_LEVEL, halLevel); + _hidl_cb(retval, Level(halLevel)); + return Void(); +} + +Return NoiseSuppressionEffect::setSuppressionType(INoiseSuppressionEffect::Type type) { + return mEffect->setParam(NS_PARAM_TYPE, static_cast(type)); +} + +Return NoiseSuppressionEffect::getSuppressionType(getSuppressionType_cb _hidl_cb) { + int32_t halType = 0; + Result retval = mEffect->getParam(NS_PARAM_TYPE, halType); + _hidl_cb(retval, Type(halType)); + return Void(); +} + +Return NoiseSuppressionEffect::setAllProperties( + const INoiseSuppressionEffect::AllProperties& properties) { + t_ns_settings halProperties; + propertiesToHal(properties, &halProperties); + return mEffect->setParam(NS_PARAM_PROPERTIES, halProperties); +} + +Return NoiseSuppressionEffect::getAllProperties(getAllProperties_cb _hidl_cb) { + t_ns_settings halProperties; + Result retval = mEffect->getParam(NS_PARAM_PROPERTIES, halProperties); + AllProperties properties; + propertiesFromHal(halProperties, &properties); + _hidl_cb(retval, properties); + return Void(); +} + +} // namespace implementation +} // namespace AUDIO_HAL_VERSION +} // namespace effect +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/effect/all-versions/default/include/effect/all-versions/default/PresetReverbEffect.h b/audio/effect/all-versions/default/include/effect/all-versions/default/PresetReverbEffect.h new file mode 100644 index 0000000000000000000000000000000000000000..1a91ab49b2e52636ccc020928fee17f4a6d77960 --- /dev/null +++ b/audio/effect/all-versions/default/include/effect/all-versions/default/PresetReverbEffect.h @@ -0,0 +1,113 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include + +#include + +#include "VersionUtils.h" + +namespace android { +namespace hardware { +namespace audio { +namespace effect { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioDevice; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioMode; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioSource; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::AudioBuffer; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::EffectAuxChannelsConfig; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::EffectConfig; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::EffectDescriptor; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::EffectOffloadParameter; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffect; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffectBufferProviderCallback; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IPresetReverbEffect; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::Result; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; + +struct PresetReverbEffect : public IPresetReverbEffect { + explicit PresetReverbEffect(effect_handle_t handle); + + // Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffect follow. + Return init() override; + Return setConfig( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) override; + Return reset() override; + Return enable() override; + Return disable() override; + Return setDevice(AudioDeviceBitfield device) override; + Return setAndGetVolume(const hidl_vec& volumes, + setAndGetVolume_cb _hidl_cb) override; + Return volumeChangeNotification(const hidl_vec& volumes) override; + Return setAudioMode(AudioMode mode) override; + Return setConfigReverse( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) override; + Return setInputDevice(AudioDeviceBitfield device) override; + Return getConfig(getConfig_cb _hidl_cb) override; + Return getConfigReverse(getConfigReverse_cb _hidl_cb) override; + Return getSupportedAuxChannelsConfigs( + uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) override; + Return getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) override; + Return setAuxChannelsConfig(const EffectAuxChannelsConfig& config) override; + Return setAudioSource(AudioSource source) override; + Return offload(const EffectOffloadParameter& param) override; + Return getDescriptor(getDescriptor_cb _hidl_cb) override; + Return prepareForProcessing(prepareForProcessing_cb _hidl_cb) override; + Return setProcessBuffers(const AudioBuffer& inBuffer, + const AudioBuffer& outBuffer) override; + Return command(uint32_t commandId, const hidl_vec& data, uint32_t resultMaxSize, + command_cb _hidl_cb) override; + Return setParameter(const hidl_vec& parameter, + const hidl_vec& value) override; + Return getParameter(const hidl_vec& parameter, uint32_t valueMaxSize, + getParameter_cb _hidl_cb) override; + Return getSupportedConfigsForFeature(uint32_t featureId, uint32_t maxConfigs, + uint32_t configSize, + getSupportedConfigsForFeature_cb _hidl_cb) override; + Return getCurrentConfigForFeature(uint32_t featureId, uint32_t configSize, + getCurrentConfigForFeature_cb 
_hidl_cb) override; + Return setCurrentConfigForFeature(uint32_t featureId, + const hidl_vec& configData) override; + Return close() override; + + // Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IPresetReverbEffect + // follow. + Return setPreset(IPresetReverbEffect::Preset preset) override; + Return getPreset(getPreset_cb _hidl_cb) override; + + private: + sp mEffect; + + virtual ~PresetReverbEffect(); +}; + +} // namespace implementation +} // namespace AUDIO_HAL_VERSION +} // namespace effect +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/effect/all-versions/default/include/effect/all-versions/default/PresetReverbEffect.impl.h b/audio/effect/all-versions/default/include/effect/all-versions/default/PresetReverbEffect.impl.h new file mode 100644 index 0000000000000000000000000000000000000000..eab68fb7038ca97a715ab077c703971490fa83cd --- /dev/null +++ b/audio/effect/all-versions/default/include/effect/all-versions/default/PresetReverbEffect.impl.h @@ -0,0 +1,179 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include +#include + +#include "VersionUtils.h" + +namespace android { +namespace hardware { +namespace audio { +namespace effect { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +PresetReverbEffect::PresetReverbEffect(effect_handle_t handle) : mEffect(new Effect(handle)) {} + +PresetReverbEffect::~PresetReverbEffect() {} + +// Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffect follow. 
+Return PresetReverbEffect::init() { + return mEffect->init(); +} + +Return PresetReverbEffect::setConfig( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) { + return mEffect->setConfig(config, inputBufferProvider, outputBufferProvider); +} + +Return PresetReverbEffect::reset() { + return mEffect->reset(); +} + +Return PresetReverbEffect::enable() { + return mEffect->enable(); +} + +Return PresetReverbEffect::disable() { + return mEffect->disable(); +} + +Return PresetReverbEffect::setDevice(AudioDeviceBitfield device) { + return mEffect->setDevice(device); +} + +Return PresetReverbEffect::setAndGetVolume(const hidl_vec& volumes, + setAndGetVolume_cb _hidl_cb) { + return mEffect->setAndGetVolume(volumes, _hidl_cb); +} + +Return PresetReverbEffect::volumeChangeNotification(const hidl_vec& volumes) { + return mEffect->volumeChangeNotification(volumes); +} + +Return PresetReverbEffect::setAudioMode(AudioMode mode) { + return mEffect->setAudioMode(mode); +} + +Return PresetReverbEffect::setConfigReverse( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) { + return mEffect->setConfigReverse(config, inputBufferProvider, outputBufferProvider); +} + +Return PresetReverbEffect::setInputDevice(AudioDeviceBitfield device) { + return mEffect->setInputDevice(device); +} + +Return PresetReverbEffect::getConfig(getConfig_cb _hidl_cb) { + return mEffect->getConfig(_hidl_cb); +} + +Return PresetReverbEffect::getConfigReverse(getConfigReverse_cb _hidl_cb) { + return mEffect->getConfigReverse(_hidl_cb); +} + +Return PresetReverbEffect::getSupportedAuxChannelsConfigs( + uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) { + return mEffect->getSupportedAuxChannelsConfigs(maxConfigs, _hidl_cb); +} + +Return PresetReverbEffect::getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) { + return mEffect->getAuxChannelsConfig(_hidl_cb); +} + +Return PresetReverbEffect::setAuxChannelsConfig(const EffectAuxChannelsConfig& config) { + return mEffect->setAuxChannelsConfig(config); +} + +Return PresetReverbEffect::setAudioSource(AudioSource source) { + return mEffect->setAudioSource(source); +} + +Return PresetReverbEffect::offload(const EffectOffloadParameter& param) { + return mEffect->offload(param); +} + +Return PresetReverbEffect::getDescriptor(getDescriptor_cb _hidl_cb) { + return mEffect->getDescriptor(_hidl_cb); +} + +Return PresetReverbEffect::prepareForProcessing(prepareForProcessing_cb _hidl_cb) { + return mEffect->prepareForProcessing(_hidl_cb); +} + +Return PresetReverbEffect::setProcessBuffers(const AudioBuffer& inBuffer, + const AudioBuffer& outBuffer) { + return mEffect->setProcessBuffers(inBuffer, outBuffer); +} + +Return PresetReverbEffect::command(uint32_t commandId, const hidl_vec& data, + uint32_t resultMaxSize, command_cb _hidl_cb) { + return mEffect->command(commandId, data, resultMaxSize, _hidl_cb); +} + +Return PresetReverbEffect::setParameter(const hidl_vec& parameter, + const hidl_vec& value) { + return mEffect->setParameter(parameter, value); +} + +Return PresetReverbEffect::getParameter(const hidl_vec& parameter, + uint32_t valueMaxSize, getParameter_cb _hidl_cb) { + return mEffect->getParameter(parameter, valueMaxSize, _hidl_cb); +} + +Return PresetReverbEffect::getSupportedConfigsForFeature( + uint32_t featureId, uint32_t maxConfigs, uint32_t configSize, + getSupportedConfigsForFeature_cb _hidl_cb) { + return mEffect->getSupportedConfigsForFeature(featureId, maxConfigs, configSize, 
_hidl_cb); +} + +Return PresetReverbEffect::getCurrentConfigForFeature( + uint32_t featureId, uint32_t configSize, getCurrentConfigForFeature_cb _hidl_cb) { + return mEffect->getCurrentConfigForFeature(featureId, configSize, _hidl_cb); +} + +Return PresetReverbEffect::setCurrentConfigForFeature(uint32_t featureId, + const hidl_vec& configData) { + return mEffect->setCurrentConfigForFeature(featureId, configData); +} + +Return PresetReverbEffect::close() { + return mEffect->close(); +} + +// Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IPresetReverbEffect follow. +Return PresetReverbEffect::setPreset(IPresetReverbEffect::Preset preset) { + return mEffect->setParam(REVERB_PARAM_PRESET, static_cast(preset)); +} + +Return PresetReverbEffect::getPreset(getPreset_cb _hidl_cb) { + t_reverb_presets halPreset = REVERB_PRESET_NONE; + Result retval = mEffect->getParam(REVERB_PARAM_PRESET, halPreset); + _hidl_cb(retval, Preset(halPreset)); + return Void(); +} + +} // namespace implementation +} // namespace AUDIO_HAL_VERSION +} // namespace effect +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/effect/all-versions/default/include/effect/all-versions/default/VirtualizerEffect.h b/audio/effect/all-versions/default/include/effect/all-versions/default/VirtualizerEffect.h new file mode 100644 index 0000000000000000000000000000000000000000..c0d5a0034da8011969132d1c728184c9943631cd --- /dev/null +++ b/audio/effect/all-versions/default/include/effect/all-versions/default/VirtualizerEffect.h @@ -0,0 +1,123 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include + +#include + +#include "VersionUtils.h" + +namespace android { +namespace hardware { +namespace audio { +namespace effect { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioChannelMask; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioDevice; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioMode; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioSource; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::implementation::AudioChannelBitfield; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::AudioBuffer; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::EffectAuxChannelsConfig; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::EffectConfig; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::EffectDescriptor; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::EffectOffloadParameter; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffect; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffectBufferProviderCallback; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IVirtualizerEffect; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::Result; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; + +struct VirtualizerEffect : public IVirtualizerEffect { + explicit VirtualizerEffect(effect_handle_t handle); + + // Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffect follow. + Return init() override; + Return setConfig( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) override; + Return reset() override; + Return enable() override; + Return disable() override; + Return setDevice(AudioDeviceBitfield device) override; + Return setAndGetVolume(const hidl_vec& volumes, + setAndGetVolume_cb _hidl_cb) override; + Return volumeChangeNotification(const hidl_vec& volumes) override; + Return setAudioMode(AudioMode mode) override; + Return setConfigReverse( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) override; + Return setInputDevice(AudioDeviceBitfield device) override; + Return getConfig(getConfig_cb _hidl_cb) override; + Return getConfigReverse(getConfigReverse_cb _hidl_cb) override; + Return getSupportedAuxChannelsConfigs( + uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) override; + Return getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) override; + Return setAuxChannelsConfig(const EffectAuxChannelsConfig& config) override; + Return setAudioSource(AudioSource source) override; + Return offload(const EffectOffloadParameter& param) override; + Return getDescriptor(getDescriptor_cb _hidl_cb) override; + Return prepareForProcessing(prepareForProcessing_cb _hidl_cb) override; + Return setProcessBuffers(const AudioBuffer& inBuffer, + const AudioBuffer& outBuffer) override; + Return command(uint32_t commandId, const hidl_vec& data, uint32_t resultMaxSize, + command_cb _hidl_cb) override; + Return setParameter(const hidl_vec& parameter, + const hidl_vec& value) override; + Return getParameter(const hidl_vec& parameter, uint32_t valueMaxSize, + getParameter_cb _hidl_cb) override; + Return getSupportedConfigsForFeature(uint32_t featureId, uint32_t maxConfigs, + uint32_t 
configSize, + getSupportedConfigsForFeature_cb _hidl_cb) override; + Return getCurrentConfigForFeature(uint32_t featureId, uint32_t configSize, + getCurrentConfigForFeature_cb _hidl_cb) override; + Return setCurrentConfigForFeature(uint32_t featureId, + const hidl_vec& configData) override; + Return close() override; + + // Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IVirtualizerEffect + // follow. + Return isStrengthSupported() override; + Return setStrength(uint16_t strength) override; + Return getStrength(getStrength_cb _hidl_cb) override; + Return getVirtualSpeakerAngles(AudioChannelBitfield mask, AudioDevice device, + getVirtualSpeakerAngles_cb _hidl_cb) override; + Return forceVirtualizationMode(AudioDevice device) override; + Return getVirtualizationMode(getVirtualizationMode_cb _hidl_cb) override; + + private: + sp mEffect; + + virtual ~VirtualizerEffect(); + + void speakerAnglesFromHal(const int32_t* halAngles, uint32_t channelCount, + hidl_vec& speakerAngles); +}; + +} // namespace implementation +} // namespace AUDIO_HAL_VERSION +} // namespace effect +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/effect/all-versions/default/include/effect/all-versions/default/VirtualizerEffect.impl.h b/audio/effect/all-versions/default/include/effect/all-versions/default/VirtualizerEffect.impl.h new file mode 100644 index 0000000000000000000000000000000000000000..23b09a89dd413bc4f2869fa46f5a3f4e5f4a4a49 --- /dev/null +++ b/audio/effect/all-versions/default/include/effect/all-versions/default/VirtualizerEffect.impl.h @@ -0,0 +1,231 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include + +#include +#include + +#include "VersionUtils.h" + +namespace android { +namespace hardware { +namespace audio { +namespace effect { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +VirtualizerEffect::VirtualizerEffect(effect_handle_t handle) : mEffect(new Effect(handle)) {} + +VirtualizerEffect::~VirtualizerEffect() {} + +void VirtualizerEffect::speakerAnglesFromHal(const int32_t* halAngles, uint32_t channelCount, + hidl_vec& speakerAngles) { + speakerAngles.resize(channelCount); + for (uint32_t i = 0; i < channelCount; ++i) { + speakerAngles[i].mask = AudioChannelBitfield(*halAngles++); + speakerAngles[i].azimuth = *halAngles++; + speakerAngles[i].elevation = *halAngles++; + } +} + +// Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffect follow. 
+Return VirtualizerEffect::init() { + return mEffect->init(); +} + +Return VirtualizerEffect::setConfig( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) { + return mEffect->setConfig(config, inputBufferProvider, outputBufferProvider); +} + +Return VirtualizerEffect::reset() { + return mEffect->reset(); +} + +Return VirtualizerEffect::enable() { + return mEffect->enable(); +} + +Return VirtualizerEffect::disable() { + return mEffect->disable(); +} + +Return VirtualizerEffect::setDevice(AudioDeviceBitfield device) { + return mEffect->setDevice(device); +} + +Return VirtualizerEffect::setAndGetVolume(const hidl_vec& volumes, + setAndGetVolume_cb _hidl_cb) { + return mEffect->setAndGetVolume(volumes, _hidl_cb); +} + +Return VirtualizerEffect::volumeChangeNotification(const hidl_vec& volumes) { + return mEffect->volumeChangeNotification(volumes); +} + +Return VirtualizerEffect::setAudioMode(AudioMode mode) { + return mEffect->setAudioMode(mode); +} + +Return VirtualizerEffect::setConfigReverse( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) { + return mEffect->setConfigReverse(config, inputBufferProvider, outputBufferProvider); +} + +Return VirtualizerEffect::setInputDevice(AudioDeviceBitfield device) { + return mEffect->setInputDevice(device); +} + +Return VirtualizerEffect::getConfig(getConfig_cb _hidl_cb) { + return mEffect->getConfig(_hidl_cb); +} + +Return VirtualizerEffect::getConfigReverse(getConfigReverse_cb _hidl_cb) { + return mEffect->getConfigReverse(_hidl_cb); +} + +Return VirtualizerEffect::getSupportedAuxChannelsConfigs( + uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) { + return mEffect->getSupportedAuxChannelsConfigs(maxConfigs, _hidl_cb); +} + +Return VirtualizerEffect::getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) { + return mEffect->getAuxChannelsConfig(_hidl_cb); +} + +Return VirtualizerEffect::setAuxChannelsConfig(const EffectAuxChannelsConfig& config) { + return mEffect->setAuxChannelsConfig(config); +} + +Return VirtualizerEffect::setAudioSource(AudioSource source) { + return mEffect->setAudioSource(source); +} + +Return VirtualizerEffect::offload(const EffectOffloadParameter& param) { + return mEffect->offload(param); +} + +Return VirtualizerEffect::getDescriptor(getDescriptor_cb _hidl_cb) { + return mEffect->getDescriptor(_hidl_cb); +} + +Return VirtualizerEffect::prepareForProcessing(prepareForProcessing_cb _hidl_cb) { + return mEffect->prepareForProcessing(_hidl_cb); +} + +Return VirtualizerEffect::setProcessBuffers(const AudioBuffer& inBuffer, + const AudioBuffer& outBuffer) { + return mEffect->setProcessBuffers(inBuffer, outBuffer); +} + +Return VirtualizerEffect::command(uint32_t commandId, const hidl_vec& data, + uint32_t resultMaxSize, command_cb _hidl_cb) { + return mEffect->command(commandId, data, resultMaxSize, _hidl_cb); +} + +Return VirtualizerEffect::setParameter(const hidl_vec& parameter, + const hidl_vec& value) { + return mEffect->setParameter(parameter, value); +} + +Return VirtualizerEffect::getParameter(const hidl_vec& parameter, + uint32_t valueMaxSize, getParameter_cb _hidl_cb) { + return mEffect->getParameter(parameter, valueMaxSize, _hidl_cb); +} + +Return VirtualizerEffect::getSupportedConfigsForFeature( + uint32_t featureId, uint32_t maxConfigs, uint32_t configSize, + getSupportedConfigsForFeature_cb _hidl_cb) { + return mEffect->getSupportedConfigsForFeature(featureId, maxConfigs, configSize, _hidl_cb); +} + +Return 
VirtualizerEffect::getCurrentConfigForFeature(uint32_t featureId, uint32_t configSize, + getCurrentConfigForFeature_cb _hidl_cb) { + return mEffect->getCurrentConfigForFeature(featureId, configSize, _hidl_cb); +} + +Return VirtualizerEffect::setCurrentConfigForFeature(uint32_t featureId, + const hidl_vec& configData) { + return mEffect->setCurrentConfigForFeature(featureId, configData); +} + +Return VirtualizerEffect::close() { + return mEffect->close(); +} + +// Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IVirtualizerEffect follow. +Return VirtualizerEffect::isStrengthSupported() { + bool halSupported = false; + mEffect->getParam(VIRTUALIZER_PARAM_STRENGTH_SUPPORTED, halSupported); + return halSupported; +} + +Return VirtualizerEffect::setStrength(uint16_t strength) { + return mEffect->setParam(VIRTUALIZER_PARAM_STRENGTH, strength); +} + +Return VirtualizerEffect::getStrength(getStrength_cb _hidl_cb) { + return mEffect->getIntegerParam(VIRTUALIZER_PARAM_STRENGTH, _hidl_cb); +} + +Return VirtualizerEffect::getVirtualSpeakerAngles(AudioChannelBitfield mask, + AudioDevice device, + getVirtualSpeakerAngles_cb _hidl_cb) { + uint32_t channelCount = + audio_channel_count_from_out_mask(static_cast(mask)); + size_t halSpeakerAnglesSize = sizeof(int32_t) * 3 * channelCount; + uint32_t halParam[3] = {VIRTUALIZER_PARAM_VIRTUAL_SPEAKER_ANGLES, + static_cast(mask), + static_cast(device)}; + hidl_vec speakerAngles; + Result retval = mEffect->getParameterImpl( + sizeof(halParam), halParam, halSpeakerAnglesSize, + [&](uint32_t valueSize, const void* valueData) { + if (valueSize > halSpeakerAnglesSize) { + valueSize = halSpeakerAnglesSize; + } else if (valueSize < halSpeakerAnglesSize) { + channelCount = valueSize / (sizeof(int32_t) * 3); + } + speakerAnglesFromHal(reinterpret_cast(valueData), channelCount, + speakerAngles); + }); + _hidl_cb(retval, speakerAngles); + return Void(); +} + +Return VirtualizerEffect::forceVirtualizationMode(AudioDevice device) { + return mEffect->setParam(VIRTUALIZER_PARAM_FORCE_VIRTUALIZATION_MODE, + static_cast(device)); +} + +Return VirtualizerEffect::getVirtualizationMode(getVirtualizationMode_cb _hidl_cb) { + uint32_t halMode = 0; + Result retval = mEffect->getParam(VIRTUALIZER_PARAM_FORCE_VIRTUALIZATION_MODE, halMode); + _hidl_cb(retval, AudioDevice(halMode)); + return Void(); +} + +} // namespace implementation +} // namespace AUDIO_HAL_VERSION +} // namespace effect +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/effect/all-versions/default/include/effect/all-versions/default/VisualizerEffect.h b/audio/effect/all-versions/default/include/effect/all-versions/default/VisualizerEffect.h new file mode 100644 index 0000000000000000000000000000000000000000..114d3b7ae54cc5bf463edf50bbd19ee11f9d92df --- /dev/null +++ b/audio/effect/all-versions/default/include/effect/all-versions/default/VisualizerEffect.h @@ -0,0 +1,122 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include + +#include + +#include "VersionUtils.h" + +namespace android { +namespace hardware { +namespace audio { +namespace effect { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioDevice; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioMode; +using ::android::hardware::audio::common::AUDIO_HAL_VERSION::AudioSource; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::AudioBuffer; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::EffectAuxChannelsConfig; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::EffectConfig; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::EffectDescriptor; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::EffectOffloadParameter; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffect; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffectBufferProviderCallback; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IVisualizerEffect; +using ::android::hardware::audio::effect::AUDIO_HAL_VERSION::Result; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; + +struct VisualizerEffect : public IVisualizerEffect { + explicit VisualizerEffect(effect_handle_t handle); + + // Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffect follow. + Return init() override; + Return setConfig( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) override; + Return reset() override; + Return enable() override; + Return disable() override; + Return setDevice(AudioDeviceBitfield device) override; + Return setAndGetVolume(const hidl_vec& volumes, + setAndGetVolume_cb _hidl_cb) override; + Return volumeChangeNotification(const hidl_vec& volumes) override; + Return setAudioMode(AudioMode mode) override; + Return setConfigReverse( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) override; + Return setInputDevice(AudioDeviceBitfield device) override; + Return getConfig(getConfig_cb _hidl_cb) override; + Return getConfigReverse(getConfigReverse_cb _hidl_cb) override; + Return getSupportedAuxChannelsConfigs( + uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) override; + Return getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) override; + Return setAuxChannelsConfig(const EffectAuxChannelsConfig& config) override; + Return setAudioSource(AudioSource source) override; + Return offload(const EffectOffloadParameter& param) override; + Return getDescriptor(getDescriptor_cb _hidl_cb) override; + Return prepareForProcessing(prepareForProcessing_cb _hidl_cb) override; + Return setProcessBuffers(const AudioBuffer& inBuffer, + const AudioBuffer& outBuffer) override; + Return command(uint32_t commandId, const hidl_vec& data, uint32_t resultMaxSize, + command_cb _hidl_cb) override; + Return setParameter(const hidl_vec& parameter, + const hidl_vec& value) override; + Return getParameter(const hidl_vec& parameter, uint32_t valueMaxSize, + getParameter_cb _hidl_cb) override; + Return getSupportedConfigsForFeature(uint32_t featureId, uint32_t maxConfigs, + uint32_t configSize, + getSupportedConfigsForFeature_cb _hidl_cb) override; + Return 
getCurrentConfigForFeature(uint32_t featureId, uint32_t configSize, + getCurrentConfigForFeature_cb _hidl_cb) override; + Return setCurrentConfigForFeature(uint32_t featureId, + const hidl_vec& configData) override; + Return close() override; + + // Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IVisualizerEffect follow. + Return setCaptureSize(uint16_t captureSize) override; + Return getCaptureSize(getCaptureSize_cb _hidl_cb) override; + Return setScalingMode(IVisualizerEffect::ScalingMode scalingMode) override; + Return getScalingMode(getScalingMode_cb _hidl_cb) override; + Return setLatency(uint32_t latencyMs) override; + Return getLatency(getLatency_cb _hidl_cb) override; + Return setMeasurementMode(IVisualizerEffect::MeasurementMode measurementMode) override; + Return getMeasurementMode(getMeasurementMode_cb _hidl_cb) override; + Return capture(capture_cb _hidl_cb) override; + Return measure(measure_cb _hidl_cb) override; + + private: + sp mEffect; + uint16_t mCaptureSize; + MeasurementMode mMeasurementMode; + + virtual ~VisualizerEffect(); +}; + +} // namespace implementation +} // namespace AUDIO_HAL_VERSION +} // namespace effect +} // namespace audio +} // namespace hardware +} // namespace android diff --git a/audio/effect/all-versions/default/include/effect/all-versions/default/VisualizerEffect.impl.h b/audio/effect/all-versions/default/include/effect/all-versions/default/VisualizerEffect.impl.h new file mode 100644 index 0000000000000000000000000000000000000000..9f2195b5cb02e52e28c956fe9bf771dd3d10a0b7 --- /dev/null +++ b/audio/effect/all-versions/default/include/effect/all-versions/default/VisualizerEffect.impl.h @@ -0,0 +1,254 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include +#include + +#include "VersionUtils.h" + +namespace android { +namespace hardware { +namespace audio { +namespace effect { +namespace AUDIO_HAL_VERSION { +namespace implementation { + +VisualizerEffect::VisualizerEffect(effect_handle_t handle) + : mEffect(new Effect(handle)), mCaptureSize(0), mMeasurementMode(MeasurementMode::NONE) {} + +VisualizerEffect::~VisualizerEffect() {} + +// Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IEffect follow. 
+Return VisualizerEffect::init() { + return mEffect->init(); +} + +Return VisualizerEffect::setConfig( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) { + return mEffect->setConfig(config, inputBufferProvider, outputBufferProvider); +} + +Return VisualizerEffect::reset() { + return mEffect->reset(); +} + +Return VisualizerEffect::enable() { + return mEffect->enable(); +} + +Return VisualizerEffect::disable() { + return mEffect->disable(); +} + +Return VisualizerEffect::setDevice(AudioDeviceBitfield device) { + return mEffect->setDevice(device); +} + +Return VisualizerEffect::setAndGetVolume(const hidl_vec& volumes, + setAndGetVolume_cb _hidl_cb) { + return mEffect->setAndGetVolume(volumes, _hidl_cb); +} + +Return VisualizerEffect::volumeChangeNotification(const hidl_vec& volumes) { + return mEffect->volumeChangeNotification(volumes); +} + +Return VisualizerEffect::setAudioMode(AudioMode mode) { + return mEffect->setAudioMode(mode); +} + +Return VisualizerEffect::setConfigReverse( + const EffectConfig& config, const sp& inputBufferProvider, + const sp& outputBufferProvider) { + return mEffect->setConfigReverse(config, inputBufferProvider, outputBufferProvider); +} + +Return VisualizerEffect::setInputDevice(AudioDeviceBitfield device) { + return mEffect->setInputDevice(device); +} + +Return VisualizerEffect::getConfig(getConfig_cb _hidl_cb) { + return mEffect->getConfig(_hidl_cb); +} + +Return VisualizerEffect::getConfigReverse(getConfigReverse_cb _hidl_cb) { + return mEffect->getConfigReverse(_hidl_cb); +} + +Return VisualizerEffect::getSupportedAuxChannelsConfigs( + uint32_t maxConfigs, getSupportedAuxChannelsConfigs_cb _hidl_cb) { + return mEffect->getSupportedAuxChannelsConfigs(maxConfigs, _hidl_cb); +} + +Return VisualizerEffect::getAuxChannelsConfig(getAuxChannelsConfig_cb _hidl_cb) { + return mEffect->getAuxChannelsConfig(_hidl_cb); +} + +Return VisualizerEffect::setAuxChannelsConfig(const EffectAuxChannelsConfig& config) { + return mEffect->setAuxChannelsConfig(config); +} + +Return VisualizerEffect::setAudioSource(AudioSource source) { + return mEffect->setAudioSource(source); +} + +Return VisualizerEffect::offload(const EffectOffloadParameter& param) { + return mEffect->offload(param); +} + +Return VisualizerEffect::getDescriptor(getDescriptor_cb _hidl_cb) { + return mEffect->getDescriptor(_hidl_cb); +} + +Return VisualizerEffect::prepareForProcessing(prepareForProcessing_cb _hidl_cb) { + return mEffect->prepareForProcessing(_hidl_cb); +} + +Return VisualizerEffect::setProcessBuffers(const AudioBuffer& inBuffer, + const AudioBuffer& outBuffer) { + return mEffect->setProcessBuffers(inBuffer, outBuffer); +} + +Return VisualizerEffect::command(uint32_t commandId, const hidl_vec& data, + uint32_t resultMaxSize, command_cb _hidl_cb) { + return mEffect->command(commandId, data, resultMaxSize, _hidl_cb); +} + +Return VisualizerEffect::setParameter(const hidl_vec& parameter, + const hidl_vec& value) { + return mEffect->setParameter(parameter, value); +} + +Return VisualizerEffect::getParameter(const hidl_vec& parameter, + uint32_t valueMaxSize, getParameter_cb _hidl_cb) { + return mEffect->getParameter(parameter, valueMaxSize, _hidl_cb); +} + +Return VisualizerEffect::getSupportedConfigsForFeature( + uint32_t featureId, uint32_t maxConfigs, uint32_t configSize, + getSupportedConfigsForFeature_cb _hidl_cb) { + return mEffect->getSupportedConfigsForFeature(featureId, maxConfigs, configSize, _hidl_cb); +} + +Return 
VisualizerEffect::getCurrentConfigForFeature(uint32_t featureId, uint32_t configSize, + getCurrentConfigForFeature_cb _hidl_cb) { + return mEffect->getCurrentConfigForFeature(featureId, configSize, _hidl_cb); +} + +Return VisualizerEffect::setCurrentConfigForFeature(uint32_t featureId, + const hidl_vec& configData) { + return mEffect->setCurrentConfigForFeature(featureId, configData); +} + +Return VisualizerEffect::close() { + return mEffect->close(); +} + +// Methods from ::android::hardware::audio::effect::AUDIO_HAL_VERSION::IVisualizerEffect follow. +Return VisualizerEffect::setCaptureSize(uint16_t captureSize) { + Result retval = mEffect->setParam(VISUALIZER_PARAM_CAPTURE_SIZE, captureSize); + if (retval == Result::OK) { + mCaptureSize = captureSize; + } + return retval; +} + +Return VisualizerEffect::getCaptureSize(getCaptureSize_cb _hidl_cb) { + return mEffect->getIntegerParam(VISUALIZER_PARAM_CAPTURE_SIZE, _hidl_cb); +} + +Return VisualizerEffect::setScalingMode(IVisualizerEffect::ScalingMode scalingMode) { + return mEffect->setParam(VISUALIZER_PARAM_SCALING_MODE, static_cast(scalingMode)); +} + +Return VisualizerEffect::getScalingMode(getScalingMode_cb _hidl_cb) { + int32_t halMode; + Result retval = mEffect->getParam(VISUALIZER_PARAM_SCALING_MODE, halMode); + _hidl_cb(retval, ScalingMode(halMode)); + return Void(); +} + +Return VisualizerEffect::setLatency(uint32_t latencyMs) { + return mEffect->setParam(VISUALIZER_PARAM_LATENCY, latencyMs); +} + +Return VisualizerEffect::getLatency(getLatency_cb _hidl_cb) { + return mEffect->getIntegerParam(VISUALIZER_PARAM_LATENCY, _hidl_cb); +} + +Return VisualizerEffect::setMeasurementMode( + IVisualizerEffect::MeasurementMode measurementMode) { + Result retval = + mEffect->setParam(VISUALIZER_PARAM_MEASUREMENT_MODE, static_cast(measurementMode)); + if (retval == Result::OK) { + mMeasurementMode = measurementMode; + } + return retval; +} + +Return VisualizerEffect::getMeasurementMode(getMeasurementMode_cb _hidl_cb) { + int32_t halMode; + Result retval = mEffect->getParam(VISUALIZER_PARAM_MEASUREMENT_MODE, halMode); + _hidl_cb(retval, MeasurementMode(halMode)); + return Void(); +} + +Return VisualizerEffect::capture(capture_cb _hidl_cb) { + if (mCaptureSize == 0) { + _hidl_cb(Result::NOT_INITIALIZED, hidl_vec()); + return Void(); + } + uint32_t halCaptureSize = mCaptureSize; + uint8_t halCapture[mCaptureSize]; + Result retval = mEffect->sendCommandReturningData(VISUALIZER_CMD_CAPTURE, "VISUALIZER_CAPTURE", + &halCaptureSize, halCapture); + hidl_vec capture; + if (retval == Result::OK) { + capture.setToExternal(&halCapture[0], halCaptureSize); + } + _hidl_cb(retval, capture); + return Void(); +} + +Return VisualizerEffect::measure(measure_cb _hidl_cb) { + if (mMeasurementMode == MeasurementMode::NONE) { + _hidl_cb(Result::NOT_INITIALIZED, Measurement()); + return Void(); + } + int32_t halMeasurement[MEASUREMENT_COUNT]; + uint32_t halMeasurementSize = sizeof(halMeasurement); + Result retval = mEffect->sendCommandReturningData(VISUALIZER_CMD_MEASURE, "VISUALIZER_MEASURE", + &halMeasurementSize, halMeasurement); + Measurement measurement = {.mode = MeasurementMode::PEAK_RMS}; + measurement.value.peakAndRms.peakMb = 0; + measurement.value.peakAndRms.rmsMb = 0; + if (retval == Result::OK) { + measurement.value.peakAndRms.peakMb = halMeasurement[MEASUREMENT_IDX_PEAK]; + measurement.value.peakAndRms.rmsMb = halMeasurement[MEASUREMENT_IDX_RMS]; + } + _hidl_cb(retval, measurement); + return Void(); +} + +} // namespace implementation +} // namespace 
AUDIO_HAL_VERSION
+} // namespace effect
+} // namespace audio
+} // namespace hardware
+} // namespace android
diff --git a/authsecret/1.0/Android.bp b/authsecret/1.0/Android.bp
new file mode 100644
index 0000000000000000000000000000000000000000..9cde99a5fc795de5bb2344e3abad0fc32f14fc6f
--- /dev/null
+++ b/authsecret/1.0/Android.bp
@@ -0,0 +1,17 @@
+// This file is autogenerated by hidl-gen -Landroidbp.
+
+hidl_interface {
+    name: "android.hardware.authsecret@1.0",
+    root: "android.hardware",
+    vndk: {
+        enabled: true,
+    },
+    srcs: [
+        "IAuthSecret.hal",
+    ],
+    interfaces: [
+        "android.hidl.base@1.0",
+    ],
+    gen_java: true,
+}
+
diff --git a/authsecret/1.0/IAuthSecret.hal b/authsecret/1.0/IAuthSecret.hal
new file mode 100644
index 0000000000000000000000000000000000000000..9a0fd5fd5a894e14b8e0a34e0f7c0c262dba3ea2
--- /dev/null
+++ b/authsecret/1.0/IAuthSecret.hal
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.authsecret@1.0;
+
+/**
+ * This security HAL allows vendor components to be cryptographically tied to
+ * the primary user's credential. For example, security hardware could require
+ * proof that the credential is known before applying updates.
+ *
+ * This HAL is optional, so it does not require an implementation on device.
+ */
+interface IAuthSecret {
+    /**
+     * When the primary user is unlocked, this method is passed a secret to
+     * prove that it has been successfully unlocked. The primary user can either
+     * be unlocked by a person entering their credential or by another party
+     * using an escrow token, e.g. a device administrator.
+     *
+     * The first time this is called, the secret must be used to provision state
+     * that depends on the primary user's secret. The same secret must be passed
+     * on each call until the next factory reset.
+     *
+     * Upon factory reset, any dependence on the secret must be removed as that
+     * secret is now lost and must never be derived again. A new secret must be
+     * created for the new primary user and used to provision state anew the
+     * first time this method is called after factory reset.
+     *
+     * The secret must be at least 16 bytes.
+     *
+     * @param secret blob derived from the primary user's credential.
+     */
+    oneway primaryUserCredential(vec<uint8_t> secret);
+};
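For orientation, a framework-side caller would obtain this HAL and forward the secret roughly as sketched below. This is only an illustrative sketch and is not part of the change; the function name and the surrounding wrapper are assumptions made for the example.

// Illustrative sketch only (not part of this change): a framework-side caller
// fetching the AuthSecret HAL and passing the primary user's secret to it.
#include <android/hardware/authsecret/1.0/IAuthSecret.h>

using ::android::sp;
using ::android::hardware::hidl_vec;
using ::android::hardware::authsecret::V1_0::IAuthSecret;

void forwardPrimaryUserSecret(const hidl_vec<uint8_t>& secret) {
    // getService() returns nullptr when no vendor implementation is installed,
    // which is allowed because this HAL is optional.
    sp<IAuthSecret> hal = IAuthSecret::getService();
    if (hal != nullptr) {
        // The interface requires the secret to be at least 16 bytes.
        hal->primaryUserCredential(secret);
    }
}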
diff --git a/authsecret/1.0/default/Android.bp b/authsecret/1.0/default/Android.bp
new file mode 100644
index 0000000000000000000000000000000000000000..5c3234fc710ca15d86fec39ad602c798700fc859
--- /dev/null
+++ b/authsecret/1.0/default/Android.bp
@@ -0,0 +1,21 @@
+cc_binary {
+    name: "android.hardware.authsecret@1.0-service",
+    init_rc: ["android.hardware.authsecret@1.0-service.rc"],
+    relative_install_path: "hw",
+    vendor: true,
+    srcs: [
+        "service.cpp",
+        "AuthSecret.cpp",
+    ],
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+    shared_libs: [
+        "libhidlbase",
+        "libhidltransport",
+        "liblog",
+        "libutils",
+        "android.hardware.authsecret@1.0",
+    ],
+}
diff --git a/authsecret/1.0/default/AuthSecret.cpp b/authsecret/1.0/default/AuthSecret.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..f9271e9617845c77c1d0df0f050bf6610aaf07a7
--- /dev/null
+++ b/authsecret/1.0/default/AuthSecret.cpp
@@ -0,0 +1,43 @@
+#include "AuthSecret.h"
+
+namespace android {
+namespace hardware {
+namespace authsecret {
+namespace V1_0 {
+namespace implementation {
+
+// Methods from ::android::hardware::authsecret::V1_0::IAuthSecret follow.
+Return<void> AuthSecret::primaryUserCredential(const hidl_vec<uint8_t>& secret) {
+    (void)secret;
+
+    // To create a dependency on the credential, it is recommended to derive a
+    // different value from the provided secret for each purpose, e.g.
+    //
+    //     purpose1_secret = hash( "purpose1" || secret )
+    //     purpose2_secret = hash( "purpose2" || secret )
+    //
+    // The derived values can then be used as cryptographic keys or stored
+    // securely for comparison in a future call.
+    //
+    // For example, a security module might require that the credential has been
+    // entered before it applies any updates. This can be achieved by storing a
+    // derived value in the module and only applying updates when the same
+    // derived value is presented again.
+    //
+    // This implementation does nothing.
+
+    return Void();
+}
+
+// Note: on factory reset, clear all dependency on the secret.
+//
+// With the example of updating a security module, the stored value must be
+// cleared so that the new primary user is enrolled as the approver of updates.
+//
+// This implementation does nothing as there is no dependence on the secret.
+
+} // namespace implementation
+} // namespace V1_0
+} // namespace authsecret
+} // namespace hardware
+} // namespace android
diff --git a/authsecret/1.0/default/AuthSecret.h b/authsecret/1.0/default/AuthSecret.h
new file mode 100644
index 0000000000000000000000000000000000000000..387fa670088c97ad4e8e1790bb0012fb71201631
--- /dev/null
+++ b/authsecret/1.0/default/AuthSecret.h
@@ -0,0 +1,35 @@
+#ifndef ANDROID_HARDWARE_AUTHSECRET_V1_0_AUTHSECRET_H
+#define ANDROID_HARDWARE_AUTHSECRET_V1_0_AUTHSECRET_H
+
+#include <android/hardware/authsecret/1.0/IAuthSecret.h>
+#include <hidl/MQDescriptor.h>
+#include <hidl/Status.h>
+
+namespace android {
+namespace hardware {
+namespace authsecret {
+namespace V1_0 {
+namespace implementation {
+
+using ::android::hardware::hidl_array;
+using ::android::hardware::hidl_memory;
+using ::android::hardware::hidl_string;
+using ::android::hardware::hidl_vec;
+using ::android::hardware::Return;
+using ::android::hardware::Void;
+using ::android::sp;
+
+struct AuthSecret : public IAuthSecret {
+    // Methods from ::android::hardware::authsecret::V1_0::IAuthSecret follow.
+    Return<void> primaryUserCredential(const hidl_vec<uint8_t>& secret) override;
+
+    // Methods from ::android::hidl::base::V1_0::IBase follow.
+};
+
+} // namespace implementation
+} // namespace V1_0
+} // namespace authsecret
+} // namespace hardware
+} // namespace android
+
+#endif // ANDROID_HARDWARE_AUTHSECRET_V1_0_AUTHSECRET_H
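To make the derivation scheme described in the AuthSecret.cpp comment concrete, a real vendor implementation might hash a purpose label together with the secret and compare the result against a value it stored on first provisioning. The sketch below is not part of this change; the choice of BoringSSL's SHA256 and the purpose label are assumptions for illustration only.

// Illustrative sketch only (not part of this change): derive a purpose-specific
// value from the primary user's secret, as suggested in the comment above.
#include <openssl/sha.h>

#include <string>
#include <vector>

static std::vector<uint8_t> derivePurposeSecret(const std::string& purpose,
                                                const std::vector<uint8_t>& secret) {
    // purposeN_secret = hash( "purposeN" || secret )
    std::vector<uint8_t> input(purpose.begin(), purpose.end());
    input.insert(input.end(), secret.begin(), secret.end());
    std::vector<uint8_t> digest(SHA256_DIGEST_LENGTH);
    SHA256(input.data(), input.size(), digest.data());
    return digest;
}
// A security module could store derivePurposeSecret("update_approval", secret)
// on the first call and later only apply updates when the same value is
// presented again.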
diff --git a/authsecret/1.0/default/android.hardware.authsecret@1.0-service.rc b/authsecret/1.0/default/android.hardware.authsecret@1.0-service.rc
new file mode 100644
index 0000000000000000000000000000000000000000..e82da7eef846b789c0ba62c06e22d00f42dfc4b2
--- /dev/null
+++ b/authsecret/1.0/default/android.hardware.authsecret@1.0-service.rc
@@ -0,0 +1,4 @@
+service vendor.authsecret-1-0 /vendor/bin/hw/android.hardware.authsecret@1.0-service
+    class hal
+    user system
+    group system
diff --git a/authsecret/1.0/default/service.cpp b/authsecret/1.0/default/service.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..4acd16c72a499b67f842ef2429501d6003023f24
--- /dev/null
+++ b/authsecret/1.0/default/service.cpp
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "android.hardware.authsecret@1.0-service"
+
+#include
+#include
+
+#include "AuthSecret.h"
+
+using android::hardware::configureRpcThreadpool;
+using android::hardware::joinRpcThreadpool;
+using android::hardware::authsecret::V1_0::IAuthSecret;
+using android::hardware::authsecret::V1_0::implementation::AuthSecret;
+using android::sp;
+using android::status_t;
+using android::OK;
+
+int main() {
+    configureRpcThreadpool(1, true);
+
+    sp<IAuthSecret> authSecret = new AuthSecret;
+    status_t status = authSecret->registerAsService();
+    LOG_ALWAYS_FATAL_IF(status != OK, "Could not register IAuthSecret");
+
+    joinRpcThreadpool();
+    return 0;
+}
diff --git a/authsecret/1.0/vts/functional/Android.bp b/authsecret/1.0/vts/functional/Android.bp
new file mode 100644
index 0000000000000000000000000000000000000000..de9f560ec693d2635c4d404bf22f5b75cec90fbc
--- /dev/null
+++ b/authsecret/1.0/vts/functional/Android.bp
@@ -0,0 +1,22 @@
+//
+// Copyright (C) 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+cc_test {
+    name: "VtsHalAuthSecretV1_0TargetTest",
+    defaults: ["VtsHalTargetTestDefaults"],
+    srcs: ["VtsHalAuthSecretV1_0TargetTest.cpp"],
+    static_libs: ["android.hardware.authsecret@1.0"],
+}
diff --git a/authsecret/1.0/vts/functional/VtsHalAuthSecretV1_0TargetTest.cpp b/authsecret/1.0/vts/functional/VtsHalAuthSecretV1_0TargetTest.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..255d4de3f5a697856a079b104b3425c2162fcf4b
--- /dev/null
+++ b/authsecret/1.0/vts/functional/VtsHalAuthSecretV1_0TargetTest.cpp
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android/hardware/authsecret/1.0/IAuthSecret.h>
+
+#include <VtsHalHidlTargetTestBase.h>
+#include <VtsHalHidlTargetTestEnvBase.h>
+
+using ::android::hardware::hidl_vec;
+using ::android::hardware::authsecret::V1_0::IAuthSecret;
+using ::android::sp;
+
+// Test environment for AuthSecret HIDL HAL.
+class AuthSecretHidlEnvironment : public ::testing::VtsHalHidlTargetTestEnvBase {
+ public:
+    // get the test environment singleton
+    static AuthSecretHidlEnvironment* Instance() {
+        static AuthSecretHidlEnvironment* instance = new AuthSecretHidlEnvironment;
+        return instance;
+    }
+
+    virtual void registerTestServices() override { registerTestService<IAuthSecret>(); }
+
+ private:
+    AuthSecretHidlEnvironment() {}
+};
+
+/**
+ * There is no expected behaviour that can be tested, so these tests check that
+ * the HAL doesn't crash with different execution orders.
+ */
+struct AuthSecretHidlTest : public ::testing::VtsHalHidlTargetTestBase {
+    virtual void SetUp() override {
+        authsecret = ::testing::VtsHalHidlTargetTestBase::getService<IAuthSecret>(
+            AuthSecretHidlEnvironment::Instance()->getServiceName<IAuthSecret>());
+        ASSERT_NE(authsecret, nullptr);
+
+        // All tests must enroll the correct secret first, as this cannot be changed
+        // without a factory reset and the order of tests could change.
+        authsecret->primaryUserCredential(CORRECT_SECRET);
+    }
+
+    sp<IAuthSecret> authsecret;
+    hidl_vec<uint8_t> CORRECT_SECRET{61, 93, 124, 240, 5, 0, 7, 201, 9, 129, 11, 12, 0, 14, 0, 16};
+    hidl_vec<uint8_t> WRONG_SECRET{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
+};
+
+/* Provision the primary user with a secret. */
+TEST_F(AuthSecretHidlTest, provisionPrimaryUserCredential) {
+    // Secret provisioned by SetUp()
+}
+
+/* Provision the primary user with a secret and pass the secret again. */
+TEST_F(AuthSecretHidlTest, provisionPrimaryUserCredentialAndPassAgain) {
+    // Secret provisioned by SetUp()
+    authsecret->primaryUserCredential(CORRECT_SECRET);
+}
+
+/* Provision the primary user with a secret and pass the secret again repeatedly. */
+TEST_F(AuthSecretHidlTest, provisionPrimaryUserCredentialAndPassAgainMultipleTimes) {
+    // Secret provisioned by SetUp()
+    constexpr int N = 5;
+    for (int i = 0; i < N; ++i) {
+        authsecret->primaryUserCredential(CORRECT_SECRET);
+    }
+}
+
+/* Provision the primary user with a secret and then pass the wrong secret. This
+ * should never happen and is a framework bug if it does. As the secret is
+ * wrong, the HAL implementation may not be able to function correctly but it
+ * should fail gracefully. */
+TEST_F(AuthSecretHidlTest, provisionPrimaryUserCredentialAndWrongSecret) {
+    // Secret provisioned by SetUp()
+    authsecret->primaryUserCredential(WRONG_SECRET);
+}
+
+int main(int argc, char** argv) {
+    ::testing::AddGlobalTestEnvironment(AuthSecretHidlEnvironment::Instance());
+    ::testing::InitGoogleTest(&argc, argv);
+    AuthSecretHidlEnvironment::Instance()->init(&argc, argv);
+    int status = RUN_ALL_TESTS();
+    ALOGI("Test result = %d", status);
+    return status;
+}
diff --git a/automotive/README.md b/automotive/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..341a1bb400f130acb3b730cfbee5693008a340d8
--- /dev/null
+++ b/automotive/README.md
@@ -0,0 +1,10 @@
+## Automotive HALs ##
+---
+
+## Overview: ##
+
+The automotive HAL tree is used by Android Automotive to discover and
+operate hardware specific to a car.
+
+The HALs are not (yet) frozen, as the HAL definition is expected to evolve
+between Android releases.
diff --git a/automotive/audiocontrol/1.0/Android.bp b/automotive/audiocontrol/1.0/Android.bp
new file mode 100644
index 0000000000000000000000000000000000000000..9335a6c4621d445502ea3872639ee89989aef135
--- /dev/null
+++ b/automotive/audiocontrol/1.0/Android.bp
@@ -0,0 +1,21 @@
+// This file is autogenerated by hidl-gen -Landroidbp.
+
+hidl_interface {
+    name: "android.hardware.automotive.audiocontrol@1.0",
+    root: "android.hardware",
+    vndk: {
+        enabled: true,
+    },
+    srcs: [
+        "types.hal",
+        "IAudioControl.hal",
+    ],
+    interfaces: [
+        "android.hidl.base@1.0",
+    ],
+    types: [
+        "ContextNumber",
+    ],
+    gen_java: true,
+}
+
diff --git a/automotive/audiocontrol/1.0/IAudioControl.hal b/automotive/audiocontrol/1.0/IAudioControl.hal
new file mode 100644
index 0000000000000000000000000000000000000000..3c8b086bc67f9721e9529a79298d461462fd797a
--- /dev/null
+++ b/automotive/audiocontrol/1.0/IAudioControl.hal
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.automotive.audiocontrol@1.0;
+
+
+/**
+ * Interacts with the car's audio subsystem to manage audio sources and volumes
+ */
+interface IAudioControl {
+
+    /**
+     * Called at startup once per context to get the mapping from ContextNumber to
+     * busAddress. This lets the car tell the framework to which physical output stream
+     * each context should be routed.
+     *
+     * For every context, a valid bus number (0 - num busses-1) must be returned. If an
+     * unrecognized contextNumber is encountered, then -1 shall be returned.
+     */
+    getBusForContext(ContextNumber contextNumber)
+        generates (int32_t busNumber);
+
+
+    /**
+     * Control the right/left balance setting of the car speakers.
+     *
+     * This is intended to shift the speaker volume toward the right (+) or left (-) side of
+     * the car. 0.0 means "centered". +1.0 means fully right. -1.0 means fully left.
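+     *
+     * For example (illustrative only): setBalanceTowardRight(0.5) shifts volume halfway
+     * toward the right-side speakers, while setBalanceTowardRight(-1.0) plays only
+     * through the left-side speakers.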
+ * + * A value outside the range -1 to 1 must be clamped by the implementation to the -1 to 1 + * range. + */ + oneway setBalanceTowardRight(float value); + + + /** + * Control the fore/aft fade setting of the car speakers. + * + * This is intended to shift the speaker volume toward the front (+) or back (-) of the car. + * 0.0 means "centered". +1.0 means fully forward. -1.0 means fully rearward. + * + * A value outside the range -1 to 1 must be clamped by the implementation to the -1 to 1 + * range. + */ + oneway setFadeTowardFront(float value); +}; + diff --git a/automotive/audiocontrol/1.0/default/Android.bp b/automotive/audiocontrol/1.0/default/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..0e074dd869d3608d05a925a75df01a7c413de00e --- /dev/null +++ b/automotive/audiocontrol/1.0/default/Android.bp @@ -0,0 +1,39 @@ +// Copyright (C) 2017 The Android Open Source Project +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +cc_binary { + name: "android.hardware.automotive.audiocontrol@1.0-service", + defaults: ["hidl_defaults"], + vendor: true, + relative_install_path: "hw", + srcs: [ + "AudioControl.cpp", + "service.cpp" + ], + init_rc: ["android.hardware.automotive.audiocontrol@1.0-service.rc"], + + shared_libs: [ + "android.hardware.automotive.audiocontrol@1.0", + "libhidlbase", + "libhidltransport", + "liblog", + "libutils", + ], + + cflags: [ + "-DLOG_TAG=\"AudCntrlDrv\"", + "-O0", + "-g", + ], +} diff --git a/automotive/audiocontrol/1.0/default/AudioControl.cpp b/automotive/audiocontrol/1.0/default/AudioControl.cpp new file mode 100644 index 0000000000000000000000000000000000000000..c96580e160a311f2300093ce8e61267e1c913380 --- /dev/null +++ b/automotive/audiocontrol/1.0/default/AudioControl.cpp @@ -0,0 +1,78 @@ +#include "AudioControl.h" + +#include +#include + + +namespace android { +namespace hardware { +namespace automotive { +namespace audiocontrol { +namespace V1_0 { +namespace implementation { + + +// This is the static map we're using to associate a ContextNumber with a +// bus number from the audio_policy_configuration.xml setup. Every valid context needs +// to be mapped to a bus address that actually exists in the platforms configuration. +#define ARRAY_SIZE(a) (sizeof(a) / sizeof(*a)) // Would be nice if this were common... 
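+
+// Note: the IAudioControl.hal contract requires out-of-range balance/fade values to be
+// clamped to [-1, 1]. The default implementation below only logs such values; a real
+// implementation could clamp them before applying, e.g. with a small helper along these
+// lines (an illustrative sketch only, not referenced elsewhere in this file):
+static inline float clampToUnitRange(float value) {
+    return (value > 1.0f) ? 1.0f : ((value < -1.0f) ? -1.0f : value);
+}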
+static int sContextToBusMap[] = { + -1, // INVALID + 0, // MUSIC_CONTEXT + 1, // NAVIGATION_CONTEXT + 2, // VOICE_COMMAND_CONTEXT + 3, // CALL_RING_CONTEXT + 4, // CALL_CONTEXT + 5, // ALARM_CONTEXT + 6, // NOTIFICATION_CONTEXT + 7, // SYSTEM_SOUND_CONTEXT +}; +static const unsigned sContextMapSize = ARRAY_SIZE(sContextToBusMap); +static const unsigned sContextCount = sContextMapSize - 1; // Less one for the INVALID entry +static const unsigned sContextNumberMax = sContextCount; // contextNumber is counted from 1 + + +AudioControl::AudioControl() { +}; + + +Return AudioControl::getBusForContext(ContextNumber ctxt) { + unsigned contextNumber = static_cast(ctxt); + if (contextNumber > sContextNumberMax) { + ALOGE("Unexpected context number %d (max expected is %d)", contextNumber, sContextCount); + return -1; + } else { + return sContextToBusMap[contextNumber]; + } +} + + +Return AudioControl::setBalanceTowardRight(float value) { + // For completeness, lets bounds check the input... + if ((value > 1.0f) || (value < -1.0f)) { + ALOGE("Balance value out of range -1 to 1 at %0.2f", value); + } else { + // Just log in this default mock implementation + ALOGI("Balance set to %0.2f", value); + } + return Void(); +} + + +Return AudioControl::setFadeTowardFront(float value) { + // For completeness, lets bounds check the input... + if ((value > 1.0f) || (value < -1.0f)) { + ALOGE("Fader value out of range -1 to 1 at %0.2f", value); + } else { + // Just log in this default mock implementation + ALOGI("Fader set to %0.2f", value); + } + return Void(); +} + +} // namespace implementation +} // namespace V1_0 +} // namespace audiocontrol +} // namespace automotive +} // namespace hardware +} // namespace android diff --git a/automotive/audiocontrol/1.0/default/AudioControl.h b/automotive/audiocontrol/1.0/default/AudioControl.h new file mode 100644 index 0000000000000000000000000000000000000000..37f43c69426905c1d27d2a4e01c9d08edcabb8af --- /dev/null +++ b/automotive/audiocontrol/1.0/default/AudioControl.h @@ -0,0 +1,41 @@ +#ifndef ANDROID_HARDWARE_AUTOMOTIVE_AUDIOCONTROL_V1_0_AUDIOCONTROL_H +#define ANDROID_HARDWARE_AUTOMOTIVE_AUDIOCONTROL_V1_0_AUDIOCONTROL_H + +#include +#include +#include + +namespace android { +namespace hardware { +namespace automotive { +namespace audiocontrol { +namespace V1_0 { +namespace implementation { + +using ::android::hardware::hidl_array; +using ::android::hardware::hidl_memory; +using ::android::hardware::hidl_string; +using ::android::hardware::hidl_vec; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::sp; + +struct AudioControl : public IAudioControl { +public: + // Methods from ::android::hardware::automotive::audiocontrol::V1_0::IAudioControl follow. 
+ Return getBusForContext(ContextNumber contextNumber) override; + Return setBalanceTowardRight(float value) override; + Return setFadeTowardFront(float value) override; + + // Implementation details + AudioControl(); +}; + +} // namespace implementation +} // namespace V1_0 +} // namespace audiocontrol +} // namespace automotive +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_AUTOMOTIVE_AUDIOCONTROL_V1_0_AUDIOCONTROL_H diff --git a/automotive/audiocontrol/1.0/default/android.hardware.automotive.audiocontrol@1.0-service.rc b/automotive/audiocontrol/1.0/default/android.hardware.automotive.audiocontrol@1.0-service.rc new file mode 100644 index 0000000000000000000000000000000000000000..c02db08523cc10994beb0a31af29d2366e2b05d4 --- /dev/null +++ b/automotive/audiocontrol/1.0/default/android.hardware.automotive.audiocontrol@1.0-service.rc @@ -0,0 +1,4 @@ +service vendor.audiocontrol-hal-1.0 /vendor/bin/hw/android.hardware.automotive.audiocontrol@1.0-service + class hal + user audioserver + group system diff --git a/automotive/audiocontrol/1.0/default/service.cpp b/automotive/audiocontrol/1.0/default/service.cpp new file mode 100644 index 0000000000000000000000000000000000000000..a033fd909f68b668cc3a2d4fe35c692074fb85de --- /dev/null +++ b/automotive/audiocontrol/1.0/default/service.cpp @@ -0,0 +1,54 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include + +#include +#include +#include +#include + +#include "AudioControl.h" + + +// libhidl: +using android::hardware::configureRpcThreadpool; +using android::hardware::joinRpcThreadpool; + +// Generated HIDL files +using android::hardware::automotive::audiocontrol::V1_0::IAudioControl; + +// The namespace in which all our implementation code lives +using namespace android::hardware::automotive::audiocontrol::V1_0::implementation; +using namespace android; + + +// Main service entry point +int main() { + // Create an instance of our service class + android::sp service = new AudioControl(); + configureRpcThreadpool(1, true /*callerWillJoin*/); + + if (service->registerAsService() != OK) { + ALOGE("registerAsService failed"); + return 1; + } + + // Join (forever) the thread pool we created for the service above + joinRpcThreadpool(); + + // We don't ever actually expect to return, so return an error if we do get here + return 2; +} \ No newline at end of file diff --git a/automotive/audiocontrol/1.0/types.hal b/automotive/audiocontrol/1.0/types.hal new file mode 100644 index 0000000000000000000000000000000000000000..04d8d358c40132e0e77266ebcd2bfaf9b9b02854 --- /dev/null +++ b/automotive/audiocontrol/1.0/types.hal @@ -0,0 +1,35 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.automotive.audiocontrol@1.0; + + +/** + * Predefined flags to identifying audio contexts + */ +enum ContextNumber : uint32_t { + INVALID = 0, /* Shouldn't be used */ + + // Sounds from Android (counting from 1 coincidentally lets us match AudioAttributes usages) + MUSIC, /* Music playback */ + NAVIGATION, /* Navigation directions */ + VOICE_COMMAND, /* Voice command session */ + CALL_RING, /* Voice call ringing */ + CALL, /* Voice call */ + ALARM, /* Alarm sound from Android */ + NOTIFICATION, /* Notifications */ + SYSTEM_SOUND, /* User interaction sounds (button clicks, etc) */ +}; diff --git a/automotive/audiocontrol/1.0/vts/functional/Android.bp b/automotive/audiocontrol/1.0/vts/functional/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..c6e0d8ea8120512b7c0b9672a05580ffc03171ec --- /dev/null +++ b/automotive/audiocontrol/1.0/vts/functional/Android.bp @@ -0,0 +1,31 @@ +// +// Copyright (C) 2016 The Android Open Source Project +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +cc_test { + name: "VtsHalAudioControlV1_0TargetTest", + + srcs: [ + "VtsHalAudioControlV1_0TargetTest.cpp", + ], + + defaults: [ + "VtsHalTargetTestDefaults", + ], + + static_libs: [ + "android.hardware.automotive.audiocontrol@1.0", + ], +} diff --git a/automotive/audiocontrol/1.0/vts/functional/VtsHalAudioControlV1_0TargetTest.cpp b/automotive/audiocontrol/1.0/vts/functional/VtsHalAudioControlV1_0TargetTest.cpp new file mode 100644 index 0000000000000000000000000000000000000000..fc0deb94c36e5f54d3b0260b692fdc345536a3a8 --- /dev/null +++ b/automotive/audiocontrol/1.0/vts/functional/VtsHalAudioControlV1_0TargetTest.cpp @@ -0,0 +1,158 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+#define LOG_TAG "VtsHalAudioControlTest"
+
+#include
+#include
+
+#include
+#include
+#include
+#include
+#include
+
+#include
+#include
+#include
+
+#include
+
+using namespace ::android::hardware::automotive::audiocontrol::V1_0;
+using ::android::hardware::Return;
+using ::android::hardware::Void;
+using ::android::hardware::hidl_enum_range;
+using ::android::hardware::hidl_handle;
+using ::android::hardware::hidl_string;
+using ::android::hardware::hidl_vec;
+using ::android::sp;
+
+
+// Boilerplate for the test harness
+class CarAudioControlHidlEnvironment : public ::testing::VtsHalHidlTargetTestEnvBase {
+  public:
+    // get the test environment singleton
+    static CarAudioControlHidlEnvironment* Instance() {
+        static CarAudioControlHidlEnvironment* instance = new CarAudioControlHidlEnvironment;
+        return instance;
+    }
+
+    virtual void registerTestServices() override { registerTestService(); }
+  private:
+    CarAudioControlHidlEnvironment() {}
+};
+
+
+// The main test class for the automotive AudioControl HAL
+class CarAudioControlHidlTest : public ::testing::VtsHalHidlTargetTestBase {
+public:
+    virtual void SetUp() override {
+        // Make sure we can connect to the driver
+        pAudioControl = ::testing::VtsHalHidlTargetTestBase::getService(
+            CarAudioControlHidlEnvironment::Instance()->
+            getServiceName());
+        ASSERT_NE(pAudioControl.get(), nullptr);
+    }
+
+    virtual void TearDown() override {}
+
+  protected:
+    sp pAudioControl;  // Every test needs access to the service
+};
+
+//
+// Tests start here...
+//
+
+/*
+ * Fader exercise test.  Note that only a subjective observer could determine if the
+ * fader actually works.  The only thing we can do is exercise the HAL and if the HAL crashes,
+ * we _might_ get a test failure if that breaks the connection to the driver.
+ */
+TEST_F(CarAudioControlHidlTest, FaderExercise) {
+    ALOGI("Fader exercise test (silent)");
+
+    // Set the fader all the way to the back
+    pAudioControl->setFadeTowardFront(-1.0f);
+
+    // Set the fader all the way to the front
+    pAudioControl->setFadeTowardFront(1.0f);
+
+    // Set the fader part way toward the back
+    pAudioControl->setFadeTowardFront(-0.333f);
+
+    // Set the fader to an out-of-bounds value (driver should clamp)
+    pAudioControl->setFadeTowardFront(99999.9f);
+
+    // Set the fader back to the middle
+    pAudioControl->setFadeTowardFront(0.0f);
+}
+
+/*
+ * Balance exercise test.
+ */
+TEST_F(CarAudioControlHidlTest, BalanceExercise) {
+    ALOGI("Balance exercise test (silent)");
+
+    // Set the balance all the way to the left
+    pAudioControl->setBalanceTowardRight(-1.0f);
+
+    // Set the balance all the way to the right
+    pAudioControl->setBalanceTowardRight(1.0f);
+
+    // Set the balance part way toward the left
+    pAudioControl->setBalanceTowardRight(-0.333f);
+
+    // Set the balance to an out-of-bounds value (driver should clamp)
+    pAudioControl->setBalanceTowardRight(99999.9f);
+
+    // Set the balance back to the middle
+    pAudioControl->setBalanceTowardRight(0.0f);
+}
+
+/*
+ * Context mapping test.
+ */
+TEST_F(CarAudioControlHidlTest, ContextMapping) {
+    ALOGI("Context mapping test");
+
+    int bus = -1;
+
+    // For each defined context, query the driver for the BUS on which it should be delivered
+    for (const auto& ctxt : hidl_enum_range()) {
+        bus = pAudioControl->getBusForContext(ctxt);
+
+        if (ctxt == ContextNumber::INVALID) {
+            // Invalid context should never be mapped to a bus
+            EXPECT_EQ(bus, -1);
+        } else {
+            EXPECT_GE(bus, 0);
+            // TODO: Consider enumerating the devices on the actual audio hal to validate the
+            // bus IDs.  This would introduce a dependency on the audio HAL, however.  Would that
+            // even work while Android is up and running?
+        }
+    }
+
+    // Try asking about an invalid context one beyond the last defined to see that it gets back a -1
+    int contextRange = std::distance(hidl_enum_range().begin(),
+                                     hidl_enum_range().end());
+    bus = pAudioControl->getBusForContext((ContextNumber)contextRange);
+    EXPECT_EQ(bus, -1);
+
+    // Try asking about an invalid context WAY out of range to see that it gets back a -1
+    bus = pAudioControl->getBusForContext((ContextNumber)~0);
+    EXPECT_EQ(bus, -1);
+}
diff --git a/automotive/evs/1.0/default/Android.bp b/automotive/evs/1.0/default/Android.bp
index 2574e860b236a1763cf7d408c44c366b35e1bb2e..7286478a84d399b2f6ffc474b760e83109db31c0 100644
--- a/automotive/evs/1.0/default/Android.bp
+++ b/automotive/evs/1.0/default/Android.bp
@@ -13,7 +13,6 @@ cc_binary {
 
     shared_libs: [
         "android.hardware.automotive.evs@1.0",
-        "libui",
         "libbase",
        "libbinder",
         "libcutils",
@@ -21,6 +20,7 @@ cc_binary {
         "libhidlbase",
         "libhidltransport",
         "liblog",
+        "libui",
         "libutils",
     ],
 
diff --git a/automotive/evs/1.0/default/ServiceNames.h b/automotive/evs/1.0/default/ServiceNames.h
index d20a37f9757e46c7a916fb66db725dfdba1673f0..1178da5a9c01b44dd1c044bdb06d34d3bc48d596 100644
--- a/automotive/evs/1.0/default/ServiceNames.h
+++ b/automotive/evs/1.0/default/ServiceNames.h
@@ -14,4 +14,4 @@
  * limitations under the License.
*/ -const static char kEnumeratorServiceName[] = "EvsEnumeratorHw-Mock"; +const static char kEnumeratorServiceName[] = "EvsEnumeratorHw"; diff --git a/automotive/evs/1.0/default/android.hardware.automotive.evs@1.0-service.rc b/automotive/evs/1.0/default/android.hardware.automotive.evs@1.0-service.rc index 16d521d71875ca3de56c99116f6c27d15a9caff0..117c249a51a1d3fd190c8a70206cea4be5f33813 100644 --- a/automotive/evs/1.0/default/android.hardware.automotive.evs@1.0-service.rc +++ b/automotive/evs/1.0/default/android.hardware.automotive.evs@1.0-service.rc @@ -1,4 +1,4 @@ -service evs-hal-mock /vendor/bin/hw/android.hardware.automotive.evs@1.0-service +service vendor.evs-hal-mock /vendor/bin/hw/android.hardware.automotive.evs@1.0-service class hal user automotive_evs group automotive_evs diff --git a/automotive/vehicle/2.0/Android.bp b/automotive/vehicle/2.0/Android.bp index 3441a25ea284162a34f1b066e359f9a31cf86c65..6af774e46757383b7d3f009231ae15cd898dd4f6 100644 --- a/automotive/vehicle/2.0/Android.bp +++ b/automotive/vehicle/2.0/Android.bp @@ -17,6 +17,8 @@ hidl_interface { types: [ "DiagnosticFloatSensorIndex", "DiagnosticIntegerSensorIndex", + "EvConnectorType", + "FuelType", "Obd2CommonIgnitionMonitors", "Obd2CompressionIgnitionMonitors", "Obd2FuelSystemStatus", @@ -24,51 +26,39 @@ hidl_interface { "Obd2IgnitionMonitorKind", "Obd2SecondaryAirStatus", "Obd2SparkIgnitionMonitors", + "PortLocationType", "StatusCode", "SubscribeFlags", "SubscribeOptions", "VehicleApPowerBootupReason", - "VehicleApPowerSetState", - "VehicleApPowerState", "VehicleApPowerStateConfigFlag", - "VehicleApPowerStateIndex", + "VehicleApPowerStateReport", + "VehicleApPowerStateReq", + "VehicleApPowerStateReqIndex", "VehicleApPowerStateShutdownParam", "VehicleArea", "VehicleAreaConfig", "VehicleAreaDoor", "VehicleAreaMirror", "VehicleAreaSeat", + "VehicleAreaWheel", "VehicleAreaWindow", - "VehicleAreaZone", - "VehicleAudioContextFlag", - "VehicleAudioExtFocusFlag", - "VehicleAudioFocusIndex", - "VehicleAudioFocusRequest", - "VehicleAudioFocusState", - "VehicleAudioHwVariantConfigFlag", - "VehicleAudioRoutingPolicyIndex", - "VehicleAudioStream", - "VehicleAudioStreamFlag", - "VehicleAudioVolumeCapabilityFlag", - "VehicleAudioVolumeIndex", - "VehicleAudioVolumeLimitIndex", - "VehicleAudioVolumeState", "VehicleDisplay", - "VehicleDrivingStatus", "VehicleGear", "VehicleHvacFanDirection", "VehicleHwKeyInputAction", "VehicleIgnitionState", - "VehicleInstrumentClusterType", + "VehicleLightState", + "VehicleLightSwitch", + "VehicleOilLevel", "VehiclePropConfig", "VehiclePropValue", "VehicleProperty", "VehiclePropertyAccess", "VehiclePropertyChangeMode", "VehiclePropertyGroup", - "VehiclePropertyOperation", + "VehiclePropertyStatus", "VehiclePropertyType", - "VehicleRadioConstants", "VehicleTurnSignal", "VehicleUnit", "VmsAvailabilityStateIntegerValuesIndex", @@ -77,8 +67,8 @@ hidl_interface { "VmsMessageWithLayerAndPublisherIdIntegerValuesIndex", "VmsMessageWithLayerIntegerValuesIndex", "VmsOfferingMessageIntegerValuesIndex", + "VmsPublisherInformationIntegerValuesIndex", "VmsSubscriptionsStateIntegerValuesIndex", - "Wheel", ], gen_java: true, } diff --git a/automotive/vehicle/2.0/Android.mk b/automotive/vehicle/2.0/Android.mk deleted file mode 100644 index a731d6d56fae2a1f1d088605e622cf04af2b456e..0000000000000000000000000000000000000000 --- a/automotive/vehicle/2.0/Android.mk +++ /dev/null @@ -1,1282 +0,0 @@ -# This file is autogenerated by hidl-gen. Do not edit manually. 
- -LOCAL_PATH := $(call my-dir) - -################################################################################ - -include $(CLEAR_VARS) -LOCAL_MODULE := android.hardware.automotive.vehicle-V2.0-java-static -LOCAL_MODULE_CLASS := JAVA_LIBRARIES - -intermediates := $(call local-generated-sources-dir, COMMON) - -HIDL := $(HOST_OUT_EXECUTABLES)/hidl-gen$(HOST_EXECUTABLE_SUFFIX) - -LOCAL_STATIC_JAVA_LIBRARIES := \ - android.hidl.base-V1.0-java-static \ - - -# -# Build types.hal (DiagnosticFloatSensorIndex) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/DiagnosticFloatSensorIndex.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.DiagnosticFloatSensorIndex - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (DiagnosticIntegerSensorIndex) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/DiagnosticIntegerSensorIndex.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.DiagnosticIntegerSensorIndex - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (Obd2CommonIgnitionMonitors) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/Obd2CommonIgnitionMonitors.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.Obd2CommonIgnitionMonitors - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (Obd2CompressionIgnitionMonitors) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/Obd2CompressionIgnitionMonitors.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.Obd2CompressionIgnitionMonitors - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (Obd2FuelSystemStatus) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/Obd2FuelSystemStatus.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - 
-randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.Obd2FuelSystemStatus - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (Obd2FuelType) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/Obd2FuelType.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.Obd2FuelType - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (Obd2IgnitionMonitorKind) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/Obd2IgnitionMonitorKind.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.Obd2IgnitionMonitorKind - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (Obd2SecondaryAirStatus) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/Obd2SecondaryAirStatus.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.Obd2SecondaryAirStatus - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (Obd2SparkIgnitionMonitors) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/Obd2SparkIgnitionMonitors.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.Obd2SparkIgnitionMonitors - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (StatusCode) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/StatusCode.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.StatusCode - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (SubscribeFlags) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/SubscribeFlags.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) 
-$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.SubscribeFlags - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (SubscribeOptions) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/SubscribeOptions.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.SubscribeOptions - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleApPowerBootupReason) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleApPowerBootupReason.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehicleApPowerBootupReason - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleApPowerSetState) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleApPowerSetState.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehicleApPowerSetState - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleApPowerState) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleApPowerState.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehicleApPowerState - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleApPowerStateConfigFlag) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleApPowerStateConfigFlag.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - 
android.hardware.automotive.vehicle@2.0::types.VehicleApPowerStateConfigFlag - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleApPowerStateIndex) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleApPowerStateIndex.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehicleApPowerStateIndex - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleApPowerStateShutdownParam) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleApPowerStateShutdownParam.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehicleApPowerStateShutdownParam - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleArea) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleArea.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehicleArea - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleAreaConfig) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleAreaConfig.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehicleAreaConfig - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleAreaDoor) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleAreaDoor.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehicleAreaDoor - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleAreaMirror) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleAreaMirror.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) 
-$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehicleAreaMirror - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleAreaSeat) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleAreaSeat.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehicleAreaSeat - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleAreaWindow) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleAreaWindow.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehicleAreaWindow - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleAreaZone) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleAreaZone.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehicleAreaZone - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleAudioContextFlag) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleAudioContextFlag.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehicleAudioContextFlag - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleAudioExtFocusFlag) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleAudioExtFocusFlag.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehicleAudioExtFocusFlag - -$(GEN): $(LOCAL_PATH)/types.hal - 
$(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleAudioFocusIndex) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleAudioFocusIndex.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehicleAudioFocusIndex - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleAudioFocusRequest) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleAudioFocusRequest.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehicleAudioFocusRequest - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleAudioFocusState) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleAudioFocusState.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehicleAudioFocusState - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleAudioHwVariantConfigFlag) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleAudioHwVariantConfigFlag.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehicleAudioHwVariantConfigFlag - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleAudioRoutingPolicyIndex) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleAudioRoutingPolicyIndex.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehicleAudioRoutingPolicyIndex - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleAudioStream) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleAudioStream.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := 
$(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehicleAudioStream - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleAudioStreamFlag) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleAudioStreamFlag.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehicleAudioStreamFlag - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleAudioVolumeCapabilityFlag) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleAudioVolumeCapabilityFlag.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehicleAudioVolumeCapabilityFlag - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleAudioVolumeIndex) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleAudioVolumeIndex.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehicleAudioVolumeIndex - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleAudioVolumeLimitIndex) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleAudioVolumeLimitIndex.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehicleAudioVolumeLimitIndex - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleAudioVolumeState) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleAudioVolumeState.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - 
android.hardware.automotive.vehicle@2.0::types.VehicleAudioVolumeState - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleDisplay) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleDisplay.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehicleDisplay - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleDrivingStatus) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleDrivingStatus.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehicleDrivingStatus - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleGear) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleGear.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehicleGear - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleHvacFanDirection) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleHvacFanDirection.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehicleHvacFanDirection - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleHwKeyInputAction) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleHwKeyInputAction.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehicleHwKeyInputAction - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleIgnitionState) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleIgnitionState.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := 
$(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehicleIgnitionState - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleInstrumentClusterType) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleInstrumentClusterType.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehicleInstrumentClusterType - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehiclePropConfig) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehiclePropConfig.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehiclePropConfig - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehiclePropValue) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehiclePropValue.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehiclePropValue - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleProperty) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleProperty.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehicleProperty - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehiclePropertyAccess) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehiclePropertyAccess.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehiclePropertyAccess - -$(GEN): $(LOCAL_PATH)/types.hal - 
$(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehiclePropertyChangeMode) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehiclePropertyChangeMode.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehiclePropertyChangeMode - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehiclePropertyGroup) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehiclePropertyGroup.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehiclePropertyGroup - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehiclePropertyOperation) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehiclePropertyOperation.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehiclePropertyOperation - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehiclePropertyType) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehiclePropertyType.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehiclePropertyType - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleRadioConstants) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleRadioConstants.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehicleRadioConstants - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleTurnSignal) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleTurnSignal.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := 
$(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehicleTurnSignal - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VehicleUnit) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VehicleUnit.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VehicleUnit - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VmsAvailabilityStateIntegerValuesIndex) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VmsAvailabilityStateIntegerValuesIndex.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VmsAvailabilityStateIntegerValuesIndex - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VmsBaseMessageIntegerValuesIndex) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VmsBaseMessageIntegerValuesIndex.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VmsBaseMessageIntegerValuesIndex - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VmsMessageType) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VmsMessageType.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VmsMessageType - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VmsMessageWithLayerAndPublisherIdIntegerValuesIndex) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VmsMessageWithLayerAndPublisherIdIntegerValuesIndex.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - 
android.hardware.automotive.vehicle@2.0::types.VmsMessageWithLayerAndPublisherIdIntegerValuesIndex - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VmsMessageWithLayerIntegerValuesIndex) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VmsMessageWithLayerIntegerValuesIndex.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VmsMessageWithLayerIntegerValuesIndex - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VmsOfferingMessageIntegerValuesIndex) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VmsOfferingMessageIntegerValuesIndex.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VmsOfferingMessageIntegerValuesIndex - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (VmsSubscriptionsStateIntegerValuesIndex) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/VmsSubscriptionsStateIntegerValuesIndex.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.VmsSubscriptionsStateIntegerValuesIndex - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (Wheel) -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/Wheel.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::types.Wheel - -$(GEN): $(LOCAL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build IVehicle.hal -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/IVehicle.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/IVehicle.hal -$(GEN): PRIVATE_DEPS += $(LOCAL_PATH)/IVehicleCallback.hal -$(GEN): $(LOCAL_PATH)/IVehicleCallback.hal -$(GEN): PRIVATE_DEPS += $(LOCAL_PATH)/types.hal -$(GEN): $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::IVehicle - -$(GEN): 
$(LOCAL_PATH)/IVehicle.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build IVehicleCallback.hal -# -GEN := $(intermediates)/android/hardware/automotive/vehicle/V2_0/IVehicleCallback.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(LOCAL_PATH)/IVehicleCallback.hal -$(GEN): PRIVATE_DEPS += $(LOCAL_PATH)/types.hal -$(GEN): $(LOCAL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.automotive.vehicle@2.0::IVehicleCallback - -$(GEN): $(LOCAL_PATH)/IVehicleCallback.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) -include $(BUILD_STATIC_JAVA_LIBRARY) - - - -include $(call all-makefiles-under,$(LOCAL_PATH)) diff --git a/automotive/vehicle/2.0/IVehicle.hal b/automotive/vehicle/2.0/IVehicle.hal index d962de02f38585f5740af982dfcdd7ac53fab0d6..1b1d391268d49de72b4831767813da61501b6e01 100644 --- a/automotive/vehicle/2.0/IVehicle.hal +++ b/automotive/vehicle/2.0/IVehicle.hal @@ -43,7 +43,7 @@ interface IVehicle { * For VehiclePropertyChangeMode::ON_CHANGE properties, it must return the * latest available value. * - * Some properties like AUDIO_VOLUME requires to pass additional data in + * Some properties like RADIO_PRESET requires to pass additional data in * GET request in VehiclePropValue object. * * If there is no data available yet, which can happen during initial stage, diff --git a/automotive/vehicle/2.0/IVehicleCallback.hal b/automotive/vehicle/2.0/IVehicleCallback.hal index 706041862bae4c2e003beafe141e17ac939a8f21..c4a756ab04896b869952e904fa57de6840bf89ed 100644 --- a/automotive/vehicle/2.0/IVehicleCallback.hal +++ b/automotive/vehicle/2.0/IVehicleCallback.hal @@ -33,7 +33,7 @@ interface IVehicleCallback { /** * This method gets called if the client was subscribed to a property using - * SubscribeFlags::SET_CALL flag and IVehicle#set(...) method was called. + * SubscribeFlags::EVENTS_FROM_ANDROID flag and IVehicle#set(...) method was called. * * These events must be delivered to subscriber immediately without any * batching. 
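For readers following the SubscribeFlags rename above (SET_CALL becoming EVENTS_FROM_ANDROID), here is a minimal client-side sketch. It is not part of the patch: the subscribe() signature (callback plus a vector of SubscribeOptions) and the generated header path are assumed from the android.hardware.automotive.vehicle@2.0 HIDL bindings, and HVAC_FAN_SPEED is only an arbitrary example property.

    // Hedged sketch: request set()-triggered events using the renamed flag.
    #include <android/hardware/automotive/vehicle/2.0/IVehicle.h>

    using namespace android::hardware::automotive::vehicle::V2_0;

    void subscribeToAndroidSetEvents(const android::sp<IVehicle>& vehicle,
                                     const android::sp<IVehicleCallback>& callback) {
        SubscribeOptions opts = {};
        opts.propId = static_cast<int32_t>(VehicleProperty::HVAC_FAN_SPEED);
        opts.sampleRate = 0.0f;                            // ON_CHANGE property: no rate needed
        opts.flags = SubscribeFlags::EVENTS_FROM_ANDROID;  // was SubscribeFlags::SET_CALL

        android::hardware::hidl_vec<SubscribeOptions> options = {opts};
        auto status = vehicle->subscribe(callback, options);
        // IVehicleCallback::onPropertySet() now fires whenever IVehicle::set()
        // writes this property from the Android side.
        (void)status;
    }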
diff --git a/automotive/vehicle/2.0/default/Android.bp b/automotive/vehicle/2.0/default/Android.bp index 1690163d2aba5a64ef47649885dd7a31207d22d2..22ab079f330e72508618c2a7457c4ae12e31377e 100644 --- a/automotive/vehicle/2.0/default/Android.bp +++ b/automotive/vehicle/2.0/default/Android.bp @@ -35,7 +35,7 @@ cc_library_headers { } // Vehicle reference implementation lib -cc_library_static { +cc_library { name: "android.hardware.automotive.vehicle@2.0-manager-lib", vendor: true, defaults: ["vhal_v2_0_defaults"], @@ -46,18 +46,12 @@ cc_library_static { "common/src/VehicleObjectPool.cpp", "common/src/VehiclePropertyStore.cpp", "common/src/VehicleUtils.cpp", + "common/src/VmsUtils.cpp", ], local_include_dirs: ["common/include/vhal_v2_0"], export_include_dirs: ["common/include"], } -cc_library_shared { - name: "android.hardware.automotive.vehicle@2.0-manager-lib-shared", - vendor: true, - static_libs: ["android.hardware.automotive.vehicle@2.0-manager-lib"], - export_static_lib_headers: ["android.hardware.automotive.vehicle@2.0-manager-lib"], -} - // Vehicle default VehicleHAL implementation cc_library_static { name: "android.hardware.automotive.vehicle@2.0-default-impl-lib", @@ -68,6 +62,8 @@ cc_library_static { "impl/vhal_v2_0/VehicleEmulator.cpp", "impl/vhal_v2_0/PipeComm.cpp", "impl/vhal_v2_0/SocketComm.cpp", + "impl/vhal_v2_0/LinearFakeValueGenerator.cpp", + "impl/vhal_v2_0/JsonFakeValueGenerator.cpp", ], local_include_dirs: ["common/include/vhal_v2_0"], export_include_dirs: ["impl"], @@ -77,6 +73,7 @@ cc_library_static { "libprotobuf-cpp-lite", ], static_libs: [ + "libjsoncpp", "libqemu_pipe", "android.hardware.automotive.vehicle@2.0-libproto-native", ], @@ -93,6 +90,7 @@ cc_test { "tests/VehicleHalManager_test.cpp", "tests/VehicleObjectPool_test.cpp", "tests/VehiclePropConfigIndex_test.cpp", + "tests/VmsUtils_test.cpp", ], header_libs: ["libbase_headers"], } @@ -112,6 +110,7 @@ cc_binary { "android.hardware.automotive.vehicle@2.0-manager-lib", "android.hardware.automotive.vehicle@2.0-default-impl-lib", "android.hardware.automotive.vehicle@2.0-libproto-native", + "libjsoncpp", "libqemu_pipe", ], } diff --git a/automotive/vehicle/2.0/default/OWNERS b/automotive/vehicle/2.0/default/OWNERS new file mode 100644 index 0000000000000000000000000000000000000000..d5d9d4c26a01702e45a850099915e6f877be6bff --- /dev/null +++ b/automotive/vehicle/2.0/default/OWNERS @@ -0,0 +1,3 @@ +egranata@google.com +pavelm@google.com +spaik@google.com diff --git a/automotive/vehicle/2.0/default/android.hardware.automotive.vehicle@2.0-service.rc b/automotive/vehicle/2.0/default/android.hardware.automotive.vehicle@2.0-service.rc index 30e249e80cd3f5dd7c1ef08129b7ad3aa2c5fb60..c8c89dc821ea36605914a612e2da4ecc38b70bb2 100644 --- a/automotive/vehicle/2.0/default/android.hardware.automotive.vehicle@2.0-service.rc +++ b/automotive/vehicle/2.0/default/android.hardware.automotive.vehicle@2.0-service.rc @@ -1,4 +1,4 @@ -service vehicle-hal-2.0 /vendor/bin/hw/android.hardware.automotive.vehicle@2.0-service +service vendor.vehicle-hal-2.0 /vendor/bin/hw/android.hardware.automotive.vehicle@2.0-service class hal user vehicle_network group system inet diff --git a/automotive/vehicle/2.0/default/common/include/vhal_v2_0/SubscriptionManager.h b/automotive/vehicle/2.0/default/common/include/vhal_v2_0/SubscriptionManager.h index 8e9089d600a7cc1aa935fac051671bd838a2aaf3..6086c01116c2a7fcab5c02b42922eddcc45ed9c9 100644 --- a/automotive/vehicle/2.0/default/common/include/vhal_v2_0/SubscriptionManager.h +++ 
b/automotive/vehicle/2.0/default/common/include/vhal_v2_0/SubscriptionManager.h @@ -49,7 +49,7 @@ public: } void addOrUpdateSubscription(const SubscribeOptions &opts); - bool isSubscribed(int32_t propId, int32_t areaId, SubscribeFlags flags); + bool isSubscribed(int32_t propId, SubscribeFlags flags); std::vector getSubscribedProperties() const; private: @@ -87,8 +87,7 @@ public: /** * Constructs SubscriptionManager * - * @param onPropertyUnsubscribed - this callback function will be called when there are no - * more client subscribed to particular property. + * @param onPropertyUnsubscribed - called when no more clients are subscribed to the property. */ SubscriptionManager(const OnPropertyUnsubscribed& onPropertyUnsubscribed) : mOnPropertyUnsubscribed(onPropertyUnsubscribed), @@ -115,9 +114,7 @@ public: const std::vector>& propValues, SubscribeFlags flags) const; - std::list> getSubscribedClients(int32_t propId, - int32_t area, - SubscribeFlags flags) const; + std::list> getSubscribedClients(int32_t propId, SubscribeFlags flags) const; /** * If there are no clients subscribed to given properties than callback function provided * in the constructor will be called. @@ -125,10 +122,9 @@ public: void unsubscribe(ClientId clientId, int32_t propId); private: std::list> getSubscribedClientsLocked(int32_t propId, - int32_t area, SubscribeFlags flags) const; - bool updateHalEventSubscriptionLocked(const SubscribeOptions &opts, SubscribeOptions* out); + bool updateHalEventSubscriptionLocked(const SubscribeOptions& opts, SubscribeOptions* out); void addClientToPropMapLocked(int32_t propId, const sp& client); diff --git a/automotive/vehicle/2.0/default/common/include/vhal_v2_0/VehicleHal.h b/automotive/vehicle/2.0/default/common/include/vhal_v2_0/VehicleHal.h index 8203a1e61d5a63f206d03a0e8eee3871a5ba73d6..fd28483a4e101f2ab5424f644037f4abefbb8f51 100644 --- a/automotive/vehicle/2.0/default/common/include/vhal_v2_0/VehicleHal.h +++ b/automotive/vehicle/2.0/default/common/include/vhal_v2_0/VehicleHal.h @@ -48,17 +48,14 @@ public: /** * Subscribe to HAL property events. This method might be called multiple - * times for the same vehicle property to update subscribed areas or sample - * rate. + * times for the same vehicle property to update sample rate. * * @param property to subscribe - * @param areas a bitwise vehicle areas or 0 for all supported areas * @param sampleRate sample rate in Hz for properties that support sample * rate, e.g. 
for properties with * VehiclePropertyChangeMode::CONTINUOUS */ virtual StatusCode subscribe(int32_t property, - int32_t areas, float sampleRate) = 0; /** diff --git a/automotive/vehicle/2.0/default/common/include/vhal_v2_0/VehicleObjectPool.h b/automotive/vehicle/2.0/default/common/include/vhal_v2_0/VehicleObjectPool.h index 05c649bee0b90723ff9518641db0e95449fb27f3..946e74dddacfe4563a9e58724945481758b28b25 100644 --- a/automotive/vehicle/2.0/default/common/include/vhal_v2_0/VehicleObjectPool.h +++ b/automotive/vehicle/2.0/default/common/include/vhal_v2_0/VehicleObjectPool.h @@ -152,7 +152,7 @@ private: * VehiclePropValuePool pool; * auto v = pool.obtain(VehiclePropertyType::INT32); * v->propId = VehicleProperty::HVAC_FAN_SPEED; - * v->areaId = VehicleAreaZone::ROW_1_LEFT; + * v->areaId = VehicleAreaSeat::ROW_1_LEFT; * v->timestamp = elapsedRealtimeNano(); * v->value->int32Values[0] = 42; * @@ -191,9 +191,8 @@ public: VehiclePropValuePool& operator=(VehiclePropValuePool&) = delete; private: bool isDisposable(VehiclePropertyType type, size_t vecSize) const { - return vecSize > mMaxRecyclableVectorSize || - VehiclePropertyType::STRING == type || - VehiclePropertyType::COMPLEX == type; + return vecSize > mMaxRecyclableVectorSize || VehiclePropertyType::STRING == type || + VehiclePropertyType::MIXED == type; } RecyclableType obtainDisposable(VehiclePropertyType valueType, diff --git a/automotive/vehicle/2.0/default/common/include/vhal_v2_0/VehiclePropertyStore.h b/automotive/vehicle/2.0/default/common/include/vhal_v2_0/VehiclePropertyStore.h index eda94b77dc38b7f95cf046a48a6316503e31fc5c..0a243fe352ca3831bfc462ebcb0a8fd36aae7a76 100644 --- a/automotive/vehicle/2.0/default/common/include/vhal_v2_0/VehiclePropertyStore.h +++ b/automotive/vehicle/2.0/default/common/include/vhal_v2_0/VehiclePropertyStore.h @@ -67,7 +67,7 @@ public: /* Stores provided value. Returns true if value was written returns false if config for * example wasn't registered. */ - bool writeValue(const VehiclePropValue& propValue); + bool writeValue(const VehiclePropValue& propValue, bool updateStatus); void removeValue(const VehiclePropValue& propValue); void removeValuesForProperty(int32_t propId); diff --git a/automotive/vehicle/2.0/default/common/include/vhal_v2_0/VehicleUtils.h b/automotive/vehicle/2.0/default/common/include/vhal_v2_0/VehicleUtils.h index ce0b163c34c001af2d9f48094c880fbd53e7cf82..f97dfa1bba268ce795010c38fc5d152b39b9503e 100644 --- a/automotive/vehicle/2.0/default/common/include/vhal_v2_0/VehicleUtils.h +++ b/automotive/vehicle/2.0/default/common/include/vhal_v2_0/VehicleUtils.h @@ -33,10 +33,9 @@ namespace V2_0 { constexpr int32_t kAllSupportedAreas = 0; /** Returns underlying (integer) value for given enum. 
*/ -template -inline constexpr typename std::underlying_type::type toInt( - ENUM const value) { - return static_cast::type>(value); +template::type> +inline constexpr U toInt(ENUM const value) { + return static_cast(value); } inline constexpr VehiclePropertyType getPropType(int32_t prop) { diff --git a/automotive/vehicle/2.0/default/common/include/vhal_v2_0/VmsUtils.h b/automotive/vehicle/2.0/default/common/include/vhal_v2_0/VmsUtils.h new file mode 100644 index 0000000000000000000000000000000000000000..9e32bb5a89660aa70f81056abba38b40f87e19b9 --- /dev/null +++ b/automotive/vehicle/2.0/default/common/include/vhal_v2_0/VmsUtils.h @@ -0,0 +1,156 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef android_hardware_automotive_vehicle_V2_0_VmsUtils_H_ +#define android_hardware_automotive_vehicle_V2_0_VmsUtils_H_ + +#include +#include + +#include + +namespace android { +namespace hardware { +namespace automotive { +namespace vehicle { +namespace V2_0 { +namespace vms { + +// VmsUtils are a set of abstractions for creating and parsing Vehicle Property +// updates to VehicleProperty::VEHICLE_MAP_SERVICE. The format for parsing a +// VehiclePropValue update with a VMS message is specified in the Vehicle HIDL. +// +// This interface is meant for use by HAL clients of VMS; corresponding +// functionality is also provided by VMS in the embedded car service. + +// A VmsLayer is comprised of a type, subtype, and version. +struct VmsLayer { + VmsLayer(int type, int subtype, int version) : type(type), subtype(subtype), version(version) {} + int type; + int subtype; + int version; +}; + +struct VmsLayerAndPublisher { + VmsLayer layer; + int publisher_id; +}; + +// A VmsAssociatedLayer is used by subscribers to specify which publisher IDs +// are acceptable for a given layer. +struct VmsAssociatedLayer { + VmsLayer layer; + std::vector publisher_ids; +}; + +// A VmsLayerOffering refers to a single layer that can be published, along with +// its dependencies. Dependencies can be empty. +struct VmsLayerOffering { + VmsLayerOffering(VmsLayer layer, std::vector dependencies) + : layer(layer), dependencies(dependencies) {} + VmsLayerOffering(VmsLayer layer) : layer(layer), dependencies() {} + VmsLayer layer; + std::vector dependencies; +}; + +// A VmsSubscriptionsState is delivered in response to a +// VmsMessageType.SUBSCRIPTIONS_REQUEST or on the first SUBSCRIBE or last +// UNSUBSCRIBE for a layer. It indicates which layers or associated_layers are +// currently being subscribed to in the system. +struct VmsSubscriptionsState { + int sequence_number; + std::vector layers; + std::vector associated_layers; +}; + +struct VmsAvailabilityState { + int sequence_number; + std::vector associated_layers; +}; + +// Creates a VehiclePropValue containing a message of type +// VmsMessageType.SUBSCRIBE, specifying to the VMS service +// which layer to subscribe to. 
+std::unique_ptr createSubscribeMessage(const VmsLayer& layer); + +// Creates a VehiclePropValue containing a message of type +// VmsMessageType.SUBSCRIBE_TO_PUBLISHER, specifying to the VMS service +// which layer and publisher_id to subscribe to. +std::unique_ptr createSubscribeToPublisherMessage( + const VmsLayerAndPublisher& layer); + +// Creates a VehiclePropValue containing a message of type +// VmsMessageType.UNSUBSCRIBE, specifying to the VMS service +// which layer to unsubscribe from. +std::unique_ptr createUnsubscribeMessage(const VmsLayer& layer); + +// Creates a VehiclePropValue containing a message of type +// VmsMessageType.UNSUBSCRIBE_TO_PUBLISHER, specifying to the VMS service +// which layer and publisher_id to unsubscribe from. +std::unique_ptr createUnsubscribeToPublisherMessage( + const VmsLayerAndPublisher& layer); + +// Creates a VehiclePropValue containing a message of type +// VmsMessageType.OFFERING, specifying to the VMS service which layers are being +// offered and their dependencies, if any. +std::unique_ptr createOfferingMessage( + const std::vector& offering); + +// Creates a VehiclePropValue containing a message of type +// VmsMessageType.AVAILABILITY_REQUEST. +std::unique_ptr createAvailabilityRequest(); + +// Creates a VehiclePropValue containing a message of type +// VmsMessageType.AVAILABILITY_REQUEST. +std::unique_ptr createSubscriptionsRequest(); + +// Creates a VehiclePropValue containing a message of type VmsMessageType.DATA. +// Returns a nullptr if the byte string in bytes is empty. +// +// For example, to build a VehiclePropMessage containing a proto, the caller +// should convert the proto to a byte string using the SerializeToString proto +// API, then use this inteface to build the VehicleProperty. +std::unique_ptr createDataMessage(const std::string& bytes); + +// Returns true if the VehiclePropValue pointed to by value contains a valid Vms +// message, i.e. the VehicleProperty, VehicleArea, and VmsMessageType are all +// valid. Note: If the VmsMessageType enum is extended, this function will +// return false for any new message types added. +bool isValidVmsMessage(const VehiclePropValue& value); + +// Returns the message type. Expects that the VehiclePropValue contains a valid +// Vms message, as verified by isValidVmsMessage. +VmsMessageType parseMessageType(const VehiclePropValue& value); + +// Constructs a string byte array from a message of type VmsMessageType.DATA. +// Returns an empty string if the message type doesn't match or if the +// VehiclePropValue does not contain a byte array. +// +// A proto message can then be constructed by passing the result of this +// function to ParseFromString. +std::string parseData(const VehiclePropValue& value); + +// TODO(aditin): Need to implement additional parsing functions per message +// type. 
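As a point of reference, a minimal sketch of the round trip these helpers are documented to support. It is not part of the patch: the include path is assumed from the common/include directory exported in the Android.bp change above, and the payload string stands in for bytes produced by a proto's SerializeToString.

    // Hedged sketch: build a DATA message and read the payload back out,
    // using only the functions declared in VmsUtils.h.
    #include <string>
    #include <vhal_v2_0/VmsUtils.h>

    using namespace android::hardware::automotive::vehicle::V2_0;

    void vmsDataRoundTrip() {
        std::string payload = "serialized-proto-bytes";   // placeholder payload
        auto dataMsg = vms::createDataMessage(payload);   // VEHICLE_MAP_SERVICE prop value

        if (dataMsg && vms::isValidVmsMessage(*dataMsg) &&
            vms::parseMessageType(*dataMsg) == VmsMessageType::DATA) {
            std::string roundTrip = vms::parseData(*dataMsg);  // equals payload
            (void)roundTrip;  // a real client would pass this to ParseFromString
        }
    }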
+ +} // namespace vms +} // namespace V2_0 +} // namespace vehicle +} // namespace automotive +} // namespace hardware +} // namespace android + +#endif // android_hardware_automotive_vehicle_V2_0_VmsUtils_H_ diff --git a/automotive/vehicle/2.0/default/common/src/SubscriptionManager.cpp b/automotive/vehicle/2.0/default/common/src/SubscriptionManager.cpp index 74f0a5f55e9645d5234b4d90dacb5fddede7f671..97aeca677cfadd535f2f567af51a93bc8d0fcab9 100644 --- a/automotive/vehicle/2.0/default/common/src/SubscriptionManager.cpp +++ b/automotive/vehicle/2.0/default/common/src/SubscriptionManager.cpp @@ -34,23 +34,12 @@ namespace V2_0 { bool mergeSubscribeOptions(const SubscribeOptions &oldOpts, const SubscribeOptions &newOpts, SubscribeOptions *outResult) { - - int32_t updatedAreas = oldOpts.vehicleAreas; - if (updatedAreas != kAllSupportedAreas) { - updatedAreas = newOpts.vehicleAreas != kAllSupportedAreas - ? updatedAreas | newOpts.vehicleAreas - : kAllSupportedAreas; - } - float updatedRate = std::max(oldOpts.sampleRate, newOpts.sampleRate); SubscribeFlags updatedFlags = SubscribeFlags(oldOpts.flags | newOpts.flags); - bool updated = updatedRate > oldOpts.sampleRate - || updatedAreas != oldOpts.vehicleAreas - || updatedFlags != oldOpts.flags; + bool updated = (updatedRate > oldOpts.sampleRate) || (updatedFlags != oldOpts.flags); if (updated) { *outResult = oldOpts; - outResult->vehicleAreas = updatedAreas; outResult->sampleRate = updatedRate; outResult->flags = updatedFlags; } @@ -75,15 +64,13 @@ void HalClient::addOrUpdateSubscription(const SubscribeOptions &opts) { } bool HalClient::isSubscribed(int32_t propId, - int32_t areaId, SubscribeFlags flags) { auto it = mSubscriptions.find(propId); if (it == mSubscriptions.end()) { return false; } const SubscribeOptions& opts = it->second; - bool res = (opts.flags & flags) - && (opts.vehicleAreas == 0 || areaId == 0 || opts.vehicleAreas & areaId); + bool res = (opts.flags & flags); return res; } @@ -119,7 +106,7 @@ StatusCode SubscriptionManager::addOrUpdateSubscription( addClientToPropMapLocked(opts.propId, client); - if (SubscribeFlags::HAL_EVENT & opts.flags) { + if (SubscribeFlags::EVENTS_FROM_CAR & opts.flags) { SubscribeOptions updated; if (updateHalEventSubscriptionLocked(opts, &updated)) { outUpdatedSubscriptions->push_back(updated); @@ -139,8 +126,7 @@ std::list SubscriptionManager::distributeValuesToClients( MuxGuard g(mLock); for (const auto& propValue: propValues) { VehiclePropValue* v = propValue.get(); - auto clients = getSubscribedClientsLocked( - v->prop, v->areaId, flags); + auto clients = getSubscribedClientsLocked(v->prop, flags); for (const auto& client : clients) { clientValuesMap[client].push_back(v); } @@ -158,21 +144,21 @@ std::list SubscriptionManager::distributeValuesToClients( return clientValues; } -std::list> SubscriptionManager::getSubscribedClients( - int32_t propId, int32_t area, SubscribeFlags flags) const { +std::list> SubscriptionManager::getSubscribedClients(int32_t propId, + SubscribeFlags flags) const { MuxGuard g(mLock); - return getSubscribedClientsLocked(propId, area, flags); + return getSubscribedClientsLocked(propId, flags); } std::list> SubscriptionManager::getSubscribedClientsLocked( - int32_t propId, int32_t area, SubscribeFlags flags) const { + int32_t propId, SubscribeFlags flags) const { std::list> subscribedClients; sp propClients = getClientsForPropertyLocked(propId); if (propClients.get() != nullptr) { for (size_t i = 0; i < propClients->size(); i++) { const auto& client = propClients->itemAt(i); - if 
(client->isSubscribed(propId, area, flags)) { + if (client->isSubscribed(propId, flags)) { subscribedClients.push_back(client); } } diff --git a/automotive/vehicle/2.0/default/common/src/VehicleHalManager.cpp b/automotive/vehicle/2.0/default/common/src/VehicleHalManager.cpp index ae543bb3fce3e9479f0e043fcadbc33f4611a27a..b5de26251883d28c2d1cd52c264269b75d2a1321 100644 --- a/automotive/vehicle/2.0/default/common/src/VehicleHalManager.cpp +++ b/automotive/vehicle/2.0/default/common/src/VehicleHalManager.cpp @@ -142,15 +142,6 @@ Return VehicleHalManager::subscribe(const sp &call return StatusCode::INVALID_ARG; } - int32_t areas = isGlobalProp(prop) ? 0 : ops.vehicleAreas; - if (areas != 0 && ((areas & config->supportedAreas) != areas)) { - ALOGE("Failed to subscribe property 0x%x. Requested areas 0x%x are " - "out of supported range of 0x%x", prop, ops.vehicleAreas, - config->supportedAreas); - return StatusCode::INVALID_ARG; - } - - ops.vehicleAreas = areas; ops.sampleRate = checkSampleRate(*config, ops.sampleRate); } @@ -164,7 +155,7 @@ Return VehicleHalManager::subscribe(const sp &call } for (auto opt : updatedOptions) { - mHal->subscribe(opt.propId, opt.vehicleAreas, opt.sampleRate); + mHal->subscribe(opt.propId, opt.sampleRate); } return StatusCode::OK; @@ -224,8 +215,8 @@ void VehicleHalManager::onHalEvent(VehiclePropValuePtr v) { void VehicleHalManager::onHalPropertySetError(StatusCode errorCode, int32_t property, int32_t areaId) { - const auto& clients = mSubscriptionManager.getSubscribedClients( - property, 0, SubscribeFlags::HAL_EVENT); + const auto& clients = + mSubscriptionManager.getSubscribedClients(property, SubscribeFlags::EVENTS_FROM_CAR); for (auto client : clients) { client->getCallback()->onPropertySetError(errorCode, property, areaId); @@ -233,8 +224,8 @@ void VehicleHalManager::onHalPropertySetError(StatusCode errorCode, } void VehicleHalManager::onBatchHalEvent(const std::vector& values) { - const auto& clientValues = mSubscriptionManager.distributeValuesToClients( - values, SubscribeFlags::HAL_EVENT); + const auto& clientValues = + mSubscriptionManager.distributeValuesToClients(values, SubscribeFlags::EVENTS_FROM_CAR); for (const HalClientValues& cv : clientValues) { auto vecSize = cv.values.size(); @@ -259,8 +250,7 @@ void VehicleHalManager::onBatchHalEvent(const std::vector& } bool VehicleHalManager::isSampleRateFixed(VehiclePropertyChangeMode mode) { - return (mode & VehiclePropertyChangeMode::ON_SET) - || (mode & VehiclePropertyChangeMode::ON_CHANGE); + return (mode & VehiclePropertyChangeMode::ON_CHANGE); } float VehicleHalManager::checkSampleRate(const VehiclePropConfig &config, @@ -290,7 +280,7 @@ bool VehicleHalManager::isSubscribable(const VehiclePropConfig& config, SubscribeFlags flags) { bool isReadable = config.access & VehiclePropertyAccess::READ; - if (!isReadable && (SubscribeFlags::HAL_EVENT & flags)) { + if (!isReadable && (SubscribeFlags::EVENTS_FROM_CAR & flags)) { ALOGW("Cannot subscribe, property 0x%x is not readable", config.prop); return false; } @@ -298,12 +288,6 @@ bool VehicleHalManager::isSubscribable(const VehiclePropConfig& config, ALOGW("Cannot subscribe, property 0x%x is static", config.prop); return false; } - - //TODO: extend to support event notification for set from android - if (config.changeMode == VehiclePropertyChangeMode::POLL) { - ALOGW("Cannot subscribe, property 0x%x is poll only", config.prop); - return false; - } return true; } @@ -326,8 +310,8 @@ bool VehicleHalManager::checkReadPermission(const VehiclePropConfig &config) 
con } void VehicleHalManager::handlePropertySetEvent(const VehiclePropValue& value) { - auto clients = mSubscriptionManager.getSubscribedClients( - value.prop, value.areaId, SubscribeFlags::SET_CALL); + auto clients = + mSubscriptionManager.getSubscribedClients(value.prop, SubscribeFlags::EVENTS_FROM_ANDROID); for (auto client : clients) { client->getCallback()->onPropertySet(value); } diff --git a/automotive/vehicle/2.0/default/common/src/VehicleObjectPool.cpp b/automotive/vehicle/2.0/default/common/src/VehicleObjectPool.cpp index ac1245a08770dd7c22c00f43ca21bd05e7bf5f32..40dd56e73dfa0ea17992668692e08074779988f1 100644 --- a/automotive/vehicle/2.0/default/common/src/VehicleObjectPool.cpp +++ b/automotive/vehicle/2.0/default/common/src/VehicleObjectPool.cpp @@ -47,6 +47,7 @@ VehiclePropValuePool::RecyclableType VehiclePropValuePool::obtain( dest->prop = src.prop; dest->areaId = src.areaId; + dest->status = src.status; dest->timestamp = src.timestamp; copyVehicleRawValue(&dest->value, src.value); @@ -82,7 +83,7 @@ VehiclePropValuePool::RecyclableType VehiclePropValuePool::obtainString( } VehiclePropValuePool::RecyclableType VehiclePropValuePool::obtainComplex() { - return obtain(VehiclePropertyType::COMPLEX); + return obtain(VehiclePropertyType::MIXED); } VehiclePropValuePool::RecyclableType VehiclePropValuePool::obtainRecylable( @@ -138,18 +139,14 @@ void VehiclePropValuePool::InternalPool::recycle(VehiclePropValue* o) { } bool VehiclePropValuePool::InternalPool::check(VehiclePropValue::RawValue* v) { - return check(&v->int32Values, - (VehiclePropertyType::INT32 == mPropType - || VehiclePropertyType::INT32_VEC == mPropType - || VehiclePropertyType::BOOLEAN == mPropType)) - && check(&v->floatValues, - (VehiclePropertyType::FLOAT == mPropType - || VehiclePropertyType::FLOAT_VEC == mPropType)) - && check(&v->int64Values, - VehiclePropertyType::INT64 == mPropType) - && check(&v->bytes, - VehiclePropertyType::BYTES == mPropType) - && v->stringValue.size() == 0; + return check(&v->int32Values, (VehiclePropertyType::INT32 == mPropType || + VehiclePropertyType::INT32_VEC == mPropType || + VehiclePropertyType::BOOLEAN == mPropType)) && + check(&v->floatValues, (VehiclePropertyType::FLOAT == mPropType || + VehiclePropertyType::FLOAT_VEC == mPropType)) && + check(&v->int64Values, (VehiclePropertyType::INT64 == mPropType || + VehiclePropertyType::INT64_VEC == mPropType)) && + check(&v->bytes, VehiclePropertyType::BYTES == mPropType) && v->stringValue.size() == 0; } VehiclePropValue* VehiclePropValuePool::InternalPool::createObject() { diff --git a/automotive/vehicle/2.0/default/common/src/VehiclePropertyStore.cpp b/automotive/vehicle/2.0/default/common/src/VehiclePropertyStore.cpp index 2c3ebfccbf6f834ae2f3a45b2de148d543821437..94ace455cc54a279a838835c3146b7ed76392c26 100644 --- a/automotive/vehicle/2.0/default/common/src/VehiclePropertyStore.cpp +++ b/automotive/vehicle/2.0/default/common/src/VehiclePropertyStore.cpp @@ -41,7 +41,8 @@ void VehiclePropertyStore::registerProperty(const VehiclePropConfig& config, mConfigs.insert({ config.prop, RecordConfig { config, tokenFunc } }); } -bool VehiclePropertyStore::writeValue(const VehiclePropValue& propValue) { +bool VehiclePropertyStore::writeValue(const VehiclePropValue& propValue, + bool updateStatus) { MuxGuard g(mLock); if (!mConfigs.count(propValue.prop)) return false; @@ -52,6 +53,9 @@ bool VehiclePropertyStore::writeValue(const VehiclePropValue& propValue) { } else { valueToUpdate->timestamp = propValue.timestamp; valueToUpdate->value = 
propValue.value; + if (updateStatus) { + valueToUpdate->status = propValue.status; + } } return true; } diff --git a/automotive/vehicle/2.0/default/common/src/VehicleUtils.cpp b/automotive/vehicle/2.0/default/common/src/VehicleUtils.cpp index 9146fa122878ebf801a12aa6b197279922096360..5b6816ee210913dd1afede83383cb89668dbe220 100644 --- a/automotive/vehicle/2.0/default/common/src/VehicleUtils.cpp +++ b/automotive/vehicle/2.0/default/common/src/VehicleUtils.cpp @@ -42,13 +42,14 @@ std::unique_ptr createVehiclePropValue( val->value.floatValues.resize(vecSize); break; case VehiclePropertyType::INT64: + case VehiclePropertyType::INT64_VEC: val->value.int64Values.resize(vecSize); break; case VehiclePropertyType::BYTES: val->value.bytes.resize(vecSize); break; case VehiclePropertyType::STRING: - case VehiclePropertyType::COMPLEX: + case VehiclePropertyType::MIXED: break; // Valid, but nothing to do. default: ALOGE("createVehiclePropValue: unknown type: %d", type); @@ -68,6 +69,7 @@ size_t getVehicleRawValueVectorSize( case VehiclePropertyType::FLOAT_VEC: return value.floatValues.size(); case VehiclePropertyType::INT64: + case VehiclePropertyType::INT64_VEC: return value.int64Values.size(); case VehiclePropertyType::BYTES: return value.bytes.size(); @@ -112,6 +114,7 @@ void shallowCopyHidlStr(hidl_string* dest, const hidl_string& src) { void shallowCopy(VehiclePropValue* dest, const VehiclePropValue& src) { dest->prop = src.prop; dest->areaId = src.areaId; + dest->status = src.status; dest->timestamp = src.timestamp; shallowCopyHidlVec(&dest->value.int32Values, src.value.int32Values); shallowCopyHidlVec(&dest->value.int64Values, src.value.int64Values); diff --git a/automotive/vehicle/2.0/default/common/src/VmsUtils.cpp b/automotive/vehicle/2.0/default/common/src/VmsUtils.cpp new file mode 100644 index 0000000000000000000000000000000000000000..f001a3236cd7c61472948e74ad21e85d97fda529 --- /dev/null +++ b/automotive/vehicle/2.0/default/common/src/VmsUtils.cpp @@ -0,0 +1,161 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "VmsUtils.h" + +#include + +namespace android { +namespace hardware { +namespace automotive { +namespace vehicle { +namespace V2_0 { +namespace vms { + +static constexpr int kMessageIndex = toInt(VmsBaseMessageIntegerValuesIndex::MESSAGE_TYPE); +static constexpr int kMessageTypeSize = 1; +static constexpr int kLayerNumberSize = 1; +static constexpr int kLayerSize = 3; +static constexpr int kLayerAndPublisherSize = 4; + +// TODO(aditin): We should extend the VmsMessageType enum to include a first and +// last, which would prevent breakages in this API. However, for all of the +// functions in this module, we only need to guarantee that the message type is +// between SUBSCRIBE and DATA. 
+static constexpr int kFirstMessageType = toInt(VmsMessageType::SUBSCRIBE); +static constexpr int kLastMessageType = toInt(VmsMessageType::DATA); + +std::unique_ptr createBaseVmsMessage(size_t message_size) { + auto result = createVehiclePropValue(VehiclePropertyType::INT32, message_size); + result->prop = toInt(VehicleProperty::VEHICLE_MAP_SERVICE); + result->areaId = toInt(VehicleArea::GLOBAL); + return result; +} + +std::unique_ptr createSubscribeMessage(const VmsLayer& layer) { + auto result = createBaseVmsMessage(kMessageTypeSize + kLayerSize); + result->value.int32Values = hidl_vec{toInt(VmsMessageType::SUBSCRIBE), layer.type, + layer.subtype, layer.version}; + return result; +} + +std::unique_ptr createSubscribeToPublisherMessage( + const VmsLayerAndPublisher& layer_publisher) { + auto result = createBaseVmsMessage(kMessageTypeSize + kLayerAndPublisherSize); + result->value.int32Values = hidl_vec{ + toInt(VmsMessageType::SUBSCRIBE_TO_PUBLISHER), layer_publisher.layer.type, + layer_publisher.layer.subtype, layer_publisher.layer.version, layer_publisher.publisher_id}; + return result; +} + +std::unique_ptr createUnsubscribeMessage(const VmsLayer& layer) { + auto result = createBaseVmsMessage(kMessageTypeSize + kLayerSize); + result->value.int32Values = hidl_vec{toInt(VmsMessageType::UNSUBSCRIBE), layer.type, + layer.subtype, layer.version}; + return result; +} + +std::unique_ptr createUnsubscribeToPublisherMessage( + const VmsLayerAndPublisher& layer_publisher) { + auto result = createBaseVmsMessage(kMessageTypeSize + kLayerAndPublisherSize); + result->value.int32Values = hidl_vec{ + toInt(VmsMessageType::UNSUBSCRIBE_TO_PUBLISHER), layer_publisher.layer.type, + layer_publisher.layer.subtype, layer_publisher.layer.version, layer_publisher.publisher_id}; + return result; +} + +std::unique_ptr createOfferingMessage( + const std::vector& offering) { + int message_size = kMessageTypeSize + kLayerNumberSize; + for (const auto& offer : offering) { + message_size += kLayerNumberSize + (1 + offer.dependencies.size()) * kLayerSize; + } + auto result = createBaseVmsMessage(message_size); + + std::vector offers = {toInt(VmsMessageType::OFFERING), + static_cast(offering.size())}; + for (const auto& offer : offering) { + std::vector layer_vector = {offer.layer.type, offer.layer.subtype, + offer.layer.version, + static_cast(offer.dependencies.size())}; + for (const auto& dependency : offer.dependencies) { + std::vector dependency_layer = {dependency.type, dependency.subtype, + dependency.version}; + layer_vector.insert(layer_vector.end(), dependency_layer.begin(), + dependency_layer.end()); + } + offers.insert(offers.end(), layer_vector.begin(), layer_vector.end()); + } + result->value.int32Values = offers; + return result; +} + +std::unique_ptr createAvailabilityRequest() { + auto result = createBaseVmsMessage(kMessageTypeSize); + result->value.int32Values = hidl_vec{ + toInt(VmsMessageType::AVAILABILITY_REQUEST), + }; + return result; +} + +std::unique_ptr createSubscriptionsRequest() { + auto result = createBaseVmsMessage(kMessageTypeSize); + result->value.int32Values = hidl_vec{ + toInt(VmsMessageType::SUBSCRIPTIONS_REQUEST), + }; + return result; +} + +std::unique_ptr createDataMessage(const std::string& bytes) { + auto result = createBaseVmsMessage(kMessageTypeSize); + result->value.int32Values = hidl_vec{toInt(VmsMessageType::DATA)}; + result->value.bytes = std::vector(bytes.begin(), bytes.end()); + return result; +} + +bool isValidVmsProperty(const VehiclePropValue& value) { + return 
(value.prop == toInt(VehicleProperty::VEHICLE_MAP_SERVICE)); +} + +bool isValidVmsMessageType(const VehiclePropValue& value) { + return (value.value.int32Values.size() > 0 && + value.value.int32Values[kMessageIndex] >= kFirstMessageType && + value.value.int32Values[kMessageIndex] <= kLastMessageType); +} + +bool isValidVmsMessage(const VehiclePropValue& value) { + return (isValidVmsProperty(value) && isValidVmsMessageType(value)); +} + +VmsMessageType parseMessageType(const VehiclePropValue& value) { + return static_cast(value.value.int32Values[kMessageIndex]); +} + +std::string parseData(const VehiclePropValue& value) { + if (isValidVmsMessage(value) && parseMessageType(value) == VmsMessageType::DATA && + value.value.bytes.size() > 0) { + return std::string(value.value.bytes.begin(), value.value.bytes.end()); + } else { + return std::string(); + } +} + +} // namespace vms +} // namespace V2_0 +} // namespace vehicle +} // namespace automotive +} // namespace hardware +} // namespace android diff --git a/automotive/vehicle/2.0/default/impl/vhal_v2_0/DefaultConfig.h b/automotive/vehicle/2.0/default/impl/vhal_v2_0/DefaultConfig.h index 08d3d794467439c21db53fab320b23a0a8782f8b..eb9d6605cf400ca07ee447daed6fa3ec11b7cc98 100644 --- a/automotive/vehicle/2.0/default/impl/vhal_v2_0/DefaultConfig.h +++ b/automotive/vehicle/2.0/default/impl/vhal_v2_0/DefaultConfig.h @@ -30,6 +30,10 @@ namespace impl { // // Some handy constants to avoid conversions from enum to int. constexpr int ABS_ACTIVE = (int)VehicleProperty::ABS_ACTIVE; +constexpr int AP_POWER_STATE_REQ = (int)VehicleProperty::AP_POWER_STATE_REQ; +constexpr int AP_POWER_STATE_REPORT = (int)VehicleProperty::AP_POWER_STATE_REPORT; +constexpr int DOOR_1_LEFT = (int)VehicleAreaDoor::ROW_1_LEFT; +constexpr int DOOR_1_RIGHT = (int)VehicleAreaDoor::ROW_1_RIGHT; constexpr int OBD2_LIVE_FRAME = (int)VehicleProperty::OBD2_LIVE_FRAME; constexpr int OBD2_FREEZE_FRAME = (int)VehicleProperty::OBD2_FREEZE_FRAME; constexpr int OBD2_FREEZE_FRAME_INFO = (int)VehicleProperty::OBD2_FREEZE_FRAME_INFO; @@ -38,26 +42,74 @@ constexpr int TRACTION_CONTROL_ACTIVE = (int)VehicleProperty::TRACTION_CONTROL_A constexpr int VEHICLE_MAP_SERVICE = (int)VehicleProperty::VEHICLE_MAP_SERVICE; constexpr int WHEEL_TICK = (int)VehicleProperty::WHEEL_TICK; constexpr int ALL_WHEELS = - (int)(Wheel::LEFT_FRONT | Wheel::RIGHT_FRONT | Wheel::LEFT_REAR | Wheel::RIGHT_REAR); + (int)(VehicleAreaWheel::LEFT_FRONT | VehicleAreaWheel::RIGHT_FRONT | + VehicleAreaWheel::LEFT_REAR | VehicleAreaWheel::RIGHT_REAR); +constexpr int HVAC_LEFT = (int)(VehicleAreaSeat::ROW_1_LEFT | VehicleAreaSeat::ROW_2_LEFT | + VehicleAreaSeat::ROW_2_CENTER); +constexpr int HVAC_RIGHT = (int)(VehicleAreaSeat::ROW_1_RIGHT | VehicleAreaSeat::ROW_2_RIGHT); +constexpr int HVAC_ALL = HVAC_LEFT | HVAC_RIGHT; + +/** + * This property is used for test purpose to generate fake events. Here is the test package that + * is referencing this property definition: packages/services/Car/tests/vehiclehal_test + */ +const int32_t kGenerateFakeDataControllingProperty = + 0x0666 | VehiclePropertyGroup::VENDOR | VehicleArea::GLOBAL | VehiclePropertyType::MIXED; -/* - * This property is used for test purpose to generate fake events. +/** + * FakeDataCommand enum defines the supported command type for kGenerateFakeDataControllingProperty. + * All those commands can be send independently with each other. And each will override the one sent + * previously. 
* - * It has the following format: + * The controlling property has the following format: * - * int32Values[0] - command (1 - start fake data generation, 0 - stop) - * int32Values[1] - VehicleProperty to which command applies + * int32Values[0] - command enum defined in FakeDataCommand * - * For start command, additional data should be provided: - * int64Values[0] - periodic interval in nanoseconds - * floatValues[0] - initial value - * floatValues[1] - dispersion defines min and max range relative to initial value - * floatValues[2] - increment, with every timer tick the value will be incremented by this amount + * The format of the arguments is defined for each command type as below: */ -const int32_t kGenerateFakeDataControllingProperty = 0x0666 - | VehiclePropertyGroup::VENDOR - | VehicleArea::GLOBAL - | VehiclePropertyType::COMPLEX; +enum class FakeDataCommand : int32_t { + /** + * Starts linear fake data generation. Caller must provide additional data: + * int32Values[1] - VehicleProperty to which command applies + * int64Values[0] - periodic interval in nanoseconds + * floatValues[0] - initial value + * floatValues[1] - dispersion defines the min/max value relative to initial value, where + * max = initial_value + dispersion, min = initial_value - dispersion. + * Dispersion should be non-negative, otherwise the behavior is undefined. + * floatValues[2] - increment, with every timer tick the value will be incremented by this + * amount. When reaching to max value, the current value will be set to min. + * It should be non-negative, otherwise the behavior is undefined. + */ + StartLinear = 0, + + /** Stops generating of fake data that was triggered by Start commands. + * int32Values[1] - VehicleProperty to which command applies. VHAL will stop the + * corresponding linear generation for that property. + */ + StopLinear = 1, + + /** + * Starts JSON-based fake data generation. Caller must provide a string value specifying + * the path to fake value JSON file: + * stringValue - path to the fake values JSON file + */ + StartJson = 2, + + /** + * Stops JSON-based fake data generation. No additional arguments needed. + */ + StopJson = 3, + + /** + * Injects key press event (HAL incorporates UP/DOWN acction and triggers 2 HAL events for every + * key-press). We set the enum with high number to leave space for future start/stop commands. 
+ * Caller must provide the following data: + * int32Values[2] - Android key code + * int32Values[3] - target display (0 - for main display, 1 - for instrument cluster, see + * VehicleDisplay) + */ + KeyPress = 100, +}; const int32_t kHvacPowerProperties[] = { toInt(VehicleProperty::HVAC_FAN_SPEED), @@ -76,6 +128,38 @@ struct ConfigDeclaration { }; const ConfigDeclaration kVehicleProperties[]{ + {.config = + { + .prop = toInt(VehicleProperty::INFO_FUEL_CAPACITY), + .access = VehiclePropertyAccess::READ, + .changeMode = VehiclePropertyChangeMode::STATIC, + }, + .initialValue = {.floatValues = {15000}}}, + + {.config = + { + .prop = toInt(VehicleProperty::INFO_FUEL_TYPE), + .access = VehiclePropertyAccess::READ, + .changeMode = VehiclePropertyChangeMode::STATIC, + }, + .initialValue = {.int32Values = {1}}}, + + {.config = + { + .prop = toInt(VehicleProperty::INFO_EV_BATTERY_CAPACITY), + .access = VehiclePropertyAccess::READ, + .changeMode = VehiclePropertyChangeMode::STATIC, + }, + .initialValue = {.floatValues = {150000}}}, + + {.config = + { + .prop = toInt(VehicleProperty::INFO_EV_CONNECTOR_TYPE), + .access = VehiclePropertyAccess::READ, + .changeMode = VehiclePropertyChangeMode::STATIC, + }, + .initialValue = {.int32Values = {1}}}, + {.config = { .prop = toInt(VehicleProperty::INFO_MAKE), @@ -89,7 +173,7 @@ const ConfigDeclaration kVehicleProperties[]{ .access = VehiclePropertyAccess::READ, .changeMode = VehiclePropertyChangeMode::ON_CHANGE, .minSampleRate = 1.0f, - .maxSampleRate = 1000.0f, + .maxSampleRate = 10.0f, }, .initialValue = {.floatValues = {0.0f}}}, @@ -108,11 +192,59 @@ const ConfigDeclaration kVehicleProperties[]{ .access = VehiclePropertyAccess::READ, .changeMode = VehiclePropertyChangeMode::CONTINUOUS, .minSampleRate = 1.0f, - .maxSampleRate = 1000.0f, + .maxSampleRate = 10.0f, }, .initialValue = {.floatValues = {0.0f}}, }, + {.config = + { + .prop = toInt(VehicleProperty::FUEL_LEVEL), + .access = VehiclePropertyAccess::READ, + .changeMode = VehiclePropertyChangeMode::ON_CHANGE, + }, + .initialValue = {.floatValues = {15000}}}, + + {.config = + { + .prop = toInt(VehicleProperty::FUEL_DOOR_OPEN), + .access = VehiclePropertyAccess::READ, + .changeMode = VehiclePropertyChangeMode::ON_CHANGE, + }, + .initialValue = {.int32Values = {0}}}, + + {.config = + { + .prop = toInt(VehicleProperty::EV_BATTERY_LEVEL), + .access = VehiclePropertyAccess::READ, + .changeMode = VehiclePropertyChangeMode::ON_CHANGE, + }, + .initialValue = {.floatValues = {150000}}}, + + {.config = + { + .prop = toInt(VehicleProperty::EV_CHARGE_PORT_OPEN), + .access = VehiclePropertyAccess::READ, + .changeMode = VehiclePropertyChangeMode::ON_CHANGE, + }, + .initialValue = {.int32Values = {0}}}, + + {.config = + { + .prop = toInt(VehicleProperty::EV_CHARGE_PORT_CONNECTED), + .access = VehiclePropertyAccess::READ, + .changeMode = VehiclePropertyChangeMode::ON_CHANGE, + }, + .initialValue = {.int32Values = {0}}}, + + {.config = + { + .prop = toInt(VehicleProperty::EV_BATTERY_INSTANTANEOUS_CHARGE_RATE), + .access = VehiclePropertyAccess::READ, + .changeMode = VehiclePropertyChangeMode::ON_CHANGE, + }, + .initialValue = {.floatValues = {0}}}, + {.config = { .prop = toInt(VehicleProperty::CURRENT_GEAR), @@ -139,77 +271,108 @@ const ConfigDeclaration kVehicleProperties[]{ {.config = { - .prop = toInt(VehicleProperty::HVAC_POWER_ON), - .access = VehiclePropertyAccess::READ_WRITE, + .prop = toInt(VehicleProperty::HW_KEY_INPUT), + .access = VehiclePropertyAccess::READ, .changeMode = 
VehiclePropertyChangeMode::ON_CHANGE, - .supportedAreas = toInt(VehicleAreaZone::ROW_1), - // TODO(bryaneyler): Ideally, this is generated dynamically from - // kHvacPowerProperties. - .configString = "0x12400500,0x12400501" // HVAC_FAN_SPEED,HVAC_FAN_DIRECTION }, + .initialValue = {.int32Values = {0, 0, 0}}}, + + {.config = {.prop = toInt(VehicleProperty::HVAC_POWER_ON), + .access = VehiclePropertyAccess::READ_WRITE, + .changeMode = VehiclePropertyChangeMode::ON_CHANGE, + .areaConfigs = {VehicleAreaConfig{.areaId = HVAC_ALL}}, + // TODO(bryaneyler): Ideally, this is generated dynamically from + // kHvacPowerProperties. + .configArray = + { + 0x12400500, // HVAC_FAN_SPEED + 0x12400501 // HVAC_FAN_DIRECTION + }}, .initialValue = {.int32Values = {1}}}, { .config = {.prop = toInt(VehicleProperty::HVAC_DEFROSTER), .access = VehiclePropertyAccess::READ_WRITE, .changeMode = VehiclePropertyChangeMode::ON_CHANGE, - .supportedAreas = - VehicleAreaWindow::FRONT_WINDSHIELD | VehicleAreaWindow::REAR_WINDSHIELD}, + .areaConfigs = + {VehicleAreaConfig{.areaId = toInt(VehicleAreaWindow::FRONT_WINDSHIELD)}, + VehicleAreaConfig{.areaId = toInt(VehicleAreaWindow::REAR_WINDSHIELD)}}}, .initialValue = {.int32Values = {0}} // Will be used for all areas. }, + {.config = {.prop = toInt(VehicleProperty::HVAC_MAX_DEFROST_ON), + .access = VehiclePropertyAccess::READ_WRITE, + .changeMode = VehiclePropertyChangeMode::ON_CHANGE, + .areaConfigs = {VehicleAreaConfig{.areaId = HVAC_ALL}}}, + .initialValue = {.int32Values = {0}}}, + {.config = {.prop = toInt(VehicleProperty::HVAC_RECIRC_ON), .access = VehiclePropertyAccess::READ_WRITE, .changeMode = VehiclePropertyChangeMode::ON_CHANGE, - .supportedAreas = toInt(VehicleAreaZone::ROW_1)}, + .areaConfigs = {VehicleAreaConfig{.areaId = HVAC_ALL}}}, .initialValue = {.int32Values = {1}}}, + {.config = {.prop = toInt(VehicleProperty::HVAC_AUTO_RECIRC_ON), + .access = VehiclePropertyAccess::READ_WRITE, + .changeMode = VehiclePropertyChangeMode::ON_CHANGE, + .areaConfigs = {VehicleAreaConfig{.areaId = HVAC_ALL}}}, + .initialValue = {.int32Values = {0}}}, + {.config = {.prop = toInt(VehicleProperty::HVAC_AC_ON), .access = VehiclePropertyAccess::READ_WRITE, .changeMode = VehiclePropertyChangeMode::ON_CHANGE, - .supportedAreas = toInt(VehicleAreaZone::ROW_1)}, + .areaConfigs = {VehicleAreaConfig{.areaId = HVAC_ALL}}}, .initialValue = {.int32Values = {1}}}, + {.config = {.prop = toInt(VehicleProperty::HVAC_MAX_AC_ON), + .access = VehiclePropertyAccess::READ_WRITE, + .changeMode = VehiclePropertyChangeMode::ON_CHANGE, + .areaConfigs = {VehicleAreaConfig{.areaId = HVAC_ALL}}}, + .initialValue = {.int32Values = {0}}}, + {.config = {.prop = toInt(VehicleProperty::HVAC_AUTO_ON), .access = VehiclePropertyAccess::READ_WRITE, .changeMode = VehiclePropertyChangeMode::ON_CHANGE, - .supportedAreas = toInt(VehicleAreaZone::ROW_1)}, + .areaConfigs = {VehicleAreaConfig{.areaId = HVAC_ALL}}}, .initialValue = {.int32Values = {1}}}, + {.config = {.prop = toInt(VehicleProperty::HVAC_DUAL_ON), + .access = VehiclePropertyAccess::READ_WRITE, + .changeMode = VehiclePropertyChangeMode::ON_CHANGE, + .areaConfigs = {VehicleAreaConfig{.areaId = HVAC_ALL}}}, + .initialValue = {.int32Values = {0}}}, + {.config = {.prop = toInt(VehicleProperty::HVAC_FAN_SPEED), .access = VehiclePropertyAccess::READ_WRITE, .changeMode = VehiclePropertyChangeMode::ON_CHANGE, - .supportedAreas = toInt(VehicleAreaZone::ROW_1), - .areaConfigs = {VehicleAreaConfig{.areaId = toInt(VehicleAreaZone::ROW_1), - .minInt32Value = 1, - 
.maxInt32Value = 7}}}, + .areaConfigs = {VehicleAreaConfig{ + .areaId = HVAC_ALL, .minInt32Value = 1, .maxInt32Value = 7}}}, .initialValue = {.int32Values = {3}}}, - {.config = - { - .prop = toInt(VehicleProperty::HVAC_FAN_DIRECTION), - .access = VehiclePropertyAccess::READ_WRITE, - .changeMode = VehiclePropertyChangeMode::ON_CHANGE, - .supportedAreas = toInt(VehicleAreaZone::ROW_1), - }, + {.config = {.prop = toInt(VehicleProperty::HVAC_FAN_DIRECTION), + .access = VehiclePropertyAccess::READ_WRITE, + .changeMode = VehiclePropertyChangeMode::ON_CHANGE, + .areaConfigs = {VehicleAreaConfig{.areaId = HVAC_ALL}}}, .initialValue = {.int32Values = {toInt(VehicleHvacFanDirection::FACE)}}}, + {.config = {.prop = toInt(VehicleProperty::HVAC_STEERING_WHEEL_HEAT), + .access = VehiclePropertyAccess::READ_WRITE, + .changeMode = VehiclePropertyChangeMode::ON_CHANGE, + .areaConfigs = {VehicleAreaConfig{ + .areaId = (0), .minInt32Value = -2, .maxInt32Value = 2}}}, + .initialValue = {.int32Values = {0}}}, // +ve values for heating and -ve for cooling + {.config = {.prop = toInt(VehicleProperty::HVAC_TEMPERATURE_SET), .access = VehiclePropertyAccess::READ_WRITE, .changeMode = VehiclePropertyChangeMode::ON_CHANGE, - .supportedAreas = VehicleAreaZone::ROW_1_LEFT | VehicleAreaZone::ROW_1_RIGHT, .areaConfigs = {VehicleAreaConfig{ - .areaId = toInt(VehicleAreaZone::ROW_1_LEFT), - .minFloatValue = 16, - .maxFloatValue = 32, + .areaId = HVAC_LEFT, .minFloatValue = 16, .maxFloatValue = 32, }, VehicleAreaConfig{ - .areaId = toInt(VehicleAreaZone::ROW_1_RIGHT), - .minFloatValue = 16, - .maxFloatValue = 32, + .areaId = HVAC_RIGHT, .minFloatValue = 16, .maxFloatValue = 32, }}}, - .initialAreaValues = {{toInt(VehicleAreaZone::ROW_1_LEFT), {.floatValues = {16}}}, - {toInt(VehicleAreaZone::ROW_1_RIGHT), {.floatValues = {20}}}}}, + .initialAreaValues = {{HVAC_LEFT, {.floatValues = {16}}}, + {HVAC_RIGHT, {.floatValues = {20}}}}}, {.config = { @@ -222,6 +385,12 @@ const ConfigDeclaration kVehicleProperties[]{ }, .initialValue = {.floatValues = {25.0f}}}, + {.config = {.prop = toInt(VehicleProperty::HVAC_TEMPERATURE_DISPLAY_UNITS), + .access = VehiclePropertyAccess::READ_WRITE, + .changeMode = VehiclePropertyChangeMode::ON_CHANGE, + .areaConfigs = {VehicleAreaConfig{.areaId = (0)}}}, + .initialValue = {.int32Values = {(int)VehicleUnit::FAHRENHEIT}}}, + {.config = { .prop = toInt(VehicleProperty::NIGHT_MODE), @@ -232,43 +401,27 @@ const ConfigDeclaration kVehicleProperties[]{ {.config = { - .prop = toInt(VehicleProperty::DRIVING_STATUS), + .prop = toInt(VehicleProperty::GEAR_SELECTION), .access = VehiclePropertyAccess::READ, .changeMode = VehiclePropertyChangeMode::ON_CHANGE, }, - .initialValue = {.int32Values = {toInt(VehicleDrivingStatus::UNRESTRICTED)}}}, + .initialValue = {.int32Values = {toInt(VehicleGear::GEAR_PARK)}}}, {.config = { - .prop = toInt(VehicleProperty::GEAR_SELECTION), + .prop = toInt(VehicleProperty::IGNITION_STATE), .access = VehiclePropertyAccess::READ, .changeMode = VehiclePropertyChangeMode::ON_CHANGE, }, - .initialValue = {.int32Values = {toInt(VehicleGear::GEAR_PARK)}}}, - - { - .config = - { - .prop = toInt(VehicleProperty::INFO_FUEL_CAPACITY), - .access = VehiclePropertyAccess::READ, - .changeMode = VehiclePropertyChangeMode::STATIC, - }, - .initialValue = {.floatValues = {123000.0f}} // In Milliliters - }, - - {.config = {.prop = toInt(VehicleProperty::DISPLAY_BRIGHTNESS), - .access = VehiclePropertyAccess::READ_WRITE, - .changeMode = VehiclePropertyChangeMode::ON_CHANGE, - .areaConfigs = 
{VehicleAreaConfig{.minInt32Value = 0, .maxInt32Value = 10}}}, - .initialValue = {.int32Values = {7}}}, + .initialValue = {.int32Values = {toInt(VehicleIgnitionState::ON)}}}, {.config = { - .prop = toInt(VehicleProperty::IGNITION_STATE), + .prop = toInt(VehicleProperty::ENGINE_OIL_LEVEL), .access = VehiclePropertyAccess::READ, .changeMode = VehiclePropertyChangeMode::ON_CHANGE, }, - .initialValue = {.int32Values = {toInt(VehicleIgnitionState::ON)}}}, + .initialValue = {.int32Values = {toInt(VehicleOilLevel::NORMAL)}}}, {.config = { @@ -289,43 +442,56 @@ const ConfigDeclaration kVehicleProperties[]{ }, }, + {.config = {.prop = toInt(VehicleProperty::DOOR_LOCK), + .access = VehiclePropertyAccess::READ, + .changeMode = VehiclePropertyChangeMode::ON_CHANGE, + .areaConfigs = {VehicleAreaConfig{.areaId = DOOR_1_LEFT}, + VehicleAreaConfig{.areaId = DOOR_1_RIGHT}}}, + .initialAreaValues = {{DOOR_1_LEFT, {.int32Values = {1}}}, + {DOOR_1_RIGHT, {.int32Values = {1}}}}}, + {.config = { - .prop = toInt(VehicleProperty::DOOR_LOCK), + .prop = WHEEL_TICK, .access = VehiclePropertyAccess::READ, - .changeMode = VehiclePropertyChangeMode::ON_CHANGE, + .changeMode = VehiclePropertyChangeMode::CONTINUOUS, + .configArray = {ALL_WHEELS, 50000, 50000, 50000, 50000}, + .minSampleRate = 1.0f, + .maxSampleRate = 10.0f, }, - .initialValue = {.int32Values = {1}}}, + .initialValue = {.int64Values = {0, 100000, 200000, 300000, 400000}}}, - { - .config = - { - .prop = WHEEL_TICK, + {.config = {.prop = ABS_ACTIVE, .access = VehiclePropertyAccess::READ, - .changeMode = VehiclePropertyChangeMode::CONTINUOUS, - .configArray = {ALL_WHEELS, 50000, 50000, 50000, 50000}, - .minSampleRate = 1.0f, - .maxSampleRate = 100.0f, - }, - }, + .changeMode = VehiclePropertyChangeMode::ON_CHANGE}, + .initialValue = {.int32Values = {0}}}, - { - .config = - { - .prop = ABS_ACTIVE, + {.config = {.prop = TRACTION_CONTROL_ACTIVE, .access = VehiclePropertyAccess::READ, - .changeMode = VehiclePropertyChangeMode::ON_CHANGE, - }, - }, + .changeMode = VehiclePropertyChangeMode::ON_CHANGE}, + .initialValue = {.int32Values = {0}}}, - { - .config = - { - .prop = TRACTION_CONTROL_ACTIVE, + {.config = {.prop = toInt(VehicleProperty::AP_POWER_STATE_REQ), .access = VehiclePropertyAccess::READ, .changeMode = VehiclePropertyChangeMode::ON_CHANGE, - }, - }, + .configArray = {3}}, + .initialValue = {.int32Values = {toInt(VehicleApPowerStateReq::ON_FULL), 0}}}, + + {.config = {.prop = toInt(VehicleProperty::AP_POWER_STATE_REPORT), + .access = VehiclePropertyAccess::WRITE, + .changeMode = VehiclePropertyChangeMode::ON_CHANGE}, + .initialValue = {.int32Values = {toInt(VehicleApPowerStateReport::BOOT_COMPLETE), 0}}}, + + {.config = {.prop = toInt(VehicleProperty::DISPLAY_BRIGHTNESS), + .access = VehiclePropertyAccess::READ_WRITE, + .changeMode = VehiclePropertyChangeMode::ON_CHANGE, + .areaConfigs = {VehicleAreaConfig{.minInt32Value = 0, .maxInt32Value = 100}}}, + .initialValue = {.int32Values = {100}}}, + + {.config = {.prop = toInt(VehicleProperty::AP_POWER_BOOTUP_REASON), + .access = VehiclePropertyAccess::READ, + .changeMode = VehiclePropertyChangeMode::STATIC}, + .initialValue = {.int32Values = {toInt(VehicleApPowerBootupReason::USER_POWER_ON)}}}, { .config = {.prop = OBD2_LIVE_FRAME, diff --git a/automotive/vehicle/2.0/default/impl/vhal_v2_0/EmulatedVehicleHal.cpp b/automotive/vehicle/2.0/default/impl/vhal_v2_0/EmulatedVehicleHal.cpp index 6bc05224030256371d5f742813cb39c10204effc..07695bfe7eeb453a144afe1e7f7c186cd7eef80b 100644 --- 
a/automotive/vehicle/2.0/default/impl/vhal_v2_0/EmulatedVehicleHal.cpp +++ b/automotive/vehicle/2.0/default/impl/vhal_v2_0/EmulatedVehicleHal.cpp @@ -19,6 +19,8 @@ #include #include "EmulatedVehicleHal.h" +#include "JsonFakeValueGenerator.h" +#include "LinearFakeValueGenerator.h" #include "Obd2SensorStore.h" namespace android { @@ -85,18 +87,15 @@ static std::unique_ptr fillDefaultObd2Frame(size_t numVendorInt return sensorStore; } -enum class FakeDataCommand : int32_t { - Stop = 0, - Start = 1, -}; - EmulatedVehicleHal::EmulatedVehicleHal(VehiclePropertyStore* propStore) : mPropStore(propStore), mHvacPowerProps(std::begin(kHvacPowerProperties), std::end(kHvacPowerProperties)), - mRecurrentTimer(std::bind(&EmulatedVehicleHal::onContinuousPropertyTimer, - this, std::placeholders::_1)), - mFakeValueGenerator(std::bind(&EmulatedVehicleHal::onFakeValueGenerated, - this, std::placeholders::_1, std::placeholders::_2)) { + mRecurrentTimer( + std::bind(&EmulatedVehicleHal::onContinuousPropertyTimer, this, std::placeholders::_1)), + mLinearFakeValueGenerator(std::make_unique( + std::bind(&EmulatedVehicleHal::onFakeValueGenerated, this, std::placeholders::_1))), + mJsonFakeValueGenerator(std::make_unique( + std::bind(&EmulatedVehicleHal::onFakeValueGenerated, this, std::placeholders::_1))) { initStaticConfig(); for (size_t i = 0; i < arraysize(kVehicleProperties); i++) { mPropStore->registerProperty(kVehicleProperties[i].config); @@ -132,28 +131,60 @@ VehicleHal::VehiclePropValuePtr EmulatedVehicleHal::get( } StatusCode EmulatedVehicleHal::set(const VehiclePropValue& propValue) { + static constexpr bool shouldUpdateStatus = false; + if (propValue.prop == kGenerateFakeDataControllingProperty) { StatusCode status = handleGenerateFakeDataRequest(propValue); if (status != StatusCode::OK) { return status; } } else if (mHvacPowerProps.count(propValue.prop)) { - auto hvacPowerOn = mPropStore->readValueOrNull(toInt(VehicleProperty::HVAC_POWER_ON), - toInt(VehicleAreaZone::ROW_1)); + auto hvacPowerOn = mPropStore->readValueOrNull( + toInt(VehicleProperty::HVAC_POWER_ON), + (VehicleAreaSeat::ROW_1_LEFT | VehicleAreaSeat::ROW_1_RIGHT | + VehicleAreaSeat::ROW_2_LEFT | VehicleAreaSeat::ROW_2_CENTER | + VehicleAreaSeat::ROW_2_RIGHT)); if (hvacPowerOn && hvacPowerOn->value.int32Values.size() == 1 && hvacPowerOn->value.int32Values[0] == 0) { return StatusCode::NOT_AVAILABLE; } - } else if (propValue.prop == OBD2_FREEZE_FRAME_CLEAR) { - return clearObd2FreezeFrames(propValue); - } else if (propValue.prop == VEHICLE_MAP_SERVICE) { - // Placeholder for future implementation of VMS property in the default hal. For now, just - // returns OK; otherwise, hal clients crash with property not supported. - return StatusCode::OK; + } else { + // Handle property specific code + switch (propValue.prop) { + case OBD2_FREEZE_FRAME_CLEAR: + return clearObd2FreezeFrames(propValue); + case VEHICLE_MAP_SERVICE: + // Placeholder for future implementation of VMS property in the default hal. For + // now, just returns OK; otherwise, hal clients crash with property not supported. + return StatusCode::OK; + case AP_POWER_STATE_REPORT: + // This property has different behavior between get/set. When it is set, the value + // goes to the vehicle but is NOT updated in the property store back to Android. + // Commented out for now, because it may mess up automated testing that use the + // emulator interface. 
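The HVAC_POWER_ON lookup above reads the stored value at an area ID built by OR-ing five seat areas; the DefaultConfig.h changes earlier in this patch refer to that same combination as HVAC_ALL, whose definition is not shown in these hunks. A minimal sketch of that assumption, with the VehicleAreaSeat flag values reproduced here only for illustration:

    // HVAC_ALL is assumed to be the OR of the seat areas that the HVAC_POWER_ON
    // lookup above uses; the VehicleAreaSeat flag values are illustrative.
    #include <cstdint>
    #include <cstdio>

    namespace sketch {
    enum VehicleAreaSeat : int32_t {
        ROW_1_LEFT = 0x0001,
        ROW_1_RIGHT = 0x0004,
        ROW_2_LEFT = 0x0010,
        ROW_2_CENTER = 0x0020,
        ROW_2_RIGHT = 0x0040,
    };

    // One area ID covering every seat: HVAC power is switched for all of them
    // together, so the config table needs only VehicleAreaConfig{.areaId = HVAC_ALL}.
    constexpr int32_t HVAC_ALL =
        ROW_1_LEFT | ROW_1_RIGHT | ROW_2_LEFT | ROW_2_CENTER | ROW_2_RIGHT;
    }  // namespace sketch

    int main() {
        std::printf("HVAC_ALL = 0x%x\n", sketch::HVAC_ALL);  // 0x75 with these values
        return 0;
    }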
+ // getEmulatorOrDie()->doSetValueFromClient(propValue); + return StatusCode::OK; + } } - if (!mPropStore->writeValue(propValue)) { + if (propValue.status != VehiclePropertyStatus::AVAILABLE) { + // Android side cannot set property status - this value is the + // purview of the HAL implementation to reflect the state of + // its underlying hardware + return StatusCode::INVALID_ARG; + } + auto currentPropValue = mPropStore->readValueOrNull(propValue); + + if (currentPropValue == nullptr) { + return StatusCode::INVALID_ARG; + } + if (currentPropValue->status != VehiclePropertyStatus::AVAILABLE) { + // do not allow Android side to set() a disabled/error property + return StatusCode::NOT_AVAILABLE; + } + + if (!mPropStore->writeValue(propValue, shouldUpdateStatus)) { return StatusCode::INVALID_ARG; } @@ -175,9 +206,11 @@ static bool isDiagnosticProperty(VehiclePropConfig propConfig) { // Parse supported properties list and generate vector of property values to hold current values. void EmulatedVehicleHal::onCreate() { + static constexpr bool shouldUpdateStatus = true; + for (auto& it : kVehicleProperties) { VehiclePropConfig cfg = it.config; - int32_t supportedAreas = cfg.supportedAreas; + int32_t numAreas = cfg.areaConfigs.size(); if (isDiagnosticProperty(cfg)) { // do not write an initial empty value for the diagnostic properties @@ -185,22 +218,26 @@ void EmulatedVehicleHal::onCreate() { continue; } - // A global property will have supportedAreas = 0 + // A global property will have only a single area if (isGlobalProp(cfg.prop)) { - supportedAreas = 0; + numAreas = 1; } - // This loop is a do-while so it executes at least once to handle global properties - do { - int32_t curArea = supportedAreas; - supportedAreas &= supportedAreas - 1; // Clear the right-most bit of supportedAreas. - curArea ^= supportedAreas; // Set curArea to the previously cleared bit. 
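A minimal stand-alone sketch (simplified types, not the real VHAL classes) of the ordering of checks the rewritten set() above applies before writing to the property store:

    #include <cstdint>
    #include <map>
    #include <optional>
    #include <utility>

    enum class StatusCode { OK, INVALID_ARG, NOT_AVAILABLE };
    enum class PropStatus { AVAILABLE, UNAVAILABLE, ERROR };

    struct PropValue {
        int32_t prop = 0;
        int32_t areaId = 0;
        PropStatus status = PropStatus::AVAILABLE;
        int32_t value = 0;
    };

    // Stand-in for VehiclePropertyStore, keyed by {prop, areaId}.
    class Store {
      public:
        std::optional<PropValue> readValueOrNull(const PropValue& request) const {
            auto it = mValues.find({request.prop, request.areaId});
            if (it == mValues.end()) return std::nullopt;
            return it->second;
        }
        bool writeValue(const PropValue& v, bool updateStatus) {
            auto& slot = mValues[{v.prop, v.areaId}];
            PropStatus keep = slot.status;
            slot = v;
            if (!updateStatus) slot.status = keep;  // mirrors shouldUpdateStatus == false
            return true;
        }

      private:
        std::map<std::pair<int32_t, int32_t>, PropValue> mValues;
    };

    StatusCode setSketch(Store& store, const PropValue& propValue) {
        // 1. Android may not set a status; status is owned by the HAL.
        if (propValue.status != PropStatus::AVAILABLE) return StatusCode::INVALID_ARG;
        // 2. The property/area must already exist in the store.
        auto current = store.readValueOrNull(propValue);
        if (!current) return StatusCode::INVALID_ARG;
        // 3. A property the HAL marked unavailable cannot be written from Android.
        if (current->status != PropStatus::AVAILABLE) return StatusCode::NOT_AVAILABLE;
        // 4. Write the value without touching the stored status.
        return store.writeValue(propValue, /*updateStatus=*/false) ? StatusCode::OK
                                                                   : StatusCode::INVALID_ARG;
    }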
+ for (int i = 0; i < numAreas; i++) { + int32_t curArea; + + if (isGlobalProp(cfg.prop)) { + curArea = 0; + } else { + curArea = cfg.areaConfigs[i].areaId; + } // Create a separate instance for each individual zone VehiclePropValue prop = { .prop = cfg.prop, .areaId = curArea, }; + if (it.initialAreaValues.size() > 0) { auto valueForAreaIt = it.initialAreaValues.find(curArea); if (valueForAreaIt != it.initialAreaValues.end()) { @@ -212,9 +249,8 @@ void EmulatedVehicleHal::onCreate() { } else { prop.value = it.initialValue; } - mPropStore->writeValue(prop); - - } while (supportedAreas != 0); + mPropStore->writeValue(prop, shouldUpdateStatus); + } } initObd2LiveFrame(*mPropStore->getConfigOrDie(OBD2_LIVE_FRAME)); initObd2FreezeFrame(*mPropStore->getConfigOrDie(OBD2_FREEZE_FRAME)); @@ -246,8 +282,7 @@ void EmulatedVehicleHal::onContinuousPropertyTimer(const std::vector& p } } -StatusCode EmulatedVehicleHal::subscribe(int32_t property, int32_t, - float sampleRate) { +StatusCode EmulatedVehicleHal::subscribe(int32_t property, float sampleRate) { ALOGI("%s propId: 0x%x, sampleRate: %f", __func__, property, sampleRate); if (isContinuousProperty(property)) { @@ -274,6 +309,8 @@ bool EmulatedVehicleHal::isContinuousProperty(int32_t propId) const { } bool EmulatedVehicleHal::setPropertyFromVehicle(const VehiclePropValue& propValue) { + static constexpr bool shouldUpdateStatus = true; + if (propValue.prop == kGenerateFakeDataControllingProperty) { StatusCode status = handleGenerateFakeDataRequest(propValue); if (status != StatusCode::OK) { @@ -281,7 +318,7 @@ bool EmulatedVehicleHal::setPropertyFromVehicle(const VehiclePropValue& propValu } } - if (mPropStore->writeValue(propValue)) { + if (mPropStore->writeValue(propValue, shouldUpdateStatus)) { doHalEvent(getValuePool()->obtain(propValue)); return true; } else { @@ -296,41 +333,37 @@ std::vector EmulatedVehicleHal::getAllProperties() const { StatusCode EmulatedVehicleHal::handleGenerateFakeDataRequest(const VehiclePropValue& request) { ALOGI("%s", __func__); const auto& v = request.value; - if (v.int32Values.size() < 2) { - ALOGE("%s: expected at least 2 elements in int32Values, got: %zu", __func__, - v.int32Values.size()); + if (!v.int32Values.size()) { + ALOGE("%s: expected at least \"command\" field in int32Values", __func__); return StatusCode::INVALID_ARG; } FakeDataCommand command = static_cast(v.int32Values[0]); - int32_t propId = v.int32Values[1]; switch (command) { - case FakeDataCommand::Start: { - if (!v.int64Values.size()) { - ALOGE("%s: interval is not provided in int64Values", __func__); - return StatusCode::INVALID_ARG; - } - auto interval = std::chrono::nanoseconds(v.int64Values[0]); - - if (v.floatValues.size() < 3) { - ALOGE("%s: expected at least 3 element sin floatValues, got: %zu", __func__, - v.floatValues.size()); - return StatusCode::INVALID_ARG; - } - float initialValue = v.floatValues[0]; - float dispersion = v.floatValues[1]; - float increment = v.floatValues[2]; - - ALOGI("%s, propId: %d, initalValue: %f", __func__, propId, initialValue); - mFakeValueGenerator.startGeneratingHalEvents( - interval, propId, initialValue, dispersion, increment); - - break; + case FakeDataCommand::StartLinear: { + ALOGI("%s, FakeDataCommand::StartLinear", __func__); + return mLinearFakeValueGenerator->start(request); + } + case FakeDataCommand::StartJson: { + ALOGI("%s, FakeDataCommand::StartJson", __func__); + return mJsonFakeValueGenerator->start(request); } - case FakeDataCommand::Stop: { - ALOGI("%s, FakeDataCommandStop", __func__); - 
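The onCreate() hunk above replaces the old supportedAreas bit-twiddling with a plain loop over areaConfigs. A small sketch showing that both enumerate the same per-area IDs; the mask values are illustrative:

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    int main() {
        // Old style: one bitmask, with the lowest set bit peeled off each iteration.
        int32_t supportedAreas = 0x1 | 0x4;  // e.g. two seat areas
        do {
            int32_t curArea = supportedAreas;
            supportedAreas &= supportedAreas - 1;  // clear the right-most set bit
            curArea ^= supportedAreas;             // keep only that cleared bit
            std::printf("old-style area: 0x%x\n", curArea);
        } while (supportedAreas != 0);

        // New style: explicit per-area configs, one areaId each (0 for global props).
        struct AreaConfig { int32_t areaId; };
        std::vector<AreaConfig> areaConfigs = {{0x1}, {0x4}};
        for (const auto& ac : areaConfigs) {
            std::printf("new-style area: 0x%x\n", ac.areaId);
        }
        return 0;
    }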
mFakeValueGenerator.stopGeneratingHalEvents(propId); + case FakeDataCommand::StopLinear: { + ALOGI("%s, FakeDataCommand::StopLinear", __func__); + return mLinearFakeValueGenerator->stop(request); + } + case FakeDataCommand::StopJson: { + ALOGI("%s, FakeDataCommand::StopJson", __func__); + return mJsonFakeValueGenerator->stop(request); + } + case FakeDataCommand::KeyPress: { + ALOGI("%s, FakeDataCommand::KeyPress", __func__); + int32_t keyCode = request.value.int32Values[2]; + int32_t display = request.value.int32Values[3]; + doHalEvent( + createHwInputKeyProp(VehicleHwKeyInputAction::ACTION_DOWN, keyCode, display)); + doHalEvent(createHwInputKeyProp(VehicleHwKeyInputAction::ACTION_UP, keyCode, display)); break; } default: { @@ -341,27 +374,29 @@ StatusCode EmulatedVehicleHal::handleGenerateFakeDataRequest(const VehiclePropVa return StatusCode::OK; } -void EmulatedVehicleHal::onFakeValueGenerated(int32_t propId, float value) { - VehiclePropValuePtr updatedPropValue {}; - switch (getPropType(propId)) { - case VehiclePropertyType::FLOAT: - updatedPropValue = getValuePool()->obtainFloat(value); - break; - case VehiclePropertyType::INT32: - updatedPropValue = getValuePool()->obtainInt32(static_cast(value)); - break; - default: - ALOGE("%s: data type for property: 0x%x not supported", __func__, propId); - return; +VehicleHal::VehiclePropValuePtr EmulatedVehicleHal::createHwInputKeyProp( + VehicleHwKeyInputAction action, int32_t keyCode, int32_t targetDisplay) { + auto keyEvent = getValuePool()->obtain(VehiclePropertyType::INT32_VEC, 3); + keyEvent->prop = toInt(VehicleProperty::HW_KEY_INPUT); + keyEvent->areaId = 0; + keyEvent->timestamp = elapsedRealtimeNano(); + keyEvent->status = VehiclePropertyStatus::AVAILABLE; + keyEvent->value.int32Values[0] = toInt(action); + keyEvent->value.int32Values[1] = keyCode; + keyEvent->value.int32Values[2] = targetDisplay; + return keyEvent; +} - } +void EmulatedVehicleHal::onFakeValueGenerated(const VehiclePropValue& value) { + ALOGD("%s: %s", __func__, toString(value).c_str()); + static constexpr bool shouldUpdateStatus = false; + VehiclePropValuePtr updatedPropValue = getValuePool()->obtain(value); if (updatedPropValue) { - updatedPropValue->prop = propId; - updatedPropValue->areaId = 0; // Add area support if necessary. 
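For the KeyPress command above, the control property's int32Values carry the key code at index 2 and the target display at index 3, and the HAL answers with an ACTION_DOWN/ACTION_UP pair of HW_KEY_INPUT events. An illustrative request builder follows; the command constant and control property ID are placeholders for values defined outside this hunk:

    #include <cstdint>
    #include <vector>

    struct FakePropValue {
        int32_t prop = 0;
        std::vector<int32_t> int32Values;
    };

    FakePropValue makeKeyPressRequest(int32_t controllingProp, int32_t keyPressCommand,
                                      int32_t keyCode, int32_t display) {
        FakePropValue request;
        request.prop = controllingProp;           // kGenerateFakeDataControllingProperty
        request.int32Values = {keyPressCommand,   // [0] FakeDataCommand::KeyPress
                               0,                 // [1] unused for KeyPress
                               keyCode,           // [2] key code to inject
                               display};          // [3] target display
        return request;
    }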
updatedPropValue->timestamp = elapsedRealtimeNano(); - mPropStore->writeValue(*updatedPropValue); - auto changeMode = mPropStore->getConfigOrDie(propId)->changeMode; + updatedPropValue->status = VehiclePropertyStatus::AVAILABLE; + mPropStore->writeValue(*updatedPropValue, shouldUpdateStatus); + auto changeMode = mPropStore->getConfigOrDie(value.prop)->changeMode; if (VehiclePropertyChangeMode::ON_CHANGE == changeMode) { doHalEvent(move(updatedPropValue)); } @@ -389,16 +424,20 @@ void EmulatedVehicleHal::initStaticConfig() { } void EmulatedVehicleHal::initObd2LiveFrame(const VehiclePropConfig& propConfig) { - auto liveObd2Frame = createVehiclePropValue(VehiclePropertyType::COMPLEX, 0); + static constexpr bool shouldUpdateStatus = true; + + auto liveObd2Frame = createVehiclePropValue(VehiclePropertyType::MIXED, 0); auto sensorStore = fillDefaultObd2Frame(static_cast(propConfig.configArray[0]), static_cast(propConfig.configArray[1])); sensorStore->fillPropValue("", liveObd2Frame.get()); liveObd2Frame->prop = OBD2_LIVE_FRAME; - mPropStore->writeValue(*liveObd2Frame); + mPropStore->writeValue(*liveObd2Frame, shouldUpdateStatus); } void EmulatedVehicleHal::initObd2FreezeFrame(const VehiclePropConfig& propConfig) { + static constexpr bool shouldUpdateStatus = true; + auto sensorStore = fillDefaultObd2Frame(static_cast(propConfig.configArray[0]), static_cast(propConfig.configArray[1])); @@ -406,11 +445,11 @@ void EmulatedVehicleHal::initObd2FreezeFrame(const VehiclePropConfig& propConfig "P0102" "P0123"}; for (auto&& dtc : sampleDtcs) { - auto freezeFrame = createVehiclePropValue(VehiclePropertyType::COMPLEX, 0); + auto freezeFrame = createVehiclePropValue(VehiclePropertyType::MIXED, 0); sensorStore->fillPropValue(dtc, freezeFrame.get()); freezeFrame->prop = OBD2_FREEZE_FRAME; - mPropStore->writeValue(*freezeFrame); + mPropStore->writeValue(*freezeFrame, shouldUpdateStatus); } } diff --git a/automotive/vehicle/2.0/default/impl/vhal_v2_0/EmulatedVehicleHal.h b/automotive/vehicle/2.0/default/impl/vhal_v2_0/EmulatedVehicleHal.h index 99d7edbc786cf07b5d83562506f3409d747be222..c188aefe202a52ca260a192a828e8e9e2608898c 100644 --- a/automotive/vehicle/2.0/default/impl/vhal_v2_0/EmulatedVehicleHal.h +++ b/automotive/vehicle/2.0/default/impl/vhal_v2_0/EmulatedVehicleHal.h @@ -30,9 +30,10 @@ #include "vhal_v2_0/VehiclePropertyStore.h" #include "DefaultConfig.h" -#include "VehicleEmulator.h" #include "FakeValueGenerator.h" +#include "VehicleEmulator.h" + namespace android { namespace hardware { namespace automotive { @@ -53,7 +54,7 @@ public: VehiclePropValuePtr get(const VehiclePropValue& requestedPropValue, StatusCode* outStatus) override; StatusCode set(const VehiclePropValue& propValue) override; - StatusCode subscribe(int32_t property, int32_t areas, float sampleRate) override; + StatusCode subscribe(int32_t property, float sampleRate) override; StatusCode unsubscribe(int32_t property) override; // Methods from EmulatedVehicleHalIface @@ -66,7 +67,9 @@ private: } StatusCode handleGenerateFakeDataRequest(const VehiclePropValue& request); - void onFakeValueGenerated(int32_t propId, float value); + void onFakeValueGenerated(const VehiclePropValue& value); + VehiclePropValuePtr createHwInputKeyProp(VehicleHwKeyInputAction action, int32_t keyCode, + int32_t targetDisplay); void onContinuousPropertyTimer(const std::vector& properties); bool isContinuousProperty(int32_t propId) const; @@ -82,7 +85,8 @@ private: VehiclePropertyStore* mPropStore; std::unordered_set mHvacPowerProps; RecurrentTimer mRecurrentTimer; 
- FakeValueGenerator mFakeValueGenerator; + std::unique_ptr mLinearFakeValueGenerator; + std::unique_ptr mJsonFakeValueGenerator; }; } // impl diff --git a/automotive/vehicle/2.0/default/impl/vhal_v2_0/FakeValueGenerator.h b/automotive/vehicle/2.0/default/impl/vhal_v2_0/FakeValueGenerator.h index 7bbbb08f156c33f4332512c6a46f821678ee6f25..1eeb88dffe9ee1f70aa837901ba86cdf8913a283 100644 --- a/automotive/vehicle/2.0/default/impl/vhal_v2_0/FakeValueGenerator.h +++ b/automotive/vehicle/2.0/default/impl/vhal_v2_0/FakeValueGenerator.h @@ -14,15 +14,11 @@ * limitations under the License. */ -#ifndef android_hardware_automotive_vehicle_V2_0_impl_FakeHalEventGenerator_H_ -#define android_hardware_automotive_vehicle_V2_0_impl_FakeHalEventGenerator_H_ - -#include +#ifndef android_hardware_automotive_vehicle_V2_0_impl_FakeValueGenerator_H_ +#define android_hardware_automotive_vehicle_V2_0_impl_FakeValueGenerator_H_ #include -#include - namespace android { namespace hardware { namespace automotive { @@ -31,89 +27,27 @@ namespace V2_0 { namespace impl { -class FakeValueGenerator { -private: - // In every timer tick we may want to generate new value based on initial value for debug - // purpose. It's better to have sequential values to see if events gets delivered in order - // to the client. - - struct GeneratorCfg { - float initialValue; // - float currentValue; // Should be in range (initialValue +/- dispersion). - float dispersion; // Defines minimum and maximum value based on initial value. - float increment; // Value that we will be added to currentValue with each timer tick. - }; +using OnHalEvent = std::function; +using MuxGuard = std::lock_guard; +class FakeValueGenerator { public: - using OnHalEvent = std::function; - - FakeValueGenerator(const OnHalEvent& onHalEvent) : - mOnHalEvent(onHalEvent), - mRecurrentTimer(std::bind(&FakeValueGenerator::onTimer, this, - std::placeholders::_1)) - {} - - ~FakeValueGenerator() = default; - - - void startGeneratingHalEvents(std::chrono::nanoseconds interval, int propId, float initialValue, - float dispersion, float increment) { - MuxGuard g(mLock); - - removeLocked(propId); - - mGenCfg.insert({propId, GeneratorCfg { - .initialValue = initialValue, - .currentValue = initialValue, - .dispersion = dispersion, - .increment = increment, - }}); - - mRecurrentTimer.registerRecurrentEvent(interval, propId); - } - - void stopGeneratingHalEvents(int propId) { - MuxGuard g(mLock); - if (propId == 0) { - // Remove all. 
- for (auto&& it : mGenCfg) { - removeLocked(it.first); - } - } else { - removeLocked(propId); - } - } - -private: - void removeLocked(int propId) { - if (mGenCfg.erase(propId)) { - mRecurrentTimer.unregisterRecurrentEvent(propId); - } - } - - void onTimer(const std::vector& properties) { - MuxGuard g(mLock); - - for (int32_t propId : properties) { - auto& cfg = mGenCfg[propId]; - cfg.currentValue += cfg.increment; - if (cfg.currentValue > cfg.initialValue + cfg.dispersion) { - cfg.currentValue = cfg.initialValue - cfg.dispersion; - } - mOnHalEvent(propId, cfg.currentValue); - } - } - -private: - using MuxGuard = std::lock_guard; - - mutable std::mutex mLock; - OnHalEvent mOnHalEvent; - RecurrentTimer mRecurrentTimer; - std::unordered_map mGenCfg; + virtual ~FakeValueGenerator() = default; + /** + * Starts generating VHAL events + * + * @param request in VehiclePropValue with required information to start fake data generation + * @return StatusCode of the start request + */ + virtual StatusCode start(const VehiclePropValue& request) = 0; + /** + * Stops generating VHAL events + * @param request in VehiclePropValue with required information to stop fake data generation + * @return StatusCode of the stop request + */ + virtual StatusCode stop(const VehiclePropValue& request) = 0; }; - } // impl } // namespace V2_0 @@ -122,6 +56,4 @@ private: } // namespace hardware } // namespace android - - -#endif //android_hardware_automotive_vehicle_V2_0_impl_FakeHalEventGenerator_H_ +#endif // android_hardware_automotive_vehicle_V2_0_impl_FakeValueGenerator_H_ diff --git a/automotive/vehicle/2.0/default/impl/vhal_v2_0/JsonFakeValueGenerator.cpp b/automotive/vehicle/2.0/default/impl/vhal_v2_0/JsonFakeValueGenerator.cpp new file mode 100644 index 0000000000000000000000000000000000000000..88b8f865c5235e4d9259ece14d93e29b2e5c5573 --- /dev/null +++ b/automotive/vehicle/2.0/default/impl/vhal_v2_0/JsonFakeValueGenerator.cpp @@ -0,0 +1,174 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
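The old concrete FakeValueGenerator is replaced by the abstract interface above, so the emulated HAL can own several generators behind unique_ptr and drive them through a single OnHalEvent callback. A simplified sketch of that shape with stand-in types (not the real HIDL ones):

    #include <cstdint>
    #include <cstdio>
    #include <functional>
    #include <memory>
    #include <utility>

    enum class StatusCode { OK, INVALID_ARG };
    struct PropValue { int32_t prop; };

    using OnHalEvent = std::function<void(const PropValue&)>;

    class FakeValueGenerator {
      public:
        virtual ~FakeValueGenerator() = default;
        virtual StatusCode start(const PropValue& request) = 0;
        virtual StatusCode stop(const PropValue& request) = 0;
    };

    // A do-nothing generator that echoes the request once, just to show the wiring;
    // the real Linear/Json generators schedule timed events instead.
    class EchoGenerator : public FakeValueGenerator {
      public:
        explicit EchoGenerator(OnHalEvent onHalEvent) : mOnHalEvent(std::move(onHalEvent)) {}
        StatusCode start(const PropValue& request) override {
            mOnHalEvent(request);
            return StatusCode::OK;
        }
        StatusCode stop(const PropValue&) override { return StatusCode::OK; }

      private:
        OnHalEvent mOnHalEvent;
    };

    int main() {
        std::unique_ptr<FakeValueGenerator> gen = std::make_unique<EchoGenerator>(
            [](const PropValue& v) { std::printf("event for prop 0x%x\n", v.prop); });
        gen->start(PropValue{0x123});
        gen->stop(PropValue{});
        return 0;
    }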
+ */ + +#define LOG_TAG "JsonFakeValueGenerator" + +#include + +#include +#include + +#include "JsonFakeValueGenerator.h" + +namespace android { +namespace hardware { +namespace automotive { +namespace vehicle { +namespace V2_0 { + +namespace impl { + +JsonFakeValueGenerator::JsonFakeValueGenerator(const OnHalEvent& onHalEvent) + : mOnHalEvent(onHalEvent), mThread(&JsonFakeValueGenerator::loop, this) {} + +JsonFakeValueGenerator::~JsonFakeValueGenerator() { + mStopRequested = true; + { + MuxGuard g(mLock); + mGenCfg.index = 0; + mGenCfg.events.clear(); + } + mCond.notify_one(); + if (mThread.joinable()) { + mThread.join(); + } +} + +StatusCode JsonFakeValueGenerator::start(const VehiclePropValue& request) { + const auto& v = request.value; + if (v.stringValue.empty()) { + ALOGE("%s: path to JSON file is missing", __func__); + return StatusCode::INVALID_ARG; + } + const char* file = v.stringValue.c_str(); + std::ifstream ifs(file); + if (!ifs) { + ALOGE("%s: couldn't open %s for parsing.", __func__, file); + return StatusCode::INTERNAL_ERROR; + } + std::vector fakeVhalEvents = parseFakeValueJson(ifs); + + { + MuxGuard g(mLock); + mGenCfg = {0, fakeVhalEvents}; + } + mCond.notify_one(); + return StatusCode::OK; +} + +StatusCode JsonFakeValueGenerator::stop(const VehiclePropValue& request) { + const auto& v = request.value; + if (!v.stringValue.empty()) { + ALOGI("%s: %s", __func__, v.stringValue.c_str()); + } + + { + MuxGuard g(mLock); + mGenCfg.index = 0; + mGenCfg.events.clear(); + } + mCond.notify_one(); + return StatusCode::OK; +} + +std::vector JsonFakeValueGenerator::parseFakeValueJson(std::istream& is) { + std::vector fakeVhalEvents; + + Json::Reader reader; + Json::Value rawEvents; + if (!reader.parse(is, rawEvents)) { + ALOGE("%s: Failed to parse fake data JSON file. 
Error: %s", __func__, + reader.getFormattedErrorMessages().c_str()); + return fakeVhalEvents; + } + + for (Json::Value::ArrayIndex i = 0; i < rawEvents.size(); i++) { + Json::Value rawEvent = rawEvents[i]; + if (!rawEvent.isObject()) { + ALOGE("%s: VHAL JSON event should be an object, %s", __func__, + rawEvent.toStyledString().c_str()); + continue; + } + if (rawEvent["prop"].empty() || rawEvent["areaId"].empty() || rawEvent["value"].empty() || + rawEvent["timestamp"].empty()) { + ALOGE("%s: VHAL JSON event has missing fields, skip it, %s", __func__, + rawEvent.toStyledString().c_str()); + continue; + } + VehiclePropValue event = {.prop = rawEvent["prop"].asInt(), + .areaId = rawEvent["areaId"].asInt(), + .timestamp = rawEvent["timestamp"].asInt64()}; + + Json::Value rawEventValue = rawEvent["value"]; + auto& value = event.value; + switch (getPropType(event.prop)) { + case VehiclePropertyType::BOOLEAN: + case VehiclePropertyType::INT32: + value.int32Values.resize(1); + value.int32Values[0] = rawEventValue.asInt(); + break; + case VehiclePropertyType::INT64: + value.int64Values.resize(1); + value.int64Values[0] = rawEventValue.asInt64(); + break; + case VehiclePropertyType::FLOAT: + value.floatValues.resize(1); + value.floatValues[0] = rawEventValue.asFloat(); + break; + case VehiclePropertyType::STRING: + value.stringValue = rawEventValue.asString(); + break; + default: + ALOGE("%s: unsupported type for property: 0x%x with value: %s", __func__, + event.prop, rawEventValue.asString().c_str()); + continue; + } + fakeVhalEvents.push_back(event); + } + return fakeVhalEvents; +} + +void JsonFakeValueGenerator::loop() { + static constexpr auto kInvalidTime = TimePoint(Nanos::max()); + + while (!mStopRequested) { + auto nextEventTime = kInvalidTime; + { + MuxGuard g(mLock); + if (mGenCfg.index < mGenCfg.events.size()) { + mOnHalEvent(mGenCfg.events[mGenCfg.index]); + } + if (!mGenCfg.events.empty() && mGenCfg.index < mGenCfg.events.size() - 1) { + Nanos intervalNano = + static_cast(mGenCfg.events[mGenCfg.index + 1].timestamp - + mGenCfg.events[mGenCfg.index].timestamp); + nextEventTime = Clock::now() + intervalNano; + } + mGenCfg.index++; + } + + std::unique_lock g(mLock); + mCond.wait_until(g, nextEventTime); + } +} + +} // namespace impl + +} // namespace V2_0 +} // namespace vehicle +} // namespace automotive +} // namespace hardware +} // namespace android diff --git a/automotive/vehicle/2.0/default/impl/vhal_v2_0/JsonFakeValueGenerator.h b/automotive/vehicle/2.0/default/impl/vhal_v2_0/JsonFakeValueGenerator.h new file mode 100644 index 0000000000000000000000000000000000000000..51da4c5383c26ad937c6ad8ba711a7b32b984724 --- /dev/null +++ b/automotive/vehicle/2.0/default/impl/vhal_v2_0/JsonFakeValueGenerator.h @@ -0,0 +1,76 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef android_hardware_automotive_vehicle_V2_0_impl_JsonFakeValueGenerator_H_ +#define android_hardware_automotive_vehicle_V2_0_impl_JsonFakeValueGenerator_H_ + +#include +#include +#include +#include +#include + +#include + +#include "FakeValueGenerator.h" + +namespace android { +namespace hardware { +namespace automotive { +namespace vehicle { +namespace V2_0 { + +namespace impl { + +class JsonFakeValueGenerator : public FakeValueGenerator { +private: + using Nanos = std::chrono::nanoseconds; + using Clock = std::chrono::steady_clock; + using TimePoint = std::chrono::time_point; + + struct GeneratorCfg { + size_t index; + std::vector events; + }; + +public: + JsonFakeValueGenerator(const OnHalEvent& onHalEvent); + ~JsonFakeValueGenerator(); + StatusCode start(const VehiclePropValue& request) override; + StatusCode stop(const VehiclePropValue& request) override; + +private: + std::vector parseFakeValueJson(std::istream& is); + void loop(); + +private: + OnHalEvent mOnHalEvent; + std::thread mThread; + mutable std::mutex mLock; + std::condition_variable mCond; + GeneratorCfg mGenCfg; + std::atomic_bool mStopRequested{false}; +}; + +} // namespace impl + +} // namespace V2_0 +} // namespace vehicle +} // namespace automotive +} // namespace hardware +} // namespace android + +#endif // android_hardware_automotive_vehicle_V2_0_impl_JsonFakeValueGenerator_H_ diff --git a/automotive/vehicle/2.0/default/impl/vhal_v2_0/LinearFakeValueGenerator.cpp b/automotive/vehicle/2.0/default/impl/vhal_v2_0/LinearFakeValueGenerator.cpp new file mode 100644 index 0000000000000000000000000000000000000000..8cb9322fa6214d7add7156e0cb0b9019819dd808 --- /dev/null +++ b/automotive/vehicle/2.0/default/impl/vhal_v2_0/LinearFakeValueGenerator.cpp @@ -0,0 +1,135 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "LinearFakeValueGenerator" + +#include +#include + +#include "LinearFakeValueGenerator.h" + +namespace android { +namespace hardware { +namespace automotive { +namespace vehicle { +namespace V2_0 { + +namespace impl { + +LinearFakeValueGenerator::LinearFakeValueGenerator(const OnHalEvent& onHalEvent) + : mOnHalEvent(onHalEvent), + mRecurrentTimer(std::bind(&LinearFakeValueGenerator::onTimer, this, std::placeholders::_1)) {} + +StatusCode LinearFakeValueGenerator::start(const VehiclePropValue& request) { + const auto& v = request.value; + if (v.int32Values.size() < 2) { + ALOGE("%s: expected property ID in int32Values", __func__); + return StatusCode::INVALID_ARG; + } + int32_t propId = v.int32Values[1]; + + if (!v.int64Values.size()) { + ALOGE("%s: interval is not provided in int64Values", __func__); + return StatusCode::INVALID_ARG; + } + auto interval = std::chrono::nanoseconds(v.int64Values[0]); + + if (v.floatValues.size() < 3) { + ALOGE("%s: expected at least 3 elements in floatValues, got: %zu", __func__, + v.floatValues.size()); + return StatusCode::INVALID_ARG; + } + float initialValue = v.floatValues[0]; + float dispersion = v.floatValues[1]; + float increment = v.floatValues[2]; + + MuxGuard g(mLock); + removeLocked(propId); + mGenCfg.insert({propId, GeneratorCfg{ + .initialValue = initialValue, + .currentValue = initialValue, + .dispersion = dispersion, + .increment = increment,}}); + + mRecurrentTimer.registerRecurrentEvent(interval, propId); + return StatusCode::OK; +} + +StatusCode LinearFakeValueGenerator::stop(const VehiclePropValue& request) { + const auto& v = request.value; + if (v.int32Values.size() < 2) { + ALOGE("%s: expected property ID in int32Values", __func__); + return StatusCode::INVALID_ARG; + } + int32_t propId = v.int32Values[1]; + + MuxGuard g(mLock); + if (propId == 0) { + // Remove all. 
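LinearFakeValueGenerator::start() above reads its parameters from fixed positions in the request: int32Values[1] is the property to fake, int64Values[0] is the tick interval in nanoseconds, and floatValues holds {initialValue, dispersion, increment}. An illustrative request builder; the command constant and property ID are placeholders:

    #include <cstdint>
    #include <vector>

    struct FakeRequest {
        std::vector<int32_t> int32Values;
        std::vector<int64_t> int64Values;
        std::vector<float> floatValues;
    };

    FakeRequest makeStartLinearRequest(int32_t startLinearCommand, int32_t propId) {
        FakeRequest v;
        v.int32Values = {startLinearCommand, propId};
        v.int64Values = {100'000'000};   // one event every 100 ms
        v.floatValues = {50.0f,          // initialValue
                         20.0f,          // dispersion: stay within 50 +/- 20
                         1.0f};          // increment per tick
        return v;
    }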
+ for (auto&& it : mGenCfg) { + removeLocked(it.first); + } + } else { + removeLocked(propId); + } + return StatusCode::OK; +} + +void LinearFakeValueGenerator::removeLocked(int propId) { + if (mGenCfg.erase(propId)) { + mRecurrentTimer.unregisterRecurrentEvent(propId); + } +} + +void LinearFakeValueGenerator::onTimer(const std::vector& properties) { + MuxGuard g(mLock); + + for (int32_t propId : properties) { + auto& cfg = mGenCfg[propId]; + cfg.currentValue += cfg.increment; + if (cfg.currentValue > cfg.initialValue + cfg.dispersion) { + cfg.currentValue = cfg.initialValue - cfg.dispersion; + } + VehiclePropValue event = {.prop = propId}; + auto& value = event.value; + switch (getPropType(event.prop)) { + case VehiclePropertyType::INT32: + value.int32Values.resize(1); + value.int32Values[0] = static_cast(cfg.currentValue); + break; + case VehiclePropertyType::INT64: + value.int64Values.resize(1); + value.int64Values[0] = static_cast(cfg.currentValue); + break; + case VehiclePropertyType::FLOAT: + value.floatValues.resize(1); + value.floatValues[0] = cfg.currentValue; + break; + default: + ALOGE("%s: unsupported property type for 0x%x", __func__, event.prop); + continue; + } + mOnHalEvent(event); + } +} + +} // namespace impl + +} // namespace V2_0 +} // namespace vehicle +} // namespace automotive +} // namespace hardware +} // namespace android diff --git a/automotive/vehicle/2.0/default/impl/vhal_v2_0/LinearFakeValueGenerator.h b/automotive/vehicle/2.0/default/impl/vhal_v2_0/LinearFakeValueGenerator.h new file mode 100644 index 0000000000000000000000000000000000000000..fe6d0979627dea5d05eb7e90adb91319e7610182 --- /dev/null +++ b/automotive/vehicle/2.0/default/impl/vhal_v2_0/LinearFakeValueGenerator.h @@ -0,0 +1,70 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef android_hardware_automotive_vehicle_V2_0_impl_LinearFakeValueGenerator_H_ +#define android_hardware_automotive_vehicle_V2_0_impl_LinearFakeValueGenerator_H_ + +#include + +#include "FakeValueGenerator.h" + +namespace android { +namespace hardware { +namespace automotive { +namespace vehicle { +namespace V2_0 { + +namespace impl { + +class LinearFakeValueGenerator : public FakeValueGenerator { +private: + // In every timer tick we may want to generate new value based on initial value for debug + // purpose. It's better to have sequential values to see if events gets delivered in order + // to the client. + + struct GeneratorCfg { + float initialValue; // + float currentValue; // Should be in range (initialValue +/- dispersion). + float dispersion; // Defines minimum and maximum value based on initial value. + float increment; // Value that we will be added to currentValue with each timer tick. 
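A tiny worked example of the wrap-around rule in onTimer() above: the generated value ramps by increment each tick and drops back to initialValue - dispersion once it passes initialValue + dispersion, giving a sawtooth around the initial value:

    #include <cstdio>

    int main() {
        const float initialValue = 50.0f, dispersion = 2.0f, increment = 1.0f;
        float currentValue = initialValue;
        for (int tick = 0; tick < 6; ++tick) {
            currentValue += increment;
            if (currentValue > initialValue + dispersion) {
                currentValue = initialValue - dispersion;
            }
            std::printf("tick %d -> %.1f\n", tick, currentValue);  // 51, 52, 48, 49, 50, 51
        }
        return 0;
    }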
+ }; + +public: + LinearFakeValueGenerator(const OnHalEvent& onHalEvent); + ~LinearFakeValueGenerator() = default; + StatusCode start(const VehiclePropValue& request) override; + StatusCode stop(const VehiclePropValue& request) override; + +private: + void removeLocked(int propId); + void onTimer(const std::vector& properties); + +private: + mutable std::mutex mLock; + OnHalEvent mOnHalEvent; + RecurrentTimer mRecurrentTimer; + std::unordered_map mGenCfg; +}; + +} // namespace impl + +} // namespace V2_0 +} // namespace vehicle +} // namespace automotive +} // namespace hardware +} // namespace android + +#endif // android_hardware_automotive_vehicle_V2_0_impl_LinearFakeValueGenerator_H_ diff --git a/automotive/vehicle/2.0/default/impl/vhal_v2_0/VehicleEmulator.cpp b/automotive/vehicle/2.0/default/impl/vhal_v2_0/VehicleEmulator.cpp index 38cb74375e55053a5cf0baf07c0a0aee2c7e564d..bf7be09afcc4b7e2b595b7762fee05fcceb1fdad 100644 --- a/automotive/vehicle/2.0/default/impl/vhal_v2_0/VehicleEmulator.cpp +++ b/automotive/vehicle/2.0/default/impl/vhal_v2_0/VehicleEmulator.cpp @@ -138,6 +138,7 @@ void VehicleEmulator::doSetProperty(VehicleEmulator::EmulatorMessage& rxMsg, VehiclePropValue val = { .prop = protoVal.prop(), .areaId = protoVal.area_id(), + .status = (VehiclePropertyStatus)protoVal.status(), .timestamp = elapsedRealtimeNano(), }; @@ -234,10 +235,6 @@ void VehicleEmulator::populateProtoVehicleConfig(emulator::VehiclePropConfig* pr protoCfg->set_change_mode(toInt(cfg.changeMode)); protoCfg->set_value_type(toInt(getPropType(cfg.prop))); - if (!isGlobalProp(cfg.prop)) { - protoCfg->set_supported_areas(cfg.supportedAreas); - } - for (auto& configElement : cfg.configArray) { protoCfg->add_config_array(configElement); } @@ -251,9 +248,10 @@ void VehicleEmulator::populateProtoVehicleConfig(emulator::VehiclePropConfig* pr case VehiclePropertyType::STRING: case VehiclePropertyType::BOOLEAN: case VehiclePropertyType::INT32_VEC: + case VehiclePropertyType::INT64_VEC: case VehiclePropertyType::FLOAT_VEC: case VehiclePropertyType::BYTES: - case VehiclePropertyType::COMPLEX: + case VehiclePropertyType::MIXED: // Do nothing. These types don't have min/max values break; case VehiclePropertyType::INT64: @@ -291,6 +289,7 @@ void VehicleEmulator::populateProtoVehiclePropValue(emulator::VehiclePropValue* protoVal->set_prop(val->prop); protoVal->set_value_type(toInt(getPropType(val->prop))); protoVal->set_timestamp(val->timestamp); + protoVal->set_status((emulator::VehiclePropStatus)(val->status)); protoVal->set_area_id(val->areaId); // Copy value data if it is set. 
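The emulator code above converts between the proto status and the HIDL VehiclePropertyStatus with plain casts; that only holds while the two enums keep the same numeric values, which is the assumption sketched below (values reproduced for illustration):

    #include <cstdint>

    namespace emulator {
    enum VehiclePropStatus : int32_t { AVAILABLE = 0, UNAVAILABLE = 1, ERROR = 2 };
    }
    namespace hidl {
    enum class VehiclePropertyStatus : int32_t { AVAILABLE = 0, UNAVAILABLE = 1, ERROR = 2 };
    }

    static_assert(static_cast<int32_t>(hidl::VehiclePropertyStatus::UNAVAILABLE) ==
                      emulator::UNAVAILABLE,
                  "status enums must stay value-compatible for the plain cast to hold");

    hidl::VehiclePropertyStatus fromProto(emulator::VehiclePropStatus s) {
        return static_cast<hidl::VehiclePropertyStatus>(s);
    }

    int main() {
        return fromProto(emulator::ERROR) == hidl::VehiclePropertyStatus::ERROR ? 0 : 1;
    }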
diff --git a/automotive/vehicle/2.0/default/impl/vhal_v2_0/proto/Android.bp b/automotive/vehicle/2.0/default/impl/vhal_v2_0/proto/Android.bp index ec352000c66f4f8617ac7b034a0ac1c162a37c40..6754843bf5b41234d16f83d5c7a3885af5f8479b 100644 --- a/automotive/vehicle/2.0/default/impl/vhal_v2_0/proto/Android.bp +++ b/automotive/vehicle/2.0/default/impl/vhal_v2_0/proto/Android.bp @@ -23,5 +23,9 @@ cc_library_static { strip: { keep_symbols: true, }, + cflags: [ + "-Wall", + "-Werror", + ], srcs: ["VehicleHalProto.proto"] } diff --git a/automotive/vehicle/2.0/default/impl/vhal_v2_0/proto/VehicleHalProto.proto b/automotive/vehicle/2.0/default/impl/vhal_v2_0/proto/VehicleHalProto.proto index 86433f50f9d81c512181399c7c056516568f6078..2ef64fbfab59811d5540eab5e66b97b74cb7ef97 100644 --- a/automotive/vehicle/2.0/default/impl/vhal_v2_0/proto/VehicleHalProto.proto +++ b/automotive/vehicle/2.0/default/impl/vhal_v2_0/proto/VehicleHalProto.proto @@ -46,6 +46,12 @@ enum Status { ERROR_INVALID_OPERATION = 8; } +enum VehiclePropStatus { + AVAILABLE = 0; + UNAVAILABLE = 1; + ERROR = 2; +} + message VehicleAreaConfig { required int32 area_id = 1; optional sint32 min_int32_value = 2; @@ -61,7 +67,7 @@ message VehiclePropConfig { optional int32 access = 2; optional int32 change_mode = 3; optional int32 value_type = 4; - optional int32 supported_areas = 5; + optional int32 supported_areas = 5; // Deprecated - DO NOT USE repeated VehicleAreaConfig area_configs = 6; optional int32 config_flags = 7; repeated int32 config_array = 8; @@ -75,6 +81,7 @@ message VehiclePropValue { required int32 prop = 1; optional int32 value_type = 2; optional int64 timestamp = 3; // required for valid data from HAL, skipped for set + optional VehiclePropStatus status = 10; // required for valid data from HAL, skipped for set // values optional int32 area_id = 4; diff --git a/automotive/vehicle/2.0/default/tests/SubscriptionManager_test.cpp b/automotive/vehicle/2.0/default/tests/SubscriptionManager_test.cpp index 5688dd6da10cc48153d69e86f29debdcacb6ed12..ab2013d08cfbd2e79010663b3a71b6180b87daa1 100644 --- a/automotive/vehicle/2.0/default/tests/SubscriptionManager_test.cpp +++ b/automotive/vehicle/2.0/default/tests/SubscriptionManager_test.cpp @@ -51,30 +51,16 @@ public: } hidl_vec subscrToProp1 = { - SubscribeOptions { - .propId = PROP1, - .vehicleAreas = toInt(VehicleAreaZone::ROW_1_LEFT), - .flags = SubscribeFlags::HAL_EVENT - }, + SubscribeOptions{.propId = PROP1, .flags = SubscribeFlags::EVENTS_FROM_CAR}, }; hidl_vec subscrToProp2 = { - SubscribeOptions { - .propId = PROP2, - .flags = SubscribeFlags::HAL_EVENT - }, + SubscribeOptions{.propId = PROP2, .flags = SubscribeFlags::EVENTS_FROM_CAR}, }; hidl_vec subscrToProp1and2 = { - SubscribeOptions { - .propId = PROP1, - .vehicleAreas = toInt(VehicleAreaZone::ROW_1_LEFT), - .flags = SubscribeFlags::HAL_EVENT - }, - SubscribeOptions { - .propId = PROP2, - .flags = SubscribeFlags::HAL_EVENT - }, + SubscribeOptions{.propId = PROP1, .flags = SubscribeFlags::EVENTS_FROM_CAR}, + SubscribeOptions{.propId = PROP2, .flags = SubscribeFlags::EVENTS_FROM_CAR}, }; static std::list> extractCallbacks( @@ -87,14 +73,11 @@ public: } std::list> clientsToProp1() { - return manager.getSubscribedClients(PROP1, - toInt(VehicleAreaZone::ROW_1_LEFT), - SubscribeFlags::DEFAULT); + return manager.getSubscribedClients(PROP1, SubscribeFlags::EVENTS_FROM_CAR); } std::list> clientsToProp2() { - return manager.getSubscribedClients(PROP2, 0, - SubscribeFlags::DEFAULT); + return manager.getSubscribedClients(PROP2, 
SubscribeFlags::EVENTS_FROM_CAR); } void onPropertyUnsubscribed(int propertyId) { @@ -124,10 +107,7 @@ TEST_F(SubscriptionManagerTest, multipleClients) { ASSERT_EQ(StatusCode::OK, manager.addOrUpdateSubscription(2, cb2, subscrToProp1, &updatedOptions)); - auto clients = manager.getSubscribedClients( - PROP1, - toInt(VehicleAreaZone::ROW_1_LEFT), - SubscribeFlags::HAL_EVENT); + auto clients = manager.getSubscribedClients(PROP1, SubscribeFlags::EVENTS_FROM_CAR); ASSERT_ALL_EXISTS({cb1, cb2}, extractCallbacks(clients)); } @@ -137,25 +117,13 @@ TEST_F(SubscriptionManagerTest, negativeCases) { ASSERT_EQ(StatusCode::OK, manager.addOrUpdateSubscription(1, cb1, subscrToProp1, &updatedOptions)); - // Wrong zone - auto clients = manager.getSubscribedClients( - PROP1, - toInt(VehicleAreaZone::ROW_2_LEFT), - SubscribeFlags::HAL_EVENT); - ASSERT_TRUE(clients.empty()); - // Wrong prop - clients = manager.getSubscribedClients( - toInt(VehicleProperty::AP_POWER_BOOTUP_REASON), - toInt(VehicleAreaZone::ROW_1_LEFT), - SubscribeFlags::HAL_EVENT); + auto clients = manager.getSubscribedClients(toInt(VehicleProperty::AP_POWER_BOOTUP_REASON), + SubscribeFlags::EVENTS_FROM_CAR); ASSERT_TRUE(clients.empty()); // Wrong flag - clients = manager.getSubscribedClients( - PROP1, - toInt(VehicleAreaZone::ROW_1_LEFT), - SubscribeFlags::SET_CALL); + clients = manager.getSubscribedClients(PROP1, SubscribeFlags::EVENTS_FROM_ANDROID); ASSERT_TRUE(clients.empty()); } @@ -164,31 +132,22 @@ TEST_F(SubscriptionManagerTest, mulipleSubscriptions) { ASSERT_EQ(StatusCode::OK, manager.addOrUpdateSubscription(1, cb1, subscrToProp1, &updatedOptions)); - auto clients = manager.getSubscribedClients( - PROP1, - toInt(VehicleAreaZone::ROW_1_LEFT), - SubscribeFlags::DEFAULT); + auto clients = manager.getSubscribedClients(PROP1, SubscribeFlags::EVENTS_FROM_CAR); ASSERT_EQ((size_t) 1, clients.size()); ASSERT_EQ(cb1, clients.front()->getCallback()); // Same property, but different zone, to make sure we didn't unsubscribe // from previous zone. 
- ASSERT_EQ(StatusCode::OK, manager.addOrUpdateSubscription(1, cb1, { - SubscribeOptions { - .propId = PROP1, - .vehicleAreas = toInt(VehicleAreaZone::ROW_2), - .flags = SubscribeFlags::DEFAULT - } - }, &updatedOptions)); - - clients = manager.getSubscribedClients(PROP1, - toInt(VehicleAreaZone::ROW_1_LEFT), - SubscribeFlags::DEFAULT); + ASSERT_EQ( + StatusCode::OK, + manager.addOrUpdateSubscription( + 1, cb1, {SubscribeOptions{.propId = PROP1, .flags = SubscribeFlags::EVENTS_FROM_CAR}}, + &updatedOptions)); + + clients = manager.getSubscribedClients(PROP1, SubscribeFlags::EVENTS_FROM_CAR); ASSERT_ALL_EXISTS({cb1}, extractCallbacks(clients)); - clients = manager.getSubscribedClients(PROP1, - toInt(VehicleAreaZone::ROW_2), - SubscribeFlags::DEFAULT); + clients = manager.getSubscribedClients(PROP1, SubscribeFlags::EVENTS_FROM_CAR); ASSERT_ALL_EXISTS({cb1}, extractCallbacks(clients)); } diff --git a/automotive/vehicle/2.0/default/tests/VehicleHalManager_test.cpp b/automotive/vehicle/2.0/default/tests/VehicleHalManager_test.cpp index 4864d5d4643feb2b947838ea1d159e13f9a1bb27..f64eab55a940e810bdd0e2332f7aa9af09747b3c 100644 --- a/automotive/vehicle/2.0/default/tests/VehicleHalManager_test.cpp +++ b/automotive/vehicle/2.0/default/tests/VehicleHalManager_test.cpp @@ -106,7 +106,6 @@ public: } StatusCode subscribe(int32_t /* property */, - int32_t /* areas */, float /* sampleRate */) override { return StatusCode::OK; } @@ -242,10 +241,7 @@ TEST_F(VehicleHalManagerTest, halErrorEvent) { sp cb = new MockedVehicleCallback(); hidl_vec options = { - SubscribeOptions { - .propId = PROP, - .flags = SubscribeFlags::DEFAULT - }, + SubscribeOptions{.propId = PROP, .flags = SubscribeFlags::EVENTS_FROM_CAR}, }; StatusCode res = manager->subscribe(cb, options); @@ -260,11 +256,7 @@ TEST_F(VehicleHalManagerTest, subscribe) { sp cb = new MockedVehicleCallback(); hidl_vec options = { - SubscribeOptions { - .propId = PROP, - .flags = SubscribeFlags::DEFAULT - } - }; + SubscribeOptions{.propId = PROP, .flags = SubscribeFlags::EVENTS_FROM_CAR}}; StatusCode res = manager->subscribe(cb, options); ASSERT_EQ(StatusCode::OK, res); @@ -286,6 +278,7 @@ TEST_F(VehicleHalManagerTest, subscribe) { cb->reset(); VehiclePropValue actualValue(*subscribedValue.get()); + actualValue.status = VehiclePropertyStatus::AVAILABLE; hal->sendPropEvent(std::move(subscribedValue)); ASSERT_TRUE(cb->waitForExpectedEvents(1)) << "Events received: " @@ -301,18 +294,14 @@ TEST_F(VehicleHalManagerTest, subscribe_WriteOnly) { sp cb = new MockedVehicleCallback(); hidl_vec options = { - SubscribeOptions { - .propId = PROP, - .flags = SubscribeFlags::HAL_EVENT - }, + SubscribeOptions{.propId = PROP, .flags = SubscribeFlags::EVENTS_FROM_CAR}, }; StatusCode res = manager->subscribe(cb, options); // Unable to subscribe on Hal Events for write-only properties. ASSERT_EQ(StatusCode::INVALID_ARG, res); - - options[0].flags = SubscribeFlags::SET_CALL; + options[0].flags = SubscribeFlags::EVENTS_FROM_ANDROID; res = manager->subscribe(cb, options); // OK to subscribe on SET method call for write-only properties. 
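The renamed flags exercised by these tests split subscriptions by event origin: EVENTS_FROM_CAR (formerly HAL_EVENT) delivers values pushed by the vehicle, while EVENTS_FROM_ANDROID (formerly SET_CALL) delivers events generated when Android calls set(), and is the only flag accepted for a write-only property. A sketch with stand-in types; the numeric flag values are assumptions:

    #include <cstdint>
    #include <vector>

    enum class SubscribeFlags : int32_t { EVENTS_FROM_CAR = 0x1, EVENTS_FROM_ANDROID = 0x2 };

    struct SubscribeOptions {
        int32_t propId;
        float sampleRate;   // only meaningful for CONTINUOUS properties
        SubscribeFlags flags;
    };

    std::vector<SubscribeOptions> exampleOptions(int32_t readableProp, int32_t writeOnlyProp) {
        return {
            {readableProp, 0.0f, SubscribeFlags::EVENTS_FROM_CAR},       // car-originated updates
            {writeOnlyProp, 0.0f, SubscribeFlags::EVENTS_FROM_ANDROID},  // notified on set() calls
        };
    }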
@@ -396,8 +385,8 @@ TEST_F(VehicleHalManagerTest, set_DifferentAreas) { const auto PROP = toInt(VehicleProperty::HVAC_FAN_SPEED); const auto VAL1 = 1; const auto VAL2 = 2; - const auto AREA1 = toInt(VehicleAreaZone::ROW_1_LEFT); - const auto AREA2 = toInt(VehicleAreaZone::ROW_1_RIGHT); + const auto AREA1 = toInt(VehicleAreaSeat::ROW_1_LEFT); + const auto AREA2 = toInt(VehicleAreaSeat::ROW_1_RIGHT); { auto expectedValue1 = hal->getValuePool()->obtainInt32(VAL1); diff --git a/automotive/vehicle/2.0/default/tests/VehicleHalTestUtils.h b/automotive/vehicle/2.0/default/tests/VehicleHalTestUtils.h index 2a064175a746ab19bdc31f3e357a923113d1b5c2..108e32fef294029c2931fd585693b4cf952cc364 100644 --- a/automotive/vehicle/2.0/default/tests/VehicleHalTestUtils.h +++ b/automotive/vehicle/2.0/default/tests/VehicleHalTestUtils.h @@ -29,10 +29,8 @@ namespace automotive { namespace vehicle { namespace V2_0 { -constexpr int32_t kCustomComplexProperty = 0xbeef - | VehiclePropertyGroup::VENDOR - | VehiclePropertyType::COMPLEX - | VehicleArea::GLOBAL; +constexpr int32_t kCustomComplexProperty = + 0xbeef | VehiclePropertyGroup::VENDOR | VehiclePropertyType::MIXED | VehicleArea::GLOBAL; const VehiclePropConfig kVehicleProperties[] = { { @@ -42,68 +40,38 @@ const VehiclePropConfig kVehicleProperties[] = { .configString = "Some=config,options=if,you=have_any", }, - { - .prop = toInt(VehicleProperty::HVAC_FAN_SPEED), - .access = VehiclePropertyAccess::READ_WRITE, - .changeMode = VehiclePropertyChangeMode::ON_CHANGE, - .supportedAreas = static_cast( - VehicleAreaZone::ROW_1_LEFT | VehicleAreaZone::ROW_1_RIGHT), - .areaConfigs = { - VehicleAreaConfig { - .areaId = toInt(VehicleAreaZone::ROW_1_LEFT), - .minInt32Value = 1, - .maxInt32Value = 7}, - VehicleAreaConfig { - .areaId = toInt(VehicleAreaZone::ROW_1_RIGHT), - .minInt32Value = 1, - .maxInt32Value = 5, - } - } - }, + {.prop = toInt(VehicleProperty::HVAC_FAN_SPEED), + .access = VehiclePropertyAccess::READ_WRITE, + .changeMode = VehiclePropertyChangeMode::ON_CHANGE, + .areaConfigs = + {VehicleAreaConfig{ + .areaId = toInt(VehicleAreaSeat::ROW_1_LEFT), .minInt32Value = 1, .maxInt32Value = 7}, + VehicleAreaConfig{ + .areaId = toInt(VehicleAreaSeat::ROW_1_RIGHT), .minInt32Value = 1, .maxInt32Value = 5, + }}}, // Write-only property - { - .prop = toInt(VehicleProperty::HVAC_SEAT_TEMPERATURE), - .access = VehiclePropertyAccess::WRITE, - .changeMode = VehiclePropertyChangeMode::ON_SET, - .supportedAreas = static_cast( - VehicleAreaZone::ROW_1_LEFT | VehicleAreaZone::ROW_1_RIGHT), - .areaConfigs = { - VehicleAreaConfig { - .areaId = toInt(VehicleAreaZone::ROW_1_LEFT), - .minInt32Value = 64, - .maxInt32Value = 80}, - VehicleAreaConfig { - .areaId = toInt(VehicleAreaZone::ROW_1_RIGHT), - .minInt32Value = 64, - .maxInt32Value = 80, - } - } - }, - - { - .prop = toInt(VehicleProperty::INFO_FUEL_CAPACITY), - .access = VehiclePropertyAccess::READ, - .changeMode = VehiclePropertyChangeMode::ON_CHANGE, - .areaConfigs = { - VehicleAreaConfig { - .minFloatValue = 0, - .maxFloatValue = 1.0 - } - } - }, - - { - .prop = toInt(VehicleProperty::DISPLAY_BRIGHTNESS), - .access = VehiclePropertyAccess::READ_WRITE, - .changeMode = VehiclePropertyChangeMode::ON_CHANGE, - .areaConfigs = { - VehicleAreaConfig { - .minInt32Value = 0, - .maxInt32Value = 10 - } - } - }, + {.prop = toInt(VehicleProperty::HVAC_SEAT_TEMPERATURE), + .access = VehiclePropertyAccess::WRITE, + .changeMode = VehiclePropertyChangeMode::ON_CHANGE, + .areaConfigs = {VehicleAreaConfig{.areaId = 
toInt(VehicleAreaSeat::ROW_1_LEFT), + .minInt32Value = 64, + .maxInt32Value = 80}, + VehicleAreaConfig{ + .areaId = toInt(VehicleAreaSeat::ROW_1_RIGHT), + .minInt32Value = 64, + .maxInt32Value = 80, + }}}, + + {.prop = toInt(VehicleProperty::INFO_FUEL_CAPACITY), + .access = VehiclePropertyAccess::READ, + .changeMode = VehiclePropertyChangeMode::ON_CHANGE, + .areaConfigs = {VehicleAreaConfig{.minFloatValue = 0, .maxFloatValue = 1.0}}}, + + {.prop = toInt(VehicleProperty::DISPLAY_BRIGHTNESS), + .access = VehiclePropertyAccess::READ_WRITE, + .changeMode = VehiclePropertyChangeMode::ON_CHANGE, + .areaConfigs = {VehicleAreaConfig{.minInt32Value = 0, .maxInt32Value = 10}}}, { .prop = toInt(VehicleProperty::MIRROR_FOLD), @@ -113,12 +81,9 @@ const VehiclePropConfig kVehicleProperties[] = { }, // Complex data type. - { - .prop = kCustomComplexProperty, - .access = VehiclePropertyAccess::READ_WRITE, - .changeMode = VehiclePropertyChangeMode::ON_CHANGE - } -}; + {.prop = kCustomComplexProperty, + .access = VehiclePropertyAccess::READ_WRITE, + .changeMode = VehiclePropertyChangeMode::ON_CHANGE}}; constexpr auto kTimeout = std::chrono::milliseconds(500); diff --git a/automotive/vehicle/2.0/default/tests/VmsUtils_test.cpp b/automotive/vehicle/2.0/default/tests/VmsUtils_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..414c5c2a42924411d92836ae94a0afc1cfeeb347 --- /dev/null +++ b/automotive/vehicle/2.0/default/tests/VmsUtils_test.cpp @@ -0,0 +1,176 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include + +#include "VehicleHalTestUtils.h" +#include "vhal_v2_0/VmsUtils.h" + +namespace android { +namespace hardware { +namespace automotive { +namespace vehicle { +namespace V2_0 { +namespace vms { + +namespace { + +TEST(VmsUtilsTest, subscribeMessage) { + VmsLayer layer(1, 0, 2); + auto message = createSubscribeMessage(layer); + ASSERT_NE(message, nullptr); + EXPECT_TRUE(isValidVmsMessage(*message)); + EXPECT_EQ(message->prop, toInt(VehicleProperty::VEHICLE_MAP_SERVICE)); + EXPECT_EQ(message->value.int32Values.size(), 0x4ul); + EXPECT_EQ(parseMessageType(*message), VmsMessageType::SUBSCRIBE); + + // Layer + EXPECT_EQ(message->value.int32Values[1], 1); + EXPECT_EQ(message->value.int32Values[2], 0); + EXPECT_EQ(message->value.int32Values[3], 2); +} + +TEST(VmsUtilsTest, unsubscribeMessage) { + VmsLayer layer(1, 0, 2); + auto message = createUnsubscribeMessage(layer); + ASSERT_NE(message, nullptr); + EXPECT_TRUE(isValidVmsMessage(*message)); + EXPECT_EQ(message->prop, toInt(VehicleProperty::VEHICLE_MAP_SERVICE)); + EXPECT_EQ(message->value.int32Values.size(), 0x4ul); + EXPECT_EQ(parseMessageType(*message), VmsMessageType::UNSUBSCRIBE); + + // Layer + EXPECT_EQ(message->value.int32Values[1], 1); + EXPECT_EQ(message->value.int32Values[2], 0); + EXPECT_EQ(message->value.int32Values[3], 2); +} + +TEST(VmsUtilsTest, singleOfferingMessage) { + std::vector offering = {VmsLayerOffering(VmsLayer(1, 0, 2))}; + auto message = createOfferingMessage(offering); + ASSERT_NE(message, nullptr); + EXPECT_TRUE(isValidVmsMessage(*message)); + EXPECT_EQ(message->prop, toInt(VehicleProperty::VEHICLE_MAP_SERVICE)); + EXPECT_EQ(message->value.int32Values.size(), 0x6ul); + EXPECT_EQ(parseMessageType(*message), VmsMessageType::OFFERING); + + // Number of layer offerings + EXPECT_EQ(message->value.int32Values[1], 1); + + // Layer + EXPECT_EQ(message->value.int32Values[2], 1); + EXPECT_EQ(message->value.int32Values[3], 0); + EXPECT_EQ(message->value.int32Values[4], 2); + + // Number of dependencies + EXPECT_EQ(message->value.int32Values[5], 0); +} + +TEST(VmsUtilsTest, offeringWithDependencies) { + VmsLayer layer(1, 0, 2); + std::vector dependencies = {VmsLayer(2, 0, 2)}; + std::vector offering = {VmsLayerOffering(layer, dependencies)}; + auto message = createOfferingMessage(offering); + ASSERT_NE(message, nullptr); + EXPECT_TRUE(isValidVmsMessage(*message)); + EXPECT_EQ(message->prop, toInt(VehicleProperty::VEHICLE_MAP_SERVICE)); + EXPECT_EQ(message->value.int32Values.size(), 0x9ul); + EXPECT_EQ(parseMessageType(*message), VmsMessageType::OFFERING); + + // Number of layer offerings + EXPECT_EQ(message->value.int32Values[1], 1); + + // Layer + EXPECT_EQ(message->value.int32Values[2], 1); + EXPECT_EQ(message->value.int32Values[3], 0); + EXPECT_EQ(message->value.int32Values[4], 2); + + // Number of dependencies + EXPECT_EQ(message->value.int32Values[5], 1); + + // Dependency 1 + EXPECT_EQ(message->value.int32Values[6], 2); + EXPECT_EQ(message->value.int32Values[7], 0); + EXPECT_EQ(message->value.int32Values[8], 2); +} + +TEST(VmsUtilsTest, availabilityMessage) { + auto message = createAvailabilityRequest(); + ASSERT_NE(message, nullptr); + EXPECT_TRUE(isValidVmsMessage(*message)); + EXPECT_EQ(message->prop, toInt(VehicleProperty::VEHICLE_MAP_SERVICE)); + EXPECT_EQ(message->value.int32Values.size(), 0x1ul); + EXPECT_EQ(parseMessageType(*message), VmsMessageType::AVAILABILITY_REQUEST); +} + +TEST(VmsUtilsTest, subscriptionsMessage) { + auto message = createSubscriptionsRequest(); + ASSERT_NE(message, 
nullptr); + EXPECT_TRUE(isValidVmsMessage(*message)); + EXPECT_EQ(message->prop, toInt(VehicleProperty::VEHICLE_MAP_SERVICE)); + EXPECT_EQ(message->value.int32Values.size(), 0x1ul); + EXPECT_EQ(parseMessageType(*message), VmsMessageType::SUBSCRIPTIONS_REQUEST); +} + +TEST(VmsUtilsTest, dataMessage) { + std::string bytes = "aaa"; + auto message = createDataMessage(bytes); + ASSERT_NE(message, nullptr); + EXPECT_TRUE(isValidVmsMessage(*message)); + EXPECT_EQ(message->prop, toInt(VehicleProperty::VEHICLE_MAP_SERVICE)); + EXPECT_EQ(message->value.int32Values.size(), 0x1ul); + EXPECT_EQ(parseMessageType(*message), VmsMessageType::DATA); + EXPECT_EQ(message->value.bytes.size(), bytes.size()); + EXPECT_EQ(memcmp(message->value.bytes.data(), bytes.data(), bytes.size()), 0); +} + +TEST(VmsUtilsTest, emptyMessageInvalid) { + VehiclePropValue empty_prop; + EXPECT_FALSE(isValidVmsMessage(empty_prop)); +} + +TEST(VmsUtilsTest, invalidMessageType) { + VmsLayer layer(1, 0, 2); + auto message = createSubscribeMessage(layer); + message->value.int32Values[0] = 0; + + EXPECT_FALSE(isValidVmsMessage(*message)); +} + +TEST(VmsUtilsTest, parseDataMessage) { + std::string bytes = "aaa"; + auto message = createDataMessage(bytes); + auto data_str = parseData(*message); + ASSERT_FALSE(data_str.empty()); + EXPECT_EQ(data_str, bytes); +} + +TEST(VmsUtilsTest, parseInvalidDataMessage) { + VmsLayer layer(1, 0, 2); + auto message = createSubscribeMessage(layer); + auto data_str = parseData(*message); + EXPECT_TRUE(data_str.empty()); +} + +} // namespace + +} // namespace vms +} // namespace V2_0 +} // namespace vehicle +} // namespace automotive +} // namespace hardware +} // namespace android diff --git a/automotive/vehicle/2.0/types.hal b/automotive/vehicle/2.0/types.hal index 7c08b4a036416a29f5a78b2de5e8741931dab570..57179dfd67f312f6a993684a0ce29a5b5d99eee2 100644 --- a/automotive/vehicle/2.0/types.hal +++ b/automotive/vehicle/2.0/types.hal @@ -17,30 +17,34 @@ package android.hardware.automotive.vehicle@2.0; /** - * Enumerates supported data types for VehicleProperty. + * Enumerates supported data type for VehicleProperty. * - * This is a bitwise flag that supposed to be used in VehicleProperty enum. + * Used to create property ID in VehicleProperty enum. */ enum VehiclePropertyType : int32_t { - STRING = 0x00100000, - BOOLEAN = 0x00200000, - INT32 = 0x00400000, - INT32_VEC = 0x00410000, - INT64 = 0x00500000, - FLOAT = 0x00600000, - FLOAT_VEC = 0x00610000, - BYTES = 0x00700000, + STRING = 0x00100000, + BOOLEAN = 0x00200000, + INT32 = 0x00400000, + INT32_VEC = 0x00410000, + INT64 = 0x00500000, + INT64_VEC = 0x00510000, + FLOAT = 0x00600000, + FLOAT_VEC = 0x00610000, + BYTES = 0x00700000, /** * Any combination of scalar or vector types. The exact format must be * provided in the description of the property. */ - COMPLEX = 0x00e00000, + MIXED = 0x00e00000, - MASK = 0x00ff0000 + MASK = 0x00ff0000 }; /** + * Vehicle Areas + * Used to construct property IDs in the VehicleProperty enum. + * * Some properties may be associated with particular vehicle areas. For * example, VehicleProperty:DOOR_LOCK property must be associated with * particular door, thus this property must be marked with @@ -49,51 +53,86 @@ enum VehiclePropertyType : int32_t { * Other properties may not be associated with particular vehicle area, * these kind of properties must have VehicleArea:GLOBAL flag. * - * This is a bitwise flag that supposed to be used in VehicleProperty enum. 
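The VMS tests above pin down the payload layout of VEHICLE_MAP_SERVICE messages: int32Values[0] is the VmsMessageType and, for SUBSCRIBE/UNSUBSCRIBE, indices 1 to 3 carry the layer's type, subtype and version. A small builder sketch; the message-type constant is a placeholder:

    #include <cstdint>
    #include <vector>

    struct VmsLayer {
        int32_t type;
        int32_t subtype;
        int32_t version;
    };

    // e.g. makeSubscribePayload(subscribeType, {1, 0, 2}) yields four int32 values,
    // matching the size checked by the subscribeMessage test.
    std::vector<int32_t> makeSubscribePayload(int32_t subscribeMessageType,
                                              const VmsLayer& layer) {
        return {subscribeMessageType, layer.type, layer.subtype, layer.version};
    }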
- */ + * [Definition] Area: An area represents a unique element of an AreaType. + * For instance, if AreaType is WINDOW, then an area may be FRONT_WINDSHIELD. + * + * [Definition] AreaID: An AreaID is a combination of one or more areas, + * and is represented using a bitmask of Area enums. Different AreaTypes may + * not be mixed in a single AreaID. For instance, a window area cannot be + * combined with a seat area in an AreaID. + * + * Rules for mapping a zoned property to AreaIDs: + * - A property must be mapped to an array of AreaIDs that are impacted when + * the property value changes. + * - Each element in the array must represent an AreaID, in which, the + * property value can only be changed together in all the areas within + * an AreaID and never independently. That is, when the property value + * changes in one of the areas in an AreaID in the array, then it must + * automatically change in all other areas in the AreaID. + * - The property value must be independently controllable in any two + * different AreaIDs in the array. + * - An area must only appear once in the array of AreaIDs. That is, an + * area must only be part of a single AreaID in the array. + * + * [Definition] Global Property: A property that applies to the entire car + * and is not associated with a specific area. For example, FUEL_LEVEL, + * HVAC_STEERING_WHEEL_HEAT. + * + * Rules for mapping a global property to AreaIDs: + * - A global property must not be mapped to AreaIDs. +*/ enum VehicleArea : int32_t { - GLOBAL = 0x01000000, - ZONE = 0x02000000, - WINDOW = 0x03000000, - MIRROR = 0x04000000, - SEAT = 0x05000000, - DOOR = 0x06000000, - - MASK = 0x0f000000, + GLOBAL = 0x01000000, + /** WINDOW maps to enum VehicleAreaWindow */ + WINDOW = 0x03000000, + /** MIRROR maps to enum VehicleAreaMirror */ + MIRROR = 0x04000000, + /** SEAT maps to enum VehicleAreaSeat */ + SEAT = 0x05000000, + /** DOOR maps to enum VehicleAreaDoor */ + DOOR = 0x06000000, + /** WHEEL maps to enum VehicleAreaWheel */ + WHEEL = 0x07000000, + + MASK = 0x0f000000, }; /** * Enumerates property groups. * - * This is a bitwise flag that supposed to be used in VehicleProperty enum. + * Used to create property ID in VehicleProperty enum. */ enum VehiclePropertyGroup : int32_t { /** - * Properties declared in AOSP must have this flag. + * Properties declared in AOSP must use this flag. */ - SYSTEM = 0x10000000, + SYSTEM = 0x10000000, /** - * Properties declared by vendors must have this flag. + * Properties declared by vendors must use this flag. */ - VENDOR = 0x20000000, + VENDOR = 0x20000000, - MASK = 0xf0000000, + MASK = 0xf0000000, }; /** * Declares all vehicle properties. VehicleProperty has a bitwise structure. * Each property must have: - * - an unique id from range 0x0100 - 0xffff - * - associated data type using VehiclePropertyType - * - property group (VehiclePropertyGroup) - * - vehicle area (VehicleArea) + * - a unique id from range 0x0100 - 0xffff + * - associated data type using VehiclePropertyType + * - property group (VehiclePropertyGroup) + * - vehicle area (VehicleArea) * * Vendors are allowed to extend this enum with their own properties. In this * case they must use VehiclePropertyGroup:VENDOR flag when property is * declared. + * + * When a property's status field is not set to AVAILABLE: + * - IVehicle#set may return StatusCode::NOT_AVAILABLE. + * - IVehicle#get is not guaranteed to work. */ -enum VehicleProperty: int32_t { +enum VehicleProperty : int32_t { /** Undefined property. 
*/ INVALID = 0x00000000, @@ -104,14 +143,14 @@ enum VehicleProperty: int32_t { * @change_mode VehiclePropertyChangeMode:STATIC * @access VehiclePropertyAccess:READ */ - INFO_VIN= ( + INFO_VIN = ( 0x0100 | VehiclePropertyGroup:SYSTEM | VehiclePropertyType:STRING | VehicleArea:GLOBAL), /** - * Maker name of vehicle + * Manufacturer of vehicle * * @change_mode VehiclePropertyChangeMode:STATIC * @access VehiclePropertyAccess:READ @@ -148,7 +187,7 @@ enum VehicleProperty: int32_t { | VehicleArea:GLOBAL), /** - * Fuel capacity of the vehicle + * Fuel capacity of the vehicle in milliliters * * @change_mode VehiclePropertyChangeMode:STATIC * @access VehiclePropertyAccess:READ @@ -160,10 +199,90 @@ enum VehicleProperty: int32_t { | VehiclePropertyType:FLOAT | VehicleArea:GLOBAL), + /** + * List of fuels the vehicle may use + * + * @change_mode VehiclePropertyChangeMode:STATIC + * @access VehiclePropertyAccess:READ + * @data_enum FuelType + */ + INFO_FUEL_TYPE = ( + 0x0105 + | VehiclePropertyGroup:SYSTEM + | VehiclePropertyType:INT32_VEC + | VehicleArea:GLOBAL), + + /** + * Battery capacity of the vehicle, if EV or hybrid. This is the nominal + * battery capacity when the vehicle is new. + * + * @change_mode VehiclePropertyChangeMode:STATIC + * @access VehiclePropertyAccess:READ + * @unit VehicleUnit:WH + */ + INFO_EV_BATTERY_CAPACITY = ( + 0x0106 + | VehiclePropertyGroup:SYSTEM + | VehiclePropertyType:FLOAT + | VehicleArea:GLOBAL), + + /** + * List of connectors this EV may use + * + * @change_mode VehiclePropertyChangeMode:STATIC + * @data_enum EvConnectorType + * @access VehiclePropertyAccess:READ + */ + INFO_EV_CONNECTOR_TYPE = ( + 0x0107 + | VehiclePropertyGroup:SYSTEM + | VehiclePropertyType:INT32_VEC + | VehicleArea:GLOBAL), + + /** + * Fuel door location + * + * @change_mode VehiclePropertyChangeMode:STATIC + * @data_enum FuelDoorLocationType + * @access VehiclePropertyAccess:READ + */ + INFO_FUEL_DOOR_LOCATION = ( + 0x0108 + | VehiclePropertyGroup:SYSTEM + | VehiclePropertyType:INT32 + | VehicleArea:GLOBAL), + + /** + * EV port location + * + * @change_mode VehiclePropertyChangeMode:STATIC + * @access VehiclePropertyAccess:READ + * @data_enum PortLocationType + */ + INFO_EV_PORT_LOCATION = ( + 0x0109 + | VehiclePropertyGroup:SYSTEM + | VehiclePropertyType:INT32 + | VehicleArea:GLOBAL), + + /** + * Driver's seat location + * + * @change_mode VehiclePropertyChangeMode:STATIC + * @data_enum VehicleAreaSeat + * @access VehiclePropertyAccess:READ + */ + INFO_DRIVER_SEAT = ( + 0x010A + | VehiclePropertyGroup:SYSTEM + | VehiclePropertyType:INT32 + | VehicleArea:SEAT), + + /** * Current odometer value of the vehicle * - * @change_mode VehiclePropertyChangeMode:ON_CHANGE | VehiclePropertyChangeMode:CONTINUOUS + * @change_mode VehiclePropertyChangeMode:CONTINUOUS * @access VehiclePropertyAccess:READ * @unit VehicleUnit:KILOMETER */ @@ -176,7 +295,7 @@ enum VehicleProperty: int32_t { /** * Speed of the vehicle * - * @change_mode VehiclePropertyChangeMode:ON_CHANGE|VehiclePropertyChangeMode:CONTINUOUS + * @change_mode VehiclePropertyChangeMode:CONTINUOUS * @access VehiclePropertyAccess:READ * @unit VehicleUnit:METER_PER_SEC */ @@ -189,7 +308,7 @@ enum VehicleProperty: int32_t { /** * Temperature of engine coolant * - * @change_mode VehiclePropertyChangeMode:ON_CHANGE|VehiclePropertyChangeMode:CONTINUOUS + * @change_mode VehiclePropertyChangeMode:CONTINUOUS * @access VehiclePropertyAccess:READ * @unit VehicleUnit:CELSIUS */ @@ -199,10 +318,23 @@ enum VehicleProperty: int32_t { | 
VehiclePropertyType:FLOAT | VehicleArea:GLOBAL), + /** + * Engine oil level + * + * @change_mode VehiclePropertyChangeMode:ON_CHANGE + * @access VehiclePropertyAccess:READ + * @data_enum VehicleOilLevel + */ + ENGINE_OIL_LEVEL = ( + 0x0303 + | VehiclePropertyGroup:SYSTEM + | VehiclePropertyType:INT32 + | VehicleArea:GLOBAL), + /** * Temperature of engine oil * - * @change_mode VehiclePropertyChangeMode:ON_CHANGE|VehiclePropertyChangeMode:CONTINUOUS + * @change_mode VehiclePropertyChangeMode:CONTINUOUS * @access VehiclePropertyAccess:READ * @unit VehicleUnit:CELSIUS */ @@ -215,7 +347,7 @@ enum VehicleProperty: int32_t { /** * Engine rpm * - * @change_mode VehiclePropertyChangeMode:ON_CHANGE|VehiclePropertyChangeMode:CONTINUOUS + * @change_mode VehiclePropertyChangeMode:CONTINUOUS * @access VehiclePropertyAccess:READ * @unit VehicleUnit:RPM */ @@ -228,23 +360,23 @@ enum VehicleProperty: int32_t { /** * Reports wheel ticks * - * The first four elements represent ticks for individual wheels in the + * The first element in the vector is a reset count. A reset indicates + * previous tick counts are not comparable with this and future ones. Some + * sort of discontinuity in tick counting has occurred. + * + * The next four elements represent ticks for individual wheels in the * following order: front left, front right, rear right, rear left. All * tick counts are cumulative. Tick counts increment when the vehicle * moves forward, and decrement when vehicles moves in reverse. The ticks * should be reset to 0 when the vehicle is started by the user. * - * The next element in the vector is a reset count. A reset indicates - * previous tick counts are not comparable with this and future ones. Some - * sort of discontinuity in tick counting has occurred. - * * int64Values[0] = reset count * int64Values[1] = front left ticks * int64Values[2] = front right ticks * int64Values[3] = rear right ticks * int64Values[4] = rear left ticks * - * configArray is used to indicate the micrometers-per-wheel-tick value as well as + * configArray is used to indicate the micrometers-per-wheel-tick value and * which wheels are supported. configArray is set as follows: * * configArray[0], bits [0:3] = supported wheels. Uses enum Wheel. 
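The enums added above give every property ID a bitwise layout: a 16-bit unique id combined with VehiclePropertyGroup, VehiclePropertyType and VehicleArea bits. A minimal standalone sketch (plain constants copied from this patch rather than the generated HIDL types; an unsigned type is used only to keep the bit math simple) of composing and decomposing such an ID, using the WHEEL_TICK declaration as the example:

    #include <cinttypes>
    #include <cstdint>
    #include <cstdio>

    // Values copied from the enums in this patch (types.hal declares them as int32_t).
    constexpr uint32_t GROUP_SYSTEM   = 0x10000000;  // VehiclePropertyGroup:SYSTEM
    constexpr uint32_t TYPE_INT64_VEC = 0x00510000;  // VehiclePropertyType:INT64_VEC
    constexpr uint32_t AREA_GLOBAL    = 0x01000000;  // VehicleArea:GLOBAL
    constexpr uint32_t GROUP_MASK     = 0xf0000000;  // VehiclePropertyGroup:MASK
    constexpr uint32_t TYPE_MASK      = 0x00ff0000;  // VehiclePropertyType:MASK
    constexpr uint32_t AREA_MASK      = 0x0f000000;  // VehicleArea:MASK

    int main() {
        // WHEEL_TICK in this patch: unique id 0x0306 combined with the SYSTEM
        // group, an INT64_VEC payload and the GLOBAL area.
        uint32_t prop = 0x0306u | GROUP_SYSTEM | TYPE_INT64_VEC | AREA_GLOBAL;

        // The MASK members recover each field from the packed property ID.
        printf("prop=0x%08" PRIX32 " group=0x%08" PRIX32 " type=0x%08" PRIX32
               " area=0x%08" PRIX32 " id=0x%04" PRIX32 "\n",
               prop, prop & GROUP_MASK, prop & TYPE_MASK, prop & AREA_MASK,
               prop & 0xffffu);
        return 0;
    }

The same masking applies to any property in this file, which is how a HAL client can tell a MIXED payload from an INT32_VEC one without a lookup table.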
@@ -259,19 +391,140 @@ enum VehicleProperty: int32_t { * * @change_mode VehiclePropertyChangeMode:CONTINUOUS * @access VehiclePropertyAccess:READ - * - * @since o.mr1 */ WHEEL_TICK = ( - 0x0306 - | VehiclePropertyGroup:SYSTEM - | VehiclePropertyType:COMPLEX - | VehicleArea:GLOBAL), + 0x0306 + | VehiclePropertyGroup:SYSTEM + | VehiclePropertyType:INT64_VEC + | VehicleArea:GLOBAL), + + + /** + * Fuel remaining in the the vehicle, in milliliters + * + * Value may not exceed INFO_FUEL_CAPACITY + * + * @change_mode VehiclePropertyChangeMode:CONTINUOUS + * @access VehiclePropertyAccess:READ + * @unit VehicleUnit:MILLILITER + */ + FUEL_LEVEL = ( + 0x0307 + | VehiclePropertyGroup:SYSTEM + | VehiclePropertyType:FLOAT + | VehicleArea:GLOBAL), + + /** + * Fuel door open + * + * @change_mode VehiclePropertyChangeMode:ON_CHANGE + * @access VehiclePropertyAccess:READ_WRITE + */ + FUEL_DOOR_OPEN = ( + 0x0308 + | VehiclePropertyGroup:SYSTEM + | VehiclePropertyType:BOOLEAN + | VehicleArea:GLOBAL), + + /** + * EV battery level in WH, if EV or hybrid + * + * Value may not exceed INFO_EV_BATTERY_CAPACITY + * + * @change_mode VehiclePropertyChangeMode:CONTINUOUS + * @access VehiclePropertyAccess:READ + * @unit VehicleUnit:WH + */ + EV_BATTERY_LEVEL = ( + 0x0309 + | VehiclePropertyGroup:SYSTEM + | VehiclePropertyType:FLOAT + | VehicleArea:GLOBAL), + + /** + * EV charge port open + * + * @change_mode VehiclePropertyChangeMode:ON_CHANGE + * @access VehiclePropertyAccess:READ_WRITE + */ + EV_CHARGE_PORT_OPEN = ( + 0x030A + | VehiclePropertyGroup:SYSTEM + | VehiclePropertyType:BOOLEAN + | VehicleArea:GLOBAL), + + /** + * EV charge port connected + * + * @change_mode VehiclePropertyChangeMode:ON_CHANGE + * @access VehiclePropertyAccess:READ + */ + EV_CHARGE_PORT_CONNECTED = ( + 0x030B + | VehiclePropertyGroup:SYSTEM + | VehiclePropertyType:BOOLEAN + | VehicleArea:GLOBAL), + + /** + * EV instantaneous charge rate in milliwatts + * + * Positive value indicates battery is being charged. + * Negative value indicates battery being discharged. + * + * @change_mode VehiclePropertyChangeMode:CONTINUOUS + * @access VehiclePropertyAccess:READ + * @unit VehicleUnit:MW + */ + EV_BATTERY_INSTANTANEOUS_CHARGE_RATE = ( + 0x030C + | VehiclePropertyGroup:SYSTEM + | VehiclePropertyType:FLOAT + | VehicleArea:GLOBAL), + + /** + * Range remaining + * + * Meters remaining of fuel and charge. Range remaining shall account for + * all energy sources in a vehicle. For example, a hybrid car's range will + * be the sum of the ranges based on fuel and battery. + * + * @change_mode VehiclePropertyChangeMode:CONTINUOUS + * @access VehiclePropertyAccess:READ_WRITE + * @unit VehicleUnit:METER + */ + RANGE_REMAINING = ( + 0x0308 + | VehiclePropertyGroup:SYSTEM + | VehiclePropertyType:FLOAT + | VehicleArea:GLOBAL), + /** + * Tire pressure + * + * min/max value indicates tire pressure sensor range. Each tire will have a separate min/max + * value denoted by its areaConfig.areaId. + * + * @change_mode VehiclePropertyChangeMode:CONTINUOUS + * @access VehiclePropertyAccess:READ + * @unit VehicleUnit:KILOPASCAL + */ + TIRE_PRESSURE = ( + 0x0309 + | VehiclePropertyGroup:SYSTEM + | VehiclePropertyType:FLOAT + | VehicleArea:WHEEL), /** * Currently selected gear * + * This is the gear selected by the user. + * + * Values in the config data must represent the list of supported gears + * for this vehicle. 
For example, config data for an automatic transmission + * must contain {GEAR_NEUTRAL, GEAR_REVERSE, GEAR_PARK, GEAR_DRIVE, + * GEAR_1, GEAR_2,...} and for manual transmission the list must be + * {GEAR_NEUTRAL, GEAR_REVERSE, GEAR_1, GEAR_2,...} + * * @change_mode VehiclePropertyChangeMode:ON_CHANGE * @access VehiclePropertyAccess:READ * @data_enum VehicleGear @@ -283,8 +536,17 @@ enum VehicleProperty: int32_t { | VehicleArea:GLOBAL), /** - * Current gear. In non-manual case, selected gear does not necessarily - * match the current gear. + * Current gear. In non-manual case, selected gear may not + * match the current gear. For example, if the selected gear is GEAR_DRIVE, + * the current gear will be one of GEAR_1, GEAR_2 etc, which reflects + * the actual gear the transmission is currently running in. + * + * Values in the config data must represent the list of supported gears + * for this vehicle. For example, config data for an automatic transmission + * must contain {GEAR_NEUTRAL, GEAR_REVERSE, GEAR_PARK, GEAR_1, GEAR_2,...} + * and for manual transmission the list must be + * {GEAR_NEUTRAL, GEAR_REVERSE, GEAR_1, GEAR_2,...}. This list need not be the + * same as that of the supported gears reported in GEAR_SELECTION. * * @change_mode VehiclePropertyChangeMode:ON_CHANGE * @access VehiclePropertyAccess:READ @@ -300,7 +562,7 @@ enum VehicleProperty: int32_t { * Parking brake state. * * @change_mode VehiclePropertyChangeMode:ON_CHANGE - * @access VehiclePropertyAccess:READ + * @access VehiclePropertyAccess:READ_WRITE */ PARKING_BRAKE_ON = ( 0x0402 @@ -309,21 +571,24 @@ enum VehicleProperty: int32_t { | VehicleArea:GLOBAL), /** - * Driving status policy. + * Auto-apply parking brake. * * @change_mode VehiclePropertyChangeMode:ON_CHANGE - * @access VehiclePropertyAccess:READ - * @data_enum VehicleDrivingStatus + * @access VehiclePropertyAccess:READ_WRITE */ - DRIVING_STATUS = ( - 0x0404 + PARKING_BRAKE_AUTO_APPLY = ( + 0x0403 | VehiclePropertyGroup:SYSTEM - | VehiclePropertyType:INT32 + | VehiclePropertyType:BOOLEAN | VehicleArea:GLOBAL), /** * Warning for fuel low level. * + * This property corresponds to the low fuel warning on the dashboard. + * Once FUEL_LEVEL_LOW is set, it should not be cleared until more fuel is + * added to the vehicle. + * * @change_mode VehiclePropertyChangeMode:ON_CHANGE * @access VehiclePropertyAccess:READ */ @@ -334,7 +599,9 @@ enum VehicleProperty: int32_t { | VehicleArea:GLOBAL), /** - * Night mode or not. + * Night mode + * + * True indicates that night mode is currently enabled. * * @change_mode VehiclePropertyChangeMode:ON_CHANGE * @access VehiclePropertyAccess:READ @@ -348,10 +615,9 @@ enum VehicleProperty: int32_t { /** * State of the vehicles turn signals * - * Values from VehicleTurnSignal - * * @change_mode VehiclePropertyChangeMode:ON_CHANGE * @access VehiclePropertyAccess:READ + * @data_enum VehicleTurnSignal */ TURN_SIGNAL_STATE = ( 0x0408 @@ -364,20 +630,23 @@ enum VehicleProperty: int32_t { * * @change_mode VehiclePropertyChangeMode:ON_CHANGE * @access VehiclePropertyAccess:READ + * @data_enum VehicleIgnitionState */ IGNITION_STATE = ( 0x0409 - | VehiclePropertyGroup:SYSTEM - | VehiclePropertyType:INT32 - | VehicleArea:GLOBAL), + | VehiclePropertyGroup:SYSTEM + | VehiclePropertyType:INT32 + | VehicleArea:GLOBAL), /** - * ABS is active. Set to true whenever ABS is activated. Reset to false when ABS is off. + * ABS is active + * + * Set to true when ABS is active. Reset to false when ABS is off. 
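GEAR_SELECTION and CURRENT_GEAR above require the config data to list the gears the vehicle actually supports. A minimal sketch (simplified stand-in struct, not the real VehiclePropConfig) of validating a reported gear against that list:

    #include <algorithm>
    #include <cstdint>
    #include <vector>

    // Simplified stand-in for the part of VehiclePropConfig that carries the
    // supported-gear list in its config data.
    struct GearConfig {
        std::vector<int32_t> supportedGears;
    };

    // True if `gear` is one of the values advertised in the config data.
    bool isSupportedGear(const GearConfig& config, int32_t gear) {
        const auto& gears = config.supportedGears;
        return std::find(gears.begin(), gears.end(), gear) != gears.end();
    }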
This + * property may be intermittently set (pulsing) based on the real-time + * state of the ABS system. * * @change_mode VehiclePropertyChangeMode:ON_CHANGE * @access VehiclePropertyAccess:READ - * - * @since o.mr1 */ ABS_ACTIVE = ( 0x040A @@ -386,12 +655,14 @@ enum VehicleProperty: int32_t { | VehicleArea:GLOBAL), /** - * Traction Control is active. + * Traction Control is active + * + * Set to true when traction control (TC) is active. Reset to false when + * TC is off. This property may be intermittently set (pulsing) based on + * the real-time state of the TC system. * * @change_mode VehiclePropertyChangeMode:ON_CHANGE * @access VehiclePropertyAccess:READ - * - * @since o.mr1 */ TRACTION_CONTROL_ACTIVE = ( 0x040B @@ -399,13 +670,44 @@ enum VehicleProperty: int32_t { | VehiclePropertyType:BOOLEAN | VehicleArea:GLOBAL), + /* + * HVAC Properties + * + * Additional rules for mapping a zoned HVAC property to AreaIDs: + * - Every seat in VehicleAreaSeat that is available in the car, must be + * part of an AreaID in the AreaID array. + * + * Example 1: A car has two front seats (ROW_1_LEFT, ROW_1_RIGHT) and three + * back seats (ROW_2_LEFT, ROW_2_CENTER, ROW_2_RIGHT). There are two + * temperature control units -- driver side and passenger side. + * - A valid mapping set of AreaIDs for HVAC_TEMPERATURE_SET would be a + * two element array: + * - ROW_1_LEFT | ROW_2_LEFT + * - ROW_1_RIGHT | ROW_2_CENTER | ROW_2_RIGHT + * - An alternative mapping for the same hardware configuration would be: + * - ROW_1_LEFT | ROW_2_CENTER | ROW_2_LEFT + * - ROW_1_RIGHT | ROW_2_RIGHT + * The temperature controllers are assigned to the seats which they + * "most influence", but every seat must be included exactly once. The + * assignment of the center rear seat to the left or right AreaID may seem + * arbitrary, but the inclusion of every seat in exactly one AreaID ensures + * that the seats in the car are all expressed and that a "reasonable" way + * to affect each seat is available. + * + * Example 2: A car has three seat rows with two seats in the front row (ROW_1_LEFT, + * ROW_1_RIGHT) and three seats in the second (ROW_2_LEFT, ROW_2_CENTER, + * ROW_2_RIGHT) and third rows (ROW_3_LEFT, ROW_3_CENTER, ROW_3_RIGHT). There + * are three temperature control units -- driver side, passenger side, and rear. + * - A reasonable way to map HVAC_TEMPERATURE_SET to AreaIDs is a three + * element array: + * - ROW_1_LEFT + * - ROW_1_RIGHT + * - ROW_2_LEFT | ROW_2_CENTER | ROW_2_RIGHT | ROW_3_LEFT | ROW_3_CENTER | ROW_3_RIGHT + */ + /** * Fan speed setting * - * IVehicle#set may return StatusCode::NOT_AVAILABLE and IVehicle#get is not - * guaranteed to work if HVAC unit is off. See HVAC_POWER_ON property for - * details. - * * @change_mode VehiclePropertyChangeMode:ON_CHANGE * @access VehiclePropertyAccess:READ_WRITE */ @@ -413,15 +715,11 @@ enum VehicleProperty: int32_t { 0x0500 | VehiclePropertyGroup:SYSTEM | VehiclePropertyType:INT32 - | VehicleArea:ZONE), + | VehicleArea:SEAT), /** * Fan direction setting * - * IVehicle#set may return StatusCode::NOT_AVAILABLE and IVehicle#get is not - * guaranteed to work if HVAC unit is off. See HVAC_POWER_ON property for - * details. - * * @change_mode VehiclePropertyChangeMode:ON_CHANGE * @access VehiclePropertyAccess:READ_WRITE * @data_enum VehicleHvacFanDirection @@ -430,46 +728,36 @@ enum VehicleProperty: int32_t { 0x0501 | VehiclePropertyGroup:SYSTEM | VehiclePropertyType:INT32 - | VehicleArea:ZONE), + | VehicleArea:SEAT), /** * HVAC current temperature. 
* - * IVehicle#set may return StatusCode::NOT_AVAILABLE and IVehicle#get is not - * guaranteed to work if HVAC unit is off. See HVAC_POWER_ON property for - * details. - * * @change_mode VehiclePropertyChangeMode:ON_CHANGE - * @access VehiclePropertyAccess:READ_WRITE + * @access VehiclePropertyAccess:READ + * @unit VehicleUnit:CELSIUS */ HVAC_TEMPERATURE_CURRENT = ( 0x0502 | VehiclePropertyGroup:SYSTEM | VehiclePropertyType:FLOAT - | VehicleArea:ZONE), + | VehicleArea:SEAT), /** * HVAC, target temperature set. * - * IVehicle#set may return StatusCode::NOT_AVAILABLE and IVehicle#get is not - * guaranteed to work if HVAC unit is off. See HVAC_POWER_ON property for - * details. - * * @change_mode VehiclePropertyChangeMode:ON_CHANGE * @access VehiclePropertyAccess:READ_WRITE + * @unit VehicleUnit:CELSIUS */ HVAC_TEMPERATURE_SET = ( 0x0503 | VehiclePropertyGroup:SYSTEM | VehiclePropertyType:FLOAT - | VehicleArea:ZONE), + | VehicleArea:SEAT), /** - * On/off defrost - * - * IVehicle#set may return StatusCode::NOT_AVAILABLE and IVehicle#get is not - * guaranteed to work if HVAC unit is off. See HVAC_POWER_ON property for - * details. + * On/off defrost for designated window * * @change_mode VehiclePropertyChangeMode:ON_CHANGE * @access VehiclePropertyAccess:READ_WRITE @@ -481,28 +769,25 @@ enum VehicleProperty: int32_t { | VehicleArea:WINDOW), /** - * On/off AC - * - * IVehicle#set may return StatusCode::NOT_AVAILABLE and IVehicle#get is not - * guaranteed to work if HVAC unit is off. See HVAC_POWER_ON property for - * details. + * On/off AC for designated areaId * * @change_mode VehiclePropertyChangeMode:ON_CHANGE * @access VehiclePropertyAccess:READ_WRITE - * @config_flags Supported zones + * @config_flags Supported areaIds */ HVAC_AC_ON = ( 0x0505 | VehiclePropertyGroup:SYSTEM | VehiclePropertyType:BOOLEAN - | VehicleArea:ZONE), + | VehicleArea:SEAT), /** * On/off max AC * - * IVehicle#set may return StatusCode::NOT_AVAILABLE and IVehicle#get is not - * guaranteed to work if HVAC unit is off. See HVAC_POWER_ON property for - * details. + * When MAX AC is on, the ECU may adjust the vent position, fan speed, + * temperature, etc as necessary to cool the vehicle as quickly as possible. + * Any parameters modified as a side effect of turning on/off the MAX AC + * parameter shall generate onPropertyEvent() callbacks to the VHAL. * * @change_mode VehiclePropertyChangeMode:ON_CHANGE * @access VehiclePropertyAccess:READ_WRITE @@ -511,14 +796,16 @@ enum VehicleProperty: int32_t { 0x0506 | VehiclePropertyGroup:SYSTEM | VehiclePropertyType:BOOLEAN - | VehicleArea:ZONE), + | VehicleArea:SEAT), /** * On/off max defrost * - * IVehicle#set may return StatusCode::NOT_AVAILABLE and IVehicle#get is not - * guaranteed to work if HVAC unit is off. See HVAC_POWER_ON property for - * details. + * When MAX DEFROST is on, the ECU may adjust the vent position, fan speed, + * temperature, etc as necessary to defrost the windows as quickly as + * possible. Any parameters modified as a side effect of turning on/off + * the MAX DEFROST parameter shall generate onPropertyEvent() callbacks to + * the VHAL. 
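HVAC_MAX_AC_ON and HVAC_MAX_DEFROST_ON above require that every value changed as a side effect of toggling them is reported back through onPropertyEvent(). A minimal sketch of that pattern (hypothetical event sink and hard-coded side effects; the unique-id fragments 0x0507, 0x0500, 0x0501 and the DEFROST bit are taken from this patch, the fan-speed value is illustrative):

    #include <cstdint>
    #include <functional>

    // Hypothetical stand-ins for a property update and its event sink.
    struct PropUpdate {
        int32_t prop;    // property id
        int32_t areaId;  // affected AreaID
        int32_t value;   // new value (int-typed properties only, for brevity)
    };
    using EventSink = std::function<void(const PropUpdate&)>;

    // Toggling MAX DEFROST may also change fan speed and fan direction; every
    // value changed as a side effect is emitted so subscribers stay in sync.
    void setMaxDefrost(bool on, int32_t areaId, const EventSink& onPropertyEvent) {
        const int32_t kMaxDefrostOn = 0x0507;  // HVAC_MAX_DEFROST_ON (unique id bits)
        const int32_t kFanSpeed     = 0x0500;  // HVAC_FAN_SPEED
        const int32_t kFanDirection = 0x0501;  // HVAC_FAN_DIRECTION

        onPropertyEvent({kMaxDefrostOn, areaId, on ? 1 : 0});
        if (on) {
            onPropertyEvent({kFanSpeed, areaId, /*illustrative max speed=*/6});
            onPropertyEvent({kFanDirection, areaId, /*DEFROST=*/0x4});
        }
    }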
* * @change_mode VehiclePropertyChangeMode:ON_CHANGE * @access VehiclePropertyAccess:READ_WRITE @@ -527,14 +814,15 @@ enum VehicleProperty: int32_t { 0x0507 | VehiclePropertyGroup:SYSTEM | VehiclePropertyType:BOOLEAN - | VehicleArea:ZONE), + | VehicleArea:SEAT), /** - * On/off re-circulation + * Recirculation on/off * - * IVehicle#set may return StatusCode::NOT_AVAILABLE and IVehicle#get is not - * guaranteed to work if HVAC unit is off. See HVAC_POWER_ON property for - * details. + * Controls the supply of exterior air to the cabin. Recirc “on” means the + * majority of the airflow into the cabin is originating in the cabin. + * Recirc “off” means the majority of the airflow into the cabin is coming + * from outside the car. * * @change_mode VehiclePropertyChangeMode:ON_CHANGE * @access VehiclePropertyAccess:READ_WRITE @@ -543,14 +831,37 @@ enum VehicleProperty: int32_t { 0x0508 | VehiclePropertyGroup:SYSTEM | VehiclePropertyType:BOOLEAN - | VehicleArea:ZONE), + | VehicleArea:SEAT), /** - * On/off dual. This must be defined per each row. + * Enable temperature coupling between areas. + * + * The AreaIDs for HVAC_DUAL_ON property shall contain a combination of + * HVAC_TEMPERATURE_SET AreaIDs that can be coupled together. If + * HVAC_TEMPERATURE_SET is mapped to AreaIDs [a_1, a_2, ..., a_n], and if + * HVAC_DUAL_ON can be enabled to couple a_i and a_j, then HVAC_DUAL_ON + * property must be mapped to [a_i | a_j]. Further, if a_k and a_l can also + * be coupled together separately then HVAC_DUAL_ON must be mapped to + * [a_i | a_j, a_k | a_l]. + * + * Example: A car has two front seats (ROW_1_LEFT, ROW_1_RIGHT) and three + * back seats (ROW_2_LEFT, ROW_2_CENTER, ROW_2_RIGHT). There are two + * temperature control units -- driver side and passenger side -- which can + * be optionally synchronized. This may be expressed in the AreaIDs this way: + * - HVAC_TEMPERATURE_SET->[ROW_1_LEFT | ROW_2_LEFT, ROW_1_RIGHT | ROW_2_CENTER | ROW_2_RIGHT] + * - HVAC_DUAL_ON->[ROW_1_LEFT | ROW_2_LEFT | ROW_1_RIGHT | ROW_2_CENTER | ROW_2_RIGHT] * - * IVehicle#set may return StatusCode::NOT_AVAILABLE and IVehicle#get is not - * guaranteed to work if HVAC unit is off. See HVAC_POWER_ON property for - * details. + * When the property is enabled, the ECU must synchronize the temperature + * for the affected areas. Any parameters modified as a side effect + * of turning on/off the DUAL_ON parameter shall generate + * onPropertyEvent() callbacks to the VHAL. In addition, if setting + * a temperature (i.e. driver's temperature) changes another temperature + * (i.e. front passenger's temperature), then the appropriate + * onPropertyEvent() callbacks must be generated. If a user changes a + * temperature that breaks the coupling (e.g. setting the passenger + * temperature independently) then the VHAL must send the appropriate + * onPropertyEvent() callbacks (i.e. HVAC_DUAL_ON = false, + * HVAC_TEMPERATURE_SET[AreaID] = xxx, etc). * * @change_mode VehiclePropertyChangeMode:ON_CHANGE * @access VehiclePropertyAccess:READ_WRITE @@ -559,15 +870,11 @@ enum VehicleProperty: int32_t { 0x0509 | VehiclePropertyGroup:SYSTEM | VehiclePropertyType:BOOLEAN - | VehicleArea:ZONE), + | VehicleArea:SEAT), /** * On/off automatic mode * - * IVehicle#set may return StatusCode::NOT_AVAILABLE and IVehicle#get is not - * guaranteed to work if HVAC unit is off. See HVAC_POWER_ON property for - * details. 
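HVAC_DUAL_ON above expresses coupled temperature zones as the bitwise union of the HVAC_TEMPERATURE_SET AreaIDs it ties together. A minimal sketch of the two-zone driver/passenger example from that comment (the VehicleAreaSeat bit values here are assumptions for illustration, not quoted from this patch):

    #include <cstdint>
    #include <cstdio>

    // Assumed VehicleAreaSeat bit values; the real enum is defined elsewhere
    // in types.hal.
    constexpr int32_t ROW_1_LEFT   = 0x0001;
    constexpr int32_t ROW_1_RIGHT  = 0x0004;
    constexpr int32_t ROW_2_LEFT   = 0x0010;
    constexpr int32_t ROW_2_CENTER = 0x0020;
    constexpr int32_t ROW_2_RIGHT  = 0x0040;

    int main() {
        // HVAC_TEMPERATURE_SET mapped to two independently controllable AreaIDs,
        // matching the driver/passenger example above.
        int32_t driverZone    = ROW_1_LEFT | ROW_2_LEFT;
        int32_t passengerZone = ROW_1_RIGHT | ROW_2_CENTER | ROW_2_RIGHT;

        // HVAC_DUAL_ON couples those zones, so its single AreaID is their union.
        int32_t dualOnAreaId = driverZone | passengerZone;

        printf("driver=0x%x passenger=0x%x dual=0x%x\n",
               static_cast<unsigned>(driverZone),
               static_cast<unsigned>(passengerZone),
               static_cast<unsigned>(dualOnAreaId));
        return 0;
    }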
- * * @change_mode VehiclePropertyChangeMode:ON_CHANGE * @access VehiclePropertyAccess:READ_WRITE */ @@ -575,10 +882,10 @@ enum VehicleProperty: int32_t { 0x050A | VehiclePropertyGroup:SYSTEM | VehiclePropertyType:BOOLEAN - | VehicleArea:ZONE), + | VehicleArea:SEAT), /** - * Seat temperature + * Seat heating/cooling * * Negative values indicate cooling. * 0 indicates off. @@ -588,10 +895,6 @@ enum VehicleProperty: int32_t { * min/max range defines the allowable range and number of steps in each * direction. * - * IVehicle#set may return StatusCode::NOT_AVAILABLE and IVehicle#get is not - * guaranteed to work if HVAC unit is off. See HVAC_POWER_ON property for - * details. - * * @change_mode VehiclePropertyChangeMode:ON_CHANGE * @access VehiclePropertyAccess:READ_WRITE */ @@ -604,12 +907,9 @@ enum VehicleProperty: int32_t { /** * Side Mirror Heat * - * Increase values denote higher heating levels for side mirrors. - * 0 indicates heating is turned off. - * - * IVehicle#set may return StatusCode::NOT_AVAILABLE and IVehicle#get is not - * guaranteed to work if HVAC unit is off. See HVAC_POWER_ON property for - * details. + * Increasing values denote higher heating levels for side mirrors. + * The Max value in the config data represents the highest heating level. + * The Min value in the config data MUST be zero and indicates no heating. * * @change_mode VehiclePropertyChangeMode:ON_CHANGE * @access VehiclePropertyAccess:READ_WRITE @@ -621,51 +921,44 @@ enum VehicleProperty: int32_t { | VehicleArea:MIRROR), /** - * Steering Wheel Temperature + * Steering Wheel Heating/Cooling * - * Sets the temperature for the steering wheel + * Sets the amount of heating/cooling for the steering wheel + * config data Min and Max MUST be set appropriately. * Positive value indicates heating. * Negative value indicates cooling. * 0 indicates temperature control is off. * - * IVehicle#set may return StatusCode::NOT_AVAILABLE and IVehicle#get is not - * guaranteed to work if HVAC unit is off. See HVAC_POWER_ON property for - * details. - * * @change_mode VehiclePropertyChangeMode:ON_CHANGE * @access VehiclePropertyAccess:READ_WRITE */ - HVAC_STEERING_WHEEL_TEMP = ( + HVAC_STEERING_WHEEL_HEAT = ( 0x050D | VehiclePropertyGroup:SYSTEM | VehiclePropertyType:INT32 | VehicleArea:GLOBAL), /** - * Temperature units - * - * Indicates whether the temperature is in Celsius, Fahrenheit, or a - * different unit from VehicleUnit enum. - * This parameter affects all HVAC temperatures in the system. + * Temperature units for display * - * IVehicle#get is not guaranteed to work if HVAC unit is off. See - * HVAC_POWER_ON property for details. + * Indicates whether the vehicle is displaying temperature to the user as + * Celsius or Fahrenheit. + * This parameter MAY be used for displaying any HVAC temperature in the system. + * Values must be one of VehicleUnit::CELSIUS or VehicleUnit::FAHRENHEIT + * Note that internally, all temperatures are represented in floating point Celsius. * * @change_mode VehiclePropertyChangeMode:ON_CHANGE - * @access VehiclePropertyAccess:READ + * @access VehiclePropertyAccess:READ_WRITE */ - HVAC_TEMPERATURE_UNITS = ( + HVAC_TEMPERATURE_DISPLAY_UNITS = ( 0x050E | VehiclePropertyGroup:SYSTEM | VehiclePropertyType:INT32 - | VehicleArea:ZONE), + | VehicleArea:GLOBAL), /** * Actual fan speed * - * IVehicle#get is not guaranteed to work if HVAC unit is off. See - * HVAC_POWER_ON property for details. 
- * * @change_mode VehiclePropertyChangeMode:ON_CHANGE * @access VehiclePropertyAccess:READ */ @@ -673,35 +966,66 @@ enum VehicleProperty: int32_t { 0x050F | VehiclePropertyGroup:SYSTEM | VehiclePropertyType:INT32 - | VehicleArea:ZONE), + | VehicleArea:SEAT), /** - * Represents power state for HVAC. Some HVAC properties must require - * matching power to be turned on to get out of OFF state. For non-zoned - * HVAC properties, VEHICLE_ALL_ZONE corresponds to global power state. + * Represents global power state for HVAC. Setting this property to false + * MAY mark some properties that control individual HVAC features/subsystems + * to UNAVAILABLE state. Setting this property to true MAY mark some + * properties that control individual HVAC features/subsystems to AVAILABLE + * state (unless any/all of them are UNAVAILABLE on their own individual + * merits). + * + * [Definition] HvacPower_DependentProperties: Properties that need HVAC to be + * powered on in order to enable their functionality. For example, in some cars, + * in order to turn on the AC, HVAC must be powered on first. + * + * HvacPower_DependentProperties list must be set in the + * VehiclePropConfig.configArray. HvacPower_DependentProperties must only contain + * properties that are associated with VehicleArea:SEAT. Properties that are not + * associated with VehicleArea:SEAT, for example, HVAC_DEFROSTER, must never + * depend on HVAC_POWER_ON property and must never be part of + * HvacPower_DependentProperties list. + * + * AreaID mapping for HVAC_POWER_ON property must contain all AreaIDs that + * HvacPower_DependentProperties are mapped to. + * + * Example 1: A car has two front seats (ROW_1_LEFT, ROW_1_RIGHT) and three back + * seats (ROW_2_LEFT, ROW_2_CENTER, ROW_2_RIGHT). If the HVAC features (AC, + * Temperature etc.) throughout the car are dependent on a single HVAC power + * controller then HVAC_POWER_ON must be mapped to + * [ROW_1_LEFT | ROW_1_RIGHT | ROW_2_LEFT | ROW_2_CENTER | ROW_2_RIGHT]. + * + * Example 2: A car has two seats in the front row (ROW_1_LEFT, ROW_1_RIGHT) and + * three seats in the second (ROW_2_LEFT, ROW_2_CENTER, ROW_2_RIGHT) and third + * rows (ROW_3_LEFT, ROW_3_CENTER, ROW_3_RIGHT). If the car has temperature + * controllers in the front row which can operate entirely independently of + * temperature controllers in the back of the vehicle, then HVAC_POWER_ON + * must be mapped to a two element array: + * - ROW_1_LEFT | ROW_1_RIGHT + * - ROW_2_LEFT | ROW_2_CENTER | ROW_2_RIGHT | ROW_3_LEFT | ROW_3_CENTER | ROW_3_RIGHT * * @change_mode VehiclePropertyChangeMode:ON_CHANGE * @access VehiclePropertyAccess:READ_WRITE - * @config_string list of HVAC properties whose power is controlled by this - * property. Format is hexa-decimal number (0x...) separated - * by comma like "0x500,0x503". All zones defined in these - * affected properties must be available in the property. */ HVAC_POWER_ON = ( 0x0510 | VehiclePropertyGroup:SYSTEM | VehiclePropertyType:BOOLEAN - | VehicleArea:ZONE), + | VehicleArea:SEAT), /** * Fan Positions Available * - * This is a bit mask of fan positions available for the zone. Each entry in - * vehicle_hvac_fan_direction is selected by bit position. For instance, if - * only the FAN_DIRECTION_FACE (0x1) and FAN_DIRECTION_DEFROST (0x4) are available, - * then this value shall be set to 0x12. - * - * 0x12 = (1 << 1) | (1 << 4) + * This is a bit mask of fan positions available for the zone. Each + * available fan direction is denoted by a separate entry in the vector. 
A + * fan direction may have multiple bits from vehicle_hvac_fan_direction set. + * For instance, a typical car may have the following fan positions: + * - FAN_DIRECTION_FACE (0x1) + * - FAN_DIRECTION_FLOOR (0x2) + * - FAN_DIRECTION_FACE | FAN_DIRECTION_FLOOR (0x3) + * - FAN_DIRECTION_DEFROST (0x4) + * - FAN_DIRECTION_FLOOR | FAN_DIRECTION_DEFROST (0x6) * * @change_mode VehiclePropertyChangeMode:STATIC * @access VehiclePropertyAccess:READ @@ -709,442 +1033,117 @@ enum VehicleProperty: int32_t { HVAC_FAN_DIRECTION_AVAILABLE = ( 0x0511 | VehiclePropertyGroup:SYSTEM - | VehiclePropertyType:INT32 - | VehicleArea:ZONE), + | VehiclePropertyType:INT32_VEC + | VehicleArea:SEAT), /** - * Automatic re-circulation on/off + * Automatic recirculation on/off * - * IVehicle#set and IVehicle#get must return StatusCode::NOT_AVAILABLE when HVAC unit is off. - * See HVAC_POWER_ON property for details. + * When automatic recirculation is ON, the HVAC system may automatically + * switch to recirculation mode if the vehicle detects poor incoming air + * quality. * * @change_mode VehiclePropertyChangeMode:ON_CHANGE * @access VehiclePropertyAccess:READ_WRITE - * - * @since o.mr1 */ HVAC_AUTO_RECIRC_ON = ( 0x0512 | VehiclePropertyGroup:SYSTEM | VehiclePropertyType:BOOLEAN - | VehicleArea:ZONE), - - /** - * Outside temperature - * - * @change_mode VehiclePropertyChangeMode:ON_CHANGE|VehiclePropertyChangeMode:CONTINUOUS - * @access VehiclePropertyAccess:READ - * @unit VehicleUnit:CELSIUS - */ - ENV_OUTSIDE_TEMPERATURE = ( - 0x0703 - | VehiclePropertyGroup:SYSTEM - | VehiclePropertyType:FLOAT - | VehicleArea:GLOBAL), + | VehicleArea:SEAT), /** - * Cabin temperature + * Seat ventilation * - * @change_mode VehiclePropertyChangeMode:ON_CHANGE|VehiclePropertyChangeMode:CONTINUOUS - * @access VehiclePropertyAccess:READ - * @unit VehicleUnit:CELSIUS - */ - ENV_CABIN_TEMPERATURE = ( - 0x0704 - | VehiclePropertyGroup:SYSTEM - | VehiclePropertyType:FLOAT - | VehicleArea:GLOBAL), - - /** - * Radio presets stored on the Car radio module. The data type used is int32 - * array with the following fields: - *
    - *
 - * int32Values[0]: Preset number
 - * int32Values[1]: Band type (see #RADIO_BAND_FM in
 - *                 system/core/include/system/radio.h).
 - * int32Values[2]: Channel number
 - * int32Values[3]: Sub channel number
 - *
+ * 0 indicates off. + * Positive values indicates ventilation level. * - * NOTE: When getting a current preset config ONLY set preset number (i.e. - * int32Values[0]). For setting a preset other fields are required. + * Used by HVAC apps and Assistant to enable, change, or read state of seat + * ventilation. This is different than seating cooling. It can be on at the + * same time as cooling, or not. * * @change_mode VehiclePropertyChangeMode:ON_CHANGE * @access VehiclePropertyAccess:READ_WRITE - * @config_flags Number of presets supported */ - RADIO_PRESET = ( - 0x0801 + HVAC_SEAT_VENTILATION = ( + 0x0513 | VehiclePropertyGroup:SYSTEM - | VehiclePropertyType:INT32_VEC - | VehicleArea:GLOBAL), + | VehiclePropertyType:INT32 + | VehicleArea:SEAT), - /** - * Represents audio focus state of Android side. Note that car's audio - * module must own audio focus and grant audio focus to Android side when - * requested by Android side. The focus has both per stream characteristics - * and global characteristics. - * - * Focus request (get of this property) must take the following form with indices defined - * by VehicleAudioFocusIndex: - * int32Values[0]: VehicleAudioFocusRequest type - * int32Values[1]: bit flags of streams requested by this focus request. - * There can be up to 32 streams. - * int32Values[2]: External focus state flags. For request, only flag like - * VehicleAudioExtFocusFlag#PLAY_ONLY_FLAG or - * VehicleAudioExtFocusFlag#MUTE_MEDIA_FLAG can be - * used. - * VehicleAudioExtFocusFlag#PLAY_ONLY_FLAG is for case - * like radio where android side app still needs to hold - * focus but playback is done outside Android. - * VehicleAudioExtFocusFlag#MUTE_MEDIA_FLAG is for - * muting media channel including radio. - * VehicleAudioExtFocusFlag#PLAY_ONLY_FLAG can be set - * even if android side releases focus (request type - * REQUEST_RELEASE). In that case, audio module must - * maintain mute state until user's explicit action to - * play some media. - * int32Values[3]: Audio contexts wishing to be active. Use combination of - * flags from VehicleAudioContextFlag. - * This can be used as a hint to adjust audio policy or - * other policy decision. - * Note that there can be multiple context active at the - * same time. And android can send the same focus request - * type gain due to change in audio contexts. - * Note that each focus request can request multiple streams that is - * expected to be used for the current request. But focus request itself - * is global behavior as GAIN or GAIN_TRANSIENT expects all sounds played - * by car's audio module to stop. Note that stream already allocated to - * android before this focus request must not be affected by focus - * request. - * - * Focus response (set and subscription callback for this property) must - * take the following form with indices defined by VehicleAudioFocusIndex: - * int32Values[0]: VehicleAudioFocusState type - * int32Values[1]: bit flags of streams allowed. - * int32Values[2]: External focus state: bit flags of currently active - * audio focus in car side (outside Android). Active - * audio focus does not necessarily mean currently - * playing, but represents the state of having focus or - * waiting for focus (pause state). - * One or combination of flags from - * VehicleAudioExtFocusFlag. - * 0 means no active audio focus holder outside Android. - * The state must have following values for each - * VehicleAudioFocusState: - * GAIN: VehicleAudioExtFocusFlag#PLAY_ONLY_FLAG - * when radio is active in Android side. 
Otherwise, - * VehicleAudioExtFocusFlag#NONE_FLAG. - * GAIN_TRANSIENT: Can be - * VehicleAudioExtFocusFlag#PERMANENT_FLAG or - * VehicleAudioExtFocusFlag#TRANSIENT_FLAG if android - * side has requested - * REQUEST_GAIN_TRANSIENT_MAY_DUCK and car side is - * ducking. Otherwise - * VehicleAudioExtFocusFlag#NONE_FLAG. - * LOSS: VehicleAudioExtFocusFlag#NONE_FLAG when no focus - * is active in car side. - * VehicleAudioExtFocusFlag#PERMANENT_FLAG when car - * side is playing something permanent. - * LOSS_TRANSIENT: must always be - * VehicleAudioExtFocusFlag#PERMANENT_FLAG - * int32Values[3]: Audio context(s) allowed to be active. When responding positively to a - * focus request from Android, the request's original context must be - * repeated here. When taking focus away, or denying a request, the - * rejected or stopped context would have its corresponding bit cleared. - * - * A focus response must be sent per each focus request even if there is - * no change in focus state. This can happen in case like focus request - * only involving context change where android side still needs matching - * focus response to confirm that audio module has made necessary changes. - * - * If car does not support AUDIO_FOCUS, focus is assumed to be granted - * always. - * - * @change_mode VehiclePropertyChangeMode:ON_CHANGE - * @access VehiclePropertyAccess:READ_WRITE - */ - AUDIO_FOCUS = ( - 0x0900 - | VehiclePropertyGroup:SYSTEM - | VehiclePropertyType:INT32_VEC - | VehicleArea:GLOBAL), /** - * A property to allow external component to control audio focus. Depending on - * H/W architecture, audio HAL may need to control audio focus while vehicle - * HAL is still interacting with upper layer. In such case, audio HAL may set - * this property and vehicle HAL may use this property value to decide - * response sent through AUDIO_FOCUS property. - * Data format is the same as AUDIO_FOCUS property. + * Outside temperature * - * @change_mode VehiclePropertyChangeMode:ON_CHANGE - * @access VehiclePropertyAccess:READ_WRITE + * @change_mode VehiclePropertyChangeMode:CONTINUOUS + * @access VehiclePropertyAccess:READ + * @unit VehicleUnit:CELSIUS */ - AUDIO_FOCUS_EXT_SYNC = ( - 0x0910 + ENV_OUTSIDE_TEMPERATURE = ( + 0x0703 | VehiclePropertyGroup:SYSTEM - | VehiclePropertyType:INT32_VEC + | VehiclePropertyType:FLOAT | VehicleArea:GLOBAL), /** - * Property to control audio volume of each audio context. - * - * VehiclePropConfig - * configArray[0] : bit flags of all supported audio contexts from - * VehicleAudioContextFlag. If this is 0, audio volume - * is controlled per physical stream. - * configArray[1] : flags defined in VehicleAudioVolumeCapabilityFlag to - * represent audio module's capability. - * configArray[2..3] : reserved - * configArray[4..N+3] : maximum values for each audio context, where N is - * the number of audio contexts provided in - * configArray[0], minimum value is always 0 which - * indicates mute state. + * Property to control power state of application processor * - * Data type looks like: - * int32Values[0] : audio context as defined in VehicleAudioContextFlag. - * If only physical stream is supported - * (configArray[0] == 0), this must represent physical - * stream number. - * int32Values[1] : volume level, valid range is 0 (mute) to max level - * defined in the config. - * int32Values[2] : One of VehicleAudioVolumeState. + * It is assumed that AP's power state is controller by separate power + * controller. 
* - * HAL implementations must check the incoming value of audio context - * field in get call to return the right volume. + * For configuration information, VehiclePropConfig.configArray can have bit flag combining + * values in VehicleApPowerStateConfigFlag. * - * @change_mode VehiclePropertyChangeMode:ON_CHANGE - * @access VehiclePropertyAccess:READ_WRITE - * @config_flags all audio contexts supported. - */ - AUDIO_VOLUME = ( - 0x0901 - | VehiclePropertyGroup:SYSTEM - | VehiclePropertyType:INT32_VEC - | VehicleArea:GLOBAL), - - /** - * Property to allow audio volume sync from external components like audio HAL. - * Some vehicle HAL implementation may get volume control from audio HAL and in such - * case, setting AUDIO_VOLUME_EXT_SYNC property may trigger event in AUDIO_VOLUME property. - * Data format for this property is the same as AUDIO_VOLUME property. + * int32Values[0] : VehicleApPowerStateReq enum value + * int32Values[1] : additional parameter relevant for each state, + * 0 if not used. * * @change_mode VehiclePropertyChangeMode:ON_CHANGE - * @access VehiclePropertyAccess:READ_WRITE - * @config_flags all audio contexts supported. + * @access VEHICLE_PROP_ACCESS_READ */ - AUDIO_VOLUME_EXT_SYNC = ( - 0x0911 + AP_POWER_STATE_REQ = ( + 0x0A00 | VehiclePropertyGroup:SYSTEM | VehiclePropertyType:INT32_VEC | VehicleArea:GLOBAL), /** - * Property for handling volume limit set by user. This limits maximum - * volume that can be set per each context or physical stream. - * - * VehiclePropConfig - * configArray[0] : bit flags of all supported audio contexts. If this is - * 0, audio volume is controlled per physical stream. - * configArray[1] : flags defined in VehicleAudioVolumeCapabilityFlag - * to represent audio module's capability. - * - * Data type looks like: - * int32Values[0] : audio context as defined in VehicleAudioContextFlag. - * If only physical stream is supported - * (configArray[0] == 0), this must represent physical - * stream number. - * int32Values[1] : maximum volume set to the stream. If there is no - * restriction, this value must be equal to - * AUDIO_VOLUME's max value. + * Property to report power state of application processor * - * If car does not support this feature, this property must not be - * populated by HAL. - * HAL implementations must check the incoming value of audio context - * field in get call to return the right volume. - * - * @change_mode VehiclePropertyChangeMode:ON_CHANGE - * @access VehiclePropertyAccess:READ_WRITE - * @config_flags all audio contexts supported. - */ - AUDIO_VOLUME_LIMIT = ( - 0x0902 - | VehiclePropertyGroup:SYSTEM - | VehiclePropertyType:INT32_VEC - | VehicleArea:GLOBAL), - - /** - * Property to share audio routing policy of android side. This property is - * set at startup to pass audio policy in android side down to - * vehicle HAL and car audio module. + * It is assumed that AP's power state is controller by separate power + * controller. * - * int32Values[0] : audio stream where the audio for the application - * context must be routed by default. Note that this is - * the default setting from system, but each app may - * still use different audio stream for whatever reason. - * int32Values[1] : All audio contexts that must be sent through the - * physical stream. Flag is defined in - * VehicleAudioContextFlag. + * int32Values[0] : VehicleApPowerStateReport enum value + * int32Values[1] : Time in ms to wake up, if necessary. Otherwise 0. 
- * Setting of this property must be done for all available physical streams - * based on audio H/W variant information acquired from AUDIO_HW_VARIANT - * property. * * @change_mode VehiclePropertyChangeMode:ON_CHANGE - * @access VehiclePropertyAccess:WRITE + * @access VEHICLE_PROP_ACCESS_WRITE */ - AUDIO_ROUTING_POLICY = ( - 0x0903 + AP_POWER_STATE_REPORT = ( + 0x0A01 | VehiclePropertyGroup:SYSTEM | VehiclePropertyType:INT32_VEC | VehicleArea:GLOBAL), /** - * Property to return audio H/W variant type used in this car. This is a - * zero based index into the set of audio routing policies defined in - * R.array.audioRoutingPolicy on CarService, which may be overlaid to - * support multiple variants. If this property does not exist, the default - * audio policy must be used. + * Property to report bootup reason for the current power on. This is a + * static property that will not change for the whole duration until power + * off. For example, even if user presses power on button after automatic + * power on with door unlock, bootup reason must stay with + * VehicleApPowerBootupReason#USER_UNLOCK. + * + * int32Values[0] must be VehicleApPowerBootupReason. * * @change_mode VehiclePropertyChangeMode:STATIC * @access VehiclePropertyAccess:READ - * @config_flags Additional info on audio H/W. Must use - * VehicleAudioHwVariantConfigFlag for this. - */ - AUDIO_HW_VARIANT = ( - 0x0904 + */ + AP_POWER_BOOTUP_REASON = ( + 0x0A02 | VehiclePropertyGroup:SYSTEM | VehiclePropertyType:INT32 | VehicleArea:GLOBAL), - /** - * Property to pass hint on external audio routing. When android side - * request focus with VehicleAudioExtFocusflag, this - * property must be set before setting AUDIO_FOCUS property as a hint for - * external audio source routing. - * Note that setting this property alone must not trigger any change. - * Audio routing must be changed only when AUDIO_FOCUS property is set. - * Note that this property allows passing custom value as long as it is - * defined in VehiclePropConfig#configString. This allows supporting - * non-standard routing options through this property. - * It is recommended to use separate name space for custom property to - * prevent conflict in future android releases. - * Enabling each external routing option is done by enabling each bit flag - * for the routing. - * This property can support up to 128 external routings. - * To give full flexibility, there is no standard definition for each bit - * flag and assigning each bit flag to specific routing type is decided by - * VehiclePropConfig#configString. VehiclePropConfig#configString has - * format of each entry separated by ',' and each entry has format of - * bitFlagPositon:typeString[:physicalStreamNumber]. - * bitFlagPosition: represents which bit flag will be set to enable this - * routing. 0 means LSB in int32Values[0]. 31 will be MSB in - * int32Values[0]. 127 will MSB in int32Values[3]. - * typeString: string representation of external routing. Some types are - * already defined in AUDIO_EXT_ROUTING_SOURCE_* and use them first - * before adding something custom. Applications will find each routing - * using this string. - * physicalStreamNumber: This part is optional and represents physical - * stream to android which will be disabled when this routing is enabled. - * If not specified, this routing must not affect physical streams to - * android. - * As an example, let's assume a system with two physical streams, 0 for - * media and 1 for nav guidance. 
And let's assume external routing option - * of am fm radio, external navigation guidance, satellite radio, and one - * custom. Let's assume that radio and satellite replaces physical stream 0 - * and external navigation replaces physical stream 1. And bit flag will be - * assigned in the order listed above. This configuration will look like - * this in config_string: - * "0:RADIO_AM_FM:0,1:EXT_NAV_GUIDANCE:1,2:RADIO_SATELLITE:0,3:com.test.SOMETHING_CUSTOM" - * When android requests RADIO_AM_FM, int32Values[0] will be set to 0x1. - * When android requests RADIO_SATELLITE + EXT_NAV_GUIDANCE, int32Values[0] - * will be set to 0x2|0x4. - * - * @change_mode VehiclePropertyChangeMode:ON_CHANGE - * @access VehiclePropertyAccess:READ_WRITE - * @config_string List of all avaiable external source in the system. - */ - AUDIO_EXT_ROUTING_HINT = ( - 0x0905 - | VehiclePropertyGroup:SYSTEM - | VehiclePropertyType:INT32_VEC - | VehicleArea:GLOBAL), - - /** - * Represents state of audio stream. Audio HAL should set this when a stream is starting or - * ending. Car service can request focus for audio played without focus. If such feature - * is not required, this property does not need to be implemented. - * Car service only monitors setting of this property. It is up to each vehicle HAL - * implementation to add necessary action but default implementation will be doing nothing on - * this propery's set from audio HAL. - * Actual streaming of data should be done only after getting focus for the given stream from - * car audio module. Focus can be already granted when stream is started. Focus state can be - * monitored by monitoring AUDIO_FOCUS property. If car does not support - * AUDIO_FOCUS property, there is no need to monitor focus as focus is assumed to be - * granted always. - * Data has the following format: - * int32_array[0] : vehicle_audio_stream_state, 0: stopped, 1: started - * int32_array[1] : stream number like 0, 1, 2, ... - * - * @change_mode VehiclePropertyChangeMode:ON_CHANGE - * @access VehiclePropertyAccess:READ_WRITE - */ - AUDIO_STREAM_STATE = ( - 0x0906 - | VehiclePropertyGroup:SYSTEM - | VehiclePropertyType:INT32_VEC - | VehicleArea:GLOBAL), - - /** - * Property to control car specific audio parameters. Each parameter is defined as string key- - * value pair. - * set and event notification can pass multiple parameters using the - * following format: - * key1=value1;key2=value2;... - * get call can request multiple parameters using the following format: - * key1;key2;... - * Response for get call has the same format as set. - * - * VehiclePropConfig - * configString: give list of all supported keys with ; as separator. For example: - * key1;key2;... - * - * @change_mode VehiclePropertyChangeMode:ON_CHANGE - * @access VehiclePropertyAccess:READ_WRITE - */ - AUDIO_PARAMETERS = ( - 0x907 - | VehiclePropertyGroup:SYSTEM - | VehiclePropertyType:STRING - | VehicleArea:GLOBAL), - - /** - * Property to control power state of application processor - * - * It is assumed that AP's power state is controller by separate power - * controller. - * - * For configuration information, VehiclePropConfig.configFlags can - * have bit flag combining values in VehicleApPowerStateConfigFlag. - * - * Value format for IVehicle#get / IVehicle#subscribe: - * int32Values[0] : vehicle_ap_power_state_type - * int32Values[1] : additional parameter relevant for each state, - * 0 if not used. 
- * Value format for IVehicle#set: - * int32Values[0] : vehicle_ap_power_state_set_type - * int32Values[1] : additional parameter relevant for each request. should be 0 if not used. - * - * @change_mode VEHICLE_PROP_CHANGE_MODE_ON_CHANGE - * @access VEHICLE_PROP_ACCESS_READ_WRITE - */ - AP_POWER_STATE = ( - 0x0A00 - | VehiclePropertyGroup:SYSTEM - | VehiclePropertyType:INT32_VEC - | VehicleArea:GLOBAL), - /** * Property to represent brightness of the display. Some cars have single * control for the brightness of all displays and this property is to share @@ -1152,32 +1151,14 @@ enum VehicleProperty: int32_t { * * If this is writable, android side can set this value when user changes * display brightness from Settings. If this is read only, user may still - * change display brightness from Settings, but that will not be reflected + * change display brightness from Settings, but that must not be reflected * to other displays. * * @change_mode VehiclePropertyChangeMode:ON_CHANGE * @access VehiclePropertyAccess:READ_WRITE */ DISPLAY_BRIGHTNESS = ( - 0x0A01 - | VehiclePropertyGroup:SYSTEM - | VehiclePropertyType:INT32 - | VehicleArea:GLOBAL), - - /** - * Property to report bootup reason for the current power on. This is a - * static property that will not change for the whole duration until power - * off. For example, even if user presses power on button after automatic - * power on with door unlock, bootup reason must stay with - * VehicleApPowerBootupReason#USER_UNLOCK. - * - * int32Values[0] must be VehicleApPowerBootupReason. - * - * @change_mode VehiclePropertyChangeMode:STATIC - * @access VehiclePropertyAccess:READ - */ - AP_POWER_BOOTUP_REASON = ( - 0x0A02 + 0x0A03 | VehiclePropertyGroup:SYSTEM | VehiclePropertyType:INT32 | VehicleArea:GLOBAL), @@ -1190,6 +1171,7 @@ enum VehicleProperty: int32_t { * int32Values[2] : target display defined in VehicleDisplay. Events not * tied to specific display must be sent to * VehicleDisplay#MAIN. + * * @change_mode VehiclePropertyChangeMode:ON_CHANGE * @access VehiclePropertyAccess:READ * @config_flags @@ -1200,71 +1182,23 @@ enum VehicleProperty: int32_t { | VehiclePropertyType:INT32_VEC | VehicleArea:GLOBAL), - /** - * Property to define instrument cluster information. - * For VehicleInstrumentClusterType:EXTERNAL_DISPLAY: - * READ: - * int32Values[0] : The current screen mode index. Screen mode is defined - * as a configuration in car service and represents - * which area of screen is renderable. - * int32Values[1] : Android can render to instrument cluster (=1) or - * not(=0). When this is 0, instrument cluster may be - * rendering some information in the area allocated for - * android and android side rendering is invisible. - * WRITE from android: - * int32Values[0] : Preferred mode for android side. Depending on the app - * rendering to instrument cluster, preferred mode can - * change. Instrument cluster still needs to send - * event with new mode to trigger actual mode change. - * int32Values[1] : The current app context relevant for instrument - * cluster. Use the same flag with - * VehicleAudioContextFlag but this context represents - * active apps, not active audio. Instrument cluster - * side may change mode depending on the currently - * active contexts. - * When system boots up, Android side will write {0, 0, 0, 0} when it is - * ready to render to instrument cluster. Before this message, rendering - * from android must not be visible in the cluster. 
- * @change_mode VehiclePropertyChangeMode:ON_CHANGE - * @access VehiclePropertyAccess:READ_WRITE - * @configArray 0:VehicleInstrumentClusterType 1:hw type - */ - INSTRUMENT_CLUSTER_INFO = ( - 0x0A20 - | VehiclePropertyGroup:SYSTEM - | VehiclePropertyType:INT32_VEC - | VehicleArea:GLOBAL), - - /** - * Current date and time, encoded as Unix time. - * This value denotes the number of seconds that have elapsed since - * 1/1/1970. + /*************************************************************************** + * Most Car Cabin properties have both a POSition and MOVE parameter. These + * are used to control the various movements for seats, doors, and windows + * in a vehicle. * - * @change_mode VehiclePropertyChangeMode:ON_SET - * @access VehiclePropertyAccess:READ_WRITE - * @unit VehicleUnit:SECS - */ - UNIX_TIME = ( - 0x0A30 - | VehiclePropertyGroup:SYSTEM - | VehiclePropertyType:INT64 - | VehicleArea:GLOBAL), - - /** - * Current time only. - * Some vehicles may not keep track of date. This property only affects - * the current time, in seconds during the day. Thus, the max value for - * this parameter is 86,400 (24 * 60 * 60) + * A POS parameter allows the user to set the absolution position. For + * instance, for a door, 0 indicates fully closed and max value indicates + * fully open. Thus, a value halfway between min and max must indicate + * the door is halfway open. * - * @change_mode VehiclePropertyChangeMode:ON_SET - * @access VehiclePropertyAccess:READ_WRITE - * @unit VehicleUnit:SECS - */ - CURRENT_TIME_IN_SECONDS = ( - 0x0A31 - | VehiclePropertyGroup:SYSTEM - | VehiclePropertyType:INT32 - | VehicleArea:GLOBAL), + * A MOVE parameter moves the device in a particular direction. The sign + * indicates direction, and the magnitude indicates speed (if multiple + * speeds are available). For a door, a move of -1 will close the door, and + * a move of +1 will open it. Once a door reaches the limit of open/close, + * the door should automatically stop moving. The user must NOT need to + * send a MOVE(0) command to stop the door at the end of its range. + **************************************************************************/ /** * Door position @@ -1419,7 +1353,7 @@ enum VehicleProperty: int32_t { * * This setting allows the user to save the current seat position settings * into the selected preset slot. The maxValue for each seat position - * shall match the maxValue for SEAT_MEMORY_SELECT. + * must match the maxValue for SEAT_MEMORY_SELECT. * * @change_mode VehiclePropertyChangeMode:ON_CHANGE * @access VehiclePropertyAccess:WRITE @@ -1806,68 +1740,57 @@ enum VehicleProperty: int32_t { /** * Window Position * - * Max = window up / closed - * Min = window down / open + * Min = window up / closed + * Max = window down / open * - * @change_mode VehiclePropertyChangeMode:ON_CHANGE - * @access VehiclePropertyAccess:READ_WRITE - */ - WINDOW_POS = ( - 0x0BC0 - | VehiclePropertyGroup:SYSTEM - | VehiclePropertyType:INT32 - | VehicleArea:GLOBAL), - - /** - * Window Move + * For a window that may open out of plane (i.e. vent mode of sunroof) this + * parameter will work with negative values as follows: + * Max = sunroof completely open + * 0 = sunroof closed. + * Min = sunroof vent completely open * - * Max = window up / closed - * Min = window down / open - * Magnitude denotes relative speed. I.e. +2 is faster than +1 in raising - * the window. + * Note that in this mode, 0 indicates the window is closed. 
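WINDOW_POS above defines one axis per window: min..0 covers an out-of-plane vent range (for a sunroof), 0 means closed, and 0..max covers the normal open range. A minimal sketch (hypothetical helper; the min/max would come from the property's areaConfig) that classifies a position value on that axis:

    #include <cstdint>
    #include <cstdio>

    // Hypothetical min/max from the areaConfig of WINDOW_POS for a sunroof
    // that supports a vent mode (min < 0) and a sliding range (max > 0).
    struct WindowRange {
        int32_t minPos;  // most negative value = vent fully open
        int32_t maxPos;  // most positive value = sunroof fully open
    };

    const char* describePosition(const WindowRange& range, int32_t pos) {
        if (pos == 0) return "closed";
        if (pos < 0) return pos == range.minPos ? "vent fully open" : "vent partially open";
        return pos == range.maxPos ? "fully open" : "partially open";
    }

    int main() {
        WindowRange sunroof{-3, 10};
        printf("%s / %s / %s\n", describePosition(sunroof, 0),
               describePosition(sunroof, -3), describePosition(sunroof, 4));
        return 0;
    }

WINDOW_MOVE then drives motion along the same axis, with the sign giving direction and the magnitude giving relative speed, as described in the hunk above.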
* * @change_mode VehiclePropertyChangeMode:ON_CHANGE * @access VehiclePropertyAccess:READ_WRITE */ - WINDOW_MOVE = ( - 0x0BC1 - | VehiclePropertyGroup:SYSTEM - | VehiclePropertyType:INT32 - | VehicleArea:GLOBAL), - - /** - * Window Vent Position - * - * This feature is used to control the vent feature on a sunroof. - * - * Max = vent open - * Min = vent closed - * - * @change_mode VehiclePropertyChangeMode:ON_CHANGE - * @access VehiclePropertyAccess:READ_WRITE - */ - WINDOW_VENT_POS = ( - 0x0BC2 + WINDOW_POS = ( + 0x0BC0 | VehiclePropertyGroup:SYSTEM | VehiclePropertyType:INT32 - | VehicleArea:GLOBAL), + | VehicleArea:WINDOW), /** - * Window Vent Move + * Window Move + * + * Max = Open the window as fast as possible + * Min = Close the window as fast as possible + * Magnitude denotes relative speed. I.e. +2 is faster than +1 in closing + * the window. * - * This feature is used to control the vent feature on a sunroof. + * For a window that may open out of plane (i.e. vent mode of sunroof) this + * parameter will work as follows: * - * Max = vent open - * Min = vent closed + * If sunroof is open: + * Max = open the sunroof further, automatically stop when fully open. + * Min = close the sunroof, automatically stop when sunroof is closed. + * + * If vent is open: + * Max = close the vent, automatically stop when vent is closed. + * Min = open the vent further, automatically stop when vent is fully open. + * + * If sunroof is in the closed position: + * Max = open the sunroof, automatically stop when sunroof is fully open. + * Min = open the vent, automatically stop when vent is fully open. * * @change_mode VehiclePropertyChangeMode:ON_CHANGE * @access VehiclePropertyAccess:READ_WRITE */ - WINDOW_VENT_MOVE = ( - 0x0BC3 + WINDOW_MOVE = ( + 0x0BC1 | VehiclePropertyGroup:SYSTEM | VehiclePropertyType:INT32 - | VehicleArea:GLOBAL), + | VehicleArea:WINDOW), /** * Window Lock @@ -1881,13 +1804,13 @@ enum VehicleProperty: int32_t { 0x0BC4 | VehiclePropertyGroup:SYSTEM | VehiclePropertyType:BOOLEAN - | VehicleArea:GLOBAL), + | VehicleArea:WINDOW), /** * Vehicle Maps Service (VMS) message * - * This property uses COMPLEX data to communicate vms messages. + * This property uses MIXED data to communicate vms messages. 
* * Its contents are to be interpreted as follows: * the indices defined in VmsMessageIntegerValuesIndex are to be used to @@ -1899,13 +1822,11 @@ enum VehicleProperty: int32_t { * * @change_mode VehiclePropertyChangeMode:ON_CHANGE * @access VehiclePropertyAccess:READ_WRITE - * - * @since o.mr1 */ VEHICLE_MAP_SERVICE = ( 0x0C00 | VehiclePropertyGroup:SYSTEM - | VehiclePropertyType:COMPLEX + | VehiclePropertyType:MIXED | VehicleArea:GLOBAL), /** @@ -1948,14 +1869,12 @@ enum VehicleProperty: int32_t { * * @change_mode VehiclePropertyChangeMode:ON_CHANGE * @access VehiclePropertyAccess:READ - * - * @since o.mr1 */ OBD2_LIVE_FRAME = ( - 0x0D00 - | VehiclePropertyGroup:SYSTEM - | VehiclePropertyType:COMPLEX - | VehicleArea:GLOBAL), + 0x0D00 + | VehiclePropertyGroup:SYSTEM + | VehiclePropertyType:MIXED + | VehicleArea:GLOBAL), /** * OBD2 Freeze Frame Sensor Data @@ -1980,14 +1899,12 @@ enum VehicleProperty: int32_t { * * @change_mode VehiclePropertyChangeMode:ON_CHANGE * @access VehiclePropertyAccess:READ - * - * @since o.mr1 */ OBD2_FREEZE_FRAME = ( - 0x0D01 - | VehiclePropertyGroup:SYSTEM - | VehiclePropertyType:COMPLEX - | VehicleArea:GLOBAL), + 0x0D01 + | VehiclePropertyGroup:SYSTEM + | VehiclePropertyType:MIXED + | VehicleArea:GLOBAL), /** * OBD2 Freeze Frame Information @@ -2003,14 +1920,12 @@ enum VehicleProperty: int32_t { * * @change_mode VehiclePropertyChangeMode:ON_CHANGE * @access VehiclePropertyAccess:READ - * - * @since o.mr1 */ OBD2_FREEZE_FRAME_INFO = ( - 0x0D02 - | VehiclePropertyGroup:SYSTEM - | VehiclePropertyType:COMPLEX - | VehicleArea:GLOBAL), + 0x0D02 + | VehiclePropertyGroup:SYSTEM + | VehiclePropertyType:MIXED + | VehicleArea:GLOBAL), /** * OBD2 Freeze Frame Clear @@ -2031,283 +1946,273 @@ enum VehicleProperty: int32_t { * * @change_mode VehiclePropertyChangeMode:ON_CHANGE * @access VehiclePropertyAccess:WRITE - * - * @since o.mr1 */ OBD2_FREEZE_FRAME_CLEAR = ( - 0x0D03 - | VehiclePropertyGroup:SYSTEM - | VehiclePropertyType:COMPLEX - | VehicleArea:GLOBAL), -}; - -/** - * Bit flags for fan direction - */ -enum VehicleHvacFanDirection : int32_t { - FACE = 0x1, - FLOOR = 0x2, - FACE_AND_FLOOR = 0x3, - DEFROST = 0x4, - DEFROST_AND_FLOOR = 0x5, -}; - -/** - * Constants relevant to radio. - */ -enum VehicleRadioConstants : int32_t { - /** Minimum value for the radio preset */ - VEHICLE_RADIO_PRESET_MIN_VALUE = 1, -}; - -enum VehicleAudioFocusRequest : int32_t { - REQUEST_GAIN = 0x1, - REQUEST_GAIN_TRANSIENT = 0x2, - REQUEST_GAIN_TRANSIENT_MAY_DUCK = 0x3, - /** - * This is for the case where android side plays sound like UI feedback - * and car side does not need to duck existing playback as long as - * requested stream is available. - */ - REQUEST_GAIN_TRANSIENT_NO_DUCK = 0x4, - REQUEST_RELEASE = 0x5, -}; - -enum VehicleAudioFocusState : int32_t { - /** - * Android side has permanent focus and can play allowed streams. - */ - STATE_GAIN = 0x1, - - /** - * Android side has transient focus and can play allowed streams. - */ - STATE_GAIN_TRANSIENT = 0x2, - - /** - * Car audio module is playing guidance kind of sound outside Android. - * Android side can still play through allowed streams with ducking. - */ - STATE_LOSS_TRANSIENT_CAN_DUCK = 0x3, + 0x0D03 + | VehiclePropertyGroup:SYSTEM + | VehiclePropertyType:MIXED + | VehicleArea:GLOBAL), /** - * Car audio module is playing transient sound outside Android. Android side - * must stop playing any sounds. + * Headlights State + * + * Return the current state of headlights. 
+ * + * @change_mode VehiclePropertyChangeMode:ON_CHANGE + * @access VehiclePropertyAccess:READ + * @data_enum VehicleLightState */ - STATE_LOSS_TRANSIENT = 0x4, + HEADLIGHTS_STATE = ( + 0x0E00 + | VehiclePropertyGroup:SYSTEM + | VehiclePropertyType:INT32 + | VehicleArea:GLOBAL), /** - * Android side has lost focus and cannot play any sound. + * High beam lights state + * + * Return the current state of high beam lights. + * + * @change_mode VehiclePropertyChangeMode:ON_CHANGE + * @access VehiclePropertyAccess:READ + * @data_enum VehicleLightState */ - STATE_LOSS = 0x5, + HIGH_BEAM_LIGHTS_STATE = ( + 0x0E01 + | VehiclePropertyGroup:SYSTEM + | VehiclePropertyType:INT32 + | VehicleArea:GLOBAL), /** - * car audio module is playing safety critical sound, and Android side cannot - * request focus until the current state is finished. car audio module - * restore it to the previous state when it can allow Android to play. + * Fog light state + * + * Return the current state of fog lights. + * + * @change_mode VehiclePropertyChangeMode:ON_CHANGE + * @access VehiclePropertyAccess:READ + * @data_enum VehicleLightState */ - STATE_LOSS_TRANSIENT_EXLCUSIVE = 0x6, -}; - -/** - * Flags to represent multiple streams by combining these. - */ -enum VehicleAudioStreamFlag : int32_t { - STREAM0_FLAG = (0x1 << 0), - STREAM1_FLAG = (0x1 << 1), - STREAM2_FLAG = (0x1 << 2), -}; - -/** - * Represents stream number (always 0 to N -1 where N is max number of streams). - * Can be used for audio related property expecting one stream. - */ -enum VehicleAudioStream : int32_t { - STREAM0 = 0, - STREAM1 = 1, -}; + FOG_LIGHTS_STATE = ( + 0x0E02 + | VehiclePropertyGroup:SYSTEM + | VehiclePropertyType:INT32 + | VehicleArea:GLOBAL), -/** - * Flag to represent external focus state (outside Android). - */ -enum VehicleAudioExtFocusFlag : int32_t { /** - * No external focus holder. + * Hazard light status + * + * Return the current status of hazard lights. + * + * @change_mode VehiclePropertyChangeMode:ON_CHANGE + * @access VehiclePropertyAccess:READ + * @data_enum VehicleLightState */ - NONE_FLAG = 0x0, + HAZARD_LIGHTS_STATE = ( + 0x0E03 + | VehiclePropertyGroup:SYSTEM + | VehiclePropertyType:INT32 + | VehicleArea:GLOBAL), /** - * Car side (outside Android) has component holding GAIN kind of focus state. + * Headlight switch + * + * The setting that the user wants. + * + * @change_mode VehiclePropertyChangeMode:ON_CHANGE + * @access VehiclePropertyAccess:READ_WRITE + * @data_enum VehicleLightSwitch */ - PERMANENT_FLAG = 0x1, + HEADLIGHTS_SWITCH = ( + 0x0E10 + | VehiclePropertyGroup:SYSTEM + | VehiclePropertyType:INT32 + | VehicleArea:GLOBAL), /** - * Car side (outside Android) has component holding GAIN_TRANSIENT kind of - * focus state. + * High beam light switch + * + * The setting that the user wants. + * + * @change_mode VehiclePropertyChangeMode:ON_CHANGE + * @access VehiclePropertyAccess:READ_WRITE + * @data_enum VehicleLightSwitch */ - TRANSIENT_FLAG = 0x2, + HIGH_BEAM_LIGHTS_SWITCH = ( + 0x0E11 + | VehiclePropertyGroup:SYSTEM + | VehiclePropertyType:INT32 + | VehicleArea:GLOBAL), /** - * Car side is expected to play something while focus is held by Android side. - * One example can be radio attached in car side. But Android's radio app - * still must have focus, and Android side must be in GAIN state, but - * media stream will not be allocated to Android side and car side can play - * radio any time while this flag is active. + * Fog light switch + * + * The setting that the user wants. 
+ * + * @change_mode VehiclePropertyChangeMode:ON_CHANGE + * @access VehiclePropertyAccess:READ_WRITE + * @data_enum VehicleLightSwitch */ - PLAY_ONLY_FLAG = 0x4, + FOG_LIGHTS_SWITCH = ( + 0x0E12 + | VehiclePropertyGroup:SYSTEM + | VehiclePropertyType:INT32 + | VehicleArea:GLOBAL), /** - * Car side must mute any media including radio. This can be used with any - * focus request including GAIN* and RELEASE. + * Hazard light switch + * + * The setting that the user wants. + * + * @change_mode VehiclePropertyChangeMode:ON_CHANGE + * @access VehiclePropertyAccess:READ_WRITE + * @data_enum VehicleLightSwitch */ - MUTE_MEDIA_FLAG = 0x8, + HAZARD_LIGHTS_SWITCH = ( + 0x0E13 + | VehiclePropertyGroup:SYSTEM + | VehiclePropertyType:INT32 + | VehicleArea:GLOBAL), }; /** - * Index in int32Values for VehicleProperty#AUDIO_FOCUS property. + * Used by lights state properties to enumerate the current state of the lights. + * + * Most XXX_LIGHTS_STATE properties will only report ON and OFF states. Only + * the HEADLIGHTS_STATE property will report DAYTIME_RUNNING. */ -enum VehicleAudioFocusIndex : int32_t { - FOCUS = 0, - STREAMS = 1, - EXTERNAL_FOCUS_STATE = 2, - AUDIO_CONTEXTS = 3, +enum VehicleLightState : int32_t { + + OFF = 0, + ON = 1, + DAYTIME_RUNNING = 2 }; /** - * Flags to tell the current audio context. + * Used by lights switch properties to enumerate user selected switch setting. + * + * XXX_LIGHTS_SWITCH properties report the switch settings that the user + * selects. The switch setting may be decoupled from the state reported if the + * user selects AUTOMATIC. */ -enum VehicleAudioContextFlag : int32_t { - /** Music playback is currently active. */ - MUSIC_FLAG = 0x1, - - /** Navigation is currently running. */ - NAVIGATION_FLAG = 0x2, - - /** Voice command session is currently running. */ - VOICE_COMMAND_FLAG = 0x4, - - /** Voice call is currently active. */ - CALL_FLAG = 0x8, - - /** - * Alarm is active. - * This must be only used in VehicleProperty#AUDIO_ROUTING_POLICY. - */ - ALARM_FLAG = 0x10, - +enum VehicleLightSwitch : int32_t { + OFF = 0, + ON = 1, /** - * Notification sound is active. - * This must be only used in VehicleProperty#AUDIO_ROUTING_POLICY. + * Daytime running lights mode. Most cars automatically use DRL but some + * cars allow the user to activate them manually. */ - NOTIFICATION_FLAG = 0x20, - + DAYTIME_RUNNING = 2, /** - * Context unknown. Only used for VehicleProperty#AUDIO_ROUTING_POLICY to - * represent default stream for unknown contents. + * Allows the vehicle ECU to set the lights automatically */ - UNKNOWN_FLAG = 0x40, - - /** Safety alert / warning is played. */ - SAFETY_ALERT_FLAG = 0x80, - - /** CD / DVD kind of audio is played */ - CD_ROM_FLAG = 0x100, - - /** Aux audio input is played */ - AUX_AUDIO_FLAG = 0x200, - - /** system sound like UI feedback */ - SYSTEM_SOUND_FLAG = 0x400, - - /** Radio is played */ - RADIO_FLAG = 0x800, - - /** Ext source is played. This is for tagging generic ext sources. */ - EXT_SOURCE_FLAG = 0x1000, - - /** The phone ring tone is played */ - RINGTONE_FLAG = 0x2000 + AUTOMATIC = 0x100, }; /** - * flags to represent capability of audio volume property. - * used in configArray[1] of VehiclePropConfig. + * Used by INFO_EV_CONNECTOR_TYPE to enumerate the type of connectors + * available to charge the vehicle. */ -enum VehicleAudioVolumeCapabilityFlag : int32_t { +enum EvConnectorType : int32_t { /** - * External audio module or vehicle hal has persistent storage to keep the - * volume level. 
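
As a sketch of how a switch setting and a reported state can diverge, the standalone C++ below mirrors the VehicleLightSwitch and VehicleLightState values defined above with local stand-in enums; the AUTOMATIC policy (a single darkness flag) is an invented example, not behaviour the HAL mandates.

    #include <cstdint>
    #include <iostream>

    // Local stand-ins mirroring VehicleLightSwitch and VehicleLightState above.
    enum class LightSwitch : int32_t { OFF = 0, ON = 1, DAYTIME_RUNNING = 2, AUTOMATIC = 0x100 };
    enum class LightState : int32_t { OFF = 0, ON = 1, DAYTIME_RUNNING = 2 };

    // When the user selects AUTOMATIC, the vehicle decides the actual state,
    // so the reported state may differ from the switch setting.
    LightState reportedHeadlightState(LightSwitch userSetting, bool isDarkOutside) {
        switch (userSetting) {
            case LightSwitch::OFF: return LightState::OFF;
            case LightSwitch::ON: return LightState::ON;
            case LightSwitch::DAYTIME_RUNNING: return LightState::DAYTIME_RUNNING;
            case LightSwitch::AUTOMATIC: return isDarkOutside ? LightState::ON : LightState::OFF;
        }
        return LightState::OFF;
    }

    int main() {
        // Switch set to AUTOMATIC, but the state reports ON because it is dark.
        std::cout << static_cast<int32_t>(reportedHeadlightState(LightSwitch::AUTOMATIC, true)) << "\n";
    }
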
When this is set, the audio volume level for each context - * will be retrieved from the property when the system starts up. - * And external audio module is also expected to adjust volume automatically - * whenever there is an audio context change. - * When this flag is not set, android side will assume that there is no - * persistent storage and the value stored in the android side will be used to - * initialize the volume level, and android side will set volume level - * of each physical stream whenever there is an audio context change. + * Default type if the vehicle does not know or report the EV connector + * type. */ - PERSISTENT_STORAGE = 0x1, + UNKNOWN = 0, + IEC_TYPE_1_AC = 1, // aka Yazaki + IEC_TYPE_2_AC = 2, // aka Mennekes + IEC_TYPE_3_AC = 3, // aka Scame + IEC_TYPE_4_DC = 4, // aka CHAdeMO + IEC_TYPE_1_CCS_DC = 5, // aka Combo 1 + IEC_TYPE_2_CCS_DC = 6, // aka Combo 2 + TESLA_ROADSTER = 7, + TESLA_HPWC = 8, + TESLA_SUPERCHARGER = 9, + GBT_AC = 10, + GBT_DC = 11, /** - * [DEPRECATED] - * When this flag is set, the H/W can support only single master volume for - * all streams. There is no way to set volume level differently for each stream - * or context. + * Connector type to use when no other types apply. Before using this + * value, work with Google to see if the EvConnectorType enum can be + * extended with an appropriate value. */ - MASTER_VOLUME_ONLY = 0x2, + OTHER = 101, }; /** - * enum to represent audio volume state. + * Used by INFO_FUEL_DOOR_LOCATION/INFO_CHARGE_PORT_LOCATION to enumerate fuel door or + * ev port location. */ -enum VehicleAudioVolumeState : int32_t { - STATE_OK = 0, - +enum PortLocationType : int32_t { /** - * Audio volume has reached volume limit set in - * VehicleProperty#AUDIO_VOLUME_LIMIT and user's request to increase volume - * further is not allowed. + * Default type if the vehicle does not know or report the Fuel door + * and ev port location. */ - STATE_LIMIT_REACHED = 1, -}; - -/** - * Index in int32Values for VehicleProperty#AUDIO_VOLUME property. - */ -enum VehicleAudioVolumeIndex : int32_t { - STREAM = 0, - VOLUME = 1, - STATE = 2, + UNKNOWN = 0, + FRONT_LEFT = 1, + FRONT_RIGHT = 2, + REAR_RIGHT = 3, + REAR_LEFT = 4, + FRONT = 5, + REAR = 6, }; /** - * Index in int32Values for VehicleProperty#AUDIO_VOLUME_LIMIT property. + * Used by INFO_FUEL_TYPE to enumerate the type of fuels this vehicle uses. + * Consistent with projection protocol. */ -enum VehicleAudioVolumeLimitIndex : int32_t { - STREAM = 0, - MAX_VOLUME = 1, +enum FuelType : int32_t { + /** + * Fuel type to use if the HU does not know on which types of fuel the vehicle + * runs. The use of this value is generally discouraged outside of aftermarket units. + */ + FUEL_TYPE_UNKNOWN = 0, + /** Unleaded gasoline */ + FUEL_TYPE_UNLEADED = 1, + /** Leaded gasoline */ + FUEL_TYPE_LEADED = 2, + /** Diesel #1 */ + FUEL_TYPE_DIESEL_1 = 3, + /** Diesel #2 */ + FUEL_TYPE_DIESEL_2 = 4, + /** Biodiesel */ + FUEL_TYPE_BIODIESEL = 5, + /** 85% ethanol/gasoline blend */ + FUEL_TYPE_E85 = 6, + /** Liquified petroleum gas */ + FUEL_TYPE_LPG = 7, + /** Compressed natural gas */ + FUEL_TYPE_CNG = 8, + /** Liquified natural gas */ + FUEL_TYPE_LNG = 9, + /** Electric */ + FUEL_TYPE_ELECTRIC = 10, + /** Hydrogen fuel cell */ + FUEL_TYPE_HYDROGEN = 11, + /** + * Fuel type to use when no other types apply. Before using this value, work with + * Google to see if the FuelType enum can be extended with an appropriate value. 
+ */ + FUEL_TYPE_OTHER = 12, }; /** - * Index in int32Values for VehicleProperty#AUDIO_ROUTING_POLICY property. + * Bit flags for fan direction */ -enum VehicleAudioRoutingPolicyIndex : int32_t { - STREAM = 0, - CONTEXTS = 1, +enum VehicleHvacFanDirection : int32_t { + FACE = 0x1, + FLOOR = 0x2, + DEFROST = 0x4, }; -/** - * Flag to be used in VehiclePropConfig#configFlags for - * VehicleProperty#AUDIO_HW_VARIANT. - */ -enum VehicleAudioHwVariantConfigFlag : int32_t { - /** - * Flag to tell that radio is internal to android and radio must - * be treated like other android stream like media. - * When this flag is not set or AUDIO_HW_VARIANT does not exist, - * radio is treated as external module. This may affect audio focus - * handling as well. - */ - INTERNAL_RADIO_FLAG = 0x1, +enum VehicleOilLevel : int32_t { + /** + * Oil level values + */ + CRITICALLY_LOW = 0, + LOW = 1, + NORMAL = 2, + HIGH = 3, + ERROR = 4, }; enum VehicleApPowerStateConfigFlag : int32_t /* NOTE: type is guessed */ { @@ -2324,7 +2229,7 @@ enum VehicleApPowerStateConfigFlag : int32_t /* NOTE: type is guessed */ { CONFIG_SUPPORT_TIMER_POWER_ON_FLAG = 0x2, }; -enum VehicleApPowerState : int32_t /* NOTE: type is guessed */ { +enum VehicleApPowerStateReq : int32_t { /** vehicle HAL will never publish this state to AP */ OFF = 0, @@ -2349,6 +2254,16 @@ enum VehicleApPowerState : int32_t /* NOTE: type is guessed */ { SHUTDOWN_PREPARE = 4, }; +/** + * Index in int32Values for VehicleProperty#AP_POWER_STATE_REQ property. + */ +enum VehicleApPowerStateReqIndex : int32_t { + STATE = 0, + ADDITIONAL = 1, +}; + + + enum VehicleApPowerStateShutdownParam : int32_t { /** AP must shutdown immediately. Postponing is not allowed. */ SHUTDOWN_IMMEDIATELY = 1, @@ -2360,7 +2275,7 @@ enum VehicleApPowerStateShutdownParam : int32_t { SHUTDOWN_ONLY = 3, }; -enum VehicleApPowerSetState : int32_t /* NOTE: type is guessed */ { +enum VehicleApPowerStateReport : int32_t { /** * AP has finished boot up, and can start shutdown if requested by power * controller. @@ -2424,14 +2339,6 @@ enum VehicleApPowerSetState : int32_t /* NOTE: type is guessed */ { DISPLAY_ON = 0x7, }; -/** - * Index in int32Values for VehicleProperty#AP_POWER_STATE property. - */ -enum VehicleApPowerStateIndex : int32_t { - STATE = 0, - ADDITIONAL = 1, -}; - /** * Enum to represent bootup reason. */ @@ -2466,31 +2373,12 @@ enum VehicleHwKeyInputAction : int32_t { }; enum VehicleDisplay : int32_t { - /** center console */ + /** The primary Android display (for example, center console) */ MAIN = 0, INSTRUMENT_CLUSTER = 1, }; -/** - * Represents instrument cluster type available in system - */ -enum VehicleInstrumentClusterType : int32_t { - /** Android has no access to instument cluster */ - NONE = 0, - - /** - * Instrument cluster can communicate through vehicle hal with additional - * properties to exchange meta-data - */ - HAL_INTERFACE = 1, - - /** - * Instrument cluster is external display where android can render contents - */ - EXTERNAL_DISPLAY = 2, -}; - /** * Units used for int or float type with no attached enum types. */ @@ -2511,48 +2399,42 @@ enum VehicleUnit : int32_t { NANO_SECS = 0x50, SECS = 0x53, YEAR = 0x59, + KILOPASCAL = 0x70, + + // Electrical Units + WATT_HOUR = 0x60, + MILLIAMPERE = 0x61, + MILLIVOLT = 0x62, + MILLIWATTS = 0x63, }; - /** - * This describes how value of property can change. - */ +/** + * This describes how value of property can change. + */ enum VehiclePropertyChangeMode : int32_t { - /** - * Property of this type must never be changed. 
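
One possible way for a client to decode the AP power state request with the index enum added above is sketched below in standalone C++; the stand-in enums repeat only the values shown here, and treating the ADDITIONAL slot as a VehicleApPowerStateShutdownParam during SHUTDOWN_PREPARE is an assumption made for the example.

    #include <cstdint>
    #include <iostream>
    #include <vector>

    // Local stand-ins mirroring the enums above (values not shown here are omitted).
    enum ApPowerStateReqIndex : int32_t { STATE = 0, ADDITIONAL = 1 };
    enum ApPowerStateReq : int32_t { OFF = 0, SHUTDOWN_PREPARE = 4 };
    enum ShutdownParam : int32_t { SHUTDOWN_IMMEDIATELY = 1, SHUTDOWN_ONLY = 3 };

    void handlePowerStateReq(const std::vector<int32_t>& int32Values) {
        const int32_t state = int32Values[ApPowerStateReqIndex::STATE];
        const int32_t param = int32Values[ApPowerStateReqIndex::ADDITIONAL];  // 0 when unused
        if (state == SHUTDOWN_PREPARE && param == SHUTDOWN_IMMEDIATELY) {
            std::cout << "shut down now; postponing is not allowed\n";
        } else if (state == SHUTDOWN_PREPARE) {
            std::cout << "prepare for shutdown, param=" << param << "\n";
        }
    }

    int main() { handlePowerStateReq({SHUTDOWN_PREPARE, SHUTDOWN_IMMEDIATELY}); }
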
Subscription is not supported - * for these properties. - */ - STATIC = 0x00, - - /** - * Property of this type must be reported when there is a change. - * IVehicle#get call must return the current value. - * Set operation for this property is assumed to be asynchronous. When the - * property is read (using IVehicle#get) after IVehicle#set, it may still - * return old value until underlying H/W backing this property has actually - * changed the state. Once state is changed, the property must dispatch - * changed value as event. - */ - ON_CHANGE = 0x01, - - /** - * Property of this type change continuously and requires fixed rate of - * sampling to retrieve the data. - */ - CONTINUOUS = 0x02, - - /** - * Property of this type may be polled to get the current value. - */ - POLL = 0x03, - - /** - * This is for property where change event must be sent only when the - * value is set from external component. Normal value change must not trigger - * event. For example, clock property can send change event only when it is - * set, outside android, for case like user setting time or time getting - * update. There is no need to send it per every value change. - */ - ON_SET = 0x04, + /** + * Property of this type must never be changed. Subscription is not supported + * for these properties. + */ + STATIC = 0x00, + + /** + * Properties of this type must report when there is a change. + * IVehicle#get call must return the current value. + * Set operation for this property is assumed to be asynchronous. When the + * property is read (using IVehicle#get) after IVehicle#set, it may still + * return old value until underlying H/W backing this property has actually + * changed the state. Once state is changed, the property must dispatch + * changed value as event. + */ + ON_CHANGE = 0x01, + + /** + * Properties of this type change continuously and require a fixed rate of + * sampling to retrieve the data. Implementers may choose to send extra + * notifications on significant value changes. + */ + CONTINUOUS = 0x02, }; /** @@ -2570,28 +2452,35 @@ enum VehiclePropertyAccess : int32_t { }; /** - * Car states. - * - * The driving states determine what features of the UI will be accessible. + * Property status is a dynamic value that may change based on the vehicle state. */ -enum VehicleDrivingStatus : int32_t { - UNRESTRICTED = 0x00, - NO_VIDEO = 0x01, - NO_KEYBOARD_INPUT = 0x02, - NO_VOICE_INPUT = 0x04, - NO_CONFIG = 0x08, - LIMIT_MESSAGE_LEN = 0x10, +enum VehiclePropertyStatus : int32_t { + /** Property is available and behaving normally */ + AVAILABLE = 0x00, + /** + * A property in this state is not available for reading and writing. This + * is a transient state that depends on the availability of the underlying + * implementation (e.g. hardware or driver). It MUST NOT be used to + * represent features that this vehicle is always incapable of. A get() of + * a property in this state MAY return an undefined value, but MUST + * correctly describe its status as UNAVAILABLE A set() of a property in + * this state MAY return NOT_AVAILABLE. The HAL implementation MUST ignore + * the value of the status field when writing a property value coming from + * Android. + */ + UNAVAILABLE = 0x01, + /** There is an error with this property. */ + ERROR = 0x02, }; /** * Various gears which can be selected by user and chosen in system. 
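
The three status values above suggest a simple client-side handling pattern, sketched here in standalone C++ with a local stand-in enum; the retry and fault reactions are an example policy rather than mandated behaviour.

    #include <cstdint>
    #include <iostream>

    // Local stand-in mirroring VehiclePropertyStatus above.
    enum class PropStatus : int32_t { AVAILABLE = 0, UNAVAILABLE = 1, ERROR = 2 };

    void onGetResult(PropStatus status, int32_t value) {
        switch (status) {
            case PropStatus::AVAILABLE:
                std::cout << "value = " << value << "\n";
                break;
            case PropStatus::UNAVAILABLE:
                // Transient: the value may be undefined, so ignore it and retry later.
                std::cout << "temporarily unavailable\n";
                break;
            case PropStatus::ERROR:
                std::cout << "property error, surface a fault\n";
                break;
        }
    }

    int main() { onGetResult(PropStatus::UNAVAILABLE, 0); }
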
*/ -enum VehicleGear: int32_t { +enum VehicleGear : int32_t { GEAR_NEUTRAL = 0x0001, GEAR_REVERSE = 0x0002, GEAR_PARK = 0x0004, GEAR_DRIVE = 0x0008, - GEAR_LOW = 0x0010, GEAR_1 = 0x0010, GEAR_2 = 0x0020, GEAR_3 = 0x0040, @@ -2603,32 +2492,6 @@ enum VehicleGear: int32_t { GEAR_9 = 0x1000, }; -/** - * Various zones in the car. - * - * Zones are used for Air Conditioning purposes and divide the car into physical - * area zones. - */ -enum VehicleAreaZone : int32_t { - ROW_1_LEFT = 0x00000001, - ROW_1_CENTER = 0x00000002, - ROW_1_RIGHT = 0x00000004, - ROW_1 = 0x00000008, - ROW_2_LEFT = 0x00000010, - ROW_2_CENTER = 0x00000020, - ROW_2_RIGHT = 0x00000040, - ROW_2 = 0x00000080, - ROW_3_LEFT = 0x00000100, - ROW_3_CENTER = 0x00000200, - ROW_3_RIGHT = 0x00000400, - ROW_3 = 0x00000800, - ROW_4_LEFT = 0x00001000, - ROW_4_CENTER = 0x00002000, - ROW_4_RIGHT = 0x00004000, - ROW_4 = 0x00008000, - WHOLE_CABIN = 0x80000000, -}; - /** * Various Seats in the car. */ @@ -2648,15 +2511,18 @@ enum VehicleAreaSeat : int32_t { * Various windshields/windows in the car. */ enum VehicleAreaWindow : int32_t { - FRONT_WINDSHIELD = 0x0001, - REAR_WINDSHIELD = 0x0002, - ROOF_TOP = 0x0004, - ROW_1_LEFT = 0x0010, - ROW_1_RIGHT = 0x0020, - ROW_2_LEFT = 0x0100, - ROW_2_RIGHT = 0x0200, - ROW_3_LEFT = 0x1000, - ROW_3_RIGHT = 0x2000, + FRONT_WINDSHIELD = 0x00000001, + REAR_WINDSHIELD = 0x00000002, + ROW_1_LEFT = 0x00000010, + ROW_1_RIGHT = 0x00000040, + ROW_2_LEFT = 0x00000100, + ROW_2_RIGHT = 0x00000400, + ROW_3_LEFT = 0x00001000, + ROW_3_RIGHT = 0x00004000, + + ROOF_TOP_1 = 0x00010000, + ROOF_TOP_2 = 0x00020000, + }; enum VehicleAreaDoor : int32_t { @@ -2680,7 +2546,6 @@ enum VehicleTurnSignal : int32_t { NONE = 0x00, RIGHT = 0x01, LEFT = 0x02, - EMERGENCY = 0x04, }; struct VehicleAreaConfig { @@ -2713,26 +2578,11 @@ struct VehiclePropConfig { */ VehiclePropertyChangeMode changeMode; - /** - * Some of the properties may have associated areas (for example, some hvac - * properties are associated with VehicleAreaZone), in these - * cases the config may contain an ORed value for the associated areas. - */ - int32_t supportedAreas; - /** * Contains per-area configuration. */ vec areaConfigs; - /** - * Configuration flags for this property. - * - * For example, it may store the number of presets that are stored by the - * radio module. - */ - int32_t configFlags; - /** Contains additional configuration parameters */ vec configArray; @@ -2761,9 +2611,6 @@ struct VehiclePropConfig { * events. */ struct VehiclePropValue { - /** Property identifier */ - int32_t prop; - /** Time is elapsed nanoseconds since boot */ int64_t timestamp; @@ -2773,6 +2620,12 @@ struct VehiclePropValue { */ int32_t areaId; + /** Property identifier */ + int32_t prop; + + /** Status of the property */ + VehiclePropertyStatus status; + /** * Contains value for a single property. Depending on property data type of * this property (VehiclePropetyType) one field of this structure must be filled in. @@ -2809,11 +2662,11 @@ enum VehicleIgnitionState : int32_t { /** Steering wheel is locked */ LOCK = 1, - /** - * Steering wheel is not locked, engine and all accessories are OFF. If - * car can be in LOCK and OFF state at the same time than HAL must report - * LOCK state. - */ + /** + * Steering wheel is not locked, engine and all accessories are OFF. If + * car can be in LOCK and OFF state at the same time than HAL must report + * LOCK state. 
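
The reordered VehiclePropValue fields can be illustrated with a simplified standalone C++ stand-in; 0x0BC0 below is only the base offset of WINDOW_POS rather than a complete property ID, and std::chrono::steady_clock is a rough stand-in for elapsed time since boot.

    #include <chrono>
    #include <cstdint>
    #include <vector>

    // Local stand-in for VehiclePropValue; the real struct carries one value
    // container per property data type.
    struct PropValue {
        int64_t timestamp;                // elapsed nanoseconds since boot
        int32_t areaId;                   // 0 for GLOBAL properties
        int32_t prop;                     // property identifier
        int32_t status;                   // 0 = AVAILABLE
        std::vector<int32_t> int32Values;
    };

    int main() {
        const int32_t kRow1LeftWindow = 0x00000010;  // VehicleAreaWindow::ROW_1_LEFT
        const int32_t kWindowPosBase = 0x0BC0;       // base offset only; the real ID
                                                     // also ORs in group, type and area
        const int64_t nowNs = std::chrono::duration_cast<std::chrono::nanoseconds>(
                                  std::chrono::steady_clock::now().time_since_epoch())
                                  .count();
        PropValue value{nowNs, kRow1LeftWindow, kWindowPosBase, /*status=*/0, {5}};
        (void)value;  // would be passed to IVehicle::set() in a real client
        return 0;
    }
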
+ */ OFF, /** @@ -2832,33 +2685,6 @@ enum VehicleIgnitionState : int32_t { START }; - -/** - * Represent the operation where the current error has happened. - */ -enum VehiclePropertyOperation : int32_t { - /** - * Generic error to this property which is not tied to any operation. - */ - GENERIC = 0, - - /** - * Error happened while handling property set. - */ - SET = 1, - - /** - * Error happened while handling property get. - */ - GET = 2, - - /** - * Error happened while handling property subscription. - */ - SUBSCRIBE = 3, -}; - - enum SubscribeFlags : int32_t { UNDEFINED = 0x0, @@ -2866,15 +2692,13 @@ enum SubscribeFlags : int32_t { * Subscribe to event that was originated in vehicle HAL * (most likely this event came from the vehicle itself). */ - HAL_EVENT = 0x1, + EVENTS_FROM_CAR = 0x1, /** * Use this flag to subscribe on events when IVehicle#set(...) was called by * vehicle HAL's client (e.g. Car Service). */ - SET_CALL = 0x2, - - DEFAULT = HAL_EVENT, + EVENTS_FROM_ANDROID = 0x2, }; /** @@ -2884,12 +2708,6 @@ struct SubscribeOptions { /** Property to subscribe */ int32_t propId; - /** - * Area ids - this must be a bit mask of areas to subscribe or 0 to subscribe - * to all areas. - */ - int32_t vehicleAreas; - /** * Sample rate in Hz. * @@ -2901,7 +2719,7 @@ struct SubscribeOptions { */ float sampleRate; - /** Flags that indicate what kind of events listen to. */ + /** Flags that indicate to which event sources to listen. */ SubscribeFlags flags; }; @@ -2929,7 +2747,7 @@ enum StatusCode : int32_t { INTERNAL_ERROR = 5, }; -enum Wheel : int32_t { +enum VehicleAreaWheel : int32_t { UNKNOWN = 0x0, LEFT_FRONT = 0x1, @@ -3191,11 +3009,11 @@ enum DiagnosticFloatSensorIndex : int32_t { ACCELERATOR_PEDAL_POSITION_E = 54, /* PID 0x4A */ ACCELERATOR_PEDAL_POSITION_F = 55, /* PID 0x4B */ COMMANDED_THROTTLE_ACTUATOR = 56, /* PID 0x4C */ - ETHANOL_FUEL_PERCENTAGE = 57,/* PID 0x52 */ + ETHANOL_FUEL_PERCENTAGE = 57, /* PID 0x52 */ ABSOLUTE_EVAPORATION_SYSTEM_VAPOR_PRESSURE = 58, /* PID 0x53 */ SHORT_TERM_SECONDARY_OXYGEN_SENSOR_TRIM_BANK1 = 59, /* PID 0x55 */ SHORT_TERM_SECONDARY_OXYGEN_SENSOR_TRIM_BANK2 = 60, /* PID 0x57 */ - SHORT_TERM_SECONDARY_OXYGEN_SENSOR_TRIM_BANK3 = 61,/* PID 0x55 */ + SHORT_TERM_SECONDARY_OXYGEN_SENSOR_TRIM_BANK3 = 61, /* PID 0x55 */ SHORT_TERM_SECONDARY_OXYGEN_SENSOR_TRIM_BANK4 = 62, /* PID 0x57 */ LONG_TERM_SECONDARY_OXYGEN_SENSOR_TRIM_BANK1 = 63, /* PID 0x56 */ LONG_TERM_SECONDARY_OXYGEN_SENSOR_TRIM_BANK2 = 64, /* PID 0x58 */ @@ -3295,9 +3113,42 @@ enum VmsMessageType : int32_t { * A message from the VMS service to the subscribers or from the publishers to the VMS service * with a serialized VMS data packet as defined in the VMS protocol. * - * This message type uses enum VmsBaseMessageIntegerValuesIndex. + * This message type uses enum VmsMessageWithLayerAndPublisherIdIntegerValuesIndex. */ DATA = 12, + + /** + * A request from the publishers to the VMS service to get a Publisher ID for a serialized VMS + * provider description packet as defined in the VMS protocol. + * + * This message type uses enum VmsBaseMessageIntegerValuesIndex. + */ + PUBLISHER_ID_REQUEST = 13, + + /** + * A response from the VMS service to the publisher that contains a provider description packet + * and the publisher ID assigned to it. + * + * This message type uses enum VmsPublisherInformationIntegerValuesIndex. + */ + PUBLISHER_ID_RESPONSE = 14, + + /** + * A request from the subscribers to the VMS service to get information for a Publisher ID. 
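
The trimmed-down subscription options described above reduce to three fields, sketched below with local C++ stand-ins; the 10 Hz rate and the property offset are illustrative values only.

    #include <cstdint>

    // Local stand-ins mirroring SubscribeFlags and SubscribeOptions above.
    enum SubscribeFlags : int32_t { EVENTS_FROM_CAR = 0x1, EVENTS_FROM_ANDROID = 0x2 };

    struct SubscribeOptions {
        int32_t propId;        // property to subscribe to
        float sampleRate;      // Hz; only meaningful for CONTINUOUS properties
        SubscribeFlags flags;  // which event sources to listen to
    };

    int main() {
        // Subscribe to car-originated events of a continuous property at 10 Hz.
        // The property ID below is a placeholder, not a real VehicleProperty value.
        SubscribeOptions options{/*propId=*/0x0207, /*sampleRate=*/10.0f, EVENTS_FROM_CAR};
        (void)options;
        return 0;
    }
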
+ * + * This message type uses enum VmsPublisherInformationIntegerValuesIndex. + */ + PUBLISHER_INFORMATION_REQUEST = 15, + + /** + * A response from the VMS service to the subscribers that contains a provider description packet + * and the publisher ID assigned to it. + * + * This message type uses enum VmsPublisherInformationIntegerValuesIndex. + */ + PUBLISHER_INFORMATION_RESPONSE = 16, + + LAST_VMS_MESSAGE_TYPE = PUBLISHER_INFORMATION_RESPONSE, }; /** @@ -3325,7 +3176,8 @@ enum VmsMessageWithLayerIntegerValuesIndex : VmsBaseMessageIntegerValuesIndex { /* * A VMS message with a layer and publisher ID is sent as part of a - * VmsMessageType.SUBSCRIBE_TO_PUBLISHER and VmsMessageType.UNSUBSCRIBE_TO_PUBLISHER messages. + * VmsMessageType.SUBSCRIBE_TO_PUBLISHER, VmsMessageType.UNSUBSCRIBE_TO_PUBLISHER messages and + * VmsMessageType.DATA . */ enum VmsMessageWithLayerAndPublisherIdIntegerValuesIndex : VmsMessageWithLayerIntegerValuesIndex { PUBLISHER_ID = 4, @@ -3387,10 +3239,18 @@ enum VmsSubscriptionsStateIntegerValuesIndex : VmsBaseMessageIntegerValuesIndex * - Layer version * - Number of publisher IDs (N) * - N x publisher ID -*/ + */ enum VmsAvailabilityStateIntegerValuesIndex : VmsBaseMessageIntegerValuesIndex { SEQUENCE_NUMBER = 1, NUMBER_OF_ASSOCIATED_LAYERS = 2, LAYERS_START = 3, }; +/* + * Publishers send the VMS service their information and assigned in response a publisher ID. + * Subscribers can request the publisher information for a publisher ID they received in other messages. + */ +enum VmsPublisherInformationIntegerValuesIndex : VmsBaseMessageIntegerValuesIndex { + PUBLISHER_ID = 1, +}; + diff --git a/biometrics/fingerprint/2.1/default/android.hardware.biometrics.fingerprint@2.1-service.rc b/biometrics/fingerprint/2.1/default/android.hardware.biometrics.fingerprint@2.1-service.rc index aa767a6009ed506eb710b39066f5a10a0d5d7cea..9bfd3bac6e6fe7e23498935187cf4296390bccee 100644 --- a/biometrics/fingerprint/2.1/default/android.hardware.biometrics.fingerprint@2.1-service.rc +++ b/biometrics/fingerprint/2.1/default/android.hardware.biometrics.fingerprint@2.1-service.rc @@ -1,7 +1,8 @@ -service fps_hal /vendor/bin/hw/android.hardware.biometrics.fingerprint@2.1-service +service vendor.fps_hal /vendor/bin/hw/android.hardware.biometrics.fingerprint@2.1-service # "class hal" causes a race condition on some devices due to files created # in /data. As a workaround, postpone startup until later in boot once # /data is mounted. 
class late_start user system group system input + writepid /dev/cpuset/system-background/tasks \ No newline at end of file diff --git a/biometrics/fingerprint/2.1/vts/functional/VtsHalBiometricsFingerprintV2_1TargetTest.cpp b/biometrics/fingerprint/2.1/vts/functional/VtsHalBiometricsFingerprintV2_1TargetTest.cpp index a7f40319afe680797ebeceb1e3f234492aa29c09..9911038aee2c5f1794177d9c986c13d1b4ab4a98 100644 --- a/biometrics/fingerprint/2.1/vts/functional/VtsHalBiometricsFingerprintV2_1TargetTest.cpp +++ b/biometrics/fingerprint/2.1/vts/functional/VtsHalBiometricsFingerprintV2_1TargetTest.cpp @@ -19,6 +19,7 @@ #include #include #include +#include #include #include #include @@ -28,6 +29,7 @@ #include #include +using android::base::GetUintProperty; using android::Condition; using android::hardware::biometrics::fingerprint::V2_1::IBiometricsFingerprint; using android::hardware::biometrics::fingerprint::V2_1::IBiometricsFingerprintClientCallback; @@ -44,7 +46,7 @@ namespace { static const uint32_t kTimeout = 3; static const std::chrono::seconds kTimeoutInSeconds = std::chrono::seconds(kTimeout); static const uint32_t kGroupId = 99; -static const std::string kTmpDir = "/data/system/users/0/fpdata/"; +static std::string kTmpDir = ""; static const uint32_t kIterations = 1000; // Wait for a callback to occur (signaled by the given future) up to the @@ -199,9 +201,25 @@ class FingerprintHidlTest : public ::testing::VtsHalHidlTargetTestBase { FingerprintHidlEnvironment::Instance()->getServiceName()); ASSERT_FALSE(mService == nullptr); - // Create an active group - // FP service can only write to /data/system/users/*/fpdata/ due to - // SELinux Policy and Linux Dir Permissions + /* + * Devices shipped from now on will instead store + * fingerprint data under /data/vendor_de//fpdata. + * Support for /data/vendor_de and /data/vendor_ce has been added to vold. 
+ */ + + uint64_t api_level = GetUintProperty("ro.product.first_api_level", 0); + if (api_level == 0) { + api_level = GetUintProperty("ro.build.version.sdk", 0); + } + ASSERT_TRUE(api_level != 0); + + // 27 is the API number for O-MR1 + if (api_level <= 27) { + kTmpDir = "/data/system/users/0/fpdata/"; + } else { + kTmpDir = "/data/vendor_de/0/fpdata/"; + } + Return res = mService->setActiveGroup(kGroupId, kTmpDir); ASSERT_EQ(RequestStatus::SYS_OK, static_cast(res)); } diff --git a/bluetooth/1.0/default/android.hardware.bluetooth@1.0-service.rc b/bluetooth/1.0/default/android.hardware.bluetooth@1.0-service.rc index 8635366c6c3ebe8198eb174ab72253fd88a31934..a63444162ca9804c808aaf591ef9fb69922bccc6 100644 --- a/bluetooth/1.0/default/android.hardware.bluetooth@1.0-service.rc +++ b/bluetooth/1.0/default/android.hardware.bluetooth@1.0-service.rc @@ -1,4 +1,4 @@ -service bluetooth-1-0 /vendor/bin/hw/android.hardware.bluetooth@1.0-service +service vendor.bluetooth-1-0 /vendor/bin/hw/android.hardware.bluetooth@1.0-service class hal capabilities BLOCK_SUSPEND NET_ADMIN SYS_NICE user bluetooth @@ -6,10 +6,10 @@ service bluetooth-1-0 /vendor/bin/hw/android.hardware.bluetooth@1.0-service writepid /dev/stune/foreground/tasks on property:vts.native_server.on=1 && property:ro.build.type=userdebug - stop bluetooth-1-0 + stop vendor.bluetooth-1-0 on property:vts.native_server.on=1 && property:ro.build.type=eng - stop bluetooth-1-0 + stop vendor.bluetooth-1-0 on property:vts.native_server.on=0 && property:ro.build.type=userdebug - start bluetooth-1-0 + start vendor.bluetooth-1-0 on property:vts.native_server.on=0 && property:ro.build.type=eng - start bluetooth-1-0 + start vendor.bluetooth-1-0 diff --git a/bluetooth/a2dp/1.0/vts/functional/Android.bp b/bluetooth/a2dp/1.0/vts/functional/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..f1ffc4592cfa97b0a7224e8a458ff14a09352a7a --- /dev/null +++ b/bluetooth/a2dp/1.0/vts/functional/Android.bp @@ -0,0 +1,26 @@ +// +// Copyright (C) 2018 The Android Open Source Project +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +cc_test { + name: "VtsHalBluetoothA2dpV1_0TargetTest", + defaults: ["VtsHalTargetTestDefaults"], + srcs: ["VtsHalBluetoothA2dpV1_0TargetTest.cpp"], + static_libs: [ + "android.hardware.bluetooth@1.0", + "android.hardware.bluetooth.a2dp@1.0", + "libbluetooth-types", + ], +} diff --git a/bluetooth/a2dp/1.0/vts/functional/VtsHalBluetoothA2dpV1_0TargetTest.cpp b/bluetooth/a2dp/1.0/vts/functional/VtsHalBluetoothA2dpV1_0TargetTest.cpp new file mode 100644 index 0000000000000000000000000000000000000000..1a0342f3341ea8f06626dd079aa0bef4186903a0 --- /dev/null +++ b/bluetooth/a2dp/1.0/vts/functional/VtsHalBluetoothA2dpV1_0TargetTest.cpp @@ -0,0 +1,126 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "bluetooth_a2dp_hidl_hal_test" + +#include +#include +#include +#include +#include + +#include +#include +#include + +using ::android::hardware::bluetooth::a2dp::V1_0::IBluetoothAudioHost; +using ::android::hardware::bluetooth::a2dp::V1_0::IBluetoothAudioOffload; +using ::android::hardware::bluetooth::a2dp::V1_0::Status; +using ::android::hardware::bluetooth::a2dp::V1_0::CodecType; +using ::android::hardware::bluetooth::a2dp::V1_0::SampleRate; +using ::android::hardware::bluetooth::a2dp::V1_0::BitsPerSample; +using ::android::hardware::bluetooth::a2dp::V1_0::ChannelMode; +using ::android::hardware::bluetooth::a2dp::V1_0::CodecConfiguration; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::sp; + +// Test environment for Bluetooth HIDL A2DP HAL. +class BluetoothA2dpHidlEnvironment : public ::testing::VtsHalHidlTargetTestEnvBase { + public: + // get the test environment singleton + static BluetoothA2dpHidlEnvironment* Instance() { + static BluetoothA2dpHidlEnvironment* instance = new BluetoothA2dpHidlEnvironment; + return instance; + } + + virtual void registerTestServices() override { registerTestService(); } + + private: + BluetoothA2dpHidlEnvironment() {} +}; + +// The main test class for Bluetooth A2DP HIDL HAL. +class BluetoothA2dpHidlTest : public ::testing::VtsHalHidlTargetTestBase { + public: + virtual void SetUp() override { + // currently test passthrough mode only + audio_offload = ::testing::VtsHalHidlTargetTestBase::getService( + BluetoothA2dpHidlEnvironment::Instance()->getServiceName()); + ASSERT_NE(audio_offload, nullptr); + + audio_host = new BluetoothAudioHost(*this); + ASSERT_NE(audio_host, nullptr); + + codec.codecType = CodecType::AAC; + codec.sampleRate = SampleRate::RATE_44100; + codec.bitsPerSample = BitsPerSample::BITS_16; + codec.channelMode = ChannelMode::STEREO; + codec.encodedAudioBitrate = 320000; + codec.peerMtu = 1000; + } + + virtual void TearDown() override {} + + // A simple test implementation of IBluetoothAudioHost. + class BluetoothAudioHost + : public ::testing::VtsHalHidlTargetCallbackBase, + public IBluetoothAudioHost { + BluetoothA2dpHidlTest& parent_; + + public: + BluetoothAudioHost(BluetoothA2dpHidlTest& parent) : parent_(parent){}; + virtual ~BluetoothAudioHost() = default; + + Return startStream() override { + parent_.audio_offload->streamStarted(Status::SUCCESS); + return Void(); + }; + + Return suspendStream() override { + parent_.audio_offload->streamSuspended(Status::SUCCESS); + return Void(); + }; + + Return stopStream() override { return Void(); }; + }; + + // audio_host is for the Audio HAL to send stream start/suspend/stop commands to Bluetooth + sp audio_host; + // audio_offload is for the Bluetooth HAL to report session started/ended and handled audio + // stream started/suspended + sp audio_offload; + // codec is the currently used codec + CodecConfiguration codec; +}; + +// Empty test: Initialize()/Close() are called in SetUp()/TearDown(). 
+TEST_F(BluetoothA2dpHidlTest, InitializeAndClose) {} + +// Test start and end session +TEST_F(BluetoothA2dpHidlTest, StartAndEndSession) { + EXPECT_EQ(Status::SUCCESS, audio_offload->startSession(audio_host, codec)); + audio_offload->endSession(); +} + +int main(int argc, char** argv) { + ::testing::AddGlobalTestEnvironment(BluetoothA2dpHidlEnvironment::Instance()); + ::testing::InitGoogleTest(&argc, argv); + BluetoothA2dpHidlEnvironment::Instance()->init(&argc, argv); + int status = RUN_ALL_TESTS(); + LOG(INFO) << "Test result = " << status; + return status; +} diff --git a/boot/1.0/default/android.hardware.boot@1.0-service.rc b/boot/1.0/default/android.hardware.boot@1.0-service.rc index 68e7c2273aacbb930c266d0c1ea94bb4d10f0d2b..32f3a45b356739ae5bc96e3a675b00fbb6296f10 100644 --- a/boot/1.0/default/android.hardware.boot@1.0-service.rc +++ b/boot/1.0/default/android.hardware.boot@1.0-service.rc @@ -1,4 +1,4 @@ -service boot-hal-1-0 /vendor/bin/hw/android.hardware.boot@1.0-service +service vendor.boot-hal-1-0 /vendor/bin/hw/android.hardware.boot@1.0-service class early_hal user root group root diff --git a/broadcastradio/1.0/vts/functional/Android.bp b/broadcastradio/1.0/vts/functional/Android.bp index f31a2dcf8cfcb3010ffadbd63b077253cbb3c47f..7040a017794ecabdf5724171b39ec8f9a3550de0 100644 --- a/broadcastradio/1.0/vts/functional/Android.bp +++ b/broadcastradio/1.0/vts/functional/Android.bp @@ -18,5 +18,8 @@ cc_test { name: "VtsHalBroadcastradioV1_0TargetTest", defaults: ["VtsHalTargetTestDefaults"], srcs: ["VtsHalBroadcastradioV1_0TargetTest.cpp"], - static_libs: ["android.hardware.broadcastradio@1.0"], + static_libs: [ + "android.hardware.broadcastradio@1.0", + "android.hardware.broadcastradio@vts-utils-lib", + ], } diff --git a/broadcastradio/1.0/vts/functional/VtsHalBroadcastradioV1_0TargetTest.cpp b/broadcastradio/1.0/vts/functional/VtsHalBroadcastradioV1_0TargetTest.cpp index fd048db35d41b6c96ed78b4385a9c61ee8c2ab46..90c846375575b4f1186cd24456dcbf30be8e78a9 100644 --- a/broadcastradio/1.0/vts/functional/VtsHalBroadcastradioV1_0TargetTest.cpp +++ b/broadcastradio/1.0/vts/functional/VtsHalBroadcastradioV1_0TargetTest.cpp @@ -22,12 +22,12 @@ #include #include -#include #include +#include #include #include #include - +#include using ::android::sp; using ::android::Mutex; @@ -48,6 +48,7 @@ using ::android::hardware::broadcastradio::V1_0::ProgramInfo; using ::android::hardware::broadcastradio::V1_0::MetaData; using ::android::hardware::broadcastradio::V1_0::MetadataKey; using ::android::hardware::broadcastradio::V1_0::MetadataType; +using ::android::hardware::broadcastradio::vts::BroadcastRadioHidlEnvironment; #define RETURN_IF_SKIPPED \ if (skipped) { \ @@ -55,8 +56,8 @@ using ::android::hardware::broadcastradio::V1_0::MetadataType; return; \ } +static BroadcastRadioHidlEnvironment* gEnv = nullptr; // The main test class for Broadcast Radio HIDL HAL. 
- class BroadcastRadioHidlTest : public ::testing::VtsHalHidlTargetTestBase, public ::testing::WithParamInterface { protected: @@ -67,7 +68,7 @@ class BroadcastRadioHidlTest : public ::testing::VtsHalHidlTargetTestBase, skipped = false; sp factory = - ::testing::VtsHalHidlTargetTestBase::getService(); + getService(gEnv->getServiceName()); ASSERT_NE(nullptr, factory.get()); Result connectResult; @@ -731,8 +732,11 @@ INSTANTIATE_TEST_CASE_P( ::testing::Values(Class::AM_FM, Class::SAT, Class::DT)); int main(int argc, char** argv) { - ::testing::InitGoogleTest(&argc, argv); - int status = RUN_ALL_TESTS(); - ALOGI("Test result = %d", status); - return status; + gEnv = new BroadcastRadioHidlEnvironment; + ::testing::AddGlobalTestEnvironment(gEnv); + ::testing::InitGoogleTest(&argc, argv); + gEnv->init(&argc, argv); + int status = RUN_ALL_TESTS(); + ALOGI("Test result = %d", status); + return status; } diff --git a/broadcastradio/1.1/default/Android.bp b/broadcastradio/1.1/default/Android.bp index 6d26b11bd848b3f3e7d995ce964b5acbc4e392ec..52fb45b497bfd1b939ac7f947b597eec6ac6f30c 100644 --- a/broadcastradio/1.1/default/Android.bp +++ b/broadcastradio/1.1/default/Android.bp @@ -33,7 +33,8 @@ cc_binary { "service.cpp" ], static_libs: [ - "android.hardware.broadcastradio@1.1-utils-lib", + "android.hardware.broadcastradio@common-utils-1x-lib", + "android.hardware.broadcastradio@common-utils-lib", ], shared_libs: [ "android.hardware.broadcastradio@1.0", diff --git a/broadcastradio/1.1/default/BroadcastRadio.cpp b/broadcastradio/1.1/default/BroadcastRadio.cpp index 1bcfd824f7b1d2aaf39b2350f48979da37dd4501..e01812e6e61a5d4152e8d3741977c58d9925beb0 100644 --- a/broadcastradio/1.1/default/BroadcastRadio.cpp +++ b/broadcastradio/1.1/default/BroadcastRadio.cpp @@ -33,6 +33,11 @@ using V1_0::BandConfig; using V1_0::Class; using V1_0::Deemphasis; using V1_0::Rds; +using V1_1::IdentifierType; +using V1_1::ProgramSelector; +using V1_1::ProgramType; +using V1_1::Properties; +using V1_1::VendorKeyValue; using std::lock_guard; using std::map; @@ -102,7 +107,7 @@ Return BroadcastRadio::getProperties_1_1(getProperties_1_1_cb _hidl_cb) { prop10.numTuners = 1; prop10.numAudioSources = 1; prop10.supportsCapture = false; - prop11.supportsBackgroundScanning = false; + prop11.supportsBackgroundScanning = true; prop11.supportedProgramTypes = hidl_vec({ static_cast(ProgramType::AM), static_cast(ProgramType::FM), static_cast(ProgramType::AM_HD), static_cast(ProgramType::FM_HD), @@ -117,28 +122,7 @@ Return BroadcastRadio::getProperties_1_1(getProperties_1_1_cb _hidl_cb) { {"com.google.dummy", "dummy"}, }); - prop10.bands.resize(mConfig.amFmBands.size()); - for (size_t i = 0; i < mConfig.amFmBands.size(); i++) { - auto& src = mConfig.amFmBands[i]; - auto& dst = prop10.bands[i]; - - dst.type = src.type; - dst.antennaConnected = true; - dst.lowerLimit = src.lowerLimit; - dst.upperLimit = src.upperLimit; - dst.spacings = src.spacings; - - if (utils::isAm(src.type)) { - dst.ext.am.stereo = true; - } else if (utils::isFm(src.type)) { - dst.ext.fm.deemphasis = static_cast(Deemphasis::D50 | Deemphasis::D75); - dst.ext.fm.stereo = true; - dst.ext.fm.rds = static_cast(Rds::WORLD | Rds::US); - dst.ext.fm.ta = true; - dst.ext.fm.af = true; - dst.ext.fm.ea = true; - } - } + prop10.bands = getAmFmBands(); _hidl_cb(prop11); return Void(); @@ -157,7 +141,7 @@ Return BroadcastRadio::openTuner(const BandConfig& config, bool audio __un mTuner = nullptr; } - sp newTuner = new Tuner(mClassId, callback); + sp newTuner = new Tuner(this, mClassId, 
callback); mTuner = newTuner; if (mClassId == Class::AM_FM) { auto ret = newTuner->setConfiguration(config); @@ -184,6 +168,33 @@ Return BroadcastRadio::getImage(int32_t id, getImage_cb _hidl_cb) { return Void(); } +std::vector BroadcastRadio::getAmFmBands() const { + std::vector out; + for (auto&& src : mConfig.amFmBands) { + V1_0::BandConfig dst; + + dst.type = src.type; + dst.antennaConnected = true; + dst.lowerLimit = src.lowerLimit; + dst.upperLimit = src.upperLimit; + dst.spacings = src.spacings; + + if (utils::isAm(src.type)) { + dst.ext.am.stereo = true; + } else if (utils::isFm(src.type)) { + dst.ext.fm.deemphasis = static_cast(Deemphasis::D50 | Deemphasis::D75); + dst.ext.fm.stereo = true; + dst.ext.fm.rds = static_cast(Rds::WORLD | Rds::US); + dst.ext.fm.ta = true; + dst.ext.fm.af = true; + dst.ext.fm.ea = true; + } + + out.push_back(dst); + } + return out; +} + } // namespace implementation } // namespace V1_1 } // namespace broadcastradio diff --git a/broadcastradio/1.1/default/BroadcastRadio.h b/broadcastradio/1.1/default/BroadcastRadio.h index a96a2ab93364c4722f177aa267f6c31c137c1a13..d0a73d99552ab3f442e9fb22a11c20fe7ecc8577 100644 --- a/broadcastradio/1.1/default/BroadcastRadio.h +++ b/broadcastradio/1.1/default/BroadcastRadio.h @@ -29,8 +29,8 @@ namespace implementation { struct AmFmBandConfig { V1_0::Band type; - uint32_t lowerLimit; // kHz - uint32_t upperLimit; // kHz + uint32_t lowerLimit; // kHz + uint32_t upperLimit; // kHz std::vector spacings; // kHz }; @@ -65,6 +65,8 @@ struct BroadcastRadio : public V1_1::IBroadcastRadio { openTuner_cb _hidl_cb) override; Return getImage(int32_t id, getImage_cb _hidl_cb); + std::vector getAmFmBands() const; + private: std::mutex mMut; V1_0::Class mClassId; diff --git a/broadcastradio/1.1/default/BroadcastRadioFactory.cpp b/broadcastradio/1.1/default/BroadcastRadioFactory.cpp index f57bc79feb36e7611281326e9c1d043a1ac2f202..aecc96745dc9989ddb6de3321fb5447e238a52a7 100644 --- a/broadcastradio/1.1/default/BroadcastRadioFactory.cpp +++ b/broadcastradio/1.1/default/BroadcastRadioFactory.cpp @@ -36,10 +36,6 @@ static const vector gAllClasses = { Class::AM_FM, Class::SAT, Class::DT, }; -IBroadcastRadioFactory* HIDL_FETCH_IBroadcastRadioFactory(const char* name __unused) { - return new BroadcastRadioFactory(); -} - BroadcastRadioFactory::BroadcastRadioFactory() { for (auto&& classId : gAllClasses) { if (!BroadcastRadio::isSupported(classId)) continue; diff --git a/broadcastradio/1.1/default/BroadcastRadioFactory.h b/broadcastradio/1.1/default/BroadcastRadioFactory.h index 8b67ac3637dba0808f97da58a768b41a7636e728..62b65bc8ed8bf6dc42bdfc0c7621dc369fc7a4b8 100644 --- a/broadcastradio/1.1/default/BroadcastRadioFactory.h +++ b/broadcastradio/1.1/default/BroadcastRadioFactory.h @@ -26,8 +26,6 @@ namespace broadcastradio { namespace V1_1 { namespace implementation { -extern "C" IBroadcastRadioFactory* HIDL_FETCH_IBroadcastRadioFactory(const char* name); - struct BroadcastRadioFactory : public IBroadcastRadioFactory { BroadcastRadioFactory(); @@ -35,7 +33,7 @@ struct BroadcastRadioFactory : public IBroadcastRadioFactory { Return connectModule(V1_0::Class classId, connectModule_cb _hidl_cb) override; private: - std::map> mRadioModules; + std::map> mRadioModules; }; } // namespace implementation diff --git a/broadcastradio/1.1/default/OWNERS b/broadcastradio/1.1/default/OWNERS index 0c27b71865bc3b7ca11486a11e46a17937752f03..136b607b9405843f9ac7cbf6657c0bacdb13a9b6 100644 --- a/broadcastradio/1.1/default/OWNERS +++ 
b/broadcastradio/1.1/default/OWNERS @@ -1,4 +1,3 @@ # Automotive team egranata@google.com -keunyoung@google.com twasilczyk@google.com diff --git a/broadcastradio/1.1/default/Tuner.cpp b/broadcastradio/1.1/default/Tuner.cpp index 9a34cb128c9783981770782d8deb3f6763f334c8..4b49b59f927814012d3f260716946eec98e00d7d 100644 --- a/broadcastradio/1.1/default/Tuner.cpp +++ b/broadcastradio/1.1/default/Tuner.cpp @@ -17,10 +17,10 @@ #define LOG_TAG "BroadcastRadioDefault.tuner" #define LOG_NDEBUG 0 -#include "BroadcastRadio.h" #include "Tuner.h" +#include "BroadcastRadio.h" -#include +#include #include namespace android { @@ -35,6 +35,13 @@ using V1_0::Band; using V1_0::BandConfig; using V1_0::Class; using V1_0::Direction; +using V1_1::IdentifierType; +using V1_1::ProgramInfo; +using V1_1::ProgramInfoFlags; +using V1_1::ProgramListResult; +using V1_1::ProgramSelector; +using V1_1::ProgramType; +using V1_1::VendorKeyValue; using utils::HalRevision; using std::chrono::milliseconds; @@ -51,10 +58,12 @@ const struct { milliseconds tune = 150ms; } gDefaultDelay; -Tuner::Tuner(V1_0::Class classId, const sp& callback) - : mClassId(classId), +Tuner::Tuner(const sp module, V1_0::Class classId, + const sp& callback) + : mModule(module), + mClassId(classId), mCallback(callback), - mCallback1_1(ITunerCallback::castFrom(callback).withDefault(nullptr)), + mCallback1_1(V1_1::ITunerCallback::castFrom(callback).withDefault(nullptr)), mVirtualRadio(getRadio(classId)), mIsAnalogForced(false) {} @@ -64,6 +73,34 @@ void Tuner::forceClose() { mThread.cancelAll(); } +void Tuner::setConfigurationInternalLocked(const BandConfig& config) { + mAmfmConfig = config; + mAmfmConfig.antennaConnected = true; + mCurrentProgram = utils::make_selector(mAmfmConfig.type, mAmfmConfig.lowerLimit); + + if (utils::isFm(mAmfmConfig.type)) { + mVirtualRadio = std::ref(getFmRadio()); + } else { + mVirtualRadio = std::ref(getAmRadio()); + } + + mIsAmfmConfigSet = true; + mCallback->configChange(Result::OK, mAmfmConfig); + if (mCallback1_1 != nullptr) mCallback1_1->programListChanged(); +} + +bool Tuner::autoConfigureLocked(uint64_t frequency) { + for (auto&& config : mModule->getAmFmBands()) { + // The check here is rather poor, but it's enough for default implementation. 
+ if (config.lowerLimit <= frequency && config.upperLimit >= frequency) { + ALOGI("Auto-switching band to %s", toString(config).c_str()); + setConfigurationInternalLocked(config); + return true; + } + } + return false; +} + Return Tuner::setConfiguration(const BandConfig& config) { ALOGV("%s", __func__); lock_guard lk(mMut); @@ -78,19 +115,7 @@ Return Tuner::setConfiguration(const BandConfig& config) { auto task = [this, config]() { ALOGI("Setting AM/FM config"); lock_guard lk(mMut); - - mAmfmConfig = move(config); - mAmfmConfig.antennaConnected = true; - mCurrentProgram = utils::make_selector(mAmfmConfig.type, mAmfmConfig.lowerLimit); - - if (utils::isFm(mAmfmConfig.type)) { - mVirtualRadio = std::ref(getFmRadio()); - } else { - mVirtualRadio = std::ref(getAmRadio()); - } - - mIsAmfmConfigSet = true; - mCallback->configChange(Result::OK, mAmfmConfig); + setConfigurationInternalLocked(config); }; mThread.schedule(task, gDefaultDelay.config); @@ -269,7 +294,7 @@ Return Tuner::tuneByProgramSelector(const ProgramSelector& sel) { auto freq = utils::getId(sel, IdentifierType::AMFM_FREQUENCY); if (freq < mAmfmConfig.lowerLimit || freq > mAmfmConfig.upperLimit) { - return Result::INVALID_ARGUMENTS; + if (!autoConfigureLocked(freq)) return Result::INVALID_ARGUMENTS; } } else if (programType == ProgramType::DAB) { if (!utils::hasId(sel, IdentifierType::DAB_SIDECC)) return Result::INVALID_ARGUMENTS; @@ -310,9 +335,8 @@ Return Tuner::cancelAnnouncement() { Return Tuner::getProgramInformation(getProgramInformation_cb _hidl_cb) { ALOGV("%s", __func__); - return getProgramInformation_1_1([&](Result result, const ProgramInfo& info) { - _hidl_cb(result, info.base); - }); + return getProgramInformation_1_1( + [&](Result result, const ProgramInfo& info) { _hidl_cb(result, info.base); }); } Return Tuner::getProgramInformation_1_1(getProgramInformation_1_1_cb _hidl_cb) { @@ -334,7 +358,11 @@ Return Tuner::startBackgroundScan() { lock_guard lk(mMut); if (mIsClosed) return ProgramListResult::NOT_INITIALIZED; - return ProgramListResult::UNAVAILABLE; + if (mCallback1_1 != nullptr) { + mCallback1_1->backgroundScanComplete(ProgramListResult::OK); + } + + return ProgramListResult::OK; } Return Tuner::getProgramList(const hidl_vec& vendorFilter, diff --git a/broadcastradio/1.1/default/Tuner.h b/broadcastradio/1.1/default/Tuner.h index 07d31898a742bd2ec35e37456a251d9b4408aaa7..e2668d878c3dd3639be9db01cc67504b0ced98cc 100644 --- a/broadcastradio/1.1/default/Tuner.h +++ b/broadcastradio/1.1/default/Tuner.h @@ -28,8 +28,11 @@ namespace broadcastradio { namespace V1_1 { namespace implementation { +struct BroadcastRadio; + struct Tuner : public ITuner { - Tuner(V1_0::Class classId, const sp& callback); + Tuner(const sp module, V1_0::Class classId, + const sp& callback); void forceClose(); @@ -39,13 +42,13 @@ struct Tuner : public ITuner { virtual Return scan(V1_0::Direction direction, bool skipSubChannel) override; virtual Return step(V1_0::Direction direction, bool skipSubChannel) override; virtual Return tune(uint32_t channel, uint32_t subChannel) override; - virtual Return tuneByProgramSelector(const ProgramSelector& program) override; + virtual Return tuneByProgramSelector(const V1_1::ProgramSelector& program) override; virtual Return cancel() override; virtual Return cancelAnnouncement() override; virtual Return getProgramInformation(getProgramInformation_cb _hidl_cb) override; virtual Return getProgramInformation_1_1(getProgramInformation_1_1_cb _hidl_cb) override; - virtual Return startBackgroundScan() override; - 
virtual Return getProgramList(const hidl_vec& filter, + virtual Return startBackgroundScan() override; + virtual Return getProgramList(const hidl_vec& filter, getProgramList_cb _hidl_cb) override; virtual Return setAnalogForced(bool isForced) override; virtual Return isAnalogForced(isAnalogForced_cb _hidl_cb) override; @@ -55,6 +58,7 @@ struct Tuner : public ITuner { WorkerThread mThread; bool mIsClosed = false; + const sp mModule; V1_0::Class mClassId; const sp mCallback; const sp mCallback1_1; @@ -63,12 +67,14 @@ struct Tuner : public ITuner { bool mIsAmfmConfigSet = false; V1_0::BandConfig mAmfmConfig; bool mIsTuneCompleted = false; - ProgramSelector mCurrentProgram = {}; - ProgramInfo mCurrentProgramInfo = {}; + V1_1::ProgramSelector mCurrentProgram = {}; + V1_1::ProgramInfo mCurrentProgramInfo = {}; std::atomic mIsAnalogForced; utils::HalRevision getHalRev() const; - void tuneInternalLocked(const ProgramSelector& sel); + void setConfigurationInternalLocked(const V1_0::BandConfig& config); + void tuneInternalLocked(const V1_1::ProgramSelector& sel); + bool autoConfigureLocked(uint64_t frequency); }; } // namespace implementation diff --git a/broadcastradio/1.1/default/VirtualProgram.cpp b/broadcastradio/1.1/default/VirtualProgram.cpp index 7977391cddf84be6fc61aace9ef7b1384c554097..20dc1f5b5d0c49586bc8b3a9f28b57c04dc30a51 100644 --- a/broadcastradio/1.1/default/VirtualProgram.cpp +++ b/broadcastradio/1.1/default/VirtualProgram.cpp @@ -15,7 +15,7 @@ */ #include "VirtualProgram.h" -#include +#include #include "resources.h" @@ -30,6 +30,9 @@ using std::vector; using V1_0::MetaData; using V1_0::MetadataKey; using V1_0::MetadataType; +using V1_1::IdentifierType; +using V1_1::ProgramInfo; +using V1_1::VendorKeyValue; using utils::HalRevision; static MetaData createDemoBitmap(MetadataKey key, HalRevision halRev) { @@ -80,13 +83,6 @@ bool operator<(const VirtualProgram& lhs, const VirtualProgram& rhs) { if (l.primaryId.type != r.primaryId.type) return l.primaryId.type < r.primaryId.type; if (l.primaryId.value != r.primaryId.value) return l.primaryId.value < r.primaryId.value; - // A little exception for HD Radio subchannel - we check secondary ID too. - if (utils::hasId(l, IdentifierType::HD_SUBCHANNEL) && - utils::hasId(r, IdentifierType::HD_SUBCHANNEL)) { - return utils::getId(l, IdentifierType::HD_SUBCHANNEL) < - utils::getId(r, IdentifierType::HD_SUBCHANNEL); - } - return false; } diff --git a/broadcastradio/1.1/default/VirtualProgram.h b/broadcastradio/1.1/default/VirtualProgram.h index a14830d77af5d8e9b7ab5ffbdaf695733c649fda..fd7a5e7731f5aead51fb28cc7900097b5669107a 100644 --- a/broadcastradio/1.1/default/VirtualProgram.h +++ b/broadcastradio/1.1/default/VirtualProgram.h @@ -17,7 +17,7 @@ #define ANDROID_HARDWARE_BROADCASTRADIO_V1_1_VIRTUALPROGRAM_H #include -#include +#include namespace android { namespace hardware { @@ -32,19 +32,19 @@ namespace implementation { * not an entry for a captured station in the radio tuner memory. 
*/ struct VirtualProgram { - ProgramSelector selector; + V1_1::ProgramSelector selector; std::string programName = ""; std::string songArtist = ""; std::string songTitle = ""; - ProgramInfo getProgramInfo(utils::HalRevision halRev) const; + V1_1::ProgramInfo getProgramInfo(utils::HalRevision halRev) const; friend bool operator<(const VirtualProgram& lhs, const VirtualProgram& rhs); }; -std::vector getProgramInfoVector(const std::vector& vec, - utils::HalRevision halRev); +std::vector getProgramInfoVector(const std::vector& vec, + utils::HalRevision halRev); } // namespace implementation } // namespace V1_1 diff --git a/broadcastradio/1.1/default/VirtualRadio.cpp b/broadcastradio/1.1/default/VirtualRadio.cpp index 36d47a92e7db10b572f90048053639e7c8a28209..5b79155522f2864b69b6ab834fcca60daecd4d95 100644 --- a/broadcastradio/1.1/default/VirtualRadio.cpp +++ b/broadcastradio/1.1/default/VirtualRadio.cpp @@ -18,7 +18,7 @@ #include "VirtualRadio.h" -#include +#include #include namespace android { @@ -29,6 +29,7 @@ namespace implementation { using V1_0::Band; using V1_0::Class; +using V1_1::ProgramSelector; using std::lock_guard; using std::move; diff --git a/broadcastradio/1.1/default/VirtualRadio.h b/broadcastradio/1.1/default/VirtualRadio.h index 3c7ae5c19391c9a18f25b3d190d93c50a804d49b..176b1b2fb9dd193c58ae3e4018122b1d679932f9 100644 --- a/broadcastradio/1.1/default/VirtualRadio.h +++ b/broadcastradio/1.1/default/VirtualRadio.h @@ -40,7 +40,7 @@ class VirtualRadio { VirtualRadio(const std::vector initialList); std::vector getProgramList(); - bool getProgram(const ProgramSelector& selector, VirtualProgram& program); + bool getProgram(const V1_1::ProgramSelector& selector, VirtualProgram& program); private: std::mutex mMut; diff --git a/broadcastradio/1.1/vts/OWNERS b/broadcastradio/1.1/vts/OWNERS index aa5ce82e16e84ffe86658b59b6c50e7aed8212cd..7736681f4ea31b66ae84c14d62f3ae5cd90e0536 100644 --- a/broadcastradio/1.1/vts/OWNERS +++ b/broadcastradio/1.1/vts/OWNERS @@ -4,5 +4,5 @@ keunyoung@google.com twasilczyk@google.com # VTS team -ryanjcampbell@google.com +yuexima@google.com yim@google.com diff --git a/broadcastradio/1.1/vts/functional/Android.bp b/broadcastradio/1.1/vts/functional/Android.bp index 4b93cbcf1f607698a6912850275ba513483c4ee9..9240cf058669fa6f3f57ca0507317c79e901f55a 100644 --- a/broadcastradio/1.1/vts/functional/Android.bp +++ b/broadcastradio/1.1/vts/functional/Android.bp @@ -21,8 +21,8 @@ cc_test { static_libs: [ "android.hardware.broadcastradio@1.0", "android.hardware.broadcastradio@1.1", - "android.hardware.broadcastradio@1.1-utils-lib", - "android.hardware.broadcastradio@1.1-vts-utils-lib", + "android.hardware.broadcastradio@common-utils-1x-lib", + "android.hardware.broadcastradio@vts-utils-lib", "libgmock", ], } diff --git a/broadcastradio/1.1/vts/functional/VtsHalBroadcastradioV1_1TargetTest.cpp b/broadcastradio/1.1/vts/functional/VtsHalBroadcastradioV1_1TargetTest.cpp index a46378ee6af64783f7805f0cfaed0cdc1a6cd022..6687731965c36efe64cbb453f55f112a90c42c8c 100644 --- a/broadcastradio/1.1/vts/functional/VtsHalBroadcastradioV1_1TargetTest.cpp +++ b/broadcastradio/1.1/vts/functional/VtsHalBroadcastradioV1_1TargetTest.cpp @@ -17,15 +17,17 @@ #define LOG_TAG "broadcastradio.vts" #include +#include #include #include #include #include #include -#include -#include +#include #include +#include #include +#include #include #include #include @@ -56,8 +58,8 @@ using V1_0::MetaData; using V1_0::MetadataKey; using V1_0::MetadataType; -using std::chrono::steady_clock; -using 
std::this_thread::sleep_for; +using broadcastradio::vts::clearAndWait; +using broadcastradio::vts::BroadcastRadioHidlEnvironment; static constexpr auto kConfigTimeout = 10s; static constexpr auto kConnectModuleTimeout = 1s; @@ -91,6 +93,8 @@ struct TunerCallbackMock : public ITunerCallback { MOCK_TIMEOUT_METHOD1(currentProgramInfoChanged, Return(const ProgramInfo&)); }; +static BroadcastRadioHidlEnvironment* gEnv = nullptr; + class BroadcastRadioHalTest : public ::testing::VtsHalHidlTargetTestBase, public ::testing::WithParamInterface { protected: @@ -115,32 +119,12 @@ class BroadcastRadioHalTest : public ::testing::VtsHalHidlTargetTestBase, hidl_vec mBands; }; -/** - * Clears strong pointer and waits until the object gets destroyed. - * - * @param ptr The pointer to get cleared. - * @param timeout Time to wait for other references. - */ -template -static void clearAndWait(sp& ptr, std::chrono::milliseconds timeout) { - wp wptr = ptr; - ptr.clear(); - auto limit = steady_clock::now() + timeout; - while (wptr.promote() != nullptr) { - constexpr auto step = 10ms; - if (steady_clock::now() + step > limit) { - FAIL() << "Pointer was not released within timeout"; - break; - } - sleep_for(step); - } -} - void BroadcastRadioHalTest::SetUp() { radioClass = GetParam(); // lookup HIDL service - auto factory = getService(); + auto factory = + getService(gEnv->getServiceName()); ASSERT_NE(nullptr, factory.get()); // connect radio module @@ -525,6 +509,98 @@ TEST_P(BroadcastRadioHalTest, AnalogForcedSwitch) { ASSERT_FALSE(forced); } +static void verifyIdentifier(const ProgramIdentifier& id) { + EXPECT_NE(id.type, 0u); + auto val = id.value; + + switch (static_cast(id.type)) { + case IdentifierType::AMFM_FREQUENCY: + case IdentifierType::DAB_FREQUENCY: + case IdentifierType::DRMO_FREQUENCY: + EXPECT_GT(val, 100u) << "Expected f > 100kHz"; + EXPECT_LT(val, 10000000u) << "Expected f < 10GHz"; + break; + case IdentifierType::RDS_PI: + EXPECT_GT(val, 0u); + EXPECT_LE(val, 0xFFFFu) << "Expected 16bit id"; + break; + case IdentifierType::HD_STATION_ID_EXT: { + auto stationId = val & 0xFFFFFFFF; // 32bit + val >>= 32; + auto subchannel = val & 0xF; // 4bit + val >>= 4; + auto freq = val & 0x3FFFF; // 18bit + EXPECT_GT(stationId, 0u); + EXPECT_LT(subchannel, 8u) << "Expected ch < 8"; + EXPECT_GT(freq, 100u) << "Expected f > 100kHz"; + EXPECT_LT(freq, 10000000u) << "Expected f < 10GHz"; + break; + } + case IdentifierType::HD_SUBCHANNEL: + EXPECT_LT(val, 8u) << "Expected ch < 8"; + break; + case IdentifierType::DAB_SIDECC: { + auto sid = val & 0xFFFF; // 16bit + val >>= 16; + auto ecc = val & 0xFF; // 8bit + EXPECT_NE(sid, 0u); + EXPECT_GE(ecc, 0xA0u) << "Invalid ECC, see ETSI TS 101 756 V2.1.1"; + EXPECT_LE(ecc, 0xF6u) << "Invalid ECC, see ETSI TS 101 756 V2.1.1"; + break; + } + case IdentifierType::DAB_ENSEMBLE: + EXPECT_GT(val, 0u); + EXPECT_LE(val, 0xFFFFu) << "Expected 16bit id"; + break; + case IdentifierType::DAB_SCID: + EXPECT_GT(val, 0xFu) << "Expected 12bit SCId (not 4bit SCIdS)"; + EXPECT_LE(val, 0xFFFu) << "Expected 12bit id"; + break; + case IdentifierType::DRMO_SERVICE_ID: + EXPECT_GT(val, 0u); + EXPECT_LE(val, 0xFFFFFFu) << "Expected 24bit id"; + break; + case IdentifierType::DRMO_MODULATION: + EXPECT_GE(val, static_cast(Modulation::AM)); + EXPECT_LE(val, static_cast(Modulation::FM)); + break; + case IdentifierType::SXM_SERVICE_ID: + EXPECT_GT(val, 0u); + EXPECT_LE(val, 0xFFFFFFFFu) << "Expected 32bit id"; + break; + case IdentifierType::SXM_CHANNEL: + EXPECT_LT(val, 1000u); + break; + case 
IdentifierType::VENDOR_PRIMARY_START: + case IdentifierType::VENDOR_PRIMARY_END: + // skip + break; + } +} + +/** + * Test ProgramIdentifier format. + * + * Verifies that: + * - values of ProgramIdentifier match their definitions at IdentifierType. + */ +TEST_P(BroadcastRadioHalTest, VerifyIdentifiersFormat) { + if (skipped) return; + ASSERT_TRUE(openTuner()); + + do { + auto getCb = [&](const hidl_vec& list) { + for (auto&& program : list) { + verifyIdentifier(program.selector.primaryId); + for (auto&& id : program.selector.secondaryIds) { + verifyIdentifier(id); + } + } + }; + getProgramList(getCb); + } while (nextBand()); +} + INSTANTIATE_TEST_CASE_P(BroadcastRadioHalTestCases, BroadcastRadioHalTest, ::testing::Values(Class::AM_FM, Class::SAT, Class::DT)); @@ -535,8 +611,14 @@ INSTANTIATE_TEST_CASE_P(BroadcastRadioHalTestCases, BroadcastRadioHalTest, } // namespace android int main(int argc, char** argv) { - ::testing::InitGoogleTest(&argc, argv); - int status = RUN_ALL_TESTS(); - ALOGI("Test result = %d", status); - return status; + using android::hardware::broadcastradio::V1_1::vts::gEnv; + using android::hardware::broadcastradio::V1_1::IBroadcastRadioFactory; + using android::hardware::broadcastradio::vts::BroadcastRadioHidlEnvironment; + gEnv = new BroadcastRadioHidlEnvironment; + ::testing::AddGlobalTestEnvironment(gEnv); + ::testing::InitGoogleTest(&argc, argv); + gEnv->init(&argc, argv); + int status = RUN_ALL_TESTS(); + ALOGI("Test result = %d", status); + return status; } diff --git a/broadcastradio/2.0/Android.bp b/broadcastradio/2.0/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..2434fdc8599fb68eb64c50d5d16eedb5ca569634 --- /dev/null +++ b/broadcastradio/2.0/Android.bp @@ -0,0 +1,45 @@ +// This file is autogenerated by hidl-gen -Landroidbp. + +hidl_interface { + name: "android.hardware.broadcastradio@2.0", + root: "android.hardware", + vndk: { + enabled: true, + }, + srcs: [ + "types.hal", + "IAnnouncementListener.hal", + "IBroadcastRadio.hal", + "ICloseHandle.hal", + "ITunerCallback.hal", + "ITunerSession.hal", + ], + interfaces: [ + "android.hidl.base@1.0", + ], + types: [ + "AmFmBandRange", + "AmFmRegionConfig", + "Announcement", + "AnnouncementType", + "ConfigFlag", + "Constants", + "DabTableEntry", + "Deemphasis", + "IdentifierType", + "Metadata", + "MetadataKey", + "ProgramFilter", + "ProgramIdentifier", + "ProgramInfo", + "ProgramInfoFlags", + "ProgramListChunk", + "ProgramSelector", + "Properties", + "Rds", + "Result", + "VendorKeyValue", + ], + gen_java: true, +} + diff --git a/broadcastradio/2.0/IAnnouncementListener.hal b/broadcastradio/2.0/IAnnouncementListener.hal new file mode 100644 index 0000000000000000000000000000000000000000..1b4960ce0d8a6574bcc0d057cf68190826b59fc8 --- /dev/null +++ b/broadcastradio/2.0/IAnnouncementListener.hal @@ -0,0 +1,30 @@ +/* Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package android.hardware.broadcastradio@2.0; + +/** + * Callback interface for announcement listener. + * + * For typical configuration, the listener is a broadcast radio service. + */ +interface IAnnouncementListener { + /** + * Called whenever announcement list has changed. + * + * @param announcements The complete list of currently active announcements. + */ + oneway onListUpdated(vec announcements); +}; diff --git a/broadcastradio/2.0/IBroadcastRadio.hal b/broadcastradio/2.0/IBroadcastRadio.hal new file mode 100644 index 0000000000000000000000000000000000000000..bedc362e065b1d1bbb25c599eb0feb3829c87148 --- /dev/null +++ b/broadcastradio/2.0/IBroadcastRadio.hal @@ -0,0 +1,129 @@ +/* Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.broadcastradio@2.0; + +import IAnnouncementListener; +import ICloseHandle; +import ITunerCallback; +import ITunerSession; + +/** + * Represents a hardware broadcast radio module. A single module may contain + * multiple hardware tuners (i.e. with an additional background tuner), but the + * layers above the HAL see them as a single logical unit. + */ +interface IBroadcastRadio { + /** + * Returns module properties: a description of a module and its + * capabilities. This method must not fail. + * + * @return properties Module description. + */ + getProperties() generates (Properties properties); + + /** + * Fetches current or possible AM/FM region configuration. + * + * @param full If true, returns full hardware capabilities. + * If false, returns current regional configuration. + * @return result OK in case of success. + * NOT_SUPPORTED if the tuner doesn't support AM/FM. + * @return config Hardware capabilities (full=true) or + * current configuration (full=false). + */ + getAmFmRegionConfig(bool full) + generates (Result result, AmFmRegionConfig config); + + /** + * Fetches current DAB region configuration. + * + * @return result OK in case of success. + * NOT_SUPPORTED if the tuner doesn't support DAB. + * @return config Current configuration. + */ + getDabRegionConfig() generates (Result result, vec config); + + /** + * Opens a new tuner session. + * + * There may be only one session active at a time. If the new session was + * requested when the old one was active, the old must be terminated + * (aggressive open). + * + * @param callback The callback interface. + * @return result OK in case of success. + * @return session The session interface. + */ + openSession(ITunerCallback callback) + generates (Result result, ITunerSession session); + + /** + * Fetch image from radio module cache. + * + * This is out-of-band transport mechanism for images carried with metadata. + * The metadata vector only passes the identifier, so the client may cache + * images or even not fetch them. + * + * The identifier may be any arbitrary number (i.e. sha256 prefix) selected + * by the vendor. It must be stable across sessions so the application may + * cache it. 
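+     *
+     * A client typically fetches images lazily, i.e. it calls getImage() only
+     * when a metadata entry such as STATION_ICON or ALBUM_ART is about to be
+     * displayed.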
+ * + * The data must be a valid PNG, JPEG, GIF or BMP file. + * Image data with an invalid format must be handled gracefully in the same + * way as a missing image. + * + * The image identifier may become invalid after some time from passing it + * with metadata struct (due to resource cleanup at the HAL implementation). + * However, it must remain valid for a currently tuned program at least + * until onCurrentProgramInfoChanged is called. + * + * There is still a race condition possible between + * onCurrentProgramInfoChanged callback and the HAL implementation eagerly + * clearing the cache (because the next onCurrentProgramInfoChanged came). + * In such case, client application may expect the new + * onCurrentProgramInfoChanged callback with updated image identifier. + * + * @param id Identifier of an image (value of Constants::INVALID_IMAGE is + * reserved and must be treated as invalid image). + * @return image A binary blob with image data + * or a zero-length vector if identifier doesn't exist. + */ + getImage(uint32_t id) generates (vec image); + + /** + * Registers announcement listener. + * + * If there is at least one observer registered, HAL implementation must + * notify about announcements even if no sessions are active. + * + * If the observer dies, the HAL implementation must unregister observer + * automatically. + * + * @param enabled The list of announcement types to watch for. + * @param listener The listener interface. + * @return result OK in case of success. + * NOT_SUPPORTED if the tuner doesn't support announcements. + * @return closeHandle A handle to unregister observer, + * nullptr if result was not OK. + */ + registerAnnouncementListener( + vec enabled, + IAnnouncementListener listener + ) generates ( + Result result, + ICloseHandle closeHandle + ); +}; diff --git a/broadcastradio/2.0/ICloseHandle.hal b/broadcastradio/2.0/ICloseHandle.hal new file mode 100644 index 0000000000000000000000000000000000000000..34cea146d315fc4b0229070522cd6d25eb94b164 --- /dev/null +++ b/broadcastradio/2.0/ICloseHandle.hal @@ -0,0 +1,32 @@ +/* Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.broadcastradio@2.0; + +/** + * Represents a generic close handle to remove a callback that doesn't need + * active interface. + */ +interface ICloseHandle { + /** + * Closes the handle. + * + * The call must not fail and must only be issued once. + * + * After the close call is executed, no other calls to this interface + * are allowed. 
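+     * Typically, the client drops its reference to the handle right after
+     * calling close().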
+ */ + close(); +}; diff --git a/broadcastradio/2.0/ITunerCallback.hal b/broadcastradio/2.0/ITunerCallback.hal new file mode 100644 index 0000000000000000000000000000000000000000..a174c9cea888ec046181d9f893d10df71745f6a9 --- /dev/null +++ b/broadcastradio/2.0/ITunerCallback.hal @@ -0,0 +1,85 @@ +/* Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.broadcastradio@2.0; + +interface ITunerCallback { + /** + * Method called by the HAL when a tuning operation fails asynchronously + * following ITunerSession::tune(), ITunerSession::scan() or + * ITunerSession::step(). + * + * This callback is only called when the step(), scan() or tune() command + * returned OK at first. + * + * @param result TIMEOUT in case of time out. + * @param selector A ProgramSelector structure passed from tune() call; + * empty for step() and scan(). + */ + oneway onTuneFailed(Result result, ProgramSelector selector); + + /** + * Method called by the HAL when current program information (including + * metadata) is updated. + * + * This is also called when the radio tuned to the static (not a valid + * station), see the TUNED flag of ProgramInfoFlags. + * + * @param info Current program information. + */ + oneway onCurrentProgramInfoChanged(ProgramInfo info); + + /** + * A delta update of the program list, called whenever there's a change in + * the list. + * + * If there are frequent changes, HAL implementation must throttle the rate + * of the updates. + * + * There is a hard limit on binder transaction buffer, and the list must + * not exceed it. For large lists, HAL implementation must split them to + * multiple chunks, no larger than 500kiB each. + * + * @param chunk A chunk of the program list update. + */ + oneway onProgramListUpdated(ProgramListChunk chunk); + + /** + * Method called by the HAL when the antenna gets connected or disconnected. + * + * For a new tuner session, client must assume the antenna is connected. + * If it's not, then antennaStateChange must be called within + * Constants::ANTENNA_DISCONNECTED_TIMEOUT_MS to indicate that. + * + * @param connected True if the antenna is now connected, false otherwise. + */ + oneway onAntennaStateChange(bool connected); + + /** + * Generic callback for passing updates to vendor-specific parameter values. + * The framework does not interpret the parameters, they are passed + * in an opaque manner between a vendor application and HAL. + * + * It's up to the HAL implementation if and how to implement this callback, + * as long as it obeys the prefix rule. In particular, only selected keys + * may be notified this way. However, setParameters must not trigger + * this callback, while an internal event can change parameters + * asynchronously. + * + * @param parameters Vendor-specific key-value pairs, + * opaque to Android framework. 
+ */ + oneway onParametersUpdated(vec parameters); +}; diff --git a/broadcastradio/2.0/ITunerSession.hal b/broadcastradio/2.0/ITunerSession.hal new file mode 100644 index 0000000000000000000000000000000000000000..3c27246f5cbaa2b9b1fef46377b06dcb06c53554 --- /dev/null +++ b/broadcastradio/2.0/ITunerSession.hal @@ -0,0 +1,196 @@ +/* Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.broadcastradio@2.0; + +interface ITunerSession { + /** + * Tune to a specified program. + * + * Automatically cancels pending tune(), scan() or step(). + * If the method returns OK, tuneFailed or currentProgramInfoChanged + * callback must be called. + * + * @param program Program to tune to. + * @return result OK if successfully started tuning. + * NOT_SUPPORTED if the program selector doesn't contain any + * supported identifier. + * INVALID_ARGUMENTS if the program selector contains + * identifiers in invalid format (i.e. out of range). + */ + tune(ProgramSelector program) generates (Result result); + + /** + * Tune (seek) to the next valid program on the "air". + * + * This might more naturally be called "seek" but for legacy reasons, the + * entry point remains "scan". This should not be confused with the actual + * scan operation (where the radio seeks through programs in a loop until + * user chooses to stay on one of them) nor background scan operation (that + * a tuner may do in order to locate all available programs. This function + * is meant to advance to the next detected program and stay there. + * + * Automatically cancels pending tune(), scan() or step(). + * If the method returns OK, tuneFailed or currentProgramInfoChanged + * callback must be called. + * + * The skipSubChannel parameter is used to skip digital radio subchannels: + * - HD Radio SPS; + * - DAB secondary service. + * + * As an implementation detail, the HAL has the option to perform an actual + * seek or select the next program from the list retrieved in the + * background, if one is not stale. + * + * @param directionUp True to change towards higher numeric values + * (frequency, channel number), false towards lower. + * @param skipSubChannel Don't tune to subchannels. + * @return result OK if the operation has successfully started. + */ + scan(bool directionUp, bool skipSubChannel) generates (Result result); + + /** + * Tune to the adjacent channel, which may not be occupied by any program. + * + * Automatically cancels pending tune(), scan() or step(). + * If the method returns OK, tuneFailed or currentProgramInfoChanged + * callback must be called. + * + * @param directionUp True to change towards higher numeric values + * (frequency, channel number), false towards lower. + * @return result OK successfully started tuning. + * NOT_SUPPORTED if tuning to an unoccupied channel is not + * supported (i.e. for satellite radio). + */ + step(bool directionUp) generates (Result result); + + /** + * Cancel a pending tune(), scan() or step(). 
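+     *
+     * An illustrative sequence: the client issues tune(), the user navigates away
+     * before onCurrentProgramInfoChanged (or onTuneFailed) arrives, so the client
+     * calls cancel() to stop the pending operation.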
+ * + * If there is no such operation running, the call must be ignored. + */ + cancel(); + + /** + * Applies a filter to the program list and starts sending program list + * updates over onProgramListUpdated callback. + * + * There may be only one updates stream active at the moment. Calling this + * method again must result in cancelling the previous update request. + * + * This call clears the program list on the client side, the HAL must send + * the whole list again. + * + * If the program list scanning hardware (i.e. background tuner) is + * unavailable at the moment, the call must succeed and start updates + * when it becomes available. + * + * @param filter Filter to apply on the fetched program list. + * @return result OK successfully started fetching list updates. + * NOT_SUPPORTED program list scanning is not supported + * by the hardware. + */ + startProgramListUpdates(ProgramFilter filter) generates (Result result); + + /** + * Stops sending program list updates. + */ + stopProgramListUpdates(); + + /** + * Fetches the current setting of a given config flag. + * + * The success/failure result must be consistent with setConfigFlag. + * + * @param flag Flag to fetch. + * @return result OK successfully fetched the flag. + * INVALID_STATE if the flag is not applicable right now. + * NOT_SUPPORTED if the flag is not supported at all. + * @return value The current value of the flag, if result is OK. + */ + isConfigFlagSet(ConfigFlag flag) generates (Result result, bool value); + + /** + * Sets the config flag. + * + * The success/failure result must be consistent with isConfigFlagSet. + * + * @param flag Flag to set. + * @param value The new value of a given flag. + * @return result OK successfully set the flag. + * INVALID_STATE if the flag is not applicable right now. + * NOT_SUPPORTED if the flag is not supported at all. + */ + setConfigFlag(ConfigFlag flag, bool value) generates (Result result); + + /** + * Generic method for setting vendor-specific parameter values. + * The framework does not interpret the parameters, they are passed + * in an opaque manner between a vendor application and HAL. + * + * Framework does not make any assumptions on the keys or values, other than + * ones stated in VendorKeyValue documentation (a requirement of key + * prefixes). + * + * For each pair in the result vector, the key must be one of the keys + * contained in the input (possibly with wildcards expanded), and the value + * must be a vendor-specific result status (i.e. the string "OK" or an error + * code). The implementation may choose to return an empty vector, or only + * return a status for a subset of the provided inputs, at its discretion. + * + * Application and HAL must not use keys with unknown prefix. In particular, + * it must not place a key-value pair in results vector for unknown key from + * parameters vector - instead, an unknown key should simply be ignored. + * In other words, results vector may contain a subset of parameter keys + * (however, the framework doesn't enforce a strict subset - the only + * formal requirement is vendor domain prefix for keys). + * + * @param parameters Vendor-specific key-value pairs. + * @return results Operation completion status for parameters being set. + */ + setParameters(vec parameters) + generates (vec results); + + /** + * Generic method for retrieving vendor-specific parameter values. + * The framework does not interpret the parameters, they are passed + * in an opaque manner between a vendor application and HAL. 
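+     *
+     * For instance (purely illustrative), a vendor application might call
+     * getParameters(["com.vendor.sample.preset-slot"]) to query the current value
+     * of a vendor-defined setting.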
+ * + * Framework does not cache set/get requests, so it's allowed for + * getParameter to return a different value than previous setParameter call. + * + * The syntax and semantics of keys are up to the vendor (as long as prefix + * rules are obeyed). For instance, vendors may include some form of + * wildcard support. In such case, result vector may be of different size + * than requested keys vector. However, wildcards are not recognized by + * framework and they are passed as-is to the HAL implementation. + * + * Unknown keys must be ignored and not placed into results vector. + * + * @param keys Parameter keys to fetch. + * @return parameters Vendor-specific key-value pairs. + */ + getParameters(vec keys) generates (vec parameters); + + /** + * Closes the session. + * + * The call must not fail and must only be issued once. + * + * After the close call is executed, no other calls to this interface + * are allowed. + */ + close(); +}; diff --git a/broadcastradio/2.0/default/Android.bp b/broadcastradio/2.0/default/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..900454e78bfb66e989e0c95ea63249d5a8b39b4d --- /dev/null +++ b/broadcastradio/2.0/default/Android.bp @@ -0,0 +1,49 @@ +// +// Copyright (C) 2017 The Android Open Source Project +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +cc_binary { + name: "android.hardware.broadcastradio@2.0-service", + init_rc: ["android.hardware.broadcastradio@2.0-service.rc"], + vendor: true, + relative_install_path: "hw", + cflags: [ + "-Wall", + "-Wextra", + "-Werror", + ], + cppflags: [ + "-std=c++1z", + ], + srcs: [ + "BroadcastRadio.cpp", + "TunerSession.cpp", + "VirtualProgram.cpp", + "VirtualRadio.cpp", + "service.cpp" + ], + static_libs: [ + "android.hardware.broadcastradio@common-utils-2x-lib", + "android.hardware.broadcastradio@common-utils-lib", + ], + shared_libs: [ + "android.hardware.broadcastradio@2.0", + "libbase", + "libhidlbase", + "libhidltransport", + "liblog", + "libutils", + ], +} diff --git a/broadcastradio/2.0/default/BroadcastRadio.cpp b/broadcastradio/2.0/default/BroadcastRadio.cpp new file mode 100644 index 0000000000000000000000000000000000000000..0148fecc48afc1930e80abd3a7ecdf64cb9ab586 --- /dev/null +++ b/broadcastradio/2.0/default/BroadcastRadio.cpp @@ -0,0 +1,162 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#define LOG_TAG "BcRadioDef.module" +#define LOG_NDEBUG 0 + +#include "BroadcastRadio.h" + +#include + +#include "resources.h" + +namespace android { +namespace hardware { +namespace broadcastradio { +namespace V2_0 { +namespace implementation { + +using std::lock_guard; +using std::map; +using std::mutex; +using std::vector; + +static const AmFmRegionConfig gDefaultAmFmConfig = { // + { + {87500, 108000, 100, 100}, // FM + {153, 282, 3, 9}, // AM LW + {531, 1620, 9, 9}, // AM MW + {1600, 30000, 1, 5}, // AM SW + }, + static_cast(Deemphasis::D50), + static_cast(Rds::RDS)}; + +static Properties initProperties(const VirtualRadio& virtualRadio) { + Properties prop = {}; + + prop.maker = "Google"; + prop.product = virtualRadio.getName(); + prop.supportedIdentifierTypes = hidl_vec({ + static_cast(IdentifierType::AMFM_FREQUENCY), + static_cast(IdentifierType::RDS_PI), + static_cast(IdentifierType::HD_STATION_ID_EXT), + }); + prop.vendorInfo = hidl_vec({ + {"com.google.dummy", "dummy"}, + }); + + return prop; +} + +BroadcastRadio::BroadcastRadio(const VirtualRadio& virtualRadio) + : mVirtualRadio(virtualRadio), + mProperties(initProperties(virtualRadio)), + mAmFmConfig(gDefaultAmFmConfig) {} + +Return BroadcastRadio::getProperties(getProperties_cb _hidl_cb) { + ALOGV("%s", __func__); + _hidl_cb(mProperties); + return {}; +} + +AmFmRegionConfig BroadcastRadio::getAmFmConfig() const { + lock_guard lk(mMut); + return mAmFmConfig; +} + +Return BroadcastRadio::getAmFmRegionConfig(bool full, getAmFmRegionConfig_cb _hidl_cb) { + ALOGV("%s(%d)", __func__, full); + + if (full) { + AmFmRegionConfig config = {}; + config.ranges = hidl_vec({ + {65000, 108000, 10, 0}, // FM + {150, 30000, 1, 0}, // AM + }); + config.fmDeemphasis = Deemphasis::D50 | Deemphasis::D75; + config.fmRds = Rds::RDS | Rds::RBDS; + _hidl_cb(Result::OK, config); + return {}; + } else { + _hidl_cb(Result::OK, getAmFmConfig()); + return {}; + } +} + +Return BroadcastRadio::getDabRegionConfig(getDabRegionConfig_cb _hidl_cb) { + ALOGV("%s", __func__); + + hidl_vec config = { + {"5A", 174928}, {"7D", 194064}, {"8A", 195936}, {"8B", 197648}, {"9A", 202928}, + {"9B", 204640}, {"9C", 206352}, {"10B", 211648}, {"10C", 213360}, {"10D", 215072}, + {"11A", 216928}, {"11B", 218640}, {"11C", 220352}, {"11D", 222064}, {"12A", 223936}, + {"12B", 225648}, {"12C", 227360}, {"12D", 229072}, + }; + + _hidl_cb(Result::OK, config); + return {}; +} + +Return BroadcastRadio::openSession(const sp& callback, + openSession_cb _hidl_cb) { + ALOGV("%s", __func__); + + /* For the needs of default implementation it's fine to instantiate new session object + * out of the lock scope. If your implementation needs it, use reentrant lock. 
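+     *
+     * The "aggressive open" policy described in IBroadcastRadio.hal is applied
+     * below: any previously opened session is closed before the new one is handed
+     * back to the client.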
+ */ + sp newSession = new TunerSession(*this, callback); + + lock_guard lk(mMut); + + auto oldSession = mSession.promote(); + if (oldSession != nullptr) { + ALOGI("Closing previously opened tuner"); + oldSession->close(); + mSession = nullptr; + } + + mSession = newSession; + + _hidl_cb(Result::OK, newSession); + return {}; +} + +Return BroadcastRadio::getImage(uint32_t id, getImage_cb _hidl_cb) { + ALOGV("%s(%x)", __func__, id); + + if (id == resources::demoPngId) { + _hidl_cb(std::vector(resources::demoPng, std::end(resources::demoPng))); + return {}; + } + + ALOGI("Image %x doesn't exists", id); + _hidl_cb({}); + return {}; +} + +Return BroadcastRadio::registerAnnouncementListener( + const hidl_vec& enabled, const sp& /* listener */, + registerAnnouncementListener_cb _hidl_cb) { + ALOGV("%s(%s)", __func__, toString(enabled).c_str()); + + _hidl_cb(Result::NOT_SUPPORTED, nullptr); + return {}; +} + +} // namespace implementation +} // namespace V2_0 +} // namespace broadcastradio +} // namespace hardware +} // namespace android diff --git a/broadcastradio/2.0/default/BroadcastRadio.h b/broadcastradio/2.0/default/BroadcastRadio.h new file mode 100644 index 0000000000000000000000000000000000000000..8c14d9e6627e5cbcbb0cc4345f54a8faf14c8daf --- /dev/null +++ b/broadcastradio/2.0/default/BroadcastRadio.h @@ -0,0 +1,60 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#ifndef ANDROID_HARDWARE_BROADCASTRADIO_V2_0_BROADCASTRADIO_H +#define ANDROID_HARDWARE_BROADCASTRADIO_V2_0_BROADCASTRADIO_H + +#include "TunerSession.h" + +#include +#include + +namespace android { +namespace hardware { +namespace broadcastradio { +namespace V2_0 { +namespace implementation { + +struct BroadcastRadio : public IBroadcastRadio { + BroadcastRadio(const VirtualRadio& virtualRadio); + + // V2_0::IBroadcastRadio methods + Return getProperties(getProperties_cb _hidl_cb) override; + Return getAmFmRegionConfig(bool full, getAmFmRegionConfig_cb _hidl_cb); + Return getDabRegionConfig(getDabRegionConfig_cb _hidl_cb); + Return openSession(const sp& callback, openSession_cb _hidl_cb) override; + Return getImage(uint32_t id, getImage_cb _hidl_cb); + Return registerAnnouncementListener(const hidl_vec& enabled, + const sp& listener, + registerAnnouncementListener_cb _hidl_cb); + + std::reference_wrapper mVirtualRadio; + Properties mProperties; + + AmFmRegionConfig getAmFmConfig() const; + + private: + mutable std::mutex mMut; + AmFmRegionConfig mAmFmConfig; + wp mSession; +}; + +} // namespace implementation +} // namespace V2_0 +} // namespace broadcastradio +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_BROADCASTRADIO_V2_0_BROADCASTRADIO_H diff --git a/broadcastradio/1.1/utils/OWNERS b/broadcastradio/2.0/default/OWNERS similarity index 74% rename from broadcastradio/1.1/utils/OWNERS rename to broadcastradio/2.0/default/OWNERS index 0c27b71865bc3b7ca11486a11e46a17937752f03..136b607b9405843f9ac7cbf6657c0bacdb13a9b6 100644 --- a/broadcastradio/1.1/utils/OWNERS +++ b/broadcastradio/2.0/default/OWNERS @@ -1,4 +1,3 @@ # Automotive team egranata@google.com -keunyoung@google.com twasilczyk@google.com diff --git a/broadcastradio/2.0/default/TunerSession.cpp b/broadcastradio/2.0/default/TunerSession.cpp new file mode 100644 index 0000000000000000000000000000000000000000..da9756208f5f4f94c3bc930f15442b06dd643034 --- /dev/null +++ b/broadcastradio/2.0/default/TunerSession.cpp @@ -0,0 +1,324 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "BcRadioDef.tuner" +#define LOG_NDEBUG 0 + +#include "TunerSession.h" + +#include "BroadcastRadio.h" + +#include +#include + +namespace android { +namespace hardware { +namespace broadcastradio { +namespace V2_0 { +namespace implementation { + +using namespace std::chrono_literals; + +using utils::tunesTo; + +using std::lock_guard; +using std::move; +using std::mutex; +using std::sort; +using std::vector; + +namespace delay { + +static constexpr auto seek = 200ms; +static constexpr auto step = 100ms; +static constexpr auto tune = 150ms; +static constexpr auto list = 1s; + +} // namespace delay + +TunerSession::TunerSession(BroadcastRadio& module, const sp& callback) + : mCallback(callback), mModule(module) { + auto&& ranges = module.getAmFmConfig().ranges; + if (ranges.size() > 0) { + tuneInternalLocked(utils::make_selector_amfm(ranges[0].lowerBound)); + } +} + +// makes ProgramInfo that points to no program +static ProgramInfo makeDummyProgramInfo(const ProgramSelector& selector) { + ProgramInfo info = {}; + info.selector = selector; + info.logicallyTunedTo = utils::make_identifier( + IdentifierType::AMFM_FREQUENCY, utils::getId(selector, IdentifierType::AMFM_FREQUENCY)); + info.physicallyTunedTo = info.logicallyTunedTo; + return info; +} + +void TunerSession::tuneInternalLocked(const ProgramSelector& sel) { + ALOGV("%s(%s)", __func__, toString(sel).c_str()); + + VirtualProgram virtualProgram; + ProgramInfo programInfo; + if (virtualRadio().getProgram(sel, virtualProgram)) { + mCurrentProgram = virtualProgram.selector; + programInfo = virtualProgram; + } else { + mCurrentProgram = sel; + programInfo = makeDummyProgramInfo(sel); + } + mIsTuneCompleted = true; + + mCallback->onCurrentProgramInfoChanged(programInfo); +} + +const BroadcastRadio& TunerSession::module() const { + return mModule.get(); +} + +const VirtualRadio& TunerSession::virtualRadio() const { + return module().mVirtualRadio; +} + +Return TunerSession::tune(const ProgramSelector& sel) { + ALOGV("%s(%s)", __func__, toString(sel).c_str()); + lock_guard lk(mMut); + if (mIsClosed) return Result::INVALID_STATE; + + if (!utils::isSupported(module().mProperties, sel)) { + ALOGW("Selector not supported"); + return Result::NOT_SUPPORTED; + } + + if (!utils::isValid(sel)) { + ALOGE("ProgramSelector is not valid"); + return Result::INVALID_ARGUMENTS; + } + + cancelLocked(); + + mIsTuneCompleted = false; + auto task = [this, sel]() { + lock_guard lk(mMut); + tuneInternalLocked(sel); + }; + mThread.schedule(task, delay::tune); + + return Result::OK; +} + +Return TunerSession::scan(bool directionUp, bool /* skipSubChannel */) { + ALOGV("%s", __func__); + lock_guard lk(mMut); + if (mIsClosed) return Result::INVALID_STATE; + + cancelLocked(); + + auto list = virtualRadio().getProgramList(); + + if (list.empty()) { + mIsTuneCompleted = false; + auto task = [this, directionUp]() { + ALOGI("Performing failed seek up=%d", directionUp); + + mCallback->onTuneFailed(Result::TIMEOUT, {}); + }; + mThread.schedule(task, delay::seek); + + return Result::OK; + } + + // Not optimal (O(sort) instead of O(n)), but not a big deal here; + // also, it's likely that list is already sorted (so O(n) anyway). 
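+    // lower_bound() locates the current program (or the nearest entry above it) in the
+    // sorted list; the seek then advances one entry up or down, wrapping at the edges.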
+ sort(list.begin(), list.end()); + auto current = mCurrentProgram; + auto found = lower_bound(list.begin(), list.end(), VirtualProgram({current})); + if (directionUp) { + if (found < list.end() - 1) { + if (tunesTo(current, found->selector)) found++; + } else { + found = list.begin(); + } + } else { + if (found > list.begin() && found != list.end()) { + found--; + } else { + found = list.end() - 1; + } + } + auto tuneTo = found->selector; + + mIsTuneCompleted = false; + auto task = [this, tuneTo, directionUp]() { + ALOGI("Performing seek up=%d", directionUp); + + lock_guard lk(mMut); + tuneInternalLocked(tuneTo); + }; + mThread.schedule(task, delay::seek); + + return Result::OK; +} + +Return TunerSession::step(bool directionUp) { + ALOGV("%s", __func__); + lock_guard lk(mMut); + if (mIsClosed) return Result::INVALID_STATE; + + cancelLocked(); + + if (!utils::hasId(mCurrentProgram, IdentifierType::AMFM_FREQUENCY)) { + ALOGE("Can't step in anything else than AM/FM"); + return Result::NOT_SUPPORTED; + } + + auto stepTo = utils::getId(mCurrentProgram, IdentifierType::AMFM_FREQUENCY); + auto range = getAmFmRangeLocked(); + if (!range) { + ALOGE("Can't find current band"); + return Result::INTERNAL_ERROR; + } + + if (directionUp) { + stepTo += range->spacing; + } else { + stepTo -= range->spacing; + } + if (stepTo > range->upperBound) stepTo = range->lowerBound; + if (stepTo < range->lowerBound) stepTo = range->upperBound; + + mIsTuneCompleted = false; + auto task = [this, stepTo]() { + ALOGI("Performing step to %s", std::to_string(stepTo).c_str()); + + lock_guard lk(mMut); + + tuneInternalLocked(utils::make_selector_amfm(stepTo)); + }; + mThread.schedule(task, delay::step); + + return Result::OK; +} + +void TunerSession::cancelLocked() { + ALOGV("%s", __func__); + + mThread.cancelAll(); + if (utils::getType(mCurrentProgram.primaryId) != IdentifierType::INVALID) { + mIsTuneCompleted = true; + } +} + +Return TunerSession::cancel() { + ALOGV("%s", __func__); + lock_guard lk(mMut); + if (mIsClosed) return {}; + + cancelLocked(); + + return {}; +} + +Return TunerSession::startProgramListUpdates(const ProgramFilter& filter) { + ALOGV("%s(%s)", __func__, toString(filter).c_str()); + lock_guard lk(mMut); + if (mIsClosed) return Result::INVALID_STATE; + + auto list = virtualRadio().getProgramList(); + vector filteredList; + auto filterCb = [&filter](const VirtualProgram& program) { + return utils::satisfies(filter, program.selector); + }; + std::copy_if(list.begin(), list.end(), std::back_inserter(filteredList), filterCb); + + auto task = [this, list]() { + lock_guard lk(mMut); + + ProgramListChunk chunk = {}; + chunk.purge = true; + chunk.complete = true; + chunk.modified = hidl_vec(list.begin(), list.end()); + + mCallback->onProgramListUpdated(chunk); + }; + mThread.schedule(task, delay::list); + + return Result::OK; +} + +Return TunerSession::stopProgramListUpdates() { + ALOGV("%s", __func__); + return {}; +} + +Return TunerSession::isConfigFlagSet(ConfigFlag flag, isConfigFlagSet_cb _hidl_cb) { + ALOGV("%s(%s)", __func__, toString(flag).c_str()); + + _hidl_cb(Result::NOT_SUPPORTED, false); + return {}; +} + +Return TunerSession::setConfigFlag(ConfigFlag flag, bool value) { + ALOGV("%s(%s, %d)", __func__, toString(flag).c_str(), value); + + return Result::NOT_SUPPORTED; +} + +Return TunerSession::setParameters(const hidl_vec& /* parameters */, + setParameters_cb _hidl_cb) { + ALOGV("%s", __func__); + + _hidl_cb({}); + return {}; +} + +Return TunerSession::getParameters(const hidl_vec& /* keys */, + 
getParameters_cb _hidl_cb) { + ALOGV("%s", __func__); + + _hidl_cb({}); + return {}; +} + +Return TunerSession::close() { + ALOGV("%s", __func__); + lock_guard lk(mMut); + if (mIsClosed) return {}; + + mIsClosed = true; + mThread.cancelAll(); + return {}; +} + +std::optional TunerSession::getAmFmRangeLocked() const { + if (!mIsTuneCompleted) { + ALOGW("tune operation in process"); + return {}; + } + if (!utils::hasId(mCurrentProgram, IdentifierType::AMFM_FREQUENCY)) return {}; + + auto freq = utils::getId(mCurrentProgram, IdentifierType::AMFM_FREQUENCY); + for (auto&& range : module().getAmFmConfig().ranges) { + if (range.lowerBound <= freq && range.upperBound >= freq) return range; + } + + return {}; +} + +} // namespace implementation +} // namespace V2_0 +} // namespace broadcastradio +} // namespace hardware +} // namespace android diff --git a/broadcastradio/2.0/default/TunerSession.h b/broadcastradio/2.0/default/TunerSession.h new file mode 100644 index 0000000000000000000000000000000000000000..bf7c607fa0454e1c32c129c12a1de7064cf6c062 --- /dev/null +++ b/broadcastradio/2.0/default/TunerSession.h @@ -0,0 +1,78 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef ANDROID_HARDWARE_BROADCASTRADIO_V2_0_TUNER_H +#define ANDROID_HARDWARE_BROADCASTRADIO_V2_0_TUNER_H + +#include "VirtualRadio.h" + +#include +#include +#include + +#include + +namespace android { +namespace hardware { +namespace broadcastradio { +namespace V2_0 { +namespace implementation { + +struct BroadcastRadio; + +struct TunerSession : public ITunerSession { + TunerSession(BroadcastRadio& module, const sp& callback); + + // V2_0::ITunerSession methods + virtual Return tune(const ProgramSelector& program) override; + virtual Return scan(bool directionUp, bool skipSubChannel) override; + virtual Return step(bool directionUp) override; + virtual Return cancel() override; + virtual Return startProgramListUpdates(const ProgramFilter& filter); + virtual Return stopProgramListUpdates(); + virtual Return isConfigFlagSet(ConfigFlag flag, isConfigFlagSet_cb _hidl_cb); + virtual Return setConfigFlag(ConfigFlag flag, bool value); + virtual Return setParameters(const hidl_vec& parameters, + setParameters_cb _hidl_cb) override; + virtual Return getParameters(const hidl_vec& keys, + getParameters_cb _hidl_cb) override; + virtual Return close() override; + + std::optional getAmFmRangeLocked() const; + + private: + std::mutex mMut; + WorkerThread mThread; + bool mIsClosed = false; + + const sp mCallback; + + std::reference_wrapper mModule; + bool mIsTuneCompleted = false; + ProgramSelector mCurrentProgram = {}; + + void cancelLocked(); + void tuneInternalLocked(const ProgramSelector& sel); + const VirtualRadio& virtualRadio() const; + const BroadcastRadio& module() const; +}; + +} // namespace implementation +} // namespace V2_0 +} // namespace broadcastradio +} // namespace hardware +} // namespace android + +#endif // 
ANDROID_HARDWARE_BROADCASTRADIO_V2_0_TUNER_H diff --git a/broadcastradio/2.0/default/VirtualProgram.cpp b/broadcastradio/2.0/default/VirtualProgram.cpp new file mode 100644 index 0000000000000000000000000000000000000000..acde704f9ba852760b5d41d845f85fe922569e5c --- /dev/null +++ b/broadcastradio/2.0/default/VirtualProgram.cpp @@ -0,0 +1,113 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#define LOG_TAG "BcRadioDef.VirtualProgram" + +#include "VirtualProgram.h" + +#include "resources.h" + +#include +#include +#include + +namespace android { +namespace hardware { +namespace broadcastradio { +namespace V2_0 { +namespace implementation { + +using utils::getType; +using utils::make_metadata; + +using std::vector; + +VirtualProgram::operator ProgramInfo() const { + ProgramInfo info = {}; + + info.selector = selector; + + auto pType = getType(selector.primaryId); + auto isDigital = (pType != IdentifierType::AMFM_FREQUENCY && pType != IdentifierType::RDS_PI); + + auto selectId = [&info](IdentifierType type) { + return utils::make_identifier(type, utils::getId(info.selector, type)); + }; + + switch (pType) { + case IdentifierType::AMFM_FREQUENCY: + info.logicallyTunedTo = info.physicallyTunedTo = + selectId(IdentifierType::AMFM_FREQUENCY); + break; + case IdentifierType::RDS_PI: + info.logicallyTunedTo = selectId(IdentifierType::RDS_PI); + info.physicallyTunedTo = selectId(IdentifierType::AMFM_FREQUENCY); + break; + case IdentifierType::HD_STATION_ID_EXT: + info.logicallyTunedTo = selectId(IdentifierType::HD_STATION_ID_EXT); + info.physicallyTunedTo = selectId(IdentifierType::AMFM_FREQUENCY); + break; + case IdentifierType::DAB_SID_EXT: + info.logicallyTunedTo = selectId(IdentifierType::DAB_SID_EXT); + info.physicallyTunedTo = selectId(IdentifierType::DAB_ENSEMBLE); + break; + case IdentifierType::DRMO_SERVICE_ID: + info.logicallyTunedTo = selectId(IdentifierType::DRMO_SERVICE_ID); + info.physicallyTunedTo = selectId(IdentifierType::DRMO_FREQUENCY); + break; + case IdentifierType::SXM_SERVICE_ID: + info.logicallyTunedTo = selectId(IdentifierType::SXM_SERVICE_ID); + info.physicallyTunedTo = selectId(IdentifierType::SXM_CHANNEL); + break; + default: + LOG(FATAL) << "Unsupported program type: " << toString(pType); + } + + info.infoFlags |= ProgramInfoFlags::TUNED; + info.infoFlags |= ProgramInfoFlags::STEREO; + info.signalQuality = isDigital ? 
100 : 80; + + info.metadata = hidl_vec({ + make_metadata(MetadataKey::RDS_PS, programName), + make_metadata(MetadataKey::SONG_TITLE, songTitle), + make_metadata(MetadataKey::SONG_ARTIST, songArtist), + make_metadata(MetadataKey::STATION_ICON, resources::demoPngId), + make_metadata(MetadataKey::ALBUM_ART, resources::demoPngId), + }); + + info.vendorInfo = hidl_vec({ + {"com.google.dummy", "dummy"}, + {"com.google.dummy.VirtualProgram", std::to_string(reinterpret_cast(this))}, + }); + + return info; +} + +bool operator<(const VirtualProgram& lhs, const VirtualProgram& rhs) { + auto& l = lhs.selector; + auto& r = rhs.selector; + + // Two programs with the same primaryId are considered the same. + if (l.primaryId.type != r.primaryId.type) return l.primaryId.type < r.primaryId.type; + if (l.primaryId.value != r.primaryId.value) return l.primaryId.value < r.primaryId.value; + + return false; +} + +} // namespace implementation +} // namespace V2_0 +} // namespace broadcastradio +} // namespace hardware +} // namespace android diff --git a/broadcastradio/2.0/default/VirtualProgram.h b/broadcastradio/2.0/default/VirtualProgram.h new file mode 100644 index 0000000000000000000000000000000000000000..6502616415338cdea07331d15b957eb03540c34d --- /dev/null +++ b/broadcastradio/2.0/default/VirtualProgram.h @@ -0,0 +1,57 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef ANDROID_HARDWARE_BROADCASTRADIO_V2_0_VIRTUALPROGRAM_H +#define ANDROID_HARDWARE_BROADCASTRADIO_V2_0_VIRTUALPROGRAM_H + +#include + +namespace android { +namespace hardware { +namespace broadcastradio { +namespace V2_0 { +namespace implementation { + +/** + * A radio program mock. + * + * This represents broadcast waves flying over the air, + * not an entry for a captured station in the radio tuner memory. + */ +struct VirtualProgram { + ProgramSelector selector; + + std::string programName = ""; + std::string songArtist = ""; + std::string songTitle = ""; + + operator ProgramInfo() const; + + /** + * Defines order in which virtual programs appear on the "air" with + * ITunerSession::scan(). + * + * It's for default implementation purposes, may not be complete or correct. + */ + friend bool operator<(const VirtualProgram& lhs, const VirtualProgram& rhs); +}; + +} // namespace implementation +} // namespace V2_0 +} // namespace broadcastradio +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_BROADCASTRADIO_V2_0_VIRTUALPROGRAM_H diff --git a/broadcastradio/2.0/default/VirtualRadio.cpp b/broadcastradio/2.0/default/VirtualRadio.cpp new file mode 100644 index 0000000000000000000000000000000000000000..f601d41637c08fb609db47b71485239af509bd6e --- /dev/null +++ b/broadcastradio/2.0/default/VirtualRadio.cpp @@ -0,0 +1,75 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#define LOG_TAG "BcRadioDef.VirtualRadio" +//#define LOG_NDEBUG 0 + +#include "VirtualRadio.h" + +#include +#include + +namespace android { +namespace hardware { +namespace broadcastradio { +namespace V2_0 { +namespace implementation { + +using std::lock_guard; +using std::move; +using std::mutex; +using std::vector; +using utils::make_selector_amfm; + +VirtualRadio gAmFmRadio( + "AM/FM radio mock", + { + {make_selector_amfm(94900), "Wild 94.9", "Drake ft. Rihanna", "Too Good"}, + {make_selector_amfm(96500), "KOIT", "Celine Dion", "All By Myself"}, + {make_selector_amfm(97300), "Alice@97.3", "Drops of Jupiter", "Train"}, + {make_selector_amfm(99700), "99.7 Now!", "The Chainsmokers", "Closer"}, + {make_selector_amfm(101300), "101-3 KISS-FM", "Justin Timberlake", "Rock Your Body"}, + {make_selector_amfm(103700), "iHeart80s @ 103.7", "Michael Jackson", "Billie Jean"}, + {make_selector_amfm(106100), "106 KMEL", "Drake", "Marvins Room"}, + }); + +VirtualRadio::VirtualRadio(const std::string& name, const vector& initialList) + : mName(name), mPrograms(initialList) {} + +std::string VirtualRadio::getName() const { + return mName; +} + +vector VirtualRadio::getProgramList() const { + lock_guard lk(mMut); + return mPrograms; +} + +bool VirtualRadio::getProgram(const ProgramSelector& selector, VirtualProgram& programOut) const { + lock_guard lk(mMut); + for (auto&& program : mPrograms) { + if (utils::tunesTo(selector, program.selector)) { + programOut = program; + return true; + } + } + return false; +} + +} // namespace implementation +} // namespace V2_0 +} // namespace broadcastradio +} // namespace hardware +} // namespace android diff --git a/broadcastradio/2.0/default/VirtualRadio.h b/broadcastradio/2.0/default/VirtualRadio.h new file mode 100644 index 0000000000000000000000000000000000000000..9c07816b4bd6196102c622ba9b9a15fa2481e2b8 --- /dev/null +++ b/broadcastradio/2.0/default/VirtualRadio.h @@ -0,0 +1,61 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef ANDROID_HARDWARE_BROADCASTRADIO_V2_0_VIRTUALRADIO_H +#define ANDROID_HARDWARE_BROADCASTRADIO_V2_0_VIRTUALRADIO_H + +#include "VirtualProgram.h" + +#include +#include + +namespace android { +namespace hardware { +namespace broadcastradio { +namespace V2_0 { +namespace implementation { + +/** + * A radio frequency space mock. + * + * This represents all broadcast waves in the air for a given radio technology, + * not a captured station list in the radio tuner memory. 
+ * + * It's meant to abstract out radio content from default tuner implementation. + */ +class VirtualRadio { + public: + VirtualRadio(const std::string& name, const std::vector& initialList); + + std::string getName() const; + std::vector getProgramList() const; + bool getProgram(const ProgramSelector& selector, VirtualProgram& program) const; + + private: + mutable std::mutex mMut; + std::string mName; + std::vector mPrograms; +}; + +/** AM/FM virtual radio space. */ +extern VirtualRadio gAmFmRadio; + +} // namespace implementation +} // namespace V2_0 +} // namespace broadcastradio +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_BROADCASTRADIO_V2_0_VIRTUALRADIO_H diff --git a/broadcastradio/2.0/default/android.hardware.broadcastradio@2.0-service.rc b/broadcastradio/2.0/default/android.hardware.broadcastradio@2.0-service.rc new file mode 100644 index 0000000000000000000000000000000000000000..7d68b6cebb8e5cff94e0cbfcc93160be6390e282 --- /dev/null +++ b/broadcastradio/2.0/default/android.hardware.broadcastradio@2.0-service.rc @@ -0,0 +1,4 @@ +service broadcastradio-hal2 /vendor/bin/hw/android.hardware.broadcastradio@2.0-service + class hal + user audioserver + group audio diff --git a/broadcastradio/2.0/default/resources.h b/broadcastradio/2.0/default/resources.h new file mode 100644 index 0000000000000000000000000000000000000000..97360dd95cdd1127de41718e9b8e0fe0277da724 --- /dev/null +++ b/broadcastradio/2.0/default/resources.h @@ -0,0 +1,46 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
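As an illustrative aside (not part of the patch itself): the VirtualRadio/VirtualProgram mocks above are meant to be queried by the default tuner implementation. A minimal sketch of that lookup is shown below, assuming the headers and the utils::make_selector_amfm helper introduced by this change; the include path and function name here are illustrative only.

#include "VirtualRadio.h"
#include <broadcastradio-utils-2x/Utils.h>  // assumed path for the common 2.x utils

namespace V2_0 = android::hardware::broadcastradio::V2_0;
using V2_0::ProgramInfo;
using V2_0::implementation::VirtualProgram;
using V2_0::implementation::gAmFmRadio;
namespace utils = android::hardware::broadcastradio::utils;

// Look up one of the stations seeded into gAmFmRadio (94.9 MHz) and convert
// it to the ProgramInfo reported through the HAL callback.
bool tuneToDemoStation(ProgramInfo* infoOut) {
    auto sel = utils::make_selector_amfm(94900);
    VirtualProgram program;
    if (!gAmFmRadio.getProgram(sel, program)) return false;  // not on the mock "air"
    *infoOut = program;  // uses VirtualProgram::operator ProgramInfo()
    return true;
}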
+ */ +#ifndef ANDROID_HARDWARE_BROADCASTRADIO_V2_0_RESOURCES_H +#define ANDROID_HARDWARE_BROADCASTRADIO_V2_0_RESOURCES_H + +namespace android { +namespace hardware { +namespace broadcastradio { +namespace V2_0 { +namespace implementation { +namespace resources { + +constexpr int32_t demoPngId = 123456; +constexpr uint8_t demoPng[] = { + 0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a, 0x00, 0x00, 0x00, 0x0d, 0x49, 0x48, 0x44, + 0x52, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x40, 0x08, 0x02, 0x00, 0x00, 0x00, 0x25, + 0x0b, 0xe6, 0x89, 0x00, 0x00, 0x00, 0x5d, 0x49, 0x44, 0x41, 0x54, 0x68, 0xde, 0xed, 0xd9, + 0xc1, 0x09, 0x00, 0x30, 0x08, 0x04, 0xc1, 0x33, 0xfd, 0xf7, 0x6c, 0x6a, 0xc8, 0x23, 0x04, + 0xc9, 0x6c, 0x01, 0xc2, 0x20, 0xbe, 0x4c, 0x86, 0x57, 0x49, 0xba, 0xfb, 0xd6, 0xf4, 0xba, + 0x3e, 0x7f, 0x4d, 0xdf, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xc0, 0x8f, 0x00, 0xbd, 0xce, 0x7f, + 0xc0, 0x11, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xe8, 0xb8, 0x0d, 0x32, 0xd4, 0x0c, 0x77, 0xbd, + 0xfb, 0xc1, 0xce, 0x00, 0x00, 0x00, 0x00, 0x49, 0x45, 0x4e, 0x44, 0xae, 0x42, 0x60, 0x82}; + +} // namespace resources +} // namespace implementation +} // namespace V2_0 +} // namespace broadcastradio +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_BROADCASTRADIO_V2_0_RESOURCES_H diff --git a/wifi/1.1/default/service.cpp b/broadcastradio/2.0/default/service.cpp similarity index 50% rename from wifi/1.1/default/service.cpp rename to broadcastradio/2.0/default/service.cpp index b4aed6c199d45f67c58ea50aaa6b9ad162615cbc..7e677a13237a67481d0e65c042431f7447355d5e 100644 --- a/wifi/1.1/default/service.cpp +++ b/broadcastradio/2.0/default/service.cpp @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016 The Android Open Source Project + * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -13,32 +13,26 @@ * See the License for the specific language governing permissions and * limitations under the License. 
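An illustrative aside, not part of the patch: the demoPngId above is the value the default implementation places into STATION_ICON/ALBUM_ART metadata, and clients resolve it to the PNG bytes through IBroadcastRadio::getImage. A minimal sketch of that lookup follows; the helper name is made up, and the callback shape mirrors the VTS test later in this change.

#include <android/hardware/broadcastradio/2.0/IBroadcastRadio.h>

#include <cstdint>
#include <vector>

using android::sp;
using android::hardware::hidl_vec;
using android::hardware::broadcastradio::V2_0::Constants;
using android::hardware::broadcastradio::V2_0::IBroadcastRadio;

// Resolve a STATION_ICON / ALBUM_ART metadata value to raw PNG bytes.
std::vector<uint8_t> fetchImage(const sp<IBroadcastRadio>& module, uint32_t imageId) {
    std::vector<uint8_t> image;
    if (imageId == static_cast<uint32_t>(Constants::INVALID_IMAGE)) return image;
    module->getImage(imageId, [&](hidl_vec<uint8_t> raw) {
        image.assign(raw.begin(), raw.end());
    });
    return image;
}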
*/ +#define LOG_TAG "BcRadioDef.service" #include #include -#include -#include -#include "wifi.h" +#include "BroadcastRadio.h" +#include "VirtualRadio.h" using android::hardware::configureRpcThreadpool; using android::hardware::joinRpcThreadpool; +using android::hardware::broadcastradio::V2_0::implementation::BroadcastRadio; +using android::hardware::broadcastradio::V2_0::implementation::gAmFmRadio; -int main(int /*argc*/, char** argv) { - android::base::InitLogging(argv, - android::base::LogdLogger(android::base::SYSTEM)); - LOG(INFO) << "Wifi Hal is booting up..."; +int main(int /* argc */, char** /* argv */) { + configureRpcThreadpool(4, true); - configureRpcThreadpool(1, true /* callerWillJoin */); + BroadcastRadio broadcastRadio(gAmFmRadio); + auto status = broadcastRadio.registerAsService(); + CHECK_EQ(status, android::OK) << "Failed to register Broadcast Radio HAL implementation"; - // Setup hwbinder service - android::sp service = - new android::hardware::wifi::V1_1::implementation::Wifi(); - CHECK_EQ(service->registerAsService(), android::NO_ERROR) - << "Failed to register wifi HAL"; - - joinRpcThreadpool(); - - LOG(INFO) << "Wifi Hal is terminating..."; - return 0; + joinRpcThreadpool(); + return 1; // joinRpcThreadpool shouldn't exit } diff --git a/broadcastradio/2.0/types.hal b/broadcastradio/2.0/types.hal new file mode 100644 index 0000000000000000000000000000000000000000..987572a4dcbff2cd4917503087664139fe8e27f1 --- /dev/null +++ b/broadcastradio/2.0/types.hal @@ -0,0 +1,870 @@ +/* Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.broadcastradio@2.0; + +/** Constants used by broadcast radio HAL. */ +enum Constants : int32_t { + /** Invalid identifier for IBroadcastRadio::getImage. */ + INVALID_IMAGE = 0, + + /** + * If the antenna is disconnected from the beginning, the + * onAntennaStateChange callback must be called within this time. + */ + ANTENNA_DISCONNECTED_TIMEOUT_MS = 100, + + LIST_COMPLETE_TIMEOUT_MS = 300000, +}; + +enum Result : int32_t { + OK, + UNKNOWN_ERROR, + INTERNAL_ERROR, + INVALID_ARGUMENTS, + INVALID_STATE, + NOT_SUPPORTED, + TIMEOUT, +}; + +/** + * Configuration flags to be used with isConfigFlagSet and setConfigFlag methods + * of ITunerSession. + */ +enum ConfigFlag : uint32_t { + /** + * Forces mono audio stream reception. + * + * Analog broadcasts can recover poor reception conditions by jointing + * stereo channels into one. Mainly for, but not limited to AM/FM. + */ + FORCE_MONO = 1, + + /** + * Forces the analog playback for the supporting radio technology. + * + * User may disable digital playback for FM HD Radio or hybrid FM/DAB with + * this option. This is purely user choice, ie. does not reflect digital- + * analog handover state managed from the HAL implementation side. + * + * Some radio technologies may not support this, ie. DAB. + */ + FORCE_ANALOG, + + /** + * Forces the digital playback for the supporting radio technology. 
+ * + * User may disable digital-analog handover that happens with poor + * reception conditions. With digital forced, the radio will remain silent + * instead of switching to analog channel if it's available. This is purely + * user choice, it does not reflect the actual state of handover. + */ + FORCE_DIGITAL, + + /** + * RDS Alternative Frequencies. + * + * If set and the currently tuned RDS station broadcasts on multiple + * channels, radio tuner automatically switches to the best available + * alternative. + */ + RDS_AF, + + /** + * RDS region-specific program lock-down. + * + * Allows user to lock to the current region as they move into the + * other region. + */ + RDS_REG, + + /** Enables DAB-DAB hard- and implicit-linking (the same content). */ + DAB_DAB_LINKING, + + /** Enables DAB-FM hard- and implicit-linking (the same content). */ + DAB_FM_LINKING, + + /** Enables DAB-DAB soft-linking (related content). */ + DAB_DAB_SOFT_LINKING, + + /** Enables DAB-FM soft-linking (related content). */ + DAB_FM_SOFT_LINKING, +}; + +/** + * A key-value pair for vendor-specific information to be passed as-is through + * Android framework to the front-end application. + */ +struct VendorKeyValue { + /** + * Key must start with unique vendor Java-style namespace, + * eg. 'com.somecompany.parameter1'. + */ + string key; + + /** + * Value must be passed through the framework without any changes. + * Format of this string can vary across vendors. + */ + string value; +}; + +/** + * A supported or configured RDS variant. + * + * Both might be set for hardware capabilities check (with full=true when + * calling getAmFmRegionConfig), but only one (or none) for specific + * region settings. + */ +enum Rds : uint8_t { + /** Standard variant, used everywhere except North America. */ + RDS = 1 << 0, + + /** Variant used in North America. */ + RBDS = 1 << 1, +}; + +/** + * FM de-emphasis filter supported or configured. + * + * Both might be set for hardware capabilities check (with full=true when + * calling getAmFmRegionConfig), but exactly one for specific region settings. + */ +enum Deemphasis : uint8_t { + D50 = 1 << 0, + D75 = 1 << 1, +}; + +/** + * Regional configuration for AM/FM. + * + * For hardware capabilities check (with full=true when calling + * getAmFmRegionConfig), HAL implementation fills entire supported range of + * frequencies and features. + * + * When checking current configuration, at most one bit in each bitfield + * can be set. + */ +struct AmFmRegionConfig { + /** + * All supported or configured AM/FM bands. + * + * AM/FM bands are identified by frequency value + * (see IdentifierType::AMFM_FREQUENCY). + * + * With typical configuration, it's expected to have two frequency ranges + * for capabilities check (AM and FM) and four ranges for specific region + * configuration (AM LW, AM MW, AM SW, FM). + */ + vec ranges; + + /** De-emphasis filter supported/configured. */ + bitfield fmDeemphasis; + + /** RDS/RBDS variant supported/configured. */ + bitfield fmRds; +}; + +/** + * AM/FM band range for region configuration. + * + * Defines channel grid: each possible channel is set at + * lowerBound + channelNumber * spacing, up to upperBound. + */ +struct AmFmBandRange { + /** The frequency (in kHz) of the first channel within the range. */ + uint32_t lowerBound; + + /** The frequency (in kHz) of the last channel within the range. */ + uint32_t upperBound; + + /** Channel grid resolution (in kHz), how far apart are the channels. 
*/ + uint32_t spacing; + + /** + * Channel spacing (in kHz) used to speed up seeking to the next station + * via the ITunerSession::scan() operation. + * + * It must be a multiple of channel grid resolution. + * + * Tuner may first quickly check every n-th channel and if it detects echo + * from a station, it fine-tunes to find the exact frequency. + * + * It's ignored for capabilities check (with full=true when calling + * getAmFmRegionConfig). + */ + uint32_t scanSpacing; +}; + +/** + * An entry in regional configuration for DAB. + * + * Defines a frequency table row for ensembles. + */ +struct DabTableEntry { + /** + * Channel name, i.e. 5A, 7B. + * + * It must match the following regular expression: + * /^[A-Z0-9][A-Z0-9 ]{0,5}[A-Z0-9]$/ (2-7 uppercase alphanumeric characters + * without spaces allowed at the beginning nor end). + */ + string label; + + /** Frequency, in kHz. */ + uint32_t frequency; +}; + +/** + * Properties of a given broadcast radio module. + */ +struct Properties { + /** + * A company name who made the radio module. Must be a valid, registered + * name of the company itself. + * + * It must be opaque to the Android framework. + */ + string maker; + + /** + * A product name. Must be unique within the company. + * + * It must be opaque to the Android framework. + */ + string product; + + /** + * Version of the hardware module. + * + * It must be opaque to the Android framework. + */ + string version; + + /** + * Hardware serial number (for subscription services). + * + * It must be opaque to the Android framework. + */ + string serial; + + /** + * A list of supported IdentifierType values. + * + * If an identifier is supported by radio module, it means it can use it for + * tuning to ProgramSelector with either primary or secondary Identifier of + * a given type. + * + * Support for VENDOR identifier type does not guarantee compatibility, as + * other module properties (implementor, product, version) must be checked. + */ + vec supportedIdentifierTypes; + + /** + * Vendor-specific information. + * + * It may be used for extra features, not supported by the platform, + * for example: com.me.preset-slots=6; com.me.ultra-hd-capable=false. + */ + vec vendorInfo; +}; + +/** + * Program (channel, station) information. + * + * Carries both user-visible information (like station name) and technical + * details (tuning selector). + */ +struct ProgramInfo { + /** + * An identifier used to point at the program (primarily to tune to it). + * + * This field is required - its type field must not be set to + * IdentifierType::INVALID. + */ + ProgramSelector selector; + + /** + * Identifier currently used for program selection. + * + * It allows to determine which technology is currently used for reception. + * + * Some program selectors contain tuning information for different radio + * technologies (i.e. FM RDS and DAB). For example, user may tune using + * a ProgramSelector with RDS_PI primary identifier, but the tuner hardware + * may choose to use DAB technology to make actual tuning. This identifier + * must reflect that. + * + * This field is required for currently tuned program only. + * For all other items on the program list, its type field must be + * initialized to IdentifierType::INVALID. 
+ * + * Only primary identifiers for a given radio technology are valid: + * - AMFM_FREQUENCY for analog AM/FM; + * - RDS_PI for FM RDS; + * - HD_STATION_ID_EXT; + * - DAB_SID_EXT; + * - DRMO_SERVICE_ID; + * - SXM_SERVICE_ID; + * - VENDOR_*; + * - more might come in next minor versions of this HAL. + */ + ProgramIdentifier logicallyTunedTo; + + /** + * Identifier currently used by hardware to physically tune to a channel. + * + * Some radio technologies broadcast the same program on multiple channels, + * i.e. with RDS AF the same program may be broadcasted on multiple + * alternative frequencies; the same DAB program may be broadcast on + * multiple ensembles. This identifier points to the channel to which the + * radio hardware is physically tuned to. + * + * This field is required for currently tuned program only. + * For all other items on the program list, its type field must be + * initialized to IdentifierType::INVALID. + * + * Only physical identifiers are valid: + * - AMFM_FREQUENCY; + * - DAB_ENSEMBLE; + * - DRMO_FREQUENCY; + * - SXM_CHANNEL; + * - VENDOR_*; + * - more might come in next minor versions of this HAL. + */ + ProgramIdentifier physicallyTunedTo; + + /** + * Primary identifiers of related contents. + * + * Some radio technologies provide pointers to other programs that carry + * related content (i.e. DAB soft-links). This field is a list of pointers + * to other programs on the program list. + * + * This is not a list of programs that carry the same content (i.e. + * DAB hard-links, RDS AF). Switching to programs from this list usually + * require user action. + * + * Please note, that these identifiers do not have to exist on the program + * list - i.e. DAB tuner may provide information on FM RDS alternatives + * despite not supporting FM RDS. If the system has multiple tuners, another + * one may have it on its list. + * + * This field is optional (can be empty). + */ + vec relatedContent; + + bitfield infoFlags; + + /** + * Signal quality measured in 0% to 100% range to be shown in the UI. + * + * The purpose of this field is primarily informative, must not be used to + * determine to which frequency should it tune to. + */ + uint32_t signalQuality; + + /** + * Program metadata (station name, PTY, song title). + */ + vec metadata; + + /** + * Vendor-specific information. + * + * It may be used for extra features, not supported by the platform, + * for example: paid-service=true; bitrate=320kbps. + */ + vec vendorInfo; +}; + +enum ProgramInfoFlags : uint32_t { + /** + * Set when the program is currently playing live stream. + * This may result in a slightly altered reception parameters, + * usually targetted at reduced latency. + */ + LIVE = 1 << 0, + + /** + * Radio stream is not playing, ie. due to bad reception conditions or + * buffering. In this state volume knob MAY be disabled to prevent user + * increasing volume too much. + */ + MUTED = 1 << 1, + + /** + * Station broadcasts traffic information regularly, + * but not necessarily right now. + */ + TRAFFIC_PROGRAM = 1 << 2, + + /** + * Station is broadcasting traffic information at the very moment. + */ + TRAFFIC_ANNOUNCEMENT = 1 << 3, + + /** + * Tuned to a program (not playing static). + * + * It's the same condition that would stop a seek operation + * (ie: ITunerSession::scan()). + * + * By definition, this flag must be set for all items on the program list. + */ + TUNED = 1 << 4, + + /** + * Audio stream is MONO if this bit is not set. 
+ */ + STEREO = 1 << 5, +}; + +/** + * Type of program identifier component. + * + * Each identifier type corresponds to exactly one radio technology, + * i.e. DAB_ENSEMBLE is specifically for DAB. + * + * VENDOR identifier types must be opaque to the framework. + * + * The value format for each (but VENDOR_*) identifier is strictly defined + * to maintain interoperability between devices made by different vendors. + * + * All other values are reserved for future use. + * Values not matching any enumerated constant must be ignored. + */ +enum IdentifierType : uint32_t { + /** + * Primary/secondary identifier for vendor-specific radio technology. + * The value format is determined by a vendor. + * + * The vendor identifiers have limited serialization capabilities - see + * ProgramSelector description. + */ + VENDOR_START = 1000, + + /** See VENDOR_START */ + VENDOR_END = 1999, + + INVALID = 0, + + /** + * Primary identifier for analogue (without RDS) AM/FM stations: + * frequency in kHz. + * + * This identifier also contains band information: + * - <500kHz: AM LW; + * - 500kHz - 1705kHz: AM MW; + * - 1.71MHz - 30MHz: AM SW; + * - >60MHz: FM. + */ + AMFM_FREQUENCY, + + /** + * 16bit primary identifier for FM RDS station. + */ + RDS_PI, + + /** + * 64bit compound primary identifier for HD Radio. + * + * Consists of (from the LSB): + * - 32bit: Station ID number; + * - 4bit: HD Radio subchannel; + * - 18bit: AMFM_FREQUENCY. + * + * While station ID number should be unique globally, it sometimes get + * abused by broadcasters (i.e. not being set at all). To ensure local + * uniqueness, AMFM_FREQUENCY was added here. Global uniqueness is + * a best-effort - see HD_STATION_NAME. + * + * HD Radio subchannel is a value in range 0-7. + * This index is 0-based (where 0 is MPS and 1..7 are SPS), + * as opposed to HD Radio standard (where it's 1-based). + * + * The remaining bits should be set to zeros when writing on the chip side + * and ignored when read. + */ + HD_STATION_ID_EXT, + + /** + * 64bit additional identifier for HD Radio. + * + * Due to Station ID abuse, some HD_STATION_ID_EXT identifiers may be not + * globally unique. To provide a best-effort solution, a short version of + * station name may be carried as additional identifier and may be used + * by the tuner hardware to double-check tuning. + * + * The name is limited to the first 8 A-Z0-9 characters (lowercase letters + * must be converted to uppercase). Encoded in little-endian ASCII: + * the first character of the name is the LSB. + * + * For example: "Abc" is encoded as 0x434241. + */ + HD_STATION_NAME, + + /** + * 28bit compound primary identifier for Digital Audio Broadcasting. + * + * Consists of (from the LSB): + * - 16bit: SId; + * - 8bit: ECC code; + * - 4bit: SCIdS. + * + * SCIdS (Service Component Identifier within the Service) value + * of 0 represents the main service, while 1 and above represents + * secondary services. + * + * The remaining bits should be set to zeros when writing on the chip side + * and ignored when read. + */ + DAB_SID_EXT, + + /** 16bit */ + DAB_ENSEMBLE, + + /** 12bit */ + DAB_SCID, + + /** kHz (see AMFM_FREQUENCY) */ + DAB_FREQUENCY, + + /** + * 24bit primary identifier for Digital Radio Mondiale. + */ + DRMO_SERVICE_ID, + + /** kHz (see AMFM_FREQUENCY) */ + DRMO_FREQUENCY, + + /** + * 32bit primary identifier for SiriusXM Satellite Radio. + */ + SXM_SERVICE_ID = DRMO_FREQUENCY + 2, + + /** 0-999 range */ + SXM_CHANNEL, +}; + +/** + * A single program identifier component, i.e. 
frequency or channel ID. + */ +struct ProgramIdentifier { + /** + * Maps to IdentifierType enum. The enum may be extended in future versions + * of the HAL. Values out of the enum range must not be used when writing + * and ignored when reading. + */ + uint32_t type; + + /** + * The uint64_t value field holds the value in format described in comments + * for IdentifierType enum. + */ + uint64_t value; +}; + +/** + * A set of identifiers necessary to tune to a given station. + * + * This can hold a combination of various identifiers, like: + * - AM/FM frequency, + * - HD Radio subchannel, + * - DAB service ID. + * + * The type of radio technology is determined by the primary identifier - if the + * primary identifier is for DAB, the program is DAB. However, a program of a + * specific radio technology may have additional secondary identifiers for other + * technologies, i.e. a satellite program may have FM fallback frequency, + * if a station broadcasts both via satellite and FM. + * + * The identifiers from VENDOR_START..VENDOR_END range have limited + * serialization capabilities: they are serialized locally, but ignored by the + * cloud services. If a program has primary id from vendor range, it's not + * synchronized with other devices at all. + */ +struct ProgramSelector { + /** + * Primary program identifier. + * + * This identifier uniquely identifies a station and can be used for + * equality check. + * + * It can hold only a subset of identifier types, one per each + * radio technology: + * - analogue AM/FM: AMFM_FREQUENCY; + * - FM RDS: RDS_PI; + * - HD Radio: HD_STATION_ID_EXT; + * - DAB: DAB_SID_EXT; + * - Digital Radio Mondiale: DRMO_SERVICE_ID; + * - SiriusXM: SXM_SERVICE_ID; + * - vendor-specific: VENDOR_START..VENDOR_END. + * + * The list may change in future versions, so the implementation must obey, + * but not rely on it. + */ + ProgramIdentifier primaryId; + + /** + * Secondary program identifiers. + * + * These identifiers are supplementary and can speed up tuning process, + * but the primary ID must be sufficient (i.e. RDS PI is enough to select + * a station from the list after a full band scan). + * + * Two selectors with different secondary IDs, but the same primary ID are + * considered equal. In particular, secondary IDs vector may get updated for + * an entry on the program list (ie. when a better frequency for a given + * station is found). + */ + vec secondaryIds; +}; + +enum MetadataKey : int32_t { + /** RDS PS (string) */ + RDS_PS = 1, + + /** RDS PTY (uint8_t) */ + RDS_PTY, + + /** RBDS PTY (uint8_t) */ + RBDS_PTY, + + /** RDS RT (string) */ + RDS_RT, + + /** Song title (string) */ + SONG_TITLE, + + /** Artist name (string) */ + SONG_ARTIST, + + /** Album name (string) */ + SONG_ALBUM, + + /** Station icon (uint32_t, see IBroadcastRadio::getImage) */ + STATION_ICON, + + /** Album art (uint32_t, see IBroadcastRadio::getImage) */ + ALBUM_ART, + + /** + * Station name. + * + * This is a generic field to cover any radio technology. + * + * If the PROGRAM_NAME has the same content as DAB_*_NAME or RDS_PS, + * it may not be present, to preserve space - framework must repopulate + * it on the client side. + */ + PROGRAM_NAME, + + /** DAB ensemble name (string) */ + DAB_ENSEMBLE_NAME, + + /** + * DAB ensemble name abbreviated (string). + * + * The string must be up to 8 characters long. + * + * If the short variant is present, the long (DAB_ENSEMBLE_NAME) one must be + * present as well. 
+ */ + DAB_ENSEMBLE_NAME_SHORT, + + /** DAB service name (string) */ + DAB_SERVICE_NAME, + + /** DAB service name abbreviated (see DAB_ENSEMBLE_NAME_SHORT) (string) */ + DAB_SERVICE_NAME_SHORT, + + /** DAB component name (string) */ + DAB_COMPONENT_NAME, + + /** DAB component name abbreviated (see DAB_ENSEMBLE_NAME_SHORT) (string) */ + DAB_COMPONENT_NAME_SHORT, +}; + +/** + * An element of metadata vector. + * + * Contains one of the entries explained in MetadataKey. + * + * Depending on a type described in the comment for a specific key, either the + * intValue or stringValue field must be populated. + */ +struct Metadata { + /** + * Maps to MetadataKey enum. The enum may be extended in future versions + * of the HAL. Values out of the enum range must not be used when writing + * and ignored when reading. + */ + uint32_t key; + + int64_t intValue; + string stringValue; +}; + +/** + * An update packet of the program list. + * + * The order of entries in the vectors is unspecified. + */ +struct ProgramListChunk { + /** + * Treats all previously added entries as removed. + * + * This is meant to save binder transaction bandwidth on 'removed' vector + * and provide a clear empty state. + * + * If set, 'removed' vector must be empty. + * + * The client may wait with taking action on this until it received the + * chunk with complete flag set (to avoid part of stations temporarily + * disappearing from the list). + */ + bool purge; + + /** + * If false, it means there are still programs not transmitted, + * due for transmission in following updates. + * + * Used by UIs that wait for complete list instead of displaying + * programs while scanning. + * + * After the whole channel range was scanned and all discovered programs + * were transmitted, the last chunk must have set this flag to true. + * This must happen within Constants::LIST_COMPLETE_TIMEOUT_MS from the + * startProgramListUpdates call. If it doesn't, client may assume the tuner + * came into a bad state and display error message. + */ + bool complete; + + /** + * Added or modified program list entries. + * + * Two entries with the same primaryId (ProgramSelector member) + * are considered the same. + */ + vec modified; + + /** + * Removed program list entries. + * + * Contains primaryId (ProgramSelector member) of a program to remove. + */ + vec removed; +}; + +/** + * Large-grain filter to the program list. + * + * This is meant to reduce binder transaction bandwidth, not for fine-grained + * filtering user might expect. + * + * The filter is designed as conjunctive normal form: the entry that passes the + * filter must satisfy all the clauses (members of this struct). Vector clauses + * are disjunctions of literals. In other words, there is AND between each + * high-level group and OR inside it. + */ +struct ProgramFilter { + /** + * List of identifier types that satisfy the filter. + * + * If the program list entry contains at least one identifier of the type + * listed, it satisfies this condition. + * + * Empty list means no filtering on identifier type. + */ + vec identifierTypes; + + /** + * List of identifiers that satisfy the filter. + * + * If the program list entry contains at least one listed identifier, + * it satisfies this condition. + * + * Empty list means no filtering on identifier. + */ + vec identifiers; + + /** + * Includes non-tunable entries that define tree structure on the + * program list (i.e. DAB ensembles). + */ + bool includeCategories; + + /** + * Disable updates on entry modifications. 
+ * + * If true, 'modified' vector of ProgramListChunk must contain list + * additions only. Once the program is added to the list, it's not + * updated anymore. + */ + bool excludeModifications; +}; + +/** + * Type of an announcement. + * + * It maps to different announcement types per each radio technology. + */ +enum AnnouncementType : uint8_t { + /** DAB alarm, RDS emergency program type (PTY 31). */ + EMERGENCY = 1, + + /** DAB warning. */ + WARNING, + + /** DAB road traffic, RDS TA, HD Radio transportation. */ + TRAFFIC, + + /** Weather. */ + WEATHER, + + /** News. */ + NEWS, + + /** DAB event, special event. */ + EVENT, + + /** DAB sport report, RDS sports. */ + SPORT, + + /** All others. */ + MISC, +}; + +/** + * A pointer to a station broadcasting active announcement. + */ +struct Announcement { + /** + * Program selector to tune to the announcement. + */ + ProgramSelector selector; + + /** Announcement type. */ + AnnouncementType type; + + /** + * Vendor-specific information. + * + * It may be used for extra features, not supported by the platform, + * for example: com.me.hdradio.urgency=100; com.me.hdradio.certainity=50. + */ + vec vendorInfo; +}; diff --git a/broadcastradio/1.1/tests/OWNERS b/broadcastradio/2.0/vts/OWNERS similarity index 65% rename from broadcastradio/1.1/tests/OWNERS rename to broadcastradio/2.0/vts/OWNERS index aa5ce82e16e84ffe86658b59b6c50e7aed8212cd..12adf5762be84427805e02ff0978da59a5873c83 100644 --- a/broadcastradio/1.1/tests/OWNERS +++ b/broadcastradio/2.0/vts/OWNERS @@ -1,8 +1,7 @@ # Automotive team egranata@google.com -keunyoung@google.com twasilczyk@google.com # VTS team -ryanjcampbell@google.com +yuexima@google.com yim@google.com diff --git a/broadcastradio/2.0/vts/functional/Android.bp b/broadcastradio/2.0/vts/functional/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..6940bca49e059d36376db7da8bf0461abe577abb --- /dev/null +++ b/broadcastradio/2.0/vts/functional/Android.bp @@ -0,0 +1,31 @@ +// +// Copyright (C) 2017 The Android Open Source Project +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
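As an editorial aside (not part of the patch): the ProgramFilter defined above uses conjunctive-normal-form semantics, so a client that only wants FM RDS entries and no category nodes could fill it as sketched below. The field types are assumed to follow the generated C++ types, with identifierTypes holding IdentifierType values cast to uint32_t.

using android::hardware::hidl_vec;
using android::hardware::broadcastradio::V2_0::IdentifierType;
using android::hardware::broadcastradio::V2_0::ProgramFilter;

// Accept only entries carrying an RDS_PI identifier; skip tree/category
// entries and per-entry updates after the initial addition.
ProgramFilter makeRdsOnlyFilter() {
    ProgramFilter filter = {};
    filter.identifierTypes = hidl_vec<uint32_t>({
        static_cast<uint32_t>(IdentifierType::RDS_PI),
    });
    // filter.identifiers stays empty: no filtering on specific stations.
    filter.includeCategories = false;    // drop non-tunable entries (e.g. DAB ensembles)
    filter.excludeModifications = true;  // additions only, no later updates
    return filter;
}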
+// + +cc_test { + name: "VtsHalBroadcastradioV2_0TargetTest", + defaults: ["VtsHalTargetTestDefaults"], + cppflags: [ + "-std=c++1z", + ], + srcs: ["VtsHalBroadcastradioV2_0TargetTest.cpp"], + static_libs: [ + "android.hardware.broadcastradio@2.0", + "android.hardware.broadcastradio@common-utils-2x-lib", + "android.hardware.broadcastradio@vts-utils-lib", + "android.hardware.broadcastradio@vts-utils-lib", + "libgmock", + ], +} diff --git a/broadcastradio/2.0/vts/functional/VtsHalBroadcastradioV2_0TargetTest.cpp b/broadcastradio/2.0/vts/functional/VtsHalBroadcastradioV2_0TargetTest.cpp new file mode 100644 index 0000000000000000000000000000000000000000..3d7039dc9bccf95d9a12b1344b183f4b1fd45f9a --- /dev/null +++ b/broadcastradio/2.0/vts/functional/VtsHalBroadcastradioV2_0TargetTest.cpp @@ -0,0 +1,833 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "BcRadio.vts" +#define LOG_NDEBUG 0 +#define EGMOCK_VERBOSE 1 + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +namespace android { +namespace hardware { +namespace broadcastradio { +namespace V2_0 { +namespace vts { + +using namespace std::chrono_literals; + +using std::unordered_set; +using std::vector; +using testing::_; +using testing::AnyNumber; +using testing::ByMove; +using testing::DoAll; +using testing::Invoke; +using testing::SaveArg; + +using broadcastradio::vts::BroadcastRadioHidlEnvironment; +using broadcastradio::vts::CallBarrier; +using broadcastradio::vts::clearAndWait; +using utils::make_identifier; +using utils::make_selector_amfm; + +namespace timeout { + +static constexpr auto tune = 30s; +static constexpr auto programListScan = 5min; + +} // namespace timeout + +static constexpr auto gTuneWorkaround = 200ms; + +static const ConfigFlag gConfigFlagValues[] = { + ConfigFlag::FORCE_MONO, + ConfigFlag::FORCE_ANALOG, + ConfigFlag::FORCE_DIGITAL, + ConfigFlag::RDS_AF, + ConfigFlag::RDS_REG, + ConfigFlag::DAB_DAB_LINKING, + ConfigFlag::DAB_FM_LINKING, + ConfigFlag::DAB_DAB_SOFT_LINKING, + ConfigFlag::DAB_FM_SOFT_LINKING, +}; + +class TunerCallbackMock : public ITunerCallback { + public: + TunerCallbackMock(); + + MOCK_METHOD2(onTuneFailed, Return(Result, const ProgramSelector&)); + MOCK_TIMEOUT_METHOD1(onCurrentProgramInfoChanged_, Return(const ProgramInfo&)); + virtual Return onCurrentProgramInfoChanged(const ProgramInfo& info); + Return onProgramListUpdated(const ProgramListChunk& chunk); + MOCK_METHOD1(onAntennaStateChange, Return(bool connected)); + MOCK_METHOD1(onParametersUpdated, Return(const hidl_vec& parameters)); + + MOCK_TIMEOUT_METHOD0(onProgramListReady, void()); + + std::mutex mLock; + utils::ProgramInfoSet mProgramList; +}; + +struct AnnouncementListenerMock : public IAnnouncementListener { + MOCK_METHOD1(onListUpdated, Return(const hidl_vec&)); +}; + +static BroadcastRadioHidlEnvironment* gEnv = 
nullptr; + +class BroadcastRadioHalTest : public ::testing::VtsHalHidlTargetTestBase { + protected: + virtual void SetUp() override; + virtual void TearDown() override; + + bool openSession(); + bool getAmFmRegionConfig(bool full, AmFmRegionConfig* config); + std::optional getProgramList(); + + sp mModule; + Properties mProperties; + sp mSession; + sp mCallback = new TunerCallbackMock(); +}; + +static void printSkipped(std::string msg) { + std::cout << "[ SKIPPED ] " << msg << std::endl; +} + +MATCHER_P(InfoHasId, id, + std::string(negation ? "does not contain" : "contains") + " " + toString(id)) { + auto ids = utils::getAllIds(arg.selector, utils::getType(id)); + return ids.end() != find(ids.begin(), ids.end(), id.value); +} + +TunerCallbackMock::TunerCallbackMock() { + EXPECT_TIMEOUT_CALL(*this, onCurrentProgramInfoChanged_, _).Times(AnyNumber()); + + // we expect the antenna is connected through the whole test + EXPECT_CALL(*this, onAntennaStateChange(false)).Times(0); +} + +Return TunerCallbackMock::onCurrentProgramInfoChanged(const ProgramInfo& info) { + for (auto&& id : info.selector) { + EXPECT_NE(IdentifierType::INVALID, utils::getType(id)); + } + + auto logically = utils::getType(info.logicallyTunedTo); + /* This field is required for currently tuned program and should be INVALID + * for entries from the program list. + */ + EXPECT_TRUE( + logically == IdentifierType::AMFM_FREQUENCY || logically == IdentifierType::RDS_PI || + logically == IdentifierType::HD_STATION_ID_EXT || + logically == IdentifierType::DAB_SID_EXT || logically == IdentifierType::DRMO_SERVICE_ID || + logically == IdentifierType::SXM_SERVICE_ID || + (logically >= IdentifierType::VENDOR_START && logically <= IdentifierType::VENDOR_END) || + logically > IdentifierType::SXM_CHANNEL); + + auto physically = utils::getType(info.physicallyTunedTo); + // ditto (see "logically" above) + EXPECT_TRUE( + physically == IdentifierType::AMFM_FREQUENCY || + physically == IdentifierType::DAB_ENSEMBLE || + physically == IdentifierType::DRMO_FREQUENCY || physically == IdentifierType::SXM_CHANNEL || + (physically >= IdentifierType::VENDOR_START && physically <= IdentifierType::VENDOR_END) || + physically > IdentifierType::SXM_CHANNEL); + + if (logically == IdentifierType::AMFM_FREQUENCY) { + auto ps = utils::getMetadataString(info, MetadataKey::RDS_PS); + if (ps.has_value()) { + EXPECT_NE("", android::base::Trim(*ps)) + << "Don't use empty RDS_PS as an indicator of missing RSD PS data."; + } + } + + return onCurrentProgramInfoChanged_(info); +} + +Return TunerCallbackMock::onProgramListUpdated(const ProgramListChunk& chunk) { + std::lock_guard lk(mLock); + + updateProgramList(mProgramList, chunk); + + if (chunk.complete) onProgramListReady(); + + return {}; +} + +void BroadcastRadioHalTest::SetUp() { + EXPECT_EQ(nullptr, mModule.get()) << "Module is already open"; + + // lookup HIDL service (radio module) + mModule = getService(gEnv->getServiceName()); + ASSERT_NE(nullptr, mModule.get()) << "Couldn't find broadcast radio HAL implementation"; + + // get module properties + auto propResult = mModule->getProperties([&](const Properties& p) { mProperties = p; }); + ASSERT_TRUE(propResult.isOk()); + + EXPECT_FALSE(mProperties.maker.empty()); + EXPECT_FALSE(mProperties.product.empty()); + EXPECT_GT(mProperties.supportedIdentifierTypes.size(), 0u); +} + +void BroadcastRadioHalTest::TearDown() { + mSession.clear(); + mModule.clear(); + clearAndWait(mCallback, 1s); +} + +bool BroadcastRadioHalTest::openSession() { + EXPECT_EQ(nullptr, 
mSession.get()) << "Session is already open"; + + Result halResult = Result::UNKNOWN_ERROR; + auto openCb = [&](Result result, const sp& session) { + halResult = result; + if (result != Result::OK) return; + mSession = session; + }; + auto hidlResult = mModule->openSession(mCallback, openCb); + + EXPECT_TRUE(hidlResult.isOk()); + EXPECT_EQ(Result::OK, halResult); + EXPECT_NE(nullptr, mSession.get()); + + return nullptr != mSession.get(); +} + +bool BroadcastRadioHalTest::getAmFmRegionConfig(bool full, AmFmRegionConfig* config) { + auto halResult = Result::UNKNOWN_ERROR; + auto cb = [&](Result result, AmFmRegionConfig configCb) { + halResult = result; + if (config) *config = configCb; + }; + + auto hidlResult = mModule->getAmFmRegionConfig(full, cb); + EXPECT_TRUE(hidlResult.isOk()); + + if (halResult == Result::NOT_SUPPORTED) return false; + + EXPECT_EQ(Result::OK, halResult); + return halResult == Result::OK; +} + +std::optional BroadcastRadioHalTest::getProgramList() { + EXPECT_TIMEOUT_CALL(*mCallback, onProgramListReady).Times(AnyNumber()); + + auto startResult = mSession->startProgramListUpdates({}); + if (startResult == Result::NOT_SUPPORTED) { + printSkipped("Program list not supported"); + return nullopt; + } + EXPECT_EQ(Result::OK, startResult); + if (startResult != Result::OK) return nullopt; + + EXPECT_TIMEOUT_CALL_WAIT(*mCallback, onProgramListReady, timeout::programListScan); + + auto stopResult = mSession->stopProgramListUpdates(); + EXPECT_TRUE(stopResult.isOk()); + + return mCallback->mProgramList; +} + +/** + * Test session opening. + * + * Verifies that: + * - the method succeeds on a first and subsequent calls; + * - the method succeeds when called for the second time without + * closing previous session. + */ +TEST_F(BroadcastRadioHalTest, OpenSession) { + // simply open session for the first time + ASSERT_TRUE(openSession()); + + // drop (without explicit close) and re-open the session + mSession.clear(); + ASSERT_TRUE(openSession()); + + // open the second session (the first one should be forcibly closed) + auto secondSession = mSession; + mSession.clear(); + ASSERT_TRUE(openSession()); +} + +static bool isValidAmFmFreq(uint64_t freq) { + auto id = utils::make_identifier(IdentifierType::AMFM_FREQUENCY, freq); + return utils::isValid(id); +} + +static void validateRange(const AmFmBandRange& range) { + EXPECT_TRUE(isValidAmFmFreq(range.lowerBound)); + EXPECT_TRUE(isValidAmFmFreq(range.upperBound)); + EXPECT_LT(range.lowerBound, range.upperBound); + EXPECT_GT(range.spacing, 0u); + EXPECT_EQ(0u, (range.upperBound - range.lowerBound) % range.spacing); +} + +static bool supportsFM(const AmFmRegionConfig& config) { + for (auto&& range : config.ranges) { + if (utils::getBand(range.lowerBound) == utils::FrequencyBand::FM) return true; + } + return false; +} + +/** + * Test fetching AM/FM regional configuration. + * + * Verifies that: + * - AM/FM regional configuration is either set at startup or not supported at all by the hardware; + * - there is at least one AM/FM band configured; + * - FM Deemphasis and RDS are correctly configured for FM-capable radio; + * - all channel grids (frequency ranges and spacings) are valid; + * - seek spacing is a multiple of the manual spacing value. 
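An illustrative aside (not part of the patch): the channel-grid checks referred to here follow the AmFmBandRange documentation in types.hal, i.e. channels sit at lowerBound + n * spacing, up to upperBound. A small sketch of enumerating such a grid is given below; the example range values are an assumption, roughly a US FM band.

#include <vector>

using android::hardware::broadcastradio::V2_0::AmFmBandRange;

// Enumerate every channel of a band range, per the grid definition in types.hal.
std::vector<uint32_t> channelsOf(const AmFmBandRange& range) {
    std::vector<uint32_t> channels;
    if (range.spacing == 0) return channels;  // invalid grid, see validateRange()
    for (uint64_t freq = range.lowerBound; freq <= range.upperBound; freq += range.spacing) {
        channels.push_back(static_cast<uint32_t>(freq));
    }
    return channels;
}
// Example: a range of {87900, 107900, 200, 0} yields 87.9, 88.1, ..., 107.9 MHz
// (all values in kHz), 101 channels in total.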
+ */ +TEST_F(BroadcastRadioHalTest, GetAmFmRegionConfig) { + AmFmRegionConfig config; + bool supported = getAmFmRegionConfig(false, &config); + if (!supported) { + printSkipped("AM/FM not supported"); + return; + } + + EXPECT_GT(config.ranges.size(), 0u); + EXPECT_LE(popcountll(config.fmDeemphasis), 1); + EXPECT_LE(popcountll(config.fmRds), 1); + + for (auto&& range : config.ranges) { + validateRange(range); + EXPECT_EQ(0u, range.scanSpacing % range.spacing); + EXPECT_GE(range.scanSpacing, range.spacing); + } + + if (supportsFM(config)) { + EXPECT_EQ(popcountll(config.fmDeemphasis), 1); + } +} + +/** + * Test fetching AM/FM regional capabilities. + * + * Verifies that: + * - AM/FM regional capabilities are either available or not supported at all by the hardware; + * - there is at least one AM/FM range supported; + * - there is at least one de-emphasis filter mode supported for FM-capable radio; + * - all channel grids (frequency ranges and spacings) are valid; + * - seek spacing is not set. + */ +TEST_F(BroadcastRadioHalTest, GetAmFmRegionConfigCapabilities) { + AmFmRegionConfig config; + bool supported = getAmFmRegionConfig(true, &config); + if (!supported) { + printSkipped("AM/FM not supported"); + return; + } + + EXPECT_GT(config.ranges.size(), 0u); + + for (auto&& range : config.ranges) { + validateRange(range); + EXPECT_EQ(0u, range.scanSpacing); + } + + if (supportsFM(config)) { + EXPECT_GE(popcountll(config.fmDeemphasis), 1); + } +} + +/** + * Test fetching DAB regional configuration. + * + * Verifies that: + * - DAB regional configuration is either set at startup or not supported at all by the hardware; + * - all channel labels match correct format; + * - all channel frequencies are in correct range. + */ +TEST_F(BroadcastRadioHalTest, GetDabRegionConfig) { + Result halResult; + hidl_vec config; + auto cb = [&](Result result, hidl_vec configCb) { + halResult = result; + config = configCb; + }; + auto hidlResult = mModule->getDabRegionConfig(cb); + ASSERT_TRUE(hidlResult.isOk()); + + if (halResult == Result::NOT_SUPPORTED) { + printSkipped("DAB not supported"); + return; + } + ASSERT_EQ(Result::OK, halResult); + + std::regex re("^[A-Z0-9][A-Z0-9 ]{0,5}[A-Z0-9]$"); + // double-check correctness of the test + ASSERT_TRUE(std::regex_match("5A", re)); + ASSERT_FALSE(std::regex_match("5a", re)); + ASSERT_FALSE(std::regex_match("1234ABCD", re)); + ASSERT_TRUE(std::regex_match("CN 12D", re)); + ASSERT_FALSE(std::regex_match(" 5A", re)); + + for (auto&& entry : config) { + EXPECT_TRUE(std::regex_match(std::string(entry.label), re)); + + auto id = utils::make_identifier(IdentifierType::DAB_FREQUENCY, entry.frequency); + EXPECT_TRUE(utils::isValid(id)); + } +} + +/** + * Test tuning with FM selector. + * + * Verifies that: + * - if AM/FM selector is not supported, the method returns NOT_SUPPORTED; + * - if it is supported, the method succeeds; + * - after a successful tune call, onCurrentProgramInfoChanged callback is + * invoked carrying a proper selector; + * - program changes exactly to what was requested. + */ +TEST_F(BroadcastRadioHalTest, FmTune) { + ASSERT_TRUE(openSession()); + + uint64_t freq = 100100; // 100.1 FM + auto sel = make_selector_amfm(freq); + + /* TODO(b/69958777): there is a race condition between tune() and onCurrentProgramInfoChanged + * callback setting infoCb, because egmock cannot distinguish calls with different matchers + * (there is one here and one in callback constructor). 
+ * + * This sleep workaround will fix default implementation, but the real HW tests will still be + * flaky. We probably need to implement egmock alternative based on actions. + */ + std::this_thread::sleep_for(gTuneWorkaround); + + // try tuning + ProgramInfo infoCb = {}; + EXPECT_TIMEOUT_CALL(*mCallback, onCurrentProgramInfoChanged_, + InfoHasId(utils::make_identifier(IdentifierType::AMFM_FREQUENCY, freq))) + .Times(AnyNumber()) + .WillOnce(DoAll(SaveArg<0>(&infoCb), testing::Return(ByMove(Void())))); + auto result = mSession->tune(sel); + + // expect a failure if it's not supported + if (!utils::isSupported(mProperties, sel)) { + EXPECT_EQ(Result::NOT_SUPPORTED, result); + return; + } + + // expect a callback if it succeeds + EXPECT_EQ(Result::OK, result); + EXPECT_TIMEOUT_CALL_WAIT(*mCallback, onCurrentProgramInfoChanged_, timeout::tune); + + ALOGD("current program info: %s", toString(infoCb).c_str()); + + // it should tune exactly to what was requested + auto freqs = utils::getAllIds(infoCb.selector, IdentifierType::AMFM_FREQUENCY); + EXPECT_NE(freqs.end(), find(freqs.begin(), freqs.end(), freq)); +} + +/** + * Test tuning with invalid selectors. + * + * Verifies that: + * - if the selector is not supported, it's ignored; + * - if it is supported, an invalid value results with INVALID_ARGUMENTS; + */ +TEST_F(BroadcastRadioHalTest, TuneFailsWithInvalid) { + ASSERT_TRUE(openSession()); + + vector invalid = { + make_identifier(IdentifierType::AMFM_FREQUENCY, 0), + make_identifier(IdentifierType::RDS_PI, 0x10000), + make_identifier(IdentifierType::HD_STATION_ID_EXT, 0x100000000), + make_identifier(IdentifierType::DAB_SID_EXT, 0), + make_identifier(IdentifierType::DRMO_SERVICE_ID, 0x100000000), + make_identifier(IdentifierType::SXM_SERVICE_ID, 0x100000000), + }; + + for (auto&& id : invalid) { + ProgramSelector sel{id, {}}; + + auto result = mSession->tune(sel); + + if (utils::isSupported(mProperties, sel)) { + EXPECT_EQ(Result::INVALID_ARGUMENTS, result); + } else { + EXPECT_EQ(Result::NOT_SUPPORTED, result); + } + } +} + +/** + * Test tuning with empty program selector. + * + * Verifies that: + * - tune fails with NOT_SUPPORTED when program selector is not initialized. + */ +TEST_F(BroadcastRadioHalTest, TuneFailsWithEmpty) { + ASSERT_TRUE(openSession()); + + // Program type is 1-based, so 0 will always be invalid. + ProgramSelector sel = {}; + auto result = mSession->tune(sel); + ASSERT_EQ(Result::NOT_SUPPORTED, result); +} + +/** + * Test seeking to next/prev station via ITunerSession::scan(). + * + * Verifies that: + * - the method succeeds; + * - the program info is changed within timeout::tune; + * - works both directions and with or without skipping sub-channel. + */ +TEST_F(BroadcastRadioHalTest, Seek) { + ASSERT_TRUE(openSession()); + + // TODO(b/69958777): see FmTune workaround + std::this_thread::sleep_for(gTuneWorkaround); + + EXPECT_TIMEOUT_CALL(*mCallback, onCurrentProgramInfoChanged_, _); + auto result = mSession->scan(true /* up */, true /* skip subchannel */); + EXPECT_EQ(Result::OK, result); + EXPECT_TIMEOUT_CALL_WAIT(*mCallback, onCurrentProgramInfoChanged_, timeout::tune); + + EXPECT_TIMEOUT_CALL(*mCallback, onCurrentProgramInfoChanged_, _); + result = mSession->scan(false /* down */, false /* don't skip subchannel */); + EXPECT_EQ(Result::OK, result); + EXPECT_TIMEOUT_CALL_WAIT(*mCallback, onCurrentProgramInfoChanged_, timeout::tune); +} + +/** + * Test step operation. 
+ * + * Verifies that: + * - the method succeeds or returns NOT_SUPPORTED; + * - the program info is changed within timeout::tune if the method succeeded; + * - works both directions. + */ +TEST_F(BroadcastRadioHalTest, Step) { + ASSERT_TRUE(openSession()); + + // TODO(b/69958777): see FmTune workaround + std::this_thread::sleep_for(gTuneWorkaround); + + EXPECT_TIMEOUT_CALL(*mCallback, onCurrentProgramInfoChanged_, _).Times(AnyNumber()); + auto result = mSession->step(true /* up */); + if (result == Result::NOT_SUPPORTED) { + printSkipped("step not supported"); + return; + } + EXPECT_EQ(Result::OK, result); + EXPECT_TIMEOUT_CALL_WAIT(*mCallback, onCurrentProgramInfoChanged_, timeout::tune); + + EXPECT_TIMEOUT_CALL(*mCallback, onCurrentProgramInfoChanged_, _); + result = mSession->step(false /* down */); + EXPECT_EQ(Result::OK, result); + EXPECT_TIMEOUT_CALL_WAIT(*mCallback, onCurrentProgramInfoChanged_, timeout::tune); +} + +/** + * Test tune cancellation. + * + * Verifies that: + * - the method does not crash after being invoked multiple times. + */ +TEST_F(BroadcastRadioHalTest, Cancel) { + ASSERT_TRUE(openSession()); + + for (int i = 0; i < 10; i++) { + auto result = mSession->scan(true /* up */, true /* skip subchannel */); + ASSERT_EQ(Result::OK, result); + + auto cancelResult = mSession->cancel(); + ASSERT_TRUE(cancelResult.isOk()); + } +} + +/** + * Test IBroadcastRadio::get|setParameters() methods called with no parameters. + * + * Verifies that: + * - callback is called for empty parameters set. + */ +TEST_F(BroadcastRadioHalTest, NoParameters) { + ASSERT_TRUE(openSession()); + + hidl_vec halResults = {}; + bool wasCalled = false; + auto cb = [&](hidl_vec results) { + wasCalled = true; + halResults = results; + }; + + auto hidlResult = mSession->setParameters({}, cb); + ASSERT_TRUE(hidlResult.isOk()); + ASSERT_TRUE(wasCalled); + ASSERT_EQ(0u, halResults.size()); + + wasCalled = false; + hidlResult = mSession->getParameters({}, cb); + ASSERT_TRUE(hidlResult.isOk()); + ASSERT_TRUE(wasCalled); + ASSERT_EQ(0u, halResults.size()); +} + +/** + * Test IBroadcastRadio::get|setParameters() methods called with unknown parameters. + * + * Verifies that: + * - unknown parameters are ignored; + * - callback is called also for empty results set. + */ +TEST_F(BroadcastRadioHalTest, UnknownParameters) { + ASSERT_TRUE(openSession()); + + hidl_vec halResults = {}; + bool wasCalled = false; + auto cb = [&](hidl_vec results) { + wasCalled = true; + halResults = results; + }; + + auto hidlResult = mSession->setParameters({{"com.google.unknown", "dummy"}}, cb); + ASSERT_TRUE(hidlResult.isOk()); + ASSERT_TRUE(wasCalled); + ASSERT_EQ(0u, halResults.size()); + + wasCalled = false; + hidlResult = mSession->getParameters({{"com.google.unknown*", "dummy"}}, cb); + ASSERT_TRUE(hidlResult.isOk()); + ASSERT_TRUE(wasCalled); + ASSERT_EQ(0u, halResults.size()); +} + +/** + * Test session closing. + * + * Verifies that: + * - the method does not crash after being invoked multiple times. + */ +TEST_F(BroadcastRadioHalTest, Close) { + ASSERT_TRUE(openSession()); + + for (int i = 0; i < 10; i++) { + auto cancelResult = mSession->close(); + ASSERT_TRUE(cancelResult.isOk()); + } +} + +/** + * Test geting image of invalid ID. + * + * Verifies that: + * - getImage call handles argument 0 gracefully. 
+ */ +TEST_F(BroadcastRadioHalTest, GetNoImage) { + size_t len = 0; + auto result = mModule->getImage(0, [&](hidl_vec rawImage) { len = rawImage.size(); }); + + ASSERT_TRUE(result.isOk()); + ASSERT_EQ(0u, len); +} + +/** + * Test getting config flags. + * + * Verifies that: + * - isConfigFlagSet either succeeds or ends with NOT_SUPPORTED or INVALID_STATE; + * - call success or failure is consistent with setConfigFlag. + */ +TEST_F(BroadcastRadioHalTest, FetchConfigFlags) { + ASSERT_TRUE(openSession()); + + for (auto flag : gConfigFlagValues) { + auto halResult = Result::UNKNOWN_ERROR; + auto cb = [&](Result result, bool) { halResult = result; }; + auto hidlResult = mSession->isConfigFlagSet(flag, cb); + EXPECT_TRUE(hidlResult.isOk()); + + if (halResult != Result::NOT_SUPPORTED && halResult != Result::INVALID_STATE) { + ASSERT_EQ(Result::OK, halResult); + } + + // set must fail or succeed the same way as get + auto setResult = mSession->setConfigFlag(flag, false); + EXPECT_EQ(halResult, setResult); + setResult = mSession->setConfigFlag(flag, true); + EXPECT_EQ(halResult, setResult); + } +} + +/** + * Test setting config flags. + * + * Verifies that: + * - setConfigFlag either succeeds or ends with NOT_SUPPORTED or INVALID_STATE; + * - isConfigFlagSet reflects the state requested immediately after the set call. + */ +TEST_F(BroadcastRadioHalTest, SetConfigFlags) { + ASSERT_TRUE(openSession()); + + auto get = [&](ConfigFlag flag) { + auto halResult = Result::UNKNOWN_ERROR; + bool gotValue = false; + auto cb = [&](Result result, bool value) { + halResult = result; + gotValue = value; + }; + auto hidlResult = mSession->isConfigFlagSet(flag, cb); + EXPECT_TRUE(hidlResult.isOk()); + EXPECT_EQ(Result::OK, halResult); + return gotValue; + }; + + for (auto flag : gConfigFlagValues) { + auto result = mSession->setConfigFlag(flag, false); + if (result == Result::NOT_SUPPORTED || result == Result::INVALID_STATE) { + // setting to true must result in the same error as false + auto secondResult = mSession->setConfigFlag(flag, true); + EXPECT_EQ(result, secondResult); + continue; + } + ASSERT_EQ(Result::OK, result); + + // verify false is set + auto value = get(flag); + EXPECT_FALSE(value); + + // try setting true this time + result = mSession->setConfigFlag(flag, true); + ASSERT_EQ(Result::OK, result); + value = get(flag); + EXPECT_TRUE(value); + + // false again + result = mSession->setConfigFlag(flag, false); + ASSERT_EQ(Result::OK, result); + value = get(flag); + EXPECT_FALSE(value); + } +} + +/** + * Test getting program list. + * + * Verifies that: + * - startProgramListUpdates either succeeds or returns NOT_SUPPORTED; + * - the complete list is fetched within timeout::programListScan; + * - stopProgramListUpdates does not crash. + */ +TEST_F(BroadcastRadioHalTest, GetProgramList) { + ASSERT_TRUE(openSession()); + + getProgramList(); +} + +/** + * Test HD_STATION_NAME correctness. + * + * Verifies that if a program on the list contains HD_STATION_NAME identifier: + * - the program provides station name in its metadata; + * - the identifier matches the name; + * - there is only one identifier of that type. 
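As an editorial aside (not part of the patch): the check that follows relies on the HD_STATION_NAME encoding documented in types.hal, i.e. up to 8 characters of A-Z0-9, uppercased, stored as little-endian ASCII ("Abc" becomes 0x434241). The tests use utils::make_hdradio_station_name() from the common library; the sketch below is only an assumed illustration of that encoding, and its handling of characters outside A-Z0-9 (they are simply skipped here) is an assumption.

#include <cctype>
#include <cstdint>
#include <string>

// Encode a station name the way HD_STATION_NAME is documented in types.hal.
uint64_t encodeHdStationName(const std::string& name) {
    uint64_t id = 0;
    size_t pos = 0;
    for (char ch : name) {
        if (pos >= 8) break;
        unsigned char c = static_cast<unsigned char>(std::toupper(static_cast<unsigned char>(ch)));
        if (!std::isalnum(c)) continue;  // keep only A-Z / 0-9
        id |= static_cast<uint64_t>(c) << (8 * pos);
        ++pos;
    }
    return id;
}
// encodeHdStationName("Abc") == 0x434241, matching the example in types.hal.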
+ */ +TEST_F(BroadcastRadioHalTest, HdRadioStationNameId) { + ASSERT_TRUE(openSession()); + + auto list = getProgramList(); + if (!list) return; + + for (auto&& program : *list) { + auto nameIds = utils::getAllIds(program.selector, IdentifierType::HD_STATION_NAME); + EXPECT_LE(nameIds.size(), 1u); + if (nameIds.size() == 0) continue; + + auto name = utils::getMetadataString(program, MetadataKey::PROGRAM_NAME); + if (!name) name = utils::getMetadataString(program, MetadataKey::RDS_PS); + ASSERT_TRUE(name.has_value()); + + auto expectedId = utils::make_hdradio_station_name(*name); + EXPECT_EQ(expectedId.value, nameIds[0]); + } +} + +/** + * Test announcement listener registration. + * + * Verifies that: + * - registerAnnouncementListener either succeeds or returns NOT_SUPPORTED; + * - if it succeeds, it returns a valid close handle (which is a nullptr otherwise); + * - closing handle does not crash. + */ +TEST_F(BroadcastRadioHalTest, AnnouncementListenerRegistration) { + sp listener = new AnnouncementListenerMock(); + + Result halResult = Result::UNKNOWN_ERROR; + sp closeHandle = nullptr; + auto cb = [&](Result result, const sp& closeHandle_) { + halResult = result; + closeHandle = closeHandle_; + }; + + auto hidlResult = + mModule->registerAnnouncementListener({AnnouncementType::EMERGENCY}, listener, cb); + ASSERT_TRUE(hidlResult.isOk()); + + if (halResult == Result::NOT_SUPPORTED) { + ASSERT_EQ(nullptr, closeHandle.get()); + printSkipped("Announcements not supported"); + return; + } + + ASSERT_EQ(Result::OK, halResult); + ASSERT_NE(nullptr, closeHandle.get()); + + closeHandle->close(); +} + +} // namespace vts +} // namespace V2_0 +} // namespace broadcastradio +} // namespace hardware +} // namespace android + +int main(int argc, char** argv) { + using android::hardware::broadcastradio::V2_0::vts::gEnv; + using android::hardware::broadcastradio::V2_0::IBroadcastRadio; + using android::hardware::broadcastradio::vts::BroadcastRadioHidlEnvironment; + gEnv = new BroadcastRadioHidlEnvironment; + ::testing::AddGlobalTestEnvironment(gEnv); + ::testing::InitGoogleTest(&argc, argv); + gEnv->init(&argc, argv); + int status = RUN_ALL_TESTS(); + ALOGI("Test result = %d", status); + return status; +} diff --git a/broadcastradio/common/OWNERS b/broadcastradio/common/OWNERS new file mode 100644 index 0000000000000000000000000000000000000000..136b607b9405843f9ac7cbf6657c0bacdb13a9b6 --- /dev/null +++ b/broadcastradio/common/OWNERS @@ -0,0 +1,3 @@ +# Automotive team +egranata@google.com +twasilczyk@google.com diff --git a/broadcastradio/common/tests/Android.bp b/broadcastradio/common/tests/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..3ba31db11a5854041cf3dc2c4c4705416e0bfcbf --- /dev/null +++ b/broadcastradio/common/tests/Android.bp @@ -0,0 +1,76 @@ +// +// Copyright (C) 2017 The Android Open Source Project +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +cc_test { + name: "android.hardware.broadcastradio@common-utils-xx-tests", + vendor: true, + cflags: [ + "-Wall", + "-Wextra", + "-Werror", + ], + cppflags: [ + "-std=c++1z", + ], + srcs: [ + "CommonXX_test.cpp", + ], + static_libs: [ + "android.hardware.broadcastradio@common-utils-1x-lib", + "android.hardware.broadcastradio@common-utils-2x-lib", + ], + shared_libs: [ + "android.hardware.broadcastradio@1.1", + "android.hardware.broadcastradio@2.0", + ], +} + +cc_test { + name: "android.hardware.broadcastradio@common-utils-2x-tests", + vendor: true, + cflags: [ + "-Wall", + "-Wextra", + "-Werror", + ], + cppflags: [ + "-std=c++1z", + ], + srcs: [ + "IdentifierIterator_test.cpp", + "ProgramIdentifier_test.cpp", + ], + static_libs: [ + "android.hardware.broadcastradio@common-utils-2x-lib", + ], + shared_libs: [ + "android.hardware.broadcastradio@2.0", + ], +} + +cc_test { + name: "android.hardware.broadcastradio@common-utils-tests", + vendor: true, + cflags: [ + "-Wall", + "-Wextra", + "-Werror", + ], + srcs: [ + "WorkerThread_test.cpp", + ], + static_libs: ["android.hardware.broadcastradio@common-utils-lib"], +} diff --git a/broadcastradio/common/tests/CommonXX_test.cpp b/broadcastradio/common/tests/CommonXX_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..d19204ebe3dd3177bb6ce48edcc926f11dcd75f9 --- /dev/null +++ b/broadcastradio/common/tests/CommonXX_test.cpp @@ -0,0 +1,18 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include diff --git a/broadcastradio/common/tests/IdentifierIterator_test.cpp b/broadcastradio/common/tests/IdentifierIterator_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..75e0d49dbef58b40b4504c65c614b91f07fffde8 --- /dev/null +++ b/broadcastradio/common/tests/IdentifierIterator_test.cpp @@ -0,0 +1,136 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include + +namespace { + +namespace V2_0 = android::hardware::broadcastradio::V2_0; +namespace utils = android::hardware::broadcastradio::utils; + +using V2_0::IdentifierType; +using V2_0::ProgramSelector; + +TEST(IdentifierIteratorTest, singleSecondary) { + // clang-format off + V2_0::ProgramSelector sel { + utils::make_identifier(IdentifierType::RDS_PI, 0xBEEF), + {utils::make_identifier(IdentifierType::AMFM_FREQUENCY, 100100)} + }; + // clang-format on + + auto it = V2_0::begin(sel); + auto end = V2_0::end(sel); + + ASSERT_NE(end, it); + EXPECT_EQ(sel.primaryId, *it); + ASSERT_NE(end, ++it); + EXPECT_EQ(sel.secondaryIds[0], *it); + ASSERT_EQ(end, ++it); +} + +TEST(IdentifierIteratorTest, empty) { + V2_0::ProgramSelector sel{}; + + auto it = V2_0::begin(sel); + auto end = V2_0::end(sel); + + ASSERT_NE(end, it++); // primary id is always present + ASSERT_EQ(end, it); +} + +TEST(IdentifierIteratorTest, twoSelectors) { + V2_0::ProgramSelector sel1{}; + V2_0::ProgramSelector sel2{}; + + auto it1 = V2_0::begin(sel1); + auto it2 = V2_0::begin(sel2); + + EXPECT_NE(it1, it2); +} + +TEST(IdentifierIteratorTest, increments) { + V2_0::ProgramSelector sel{{}, {{}, {}}}; + + auto it = V2_0::begin(sel); + auto end = V2_0::end(sel); + auto pre = it; + auto post = it; + + EXPECT_NE(++pre, post++); + EXPECT_EQ(pre, post); + EXPECT_EQ(pre, it + 1); + ASSERT_NE(end, pre); +} + +TEST(IdentifierIteratorTest, findType) { + using namespace std::placeholders; + + uint64_t rds_pi1 = 0xDEAD; + uint64_t rds_pi2 = 0xBEEF; + uint64_t freq1 = 100100; + uint64_t freq2 = 107900; + + // clang-format off + V2_0::ProgramSelector sel { + utils::make_identifier(IdentifierType::RDS_PI, rds_pi1), + { + utils::make_identifier(IdentifierType::AMFM_FREQUENCY, freq1), + utils::make_identifier(IdentifierType::RDS_PI, rds_pi2), + utils::make_identifier(IdentifierType::AMFM_FREQUENCY, freq2), + } + }; + // clang-format on + + auto typeEquals = [](const V2_0::ProgramIdentifier& id, V2_0::IdentifierType type) { + return utils::getType(id) == type; + }; + auto isRdsPi = std::bind(typeEquals, _1, IdentifierType::RDS_PI); + auto isFreq = std::bind(typeEquals, _1, IdentifierType::AMFM_FREQUENCY); + + auto end = V2_0::end(sel); + auto it = std::find_if(V2_0::begin(sel), end, isRdsPi); + ASSERT_NE(end, it); + EXPECT_EQ(rds_pi1, it->value); + + it = std::find_if(it + 1, end, isRdsPi); + ASSERT_NE(end, it); + EXPECT_EQ(rds_pi2, it->value); + + it = std::find_if(V2_0::begin(sel), end, isFreq); + ASSERT_NE(end, it); + EXPECT_EQ(freq1, it->value); + + it = std::find_if(++it, end, isFreq); + ASSERT_NE(end, it); + EXPECT_EQ(freq2, it->value); +} + +TEST(IdentifierIteratorTest, rangeLoop) { + V2_0::ProgramSelector sel{{}, {{}, {}, {}}}; + + unsigned count = 0; + for (auto&& id : sel) { + ASSERT_EQ(0u, id.type); + count++; + } + + const auto expectedCount = 1 + sel.secondaryIds.size(); + ASSERT_EQ(expectedCount, count); +} + +} // anonymous namespace diff --git a/broadcastradio/common/tests/ProgramIdentifier_test.cpp b/broadcastradio/common/tests/ProgramIdentifier_test.cpp new file mode 100644 index 0000000000000000000000000000000000000000..51ad0145ee620566e837094e98d51a34d78343c1 --- /dev/null +++ b/broadcastradio/common/tests/ProgramIdentifier_test.cpp @@ -0,0 +1,40 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include + +#include + +namespace { + +namespace utils = android::hardware::broadcastradio::utils; + +TEST(ProgramIdentifierTest, hdRadioStationName) { + auto verify = [](std::string name, uint64_t nameId) { + auto id = utils::make_hdradio_station_name(name); + EXPECT_EQ(nameId, id.value) << "Failed to convert '" << name << "'"; + }; + + verify("", 0); + verify("Abc", 0x434241); + verify("Some Station 1", 0x54415453454d4f53); + verify("Station1", 0x314e4f4954415453); + verify("!@#$%^&*()_+", 0); + verify("-=[]{};':\"0", 0x30); +} + +} // anonymous namespace diff --git a/broadcastradio/1.1/tests/WorkerThread_test.cpp b/broadcastradio/common/tests/WorkerThread_test.cpp similarity index 100% rename from broadcastradio/1.1/tests/WorkerThread_test.cpp rename to broadcastradio/common/tests/WorkerThread_test.cpp diff --git a/broadcastradio/1.1/tests/Android.bp b/broadcastradio/common/utils/Android.bp similarity index 71% rename from broadcastradio/1.1/tests/Android.bp rename to broadcastradio/common/utils/Android.bp index fa1fd944090b5e23c3b0b21e13ee26670115c370..33ba7da22fd6c8521d657b5b7fa5e233986da2fb 100644 --- a/broadcastradio/1.1/tests/Android.bp +++ b/broadcastradio/common/utils/Android.bp @@ -14,16 +14,22 @@ // limitations under the License. // -cc_test { - name: "android.hardware.broadcastradio@1.1-utils-tests", - vendor: true, +cc_library_static { + name: "android.hardware.broadcastradio@common-utils-lib", + vendor_available: true, + relative_install_path: "hw", cflags: [ "-Wall", "-Wextra", "-Werror", ], srcs: [ - "WorkerThread_test.cpp", + "WorkerThread.cpp", + ], + export_include_dirs: ["include"], + shared_libs: [ + "libbase", + "liblog", + "libutils", ], - static_libs: ["android.hardware.broadcastradio@1.1-utils-lib"], } diff --git a/broadcastradio/1.1/utils/WorkerThread.cpp b/broadcastradio/common/utils/WorkerThread.cpp similarity index 100% rename from broadcastradio/1.1/utils/WorkerThread.cpp rename to broadcastradio/common/utils/WorkerThread.cpp diff --git a/broadcastradio/1.1/utils/include/broadcastradio-utils/WorkerThread.h b/broadcastradio/common/utils/include/broadcastradio-utils/WorkerThread.h similarity index 87% rename from broadcastradio/1.1/utils/include/broadcastradio-utils/WorkerThread.h rename to broadcastradio/common/utils/include/broadcastradio-utils/WorkerThread.h index 635876fbcbcc96150c9c0ef491d2097dfa4fd205..62bede6ca0aec91893810dc44d8d466a568a822e 100644 --- a/broadcastradio/1.1/utils/include/broadcastradio-utils/WorkerThread.h +++ b/broadcastradio/common/utils/include/broadcastradio-utils/WorkerThread.h @@ -13,8 +13,8 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -#ifndef ANDROID_HARDWARE_BROADCASTRADIO_V1_1_WORKERTHREAD_H -#define ANDROID_HARDWARE_BROADCASTRADIO_V1_1_WORKERTHREAD_H +#ifndef ANDROID_HARDWARE_BROADCASTRADIO_COMMON_WORKERTHREAD_H +#define ANDROID_HARDWARE_BROADCASTRADIO_COMMON_WORKERTHREAD_H #include #include @@ -48,4 +48,4 @@ class WorkerThread { } // namespace android -#endif // ANDROID_HARDWARE_BROADCASTRADIO_V1_1_WORKERTHREAD_H +#endif // ANDROID_HARDWARE_BROADCASTRADIO_COMMON_WORKERTHREAD_H diff --git a/broadcastradio/1.1/utils/Android.bp b/broadcastradio/common/utils1x/Android.bp similarity index 91% rename from broadcastradio/1.1/utils/Android.bp rename to broadcastradio/common/utils1x/Android.bp index e80d133dca0af851d5a9a4896c622233407aed5b..443dca158d6606dc88d3269d36aeb0f049a57a3f 100644 --- a/broadcastradio/1.1/utils/Android.bp +++ b/broadcastradio/common/utils1x/Android.bp @@ -15,7 +15,7 @@ // cc_library_static { - name: "android.hardware.broadcastradio@1.1-utils-lib", + name: "android.hardware.broadcastradio@common-utils-1x-lib", vendor_available: true, relative_install_path: "hw", cflags: [ @@ -25,7 +25,6 @@ cc_library_static { ], srcs: [ "Utils.cpp", - "WorkerThread.cpp", ], export_include_dirs: ["include"], shared_libs: [ diff --git a/broadcastradio/1.1/utils/Utils.cpp b/broadcastradio/common/utils1x/Utils.cpp similarity index 91% rename from broadcastradio/1.1/utils/Utils.cpp rename to broadcastradio/common/utils1x/Utils.cpp index 4dd6b139c4995aedb596af3ece77bca8b7215165..9c2cc3d51e98bcc2368db6df15b25030501d7e55 100644 --- a/broadcastradio/1.1/utils/Utils.cpp +++ b/broadcastradio/common/utils1x/Utils.cpp @@ -16,17 +16,20 @@ #define LOG_TAG "BroadcastRadioDefault.utils" //#define LOG_NDEBUG 0 -#include +#include #include namespace android { namespace hardware { namespace broadcastradio { -namespace V1_1 { namespace utils { using V1_0::Band; +using V1_1::IdentifierType; +using V1_1::ProgramIdentifier; +using V1_1::ProgramSelector; +using V1_1::ProgramType; static bool isCompatibleProgramType(const uint32_t ia, const uint32_t ib) { auto a = static_cast(ia); @@ -56,9 +59,7 @@ static bool haveEqualIds(const ProgramSelector& a, const ProgramSelector& b, /* We should check all Ids of a given type (ie. other AF), * but it doesn't matter for default implementation. 
*/ - auto aId = getId(a, type); - auto bId = getId(b, type); - return aId == bId; + return getId(a, type) == getId(b, type); } bool tunesTo(const ProgramSelector& a, const ProgramSelector& b) { @@ -123,23 +124,36 @@ bool isFm(const Band band) { return band == Band::FM || band == Band::FM_HD; } -bool hasId(const ProgramSelector& sel, const IdentifierType type) { +static bool maybeGetId(const ProgramSelector& sel, const IdentifierType type, uint64_t* val) { auto itype = static_cast(type); - if (sel.primaryId.type == itype) return true; + + if (sel.primaryId.type == itype) { + if (val) *val = sel.primaryId.value; + return true; + } + // not optimal, but we don't care in default impl for (auto&& id : sel.secondaryIds) { - if (id.type == itype) return true; + if (id.type == itype) { + if (val) *val = id.value; + return true; + } } + return false; } +bool hasId(const ProgramSelector& sel, const IdentifierType type) { + return maybeGetId(sel, type, nullptr); +} + uint64_t getId(const ProgramSelector& sel, const IdentifierType type) { - auto itype = static_cast(type); - if (sel.primaryId.type == itype) return sel.primaryId.value; - // not optimal, but we don't care in default impl - for (auto&& id : sel.secondaryIds) { - if (id.type == itype) return id.value; + uint64_t val; + + if (maybeGetId(sel, type, &val)) { + return val; } + ALOGW("Identifier %s not found", toString(type).c_str()); return 0; } @@ -208,19 +222,20 @@ bool isDigital(const ProgramSelector& sel) { } } // namespace utils -} // namespace V1_1 namespace V1_0 { bool operator==(const BandConfig& l, const BandConfig& r) { + using namespace utils; + if (l.type != r.type) return false; if (l.antennaConnected != r.antennaConnected) return false; if (l.lowerLimit != r.lowerLimit) return false; if (l.upperLimit != r.upperLimit) return false; if (l.spacings != r.spacings) return false; - if (V1_1::utils::isAm(l.type)) { + if (isAm(l.type)) { return l.ext.am == r.ext.am; - } else if (V1_1::utils::isFm(l.type)) { + } else if (isFm(l.type)) { return l.ext.fm == r.ext.fm; } else { ALOGW("Unsupported band config type: %s", toString(l.type).c_str()); diff --git a/broadcastradio/1.1/utils/include/broadcastradio-utils/Utils.h b/broadcastradio/common/utils1x/include/broadcastradio-utils-1x/Utils.h similarity index 66% rename from broadcastradio/1.1/utils/include/broadcastradio-utils/Utils.h rename to broadcastradio/common/utils1x/include/broadcastradio-utils-1x/Utils.h index 24c60ee460b9ff18d9ebdc24049f975a2ccabfc9..d47746d7c8d8fd784b3ddfc0234eb44d92ef644d 100644 --- a/broadcastradio/1.1/utils/include/broadcastradio-utils/Utils.h +++ b/broadcastradio/common/utils1x/include/broadcastradio-utils-1x/Utils.h @@ -13,8 +13,8 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -#ifndef ANDROID_HARDWARE_BROADCASTRADIO_V1_1_UTILS_H -#define ANDROID_HARDWARE_BROADCASTRADIO_V1_1_UTILS_H +#ifndef ANDROID_HARDWARE_BROADCASTRADIO_COMMON_UTILS_1X_H +#define ANDROID_HARDWARE_BROADCASTRADIO_COMMON_UTILS_1X_H #include #include @@ -24,10 +24,8 @@ namespace android { namespace hardware { namespace broadcastradio { -namespace V1_1 { namespace utils { -// TODO(b/64115813): move it out from frameworks/base/services/core/jni/BroadcastRadio/types.h enum class HalRevision : uint32_t { V1_0 = 1, V1_1, @@ -43,38 +41,38 @@ enum class HalRevision : uint32_t { * @param pointer selector we're trying to match against channel. * @param channel existing channel. 
*/ -bool tunesTo(const ProgramSelector& pointer, const ProgramSelector& channel); +bool tunesTo(const V1_1::ProgramSelector& pointer, const V1_1::ProgramSelector& channel); -ProgramType getType(const ProgramSelector& sel); -bool isAmFm(const ProgramType type); +V1_1::ProgramType getType(const V1_1::ProgramSelector& sel); +bool isAmFm(const V1_1::ProgramType type); bool isAm(const V1_0::Band band); bool isFm(const V1_0::Band band); -bool hasId(const ProgramSelector& sel, const IdentifierType type); +bool hasId(const V1_1::ProgramSelector& sel, const V1_1::IdentifierType type); /** * Returns ID (either primary or secondary) for a given program selector. * * If the selector does not contain given type, returns 0 and emits a warning. */ -uint64_t getId(const ProgramSelector& sel, const IdentifierType type); +uint64_t getId(const V1_1::ProgramSelector& sel, const V1_1::IdentifierType type); /** * Returns ID (either primary or secondary) for a given program selector. * * If the selector does not contain given type, returns default value. */ -uint64_t getId(const ProgramSelector& sel, const IdentifierType type, uint64_t defval); +uint64_t getId(const V1_1::ProgramSelector& sel, const V1_1::IdentifierType type, uint64_t defval); -ProgramSelector make_selector(V1_0::Band band, uint32_t channel, uint32_t subChannel = 0); +V1_1::ProgramSelector make_selector(V1_0::Band band, uint32_t channel, uint32_t subChannel = 0); -bool getLegacyChannel(const ProgramSelector& sel, uint32_t* channelOut, uint32_t* subChannelOut); +bool getLegacyChannel(const V1_1::ProgramSelector& sel, uint32_t* channelOut, + uint32_t* subChannelOut); -bool isDigital(const ProgramSelector& sel); +bool isDigital(const V1_1::ProgramSelector& sel); } // namespace utils -} // namespace V1_1 namespace V1_0 { @@ -86,4 +84,4 @@ bool operator==(const BandConfig& l, const BandConfig& r); } // namespace hardware } // namespace android -#endif // ANDROID_HARDWARE_BROADCASTRADIO_V1_1_UTILS_H +#endif // ANDROID_HARDWARE_BROADCASTRADIO_COMMON_UTILS_1X_H diff --git a/broadcastradio/common/utils2x/Android.bp b/broadcastradio/common/utils2x/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..aab94f2a1f7658ca76e036882e81c104e9c4ce03 --- /dev/null +++ b/broadcastradio/common/utils2x/Android.bp @@ -0,0 +1,36 @@ +// +// Copyright (C) 2017 The Android Open Source Project +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +cc_library_static { + name: "android.hardware.broadcastradio@common-utils-2x-lib", + vendor_available: true, + relative_install_path: "hw", + cflags: [ + "-Wall", + "-Wextra", + "-Werror", + ], + cppflags: [ + "-std=c++1z", + ], + srcs: [ + "Utils.cpp", + ], + export_include_dirs: ["include"], + shared_libs: [ + "android.hardware.broadcastradio@2.0", + ], +} diff --git a/broadcastradio/common/utils2x/Utils.cpp b/broadcastradio/common/utils2x/Utils.cpp new file mode 100644 index 0000000000000000000000000000000000000000..3e20b357f624438d9f152788cf1e0466da6f831d --- /dev/null +++ b/broadcastradio/common/utils2x/Utils.cpp @@ -0,0 +1,420 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#define LOG_TAG "BcRadioDef.utils" +//#define LOG_NDEBUG 0 + +#include + +#include +#include + +namespace android { +namespace hardware { +namespace broadcastradio { +namespace utils { + +using V2_0::IdentifierType; +using V2_0::Metadata; +using V2_0::MetadataKey; +using V2_0::ProgramFilter; +using V2_0::ProgramIdentifier; +using V2_0::ProgramInfo; +using V2_0::ProgramListChunk; +using V2_0::ProgramSelector; +using V2_0::Properties; + +using std::string; +using std::vector; + +IdentifierType getType(uint32_t typeAsInt) { + return static_cast(typeAsInt); +} + +IdentifierType getType(const ProgramIdentifier& id) { + return getType(id.type); +} + +IdentifierIterator::IdentifierIterator(const V2_0::ProgramSelector& sel) + : IdentifierIterator(sel, 0) {} + +IdentifierIterator::IdentifierIterator(const V2_0::ProgramSelector& sel, size_t pos) + : mSel(sel), mPos(pos) {} + +IdentifierIterator IdentifierIterator::operator++(int) { + auto i = *this; + mPos++; + return i; +} + +IdentifierIterator& IdentifierIterator::operator++() { + ++mPos; + return *this; +} + +IdentifierIterator::ref_type IdentifierIterator::operator*() const { + if (mPos == 0) return sel().primaryId; + + // mPos is 1-based for secondary identifiers + DCHECK(mPos <= sel().secondaryIds.size()); + return sel().secondaryIds[mPos - 1]; +} + +bool IdentifierIterator::operator==(const IdentifierIterator& rhs) const { + // Check, if both iterators points at the same selector. 
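+ // They are compared by the address of the selector they refer to, not by value, so iterators obtained from two different (even identical) selectors never compare equal.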
+ if (reinterpret_cast(&sel()) != reinterpret_cast(&rhs.sel())) { + return false; + } + + return mPos == rhs.mPos; +} + +FrequencyBand getBand(uint64_t freq) { + // keep in sync with + // frameworks/base/services/core/java/com/android/server/broadcastradio/hal2/Utils.java + if (freq < 30) return FrequencyBand::UNKNOWN; + if (freq < 500) return FrequencyBand::AM_LW; + if (freq < 1705) return FrequencyBand::AM_MW; + if (freq < 30000) return FrequencyBand::AM_SW; + if (freq < 60000) return FrequencyBand::UNKNOWN; + if (freq < 110000) return FrequencyBand::FM; + return FrequencyBand::UNKNOWN; +} + +static bool bothHaveId(const ProgramSelector& a, const ProgramSelector& b, + const IdentifierType type) { + return hasId(a, type) && hasId(b, type); +} + +static bool haveEqualIds(const ProgramSelector& a, const ProgramSelector& b, + const IdentifierType type) { + if (!bothHaveId(a, b, type)) return false; + /* We should check all Ids of a given type (ie. other AF), + * but it doesn't matter for default implementation. + */ + return getId(a, type) == getId(b, type); +} + +static int getHdSubchannel(const ProgramSelector& sel) { + auto hdsidext = getId(sel, IdentifierType::HD_STATION_ID_EXT, 0); + hdsidext >>= 32; // Station ID number + return hdsidext & 0xF; // HD Radio subchannel +} + +bool tunesTo(const ProgramSelector& a, const ProgramSelector& b) { + auto type = getType(b.primaryId); + + switch (type) { + case IdentifierType::HD_STATION_ID_EXT: + case IdentifierType::RDS_PI: + case IdentifierType::AMFM_FREQUENCY: + if (haveEqualIds(a, b, IdentifierType::HD_STATION_ID_EXT)) return true; + if (haveEqualIds(a, b, IdentifierType::RDS_PI)) return true; + return getHdSubchannel(b) == 0 && haveEqualIds(a, b, IdentifierType::AMFM_FREQUENCY); + case IdentifierType::DAB_SID_EXT: + return haveEqualIds(a, b, IdentifierType::DAB_SID_EXT); + case IdentifierType::DRMO_SERVICE_ID: + return haveEqualIds(a, b, IdentifierType::DRMO_SERVICE_ID); + case IdentifierType::SXM_SERVICE_ID: + return haveEqualIds(a, b, IdentifierType::SXM_SERVICE_ID); + default: // includes all vendor types + ALOGW("Unsupported program type: %s", toString(type).c_str()); + return false; + } +} + +static bool maybeGetId(const ProgramSelector& sel, const IdentifierType type, uint64_t* val) { + auto itype = static_cast(type); + + if (sel.primaryId.type == itype) { + if (val) *val = sel.primaryId.value; + return true; + } + + // TODO(twasilczyk): use IdentifierIterator + // not optimal, but we don't care in default impl + for (auto&& id : sel.secondaryIds) { + if (id.type == itype) { + if (val) *val = id.value; + return true; + } + } + + return false; +} + +bool hasId(const ProgramSelector& sel, const IdentifierType type) { + return maybeGetId(sel, type, nullptr); +} + +uint64_t getId(const ProgramSelector& sel, const IdentifierType type) { + uint64_t val; + + if (maybeGetId(sel, type, &val)) { + return val; + } + + ALOGW("Identifier %s not found", toString(type).c_str()); + return 0; +} + +uint64_t getId(const ProgramSelector& sel, const IdentifierType type, uint64_t defval) { + if (!hasId(sel, type)) return defval; + return getId(sel, type); +} + +vector getAllIds(const ProgramSelector& sel, const IdentifierType type) { + vector ret; + auto itype = static_cast(type); + + if (sel.primaryId.type == itype) ret.push_back(sel.primaryId.value); + + // TODO(twasilczyk): use IdentifierIterator + for (auto&& id : sel.secondaryIds) { + if (id.type == itype) ret.push_back(id.value); + } + + return ret; +} + +bool isSupported(const Properties& prop, 
const ProgramSelector& sel) { + // TODO(twasilczyk): use IdentifierIterator + // Not optimal, but it doesn't matter for default impl nor VTS tests. + for (auto&& idType : prop.supportedIdentifierTypes) { + if (hasId(sel, getType(idType))) return true; + } + return false; +} + +bool isValid(const ProgramIdentifier& id) { + auto val = id.value; + bool valid = true; + + auto expect = [&valid](bool condition, std::string message) { + if (!condition) { + valid = false; + ALOGE("Identifier not valid, expected %s", message.c_str()); + } + }; + + switch (getType(id)) { + case IdentifierType::INVALID: + expect(false, "IdentifierType::INVALID"); + break; + case IdentifierType::DAB_FREQUENCY: + expect(val > 100000u, "f > 100MHz"); + // fallthrough + case IdentifierType::AMFM_FREQUENCY: + case IdentifierType::DRMO_FREQUENCY: + expect(val > 100u, "f > 100kHz"); + expect(val < 10000000u, "f < 10GHz"); + break; + case IdentifierType::RDS_PI: + expect(val != 0u, "RDS PI != 0"); + expect(val <= 0xFFFFu, "16bit id"); + break; + case IdentifierType::HD_STATION_ID_EXT: { + auto stationId = val & 0xFFFFFFFF; // 32bit + val >>= 32; + auto subchannel = val & 0xF; // 4bit + val >>= 4; + auto freq = val & 0x3FFFF; // 18bit + expect(stationId != 0u, "HD station id != 0"); + expect(subchannel < 8u, "HD subch < 8"); + expect(freq > 100u, "f > 100kHz"); + expect(freq < 10000000u, "f < 10GHz"); + break; + } + case IdentifierType::HD_STATION_NAME: { + while (val > 0) { + auto ch = static_cast(val & 0xFF); + val >>= 8; + expect((ch >= '0' && ch <= '9') || (ch >= 'A' && ch <= 'Z'), + "HD_STATION_NAME does not match [A-Z0-9]+"); + } + break; + } + case IdentifierType::DAB_SID_EXT: { + auto sid = val & 0xFFFF; // 16bit + val >>= 16; + auto ecc = val & 0xFF; // 8bit + expect(sid != 0u, "DAB SId != 0"); + expect(ecc >= 0xA0u && ecc <= 0xF6u, "Invalid ECC, see ETSI TS 101 756 V2.1.1"); + break; + } + case IdentifierType::DAB_ENSEMBLE: + expect(val != 0u, "DAB ensemble != 0"); + expect(val <= 0xFFFFu, "16bit id"); + break; + case IdentifierType::DAB_SCID: + expect(val > 0xFu, "12bit SCId (not 4bit SCIdS)"); + expect(val <= 0xFFFu, "12bit id"); + break; + case IdentifierType::DRMO_SERVICE_ID: + expect(val != 0u, "DRM SId != 0"); + expect(val <= 0xFFFFFFu, "24bit id"); + break; + case IdentifierType::SXM_SERVICE_ID: + expect(val != 0u, "SXM SId != 0"); + expect(val <= 0xFFFFFFFFu, "32bit id"); + break; + case IdentifierType::SXM_CHANNEL: + expect(val < 1000u, "SXM channel < 1000"); + break; + case IdentifierType::VENDOR_START: + case IdentifierType::VENDOR_END: + // skip + break; + } + + return valid; +} + +bool isValid(const ProgramSelector& sel) { + if (!isValid(sel.primaryId)) return false; + // TODO(twasilczyk): use IdentifierIterator + for (auto&& id : sel.secondaryIds) { + if (!isValid(id)) return false; + } + return true; +} + +ProgramIdentifier make_identifier(IdentifierType type, uint64_t value) { + return {static_cast(type), value}; +} + +ProgramSelector make_selector_amfm(uint32_t frequency) { + ProgramSelector sel = {}; + sel.primaryId = make_identifier(IdentifierType::AMFM_FREQUENCY, frequency); + return sel; +} + +Metadata make_metadata(MetadataKey key, int64_t value) { + Metadata meta = {}; + meta.key = static_cast(key); + meta.intValue = value; + return meta; +} + +Metadata make_metadata(MetadataKey key, string value) { + Metadata meta = {}; + meta.key = static_cast(key); + meta.stringValue = value; + return meta; +} + +bool satisfies(const ProgramFilter& filter, const ProgramSelector& sel) { + if 
(filter.identifierTypes.size() > 0) { + auto typeEquals = [](const V2_0::ProgramIdentifier& id, uint32_t type) { + return id.type == type; + }; + auto it = std::find_first_of(begin(sel), end(sel), filter.identifierTypes.begin(), + filter.identifierTypes.end(), typeEquals); + if (it == end(sel)) return false; + } + + if (filter.identifiers.size() > 0) { + auto it = std::find_first_of(begin(sel), end(sel), filter.identifiers.begin(), + filter.identifiers.end()); + if (it == end(sel)) return false; + } + + if (!filter.includeCategories) { + if (getType(sel.primaryId) == IdentifierType::DAB_ENSEMBLE) return false; + } + + return true; +} + +size_t ProgramInfoHasher::operator()(const ProgramInfo& info) const { + auto& id = info.selector.primaryId; + + /* This is not the best hash implementation, but good enough for default HAL + * implementation and tests. */ + auto h = std::hash{}(id.type); + h += 0x9e3779b9; + h ^= std::hash{}(id.value); + + return h; +} + +bool ProgramInfoKeyEqual::operator()(const ProgramInfo& info1, const ProgramInfo& info2) const { + auto& id1 = info1.selector.primaryId; + auto& id2 = info2.selector.primaryId; + return id1.type == id2.type && id1.value == id2.value; +} + +void updateProgramList(ProgramInfoSet& list, const ProgramListChunk& chunk) { + if (chunk.purge) list.clear(); + + list.insert(chunk.modified.begin(), chunk.modified.end()); + + for (auto&& id : chunk.removed) { + ProgramInfo info = {}; + info.selector.primaryId = id; + list.erase(info); + } +} + +std::optional getMetadataString(const V2_0::ProgramInfo& info, + const V2_0::MetadataKey key) { + auto isKey = [key](const V2_0::Metadata& item) { + return static_cast(item.key) == key; + }; + + auto it = std::find_if(info.metadata.begin(), info.metadata.end(), isKey); + if (it == info.metadata.end()) return std::nullopt; + + return it->stringValue; +} + +V2_0::ProgramIdentifier make_hdradio_station_name(const std::string& name) { + constexpr size_t maxlen = 8; + + std::string shortName; + shortName.reserve(maxlen); + + auto&& loc = std::locale::classic(); + for (char ch : name) { + if (!std::isalnum(ch, loc)) continue; + shortName.push_back(std::toupper(ch, loc)); + if (shortName.length() >= maxlen) break; + } + + uint64_t val = 0; + for (auto rit = shortName.rbegin(); rit != shortName.rend(); ++rit) { + val <<= 8; + val |= static_cast(*rit); + } + + return make_identifier(IdentifierType::HD_STATION_NAME, val); +} + +} // namespace utils + +namespace V2_0 { + +utils::IdentifierIterator begin(const ProgramSelector& sel) { + return utils::IdentifierIterator(sel); +} + +utils::IdentifierIterator end(const ProgramSelector& sel) { + return utils::IdentifierIterator(sel) + 1 /* primary id */ + sel.secondaryIds.size(); +} + +} // namespace V2_0 +} // namespace broadcastradio +} // namespace hardware +} // namespace android diff --git a/broadcastradio/common/utils2x/include/broadcastradio-utils-2x/Utils.h b/broadcastradio/common/utils2x/include/broadcastradio-utils-2x/Utils.h new file mode 100644 index 0000000000000000000000000000000000000000..c4aecb218fb8d51dfaa4c4737488984693911b8d --- /dev/null +++ b/broadcastradio/common/utils2x/include/broadcastradio-utils-2x/Utils.h @@ -0,0 +1,164 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef ANDROID_HARDWARE_BROADCASTRADIO_COMMON_UTILS_2X_H +#define ANDROID_HARDWARE_BROADCASTRADIO_COMMON_UTILS_2X_H + +#include +#include +#include +#include +#include +#include + +namespace android { +namespace hardware { +namespace broadcastradio { +namespace utils { + +enum class FrequencyBand { + UNKNOWN, + FM, + AM_LW, + AM_MW, + AM_SW, +}; + +V2_0::IdentifierType getType(uint32_t typeAsInt); +V2_0::IdentifierType getType(const V2_0::ProgramIdentifier& id); + +class IdentifierIterator + : public std::iterator { + using traits = std::iterator_traits; + using ptr_type = typename traits::pointer; + using ref_type = typename traits::reference; + using diff_type = typename traits::difference_type; + + public: + explicit IdentifierIterator(const V2_0::ProgramSelector& sel); + + IdentifierIterator operator++(int); + IdentifierIterator& operator++(); + ref_type operator*() const; + inline ptr_type operator->() const { return &operator*(); } + IdentifierIterator operator+(diff_type v) const { return IdentifierIterator(mSel, mPos + v); } + bool operator==(const IdentifierIterator& rhs) const; + inline bool operator!=(const IdentifierIterator& rhs) const { return !operator==(rhs); }; + + private: + explicit IdentifierIterator(const V2_0::ProgramSelector& sel, size_t pos); + + std::reference_wrapper mSel; + + const V2_0::ProgramSelector& sel() const { return mSel.get(); } + + /** 0 is the primary identifier, 1-n are secondary identifiers. */ + size_t mPos = 0; +}; + +/** + * Guesses band from the frequency value. + * + * The band bounds are not exact to cover multiple regions. + * The function is biased towards success, i.e. it never returns + * FrequencyBand::UNKNOWN for correct frequency, but a result for + * incorrect one is undefined (it doesn't have to return UNKNOWN). + */ +FrequencyBand getBand(uint64_t frequency); + +/** + * Checks, if {@code pointer} tunes to {@channel}. + * + * For example, having a channel {AMFM_FREQUENCY = 103.3}: + * - selector {AMFM_FREQUENCY = 103.3, HD_SUBCHANNEL = 0} can tune to this channel; + * - selector {AMFM_FREQUENCY = 103.3, HD_SUBCHANNEL = 1} can't. + * + * @param pointer selector we're trying to match against channel. + * @param channel existing channel. + */ +bool tunesTo(const V2_0::ProgramSelector& pointer, const V2_0::ProgramSelector& channel); + +bool hasId(const V2_0::ProgramSelector& sel, const V2_0::IdentifierType type); + +/** + * Returns ID (either primary or secondary) for a given program selector. + * + * If the selector does not contain given type, returns 0 and emits a warning. + */ +uint64_t getId(const V2_0::ProgramSelector& sel, const V2_0::IdentifierType type); + +/** + * Returns ID (either primary or secondary) for a given program selector. + * + * If the selector does not contain given type, returns default value. + */ +uint64_t getId(const V2_0::ProgramSelector& sel, const V2_0::IdentifierType type, uint64_t defval); + +/** + * Returns all IDs of a given type. 
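+ * + * Both the primary and the secondary identifiers are searched; an empty vector is returned if none match.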
+ */ +std::vector getAllIds(const V2_0::ProgramSelector& sel, const V2_0::IdentifierType type); + +/** + * Checks, if a given selector is supported by the radio module. + * + * @param prop Module description. + * @param sel The selector to check. + * @return True, if the selector is supported, false otherwise. + */ +bool isSupported(const V2_0::Properties& prop, const V2_0::ProgramSelector& sel); + +bool isValid(const V2_0::ProgramIdentifier& id); +bool isValid(const V2_0::ProgramSelector& sel); + +V2_0::ProgramIdentifier make_identifier(V2_0::IdentifierType type, uint64_t value); +V2_0::ProgramSelector make_selector_amfm(uint32_t frequency); +V2_0::Metadata make_metadata(V2_0::MetadataKey key, int64_t value); +V2_0::Metadata make_metadata(V2_0::MetadataKey key, std::string value); + +bool satisfies(const V2_0::ProgramFilter& filter, const V2_0::ProgramSelector& sel); + +struct ProgramInfoHasher { + size_t operator()(const V2_0::ProgramInfo& info) const; +}; + +struct ProgramInfoKeyEqual { + bool operator()(const V2_0::ProgramInfo& info1, const V2_0::ProgramInfo& info2) const; +}; + +typedef std::unordered_set + ProgramInfoSet; + +void updateProgramList(ProgramInfoSet& list, const V2_0::ProgramListChunk& chunk); + +std::optional getMetadataString(const V2_0::ProgramInfo& info, + const V2_0::MetadataKey key); + +V2_0::ProgramIdentifier make_hdradio_station_name(const std::string& name); + +} // namespace utils + +namespace V2_0 { + +utils::IdentifierIterator begin(const ProgramSelector& sel); +utils::IdentifierIterator end(const ProgramSelector& sel); + +} // namespace V2_0 +} // namespace broadcastradio +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_BROADCASTRADIO_COMMON_UTILS_2X_H diff --git a/broadcastradio/1.1/vts/utils/Android.bp b/broadcastradio/common/vts/utils/Android.bp similarity index 84% rename from broadcastradio/1.1/vts/utils/Android.bp rename to broadcastradio/common/vts/utils/Android.bp index 0c7e2a4433319c40ab079f86f957a32991b79acc..d3edc76678142add607009c81c4f06a19c5425b4 100644 --- a/broadcastradio/1.1/vts/utils/Android.bp +++ b/broadcastradio/common/vts/utils/Android.bp @@ -15,7 +15,7 @@ // cc_library_static { - name: "android.hardware.broadcastradio@1.1-vts-utils-lib", + name: "android.hardware.broadcastradio@vts-utils-lib", srcs: [ "call-barrier.cpp", ], @@ -25,4 +25,8 @@ cc_library_static { "-Wextra", "-Werror", ], + static_libs: [ + "VtsHalHidlTargetTestBase", + ], + group_static_libs: true, } diff --git a/broadcastradio/1.1/vts/utils/call-barrier.cpp b/broadcastradio/common/vts/utils/call-barrier.cpp similarity index 100% rename from broadcastradio/1.1/vts/utils/call-barrier.cpp rename to broadcastradio/common/vts/utils/call-barrier.cpp diff --git a/broadcastradio/1.1/vts/utils/include/broadcastradio-vts-utils/call-barrier.h b/broadcastradio/common/vts/utils/include/broadcastradio-vts-utils/call-barrier.h similarity index 100% rename from broadcastradio/1.1/vts/utils/include/broadcastradio-vts-utils/call-barrier.h rename to broadcastradio/common/vts/utils/include/broadcastradio-vts-utils/call-barrier.h diff --git a/broadcastradio/common/vts/utils/include/broadcastradio-vts-utils/environment-utils.h b/broadcastradio/common/vts/utils/include/broadcastradio-vts-utils/environment-utils.h new file mode 100644 index 0000000000000000000000000000000000000000..274e6322b8efc6c8746de7f5f08523b651cfa5d6 --- /dev/null +++ b/broadcastradio/common/vts/utils/include/broadcastradio-vts-utils/environment-utils.h @@ -0,0 +1,41 @@ +/* + * Copyright (C) 
2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef ANDROID_HARDWARE_BROADCASTRADIO_VTS_ENVIRONMENT_UTILS +#define ANDROID_HARDWARE_BROADCASTRADIO_VTS_ENVIRONMENT_UTILS + +#include + +namespace android { +namespace hardware { +namespace broadcastradio { +namespace vts { + +// Test environment for BroadcastRadio HIDL HAL. +template +class BroadcastRadioHidlEnvironment : public ::testing::VtsHalHidlTargetTestEnvBase { + public: + virtual void registerTestServices() override { + using expander = int[]; + (void)expander{0, (registerTestService(), 0)...}; + } +}; + +} // namespace vts +} // namespace broadcastradio +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_BROADCASTRADIO_VTS_ENVIRONMENT_UTILS diff --git a/broadcastradio/1.1/vts/utils/include/broadcastradio-vts-utils/mock-timeout.h b/broadcastradio/common/vts/utils/include/broadcastradio-vts-utils/mock-timeout.h similarity index 68% rename from broadcastradio/1.1/vts/utils/include/broadcastradio-vts-utils/mock-timeout.h rename to broadcastradio/common/vts/utils/include/broadcastradio-vts-utils/mock-timeout.h index b0ce08806d3b791a87547ba35a442858ec134823..1f716f1bced17a51fc2379867508d846a34614f8 100644 --- a/broadcastradio/1.1/vts/utils/include/broadcastradio-vts-utils/mock-timeout.h +++ b/broadcastradio/common/vts/utils/include/broadcastradio-vts-utils/mock-timeout.h @@ -13,12 +13,27 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -#ifndef ANDROID_HARDWARE_BROADCASTRADIO_V1_1_MOCK_TIMEOUT -#define ANDROID_HARDWARE_BROADCASTRADIO_V1_1_MOCK_TIMEOUT +#ifndef ANDROID_HARDWARE_BROADCASTRADIO_VTS_MOCK_TIMEOUT +#define ANDROID_HARDWARE_BROADCASTRADIO_VTS_MOCK_TIMEOUT #include #include +#ifndef EGMOCK_VERBOSE +#define EGMOCK_VERBOSE 0 +#endif + +/** + * Print log message. + * + * INTERNAL IMPLEMENTATION - don't use in user code. + */ +#if EGMOCK_VERBOSE +#define EGMOCK_LOG_(...) ALOGV("egmock: " __VA_ARGS__) +#else +#define EGMOCK_LOG_(...) +#endif + /** * Common helper objects for gmock timeout extension. * @@ -30,18 +45,42 @@ std::condition_variable egmock_cond_##Method; /** - * Common method body for gmock timeout extension. + * Function similar to comma operator, to make it possible to return any value returned by mocked + * function (which may be void) and discard the result of the other operation (notification about + * a call). + * + * We need to invoke the mocked function (which result is returned) before the notification (which + * result is dropped) - that's exactly the opposite of comma operator. * * INTERNAL IMPLEMENTATION - don't use in user code. */ -#define EGMOCK_TIMEOUT_METHOD_BODY_(Method, ...) 
\ - auto ret = egmock_##Method(__VA_ARGS__); \ - { \ - std::lock_guard lk(egmock_mut_##Method); \ - egmock_called_##Method = true; \ - egmock_cond_##Method.notify_all(); \ - } \ +template +static T EGMockFlippedComma_(std::function returned, std::function discarded) { + auto ret = returned(); + discarded(); return ret; +} + +template <> +inline void EGMockFlippedComma_(std::function returned, std::function discarded) { + returned(); + discarded(); +} + +/** + * Common method body for gmock timeout extension. + * + * INTERNAL IMPLEMENTATION - don't use in user code. + */ +#define EGMOCK_TIMEOUT_METHOD_BODY_(Method, ...) \ + auto invokeMock = [&]() { return egmock_##Method(__VA_ARGS__); }; \ + auto notify = [&]() { \ + std::lock_guard lk(egmock_mut_##Method); \ + EGMOCK_LOG_(#Method " called"); \ + egmock_called_##Method = true; \ + egmock_cond_##Method.notify_all(); \ + }; \ + return EGMockFlippedComma_(invokeMock, notify); /** * Gmock MOCK_METHOD0 timeout-capable extension. @@ -82,6 +121,7 @@ * EXPECT_TIMEOUT_CALL(account, charge, 100, Currency::USD); */ #define EXPECT_TIMEOUT_CALL(obj, Method, ...) \ + EGMOCK_LOG_(#Method " expected to call"); \ (obj).egmock_called_##Method = false; \ EXPECT_CALL(obj, egmock_##Method(__VA_ARGS__)) @@ -101,6 +141,7 @@ */ #define EXPECT_TIMEOUT_CALL_WAIT(obj, Method, timeout) \ { \ + EGMOCK_LOG_("waiting for " #Method " call"); \ std::unique_lock lk((obj).egmock_mut_##Method); \ if (!(obj).egmock_called_##Method) { \ auto status = (obj).egmock_cond_##Method.wait_for(lk, timeout); \ @@ -108,4 +149,4 @@ } \ } -#endif // ANDROID_HARDWARE_BROADCASTRADIO_V1_1_MOCK_TIMEOUT +#endif // ANDROID_HARDWARE_BROADCASTRADIO_VTS_MOCK_TIMEOUT diff --git a/broadcastradio/common/vts/utils/include/broadcastradio-vts-utils/pointer-utils.h b/broadcastradio/common/vts/utils/include/broadcastradio-vts-utils/pointer-utils.h new file mode 100644 index 0000000000000000000000000000000000000000..0b6f5eb59bf5f28de922d6b3b04f9c4ffbc97d39 --- /dev/null +++ b/broadcastradio/common/vts/utils/include/broadcastradio-vts-utils/pointer-utils.h @@ -0,0 +1,57 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef ANDROID_HARDWARE_BROADCASTRADIO_VTS_POINTER_UTILS +#define ANDROID_HARDWARE_BROADCASTRADIO_VTS_POINTER_UTILS + +#include +#include + +namespace android { +namespace hardware { +namespace broadcastradio { +namespace vts { + +/** + * Clears strong pointer and waits until the object gets destroyed. + * + * @param ptr The pointer to get cleared. + * @param timeout Time to wait for other references. 
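+ * + * Fails the current test (via gtest FAIL()) if the object is still referenced after the timeout elapses.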
+ */ +template +static void clearAndWait(sp& ptr, std::chrono::milliseconds timeout) { + using std::chrono::steady_clock; + + constexpr auto step = 10ms; + + wp wptr = ptr; + ptr.clear(); + + auto limit = steady_clock::now() + timeout; + while (wptr.promote() != nullptr) { + if (steady_clock::now() + step > limit) { + FAIL() << "Pointer was not released within timeout"; + break; + } + std::this_thread::sleep_for(step); + } +} + +} // namespace vts +} // namespace broadcastradio +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_BROADCASTRADIO_VTS_POINTER_UTILS diff --git a/camera/common/1.0/default/Android.bp b/camera/common/1.0/default/Android.bp index 6209cb83a7148f6b22cb312bd0750ee627d18947..21f81f5acc23963e56508d0ab3a6d5782eaf94c8 100644 --- a/camera/common/1.0/default/Android.bp +++ b/camera/common/1.0/default/Android.bp @@ -7,7 +7,9 @@ cc_library_static { "CameraMetadata.cpp", "CameraParameters.cpp", "VendorTagDescriptor.cpp", - "HandleImporter.cpp"], + "HandleImporter.cpp", + "Exif.cpp" + ], cflags: [ "-Werror", "-Wextra", @@ -17,7 +19,9 @@ cc_library_static { "liblog", "libhardware", "libcamera_metadata", - "android.hardware.graphics.mapper@2.0"], + "android.hardware.graphics.mapper@2.0", + "libexif", + ], include_dirs: ["system/media/private/camera/include"], export_include_dirs : ["include"] } diff --git a/camera/common/1.0/default/CameraModule.cpp b/camera/common/1.0/default/CameraModule.cpp index 9217a823b75c267a4e4db2707ad399bc4263e3f8..dc4e0f01ffee6c16fc0bc09db13148e9f6918f59 100644 --- a/camera/common/1.0/default/CameraModule.cpp +++ b/camera/common/1.0/default/CameraModule.cpp @@ -306,7 +306,7 @@ int CameraModule::getCameraInfo(int cameraId, struct camera_info *info) { return ret; } CameraMetadata m; - m = rawInfo.static_camera_characteristics; + m.append(rawInfo.static_camera_characteristics); deriveCameraCharacteristicsKeys(rawInfo.device_version, m); cameraInfo = rawInfo; cameraInfo.static_camera_characteristics = m.release(); diff --git a/camera/common/1.0/default/Exif.cpp b/camera/common/1.0/default/Exif.cpp new file mode 100644 index 0000000000000000000000000000000000000000..6054999a074b87c69ef47d686c7fa06f536215be --- /dev/null +++ b/camera/common/1.0/default/Exif.cpp @@ -0,0 +1,1115 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "CamComm1.0-Exif" +#define ATRACE_TAG ATRACE_TAG_CAMERA +//#define LOG_NDEBUG 0 + +#include + +#include +#include +#include +#include +#include + +#include "Exif.h" + +extern "C" { +#include +} + +namespace std { + +template <> +struct default_delete { + inline void operator()(ExifEntry* entry) const { exif_entry_unref(entry); } +}; + +} // namespace std + + +namespace android { +namespace hardware { +namespace camera { +namespace common { +namespace V1_0 { +namespace helper { + + +class ExifUtilsImpl : public ExifUtils { + public: + ExifUtilsImpl(); + + virtual ~ExifUtilsImpl(); + + // Initialize() can be called multiple times. The setting of Exif tags will be + // cleared. + virtual bool initialize(); + + // set all known fields from a metadata structure + virtual bool setFromMetadata(const CameraMetadata& metadata, + const size_t imageWidth, + const size_t imageHeight); + + // sets the len aperture. + // Returns false if memory allocation fails. + virtual bool setAperture(uint32_t numerator, uint32_t denominator); + + // sets the value of brightness. + // Returns false if memory allocation fails. + virtual bool setBrightness(int32_t numerator, int32_t denominator); + + // sets the color space. + // Returns false if memory allocation fails. + virtual bool setColorSpace(uint16_t color_space); + + // sets the information to compressed data. + // Returns false if memory allocation fails. + virtual bool setComponentsConfiguration(const std::string& components_configuration); + + // sets the compression scheme used for the image data. + // Returns false if memory allocation fails. + virtual bool setCompression(uint16_t compression); + + // sets image contrast. + // Returns false if memory allocation fails. + virtual bool setContrast(uint16_t contrast); + + // sets the date and time of image last modified. It takes local time. The + // name of the tag is DateTime in IFD0. + // Returns false if memory allocation fails. + virtual bool setDateTime(const struct tm& t); + + // sets the image description. + // Returns false if memory allocation fails. + virtual bool setDescription(const std::string& description); + + // sets the digital zoom ratio. If the numerator is 0, it means digital zoom + // was not used. + // Returns false if memory allocation fails. + virtual bool setDigitalZoomRatio(uint32_t numerator, uint32_t denominator); + + // sets the exposure bias. + // Returns false if memory allocation fails. + virtual bool setExposureBias(int32_t numerator, int32_t denominator); + + // sets the exposure mode set when the image was shot. + // Returns false if memory allocation fails. + virtual bool setExposureMode(uint16_t exposure_mode); + + // sets the program used by the camera to set exposure when the picture is + // taken. + // Returns false if memory allocation fails. + virtual bool setExposureProgram(uint16_t exposure_program); + + // sets the exposure time, given in seconds. + // Returns false if memory allocation fails. + virtual bool setExposureTime(uint32_t numerator, uint32_t denominator); + + // sets the status of flash. + // Returns false if memory allocation fails. + virtual bool setFlash(uint16_t flash); + + // sets the F number. + // Returns false if memory allocation fails. + virtual bool setFNumber(uint32_t numerator, uint32_t denominator); + + // sets the focal length of lens used to take the image in millimeters. + // Returns false if memory allocation fails. 
+ virtual bool setFocalLength(uint32_t numerator, uint32_t denominator); + + // sets the degree of overall image gain adjustment. + // Returns false if memory allocation fails. + virtual bool setGainControl(uint16_t gain_control); + + // sets the altitude in meters. + // Returns false if memory allocation fails. + virtual bool setGpsAltitude(double altitude); + + // sets the latitude with degrees minutes seconds format. + // Returns false if memory allocation fails. + virtual bool setGpsLatitude(double latitude); + + // sets the longitude with degrees minutes seconds format. + // Returns false if memory allocation fails. + virtual bool setGpsLongitude(double longitude); + + // sets GPS processing method. + // Returns false if memory allocation fails. + virtual bool setGpsProcessingMethod(const std::string& method); + + // sets GPS date stamp and time stamp (atomic clock). It takes UTC time. + // Returns false if memory allocation fails. + virtual bool setGpsTimestamp(const struct tm& t); + + // sets the length (number of rows) of main image. + // Returns false if memory allocation fails. + virtual bool setImageHeight(uint32_t length); + + // sets the width (number of columes) of main image. + // Returns false if memory allocation fails. + virtual bool setImageWidth(uint32_t width); + + // sets the ISO speed. + // Returns false if memory allocation fails. + virtual bool setIsoSpeedRating(uint16_t iso_speed_ratings); + + // sets the kind of light source. + // Returns false if memory allocation fails. + virtual bool setLightSource(uint16_t light_source); + + // sets the smallest F number of the lens. + // Returns false if memory allocation fails. + virtual bool setMaxAperture(uint32_t numerator, uint32_t denominator); + + // sets the metering mode. + // Returns false if memory allocation fails. + virtual bool setMeteringMode(uint16_t metering_mode); + + // sets image orientation. + // Returns false if memory allocation fails. + virtual bool setOrientation(uint16_t orientation); + + // sets the unit for measuring XResolution and YResolution. + // Returns false if memory allocation fails. + virtual bool setResolutionUnit(uint16_t resolution_unit); + + // sets image saturation. + // Returns false if memory allocation fails. + virtual bool setSaturation(uint16_t saturation); + + // sets the type of scene that was shot. + // Returns false if memory allocation fails. + virtual bool setSceneCaptureType(uint16_t type); + + // sets image sharpness. + // Returns false if memory allocation fails. + virtual bool setSharpness(uint16_t sharpness); + + // sets the shutter speed. + // Returns false if memory allocation fails. + virtual bool setShutterSpeed(int32_t numerator, int32_t denominator); + + // sets the distance to the subject, given in meters. + // Returns false if memory allocation fails. + virtual bool setSubjectDistance(uint32_t numerator, uint32_t denominator); + + // sets the fractions of seconds for the tag. + // Returns false if memory allocation fails. + virtual bool setSubsecTime(const std::string& subsec_time); + + // sets the white balance mode set when the image was shot. + // Returns false if memory allocation fails. + virtual bool setWhiteBalance(uint16_t white_balance); + + // sets the number of pixels per resolution unit in the image width. + // Returns false if memory allocation fails. + virtual bool setXResolution(uint32_t numerator, uint32_t denominator); + + // sets the position of chrominance components in relation to the luminance + // component. 
+  // Returns false if memory allocation fails.
+  virtual bool setYCbCrPositioning(uint16_t ycbcr_positioning);
+
+  // sets the number of pixels per resolution unit in the image length.
+  // Returns false if memory allocation fails.
+  virtual bool setYResolution(uint32_t numerator, uint32_t denominator);
+
+  // sets the manufacturer of camera.
+  // Returns false if memory allocation fails.
+  virtual bool setMake(const std::string& make);
+
+  // sets the model number of camera.
+  // Returns false if memory allocation fails.
+  virtual bool setModel(const std::string& model);
+
+  // Generates APP1 segment.
+  // Returns false if generating APP1 segment fails.
+  virtual bool generateApp1(const void* thumbnail_buffer, uint32_t size);
+
+  // Gets buffer of APP1 segment. This method must be called only after calling
+  // GenerateAPP1().
+  virtual const uint8_t* getApp1Buffer();
+
+  // Gets length of APP1 segment. This method must be called only after calling
+  // GenerateAPP1().
+  virtual unsigned int getApp1Length();
+
+ protected:
+  // sets the version of this standard supported.
+  // Returns false if memory allocation fails.
+  virtual bool setExifVersion(const std::string& exif_version);
+
+  // Resets the pointers and memories.
+  virtual void reset();
+
+  // Adds a variable length tag to |exif_data_|. It will remove the original one
+  // if the tag exists.
+  // Returns the entry of the tag. The reference count of returned ExifEntry is
+  // two.
+  virtual std::unique_ptr<ExifEntry> addVariableLengthEntry(ExifIfd ifd,
+                                                            ExifTag tag,
+                                                            ExifFormat format,
+                                                            uint64_t components,
+                                                            unsigned int size);
+
+  // Adds an entry of |tag| in |exif_data_|. It won't remove the original one if
+  // the tag exists.
+  // Returns the entry of the tag. It adds one reference count to returned
+  // ExifEntry.
+  virtual std::unique_ptr<ExifEntry> addEntry(ExifIfd ifd, ExifTag tag);
+
+  // Helper functions to add exif data with different types.
+  virtual bool setShort(ExifIfd ifd,
+                        ExifTag tag,
+                        uint16_t value,
+                        const std::string& msg);
+
+  virtual bool setLong(ExifIfd ifd,
+                       ExifTag tag,
+                       uint32_t value,
+                       const std::string& msg);
+
+  virtual bool setRational(ExifIfd ifd,
+                           ExifTag tag,
+                           uint32_t numerator,
+                           uint32_t denominator,
+                           const std::string& msg);
+
+  virtual bool setSRational(ExifIfd ifd,
+                            ExifTag tag,
+                            int32_t numerator,
+                            int32_t denominator,
+                            const std::string& msg);
+
+  virtual bool setString(ExifIfd ifd,
+                         ExifTag tag,
+                         ExifFormat format,
+                         const std::string& buffer,
+                         const std::string& msg);
+
+  // Destroys the buffer of APP1 segment if it exists.
+  virtual void destroyApp1();
+
+  // The Exif data (APP1). Owned by this class.
+  ExifData* exif_data_;
+  // The raw data of APP1 segment. It's allocated by ExifMem in |exif_data_| but
+  // owned by this class.
+  uint8_t* app1_buffer_;
+  // The length of |app1_buffer_|.
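+  // Zero until generateApp1() succeeds, and reset to zero again by destroyApp1().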
+ unsigned int app1_length_; + +}; + +#define SET_SHORT(ifd, tag, value) \ + do { \ + if (setShort(ifd, tag, value, #tag) == false) \ + return false; \ + } while (0); + +#define SET_LONG(ifd, tag, value) \ + do { \ + if (setLong(ifd, tag, value, #tag) == false) \ + return false; \ + } while (0); + +#define SET_RATIONAL(ifd, tag, numerator, denominator) \ + do { \ + if (setRational(ifd, tag, numerator, denominator, #tag) == false) \ + return false; \ + } while (0); + +#define SET_SRATIONAL(ifd, tag, numerator, denominator) \ + do { \ + if (setSRational(ifd, tag, numerator, denominator, #tag) == false) \ + return false; \ + } while (0); + +#define SET_STRING(ifd, tag, format, buffer) \ + do { \ + if (setString(ifd, tag, format, buffer, #tag) == false) \ + return false; \ + } while (0); + +// This comes from the Exif Version 2.2 standard table 6. +const char gExifAsciiPrefix[] = {0x41, 0x53, 0x43, 0x49, 0x49, 0x0, 0x0, 0x0}; + +static void setLatitudeOrLongitudeData(unsigned char* data, double num) { + // Take the integer part of |num|. + ExifLong degrees = static_cast(num); + ExifLong minutes = static_cast(60 * (num - degrees)); + ExifLong microseconds = + static_cast(3600000000u * (num - degrees - minutes / 60.0)); + exif_set_rational(data, EXIF_BYTE_ORDER_INTEL, {degrees, 1}); + exif_set_rational(data + sizeof(ExifRational), EXIF_BYTE_ORDER_INTEL, + {minutes, 1}); + exif_set_rational(data + 2 * sizeof(ExifRational), EXIF_BYTE_ORDER_INTEL, + {microseconds, 1000000}); +} + +ExifUtils *ExifUtils::create() { + return new ExifUtilsImpl(); +} + +ExifUtils::~ExifUtils() { +} + +ExifUtilsImpl::ExifUtilsImpl() + : exif_data_(nullptr), app1_buffer_(nullptr), app1_length_(0) {} + +ExifUtilsImpl::~ExifUtilsImpl() { + reset(); +} + + +bool ExifUtilsImpl::initialize() { + reset(); + exif_data_ = exif_data_new(); + if (exif_data_ == nullptr) { + ALOGE("%s: allocate memory for exif_data_ failed", __FUNCTION__); + return false; + } + // set the image options. + exif_data_set_option(exif_data_, EXIF_DATA_OPTION_FOLLOW_SPECIFICATION); + exif_data_set_data_type(exif_data_, EXIF_DATA_TYPE_COMPRESSED); + exif_data_set_byte_order(exif_data_, EXIF_BYTE_ORDER_INTEL); + + // set exif version to 2.2. + if (!setExifVersion("0220")) { + return false; + } + + return true; +} + +bool ExifUtilsImpl::setAperture(uint32_t numerator, uint32_t denominator) { + SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_APERTURE_VALUE, numerator, denominator); + return true; +} + +bool ExifUtilsImpl::setBrightness(int32_t numerator, int32_t denominator) { + SET_SRATIONAL(EXIF_IFD_EXIF, EXIF_TAG_BRIGHTNESS_VALUE, numerator, + denominator); + return true; +} + +bool ExifUtilsImpl::setColorSpace(uint16_t color_space) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_COLOR_SPACE, color_space); + return true; +} + +bool ExifUtilsImpl::setComponentsConfiguration( + const std::string& components_configuration) { + SET_STRING(EXIF_IFD_EXIF, EXIF_TAG_COMPONENTS_CONFIGURATION, + EXIF_FORMAT_UNDEFINED, components_configuration); + return true; +} + +bool ExifUtilsImpl::setCompression(uint16_t compression) { + SET_SHORT(EXIF_IFD_0, EXIF_TAG_COMPRESSION, compression); + return true; +} + +bool ExifUtilsImpl::setContrast(uint16_t contrast) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_CONTRAST, contrast); + return true; +} + +bool ExifUtilsImpl::setDateTime(const struct tm& t) { + // The length is 20 bytes including NULL for termination in Exif standard. 
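+  // e.g. "2018:01:30 11:59:01" is 19 characters plus the terminating NUL.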
+ char str[20]; + int result = snprintf(str, sizeof(str), "%04i:%02i:%02i %02i:%02i:%02i", + t.tm_year + 1900, t.tm_mon + 1, t.tm_mday, t.tm_hour, + t.tm_min, t.tm_sec); + if (result != sizeof(str) - 1) { + ALOGW("%s: Input time is invalid", __FUNCTION__); + return false; + } + std::string buffer(str); + SET_STRING(EXIF_IFD_0, EXIF_TAG_DATE_TIME, EXIF_FORMAT_ASCII, buffer); + SET_STRING(EXIF_IFD_EXIF, EXIF_TAG_DATE_TIME_ORIGINAL, EXIF_FORMAT_ASCII, + buffer); + SET_STRING(EXIF_IFD_EXIF, EXIF_TAG_DATE_TIME_DIGITIZED, EXIF_FORMAT_ASCII, + buffer); + return true; +} + +bool ExifUtilsImpl::setDescription(const std::string& description) { + SET_STRING(EXIF_IFD_0, EXIF_TAG_IMAGE_DESCRIPTION, EXIF_FORMAT_ASCII, + description); + return true; +} + +bool ExifUtilsImpl::setDigitalZoomRatio(uint32_t numerator, uint32_t denominator) { + SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_DIGITAL_ZOOM_RATIO, numerator, + denominator); + return true; +} + +bool ExifUtilsImpl::setExposureBias(int32_t numerator, int32_t denominator) { + SET_SRATIONAL(EXIF_IFD_EXIF, EXIF_TAG_EXPOSURE_BIAS_VALUE, numerator, + denominator); + return true; +} + +bool ExifUtilsImpl::setExposureMode(uint16_t exposure_mode) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_EXPOSURE_MODE, exposure_mode); + return true; +} + +bool ExifUtilsImpl::setExposureProgram(uint16_t exposure_program) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_EXPOSURE_PROGRAM, exposure_program); + return true; +} + +bool ExifUtilsImpl::setExposureTime(uint32_t numerator, uint32_t denominator) { + SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_EXPOSURE_TIME, numerator, denominator); + return true; +} + +bool ExifUtilsImpl::setFlash(uint16_t flash) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_FLASH, flash); + return true; +} + +bool ExifUtilsImpl::setFNumber(uint32_t numerator, uint32_t denominator) { + SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_FNUMBER, numerator, denominator); + return true; +} + +bool ExifUtilsImpl::setFocalLength(uint32_t numerator, uint32_t denominator) { + SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_FOCAL_LENGTH, numerator, denominator); + return true; +} + +bool ExifUtilsImpl::setGainControl(uint16_t gain_control) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_GAIN_CONTROL, gain_control); + return true; +} + +bool ExifUtilsImpl::setGpsAltitude(double altitude) { + ExifTag refTag = static_cast(EXIF_TAG_GPS_ALTITUDE_REF); + std::unique_ptr refEntry = + addVariableLengthEntry(EXIF_IFD_GPS, refTag, EXIF_FORMAT_BYTE, 1, 1); + if (!refEntry) { + ALOGE("%s: Adding GPSAltitudeRef exif entry failed", __FUNCTION__); + return false; + } + if (altitude >= 0) { + *refEntry->data = 0; + } else { + *refEntry->data = 1; + altitude *= -1; + } + + ExifTag tag = static_cast(EXIF_TAG_GPS_ALTITUDE); + std::unique_ptr entry = addVariableLengthEntry( + EXIF_IFD_GPS, tag, EXIF_FORMAT_RATIONAL, 1, sizeof(ExifRational)); + if (!entry) { + exif_content_remove_entry(exif_data_->ifd[EXIF_IFD_GPS], refEntry.get()); + ALOGE("%s: Adding GPSAltitude exif entry failed", __FUNCTION__); + return false; + } + exif_set_rational(entry->data, EXIF_BYTE_ORDER_INTEL, + {static_cast(altitude * 1000), 1000}); + + return true; +} + +bool ExifUtilsImpl::setGpsLatitude(double latitude) { + const ExifTag refTag = static_cast(EXIF_TAG_GPS_LATITUDE_REF); + std::unique_ptr refEntry = + addVariableLengthEntry(EXIF_IFD_GPS, refTag, EXIF_FORMAT_ASCII, 2, 2); + if (!refEntry) { + ALOGE("%s: Adding GPSLatitudeRef exif entry failed", __FUNCTION__); + return false; + } + if (latitude >= 0) { + memcpy(refEntry->data, "N", sizeof("N")); + } else { + 
memcpy(refEntry->data, "S", sizeof("S")); + latitude *= -1; + } + + const ExifTag tag = static_cast(EXIF_TAG_GPS_LATITUDE); + std::unique_ptr entry = addVariableLengthEntry( + EXIF_IFD_GPS, tag, EXIF_FORMAT_RATIONAL, 3, 3 * sizeof(ExifRational)); + if (!entry) { + exif_content_remove_entry(exif_data_->ifd[EXIF_IFD_GPS], refEntry.get()); + ALOGE("%s: Adding GPSLatitude exif entry failed", __FUNCTION__); + return false; + } + setLatitudeOrLongitudeData(entry->data, latitude); + + return true; +} + +bool ExifUtilsImpl::setGpsLongitude(double longitude) { + ExifTag refTag = static_cast(EXIF_TAG_GPS_LONGITUDE_REF); + std::unique_ptr refEntry = + addVariableLengthEntry(EXIF_IFD_GPS, refTag, EXIF_FORMAT_ASCII, 2, 2); + if (!refEntry) { + ALOGE("%s: Adding GPSLongitudeRef exif entry failed", __FUNCTION__); + return false; + } + if (longitude >= 0) { + memcpy(refEntry->data, "E", sizeof("E")); + } else { + memcpy(refEntry->data, "W", sizeof("W")); + longitude *= -1; + } + + ExifTag tag = static_cast(EXIF_TAG_GPS_LONGITUDE); + std::unique_ptr entry = addVariableLengthEntry( + EXIF_IFD_GPS, tag, EXIF_FORMAT_RATIONAL, 3, 3 * sizeof(ExifRational)); + if (!entry) { + exif_content_remove_entry(exif_data_->ifd[EXIF_IFD_GPS], refEntry.get()); + ALOGE("%s: Adding GPSLongitude exif entry failed", __FUNCTION__); + return false; + } + setLatitudeOrLongitudeData(entry->data, longitude); + + return true; +} + +bool ExifUtilsImpl::setGpsProcessingMethod(const std::string& method) { + std::string buffer = + std::string(gExifAsciiPrefix, sizeof(gExifAsciiPrefix)) + method; + SET_STRING(EXIF_IFD_GPS, static_cast(EXIF_TAG_GPS_PROCESSING_METHOD), + EXIF_FORMAT_UNDEFINED, buffer); + return true; +} + +bool ExifUtilsImpl::setGpsTimestamp(const struct tm& t) { + const ExifTag dateTag = static_cast(EXIF_TAG_GPS_DATE_STAMP); + const size_t kGpsDateStampSize = 11; + std::unique_ptr entry = + addVariableLengthEntry(EXIF_IFD_GPS, dateTag, EXIF_FORMAT_ASCII, + kGpsDateStampSize, kGpsDateStampSize); + if (!entry) { + ALOGE("%s: Adding GPSDateStamp exif entry failed", __FUNCTION__); + return false; + } + int result = + snprintf(reinterpret_cast(entry->data), kGpsDateStampSize, + "%04i:%02i:%02i", t.tm_year + 1900, t.tm_mon + 1, t.tm_mday); + if (result != kGpsDateStampSize - 1) { + ALOGW("%s: Input time is invalid", __FUNCTION__); + return false; + } + + const ExifTag timeTag = static_cast(EXIF_TAG_GPS_TIME_STAMP); + entry = addVariableLengthEntry(EXIF_IFD_GPS, timeTag, EXIF_FORMAT_RATIONAL, 3, + 3 * sizeof(ExifRational)); + if (!entry) { + ALOGE("%s: Adding GPSTimeStamp exif entry failed", __FUNCTION__); + return false; + } + exif_set_rational(entry->data, EXIF_BYTE_ORDER_INTEL, + {static_cast(t.tm_hour), 1}); + exif_set_rational(entry->data + sizeof(ExifRational), EXIF_BYTE_ORDER_INTEL, + {static_cast(t.tm_min), 1}); + exif_set_rational(entry->data + 2 * sizeof(ExifRational), + EXIF_BYTE_ORDER_INTEL, + {static_cast(t.tm_sec), 1}); + + return true; +} + +bool ExifUtilsImpl::setImageHeight(uint32_t length) { + SET_LONG(EXIF_IFD_0, EXIF_TAG_IMAGE_LENGTH, length); + SET_LONG(EXIF_IFD_EXIF, EXIF_TAG_PIXEL_Y_DIMENSION, length); + return true; +} + +bool ExifUtilsImpl::setImageWidth(uint32_t width) { + SET_LONG(EXIF_IFD_0, EXIF_TAG_IMAGE_WIDTH, width); + SET_LONG(EXIF_IFD_EXIF, EXIF_TAG_PIXEL_X_DIMENSION, width); + return true; +} + +bool ExifUtilsImpl::setIsoSpeedRating(uint16_t iso_speed_ratings) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_ISO_SPEED_RATINGS, iso_speed_ratings); + return true; +} + +bool 
ExifUtilsImpl::setLightSource(uint16_t light_source) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_LIGHT_SOURCE, light_source); + return true; +} + +bool ExifUtilsImpl::setMaxAperture(uint32_t numerator, uint32_t denominator) { + SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_MAX_APERTURE_VALUE, numerator, + denominator); + return true; +} + +bool ExifUtilsImpl::setMeteringMode(uint16_t metering_mode) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_METERING_MODE, metering_mode); + return true; +} + +bool ExifUtilsImpl::setOrientation(uint16_t orientation) { + /* + * Orientation value: + * 1 2 3 4 5 6 7 8 + * + * 888888 888888 88 88 8888888888 88 88 8888888888 + * 88 88 88 88 88 88 88 88 88 88 88 88 + * 8888 8888 8888 8888 88 8888888888 8888888888 88 + * 88 88 88 88 + * 88 88 888888 888888 + */ + int value = 1; + switch (orientation) { + case 90: + value = 6; + break; + case 180: + value = 3; + break; + case 270: + value = 8; + break; + default: + break; + } + SET_SHORT(EXIF_IFD_0, EXIF_TAG_ORIENTATION, value); + return true; +} + +bool ExifUtilsImpl::setResolutionUnit(uint16_t resolution_unit) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_RESOLUTION_UNIT, resolution_unit); + return true; +} + +bool ExifUtilsImpl::setSaturation(uint16_t saturation) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_SATURATION, saturation); + return true; +} + +bool ExifUtilsImpl::setSceneCaptureType(uint16_t type) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_SCENE_CAPTURE_TYPE, type); + return true; +} + +bool ExifUtilsImpl::setSharpness(uint16_t sharpness) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_SHARPNESS, sharpness); + return true; +} + +bool ExifUtilsImpl::setShutterSpeed(int32_t numerator, int32_t denominator) { + SET_SRATIONAL(EXIF_IFD_EXIF, EXIF_TAG_SHUTTER_SPEED_VALUE, numerator, + denominator); + return true; +} + +bool ExifUtilsImpl::setSubjectDistance(uint32_t numerator, uint32_t denominator) { + SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_SUBJECT_DISTANCE, numerator, + denominator); + return true; +} + +bool ExifUtilsImpl::setSubsecTime(const std::string& subsec_time) { + SET_STRING(EXIF_IFD_EXIF, EXIF_TAG_SUB_SEC_TIME, EXIF_FORMAT_ASCII, + subsec_time); + SET_STRING(EXIF_IFD_EXIF, EXIF_TAG_SUB_SEC_TIME_ORIGINAL, EXIF_FORMAT_ASCII, + subsec_time); + SET_STRING(EXIF_IFD_EXIF, EXIF_TAG_SUB_SEC_TIME_DIGITIZED, EXIF_FORMAT_ASCII, + subsec_time); + return true; +} + +bool ExifUtilsImpl::setWhiteBalance(uint16_t white_balance) { + SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_WHITE_BALANCE, white_balance); + return true; +} + +bool ExifUtilsImpl::setXResolution(uint32_t numerator, uint32_t denominator) { + SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_X_RESOLUTION, numerator, denominator); + return true; +} + +bool ExifUtilsImpl::setYCbCrPositioning(uint16_t ycbcr_positioning) { + SET_SHORT(EXIF_IFD_0, EXIF_TAG_YCBCR_POSITIONING, ycbcr_positioning); + return true; +} + +bool ExifUtilsImpl::setYResolution(uint32_t numerator, uint32_t denominator) { + SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_Y_RESOLUTION, numerator, denominator); + return true; +} + +bool ExifUtilsImpl::generateApp1(const void* thumbnail_buffer, uint32_t size) { + destroyApp1(); + exif_data_->data = const_cast(static_cast(thumbnail_buffer)); + exif_data_->size = size; + // Save the result into |app1_buffer_|. + exif_data_save_data(exif_data_, &app1_buffer_, &app1_length_); + if (!app1_length_) { + ALOGE("%s: Allocate memory for app1_buffer_ failed", __FUNCTION__); + return false; + } + /* + * The JPEG segment size is 16 bits in spec. 
The size of APP1 segment should + * be smaller than 65533 because there are two bytes for segment size field. + */ + if (app1_length_ > 65533) { + destroyApp1(); + ALOGE("%s: The size of APP1 segment is too large", __FUNCTION__); + return false; + } + return true; +} + +const uint8_t* ExifUtilsImpl::getApp1Buffer() { + return app1_buffer_; +} + +unsigned int ExifUtilsImpl::getApp1Length() { + return app1_length_; +} + +bool ExifUtilsImpl::setExifVersion(const std::string& exif_version) { + SET_STRING(EXIF_IFD_EXIF, EXIF_TAG_EXIF_VERSION, EXIF_FORMAT_UNDEFINED, exif_version); + return true; +} + +bool ExifUtilsImpl::setMake(const std::string& make) { + SET_STRING(EXIF_IFD_0, EXIF_TAG_MAKE, EXIF_FORMAT_ASCII, make); + return true; +} + +bool ExifUtilsImpl::setModel(const std::string& model) { + SET_STRING(EXIF_IFD_0, EXIF_TAG_MODEL, EXIF_FORMAT_ASCII, model); + return true; +} + +void ExifUtilsImpl::reset() { + destroyApp1(); + if (exif_data_) { + /* + * Since we decided to ignore the original APP1, we are sure that there is + * no thumbnail allocated by libexif. |exif_data_->data| is actually + * allocated by JpegCompressor. sets |exif_data_->data| to nullptr to + * prevent exif_data_unref() destroy it incorrectly. + */ + exif_data_->data = nullptr; + exif_data_->size = 0; + exif_data_unref(exif_data_); + exif_data_ = nullptr; + } +} + +std::unique_ptr ExifUtilsImpl::addVariableLengthEntry(ExifIfd ifd, + ExifTag tag, + ExifFormat format, + uint64_t components, + unsigned int size) { + // Remove old entry if exists. + exif_content_remove_entry(exif_data_->ifd[ifd], + exif_content_get_entry(exif_data_->ifd[ifd], tag)); + ExifMem* mem = exif_mem_new_default(); + if (!mem) { + ALOGE("%s: Allocate memory for exif entry failed", __FUNCTION__); + return nullptr; + } + std::unique_ptr entry(exif_entry_new_mem(mem)); + if (!entry) { + ALOGE("%s: Allocate memory for exif entry failed", __FUNCTION__); + exif_mem_unref(mem); + return nullptr; + } + void* tmpBuffer = exif_mem_alloc(mem, size); + if (!tmpBuffer) { + ALOGE("%s: Allocate memory for exif entry failed", __FUNCTION__); + exif_mem_unref(mem); + return nullptr; + } + + entry->data = static_cast(tmpBuffer); + entry->tag = tag; + entry->format = format; + entry->components = components; + entry->size = size; + + exif_content_add_entry(exif_data_->ifd[ifd], entry.get()); + exif_mem_unref(mem); + + return entry; +} + +std::unique_ptr ExifUtilsImpl::addEntry(ExifIfd ifd, ExifTag tag) { + std::unique_ptr entry(exif_content_get_entry(exif_data_->ifd[ifd], tag)); + if (entry) { + // exif_content_get_entry() won't ref the entry, so we ref here. 
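+    // The extra reference is balanced by the exif_entry_unref() call in the
+    // std::unique_ptr deleter (see the default_delete specialization above).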
+ exif_entry_ref(entry.get()); + return entry; + } + entry.reset(exif_entry_new()); + if (!entry) { + ALOGE("%s: Allocate memory for exif entry failed", __FUNCTION__); + return nullptr; + } + entry->tag = tag; + exif_content_add_entry(exif_data_->ifd[ifd], entry.get()); + exif_entry_initialize(entry.get(), tag); + return entry; +} + +bool ExifUtilsImpl::setShort(ExifIfd ifd, + ExifTag tag, + uint16_t value, + const std::string& msg) { + std::unique_ptr entry = addEntry(ifd, tag); + if (!entry) { + ALOGE("%s: Adding '%s' entry failed", __FUNCTION__, msg.c_str()); + return false; + } + exif_set_short(entry->data, EXIF_BYTE_ORDER_INTEL, value); + return true; +} + +bool ExifUtilsImpl::setLong(ExifIfd ifd, + ExifTag tag, + uint32_t value, + const std::string& msg) { + std::unique_ptr entry = addEntry(ifd, tag); + if (!entry) { + ALOGE("%s: Adding '%s' entry failed", __FUNCTION__, msg.c_str()); + return false; + } + exif_set_long(entry->data, EXIF_BYTE_ORDER_INTEL, value); + return true; +} + +bool ExifUtilsImpl::setRational(ExifIfd ifd, + ExifTag tag, + uint32_t numerator, + uint32_t denominator, + const std::string& msg) { + std::unique_ptr entry = addEntry(ifd, tag); + if (!entry) { + ALOGE("%s: Adding '%s' entry failed", __FUNCTION__, msg.c_str()); + return false; + } + exif_set_rational(entry->data, EXIF_BYTE_ORDER_INTEL, + {numerator, denominator}); + return true; +} + +bool ExifUtilsImpl::setSRational(ExifIfd ifd, + ExifTag tag, + int32_t numerator, + int32_t denominator, + const std::string& msg) { + std::unique_ptr entry = addEntry(ifd, tag); + if (!entry) { + ALOGE("%s: Adding '%s' entry failed", __FUNCTION__, msg.c_str()); + return false; + } + exif_set_srational(entry->data, EXIF_BYTE_ORDER_INTEL, + {numerator, denominator}); + return true; +} + +bool ExifUtilsImpl::setString(ExifIfd ifd, + ExifTag tag, + ExifFormat format, + const std::string& buffer, + const std::string& msg) { + size_t entry_size = buffer.length(); + // Since the exif format is undefined, NULL termination is not necessary. + if (format == EXIF_FORMAT_ASCII) { + entry_size++; + } + std::unique_ptr entry = + addVariableLengthEntry(ifd, tag, format, entry_size, entry_size); + if (!entry) { + ALOGE("%s: Adding '%s' entry failed", __FUNCTION__, msg.c_str()); + return false; + } + memcpy(entry->data, buffer.c_str(), entry_size); + return true; +} + +void ExifUtilsImpl::destroyApp1() { + /* + * Since there is no API to access ExifMem in ExifData->priv, we use free + * here, which is the default free function in libexif. See + * exif_data_save_data() for detail. + */ + free(app1_buffer_); + app1_buffer_ = nullptr; + app1_length_ = 0; +} + +bool ExifUtilsImpl::setFromMetadata(const CameraMetadata& metadata, + const size_t imageWidth, + const size_t imageHeight) { + // How precise the float-to-rational conversion for EXIF tags would be. 
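+  // e.g. a focal length of 4.38 mm is stored below as the rational 43800/10000.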
+ constexpr int kRationalPrecision = 10000; + if (!setImageWidth(imageWidth) || + !setImageHeight(imageHeight)) { + ALOGE("%s: setting image resolution failed.", __FUNCTION__); + return false; + } + + struct timespec tp; + struct tm time_info; + bool time_available = clock_gettime(CLOCK_REALTIME, &tp) != -1; + localtime_r(&tp.tv_sec, &time_info); + if (!setDateTime(time_info)) { + ALOGE("%s: setting data time failed.", __FUNCTION__); + return false; + } + + float focal_length; + camera_metadata_ro_entry entry = metadata.find(ANDROID_LENS_FOCAL_LENGTH); + if (entry.count) { + focal_length = entry.data.f[0]; + + if (!setFocalLength( + static_cast(focal_length * kRationalPrecision), + kRationalPrecision)) { + ALOGE("%s: setting focal length failed.", __FUNCTION__); + return false; + } + } else { + ALOGV("%s: Cannot find focal length in metadata.", __FUNCTION__); + } + + if (metadata.exists(ANDROID_JPEG_GPS_COORDINATES)) { + entry = metadata.find(ANDROID_JPEG_GPS_COORDINATES); + if (entry.count < 3) { + ALOGE("%s: Gps coordinates in metadata is not complete.", __FUNCTION__); + return false; + } + if (!setGpsLatitude(entry.data.d[0])) { + ALOGE("%s: setting gps latitude failed.", __FUNCTION__); + return false; + } + if (!setGpsLongitude(entry.data.d[1])) { + ALOGE("%s: setting gps longitude failed.", __FUNCTION__); + return false; + } + if (!setGpsAltitude(entry.data.d[2])) { + ALOGE("%s: setting gps altitude failed.", __FUNCTION__); + return false; + } + } + + if (metadata.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) { + entry = metadata.find(ANDROID_JPEG_GPS_PROCESSING_METHOD); + std::string method_str(reinterpret_cast(entry.data.u8)); + if (!setGpsProcessingMethod(method_str)) { + ALOGE("%s: setting gps processing method failed.", __FUNCTION__); + return false; + } + } + + if (time_available && metadata.exists(ANDROID_JPEG_GPS_TIMESTAMP)) { + entry = metadata.find(ANDROID_JPEG_GPS_TIMESTAMP); + time_t timestamp = static_cast(entry.data.i64[0]); + if (gmtime_r(×tamp, &time_info)) { + if (!setGpsTimestamp(time_info)) { + ALOGE("%s: setting gps timestamp failed.", __FUNCTION__); + return false; + } + } else { + ALOGE("%s: Time tranformation failed.", __FUNCTION__); + return false; + } + } + + if (metadata.exists(ANDROID_JPEG_ORIENTATION)) { + entry = metadata.find(ANDROID_JPEG_ORIENTATION); + if (!setOrientation(entry.data.i32[0])) { + ALOGE("%s: setting orientation failed.", __FUNCTION__); + return false; + } + } + + if (metadata.exists(ANDROID_SENSOR_EXPOSURE_TIME)) { + entry = metadata.find(ANDROID_SENSOR_EXPOSURE_TIME); + // int64_t of nanoseconds + if (!setExposureTime(entry.data.i64[0],1000000000u)) { + ALOGE("%s: setting exposure time failed.", __FUNCTION__); + return false; + } + } + + if (metadata.exists(ANDROID_LENS_APERTURE)) { + const int kAperturePrecision = 10000; + entry = metadata.find(ANDROID_LENS_APERTURE); + if (!setFNumber(entry.data.f[0] * kAperturePrecision, + kAperturePrecision)) { + ALOGE("%s: setting F number failed.", __FUNCTION__); + return false; + } + } + + if (metadata.exists(ANDROID_FLASH_INFO_AVAILABLE)) { + entry = metadata.find(ANDROID_FLASH_INFO_AVAILABLE); + if (entry.data.u8[0] == ANDROID_FLASH_INFO_AVAILABLE_FALSE) { + const uint32_t kNoFlashFunction = 0x20; + if (!setFlash(kNoFlashFunction)) { + ALOGE("%s: setting flash failed.", __FUNCTION__); + return false; + } + } else { + ALOGE("%s: Unsupported flash info: %d",__FUNCTION__, entry.data.u8[0]); + return false; + } + } + + if (metadata.exists(ANDROID_CONTROL_AWB_MODE)) { + entry = 
metadata.find(ANDROID_CONTROL_AWB_MODE); + if (entry.data.u8[0] == ANDROID_CONTROL_AWB_MODE_AUTO) { + const uint16_t kAutoWhiteBalance = 0; + if (!setWhiteBalance(kAutoWhiteBalance)) { + ALOGE("%s: setting white balance failed.", __FUNCTION__); + return false; + } + } else { + ALOGE("%s: Unsupported awb mode: %d", __FUNCTION__, entry.data.u8[0]); + return false; + } + } + + if (time_available) { + char str[4]; + if (snprintf(str, sizeof(str), "%03ld", tp.tv_nsec / 1000000) < 0) { + ALOGE("%s: Subsec is invalid: %ld", __FUNCTION__, tp.tv_nsec); + return false; + } + if (!setSubsecTime(std::string(str))) { + ALOGE("%s: setting subsec time failed.", __FUNCTION__); + return false; + } + } + + return true; +} + +} // namespace helper +} // namespace V1_0 +} // namespace common +} // namespace camera +} // namespace hardware +} // namespace android diff --git a/camera/common/1.0/default/HandleImporter.cpp b/camera/common/1.0/default/HandleImporter.cpp index fd8b943dc365badd7cfd4da002585ea5208b81ee..21706a84a312709dfed8741000ac39784574d691 100644 --- a/camera/common/1.0/default/HandleImporter.cpp +++ b/camera/common/1.0/default/HandleImporter.cpp @@ -134,6 +134,97 @@ void HandleImporter::closeFence(int fd) const { } } +void* HandleImporter::lock( + buffer_handle_t& buf, uint64_t cpuUsage, size_t size) { + Mutex::Autolock lock(mLock); + void *ret = 0; + IMapper::Rect accessRegion { 0, 0, static_cast(size), 1 }; + + if (!mInitialized) { + initializeLocked(); + } + + if (mMapper == nullptr) { + ALOGE("%s: mMapper is null!", __FUNCTION__); + return ret; + } + + hidl_handle acquireFenceHandle; + auto buffer = const_cast(buf); + mMapper->lock(buffer, cpuUsage, accessRegion, acquireFenceHandle, + [&](const auto& tmpError, const auto& tmpPtr) { + if (tmpError == MapperError::NONE) { + ret = tmpPtr; + } else { + ALOGE("%s: failed to lock error %d!", + __FUNCTION__, tmpError); + } + }); + + ALOGV("%s: ptr %p size: %zu", __FUNCTION__, ret, size); + return ret; +} + + +YCbCrLayout HandleImporter::lockYCbCr( + buffer_handle_t& buf, uint64_t cpuUsage, + const IMapper::Rect& accessRegion) { + Mutex::Autolock lock(mLock); + YCbCrLayout layout = {}; + + if (!mInitialized) { + initializeLocked(); + } + + if (mMapper == nullptr) { + ALOGE("%s: mMapper is null!", __FUNCTION__); + return layout; + } + + hidl_handle acquireFenceHandle; + auto buffer = const_cast(buf); + mMapper->lockYCbCr(buffer, cpuUsage, accessRegion, acquireFenceHandle, + [&](const auto& tmpError, const auto& tmpLayout) { + if (tmpError == MapperError::NONE) { + layout = tmpLayout; + } else { + ALOGE("%s: failed to lockYCbCr error %d!", __FUNCTION__, tmpError); + } + }); + + ALOGV("%s: layout y %p cb %p cr %p y_str %d c_str %d c_step %d", + __FUNCTION__, layout.y, layout.cb, layout.cr, + layout.yStride, layout.cStride, layout.chromaStep); + return layout; +} + +int HandleImporter::unlock(buffer_handle_t& buf) { + int releaseFence = -1; + auto buffer = const_cast(buf); + mMapper->unlock( + buffer, [&](const auto& tmpError, const auto& tmpReleaseFence) { + if (tmpError == MapperError::NONE) { + auto fenceHandle = tmpReleaseFence.getNativeHandle(); + if (fenceHandle) { + if (fenceHandle->numInts != 0 || fenceHandle->numFds != 1) { + ALOGE("%s: bad release fence numInts %d numFds %d", + __FUNCTION__, fenceHandle->numInts, fenceHandle->numFds); + return; + } + releaseFence = dup(fenceHandle->data[0]); + if (releaseFence <= 0) { + ALOGE("%s: bad release fence FD %d", + __FUNCTION__, releaseFence); + } + } + } else { + ALOGE("%s: failed to unlock error 
%d!", __FUNCTION__, tmpError); + } + }); + + return releaseFence; +} + } // namespace helper } // namespace V1_0 } // namespace common diff --git a/camera/common/1.0/default/VendorTagDescriptor.cpp b/camera/common/1.0/default/VendorTagDescriptor.cpp index bc182706058d76d51c569baa3626a811ab80b5b2..1f5385795b51e9640e43cd7276dfdc2c8cb5f98e 100644 --- a/camera/common/1.0/default/VendorTagDescriptor.cpp +++ b/camera/common/1.0/default/VendorTagDescriptor.cpp @@ -116,11 +116,11 @@ const char* VendorTagDescriptor::getTagName(uint32_t tag) const { } int VendorTagDescriptor::getTagType(uint32_t tag) const { - ssize_t index = mTagToNameMap.indexOfKey(tag); - if (index < 0) { + auto iter = mTagToTypeMap.find(tag); + if (iter == mTagToTypeMap.end()) { return VENDOR_TAG_TYPE_ERR; } - return mTagToTypeMap.valueFor(tag); + return iter->second; } const SortedVector* VendorTagDescriptor::getAllSectionNames() const { @@ -167,7 +167,7 @@ void VendorTagDescriptor::dump(int fd, int verbosity, int indentation) const { String8 name = mTagToNameMap.valueAt(i); uint32_t sectionId = mTagToSectionMap.valueFor(tag); String8 sectionName = mSections[sectionId]; - int type = mTagToTypeMap.valueFor(tag); + int type = mTagToTypeMap.at(tag); const char* typeName = (type >= 0 && type < NUM_TYPES) ? camera_metadata_type_names[type] : "UNKNOWN"; dprintf(fd, "%*s0x%x (%s) with type %d (%s) defined in section %s\n", indentation + 2, @@ -251,7 +251,7 @@ status_t VendorTagDescriptor::createDescriptorFromOps(const vendor_tag_ops_t* vO ALOGE("%s: tag type %d from vendor ops does not exist.", __FUNCTION__, tagType); return BAD_VALUE; } - desc->mTagToTypeMap.add(tag, tagType); + desc->mTagToTypeMap.insert(std::make_pair(tag, tagType)); } desc->mSections = sections; diff --git a/camera/common/1.0/default/include/Exif.h b/camera/common/1.0/default/include/Exif.h new file mode 100644 index 0000000000000000000000000000000000000000..dc31679a67c580fc2965704859e826f68557065a --- /dev/null +++ b/camera/common/1.0/default/include/Exif.h @@ -0,0 +1,256 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_INTERFACES_CAMERA_COMMON_1_0_EXIF_H +#define ANDROID_HARDWARE_INTERFACES_CAMERA_COMMON_1_0_EXIF_H + +#include "CameraMetadata.h" + +namespace android { +namespace hardware { +namespace camera { +namespace common { +namespace V1_0 { +namespace helper { + + +// This is based on the original ChromeOS ARC implementation of a V4L2 HAL + +// ExifUtils can generate APP1 segment with tags which caller set. ExifUtils can +// also add a thumbnail in the APP1 segment if thumbnail size is specified. +// ExifUtils can be reused with different images by calling initialize(). +// +// Example of using this class : +// std::unique_ptr utils(ExifUtils::Create()); +// utils->initialize(); +// ... +// // Call ExifUtils functions to set Exif tags. +// ... 
+// utils->GenerateApp1(thumbnail_buffer, thumbnail_size); +// unsigned int app1Length = utils->GetApp1Length(); +// uint8_t* app1Buffer = new uint8_t[app1Length]; +// memcpy(app1Buffer, utils->GetApp1Buffer(), app1Length); +class ExifUtils { + + public: + virtual ~ExifUtils(); + + static ExifUtils* create(); + + // Initialize() can be called multiple times. The setting of Exif tags will be + // cleared. + virtual bool initialize() = 0; + + // Set all known fields from a metadata structure + virtual bool setFromMetadata(const CameraMetadata& metadata, + const size_t imageWidth, + const size_t imageHeight) = 0; + + // Sets the len aperture. + // Returns false if memory allocation fails. + virtual bool setAperture(uint32_t numerator, uint32_t denominator) = 0; + + // Sets the value of brightness. + // Returns false if memory allocation fails. + virtual bool setBrightness(int32_t numerator, int32_t denominator) = 0; + + // Sets the color space. + // Returns false if memory allocation fails. + virtual bool setColorSpace(uint16_t color_space) = 0; + + // Sets the information to compressed data. + // Returns false if memory allocation fails. + virtual bool setComponentsConfiguration(const std::string& components_configuration) = 0; + + // Sets the compression scheme used for the image data. + // Returns false if memory allocation fails. + virtual bool setCompression(uint16_t compression) = 0; + + // Sets image contrast. + // Returns false if memory allocation fails. + virtual bool setContrast(uint16_t contrast) = 0; + + // Sets the date and time of image last modified. It takes local time. The + // name of the tag is DateTime in IFD0. + // Returns false if memory allocation fails. + virtual bool setDateTime(const struct tm& t) = 0; + + // Sets the image description. + // Returns false if memory allocation fails. + virtual bool setDescription(const std::string& description) = 0; + + // Sets the digital zoom ratio. If the numerator is 0, it means digital zoom + // was not used. + // Returns false if memory allocation fails. + virtual bool setDigitalZoomRatio(uint32_t numerator, uint32_t denominator) = 0; + + // Sets the exposure bias. + // Returns false if memory allocation fails. + virtual bool setExposureBias(int32_t numerator, int32_t denominator) = 0; + + // Sets the exposure mode set when the image was shot. + // Returns false if memory allocation fails. + virtual bool setExposureMode(uint16_t exposure_mode) = 0; + + // Sets the program used by the camera to set exposure when the picture is + // taken. + // Returns false if memory allocation fails. + virtual bool setExposureProgram(uint16_t exposure_program) = 0; + + // Sets the exposure time, given in seconds. + // Returns false if memory allocation fails. + virtual bool setExposureTime(uint32_t numerator, uint32_t denominator) = 0; + + // Sets the status of flash. + // Returns false if memory allocation fails. + virtual bool setFlash(uint16_t flash) = 0; + + // Sets the F number. + // Returns false if memory allocation fails. + virtual bool setFNumber(uint32_t numerator, uint32_t denominator) = 0; + + // Sets the focal length of lens used to take the image in millimeters. + // Returns false if memory allocation fails. + virtual bool setFocalLength(uint32_t numerator, uint32_t denominator) = 0; + + // Sets the degree of overall image gain adjustment. + // Returns false if memory allocation fails. + virtual bool setGainControl(uint16_t gain_control) = 0; + + // Sets the altitude in meters. + // Returns false if memory allocation fails. 
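+  // Negative values are recorded with the below-sea-level GPSAltitudeRef.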
+ virtual bool setGpsAltitude(double altitude) = 0; + + // Sets the latitude with degrees minutes seconds format. + // Returns false if memory allocation fails. + virtual bool setGpsLatitude(double latitude) = 0; + + // Sets the longitude with degrees minutes seconds format. + // Returns false if memory allocation fails. + virtual bool setGpsLongitude(double longitude) = 0; + + // Sets GPS processing method. + // Returns false if memory allocation fails. + virtual bool setGpsProcessingMethod(const std::string& method) = 0; + + // Sets GPS date stamp and time stamp (atomic clock). It takes UTC time. + // Returns false if memory allocation fails. + virtual bool setGpsTimestamp(const struct tm& t) = 0; + + // Sets the height (number of rows) of main image. + // Returns false if memory allocation fails. + virtual bool setImageHeight(uint32_t length) = 0; + + // Sets the width (number of columns) of main image. + // Returns false if memory allocation fails. + virtual bool setImageWidth(uint32_t width) = 0; + + // Sets the ISO speed. + // Returns false if memory allocation fails. + virtual bool setIsoSpeedRating(uint16_t iso_speed_ratings) = 0; + + // Sets the kind of light source. + // Returns false if memory allocation fails. + virtual bool setLightSource(uint16_t light_source) = 0; + + // Sets the smallest F number of the lens. + // Returns false if memory allocation fails. + virtual bool setMaxAperture(uint32_t numerator, uint32_t denominator) = 0; + + // Sets the metering mode. + // Returns false if memory allocation fails. + virtual bool setMeteringMode(uint16_t metering_mode) = 0; + + // Sets image orientation. + // Returns false if memory allocation fails. + virtual bool setOrientation(uint16_t orientation) = 0; + + // Sets the unit for measuring XResolution and YResolution. + // Returns false if memory allocation fails. + virtual bool setResolutionUnit(uint16_t resolution_unit) = 0; + + // Sets image saturation. + // Returns false if memory allocation fails. + virtual bool setSaturation(uint16_t saturation) = 0; + + // Sets the type of scene that was shot. + // Returns false if memory allocation fails. + virtual bool setSceneCaptureType(uint16_t type) = 0; + + // Sets image sharpness. + // Returns false if memory allocation fails. + virtual bool setSharpness(uint16_t sharpness) = 0; + + // Sets the shutter speed. + // Returns false if memory allocation fails. + virtual bool setShutterSpeed(int32_t numerator, int32_t denominator) = 0; + + // Sets the distance to the subject, given in meters. + // Returns false if memory allocation fails. + virtual bool setSubjectDistance(uint32_t numerator, uint32_t denominator) = 0; + + // Sets the fractions of seconds for the tag. + // Returns false if memory allocation fails. + virtual bool setSubsecTime(const std::string& subsec_time) = 0; + + // Sets the white balance mode set when the image was shot. + // Returns false if memory allocation fails. + virtual bool setWhiteBalance(uint16_t white_balance) = 0; + + // Sets the number of pixels per resolution unit in the image width. + // Returns false if memory allocation fails. + virtual bool setXResolution(uint32_t numerator, uint32_t denominator) = 0; + + // Sets the position of chrominance components in relation to the luminance + // component. + // Returns false if memory allocation fails. + virtual bool setYCbCrPositioning(uint16_t ycbcr_positioning) = 0; + + // Sets the number of pixels per resolution unit in the image length. + // Returns false if memory allocation fails. 
+ virtual bool setYResolution(uint32_t numerator, uint32_t denominator) = 0; + + // Sets the manufacturer of camera. + // Returns false if memory allocation fails. + virtual bool setMake(const std::string& make) = 0; + + // Sets the model number of camera. + // Returns false if memory allocation fails. + virtual bool setModel(const std::string& model) = 0; + + // Generates APP1 segment. + // Returns false if generating APP1 segment fails. + virtual bool generateApp1(const void* thumbnail_buffer, uint32_t size) = 0; + + // Gets buffer of APP1 segment. This method must be called only after calling + // GenerateAPP1(). + virtual const uint8_t* getApp1Buffer() = 0; + + // Gets length of APP1 segment. This method must be called only after calling + // GenerateAPP1(). + virtual unsigned int getApp1Length() = 0; +}; + + +} // namespace helper +} // namespace V1_0 +} // namespace common +} // namespace camera +} // namespace hardware +} // namespace android + + +#endif // ANDROID_HARDWARE_INTERFACES_CAMERA_COMMON_1_0_EXIF_H diff --git a/camera/common/1.0/default/include/HandleImporter.h b/camera/common/1.0/default/include/HandleImporter.h index e47397c6a659bbf3450b9948a0a957025196b64f..f9cd9fb604eda87da7080d80678492a3c531831f 100644 --- a/camera/common/1.0/default/include/HandleImporter.h +++ b/camera/common/1.0/default/include/HandleImporter.h @@ -22,6 +22,7 @@ #include using android::hardware::graphics::mapper::V2_0::IMapper; +using android::hardware::graphics::mapper::V2_0::YCbCrLayout; namespace android { namespace hardware { @@ -43,6 +44,15 @@ public: bool importFence(const native_handle_t* handle, int& fd) const; void closeFence(int fd) const; + // Assume caller has done waiting for acquire fences + void* lock(buffer_handle_t& buf, uint64_t cpuUsage, size_t size); + + // Assume caller has done waiting for acquire fences + YCbCrLayout lockYCbCr(buffer_handle_t& buf, uint64_t cpuUsage, + const IMapper::Rect& accessRegion); + + int unlock(buffer_handle_t& buf); // returns release fence + private: void initializeLocked(); void cleanup(); @@ -60,4 +70,4 @@ private: } // namespace hardware } // namespace android -#endif // CAMERA_COMMON_1_0_HANDLEIMPORTED_H \ No newline at end of file +#endif // CAMERA_COMMON_1_0_HANDLEIMPORTED_H diff --git a/camera/common/1.0/default/include/VendorTagDescriptor.h b/camera/common/1.0/default/include/VendorTagDescriptor.h index 8d8ded9556accb46ef47c5ab9df9afe2d53b5f52..a040540edb6c88a234367cb64897f41f3a788b6f 100644 --- a/camera/common/1.0/default/include/VendorTagDescriptor.h +++ b/camera/common/1.0/default/include/VendorTagDescriptor.h @@ -24,6 +24,7 @@ #include #include +#include namespace android { namespace hardware { @@ -94,7 +95,8 @@ class VendorTagDescriptor { KeyedVector*> mReverseMapping; KeyedVector mTagToNameMap; KeyedVector mTagToSectionMap; // Value is offset in mSections - KeyedVector mTagToTypeMap; + + std::unordered_map mTagToTypeMap; SortedVector mSections; // must be int32_t to be compatible with Parcel::writeInt32 int32_t mTagCount; diff --git a/camera/device/3.2/default/CameraDeviceSession.cpp b/camera/device/3.2/default/CameraDeviceSession.cpp index d6a04bc56ba8dce0afbe98bfd2a47fbc381da74e..69f853562cfc07d60ce1c14d29ed79c6c2556a02 100644 --- a/camera/device/3.2/default/CameraDeviceSession.cpp +++ b/camera/device/3.2/default/CameraDeviceSession.cpp @@ -18,6 +18,7 @@ #include #include +#include #include #include #include @@ -31,9 +32,16 @@ namespace V3_2 { namespace implementation { // Size of request metadata fast message queue. 
Change to 0 to always use hwbinder buffer. -static constexpr size_t CAMERA_REQUEST_METADATA_QUEUE_SIZE = 1 << 20 /* 1MB */; +static constexpr int32_t CAMERA_REQUEST_METADATA_QUEUE_SIZE = 1 << 20 /* 1MB */; // Size of result metadata fast message queue. Change to 0 to always use hwbinder buffer. -static constexpr size_t CAMERA_RESULT_METADATA_QUEUE_SIZE = 1 << 20 /* 1MB */; +static constexpr int32_t CAMERA_RESULT_METADATA_QUEUE_SIZE = 1 << 20 /* 1MB */; + +// Metadata sent by HAL will be replaced by a compact copy +// if their (total size >= compact size + METADATA_SHRINK_ABS_THRESHOLD && +// total_size >= compact size * METADATA_SHRINK_REL_THRESHOLD) +// Heuristically picked by size of one page +static constexpr int METADATA_SHRINK_ABS_THRESHOLD = 4096; +static constexpr int METADATA_SHRINK_REL_THRESHOLD = 2; HandleImporter CameraDeviceSession::sHandleImporter; const int CameraDeviceSession::ResultBatcher::NOT_BATCHED; @@ -88,14 +96,30 @@ bool CameraDeviceSession::initialize() { return true; } + int32_t reqFMQSize = property_get_int32("ro.camera.req.fmq.size", /*default*/-1); + if (reqFMQSize < 0) { + reqFMQSize = CAMERA_REQUEST_METADATA_QUEUE_SIZE; + } else { + ALOGV("%s: request FMQ size overridden to %d", __FUNCTION__, reqFMQSize); + } + mRequestMetadataQueue = std::make_unique( - CAMERA_REQUEST_METADATA_QUEUE_SIZE, false /* non blocking */); + static_cast(reqFMQSize), + false /* non blocking */); if (!mRequestMetadataQueue->isValid()) { ALOGE("%s: invalid request fmq", __FUNCTION__); return true; } + + int32_t resFMQSize = property_get_int32("ro.camera.res.fmq.size", /*default*/-1); + if (resFMQSize < 0) { + resFMQSize = CAMERA_RESULT_METADATA_QUEUE_SIZE; + } else { + ALOGV("%s: result FMQ size overridden to %d", __FUNCTION__, resFMQSize); + } mResultMetadataQueue = std::make_shared( - CAMERA_RESULT_METADATA_QUEUE_SIZE, false /* non blocking */); + static_cast(resFMQSize), + false /* non blocking */); if (!mResultMetadataQueue->isValid()) { ALOGE("%s: invalid result fmq", __FUNCTION__); return true; @@ -333,11 +357,10 @@ void CameraDeviceSession::ResultBatcher::setResultMetadataQueue( mResultMetadataQueue = q; } -void CameraDeviceSession::ResultBatcher::registerBatch( - const hidl_vec& requests) { +void CameraDeviceSession::ResultBatcher::registerBatch(uint32_t frameNumber, uint32_t batchSize) { auto batch = std::make_shared(); - batch->mFirstFrame = requests[0].frameNumber; - batch->mBatchSize = requests.size(); + batch->mFirstFrame = frameNumber; + batch->mBatchSize = batchSize; batch->mLastFrame = batch->mFirstFrame + batch->mBatchSize - 1; batch->mNumPartialResults = mNumPartialResults; for (int id : mStreamsToBatch) { @@ -394,7 +417,11 @@ void CameraDeviceSession::ResultBatcher::sendBatchShutterCbsLocked( return; } - mCallback->notify(batch->mShutterMsgs); + auto ret = mCallback->notify(batch->mShutterMsgs); + if (!ret.isOk()) { + ALOGE("%s: notify shutter transaction failed: %s", + __FUNCTION__, ret.description().c_str()); + } batch->mShutterDelivered = true; batch->mShutterMsgs.clear(); } @@ -564,7 +591,11 @@ void CameraDeviceSession::ResultBatcher::sendBatchMetadataLocked( } void CameraDeviceSession::ResultBatcher::notifySingleMsg(NotifyMsg& msg) { - mCallback->notify({msg}); + auto ret = mCallback->notify({msg}); + if (!ret.isOk()) { + ALOGE("%s: notify transaction failed: %s", + __FUNCTION__, ret.description().c_str()); + } return; } @@ -646,13 +677,20 @@ void CameraDeviceSession::ResultBatcher::invokeProcessCaptureResultCallback( result.fmqResultSize = result.result.size(); 
result.result.resize(0); } else { - ALOGW("%s: couldn't utilize fmq, fall back to hwbinder", __FUNCTION__); + ALOGW("%s: couldn't utilize fmq, fall back to hwbinder, result size: %zu," + "shared message queue available size: %zu", + __FUNCTION__, result.result.size(), + mResultMetadataQueue->availableToWrite()); result.fmqResultSize = 0; } } } } - mCallback->processCaptureResult(results); + auto ret = mCallback->processCaptureResult(results); + if (!ret.isOk()) { + ALOGE("%s: processCaptureResult transaction failed: %s", + __FUNCTION__, ret.description().c_str()); + } mProcessCaptureResultLock.unlock(); } @@ -739,8 +777,14 @@ void CameraDeviceSession::ResultBatcher::processCaptureResult(CaptureResult& res // Methods from ::android::hardware::camera::device::V3_2::ICameraDeviceSession follow. Return CameraDeviceSession::constructDefaultRequestSettings( RequestTemplate type, ICameraDeviceSession::constructDefaultRequestSettings_cb _hidl_cb) { - Status status = initStatus(); CameraMetadata outMetadata; + Status status = constructDefaultRequestSettingsRaw( (int) type, &outMetadata); + _hidl_cb(status, outMetadata); + return Void(); +} + +Status CameraDeviceSession::constructDefaultRequestSettingsRaw(int type, CameraMetadata *outMetadata) { + Status status = initStatus(); const camera_metadata_t *rawRequest; if (status == Status::OK) { ATRACE_BEGIN("camera3->construct_default_request_settings"); @@ -760,17 +804,14 @@ Return CameraDeviceSession::constructDefaultRequestSettings( mOverridenRequest.update( ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, defaultBoost, 1); - const camera_metadata_t *metaBuffer = - mOverridenRequest.getAndLock(); - convertToHidl(metaBuffer, &outMetadata); - mOverridenRequest.unlock(metaBuffer); - } else { - convertToHidl(rawRequest, &outMetadata); } + const camera_metadata_t *metaBuffer = + mOverridenRequest.getAndLock(); + convertToHidl(metaBuffer, outMetadata); + mOverridenRequest.unlock(metaBuffer); } } - _hidl_cb(status, outMetadata); - return Void(); + return status; } /** @@ -803,6 +844,89 @@ android_dataspace CameraDeviceSession::mapToLegacyDataspace( return dataSpace; } +bool CameraDeviceSession::preProcessConfigurationLocked( + const StreamConfiguration& requestedConfiguration, + camera3_stream_configuration_t *stream_list /*out*/, + hidl_vec *streams /*out*/) { + + if ((stream_list == nullptr) || (streams == nullptr)) { + return false; + } + + stream_list->operation_mode = (uint32_t) requestedConfiguration.operationMode; + stream_list->num_streams = requestedConfiguration.streams.size(); + streams->resize(stream_list->num_streams); + stream_list->streams = streams->data(); + + for (uint32_t i = 0; i < stream_list->num_streams; i++) { + int id = requestedConfiguration.streams[i].id; + + if (mStreamMap.count(id) == 0) { + Camera3Stream stream; + convertFromHidl(requestedConfiguration.streams[i], &stream); + mStreamMap[id] = stream; + mStreamMap[id].data_space = mapToLegacyDataspace( + mStreamMap[id].data_space); + mCirculatingBuffers.emplace(stream.mId, CirculatingBuffers{}); + } else { + // width/height/format must not change, but usage/rotation might need to change + if (mStreamMap[id].stream_type != + (int) requestedConfiguration.streams[i].streamType || + mStreamMap[id].width != requestedConfiguration.streams[i].width || + mStreamMap[id].height != requestedConfiguration.streams[i].height || + mStreamMap[id].format != (int) requestedConfiguration.streams[i].format || + mStreamMap[id].data_space != + mapToLegacyDataspace( static_cast ( + 
requestedConfiguration.streams[i].dataSpace))) { + ALOGE("%s: stream %d configuration changed!", __FUNCTION__, id); + return false; + } + mStreamMap[id].rotation = (int) requestedConfiguration.streams[i].rotation; + mStreamMap[id].usage = (uint32_t) requestedConfiguration.streams[i].usage; + } + (*streams)[i] = &mStreamMap[id]; + } + + return true; +} + +void CameraDeviceSession::postProcessConfigurationLocked( + const StreamConfiguration& requestedConfiguration) { + // delete unused streams, note we do this after adding new streams to ensure new stream + // will not have the same address as deleted stream, and HAL has a chance to reference + // the to be deleted stream in configure_streams call + for(auto it = mStreamMap.begin(); it != mStreamMap.end();) { + int id = it->first; + bool found = false; + for (const auto& stream : requestedConfiguration.streams) { + if (id == stream.id) { + found = true; + break; + } + } + if (!found) { + // Unmap all buffers of deleted stream + // in case the configuration call succeeds and HAL + // is able to release the corresponding resources too. + cleanupBuffersLocked(id); + it = mStreamMap.erase(it); + } else { + ++it; + } + } + + // Track video streams + mVideoStreamIds.clear(); + for (const auto& stream : requestedConfiguration.streams) { + if (stream.streamType == StreamType::OUTPUT && + stream.usage & + graphics::common::V1_0::BufferUsage::VIDEO_ENCODER) { + mVideoStreamIds.push_back(stream.id); + } + } + mResultBatcher.setBatchedStreams(mVideoStreamIds); +} + Return CameraDeviceSession::configureStreams( const StreamConfiguration& requestedConfiguration, ICameraDeviceSession::configureStreams_cb _hidl_cb) { @@ -840,42 +964,11 @@ Return CameraDeviceSession::configureStreams( return Void(); } - camera3_stream_configuration_t stream_list; + camera3_stream_configuration_t stream_list{}; hidl_vec streams; - - stream_list.operation_mode = (uint32_t) requestedConfiguration.operationMode; - stream_list.num_streams = requestedConfiguration.streams.size(); - streams.resize(stream_list.num_streams); - stream_list.streams = streams.data(); - - for (uint32_t i = 0; i < stream_list.num_streams; i++) { - int id = requestedConfiguration.streams[i].id; - - if (mStreamMap.count(id) == 0) { - Camera3Stream stream; - convertFromHidl(requestedConfiguration.streams[i], &stream); - mStreamMap[id] = stream; - mStreamMap[id].data_space = mapToLegacyDataspace( - mStreamMap[id].data_space); - mCirculatingBuffers.emplace(stream.mId, CirculatingBuffers{}); - } else { - // width/height/format must not change, but usage/rotation might need to change - if (mStreamMap[id].stream_type != - (int) requestedConfiguration.streams[i].streamType || - mStreamMap[id].width != requestedConfiguration.streams[i].width || - mStreamMap[id].height != requestedConfiguration.streams[i].height || - mStreamMap[id].format != (int) requestedConfiguration.streams[i].format || - mStreamMap[id].data_space != - mapToLegacyDataspace( static_cast ( - requestedConfiguration.streams[i].dataSpace))) { - ALOGE("%s: stream %d configuration changed!", __FUNCTION__, id); - _hidl_cb(Status::INTERNAL_ERROR, outStreams); - return Void(); - } - mStreamMap[id].rotation = (int) requestedConfiguration.streams[i].rotation; - mStreamMap[id].usage = (uint32_t) requestedConfiguration.streams[i].usage; - } - streams[i] = &mStreamMap[id]; + if (!preProcessConfigurationLocked(requestedConfiguration, &stream_list, &streams)) { + _hidl_cb(Status::INTERNAL_ERROR, outStreams); + return Void(); } 
ATRACE_BEGIN("camera3->configure_streams"); @@ -885,39 +978,7 @@ Return CameraDeviceSession::configureStreams( // In case Hal returns error most likely it was not able to release // the corresponding resources of the deleted streams. if (ret == OK) { - // delete unused streams, note we do this after adding new streams to ensure new stream - // will not have the same address as deleted stream, and HAL has a chance to reference - // the to be deleted stream in configure_streams call - for(auto it = mStreamMap.begin(); it != mStreamMap.end();) { - int id = it->first; - bool found = false; - for (const auto& stream : requestedConfiguration.streams) { - if (id == stream.id) { - found = true; - break; - } - } - if (!found) { - // Unmap all buffers of deleted stream - // in case the configuration call succeeds and HAL - // is able to release the corresponding resources too. - cleanupBuffersLocked(id); - it = mStreamMap.erase(it); - } else { - ++it; - } - } - - // Track video streams - mVideoStreamIds.clear(); - for (const auto& stream : requestedConfiguration.streams) { - if (stream.streamType == StreamType::OUTPUT && - stream.usage & - graphics::common::V1_0::BufferUsage::VIDEO_ENCODER) { - mVideoStreamIds.push_back(stream.id); - } - } - mResultBatcher.setBatchedStreams(mVideoStreamIds); + postProcessConfigurationLocked(requestedConfiguration); } if (ret == -EINVAL) { @@ -990,7 +1051,7 @@ Return CameraDeviceSession::processCaptureRequest( } if (s == Status::OK && requests.size() > 1) { - mResultBatcher.registerBatch(requests); + mResultBatcher.registerBatch(requests[0].frameNumber, requests.size()); } _hidl_cb(s, numRequestProcessed); @@ -1091,6 +1152,7 @@ Status CameraDeviceSession::processOneCaptureRequest(const CaptureRequest& reque halRequest.settings = settingsOverride.getAndLock(); } } + halRequest.num_physcam_settings = 0; ATRACE_ASYNC_BEGIN("frame capture", request.frameNumber); ATRACE_BEGIN("camera3->process_capture_request"); @@ -1173,29 +1235,22 @@ Return CameraDeviceSession::close() { return Void(); } -/** - * Static callback forwarding methods from HAL to instance - */ -void CameraDeviceSession::sProcessCaptureResult( - const camera3_callback_ops *cb, - const camera3_capture_result *hal_result) { - CameraDeviceSession *d = - const_cast(static_cast(cb)); - +status_t CameraDeviceSession::constructCaptureResult(CaptureResult& result, + const camera3_capture_result *hal_result) { uint32_t frameNumber = hal_result->frame_number; bool hasInputBuf = (hal_result->input_buffer != nullptr); size_t numOutputBufs = hal_result->num_output_buffers; size_t numBufs = numOutputBufs + (hasInputBuf ? 
1 : 0); if (numBufs > 0) { - Mutex::Autolock _l(d->mInflightLock); + Mutex::Autolock _l(mInflightLock); if (hasInputBuf) { int streamId = static_cast(hal_result->input_buffer->stream)->mId; // validate if buffer is inflight auto key = std::make_pair(streamId, frameNumber); - if (d->mInflightBuffers.count(key) != 1) { + if (mInflightBuffers.count(key) != 1) { ALOGE("%s: input buffer for stream %d frame %d is not inflight!", __FUNCTION__, streamId, frameNumber); - return; + return -EINVAL; } } @@ -1203,73 +1258,72 @@ void CameraDeviceSession::sProcessCaptureResult( int streamId = static_cast(hal_result->output_buffers[i].stream)->mId; // validate if buffer is inflight auto key = std::make_pair(streamId, frameNumber); - if (d->mInflightBuffers.count(key) != 1) { + if (mInflightBuffers.count(key) != 1) { ALOGE("%s: output buffer for stream %d frame %d is not inflight!", __FUNCTION__, streamId, frameNumber); - return; + return -EINVAL; } } } // We don't need to validate/import fences here since we will be passing them to camera service // within the scope of this function - CaptureResult result; result.frameNumber = frameNumber; result.fmqResultSize = 0; result.partialResult = hal_result->partial_result; convertToHidl(hal_result->result, &result.result); if (nullptr != hal_result->result) { bool resultOverriden = false; - Mutex::Autolock _l(d->mInflightLock); + Mutex::Autolock _l(mInflightLock); // Derive some new keys for backward compatibility - if (d->mDerivePostRawSensKey) { + if (mDerivePostRawSensKey) { camera_metadata_ro_entry entry; if (find_camera_metadata_ro_entry(hal_result->result, ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &entry) == 0) { - d->mInflightRawBoostPresent[frameNumber] = true; + mInflightRawBoostPresent[frameNumber] = true; } else { - auto entry = d->mInflightRawBoostPresent.find(frameNumber); - if (d->mInflightRawBoostPresent.end() == entry) { - d->mInflightRawBoostPresent[frameNumber] = false; + auto entry = mInflightRawBoostPresent.find(frameNumber); + if (mInflightRawBoostPresent.end() == entry) { + mInflightRawBoostPresent[frameNumber] = false; } } - if ((hal_result->partial_result == d->mNumPartialResults)) { - if (!d->mInflightRawBoostPresent[frameNumber]) { + if ((hal_result->partial_result == mNumPartialResults)) { + if (!mInflightRawBoostPresent[frameNumber]) { if (!resultOverriden) { - d->mOverridenResult.clear(); - d->mOverridenResult.append(hal_result->result); + mOverridenResult.clear(); + mOverridenResult.append(hal_result->result); resultOverriden = true; } int32_t defaultBoost[1] = {100}; - d->mOverridenResult.update( + mOverridenResult.update( ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, defaultBoost, 1); } - d->mInflightRawBoostPresent.erase(frameNumber); + mInflightRawBoostPresent.erase(frameNumber); } } - auto entry = d->mInflightAETriggerOverrides.find(frameNumber); - if (d->mInflightAETriggerOverrides.end() != entry) { + auto entry = mInflightAETriggerOverrides.find(frameNumber); + if (mInflightAETriggerOverrides.end() != entry) { if (!resultOverriden) { - d->mOverridenResult.clear(); - d->mOverridenResult.append(hal_result->result); + mOverridenResult.clear(); + mOverridenResult.append(hal_result->result); resultOverriden = true; } - d->overrideResultForPrecaptureCancelLocked(entry->second, - &d->mOverridenResult); - if (hal_result->partial_result == d->mNumPartialResults) { - d->mInflightAETriggerOverrides.erase(frameNumber); + overrideResultForPrecaptureCancelLocked(entry->second, + &mOverridenResult); + if (hal_result->partial_result == 
mNumPartialResults) { + mInflightAETriggerOverrides.erase(frameNumber); } } if (resultOverriden) { const camera_metadata_t *metaBuffer = - d->mOverridenResult.getAndLock(); + mOverridenResult.getAndLock(); convertToHidl(metaBuffer, &result.result); - d->mOverridenResult.unlock(metaBuffer); + mOverridenResult.unlock(metaBuffer); } } if (hasInputBuf) { @@ -1310,25 +1364,104 @@ void CameraDeviceSession::sProcessCaptureResult( // configure_streams right after the processCaptureResult call so we need to finish // updating inflight queues first if (numBufs > 0) { - Mutex::Autolock _l(d->mInflightLock); + Mutex::Autolock _l(mInflightLock); if (hasInputBuf) { int streamId = static_cast(hal_result->input_buffer->stream)->mId; auto key = std::make_pair(streamId, frameNumber); - d->mInflightBuffers.erase(key); + mInflightBuffers.erase(key); } for (size_t i = 0; i < numOutputBufs; i++) { int streamId = static_cast(hal_result->output_buffers[i].stream)->mId; auto key = std::make_pair(streamId, frameNumber); - d->mInflightBuffers.erase(key); + mInflightBuffers.erase(key); } - if (d->mInflightBuffers.empty()) { + if (mInflightBuffers.empty()) { ALOGV("%s: inflight buffer queue is now empty!", __FUNCTION__); } } + return OK; +} + +// Static helper method to copy/shrink capture result metadata sent by HAL +void CameraDeviceSession::sShrinkCaptureResult( + camera3_capture_result* dst, const camera3_capture_result* src, + std::vector<::android::hardware::camera::common::V1_0::helper::CameraMetadata>* mds, + std::vector* physCamMdArray, + bool handlePhysCam) { + *dst = *src; + // Reserve maximum number of entries to avoid metadata re-allocation. + mds->reserve(1 + (handlePhysCam ? src->num_physcam_metadata : 0)); + if (sShouldShrink(src->result)) { + mds->emplace_back(sCreateCompactCopy(src->result)); + dst->result = mds->back().getAndLock(); + } + + if (handlePhysCam) { + // First determine if we need to create new camera_metadata_t* array + bool needShrink = false; + for (uint32_t i = 0; i < src->num_physcam_metadata; i++) { + if (sShouldShrink(src->physcam_metadata[i])) { + needShrink = true; + } + } + + if (!needShrink) return; + + physCamMdArray->reserve(src->num_physcam_metadata); + dst->physcam_metadata = physCamMdArray->data(); + for (uint32_t i = 0; i < src->num_physcam_metadata; i++) { + if (sShouldShrink(src->physcam_metadata[i])) { + mds->emplace_back(sCreateCompactCopy(src->physcam_metadata[i])); + dst->physcam_metadata[i] = mds->back().getAndLock(); + } else { + dst->physcam_metadata[i] = src->physcam_metadata[i]; + } + } + } +} + +bool CameraDeviceSession::sShouldShrink(const camera_metadata_t* md) { + size_t compactSize = get_camera_metadata_compact_size(md); + size_t totalSize = get_camera_metadata_size(md); + if (totalSize >= compactSize + METADATA_SHRINK_ABS_THRESHOLD && + totalSize >= compactSize * METADATA_SHRINK_REL_THRESHOLD) { + ALOGV("Camera metadata should be shrunk from %zu to %zu", totalSize, compactSize); + return true; + } + return false; +} + +camera_metadata_t* CameraDeviceSession::sCreateCompactCopy(const camera_metadata_t* src) { + size_t compactSize = get_camera_metadata_compact_size(src); + void* buffer = calloc(1, compactSize); + if (buffer == nullptr) { + ALOGE("%s: Allocating %zu bytes failed", __FUNCTION__, compactSize); + } + return copy_camera_metadata(buffer, compactSize, src); +} - d->mResultBatcher.processCaptureResult(result); +/** + * Static callback forwarding methods from HAL to instance + */ +void CameraDeviceSession::sProcessCaptureResult( + const 
camera3_callback_ops *cb, + const camera3_capture_result *hal_result) { + CameraDeviceSession *d = + const_cast(static_cast(cb)); + + CaptureResult result = {}; + camera3_capture_result shadowResult; + bool handlePhysCam = (d->mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_5); + std::vector<::android::hardware::camera::common::V1_0::helper::CameraMetadata> compactMds; + std::vector physCamMdArray; + sShrinkCaptureResult(&shadowResult, hal_result, &compactMds, &physCamMdArray, handlePhysCam); + + status_t ret = d->constructCaptureResult(result, &shadowResult); + if (ret == OK) { + d->mResultBatcher.processCaptureResult(result); + } } void CameraDeviceSession::sNotify( diff --git a/camera/device/3.2/default/CameraDeviceSession.h b/camera/device/3.2/default/CameraDeviceSession.h index 69e2e2c802531cfe9258b55839340d7c0b49224a..af90e5a00f072420549e271777b433da48dcbd9b 100644 --- a/camera/device/3.2/default/CameraDeviceSession.h +++ b/camera/device/3.2/default/CameraDeviceSession.h @@ -112,6 +112,14 @@ protected: Return flush(); Return close(); + // Helper methods + Status constructDefaultRequestSettingsRaw(int type, CameraMetadata *outMetadata); + + bool preProcessConfigurationLocked(const StreamConfiguration& requestedConfiguration, + camera3_stream_configuration_t *stream_list /*out*/, + hidl_vec *streams /*out*/); + void postProcessConfigurationLocked(const StreamConfiguration& requestedConfiguration); + protected: // protecting mClosed/mDisconnected/mInitFail @@ -133,7 +141,7 @@ protected: }; camera3_device_t* mDevice; - uint32_t mDeviceVersion; + const uint32_t mDeviceVersion; bool mIsAELockAvailable; bool mDerivePostRawSensKey; uint32_t mNumPartialResults; @@ -178,11 +186,11 @@ protected: void setBatchedStreams(const std::vector& streamsToBatch); void setResultMetadataQueue(std::shared_ptr q); - void registerBatch(const hidl_vec& requests); + void registerBatch(uint32_t frameNumber, uint32_t batchSize); void notify(NotifyMsg& msg); void processCaptureResult(CaptureResult& result); - private: + protected: struct InflightBatch { // Protect access to entire struct. Acquire this lock before read/write any data or // calling any methods. 
processCaptureResult and notify will compete for this lock @@ -227,7 +235,6 @@ protected: bool mRemoved = false; }; - static const int NOT_BATCHED = -1; // Get the batch index and pointer to InflightBatch (nullptrt if the frame is not batched) // Caller must acquire the InflightBatch::mLock before accessing the InflightBatch @@ -237,6 +244,16 @@ protected: // This method will hold ResultBatcher::mLock briefly std::pair> getBatch(uint32_t frameNumber); + static const int NOT_BATCHED = -1; + + // move/push function avoids "hidl_handle& operator=(hidl_handle&)", which clones native + // handle + void moveStreamBuffer(StreamBuffer&& src, StreamBuffer& dst); + void pushStreamBuffer(StreamBuffer&& src, std::vector& dst); + + void sendBatchMetadataLocked( + std::shared_ptr batch, uint32_t lastPartialResultIdx); + // Check if the first batch in mInflightBatches is ready to be removed, and remove it if so // This method will hold ResultBatcher::mLock briefly void checkAndRemoveFirstBatch(); @@ -249,9 +266,7 @@ protected: // send buffers for specified streams void sendBatchBuffersLocked( std::shared_ptr batch, const std::vector& streams); - void sendBatchMetadataLocked( - std::shared_ptr batch, uint32_t lastPartialResultIdx); - // End of sendXXXX methods + // End of sendXXXX methods // helper methods void freeReleaseFences(hidl_vec&); @@ -259,11 +274,6 @@ protected: void processOneCaptureResult(CaptureResult& result); void invokeProcessCaptureResultCallback(hidl_vec &results, bool tryWriteFmq); - // move/push function avoids "hidl_handle& operator=(hidl_handle&)", which clones native - // handle - void moveStreamBuffer(StreamBuffer&& src, StreamBuffer& dst); - void pushStreamBuffer(StreamBuffer&& src, std::vector& dst); - // Protect access to mInflightBatches, mNumPartialResults and mStreamsToBatch // processCaptureRequest, processCaptureResult, notify will compete for this lock // Do NOT issue HIDL IPCs while holding this lock (except when HAL reports error) @@ -317,6 +327,19 @@ protected: static callbacks_process_capture_result_t sProcessCaptureResult; static callbacks_notify_t sNotify; + status_t constructCaptureResult(CaptureResult& result, + const camera3_capture_result *hal_result); + + // Static helper method to copy/shrink capture result metadata sent by HAL + // Temporarily allocated metadata copy will be hold in mds + static void sShrinkCaptureResult( + camera3_capture_result* dst, const camera3_capture_result* src, + std::vector<::android::hardware::camera::common::V1_0::helper::CameraMetadata>* mds, + std::vector* physCamMdArray, + bool handlePhysCam); + static bool sShouldShrink(const camera_metadata_t* md); + static camera_metadata_t* sCreateCompactCopy(const camera_metadata_t* src); + private: struct TrampolineSessionInterface_3_2 : public ICameraDeviceSession { diff --git a/camera/device/3.3/default/CameraDeviceSession.cpp b/camera/device/3.3/default/CameraDeviceSession.cpp index f877895ebbe1d04353cf028491ad546dbe9f76e4..d36e9ed4a01dfa1931c12b6e3267e5d7a4d13f19 100644 --- a/camera/device/3.3/default/CameraDeviceSession.cpp +++ b/camera/device/3.3/default/CameraDeviceSession.cpp @@ -77,42 +77,11 @@ Return CameraDeviceSession::configureStreams_3_3( return Void(); } - camera3_stream_configuration_t stream_list; + camera3_stream_configuration_t stream_list{}; hidl_vec streams; - - stream_list.operation_mode = (uint32_t) requestedConfiguration.operationMode; - stream_list.num_streams = requestedConfiguration.streams.size(); - streams.resize(stream_list.num_streams); - stream_list.streams 
= streams.data(); - - for (uint32_t i = 0; i < stream_list.num_streams; i++) { - int id = requestedConfiguration.streams[i].id; - - if (mStreamMap.count(id) == 0) { - Camera3Stream stream; - V3_2::implementation::convertFromHidl(requestedConfiguration.streams[i], &stream); - mStreamMap[id] = stream; - mStreamMap[id].data_space = mapToLegacyDataspace( - mStreamMap[id].data_space); - mCirculatingBuffers.emplace(stream.mId, CirculatingBuffers{}); - } else { - // width/height/format must not change, but usage/rotation might need to change - if (mStreamMap[id].stream_type != - (int) requestedConfiguration.streams[i].streamType || - mStreamMap[id].width != requestedConfiguration.streams[i].width || - mStreamMap[id].height != requestedConfiguration.streams[i].height || - mStreamMap[id].format != (int) requestedConfiguration.streams[i].format || - mStreamMap[id].data_space != - mapToLegacyDataspace( static_cast ( - requestedConfiguration.streams[i].dataSpace))) { - ALOGE("%s: stream %d configuration changed!", __FUNCTION__, id); - _hidl_cb(Status::INTERNAL_ERROR, outStreams); - return Void(); - } - mStreamMap[id].rotation = (int) requestedConfiguration.streams[i].rotation; - mStreamMap[id].usage = (uint32_t) requestedConfiguration.streams[i].usage; - } - streams[i] = &mStreamMap[id]; + if (!preProcessConfigurationLocked(requestedConfiguration, &stream_list, &streams)) { + _hidl_cb(Status::INTERNAL_ERROR, outStreams); + return Void(); } ATRACE_BEGIN("camera3->configure_streams"); @@ -122,39 +91,7 @@ Return CameraDeviceSession::configureStreams_3_3( // In case Hal returns error most likely it was not able to release // the corresponding resources of the deleted streams. if (ret == OK) { - // delete unused streams, note we do this after adding new streams to ensure new stream - // will not have the same address as deleted stream, and HAL has a chance to reference - // the to be deleted stream in configure_streams call - for(auto it = mStreamMap.begin(); it != mStreamMap.end();) { - int id = it->first; - bool found = false; - for (const auto& stream : requestedConfiguration.streams) { - if (id == stream.id) { - found = true; - break; - } - } - if (!found) { - // Unmap all buffers of deleted stream - // in case the configuration call succeeds and HAL - // is able to release the corresponding resources too. - cleanupBuffersLocked(id); - it = mStreamMap.erase(it); - } else { - ++it; - } - } - - // Track video streams - mVideoStreamIds.clear(); - for (const auto& stream : requestedConfiguration.streams) { - if (stream.streamType == V3_2::StreamType::OUTPUT && - stream.usage & - graphics::common::V1_0::BufferUsage::VIDEO_ENCODER) { - mVideoStreamIds.push_back(stream.id); - } - } - mResultBatcher.setBatchedStreams(mVideoStreamIds); + postProcessConfigurationLocked(requestedConfiguration); } if (ret == -EINVAL) { diff --git a/camera/device/3.4/Android.bp b/camera/device/3.4/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..2c649baa2ebf7c8014a7b3b725206154bbf7b6ba --- /dev/null +++ b/camera/device/3.4/Android.bp @@ -0,0 +1,33 @@ +// This file is autogenerated by hidl-gen -Landroidbp. 
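+// Packages the camera.device@3.4 session/callback interfaces and the types listed
+// below, which extend @3.2/@3.3 with per-physical-camera settings and metadata for
+// logical multi-camera devices plus session parameters in StreamConfiguration.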
+ +hidl_interface { + name: "android.hardware.camera.device@3.4", + root: "android.hardware", + vndk: { + enabled: true, + }, + srcs: [ + "types.hal", + "ICameraDeviceCallback.hal", + "ICameraDeviceSession.hal", + ], + interfaces: [ + "android.hardware.camera.common@1.0", + "android.hardware.camera.device@3.2", + "android.hardware.camera.device@3.3", + "android.hardware.graphics.common@1.0", + "android.hidl.base@1.0", + ], + types: [ + "CaptureRequest", + "CaptureResult", + "HalStream", + "HalStreamConfiguration", + "PhysicalCameraMetadata", + "PhysicalCameraSetting", + "Stream", + "StreamConfiguration", + ], + gen_java: false, +} + diff --git a/camera/device/3.4/ICameraDeviceCallback.hal b/camera/device/3.4/ICameraDeviceCallback.hal new file mode 100644 index 0000000000000000000000000000000000000000..8ce8d4b74b6fd1e86082ebcdc10a650a7b7943b5 --- /dev/null +++ b/camera/device/3.4/ICameraDeviceCallback.hal @@ -0,0 +1,43 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.camera.device@3.4; + +import @3.2::ICameraDeviceCallback; + +/** + * + * Callback methods for the HAL to call into the framework. + * + * These methods are used to return metadata and image buffers for a completed + * or failed captures, and to notify the framework of asynchronous events such + * as errors. + * + * The framework must not call back into the HAL from within these callbacks, + * and these calls must not block for extended periods. + * + */ +interface ICameraDeviceCallback extends @3.2::ICameraDeviceCallback { + /** + * processCaptureResult_3_4: + * + * Identical to @3.2::ICameraDeviceCallback.processCaptureResult, except + * that it takes a list of @3.4::CaptureResult, which could contain + * physical camera metadata for logical multi-camera. + * + */ + processCaptureResult_3_4(vec<@3.4::CaptureResult> results); +}; diff --git a/camera/device/3.4/ICameraDeviceSession.hal b/camera/device/3.4/ICameraDeviceSession.hal new file mode 100644 index 0000000000000000000000000000000000000000..c41d90e27aeba09b9003fe2b5c5181037a545163 --- /dev/null +++ b/camera/device/3.4/ICameraDeviceSession.hal @@ -0,0 +1,109 @@ +/* + * Copyright (C) 2017-2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package android.hardware.camera.device@3.4; + +import android.hardware.camera.common@1.0::Status; +import @3.2::CameraMetadata; +import @3.3::ICameraDeviceSession; +import @3.3::HalStreamConfiguration; +import @3.2::BufferCache; + +/** + * Camera device active session interface. + * + * Obtained via ICameraDevice::open(), this interface contains the methods to + * configure and request captures from an active camera device. + */ +interface ICameraDeviceSession extends @3.3::ICameraDeviceSession { + + /** + * configureStreams_3_4: + * + * Identical to @3.3::ICameraDeviceSession.configureStreams, except that: + * + * - The requested configuration includes session parameters. + * + * @return Status Status code for the operation, one of: + * OK: + * On successful stream configuration. + * INTERNAL_ERROR: + * If there has been a fatal error and the device is no longer + * operational. Only close() can be called successfully by the + * framework after this error is returned. + * ILLEGAL_ARGUMENT: + * If the requested stream configuration is invalid. Some examples + * of invalid stream configurations include: + * - Including more than 1 INPUT stream + * - Not including any OUTPUT streams + * - Including streams with unsupported formats, or an unsupported + * size for that format. + * - Including too many output streams of a certain format. + * - Unsupported rotation configuration + * - Stream sizes/formats don't satisfy the + * camera3_stream_configuration_t->operation_mode requirements + * for non-NORMAL mode, or the requested operation_mode is not + * supported by the HAL. + * - Unsupported usage flag + * The camera service cannot filter out all possible illegal stream + * configurations, since some devices may support more simultaneous + * streams or larger stream resolutions than the minimum required + * for a given camera device hardware level. The HAL must return an + * ILLEGAL_ARGUMENT for any unsupported stream set, and then be + * ready to accept a future valid stream configuration in a later + * configureStreams call. + * @return halConfiguration The stream parameters desired by the HAL for + * each stream, including maximum buffers, the usage flags, and the + * override format. + */ + configureStreams_3_4(@3.4::StreamConfiguration requestedConfiguration) + generates (Status status, + @3.4::HalStreamConfiguration halConfiguration); + + /** + * processCaptureRequest_3_4: + * + * Identical to @3.2::ICameraDeviceSession.processCaptureRequest, except that: + * + * - The capture request can include individual settings for physical camera devices + * backing a logical multi-camera. + * + * @return status Status code for the operation, one of: + * OK: + * On a successful start to processing the capture request + * ILLEGAL_ARGUMENT: + * If the input is malformed (the settings are empty when not + * allowed, the physical camera settings are invalid, there are 0 + * output buffers, etc) and capture processing + * cannot start. Failures during request processing must be + * handled by calling ICameraDeviceCallback::notify(). In case of + * this error, the framework retains responsibility for the + * stream buffers' fences and the buffer handles; the HAL must not + * close the fences or return these buffers with + * ICameraDeviceCallback::processCaptureResult(). + * INTERNAL_ERROR: + * If the camera device has encountered a serious error. After this + * error is returned, only the close() method can be successfully + * called by the framework. 
+ * @return numRequestProcessed Number of requests successfully processed by + * camera HAL. When status is OK, this must be equal to the size of + * requests. When the call fails, this number is the number of requests + * that HAL processed successfully before HAL runs into an error. + * + */ + processCaptureRequest_3_4(vec requests, vec cachesToRemove) + generates (Status status, uint32_t numRequestProcessed); +}; diff --git a/camera/device/3.4/default/Android.bp b/camera/device/3.4/default/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..272bf42baab17b75ecb6b0e560fc6917743fa8a2 --- /dev/null +++ b/camera/device/3.4/default/Android.bp @@ -0,0 +1,104 @@ +// +// Copyright (C) 2017 The Android Open Source Project +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +cc_library_headers { + name: "camera.device@3.4-impl_headers", + vendor: true, + export_include_dirs: ["include/device_v3_4_impl"] +} + +cc_library_headers { + name: "camera.device@3.4-external-impl_headers", + vendor: true, + export_include_dirs: ["include/ext_device_v3_4_impl"] +} + +cc_library_shared { + name: "camera.device@3.4-impl", + defaults: ["hidl_defaults"], + proprietary: true, + vendor: true, + srcs: [ + "CameraDevice.cpp", + "CameraDeviceSession.cpp", + "convert.cpp" + ], + shared_libs: [ + "libhidlbase", + "libhidltransport", + "libutils", + "libcutils", + "camera.device@3.2-impl", + "camera.device@3.3-impl", + "android.hardware.camera.device@3.2", + "android.hardware.camera.device@3.3", + "android.hardware.camera.device@3.4", + "android.hardware.camera.provider@2.4", + "android.hardware.graphics.mapper@2.0", + "liblog", + "libhardware", + "libcamera_metadata", + "libfmq", + ], + static_libs: [ + "android.hardware.camera.common@1.0-helper", + ], + local_include_dirs: ["include/device_v3_4_impl"], + export_shared_lib_headers: [ + "libfmq", + ], +} + +cc_library_shared { + name: "camera.device@3.4-external-impl", + defaults: ["hidl_defaults"], + proprietary: true, + vendor: true, + srcs: [ + "ExternalCameraDevice.cpp", + "ExternalCameraDeviceSession.cpp", + "ExternalCameraUtils.cpp", + ], + shared_libs: [ + "libhidlbase", + "libhidltransport", + "libutils", + "libcutils", + "camera.device@3.2-impl", + "camera.device@3.3-impl", + "android.hardware.camera.device@3.2", + "android.hardware.camera.device@3.3", + "android.hardware.camera.device@3.4", + "android.hardware.camera.provider@2.4", + "android.hardware.graphics.mapper@2.0", + "liblog", + "libhardware", + "libcamera_metadata", + "libfmq", + "libsync", + "libyuv", + "libjpeg", + "libexif", + "libtinyxml2" + ], + static_libs: [ + "android.hardware.camera.common@1.0-helper", + ], + local_include_dirs: ["include/ext_device_v3_4_impl"], + export_shared_lib_headers: [ + "libfmq", + ], +} diff --git a/camera/device/3.4/default/CameraDevice.cpp b/camera/device/3.4/default/CameraDevice.cpp new file mode 100644 index 0000000000000000000000000000000000000000..d73833a9c3d7b9afabbeba8302267a09c70d2a53 --- /dev/null +++ 
b/camera/device/3.4/default/CameraDevice.cpp @@ -0,0 +1,67 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "CamDev@3.4-impl" +#include + +#include +#include +#include "CameraDevice_3_4.h" +#include + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_4 { +namespace implementation { + +using ::android::hardware::camera::common::V1_0::Status; +using namespace ::android::hardware::camera::device; + +CameraDevice::CameraDevice( + sp module, const std::string& cameraId, + const SortedVector>& cameraDeviceNames) : + V3_2::implementation::CameraDevice(module, cameraId, cameraDeviceNames) { +} + +CameraDevice::~CameraDevice() { +} + +sp CameraDevice::createSession(camera3_device_t* device, + const camera_metadata_t* deviceInfo, + const sp& callback) { + sp session = new CameraDeviceSession(device, deviceInfo, callback); + IF_ALOGV() { + session->getInterface()->interfaceChain([]( + ::android::hardware::hidl_vec<::android::hardware::hidl_string> interfaceChain) { + ALOGV("Session interface chain:"); + for (auto iface : interfaceChain) { + ALOGV(" %s", iface.c_str()); + } + }); + } + return session; +} + +// End of methods from ::android::hardware::camera::device::V3_2::ICameraDevice. + +} // namespace implementation +} // namespace V3_4 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android diff --git a/camera/device/3.4/default/CameraDeviceSession.cpp b/camera/device/3.4/default/CameraDeviceSession.cpp new file mode 100644 index 0000000000000000000000000000000000000000..6a18161f20527b754ab8287d25dae27c04a21ee4 --- /dev/null +++ b/camera/device/3.4/default/CameraDeviceSession.cpp @@ -0,0 +1,722 @@ +/* + * Copyright (C) 2017-2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "CamDevSession@3.4-impl" +#include + +#include +#include +#include +#include +#include "CameraDeviceSession.h" + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_4 { +namespace implementation { + +CameraDeviceSession::CameraDeviceSession( + camera3_device_t* device, + const camera_metadata_t* deviceInfo, + const sp& callback) : + V3_3::implementation::CameraDeviceSession(device, deviceInfo, callback), + mResultBatcher_3_4(callback) { + + mHasCallback_3_4 = false; + + auto castResult = ICameraDeviceCallback::castFrom(callback); + if (castResult.isOk()) { + sp callback3_4 = castResult; + if (callback3_4 != nullptr) { + process_capture_result = sProcessCaptureResult_3_4; + notify = sNotify_3_4; + mHasCallback_3_4 = true; + if (!mInitFail) { + mResultBatcher_3_4.setResultMetadataQueue(mResultMetadataQueue); + } + } + } + + mResultBatcher_3_4.setNumPartialResults(mNumPartialResults); + + camera_metadata_entry_t capabilities = + mDeviceInfo.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES); + bool isLogicalMultiCamera = false; + for (size_t i = 0; i < capabilities.count; i++) { + if (capabilities.data.u8[i] == + ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA) { + isLogicalMultiCamera = true; + break; + } + } + if (isLogicalMultiCamera) { + camera_metadata_entry entry = + mDeviceInfo.find(ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS); + const uint8_t* ids = entry.data.u8; + size_t start = 0; + for (size_t i = 0; i < entry.count; ++i) { + if (ids[i] == '\0') { + if (start != i) { + const char* physicalId = reinterpret_cast(ids+start); + mPhysicalCameraIds.emplace(physicalId); + } + start = i + 1; + } + } + } +} + +CameraDeviceSession::~CameraDeviceSession() { +} + +Return CameraDeviceSession::configureStreams_3_4( + const StreamConfiguration& requestedConfiguration, + ICameraDeviceSession::configureStreams_3_4_cb _hidl_cb) { + Status status = initStatus(); + HalStreamConfiguration outStreams; + + // If callback is 3.2, make sure no physical stream is configured + if (!mHasCallback_3_4) { + for (size_t i = 0; i < requestedConfiguration.streams.size(); i++) { + if (requestedConfiguration.streams[i].physicalCameraId.size() > 0) { + ALOGE("%s: trying to configureStreams with physical camera id with V3.2 callback", + __FUNCTION__); + _hidl_cb(Status::INTERNAL_ERROR, outStreams); + return Void(); + } + } + } + + // hold the inflight lock for entire configureStreams scope since there must not be any + // inflight request/results during stream configuration. 
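+    // Any leftover inflight buffers or pending AE-trigger/RAW-boost overrides mean
+    // earlier requests have not fully returned; reconfiguring now could let the HAL
+    // touch streams that are about to be deleted, so those cases are rejected below
+    // with INTERNAL_ERROR.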
+ Mutex::Autolock _l(mInflightLock); + if (!mInflightBuffers.empty()) { + ALOGE("%s: trying to configureStreams while there are still %zu inflight buffers!", + __FUNCTION__, mInflightBuffers.size()); + _hidl_cb(Status::INTERNAL_ERROR, outStreams); + return Void(); + } + + if (!mInflightAETriggerOverrides.empty()) { + ALOGE("%s: trying to configureStreams while there are still %zu inflight" + " trigger overrides!", __FUNCTION__, + mInflightAETriggerOverrides.size()); + _hidl_cb(Status::INTERNAL_ERROR, outStreams); + return Void(); + } + + if (!mInflightRawBoostPresent.empty()) { + ALOGE("%s: trying to configureStreams while there are still %zu inflight" + " boost overrides!", __FUNCTION__, + mInflightRawBoostPresent.size()); + _hidl_cb(Status::INTERNAL_ERROR, outStreams); + return Void(); + } + + if (status != Status::OK) { + _hidl_cb(status, outStreams); + return Void(); + } + + const camera_metadata_t *paramBuffer = nullptr; + if (0 < requestedConfiguration.sessionParams.size()) { + V3_2::implementation::convertFromHidl(requestedConfiguration.sessionParams, ¶mBuffer); + } + + camera3_stream_configuration_t stream_list{}; + hidl_vec streams; + stream_list.session_parameters = paramBuffer; + if (!preProcessConfigurationLocked_3_4(requestedConfiguration, &stream_list, &streams)) { + _hidl_cb(Status::INTERNAL_ERROR, outStreams); + return Void(); + } + + ATRACE_BEGIN("camera3->configure_streams"); + status_t ret = mDevice->ops->configure_streams(mDevice, &stream_list); + ATRACE_END(); + + // In case Hal returns error most likely it was not able to release + // the corresponding resources of the deleted streams. + if (ret == OK) { + postProcessConfigurationLocked_3_4(requestedConfiguration); + } + + if (ret == -EINVAL) { + status = Status::ILLEGAL_ARGUMENT; + } else if (ret != OK) { + status = Status::INTERNAL_ERROR; + } else { + V3_4::implementation::convertToHidl(stream_list, &outStreams); + mFirstRequest = true; + } + + _hidl_cb(status, outStreams); + return Void(); +} + +bool CameraDeviceSession::preProcessConfigurationLocked_3_4( + const StreamConfiguration& requestedConfiguration, + camera3_stream_configuration_t *stream_list /*out*/, + hidl_vec *streams /*out*/) { + + if ((stream_list == nullptr) || (streams == nullptr)) { + return false; + } + + stream_list->operation_mode = (uint32_t) requestedConfiguration.operationMode; + stream_list->num_streams = requestedConfiguration.streams.size(); + streams->resize(stream_list->num_streams); + stream_list->streams = streams->data(); + + for (uint32_t i = 0; i < stream_list->num_streams; i++) { + int id = requestedConfiguration.streams[i].v3_2.id; + + if (mStreamMap.count(id) == 0) { + Camera3Stream stream; + convertFromHidl(requestedConfiguration.streams[i], &stream); + mStreamMap[id] = stream; + mPhysicalCameraIdMap[id] = requestedConfiguration.streams[i].physicalCameraId; + mStreamMap[id].data_space = mapToLegacyDataspace( + mStreamMap[id].data_space); + mStreamMap[id].physical_camera_id = mPhysicalCameraIdMap[id].c_str(); + mCirculatingBuffers.emplace(stream.mId, CirculatingBuffers{}); + } else { + // width/height/format must not change, but usage/rotation might need to change + if (mStreamMap[id].stream_type != + (int) requestedConfiguration.streams[i].v3_2.streamType || + mStreamMap[id].width != requestedConfiguration.streams[i].v3_2.width || + mStreamMap[id].height != requestedConfiguration.streams[i].v3_2.height || + mStreamMap[id].format != (int) requestedConfiguration.streams[i].v3_2.format || + mStreamMap[id].data_space != + 
mapToLegacyDataspace( static_cast ( + requestedConfiguration.streams[i].v3_2.dataSpace)) || + mPhysicalCameraIdMap[id] != requestedConfiguration.streams[i].physicalCameraId) { + ALOGE("%s: stream %d configuration changed!", __FUNCTION__, id); + return false; + } + mStreamMap[id].rotation = (int) requestedConfiguration.streams[i].v3_2.rotation; + mStreamMap[id].usage = (uint32_t) requestedConfiguration.streams[i].v3_2.usage; + } + (*streams)[i] = &mStreamMap[id]; + } + + return true; +} + +void CameraDeviceSession::postProcessConfigurationLocked_3_4( + const StreamConfiguration& requestedConfiguration) { + // delete unused streams, note we do this after adding new streams to ensure new stream + // will not have the same address as deleted stream, and HAL has a chance to reference + // the to be deleted stream in configure_streams call + for(auto it = mStreamMap.begin(); it != mStreamMap.end();) { + int id = it->first; + bool found = false; + for (const auto& stream : requestedConfiguration.streams) { + if (id == stream.v3_2.id) { + found = true; + break; + } + } + if (!found) { + // Unmap all buffers of deleted stream + // in case the configuration call succeeds and HAL + // is able to release the corresponding resources too. + cleanupBuffersLocked(id); + it = mStreamMap.erase(it); + } else { + ++it; + } + } + + // Track video streams + mVideoStreamIds.clear(); + for (const auto& stream : requestedConfiguration.streams) { + if (stream.v3_2.streamType == StreamType::OUTPUT && + stream.v3_2.usage & + graphics::common::V1_0::BufferUsage::VIDEO_ENCODER) { + mVideoStreamIds.push_back(stream.v3_2.id); + } + } + mResultBatcher_3_4.setBatchedStreams(mVideoStreamIds); +} + +Return CameraDeviceSession::processCaptureRequest_3_4( + const hidl_vec& requests, + const hidl_vec& cachesToRemove, + ICameraDeviceSession::processCaptureRequest_3_4_cb _hidl_cb) { + updateBufferCaches(cachesToRemove); + + uint32_t numRequestProcessed = 0; + Status s = Status::OK; + for (size_t i = 0; i < requests.size(); i++, numRequestProcessed++) { + s = processOneCaptureRequest_3_4(requests[i]); + if (s != Status::OK) { + break; + } + } + + if (s == Status::OK && requests.size() > 1) { + mResultBatcher_3_4.registerBatch(requests[0].v3_2.frameNumber, requests.size()); + } + + _hidl_cb(s, numRequestProcessed); + return Void(); +} + +Status CameraDeviceSession::processOneCaptureRequest_3_4(const V3_4::CaptureRequest& request) { + Status status = initStatus(); + if (status != Status::OK) { + ALOGE("%s: camera init failed or disconnected", __FUNCTION__); + return status; + } + // If callback is 3.2, make sure there are no physical settings. 
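+    // A plain @3.2 callback has no processCaptureResult_3_4, so per-physical-camera
+    // results could never be delivered; reject such requests up front instead of
+    // silently dropping the physical settings.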
+ if (!mHasCallback_3_4) { + if (request.physicalCameraSettings.size() > 0) { + ALOGE("%s: trying to call processCaptureRequest_3_4 with physical camera id " + "and V3.2 callback", __FUNCTION__); + return Status::INTERNAL_ERROR; + } + } + + camera3_capture_request_t halRequest; + halRequest.frame_number = request.v3_2.frameNumber; + + bool converted = true; + V3_2::CameraMetadata settingsFmq; // settings from FMQ + if (request.v3_2.fmqSettingsSize > 0) { + // non-blocking read; client must write metadata before calling + // processOneCaptureRequest + settingsFmq.resize(request.v3_2.fmqSettingsSize); + bool read = mRequestMetadataQueue->read(settingsFmq.data(), request.v3_2.fmqSettingsSize); + if (read) { + converted = V3_2::implementation::convertFromHidl(settingsFmq, &halRequest.settings); + } else { + ALOGE("%s: capture request settings metadata couldn't be read from fmq!", __FUNCTION__); + converted = false; + } + } else { + converted = V3_2::implementation::convertFromHidl(request.v3_2.settings, + &halRequest.settings); + } + + if (!converted) { + ALOGE("%s: capture request settings metadata is corrupt!", __FUNCTION__); + return Status::ILLEGAL_ARGUMENT; + } + + if (mFirstRequest && halRequest.settings == nullptr) { + ALOGE("%s: capture request settings must not be null for first request!", + __FUNCTION__); + return Status::ILLEGAL_ARGUMENT; + } + + hidl_vec allBufPtrs; + hidl_vec allFences; + bool hasInputBuf = (request.v3_2.inputBuffer.streamId != -1 && + request.v3_2.inputBuffer.bufferId != 0); + size_t numOutputBufs = request.v3_2.outputBuffers.size(); + size_t numBufs = numOutputBufs + (hasInputBuf ? 1 : 0); + + if (numOutputBufs == 0) { + ALOGE("%s: capture request must have at least one output buffer!", __FUNCTION__); + return Status::ILLEGAL_ARGUMENT; + } + + status = importRequest(request.v3_2, allBufPtrs, allFences); + if (status != Status::OK) { + return status; + } + + hidl_vec outHalBufs; + outHalBufs.resize(numOutputBufs); + bool aeCancelTriggerNeeded = false; + ::android::hardware::camera::common::V1_0::helper::CameraMetadata settingsOverride; + { + Mutex::Autolock _l(mInflightLock); + if (hasInputBuf) { + auto streamId = request.v3_2.inputBuffer.streamId; + auto key = std::make_pair(request.v3_2.inputBuffer.streamId, request.v3_2.frameNumber); + auto& bufCache = mInflightBuffers[key] = camera3_stream_buffer_t{}; + convertFromHidl( + allBufPtrs[numOutputBufs], request.v3_2.inputBuffer.status, + &mStreamMap[request.v3_2.inputBuffer.streamId], allFences[numOutputBufs], + &bufCache); + bufCache.stream->physical_camera_id = mPhysicalCameraIdMap[streamId].c_str(); + halRequest.input_buffer = &bufCache; + } else { + halRequest.input_buffer = nullptr; + } + + halRequest.num_output_buffers = numOutputBufs; + for (size_t i = 0; i < numOutputBufs; i++) { + auto streamId = request.v3_2.outputBuffers[i].streamId; + auto key = std::make_pair(streamId, request.v3_2.frameNumber); + auto& bufCache = mInflightBuffers[key] = camera3_stream_buffer_t{}; + convertFromHidl( + allBufPtrs[i], request.v3_2.outputBuffers[i].status, + &mStreamMap[streamId], allFences[i], + &bufCache); + bufCache.stream->physical_camera_id = mPhysicalCameraIdMap[streamId].c_str(); + outHalBufs[i] = bufCache; + } + halRequest.output_buffers = outHalBufs.data(); + + AETriggerCancelOverride triggerOverride; + aeCancelTriggerNeeded = handleAePrecaptureCancelRequestLocked( + halRequest, &settingsOverride /*out*/, &triggerOverride/*out*/); + if (aeCancelTriggerNeeded) { + 
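+            // The cancel is being emulated (see handleAePrecaptureCancelRequestLocked):
+            // record the override for this frame number so the matching capture results
+            // can be patched in constructCaptureResult(), and submit the overridden
+            // settings in place of the originals.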
mInflightAETriggerOverrides[halRequest.frame_number] = + triggerOverride; + halRequest.settings = settingsOverride.getAndLock(); + } + } + + std::vector physicalCameraIds; + std::vector physicalCameraSettings; + std::vector physicalFmq; + size_t settingsCount = request.physicalCameraSettings.size(); + if (settingsCount > 0) { + physicalCameraIds.reserve(settingsCount); + physicalCameraSettings.reserve(settingsCount); + physicalFmq.reserve(settingsCount); + + for (size_t i = 0; i < settingsCount; i++) { + uint64_t settingsSize = request.physicalCameraSettings[i].fmqSettingsSize; + const camera_metadata_t *settings = nullptr; + if (settingsSize > 0) { + physicalFmq.push_back(V3_2::CameraMetadata(settingsSize)); + bool read = mRequestMetadataQueue->read(physicalFmq[i].data(), settingsSize); + if (read) { + converted = V3_2::implementation::convertFromHidl(physicalFmq[i], &settings); + physicalCameraSettings.push_back(settings); + } else { + ALOGE("%s: physical camera settings metadata couldn't be read from fmq!", + __FUNCTION__); + converted = false; + } + } else { + converted = V3_2::implementation::convertFromHidl( + request.physicalCameraSettings[i].settings, &settings); + physicalCameraSettings.push_back(settings); + } + + if (!converted) { + ALOGE("%s: physical camera settings metadata is corrupt!", __FUNCTION__); + return Status::ILLEGAL_ARGUMENT; + } + + if (mFirstRequest && settings == nullptr) { + ALOGE("%s: Individual request settings must not be null for first request!", + __FUNCTION__); + return Status::ILLEGAL_ARGUMENT; + } + + physicalCameraIds.push_back(request.physicalCameraSettings[i].physicalCameraId.c_str()); + } + } + halRequest.num_physcam_settings = settingsCount; + halRequest.physcam_id = physicalCameraIds.data(); + halRequest.physcam_settings = physicalCameraSettings.data(); + + ATRACE_ASYNC_BEGIN("frame capture", request.v3_2.frameNumber); + ATRACE_BEGIN("camera3->process_capture_request"); + status_t ret = mDevice->ops->process_capture_request(mDevice, &halRequest); + ATRACE_END(); + if (aeCancelTriggerNeeded) { + settingsOverride.unlock(halRequest.settings); + } + if (ret != OK) { + Mutex::Autolock _l(mInflightLock); + ALOGE("%s: HAL process_capture_request call failed!", __FUNCTION__); + + cleanupInflightFences(allFences, numBufs); + if (hasInputBuf) { + auto key = std::make_pair(request.v3_2.inputBuffer.streamId, request.v3_2.frameNumber); + mInflightBuffers.erase(key); + } + for (size_t i = 0; i < numOutputBufs; i++) { + auto key = std::make_pair(request.v3_2.outputBuffers[i].streamId, + request.v3_2.frameNumber); + mInflightBuffers.erase(key); + } + if (aeCancelTriggerNeeded) { + mInflightAETriggerOverrides.erase(request.v3_2.frameNumber); + } + + if (ret == BAD_VALUE) { + return Status::ILLEGAL_ARGUMENT; + } else { + return Status::INTERNAL_ERROR; + } + } + + mFirstRequest = false; + return Status::OK; +} + +/** + * Static callback forwarding methods from HAL to instance + */ +void CameraDeviceSession::sProcessCaptureResult_3_4( + const camera3_callback_ops *cb, + const camera3_capture_result *hal_result) { + CameraDeviceSession *d = + const_cast(static_cast(cb)); + + CaptureResult result = {}; + camera3_capture_result shadowResult; + bool handlePhysCam = (d->mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_5); + std::vector<::android::hardware::camera::common::V1_0::helper::CameraMetadata> compactMds; + std::vector physCamMdArray; + sShrinkCaptureResult(&shadowResult, hal_result, &compactMds, &physCamMdArray, handlePhysCam); + + status_t ret = 
d->constructCaptureResult(result.v3_2, &shadowResult); + if (ret != OK) { + return; + } + + if (handlePhysCam) { + if (shadowResult.num_physcam_metadata > d->mPhysicalCameraIds.size()) { + ALOGE("%s: Fatal: Invalid num_physcam_metadata %u", __FUNCTION__, + shadowResult.num_physcam_metadata); + return; + } + result.physicalCameraMetadata.resize(shadowResult.num_physcam_metadata); + for (uint32_t i = 0; i < shadowResult.num_physcam_metadata; i++) { + std::string physicalId = shadowResult.physcam_ids[i]; + if (d->mPhysicalCameraIds.find(physicalId) == d->mPhysicalCameraIds.end()) { + ALOGE("%s: Fatal: Invalid physcam_ids[%u]: %s", __FUNCTION__, + i, shadowResult.physcam_ids[i]); + return; + } + V3_2::CameraMetadata physicalMetadata; + V3_2::implementation::convertToHidl( + shadowResult.physcam_metadata[i], &physicalMetadata); + PhysicalCameraMetadata physicalCameraMetadata = { + .fmqMetadataSize = 0, + .physicalCameraId = physicalId, + .metadata = physicalMetadata }; + result.physicalCameraMetadata[i] = physicalCameraMetadata; + } + } + d->mResultBatcher_3_4.processCaptureResult_3_4(result); +} + +void CameraDeviceSession::sNotify_3_4( + const camera3_callback_ops *cb, + const camera3_notify_msg *msg) { + CameraDeviceSession *d = + const_cast(static_cast(cb)); + V3_2::NotifyMsg hidlMsg; + V3_2::implementation::convertToHidl(msg, &hidlMsg); + + if (hidlMsg.type == (V3_2::MsgType) CAMERA3_MSG_ERROR && + hidlMsg.msg.error.errorStreamId != -1) { + if (d->mStreamMap.count(hidlMsg.msg.error.errorStreamId) != 1) { + ALOGE("%s: unknown stream ID %d reports an error!", + __FUNCTION__, hidlMsg.msg.error.errorStreamId); + return; + } + } + + if (static_cast(hidlMsg.type) == CAMERA3_MSG_ERROR) { + switch (hidlMsg.msg.error.errorCode) { + case V3_2::ErrorCode::ERROR_DEVICE: + case V3_2::ErrorCode::ERROR_REQUEST: + case V3_2::ErrorCode::ERROR_RESULT: { + Mutex::Autolock _l(d->mInflightLock); + auto entry = d->mInflightAETriggerOverrides.find( + hidlMsg.msg.error.frameNumber); + if (d->mInflightAETriggerOverrides.end() != entry) { + d->mInflightAETriggerOverrides.erase( + hidlMsg.msg.error.frameNumber); + } + + auto boostEntry = d->mInflightRawBoostPresent.find( + hidlMsg.msg.error.frameNumber); + if (d->mInflightRawBoostPresent.end() != boostEntry) { + d->mInflightRawBoostPresent.erase( + hidlMsg.msg.error.frameNumber); + } + + } + break; + case V3_2::ErrorCode::ERROR_BUFFER: + default: + break; + } + + } + + d->mResultBatcher_3_4.notify(hidlMsg); +} + +CameraDeviceSession::ResultBatcher_3_4::ResultBatcher_3_4( + const sp& callback) : + V3_3::implementation::CameraDeviceSession::ResultBatcher(callback) { + auto castResult = ICameraDeviceCallback::castFrom(callback); + if (castResult.isOk()) { + mCallback_3_4 = castResult; + } +} + +void CameraDeviceSession::ResultBatcher_3_4::processCaptureResult_3_4(CaptureResult& result) { + auto pair = getBatch(result.v3_2.frameNumber); + int batchIdx = pair.first; + if (batchIdx == NOT_BATCHED) { + processOneCaptureResult_3_4(result); + return; + } + std::shared_ptr batch = pair.second; + { + Mutex::Autolock _l(batch->mLock); + // Check if the batch is removed (mostly by notify error) before lock was acquired + if (batch->mRemoved) { + // Fall back to non-batch path + processOneCaptureResult_3_4(result); + return; + } + + // queue metadata + if (result.v3_2.result.size() != 0) { + // Save a copy of metadata + batch->mResultMds[result.v3_2.partialResult].mMds.push_back( + std::make_pair(result.v3_2.frameNumber, result.v3_2.result)); + } + + // queue buffer + std::vector 
filledStreams; + std::vector nonBatchedBuffers; + for (auto& buffer : result.v3_2.outputBuffers) { + auto it = batch->mBatchBufs.find(buffer.streamId); + if (it != batch->mBatchBufs.end()) { + InflightBatch::BufferBatch& bb = it->second; + pushStreamBuffer(std::move(buffer), bb.mBuffers); + filledStreams.push_back(buffer.streamId); + } else { + pushStreamBuffer(std::move(buffer), nonBatchedBuffers); + } + } + + // send non-batched buffers up + if (nonBatchedBuffers.size() > 0 || result.v3_2.inputBuffer.streamId != -1) { + CaptureResult nonBatchedResult; + nonBatchedResult.v3_2.frameNumber = result.v3_2.frameNumber; + nonBatchedResult.v3_2.fmqResultSize = 0; + nonBatchedResult.v3_2.outputBuffers.resize(nonBatchedBuffers.size()); + for (size_t i = 0; i < nonBatchedBuffers.size(); i++) { + moveStreamBuffer( + std::move(nonBatchedBuffers[i]), nonBatchedResult.v3_2.outputBuffers[i]); + } + moveStreamBuffer(std::move(result.v3_2.inputBuffer), nonBatchedResult.v3_2.inputBuffer); + nonBatchedResult.v3_2.partialResult = 0; // 0 for buffer only results + processOneCaptureResult_3_4(nonBatchedResult); + } + + if (result.v3_2.frameNumber == batch->mLastFrame) { + // Send data up + if (result.v3_2.partialResult > 0) { + sendBatchMetadataLocked(batch, result.v3_2.partialResult); + } + // send buffer up + if (filledStreams.size() > 0) { + sendBatchBuffersLocked(batch, filledStreams); + } + } + } // end of batch lock scope + + // see if the batch is complete + if (result.v3_2.frameNumber == batch->mLastFrame) { + checkAndRemoveFirstBatch(); + } +} + +void CameraDeviceSession::ResultBatcher_3_4::processOneCaptureResult_3_4(CaptureResult& result) { + hidl_vec results; + results.resize(1); + results[0] = std::move(result); + invokeProcessCaptureResultCallback_3_4(results, /* tryWriteFmq */true); + freeReleaseFences_3_4(results); + return; +} + +void CameraDeviceSession::ResultBatcher_3_4::invokeProcessCaptureResultCallback_3_4( + hidl_vec &results, bool tryWriteFmq) { + if (mProcessCaptureResultLock.tryLock() != OK) { + ALOGV("%s: previous call is not finished! 
waiting 1s...", __FUNCTION__); + if (mProcessCaptureResultLock.timedLock(1000000000 /* 1s */) != OK) { + ALOGE("%s: cannot acquire lock in 1s, cannot proceed", + __FUNCTION__); + return; + } + } + if (tryWriteFmq && mResultMetadataQueue->availableToWrite() > 0) { + for (CaptureResult &result : results) { + if (result.v3_2.result.size() > 0) { + if (mResultMetadataQueue->write(result.v3_2.result.data(), + result.v3_2.result.size())) { + result.v3_2.fmqResultSize = result.v3_2.result.size(); + result.v3_2.result.resize(0); + } else { + ALOGW("%s: couldn't utilize fmq, fall back to hwbinder", __FUNCTION__); + result.v3_2.fmqResultSize = 0; + } + } + + for (auto& onePhysMetadata : result.physicalCameraMetadata) { + if (mResultMetadataQueue->write(onePhysMetadata.metadata.data(), + onePhysMetadata.metadata.size())) { + onePhysMetadata.fmqMetadataSize = onePhysMetadata.metadata.size(); + onePhysMetadata.metadata.resize(0); + } else { + ALOGW("%s: couldn't utilize fmq, fall back to hwbinder", __FUNCTION__); + onePhysMetadata.fmqMetadataSize = 0; + } + } + } + } + mCallback_3_4->processCaptureResult_3_4(results); + mProcessCaptureResultLock.unlock(); +} + +void CameraDeviceSession::ResultBatcher_3_4::freeReleaseFences_3_4(hidl_vec& results) { + for (auto& result : results) { + if (result.v3_2.inputBuffer.releaseFence.getNativeHandle() != nullptr) { + native_handle_t* handle = const_cast( + result.v3_2.inputBuffer.releaseFence.getNativeHandle()); + native_handle_close(handle); + native_handle_delete(handle); + } + for (auto& buf : result.v3_2.outputBuffers) { + if (buf.releaseFence.getNativeHandle() != nullptr) { + native_handle_t* handle = const_cast( + buf.releaseFence.getNativeHandle()); + native_handle_close(handle); + native_handle_delete(handle); + } + } + } + return; +} + +} // namespace implementation +} // namespace V3_4 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android diff --git a/camera/device/3.4/default/ExternalCameraDevice.cpp b/camera/device/3.4/default/ExternalCameraDevice.cpp new file mode 100644 index 0000000000000000000000000000000000000000..ee7ffaa8d66a8ef33ad4c7aa062f10c3f6421d08 --- /dev/null +++ b/camera/device/3.4/default/ExternalCameraDevice.cpp @@ -0,0 +1,928 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "ExtCamDev@3.4" +//#define LOG_NDEBUG 0 +#include + +#include +#include +#include +#include "android-base/macros.h" +#include "CameraMetadata.h" +#include "../../3.2/default/include/convert.h" +#include "ExternalCameraDevice_3_4.h" + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_4 { +namespace implementation { + +namespace { +// Only support MJPEG for now as it seems to be the one supports higher fps +// Other formats to consider in the future: +// * V4L2_PIX_FMT_YVU420 (== YV12) +// * V4L2_PIX_FMT_YVYU (YVYU: can be converted to YV12 or other YUV420_888 formats) +const std::array kSupportedFourCCs {{ + V4L2_PIX_FMT_MJPEG +}}; // double braces required in C++11 + +constexpr int MAX_RETRY = 5; // Allow retry v4l2 open failures a few times. +constexpr int OPEN_RETRY_SLEEP_US = 100000; // 100ms * MAX_RETRY = 0.5 seconds + +} // anonymous namespace + +ExternalCameraDevice::ExternalCameraDevice( + const std::string& cameraId, const ExternalCameraConfig& cfg) : + mCameraId(cameraId), + mCfg(cfg) { + + status_t ret = initCameraCharacteristics(); + if (ret != OK) { + ALOGE("%s: init camera characteristics failed: errorno %d", __FUNCTION__, ret); + mInitFailed = true; + } +} + +ExternalCameraDevice::~ExternalCameraDevice() {} + +bool ExternalCameraDevice::isInitFailed() { + return mInitFailed; +} + +Return ExternalCameraDevice::getResourceCost(getResourceCost_cb _hidl_cb) { + CameraResourceCost resCost; + resCost.resourceCost = 100; + _hidl_cb(Status::OK, resCost); + return Void(); +} + +Return ExternalCameraDevice::getCameraCharacteristics( + getCameraCharacteristics_cb _hidl_cb) { + Mutex::Autolock _l(mLock); + V3_2::CameraMetadata hidlChars; + + if (isInitFailed()) { + _hidl_cb(Status::INTERNAL_ERROR, hidlChars); + return Void(); + } + + const camera_metadata_t* rawMetadata = mCameraCharacteristics.getAndLock(); + V3_2::implementation::convertToHidl(rawMetadata, &hidlChars); + _hidl_cb(Status::OK, hidlChars); + mCameraCharacteristics.unlock(rawMetadata); + return Void(); +} + +Return ExternalCameraDevice::setTorchMode(TorchMode) { + return Status::METHOD_NOT_SUPPORTED; +} + +Return ExternalCameraDevice::open( + const sp& callback, open_cb _hidl_cb) { + Status status = Status::OK; + sp session = nullptr; + + if (callback == nullptr) { + ALOGE("%s: cannot open camera %s. callback is null!", + __FUNCTION__, mCameraId.c_str()); + _hidl_cb(Status::ILLEGAL_ARGUMENT, nullptr); + return Void(); + } + + if (isInitFailed()) { + ALOGE("%s: cannot open camera %s. 
camera init failed!", + __FUNCTION__, mCameraId.c_str()); + _hidl_cb(Status::INTERNAL_ERROR, nullptr); + return Void(); + } + + mLock.lock(); + + ALOGV("%s: Initializing device for camera %s", __FUNCTION__, mCameraId.c_str()); + session = mSession.promote(); + if (session != nullptr && !session->isClosed()) { + ALOGE("%s: cannot open an already opened camera!", __FUNCTION__); + mLock.unlock(); + _hidl_cb(Status::CAMERA_IN_USE, nullptr); + return Void(); + } + + unique_fd fd(::open(mCameraId.c_str(), O_RDWR)); + if (fd.get() < 0) { + int numAttempt = 0; + do { + ALOGW("%s: v4l2 device %s open failed, wait 33ms and try again", + __FUNCTION__, mCameraId.c_str()); + usleep(OPEN_RETRY_SLEEP_US); // sleep and try again + fd.reset(::open(mCameraId.c_str(), O_RDWR)); + numAttempt++; + } while (fd.get() < 0 && numAttempt <= MAX_RETRY); + + if (fd.get() < 0) { + ALOGE("%s: v4l2 device open %s failed: %s", + __FUNCTION__, mCameraId.c_str(), strerror(errno)); + mLock.unlock(); + _hidl_cb(Status::INTERNAL_ERROR, nullptr); + return Void(); + } + } + + session = new ExternalCameraDeviceSession( + callback, mCfg, mSupportedFormats, mCroppingType, + mCameraCharacteristics, mCameraId, std::move(fd)); + if (session == nullptr) { + ALOGE("%s: camera device session allocation failed", __FUNCTION__); + mLock.unlock(); + _hidl_cb(Status::INTERNAL_ERROR, nullptr); + return Void(); + } + if (session->isInitFailed()) { + ALOGE("%s: camera device session init failed", __FUNCTION__); + session = nullptr; + mLock.unlock(); + _hidl_cb(Status::INTERNAL_ERROR, nullptr); + return Void(); + } + mSession = session; + + mLock.unlock(); + + _hidl_cb(status, session->getInterface()); + return Void(); +} + +Return ExternalCameraDevice::dumpState(const ::android::hardware::hidl_handle& handle) { + Mutex::Autolock _l(mLock); + if (handle.getNativeHandle() == nullptr) { + ALOGE("%s: handle must not be null", __FUNCTION__); + return Void(); + } + if (handle->numFds != 1 || handle->numInts != 0) { + ALOGE("%s: handle must contain 1 FD and 0 integers! 
Got %d FDs and %d ints", + __FUNCTION__, handle->numFds, handle->numInts); + return Void(); + } + int fd = handle->data[0]; + if (mSession == nullptr) { + dprintf(fd, "No active camera device session instance\n"); + return Void(); + } + auto session = mSession.promote(); + if (session == nullptr) { + dprintf(fd, "No active camera device session instance\n"); + return Void(); + } + // Call into active session to dump states + session->dumpState(handle); + return Void(); +} + + +status_t ExternalCameraDevice::initCameraCharacteristics() { + if (mCameraCharacteristics.isEmpty()) { + // init camera characteristics + unique_fd fd(::open(mCameraId.c_str(), O_RDWR)); + if (fd.get() < 0) { + ALOGE("%s: v4l2 device open %s failed", __FUNCTION__, mCameraId.c_str()); + return DEAD_OBJECT; + } + + status_t ret; + ret = initDefaultCharsKeys(&mCameraCharacteristics); + if (ret != OK) { + ALOGE("%s: init default characteristics key failed: errorno %d", __FUNCTION__, ret); + mCameraCharacteristics.clear(); + return ret; + } + + ret = initCameraControlsCharsKeys(fd.get(), &mCameraCharacteristics); + if (ret != OK) { + ALOGE("%s: init camera control characteristics key failed: errorno %d", __FUNCTION__, ret); + mCameraCharacteristics.clear(); + return ret; + } + + ret = initOutputCharsKeys(fd.get(), &mCameraCharacteristics); + if (ret != OK) { + ALOGE("%s: init output characteristics key failed: errorno %d", __FUNCTION__, ret); + mCameraCharacteristics.clear(); + return ret; + } + } + return OK; +} + +#define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0])) +#define UPDATE(tag, data, size) \ +do { \ + if (metadata->update((tag), (data), (size))) { \ + ALOGE("Update " #tag " failed!"); \ + return -EINVAL; \ + } \ +} while (0) + +status_t ExternalCameraDevice::initDefaultCharsKeys( + ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) { + const uint8_t hardware_level = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL; + UPDATE(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, &hardware_level, 1); + + // android.colorCorrection + const uint8_t availableAberrationModes[] = { + ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF}; + UPDATE(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES, + availableAberrationModes, ARRAY_SIZE(availableAberrationModes)); + + // android.control + const uint8_t antibandingMode = + ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO; + UPDATE(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, + &antibandingMode, 1); + + const int32_t controlMaxRegions[] = {/*AE*/ 0, /*AWB*/ 0, /*AF*/ 0}; + UPDATE(ANDROID_CONTROL_MAX_REGIONS, controlMaxRegions, + ARRAY_SIZE(controlMaxRegions)); + + const uint8_t videoStabilizationMode = + ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF; + UPDATE(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, + &videoStabilizationMode, 1); + + const uint8_t awbAvailableMode = ANDROID_CONTROL_AWB_MODE_AUTO; + UPDATE(ANDROID_CONTROL_AWB_AVAILABLE_MODES, &awbAvailableMode, 1); + + const uint8_t aeAvailableMode = ANDROID_CONTROL_AE_MODE_ON; + UPDATE(ANDROID_CONTROL_AE_AVAILABLE_MODES, &aeAvailableMode, 1); + + const uint8_t availableFffect = ANDROID_CONTROL_EFFECT_MODE_OFF; + UPDATE(ANDROID_CONTROL_AVAILABLE_EFFECTS, &availableFffect, 1); + + const uint8_t controlAvailableModes[] = {ANDROID_CONTROL_MODE_OFF, + ANDROID_CONTROL_MODE_AUTO}; + UPDATE(ANDROID_CONTROL_AVAILABLE_MODES, controlAvailableModes, + ARRAY_SIZE(controlAvailableModes)); + + // android.edge + const uint8_t edgeMode = ANDROID_EDGE_MODE_OFF; + UPDATE(ANDROID_EDGE_AVAILABLE_EDGE_MODES, &edgeMode, 1); + + // 
android.flash + const uint8_t flashInfo = ANDROID_FLASH_INFO_AVAILABLE_FALSE; + UPDATE(ANDROID_FLASH_INFO_AVAILABLE, &flashInfo, 1); + + // android.hotPixel + const uint8_t hotPixelMode = ANDROID_HOT_PIXEL_MODE_OFF; + UPDATE(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES, &hotPixelMode, 1); + + // android.jpeg + // TODO: b/72261675 See if we can provide thumbnail size for all jpeg aspect ratios + const int32_t jpegAvailableThumbnailSizes[] = {0, 0, 240, 180}; + UPDATE(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, jpegAvailableThumbnailSizes, + ARRAY_SIZE(jpegAvailableThumbnailSizes)); + + const int32_t jpegMaxSize = mCfg.maxJpegBufSize; + UPDATE(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1); + + // android.lens + const uint8_t focusDistanceCalibration = + ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED; + UPDATE(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION, &focusDistanceCalibration, 1); + + const uint8_t opticalStabilizationMode = + ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF; + UPDATE(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION, + &opticalStabilizationMode, 1); + + const uint8_t facing = ANDROID_LENS_FACING_EXTERNAL; + UPDATE(ANDROID_LENS_FACING, &facing, 1); + + // android.noiseReduction + const uint8_t noiseReductionMode = ANDROID_NOISE_REDUCTION_MODE_OFF; + UPDATE(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES, + &noiseReductionMode, 1); + UPDATE(ANDROID_NOISE_REDUCTION_MODE, &noiseReductionMode, 1); + + // android.request + const uint8_t availableCapabilities[] = { + ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE}; + UPDATE(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, availableCapabilities, + ARRAY_SIZE(availableCapabilities)); + + const int32_t partialResultCount = 1; + UPDATE(ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &partialResultCount, 1); + + // This means pipeline latency of X frame intervals. The maximum number is 4. + const uint8_t requestPipelineMaxDepth = 4; + UPDATE(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, &requestPipelineMaxDepth, 1); + + // Three numbers represent the maximum numbers of different types of output + // streams simultaneously. The types are raw sensor, processed (but not + // stalling), and processed (but stalling). For usb limited mode, raw sensor + // is not supported. Stalling stream is JPEG. Non-stalling streams are + // YUV_420_888 or YV12. + const int32_t requestMaxNumOutputStreams[] = { + /*RAW*/0, + /*Processed*/ExternalCameraDeviceSession::kMaxProcessedStream, + /*Stall*/ExternalCameraDeviceSession::kMaxStallStream}; + UPDATE(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, requestMaxNumOutputStreams, + ARRAY_SIZE(requestMaxNumOutputStreams)); + + // Limited mode doesn't support reprocessing. 
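+ // For illustration only (restating the limits advertised just above): a
+ // single capture session can run up to kMaxProcessedStream non-stalling
+ // YUV streams plus up to kMaxStallStream stalling JPEG (BLOB) streams,
+ // but no RAW stream and, per the next key, no input stream for
+ // reprocessing.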
+ const int32_t requestMaxNumInputStreams = 0; + UPDATE(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS, &requestMaxNumInputStreams, + 1); + + // android.scaler + // TODO: b/72263447 V4L2_CID_ZOOM_* + const float scalerAvailableMaxDigitalZoom[] = {1}; + UPDATE(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, + scalerAvailableMaxDigitalZoom, + ARRAY_SIZE(scalerAvailableMaxDigitalZoom)); + + const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY; + UPDATE(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1); + + const int32_t testPatternModes[] = { + ANDROID_SENSOR_TEST_PATTERN_MODE_OFF}; + UPDATE(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, testPatternModes, + ARRAY_SIZE(testPatternModes)); + + const uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN; + UPDATE(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, ×tampSource, 1); + + // Orientation probably isn't useful for external facing camera? + const int32_t orientation = 0; + UPDATE(ANDROID_SENSOR_ORIENTATION, &orientation, 1); + + // android.shading + const uint8_t availabeMode = ANDROID_SHADING_MODE_OFF; + UPDATE(ANDROID_SHADING_AVAILABLE_MODES, &availabeMode, 1); + + // android.statistics + const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF; + UPDATE(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, &faceDetectMode, + 1); + + const int32_t maxFaceCount = 0; + UPDATE(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, &maxFaceCount, 1); + + const uint8_t availableHotpixelMode = + ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF; + UPDATE(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES, + &availableHotpixelMode, 1); + + const uint8_t lensShadingMapMode = + ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF; + UPDATE(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES, + &lensShadingMapMode, 1); + + // android.sync + const int32_t maxLatency = ANDROID_SYNC_MAX_LATENCY_UNKNOWN; + UPDATE(ANDROID_SYNC_MAX_LATENCY, &maxLatency, 1); + + /* Other sensor/RAW realted keys: + * android.sensor.info.colorFilterArrangement -> no need if we don't do RAW + * android.sensor.info.physicalSize -> not available + * android.sensor.info.whiteLevel -> not available/not needed + * android.sensor.info.lensShadingApplied -> not needed + * android.sensor.info.preCorrectionActiveArraySize -> not available/not needed + * android.sensor.blackLevelPattern -> not available/not needed + */ + + const int32_t availableRequestKeys[] = { + ANDROID_COLOR_CORRECTION_ABERRATION_MODE, + ANDROID_CONTROL_AE_ANTIBANDING_MODE, + ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, + ANDROID_CONTROL_AE_LOCK, + ANDROID_CONTROL_AE_MODE, + ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, + ANDROID_CONTROL_AE_TARGET_FPS_RANGE, + ANDROID_CONTROL_AF_MODE, + ANDROID_CONTROL_AF_TRIGGER, + ANDROID_CONTROL_AWB_LOCK, + ANDROID_CONTROL_AWB_MODE, + ANDROID_CONTROL_CAPTURE_INTENT, + ANDROID_CONTROL_EFFECT_MODE, + ANDROID_CONTROL_MODE, + ANDROID_CONTROL_SCENE_MODE, + ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, + ANDROID_FLASH_MODE, + ANDROID_JPEG_ORIENTATION, + ANDROID_JPEG_QUALITY, + ANDROID_JPEG_THUMBNAIL_QUALITY, + ANDROID_JPEG_THUMBNAIL_SIZE, + ANDROID_LENS_OPTICAL_STABILIZATION_MODE, + ANDROID_NOISE_REDUCTION_MODE, + ANDROID_SCALER_CROP_REGION, + ANDROID_SENSOR_TEST_PATTERN_MODE, + ANDROID_STATISTICS_FACE_DETECT_MODE, + ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE}; + UPDATE(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, availableRequestKeys, + ARRAY_SIZE(availableRequestKeys)); + + const int32_t availableResultKeys[] = { + ANDROID_COLOR_CORRECTION_ABERRATION_MODE, + ANDROID_CONTROL_AE_ANTIBANDING_MODE, + 
ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, + ANDROID_CONTROL_AE_LOCK, + ANDROID_CONTROL_AE_MODE, + ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, + ANDROID_CONTROL_AE_STATE, + ANDROID_CONTROL_AE_TARGET_FPS_RANGE, + ANDROID_CONTROL_AF_MODE, + ANDROID_CONTROL_AF_STATE, + ANDROID_CONTROL_AF_TRIGGER, + ANDROID_CONTROL_AWB_LOCK, + ANDROID_CONTROL_AWB_MODE, + ANDROID_CONTROL_AWB_STATE, + ANDROID_CONTROL_CAPTURE_INTENT, + ANDROID_CONTROL_EFFECT_MODE, + ANDROID_CONTROL_MODE, + ANDROID_CONTROL_SCENE_MODE, + ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, + ANDROID_FLASH_MODE, + ANDROID_FLASH_STATE, + ANDROID_JPEG_ORIENTATION, + ANDROID_JPEG_QUALITY, + ANDROID_JPEG_THUMBNAIL_QUALITY, + ANDROID_JPEG_THUMBNAIL_SIZE, + ANDROID_LENS_OPTICAL_STABILIZATION_MODE, + ANDROID_NOISE_REDUCTION_MODE, + ANDROID_REQUEST_PIPELINE_DEPTH, + ANDROID_SCALER_CROP_REGION, + ANDROID_SENSOR_TIMESTAMP, + ANDROID_STATISTICS_FACE_DETECT_MODE, + ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, + ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, + ANDROID_STATISTICS_SCENE_FLICKER}; + UPDATE(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, availableResultKeys, + ARRAY_SIZE(availableResultKeys)); + + const int32_t availableCharacteristicsKeys[] = { + ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES, + ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, + ANDROID_CONTROL_AE_AVAILABLE_MODES, + ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, + ANDROID_CONTROL_AE_COMPENSATION_RANGE, + ANDROID_CONTROL_AE_COMPENSATION_STEP, + ANDROID_CONTROL_AE_LOCK_AVAILABLE, + ANDROID_CONTROL_AF_AVAILABLE_MODES, + ANDROID_CONTROL_AVAILABLE_EFFECTS, + ANDROID_CONTROL_AVAILABLE_MODES, + ANDROID_CONTROL_AVAILABLE_SCENE_MODES, + ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, + ANDROID_CONTROL_AWB_AVAILABLE_MODES, + ANDROID_CONTROL_AWB_LOCK_AVAILABLE, + ANDROID_CONTROL_MAX_REGIONS, + ANDROID_FLASH_INFO_AVAILABLE, + ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, + ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, + ANDROID_LENS_FACING, + ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION, + ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION, + ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES, + ANDROID_REQUEST_AVAILABLE_CAPABILITIES, + ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS, + ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, + ANDROID_REQUEST_PARTIAL_RESULT_COUNT, + ANDROID_REQUEST_PIPELINE_MAX_DEPTH, + ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, + ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, + ANDROID_SCALER_CROPPING_TYPE, + ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, + ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, + ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, + ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, + ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, + ANDROID_SENSOR_ORIENTATION, + ANDROID_SHADING_AVAILABLE_MODES, + ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, + ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES, + ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES, + ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, + ANDROID_SYNC_MAX_LATENCY}; + UPDATE(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, + availableCharacteristicsKeys, + ARRAY_SIZE(availableCharacteristicsKeys)); + + return OK; +} + +status_t ExternalCameraDevice::initCameraControlsCharsKeys(int, + ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) { + /** + * android.sensor.info.sensitivityRange -> V4L2_CID_ISO_SENSITIVITY + * android.sensor.info.exposureTimeRange -> V4L2_CID_EXPOSURE_ABSOLUTE + * android.sensor.info.maxFrameDuration -> TBD + * android.lens.info.minimumFocusDistance -> V4L2_CID_FOCUS_ABSOLUTE + * 
android.lens.info.hyperfocalDistance + * android.lens.info.availableFocalLengths -> not available? + */ + + // android.control + // No AE compensation support for now. + // TODO: V4L2_CID_EXPOSURE_BIAS + const int32_t controlAeCompensationRange[] = {0, 0}; + UPDATE(ANDROID_CONTROL_AE_COMPENSATION_RANGE, controlAeCompensationRange, + ARRAY_SIZE(controlAeCompensationRange)); + const camera_metadata_rational_t controlAeCompensationStep[] = {{0, 1}}; + UPDATE(ANDROID_CONTROL_AE_COMPENSATION_STEP, controlAeCompensationStep, + ARRAY_SIZE(controlAeCompensationStep)); + + + // TODO: Check V4L2_CID_AUTO_FOCUS_*. + const uint8_t afAvailableModes[] = {ANDROID_CONTROL_AF_MODE_AUTO, + ANDROID_CONTROL_AF_MODE_OFF}; + UPDATE(ANDROID_CONTROL_AF_AVAILABLE_MODES, afAvailableModes, + ARRAY_SIZE(afAvailableModes)); + + // TODO: V4L2_CID_SCENE_MODE + const uint8_t availableSceneMode = ANDROID_CONTROL_SCENE_MODE_DISABLED; + UPDATE(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, &availableSceneMode, 1); + + // TODO: V4L2_CID_3A_LOCK + const uint8_t aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE; + UPDATE(ANDROID_CONTROL_AE_LOCK_AVAILABLE, &aeLockAvailable, 1); + const uint8_t awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE; + UPDATE(ANDROID_CONTROL_AWB_LOCK_AVAILABLE, &awbLockAvailable, 1); + + // TODO: V4L2_CID_ZOOM_* + const float scalerAvailableMaxDigitalZoom[] = {1}; + UPDATE(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, + scalerAvailableMaxDigitalZoom, + ARRAY_SIZE(scalerAvailableMaxDigitalZoom)); + + return OK; +} + +status_t ExternalCameraDevice::initOutputCharsKeys(int fd, + ::android::hardware::camera::common::V1_0::helper::CameraMetadata* metadata) { + initSupportedFormatsLocked(fd); + if (mSupportedFormats.empty()) { + ALOGE("%s: Init supported format list failed", __FUNCTION__); + return UNKNOWN_ERROR; + } + + std::vector streamConfigurations; + std::vector minFrameDurations; + std::vector stallDurations; + int32_t maxFps = std::numeric_limits::min(); + int32_t minFps = std::numeric_limits::max(); + std::set framerates; + + std::array halFormats{{ + HAL_PIXEL_FORMAT_BLOB, + HAL_PIXEL_FORMAT_YCbCr_420_888, + HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}}; + + for (const auto& supportedFormat : mSupportedFormats) { + for (const auto& format : halFormats) { + streamConfigurations.push_back(format); + streamConfigurations.push_back(supportedFormat.width); + streamConfigurations.push_back(supportedFormat.height); + streamConfigurations.push_back( + ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT); + } + + int64_t minFrameDuration = std::numeric_limits::max(); + for (const auto& fr : supportedFormat.frameRates) { + // 1000000000LL < (2^32 - 1) and + // fr.durationNumerator is uint32_t, so no overflow here + int64_t frameDuration = 1000000000LL * fr.durationNumerator / + fr.durationDenominator; + if (frameDuration < minFrameDuration) { + minFrameDuration = frameDuration; + } + int32_t frameRateInt = static_cast(fr.getDouble()); + if (minFps > frameRateInt) { + minFps = frameRateInt; + } + if (maxFps < frameRateInt) { + maxFps = frameRateInt; + } + framerates.insert(frameRateInt); + } + + for (const auto& format : halFormats) { + minFrameDurations.push_back(format); + minFrameDurations.push_back(supportedFormat.width); + minFrameDurations.push_back(supportedFormat.height); + minFrameDurations.push_back(minFrameDuration); + } + + // The stall duration is 0 for non-jpeg formats. For JPEG format, stall + // duration can be 0 if JPEG is small. Here we choose 1 sec for JPEG. + // TODO: b/72261675. 
Maybe set this dynamically + for (const auto& format : halFormats) { + const int64_t NS_TO_SECOND = 1000000000; + int64_t stall_duration = + (format == HAL_PIXEL_FORMAT_BLOB) ? NS_TO_SECOND : 0; + stallDurations.push_back(format); + stallDurations.push_back(supportedFormat.width); + stallDurations.push_back(supportedFormat.height); + stallDurations.push_back(stall_duration); + } + } + + std::vector fpsRanges; + // FPS ranges + for (const auto& framerate : framerates) { + // Empirical: webcams often have close to 2x fps error and cannot support fixed fps range + fpsRanges.push_back(framerate / 2); + fpsRanges.push_back(framerate); + } + minFps /= 2; + int64_t maxFrameDuration = 1000000000LL / minFps; + + UPDATE(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, fpsRanges.data(), + fpsRanges.size()); + + UPDATE(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, + streamConfigurations.data(), streamConfigurations.size()); + + UPDATE(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, + minFrameDurations.data(), minFrameDurations.size()); + + UPDATE(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, stallDurations.data(), + stallDurations.size()); + + UPDATE(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, &maxFrameDuration, 1); + + SupportedV4L2Format maximumFormat {.width = 0, .height = 0}; + for (const auto& supportedFormat : mSupportedFormats) { + if (supportedFormat.width >= maximumFormat.width && + supportedFormat.height >= maximumFormat.height) { + maximumFormat = supportedFormat; + } + } + int32_t activeArraySize[] = {0, 0, + static_cast(maximumFormat.width), + static_cast(maximumFormat.height)}; + UPDATE(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, + activeArraySize, ARRAY_SIZE(activeArraySize)); + UPDATE(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, activeArraySize, + ARRAY_SIZE(activeArraySize)); + + int32_t pixelArraySize[] = {static_cast(maximumFormat.width), + static_cast(maximumFormat.height)}; + UPDATE(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, pixelArraySize, + ARRAY_SIZE(pixelArraySize)); + return OK; +} + +#undef ARRAY_SIZE +#undef UPDATE + +void ExternalCameraDevice::getFrameRateList( + int fd, double fpsUpperBound, SupportedV4L2Format* format) { + format->frameRates.clear(); + + v4l2_frmivalenum frameInterval { + .pixel_format = format->fourcc, + .width = format->width, + .height = format->height, + .index = 0 + }; + + for (frameInterval.index = 0; + TEMP_FAILURE_RETRY(ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &frameInterval)) == 0; + ++frameInterval.index) { + if (frameInterval.type == V4L2_FRMIVAL_TYPE_DISCRETE) { + if (frameInterval.discrete.numerator != 0) { + SupportedV4L2Format::FrameRate fr = { + frameInterval.discrete.numerator, + frameInterval.discrete.denominator}; + double framerate = fr.getDouble(); + if (framerate > fpsUpperBound) { + continue; + } + ALOGV("index:%d, format:%c%c%c%c, w %d, h %d, framerate %f", + frameInterval.index, + frameInterval.pixel_format & 0xFF, + (frameInterval.pixel_format >> 8) & 0xFF, + (frameInterval.pixel_format >> 16) & 0xFF, + (frameInterval.pixel_format >> 24) & 0xFF, + frameInterval.width, frameInterval.height, framerate); + format->frameRates.push_back(fr); + } + } + } + + if (format->frameRates.empty()) { + ALOGE("%s: failed to get supported frame rates for format:%c%c%c%c w %d h %d", + __FUNCTION__, + frameInterval.pixel_format & 0xFF, + (frameInterval.pixel_format >> 8) & 0xFF, + (frameInterval.pixel_format >> 16) & 0xFF, + (frameInterval.pixel_format >> 24) & 0xFF, + frameInterval.width, frameInterval.height); + } +} + +void 
ExternalCameraDevice::trimSupportedFormats( + CroppingType cropType, + /*inout*/std::vector* pFmts) { + std::vector& sortedFmts = *pFmts; + if (cropType == VERTICAL) { + std::sort(sortedFmts.begin(), sortedFmts.end(), + [](const SupportedV4L2Format& a, const SupportedV4L2Format& b) -> bool { + if (a.width == b.width) { + return a.height < b.height; + } + return a.width < b.width; + }); + } else { + std::sort(sortedFmts.begin(), sortedFmts.end(), + [](const SupportedV4L2Format& a, const SupportedV4L2Format& b) -> bool { + if (a.height == b.height) { + return a.width < b.width; + } + return a.height < b.height; + }); + } + + if (sortedFmts.size() == 0) { + ALOGE("%s: input format list is empty!", __FUNCTION__); + return; + } + + const auto& maxSize = sortedFmts[sortedFmts.size() - 1]; + float maxSizeAr = ASPECT_RATIO(maxSize); + + // Remove formats that has aspect ratio not croppable from largest size + std::vector out; + for (const auto& fmt : sortedFmts) { + float ar = ASPECT_RATIO(fmt); + if (isAspectRatioClose(ar, maxSizeAr)) { + out.push_back(fmt); + } else if (cropType == HORIZONTAL && ar < maxSizeAr) { + out.push_back(fmt); + } else if (cropType == VERTICAL && ar > maxSizeAr) { + out.push_back(fmt); + } else { + ALOGV("%s: size (%d,%d) is removed due to unable to crop %s from (%d,%d)", + __FUNCTION__, fmt.width, fmt.height, + cropType == VERTICAL ? "vertically" : "horizontally", + maxSize.width, maxSize.height); + } + } + sortedFmts = out; +} + +std::vector +ExternalCameraDevice::getCandidateSupportedFormatsLocked( + int fd, CroppingType cropType, + const std::vector& fpsLimits) { + std::vector outFmts; + struct v4l2_fmtdesc fmtdesc { + .index = 0, + .type = V4L2_BUF_TYPE_VIDEO_CAPTURE}; + int ret = 0; + while (ret == 0) { + ret = TEMP_FAILURE_RETRY(ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc)); + ALOGV("index:%d,ret:%d, format:%c%c%c%c", fmtdesc.index, ret, + fmtdesc.pixelformat & 0xFF, + (fmtdesc.pixelformat >> 8) & 0xFF, + (fmtdesc.pixelformat >> 16) & 0xFF, + (fmtdesc.pixelformat >> 24) & 0xFF); + if (ret == 0 && !(fmtdesc.flags & V4L2_FMT_FLAG_EMULATED)) { + auto it = std::find ( + kSupportedFourCCs.begin(), kSupportedFourCCs.end(), fmtdesc.pixelformat); + if (it != kSupportedFourCCs.end()) { + // Found supported format + v4l2_frmsizeenum frameSize { + .index = 0, + .pixel_format = fmtdesc.pixelformat}; + for (; TEMP_FAILURE_RETRY(ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frameSize)) == 0; + ++frameSize.index) { + if (frameSize.type == V4L2_FRMSIZE_TYPE_DISCRETE) { + ALOGV("index:%d, format:%c%c%c%c, w %d, h %d", frameSize.index, + fmtdesc.pixelformat & 0xFF, + (fmtdesc.pixelformat >> 8) & 0xFF, + (fmtdesc.pixelformat >> 16) & 0xFF, + (fmtdesc.pixelformat >> 24) & 0xFF, + frameSize.discrete.width, frameSize.discrete.height); + // Disregard h > w formats so all aspect ratio (h/w) <= 1.0 + // This will simplify the crop/scaling logic down the road + if (frameSize.discrete.height > frameSize.discrete.width) { + continue; + } + SupportedV4L2Format format { + .width = frameSize.discrete.width, + .height = frameSize.discrete.height, + .fourcc = fmtdesc.pixelformat + }; + + double fpsUpperBound = -1.0; + for (const auto& limit : fpsLimits) { + if (cropType == VERTICAL) { + if (format.width <= limit.size.width) { + fpsUpperBound = limit.fpsUpperBound; + break; + } + } else { // HORIZONTAL + if (format.height <= limit.size.height) { + fpsUpperBound = limit.fpsUpperBound; + break; + } + } + + } + if (fpsUpperBound < 0.f) { + continue; + } + + getFrameRateList(fd, fpsUpperBound, &format); + if 
(!format.frameRates.empty()) { + outFmts.push_back(format); + } + } + } + } + } + fmtdesc.index++; + } + trimSupportedFormats(cropType, &outFmts); + return outFmts; +} + +void ExternalCameraDevice::initSupportedFormatsLocked(int fd) { + + std::vector horizontalFmts = + getCandidateSupportedFormatsLocked(fd, HORIZONTAL, mCfg.fpsLimits); + std::vector verticalFmts = + getCandidateSupportedFormatsLocked(fd, VERTICAL, mCfg.fpsLimits); + + size_t horiSize = horizontalFmts.size(); + size_t vertSize = verticalFmts.size(); + + if (horiSize == 0 && vertSize == 0) { + ALOGE("%s: cannot find suitable cropping type!", __FUNCTION__); + return; + } + + if (horiSize == 0) { + mSupportedFormats = verticalFmts; + mCroppingType = VERTICAL; + return; + } else if (vertSize == 0) { + mSupportedFormats = horizontalFmts; + mCroppingType = HORIZONTAL; + return; + } + + const auto& maxHoriSize = horizontalFmts[horizontalFmts.size() - 1]; + const auto& maxVertSize = verticalFmts[verticalFmts.size() - 1]; + + // Try to keep largest possible output size + // When they are the same or ambiguous, pick the one support more sizes + if (maxHoriSize.width == maxVertSize.width && + maxHoriSize.height == maxVertSize.height) { + if (horiSize > vertSize) { + mSupportedFormats = horizontalFmts; + mCroppingType = HORIZONTAL; + } else { + mSupportedFormats = verticalFmts; + mCroppingType = VERTICAL; + } + } else if (maxHoriSize.width >= maxVertSize.width && + maxHoriSize.height >= maxVertSize.height) { + mSupportedFormats = horizontalFmts; + mCroppingType = HORIZONTAL; + } else if (maxHoriSize.width <= maxVertSize.width && + maxHoriSize.height <= maxVertSize.height) { + mSupportedFormats = verticalFmts; + mCroppingType = VERTICAL; + } else { + if (horiSize > vertSize) { + mSupportedFormats = horizontalFmts; + mCroppingType = HORIZONTAL; + } else { + mSupportedFormats = verticalFmts; + mCroppingType = VERTICAL; + } + } +} + +} // namespace implementation +} // namespace V3_4 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + diff --git a/camera/device/3.4/default/ExternalCameraDeviceSession.cpp b/camera/device/3.4/default/ExternalCameraDeviceSession.cpp new file mode 100644 index 0000000000000000000000000000000000000000..28ca0f351c87ac5a5196fedb52ba598a7f1e4ab8 --- /dev/null +++ b/camera/device/3.4/default/ExternalCameraDeviceSession.cpp @@ -0,0 +1,2851 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#define LOG_TAG "ExtCamDevSsn@3.4" +//#define LOG_NDEBUG 0 +#define ATRACE_TAG ATRACE_TAG_CAMERA +#include + +#include +#include "ExternalCameraDeviceSession.h" + +#include "android-base/macros.h" +#include +#include +#include +#include + +#define HAVE_JPEG // required for libyuv.h to export MJPEG decode APIs +#include + +#include + + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_4 { +namespace implementation { + +namespace { +// Size of request/result metadata fast message queue. Change to 0 to always use hwbinder buffer. +static constexpr size_t kMetadataMsgQueueSize = 1 << 18 /* 256kB */; + +const int kBadFramesAfterStreamOn = 1; // drop x frames after streamOn to get rid of some initial + // bad frames. TODO: develop a better bad frame detection + // method +constexpr int MAX_RETRY = 15; // Allow retry some ioctl failures a few times to account for some + // webcam showing temporarily ioctl failures. +constexpr int IOCTL_RETRY_SLEEP_US = 33000; // 33ms * MAX_RETRY = 0.5 seconds + +// Constants for tryLock during dumpstate +static constexpr int kDumpLockRetries = 50; +static constexpr int kDumpLockSleep = 60000; + +bool tryLock(Mutex& mutex) +{ + bool locked = false; + for (int i = 0; i < kDumpLockRetries; ++i) { + if (mutex.tryLock() == NO_ERROR) { + locked = true; + break; + } + usleep(kDumpLockSleep); + } + return locked; +} + +bool tryLock(std::mutex& mutex) +{ + bool locked = false; + for (int i = 0; i < kDumpLockRetries; ++i) { + if (mutex.try_lock()) { + locked = true; + break; + } + usleep(kDumpLockSleep); + } + return locked; +} + +} // Anonymous namespace + +// Static instances +const int ExternalCameraDeviceSession::kMaxProcessedStream; +const int ExternalCameraDeviceSession::kMaxStallStream; +HandleImporter ExternalCameraDeviceSession::sHandleImporter; + +ExternalCameraDeviceSession::ExternalCameraDeviceSession( + const sp& callback, + const ExternalCameraConfig& cfg, + const std::vector& sortedFormats, + const CroppingType& croppingType, + const common::V1_0::helper::CameraMetadata& chars, + const std::string& cameraId, + unique_fd v4l2Fd) : + mCallback(callback), + mCfg(cfg), + mCameraCharacteristics(chars), + mSupportedFormats(sortedFormats), + mCroppingType(croppingType), + mCameraId(cameraId), + mV4l2Fd(std::move(v4l2Fd)), + mOutputThread(new OutputThread(this, mCroppingType)), + mMaxThumbResolution(getMaxThumbResolution()), + mMaxJpegResolution(getMaxJpegResolution()) { + mInitFail = initialize(); +} + +bool ExternalCameraDeviceSession::initialize() { + if (mV4l2Fd.get() < 0) { + ALOGE("%s: invalid v4l2 device fd %d!", __FUNCTION__, mV4l2Fd.get()); + return true; + } + + struct v4l2_capability capability; + int ret = ioctl(mV4l2Fd.get(), VIDIOC_QUERYCAP, &capability); + std::string make, model; + if (ret < 0) { + ALOGW("%s v4l2 QUERYCAP failed", __FUNCTION__); + make = "Generic UVC webcam"; + model = "Generic UVC webcam"; + } else { + // capability.card is UTF-8 encoded + char card[32]; + int j = 0; + for (int i = 0; i < 32; i++) { + if (capability.card[i] < 128) { + card[j++] = capability.card[i]; + } + if (capability.card[i] == '\0') { + break; + } + } + if (j == 0 || card[j - 1] != '\0') { + make = "Generic UVC webcam"; + model = "Generic UVC webcam"; + } else { + make = card; + model = card; + } + } + mOutputThread->setExifMakeModel(make, model); + + status_t status = initDefaultRequests(); + if (status != OK) { + ALOGE("%s: init default requests failed!", __FUNCTION__); + return true; + } + + 
mRequestMetadataQueue = std::make_unique( + kMetadataMsgQueueSize, false /* non blocking */); + if (!mRequestMetadataQueue->isValid()) { + ALOGE("%s: invalid request fmq", __FUNCTION__); + return true; + } + mResultMetadataQueue = std::make_shared( + kMetadataMsgQueueSize, false /* non blocking */); + if (!mResultMetadataQueue->isValid()) { + ALOGE("%s: invalid result fmq", __FUNCTION__); + return true; + } + + // TODO: check is PRIORITY_DISPLAY enough? + mOutputThread->run("ExtCamOut", PRIORITY_DISPLAY); + return false; +} + +Status ExternalCameraDeviceSession::initStatus() const { + Mutex::Autolock _l(mLock); + Status status = Status::OK; + if (mInitFail || mClosed) { + ALOGI("%s: sesssion initFailed %d closed %d", __FUNCTION__, mInitFail, mClosed); + status = Status::INTERNAL_ERROR; + } + return status; +} + +ExternalCameraDeviceSession::~ExternalCameraDeviceSession() { + if (!isClosed()) { + ALOGE("ExternalCameraDeviceSession deleted before close!"); + close(); + } +} + + +void ExternalCameraDeviceSession::dumpState(const native_handle_t* handle) { + if (handle->numFds != 1 || handle->numInts != 0) { + ALOGE("%s: handle must contain 1 FD and 0 integers! Got %d FDs and %d ints", + __FUNCTION__, handle->numFds, handle->numInts); + return; + } + int fd = handle->data[0]; + + bool intfLocked = tryLock(mInterfaceLock); + if (!intfLocked) { + dprintf(fd, "!! ExternalCameraDeviceSession interface may be deadlocked !!\n"); + } + + if (isClosed()) { + dprintf(fd, "External camera %s is closed\n", mCameraId.c_str()); + return; + } + + bool streaming = false; + size_t v4L2BufferCount = 0; + SupportedV4L2Format streamingFmt; + { + bool sessionLocked = tryLock(mLock); + if (!sessionLocked) { + dprintf(fd, "!! ExternalCameraDeviceSession mLock may be deadlocked !!\n"); + } + streaming = mV4l2Streaming; + streamingFmt = mV4l2StreamingFmt; + v4L2BufferCount = mV4L2BufferCount; + + if (sessionLocked) { + mLock.unlock(); + } + } + + std::unordered_set inflightFrames; + { + bool iffLocked = tryLock(mInflightFramesLock); + if (!iffLocked) { + dprintf(fd, + "!! ExternalCameraDeviceSession mInflightFramesLock may be deadlocked !!\n"); + } + inflightFrames = mInflightFrames; + if (iffLocked) { + mInflightFramesLock.unlock(); + } + } + + dprintf(fd, "External camera %s V4L2 FD %d, cropping type %s, %s\n", + mCameraId.c_str(), mV4l2Fd.get(), + (mCroppingType == VERTICAL) ? "vertical" : "horizontal", + streaming ? 
"streaming" : "not streaming"); + if (streaming) { + // TODO: dump fps later + dprintf(fd, "Current V4L2 format %c%c%c%c %dx%d @ %ffps\n", + streamingFmt.fourcc & 0xFF, + (streamingFmt.fourcc >> 8) & 0xFF, + (streamingFmt.fourcc >> 16) & 0xFF, + (streamingFmt.fourcc >> 24) & 0xFF, + streamingFmt.width, streamingFmt.height, + mV4l2StreamingFps); + + size_t numDequeuedV4l2Buffers = 0; + { + std::lock_guard lk(mV4l2BufferLock); + numDequeuedV4l2Buffers = mNumDequeuedV4l2Buffers; + } + dprintf(fd, "V4L2 buffer queue size %zu, dequeued %zu\n", + v4L2BufferCount, numDequeuedV4l2Buffers); + } + + dprintf(fd, "In-flight frames (not sorted):"); + for (const auto& frameNumber : inflightFrames) { + dprintf(fd, "%d, ", frameNumber); + } + dprintf(fd, "\n"); + mOutputThread->dump(fd); + dprintf(fd, "\n"); + + if (intfLocked) { + mInterfaceLock.unlock(); + } + + return; +} + +Return ExternalCameraDeviceSession::constructDefaultRequestSettings( + V3_2::RequestTemplate type, + V3_2::ICameraDeviceSession::constructDefaultRequestSettings_cb _hidl_cb) { + V3_2::CameraMetadata outMetadata; + Status status = constructDefaultRequestSettingsRaw( + static_cast(type), &outMetadata); + _hidl_cb(status, outMetadata); + return Void(); +} + +Status ExternalCameraDeviceSession::constructDefaultRequestSettingsRaw(RequestTemplate type, + V3_2::CameraMetadata *outMetadata) { + CameraMetadata emptyMd; + Status status = initStatus(); + if (status != Status::OK) { + return status; + } + + switch (type) { + case RequestTemplate::PREVIEW: + case RequestTemplate::STILL_CAPTURE: + case RequestTemplate::VIDEO_RECORD: + case RequestTemplate::VIDEO_SNAPSHOT: { + *outMetadata = mDefaultRequests[type]; + break; + } + case RequestTemplate::MANUAL: + case RequestTemplate::ZERO_SHUTTER_LAG: + // Don't support MANUAL, ZSL templates + status = Status::ILLEGAL_ARGUMENT; + break; + default: + ALOGE("%s: unknown request template type %d", __FUNCTION__, static_cast(type)); + status = Status::ILLEGAL_ARGUMENT; + break; + } + return status; +} + +Return ExternalCameraDeviceSession::configureStreams( + const V3_2::StreamConfiguration& streams, + ICameraDeviceSession::configureStreams_cb _hidl_cb) { + V3_2::HalStreamConfiguration outStreams; + V3_3::HalStreamConfiguration outStreams_v33; + Mutex::Autolock _il(mInterfaceLock); + + Status status = configureStreams(streams, &outStreams_v33); + size_t size = outStreams_v33.streams.size(); + outStreams.streams.resize(size); + for (size_t i = 0; i < size; i++) { + outStreams.streams[i] = outStreams_v33.streams[i].v3_2; + } + _hidl_cb(status, outStreams); + return Void(); +} + +Return ExternalCameraDeviceSession::configureStreams_3_3( + const V3_2::StreamConfiguration& streams, + ICameraDeviceSession::configureStreams_3_3_cb _hidl_cb) { + V3_3::HalStreamConfiguration outStreams; + Mutex::Autolock _il(mInterfaceLock); + + Status status = configureStreams(streams, &outStreams); + _hidl_cb(status, outStreams); + return Void(); +} + +Return ExternalCameraDeviceSession::configureStreams_3_4( + const V3_4::StreamConfiguration& requestedConfiguration, + ICameraDeviceSession::configureStreams_3_4_cb _hidl_cb) { + V3_2::StreamConfiguration config_v32; + V3_3::HalStreamConfiguration outStreams_v33; + Mutex::Autolock _il(mInterfaceLock); + + config_v32.operationMode = requestedConfiguration.operationMode; + config_v32.streams.resize(requestedConfiguration.streams.size()); + for (size_t i = 0; i < config_v32.streams.size(); i++) { + config_v32.streams[i] = requestedConfiguration.streams[i].v3_2; + } + + Status 
status = configureStreams(config_v32, &outStreams_v33); + + V3_4::HalStreamConfiguration outStreams; + outStreams.streams.resize(outStreams_v33.streams.size()); + for (size_t i = 0; i < outStreams.streams.size(); i++) { + outStreams.streams[i].v3_3 = outStreams_v33.streams[i]; + } + _hidl_cb(status, outStreams); + return Void(); +} + +Return ExternalCameraDeviceSession::getCaptureRequestMetadataQueue( + ICameraDeviceSession::getCaptureRequestMetadataQueue_cb _hidl_cb) { + Mutex::Autolock _il(mInterfaceLock); + _hidl_cb(*mRequestMetadataQueue->getDesc()); + return Void(); +} + +Return ExternalCameraDeviceSession::getCaptureResultMetadataQueue( + ICameraDeviceSession::getCaptureResultMetadataQueue_cb _hidl_cb) { + Mutex::Autolock _il(mInterfaceLock); + _hidl_cb(*mResultMetadataQueue->getDesc()); + return Void(); +} + +Return ExternalCameraDeviceSession::processCaptureRequest( + const hidl_vec& requests, + const hidl_vec& cachesToRemove, + ICameraDeviceSession::processCaptureRequest_cb _hidl_cb) { + Mutex::Autolock _il(mInterfaceLock); + updateBufferCaches(cachesToRemove); + + uint32_t numRequestProcessed = 0; + Status s = Status::OK; + for (size_t i = 0; i < requests.size(); i++, numRequestProcessed++) { + s = processOneCaptureRequest(requests[i]); + if (s != Status::OK) { + break; + } + } + + _hidl_cb(s, numRequestProcessed); + return Void(); +} + +Return ExternalCameraDeviceSession::processCaptureRequest_3_4( + const hidl_vec& requests, + const hidl_vec& cachesToRemove, + ICameraDeviceSession::processCaptureRequest_3_4_cb _hidl_cb) { + Mutex::Autolock _il(mInterfaceLock); + updateBufferCaches(cachesToRemove); + + uint32_t numRequestProcessed = 0; + Status s = Status::OK; + for (size_t i = 0; i < requests.size(); i++, numRequestProcessed++) { + s = processOneCaptureRequest(requests[i].v3_2); + if (s != Status::OK) { + break; + } + } + + _hidl_cb(s, numRequestProcessed); + return Void(); +} + +Return ExternalCameraDeviceSession::flush() { + ATRACE_CALL(); + Mutex::Autolock _il(mInterfaceLock); + Status status = initStatus(); + if (status != Status::OK) { + return status; + } + mOutputThread->flush(); + return Status::OK; +} + +Return ExternalCameraDeviceSession::close() { + Mutex::Autolock _il(mInterfaceLock); + bool closed = isClosed(); + if (!closed) { + mOutputThread->flush(); + mOutputThread->requestExit(); + mOutputThread->join(); + + Mutex::Autolock _l(mLock); + // free all buffers + for(auto pair : mStreamMap) { + cleanupBuffersLocked(/*Stream ID*/pair.first); + } + v4l2StreamOffLocked(); + ALOGV("%s: closing V4L2 camera FD %d", __FUNCTION__, mV4l2Fd.get()); + mV4l2Fd.reset(); + mClosed = true; + } + return Void(); +} + +Status ExternalCameraDeviceSession::importRequest( + const CaptureRequest& request, + hidl_vec& allBufPtrs, + hidl_vec& allFences) { + size_t numOutputBufs = request.outputBuffers.size(); + size_t numBufs = numOutputBufs; + // Validate all I/O buffers + hidl_vec allBufs; + hidl_vec allBufIds; + allBufs.resize(numBufs); + allBufIds.resize(numBufs); + allBufPtrs.resize(numBufs); + allFences.resize(numBufs); + std::vector streamIds(numBufs); + + for (size_t i = 0; i < numOutputBufs; i++) { + allBufs[i] = request.outputBuffers[i].buffer.getNativeHandle(); + allBufIds[i] = request.outputBuffers[i].bufferId; + allBufPtrs[i] = &allBufs[i]; + streamIds[i] = request.outputBuffers[i].streamId; + } + + for (size_t i = 0; i < numBufs; i++) { + buffer_handle_t buf = allBufs[i]; + uint64_t bufId = allBufIds[i]; + CirculatingBuffers& cbs = mCirculatingBuffers[streamIds[i]]; + if 
(cbs.count(bufId) == 0) { + if (buf == nullptr) { + ALOGE("%s: bufferId %" PRIu64 " has null buffer handle!", __FUNCTION__, bufId); + return Status::ILLEGAL_ARGUMENT; + } + // Register a newly seen buffer + buffer_handle_t importedBuf = buf; + sHandleImporter.importBuffer(importedBuf); + if (importedBuf == nullptr) { + ALOGE("%s: output buffer %zu is invalid!", __FUNCTION__, i); + return Status::INTERNAL_ERROR; + } else { + cbs[bufId] = importedBuf; + } + } + allBufPtrs[i] = &cbs[bufId]; + } + + // All buffers are imported. Now validate output buffer acquire fences + for (size_t i = 0; i < numOutputBufs; i++) { + if (!sHandleImporter.importFence( + request.outputBuffers[i].acquireFence, allFences[i])) { + ALOGE("%s: output buffer %zu acquire fence is invalid", __FUNCTION__, i); + cleanupInflightFences(allFences, i); + return Status::INTERNAL_ERROR; + } + } + return Status::OK; +} + +void ExternalCameraDeviceSession::cleanupInflightFences( + hidl_vec& allFences, size_t numFences) { + for (size_t j = 0; j < numFences; j++) { + sHandleImporter.closeFence(allFences[j]); + } +} + +int ExternalCameraDeviceSession::waitForV4L2BufferReturnLocked(std::unique_lock& lk) { + ATRACE_CALL(); + std::chrono::seconds timeout = std::chrono::seconds(kBufferWaitTimeoutSec); + mLock.unlock(); + auto st = mV4L2BufferReturned.wait_for(lk, timeout); + // Here we introduce a order where mV4l2BufferLock is acquired before mLock, while + // the normal lock acquisition order is reversed. This is fine because in most of + // cases we are protected by mInterfaceLock. The only thread that can cause deadlock + // is the OutputThread, where we do need to make sure we don't acquire mLock then + // mV4l2BufferLock + mLock.lock(); + if (st == std::cv_status::timeout) { + ALOGE("%s: wait for V4L2 buffer return timeout!", __FUNCTION__); + return -1; + } + return 0; +} + +Status ExternalCameraDeviceSession::processOneCaptureRequest(const CaptureRequest& request) { + ATRACE_CALL(); + Status status = initStatus(); + if (status != Status::OK) { + return status; + } + + if (request.inputBuffer.streamId != -1) { + ALOGE("%s: external camera does not support reprocessing!", __FUNCTION__); + return Status::ILLEGAL_ARGUMENT; + } + + Mutex::Autolock _l(mLock); + if (!mV4l2Streaming) { + ALOGE("%s: cannot process request in streamOff state!", __FUNCTION__); + return Status::INTERNAL_ERROR; + } + + const camera_metadata_t *rawSettings = nullptr; + bool converted = true; + CameraMetadata settingsFmq; // settings from FMQ + if (request.fmqSettingsSize > 0) { + // non-blocking read; client must write metadata before calling + // processOneCaptureRequest + settingsFmq.resize(request.fmqSettingsSize); + bool read = mRequestMetadataQueue->read(settingsFmq.data(), request.fmqSettingsSize); + if (read) { + converted = V3_2::implementation::convertFromHidl(settingsFmq, &rawSettings); + } else { + ALOGE("%s: capture request settings metadata couldn't be read from fmq!", __FUNCTION__); + converted = false; + } + } else { + converted = V3_2::implementation::convertFromHidl(request.settings, &rawSettings); + } + + if (converted && rawSettings != nullptr) { + mLatestReqSetting = rawSettings; + } + + if (!converted) { + ALOGE("%s: capture request settings metadata is corrupt!", __FUNCTION__); + return Status::ILLEGAL_ARGUMENT; + } + + if (mFirstRequest && rawSettings == nullptr) { + ALOGE("%s: capture request settings must not be null for first request!", + __FUNCTION__); + return Status::ILLEGAL_ARGUMENT; + } + + hidl_vec allBufPtrs; + hidl_vec 
allFences; + size_t numOutputBufs = request.outputBuffers.size(); + + if (numOutputBufs == 0) { + ALOGE("%s: capture request must have at least one output buffer!", __FUNCTION__); + return Status::ILLEGAL_ARGUMENT; + } + + camera_metadata_entry fpsRange = mLatestReqSetting.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE); + if (fpsRange.count == 2) { + double requestFpsMax = fpsRange.data.i32[1]; + double closestFps = 0.0; + double fpsError = 1000.0; + bool fpsSupported = false; + for (const auto& fr : mV4l2StreamingFmt.frameRates) { + double f = fr.getDouble(); + if (std::fabs(requestFpsMax - f) < 1.0) { + fpsSupported = true; + break; + } + if (std::fabs(requestFpsMax - f) < fpsError) { + fpsError = std::fabs(requestFpsMax - f); + closestFps = f; + } + } + if (!fpsSupported) { + /* This can happen in a few scenarios: + * 1. The application is sending a FPS range not supported by the configured outputs. + * 2. The application is sending a valid FPS range for all cofigured outputs, but + * the selected V4L2 size can only run at slower speed. This should be very rare + * though: for this to happen a sensor needs to support at least 3 different aspect + * ratio outputs, and when (at least) two outputs are both not the main aspect ratio + * of the webcam, a third size that's larger might be picked and runs into this + * issue. + */ + ALOGW("%s: cannot reach fps %d! Will do %f instead", + __FUNCTION__, fpsRange.data.i32[1], closestFps); + requestFpsMax = closestFps; + } + + if (requestFpsMax != mV4l2StreamingFps) { + { + std::unique_lock lk(mV4l2BufferLock); + while (mNumDequeuedV4l2Buffers != 0) { + // Wait until pipeline is idle before reconfigure stream + int waitRet = waitForV4L2BufferReturnLocked(lk); + if (waitRet != 0) { + ALOGE("%s: wait for pipeline idle failed!", __FUNCTION__); + return Status::INTERNAL_ERROR; + } + } + } + configureV4l2StreamLocked(mV4l2StreamingFmt, requestFpsMax); + } + } + + status = importRequest(request, allBufPtrs, allFences); + if (status != Status::OK) { + return status; + } + + nsecs_t shutterTs = 0; + sp frameIn = dequeueV4l2FrameLocked(&shutterTs); + if ( frameIn == nullptr) { + ALOGE("%s: V4L2 deque frame failed!", __FUNCTION__); + return Status::INTERNAL_ERROR; + } + + std::shared_ptr halReq = std::make_shared(); + halReq->frameNumber = request.frameNumber; + halReq->setting = mLatestReqSetting; + halReq->frameIn = frameIn; + halReq->shutterTs = shutterTs; + halReq->buffers.resize(numOutputBufs); + for (size_t i = 0; i < numOutputBufs; i++) { + HalStreamBuffer& halBuf = halReq->buffers[i]; + int streamId = halBuf.streamId = request.outputBuffers[i].streamId; + halBuf.bufferId = request.outputBuffers[i].bufferId; + const Stream& stream = mStreamMap[streamId]; + halBuf.width = stream.width; + halBuf.height = stream.height; + halBuf.format = stream.format; + halBuf.usage = stream.usage; + halBuf.bufPtr = allBufPtrs[i]; + halBuf.acquireFence = allFences[i]; + halBuf.fenceTimeout = false; + } + { + std::lock_guard lk(mInflightFramesLock); + mInflightFrames.insert(halReq->frameNumber); + } + // Send request to OutputThread for the rest of processing + mOutputThread->submitRequest(halReq); + mFirstRequest = false; + return Status::OK; +} + +void ExternalCameraDeviceSession::notifyShutter(uint32_t frameNumber, nsecs_t shutterTs) { + NotifyMsg msg; + msg.type = MsgType::SHUTTER; + msg.msg.shutter.frameNumber = frameNumber; + msg.msg.shutter.timestamp = shutterTs; + mCallback->notify({msg}); +} + +void ExternalCameraDeviceSession::notifyError( + uint32_t frameNumber, 
int32_t streamId, ErrorCode ec) { + NotifyMsg msg; + msg.type = MsgType::ERROR; + msg.msg.error.frameNumber = frameNumber; + msg.msg.error.errorStreamId = streamId; + msg.msg.error.errorCode = ec; + mCallback->notify({msg}); +} + +//TODO: refactor with processCaptureResult +Status ExternalCameraDeviceSession::processCaptureRequestError( + const std::shared_ptr& req) { + ATRACE_CALL(); + // Return V4L2 buffer to V4L2 buffer queue + enqueueV4l2Frame(req->frameIn); + + // NotifyShutter + notifyShutter(req->frameNumber, req->shutterTs); + + notifyError(/*frameNum*/req->frameNumber, /*stream*/-1, ErrorCode::ERROR_REQUEST); + + // Fill output buffers + hidl_vec results; + results.resize(1); + CaptureResult& result = results[0]; + result.frameNumber = req->frameNumber; + result.partialResult = 1; + result.inputBuffer.streamId = -1; + result.outputBuffers.resize(req->buffers.size()); + for (size_t i = 0; i < req->buffers.size(); i++) { + result.outputBuffers[i].streamId = req->buffers[i].streamId; + result.outputBuffers[i].bufferId = req->buffers[i].bufferId; + result.outputBuffers[i].status = BufferStatus::ERROR; + if (req->buffers[i].acquireFence >= 0) { + native_handle_t* handle = native_handle_create(/*numFds*/1, /*numInts*/0); + handle->data[0] = req->buffers[i].acquireFence; + result.outputBuffers[i].releaseFence.setTo(handle, /*shouldOwn*/false); + } + } + + // update inflight records + { + std::lock_guard lk(mInflightFramesLock); + mInflightFrames.erase(req->frameNumber); + } + + // Callback into framework + invokeProcessCaptureResultCallback(results, /* tryWriteFmq */true); + freeReleaseFences(results); + return Status::OK; +} + +Status ExternalCameraDeviceSession::processCaptureResult(std::shared_ptr& req) { + ATRACE_CALL(); + // Return V4L2 buffer to V4L2 buffer queue + enqueueV4l2Frame(req->frameIn); + + // NotifyShutter + notifyShutter(req->frameNumber, req->shutterTs); + + // Fill output buffers + hidl_vec results; + results.resize(1); + CaptureResult& result = results[0]; + result.frameNumber = req->frameNumber; + result.partialResult = 1; + result.inputBuffer.streamId = -1; + result.outputBuffers.resize(req->buffers.size()); + for (size_t i = 0; i < req->buffers.size(); i++) { + result.outputBuffers[i].streamId = req->buffers[i].streamId; + result.outputBuffers[i].bufferId = req->buffers[i].bufferId; + if (req->buffers[i].fenceTimeout) { + result.outputBuffers[i].status = BufferStatus::ERROR; + native_handle_t* handle = native_handle_create(/*numFds*/1, /*numInts*/0); + handle->data[0] = req->buffers[i].acquireFence; + result.outputBuffers[i].releaseFence.setTo(handle, /*shouldOwn*/false); + notifyError(req->frameNumber, req->buffers[i].streamId, ErrorCode::ERROR_BUFFER); + } else { + result.outputBuffers[i].status = BufferStatus::OK; + // TODO: refactor + if (req->buffers[i].acquireFence > 0) { + native_handle_t* handle = native_handle_create(/*numFds*/1, /*numInts*/0); + handle->data[0] = req->buffers[i].acquireFence; + result.outputBuffers[i].releaseFence.setTo(handle, /*shouldOwn*/false); + } + } + } + + // Fill capture result metadata + fillCaptureResult(req->setting, req->shutterTs); + const camera_metadata_t *rawResult = req->setting.getAndLock(); + V3_2::implementation::convertToHidl(rawResult, &result.result); + req->setting.unlock(rawResult); + + // update inflight records + { + std::lock_guard lk(mInflightFramesLock); + mInflightFrames.erase(req->frameNumber); + } + + // Callback into framework + invokeProcessCaptureResultCallback(results, /* tryWriteFmq */true); + 
freeReleaseFences(results); + return Status::OK; +} + +void ExternalCameraDeviceSession::invokeProcessCaptureResultCallback( + hidl_vec &results, bool tryWriteFmq) { + if (mProcessCaptureResultLock.tryLock() != OK) { + const nsecs_t NS_TO_SECOND = 1000000000; + ALOGV("%s: previous call is not finished! waiting 1s...", __FUNCTION__); + if (mProcessCaptureResultLock.timedLock(/* 1s */NS_TO_SECOND) != OK) { + ALOGE("%s: cannot acquire lock in 1s, cannot proceed", + __FUNCTION__); + return; + } + } + if (tryWriteFmq && mResultMetadataQueue->availableToWrite() > 0) { + for (CaptureResult &result : results) { + if (result.result.size() > 0) { + if (mResultMetadataQueue->write(result.result.data(), result.result.size())) { + result.fmqResultSize = result.result.size(); + result.result.resize(0); + } else { + ALOGW("%s: couldn't utilize fmq, fall back to hwbinder", __FUNCTION__); + result.fmqResultSize = 0; + } + } else { + result.fmqResultSize = 0; + } + } + } + auto status = mCallback->processCaptureResult(results); + if (!status.isOk()) { + ALOGE("%s: processCaptureResult ERROR : %s", __FUNCTION__, + status.description().c_str()); + } + + mProcessCaptureResultLock.unlock(); +} + +void ExternalCameraDeviceSession::freeReleaseFences(hidl_vec& results) { + for (auto& result : results) { + if (result.inputBuffer.releaseFence.getNativeHandle() != nullptr) { + native_handle_t* handle = const_cast( + result.inputBuffer.releaseFence.getNativeHandle()); + native_handle_close(handle); + native_handle_delete(handle); + } + for (auto& buf : result.outputBuffers) { + if (buf.releaseFence.getNativeHandle() != nullptr) { + native_handle_t* handle = const_cast( + buf.releaseFence.getNativeHandle()); + native_handle_close(handle); + native_handle_delete(handle); + } + } + } + return; +} + +ExternalCameraDeviceSession::OutputThread::OutputThread( + wp parent, + CroppingType ct) : mParent(parent), mCroppingType(ct) {} + +ExternalCameraDeviceSession::OutputThread::~OutputThread() {} + +void ExternalCameraDeviceSession::OutputThread::setExifMakeModel( + const std::string& make, const std::string& model) { + mExifMake = make; + mExifModel = model; +} + +uint32_t ExternalCameraDeviceSession::OutputThread::getFourCcFromLayout( + const YCbCrLayout& layout) { + intptr_t cb = reinterpret_cast(layout.cb); + intptr_t cr = reinterpret_cast(layout.cr); + if (std::abs(cb - cr) == 1 && layout.chromaStep == 2) { + // Interleaved format + if (layout.cb > layout.cr) { + return V4L2_PIX_FMT_NV21; + } else { + return V4L2_PIX_FMT_NV12; + } + } else if (layout.chromaStep == 1) { + // Planar format + if (layout.cb > layout.cr) { + return V4L2_PIX_FMT_YVU420; // YV12 + } else { + return V4L2_PIX_FMT_YUV420; // YU12 + } + } else { + return FLEX_YUV_GENERIC; + } +} + +int ExternalCameraDeviceSession::OutputThread::getCropRect( + CroppingType ct, const Size& inSize, const Size& outSize, IMapper::Rect* out) { + if (out == nullptr) { + ALOGE("%s: out is null", __FUNCTION__); + return -1; + } + + uint32_t inW = inSize.width; + uint32_t inH = inSize.height; + uint32_t outW = outSize.width; + uint32_t outH = outSize.height; + + // Handle special case where aspect ratio is close to input but scaled + // dimension is slightly larger than input + float arIn = ASPECT_RATIO(inSize); + float arOut = ASPECT_RATIO(outSize); + if (isAspectRatioClose(arIn, arOut)) { + out->left = 0; + out->top = 0; + out->width = inW; + out->height = inH; + return 0; + } + + if (ct == VERTICAL) { + uint64_t scaledOutH = static_cast(outH) * inW / outW; + if (scaledOutH 
> inH) { + ALOGE("%s: Output size %dx%d cannot be vertically cropped from input size %dx%d", + __FUNCTION__, outW, outH, inW, inH); + return -1; + } + scaledOutH = scaledOutH & ~0x1; // make it multiple of 2 + + out->left = 0; + out->top = ((inH - scaledOutH) / 2) & ~0x1; + out->width = inW; + out->height = static_cast(scaledOutH); + ALOGV("%s: crop %dx%d to %dx%d: top %d, scaledH %d", + __FUNCTION__, inW, inH, outW, outH, out->top, static_cast(scaledOutH)); + } else { + uint64_t scaledOutW = static_cast(outW) * inH / outH; + if (scaledOutW > inW) { + ALOGE("%s: Output size %dx%d cannot be horizontally cropped from input size %dx%d", + __FUNCTION__, outW, outH, inW, inH); + return -1; + } + scaledOutW = scaledOutW & ~0x1; // make it multiple of 2 + + out->left = ((inW - scaledOutW) / 2) & ~0x1; + out->top = 0; + out->width = static_cast(scaledOutW); + out->height = inH; + ALOGV("%s: crop %dx%d to %dx%d: top %d, scaledW %d", + __FUNCTION__, inW, inH, outW, outH, out->top, static_cast(scaledOutW)); + } + + return 0; +} + +int ExternalCameraDeviceSession::OutputThread::cropAndScaleLocked( + sp& in, const Size& outSz, YCbCrLayout* out) { + Size inSz = {in->mWidth, in->mHeight}; + + int ret; + if (inSz == outSz) { + ret = in->getLayout(out); + if (ret != 0) { + ALOGE("%s: failed to get input image layout", __FUNCTION__); + return ret; + } + return ret; + } + + // Cropping to output aspect ratio + IMapper::Rect inputCrop; + ret = getCropRect(mCroppingType, inSz, outSz, &inputCrop); + if (ret != 0) { + ALOGE("%s: failed to compute crop rect for output size %dx%d", + __FUNCTION__, outSz.width, outSz.height); + return ret; + } + + YCbCrLayout croppedLayout; + ret = in->getCroppedLayout(inputCrop, &croppedLayout); + if (ret != 0) { + ALOGE("%s: failed to crop input image %dx%d to output size %dx%d", + __FUNCTION__, inSz.width, inSz.height, outSz.width, outSz.height); + return ret; + } + + if ((mCroppingType == VERTICAL && inSz.width == outSz.width) || + (mCroppingType == HORIZONTAL && inSz.height == outSz.height)) { + // No scale is needed + *out = croppedLayout; + return 0; + } + + auto it = mScaledYu12Frames.find(outSz); + sp scaledYu12Buf; + if (it != mScaledYu12Frames.end()) { + scaledYu12Buf = it->second; + } else { + it = mIntermediateBuffers.find(outSz); + if (it == mIntermediateBuffers.end()) { + ALOGE("%s: failed to find intermediate buffer size %dx%d", + __FUNCTION__, outSz.width, outSz.height); + return -1; + } + scaledYu12Buf = it->second; + } + // Scale + YCbCrLayout outLayout; + ret = scaledYu12Buf->getLayout(&outLayout); + if (ret != 0) { + ALOGE("%s: failed to get output buffer layout", __FUNCTION__); + return ret; + } + + ret = libyuv::I420Scale( + static_cast(croppedLayout.y), + croppedLayout.yStride, + static_cast(croppedLayout.cb), + croppedLayout.cStride, + static_cast(croppedLayout.cr), + croppedLayout.cStride, + inputCrop.width, + inputCrop.height, + static_cast(outLayout.y), + outLayout.yStride, + static_cast(outLayout.cb), + outLayout.cStride, + static_cast(outLayout.cr), + outLayout.cStride, + outSz.width, + outSz.height, + // TODO: b/72261744 see if we can use better filter without losing too much perf + libyuv::FilterMode::kFilterNone); + + if (ret != 0) { + ALOGE("%s: failed to scale buffer from %dx%d to %dx%d. 
Ret %d", + __FUNCTION__, inputCrop.width, inputCrop.height, + outSz.width, outSz.height, ret); + return ret; + } + + *out = outLayout; + mScaledYu12Frames.insert({outSz, scaledYu12Buf}); + return 0; +} + + +int ExternalCameraDeviceSession::OutputThread::cropAndScaleThumbLocked( + sp& in, const Size &outSz, YCbCrLayout* out) { + Size inSz {in->mWidth, in->mHeight}; + + if ((outSz.width * outSz.height) > + (mYu12ThumbFrame->mWidth * mYu12ThumbFrame->mHeight)) { + ALOGE("%s: Requested thumbnail size too big (%d,%d) > (%d,%d)", + __FUNCTION__, outSz.width, outSz.height, + mYu12ThumbFrame->mWidth, mYu12ThumbFrame->mHeight); + return -1; + } + + int ret; + + /* This will crop-and-zoom the input YUV frame to the thumbnail size + * Based on the following logic: + * 1) Square pixels come in, square pixels come out, therefore single + * scale factor is computed to either make input bigger or smaller + * depending on if we are upscaling or downscaling + * 2) That single scale factor would either make height too tall or width + * too wide so we need to crop the input either horizontally or vertically + * but not both + */ + + /* Convert the input and output dimensions into floats for ease of math */ + float fWin = static_cast(inSz.width); + float fHin = static_cast(inSz.height); + float fWout = static_cast(outSz.width); + float fHout = static_cast(outSz.height); + + /* Compute the one scale factor from (1) above, it will be the smaller of + * the two possibilities. */ + float scaleFactor = std::min( fHin / fHout, fWin / fWout ); + + /* Since we are crop-and-zooming (as opposed to letter/pillar boxing) we can + * simply multiply the output by our scaleFactor to get the cropped input + * size. Note that at least one of {fWcrop, fHcrop} is going to wind up + * being {fWin, fHin} respectively because fHout or fWout cancels out the + * scaleFactor calculation above. 
+ * + * Specifically: + * if ( fHin / fHout ) < ( fWin / fWout ) we crop the sides off + * input, in which case + * scaleFactor = fHin / fHout + * fWcrop = fHin / fHout * fWout + * fHcrop = fHin + * + * Note that fWcrop <= fWin ( because ( fHin / fHout ) * fWout < fWin, which + * is just the inequality above with both sides multiplied by fWout + * + * on the other hand if ( fWin / fWout ) < ( fHin / fHout) we crop the top + * and the bottom off of input, and + * scaleFactor = fWin / fWout + * fWcrop = fWin + * fHCrop = fWin / fWout * fHout + */ + float fWcrop = scaleFactor * fWout; + float fHcrop = scaleFactor * fHout; + + /* Convert to integer and truncate to an even number */ + Size cropSz = { 2*static_cast(fWcrop/2.0f), + 2*static_cast(fHcrop/2.0f) }; + + /* Convert to a centered rectange with even top/left */ + IMapper::Rect inputCrop { + 2*static_cast((inSz.width - cropSz.width)/4), + 2*static_cast((inSz.height - cropSz.height)/4), + static_cast(cropSz.width), + static_cast(cropSz.height) }; + + if ((inputCrop.top < 0) || + (inputCrop.top >= static_cast(inSz.height)) || + (inputCrop.left < 0) || + (inputCrop.left >= static_cast(inSz.width)) || + (inputCrop.width <= 0) || + (inputCrop.width + inputCrop.left > static_cast(inSz.width)) || + (inputCrop.height <= 0) || + (inputCrop.height + inputCrop.top > static_cast(inSz.height))) + { + ALOGE("%s: came up with really wrong crop rectangle",__FUNCTION__); + ALOGE("%s: input layout %dx%d to for output size %dx%d", + __FUNCTION__, inSz.width, inSz.height, outSz.width, outSz.height); + ALOGE("%s: computed input crop +%d,+%d %dx%d", + __FUNCTION__, inputCrop.left, inputCrop.top, + inputCrop.width, inputCrop.height); + return -1; + } + + YCbCrLayout inputLayout; + ret = in->getCroppedLayout(inputCrop, &inputLayout); + if (ret != 0) { + ALOGE("%s: failed to crop input layout %dx%d to for output size %dx%d", + __FUNCTION__, inSz.width, inSz.height, outSz.width, outSz.height); + ALOGE("%s: computed input crop +%d,+%d %dx%d", + __FUNCTION__, inputCrop.left, inputCrop.top, + inputCrop.width, inputCrop.height); + return ret; + } + ALOGV("%s: crop input layout %dx%d to for output size %dx%d", + __FUNCTION__, inSz.width, inSz.height, outSz.width, outSz.height); + ALOGV("%s: computed input crop +%d,+%d %dx%d", + __FUNCTION__, inputCrop.left, inputCrop.top, + inputCrop.width, inputCrop.height); + + + // Scale + YCbCrLayout outFullLayout; + + ret = mYu12ThumbFrame->getLayout(&outFullLayout); + if (ret != 0) { + ALOGE("%s: failed to get output buffer layout", __FUNCTION__); + return ret; + } + + + ret = libyuv::I420Scale( + static_cast(inputLayout.y), + inputLayout.yStride, + static_cast(inputLayout.cb), + inputLayout.cStride, + static_cast(inputLayout.cr), + inputLayout.cStride, + inputCrop.width, + inputCrop.height, + static_cast(outFullLayout.y), + outFullLayout.yStride, + static_cast(outFullLayout.cb), + outFullLayout.cStride, + static_cast(outFullLayout.cr), + outFullLayout.cStride, + outSz.width, + outSz.height, + libyuv::FilterMode::kFilterNone); + + if (ret != 0) { + ALOGE("%s: failed to scale buffer from %dx%d to %dx%d. 
Ret %d", + __FUNCTION__, inputCrop.width, inputCrop.height, + outSz.width, outSz.height, ret); + return ret; + } + + *out = outFullLayout; + return 0; +} + +int ExternalCameraDeviceSession::OutputThread::formatConvertLocked( + const YCbCrLayout& in, const YCbCrLayout& out, Size sz, uint32_t format) { + int ret = 0; + switch (format) { + case V4L2_PIX_FMT_NV21: + ret = libyuv::I420ToNV21( + static_cast(in.y), + in.yStride, + static_cast(in.cb), + in.cStride, + static_cast(in.cr), + in.cStride, + static_cast(out.y), + out.yStride, + static_cast(out.cr), + out.cStride, + sz.width, + sz.height); + if (ret != 0) { + ALOGE("%s: convert to NV21 buffer failed! ret %d", + __FUNCTION__, ret); + return ret; + } + break; + case V4L2_PIX_FMT_NV12: + ret = libyuv::I420ToNV12( + static_cast(in.y), + in.yStride, + static_cast(in.cb), + in.cStride, + static_cast(in.cr), + in.cStride, + static_cast(out.y), + out.yStride, + static_cast(out.cb), + out.cStride, + sz.width, + sz.height); + if (ret != 0) { + ALOGE("%s: convert to NV12 buffer failed! ret %d", + __FUNCTION__, ret); + return ret; + } + break; + case V4L2_PIX_FMT_YVU420: // YV12 + case V4L2_PIX_FMT_YUV420: // YU12 + // TODO: maybe we can speed up here by somehow save this copy? + ret = libyuv::I420Copy( + static_cast(in.y), + in.yStride, + static_cast(in.cb), + in.cStride, + static_cast(in.cr), + in.cStride, + static_cast(out.y), + out.yStride, + static_cast(out.cb), + out.cStride, + static_cast(out.cr), + out.cStride, + sz.width, + sz.height); + if (ret != 0) { + ALOGE("%s: copy to YV12 or YU12 buffer failed! ret %d", + __FUNCTION__, ret); + return ret; + } + break; + case FLEX_YUV_GENERIC: + // TODO: b/72261744 write to arbitrary flexible YUV layout. Slow. + ALOGE("%s: unsupported flexible yuv layout" + " y %p cb %p cr %p y_str %d c_str %d c_step %d", + __FUNCTION__, out.y, out.cb, out.cr, + out.yStride, out.cStride, out.chromaStep); + return -1; + default: + ALOGE("%s: unknown YUV format 0x%x!", __FUNCTION__, format); + return -1; + } + return 0; +} + +int ExternalCameraDeviceSession::OutputThread::encodeJpegYU12( + const Size & inSz, const YCbCrLayout& inLayout, + int jpegQuality, const void *app1Buffer, size_t app1Size, + void *out, const size_t maxOutSize, size_t &actualCodeSize) +{ + /* libjpeg is a C library so we use C-style "inheritance" by + * putting libjpeg's jpeg_destination_mgr first in our custom + * struct. 
This allows us to cast jpeg_destination_mgr* to + * CustomJpegDestMgr* when we get it passed to us in a callback */ + struct CustomJpegDestMgr { + struct jpeg_destination_mgr mgr; + JOCTET *mBuffer; + size_t mBufferSize; + size_t mEncodedSize; + bool mSuccess; + } dmgr; + + jpeg_compress_struct cinfo = {}; + jpeg_error_mgr jerr; + + /* Initialize error handling with standard callbacks, but + * then override output_message (to print to ALOG) and + * error_exit to set a flag and print a message instead + * of killing the whole process */ + cinfo.err = jpeg_std_error(&jerr); + + cinfo.err->output_message = [](j_common_ptr cinfo) { + char buffer[JMSG_LENGTH_MAX]; + + /* Create the message */ + (*cinfo->err->format_message)(cinfo, buffer); + ALOGE("libjpeg error: %s", buffer); + }; + cinfo.err->error_exit = [](j_common_ptr cinfo) { + (*cinfo->err->output_message)(cinfo); + if(cinfo->client_data) { + auto & dmgr = + *reinterpret_cast(cinfo->client_data); + dmgr.mSuccess = false; + } + }; + /* Now that we initialized some callbacks, let's create our compressor */ + jpeg_create_compress(&cinfo); + + /* Initialize our destination manager */ + dmgr.mBuffer = static_cast(out); + dmgr.mBufferSize = maxOutSize; + dmgr.mEncodedSize = 0; + dmgr.mSuccess = true; + cinfo.client_data = static_cast(&dmgr); + + /* These lambdas become C-style function pointers and as per C++11 spec + * may not capture anything */ + dmgr.mgr.init_destination = [](j_compress_ptr cinfo) { + auto & dmgr = reinterpret_cast(*cinfo->dest); + dmgr.mgr.next_output_byte = dmgr.mBuffer; + dmgr.mgr.free_in_buffer = dmgr.mBufferSize; + ALOGV("%s:%d jpeg start: %p [%zu]", + __FUNCTION__, __LINE__, dmgr.mBuffer, dmgr.mBufferSize); + }; + + dmgr.mgr.empty_output_buffer = [](j_compress_ptr cinfo __unused) { + ALOGV("%s:%d Out of buffer", __FUNCTION__, __LINE__); + return 0; + }; + + dmgr.mgr.term_destination = [](j_compress_ptr cinfo) { + auto & dmgr = reinterpret_cast(*cinfo->dest); + dmgr.mEncodedSize = dmgr.mBufferSize - dmgr.mgr.free_in_buffer; + ALOGV("%s:%d Done with jpeg: %zu", __FUNCTION__, __LINE__, dmgr.mEncodedSize); + }; + cinfo.dest = reinterpret_cast(&dmgr); + + /* We are going to be using JPEG in raw data mode, so we are passing + * straight subsampled planar YCbCr and it will not touch our pixel + * data or do any scaling or anything */ + cinfo.image_width = inSz.width; + cinfo.image_height = inSz.height; + cinfo.input_components = 3; + cinfo.in_color_space = JCS_YCbCr; + + /* Initialize defaults and then override what we want */ + jpeg_set_defaults(&cinfo); + + jpeg_set_quality(&cinfo, jpegQuality, 1); + jpeg_set_colorspace(&cinfo, JCS_YCbCr); + cinfo.raw_data_in = 1; + cinfo.dct_method = JDCT_IFAST; + + /* Configure sampling factors. The sampling factor is JPEG subsampling 420 + * because the source format is YUV420. Note that libjpeg sampling factors + * are... a little weird. Sampling of Y=2,U=1,V=1 means there is 1 U and + * 1 V value for each 2 Y values */ + cinfo.comp_info[0].h_samp_factor = 2; + cinfo.comp_info[0].v_samp_factor = 2; + cinfo.comp_info[1].h_samp_factor = 1; + cinfo.comp_info[1].v_samp_factor = 1; + cinfo.comp_info[2].h_samp_factor = 1; + cinfo.comp_info[2].v_samp_factor = 1; + + /* Let's not hardcode YUV420 in 6 places... 
5 was enough */ + int maxVSampFactor = std::max( { + cinfo.comp_info[0].v_samp_factor, + cinfo.comp_info[1].v_samp_factor, + cinfo.comp_info[2].v_samp_factor + }); + int cVSubSampling = cinfo.comp_info[0].v_samp_factor / + cinfo.comp_info[1].v_samp_factor; + + /* Start the compressor */ + jpeg_start_compress(&cinfo, TRUE); + + /* Compute our macroblock height, so we can pad our input to be vertically + * macroblock aligned. + * TODO: Does it need to be horizontally MCU aligned too? */ + + size_t mcuV = DCTSIZE*maxVSampFactor; + size_t paddedHeight = mcuV * ((inSz.height + mcuV - 1) / mcuV); + + /* libjpeg uses arrays of row pointers, which makes it really easy to pad + * data vertically (unfortunately doesn't help horizontally) */ + std::vector yLines (paddedHeight); + std::vector cbLines(paddedHeight/cVSubSampling); + std::vector crLines(paddedHeight/cVSubSampling); + + uint8_t *py = static_cast(inLayout.y); + uint8_t *pcr = static_cast(inLayout.cr); + uint8_t *pcb = static_cast(inLayout.cb); + + for(uint32_t i = 0; i < paddedHeight; i++) + { + /* Once we are in the padding territory we still point to the last line + * effectively replicating it several times ~ CLAMP_TO_EDGE */ + int li = std::min(i, inSz.height - 1); + yLines[i] = static_cast(py + li * inLayout.yStride); + if(i < paddedHeight / cVSubSampling) + { + crLines[i] = static_cast(pcr + li * inLayout.cStride); + cbLines[i] = static_cast(pcb + li * inLayout.cStride); + } + } + + /* If APP1 data was passed in, use it */ + if(app1Buffer && app1Size) + { + jpeg_write_marker(&cinfo, JPEG_APP0 + 1, + static_cast(app1Buffer), app1Size); + } + + /* While we still have padded height left to go, keep giving it one + * macroblock at a time. */ + while (cinfo.next_scanline < cinfo.image_height) { + const uint32_t batchSize = DCTSIZE * maxVSampFactor; + const uint32_t nl = cinfo.next_scanline; + JSAMPARRAY planes[3]{ &yLines[nl], + &cbLines[nl/cVSubSampling], + &crLines[nl/cVSubSampling] }; + + uint32_t done = jpeg_write_raw_data(&cinfo, planes, batchSize); + + if (done != batchSize) { + ALOGE("%s: compressed %u lines, expected %u (total %u/%u)", + __FUNCTION__, done, batchSize, cinfo.next_scanline, + cinfo.image_height); + return -1; + } + } + + /* This will flush everything */ + jpeg_finish_compress(&cinfo); + + /* Grab the actual code size and set it */ + actualCodeSize = dmgr.mEncodedSize; + + return 0; +} + +/* + * TODO: There needs to be a mechanism to discover allocated buffer size + * in the HAL. 
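// ---------------------------------------------------------------------------
// Editor's note: a small standalone sketch of the row-pointer padding used in
// encodeJpegYU12() above (sizes invented for illustration). Rows beyond the
// real image height simply point at the last real row, which replicates it
// like CLAMP_TO_EDGE and lets the encoder consume whole MCU-height batches.
#include <algorithm>
#include <cstdint>
#include <vector>

int main() {
    const uint32_t height = 30, stride = 64;      // hypothetical Y plane
    const uint32_t mcuV = 16;                     // DCTSIZE * v_samp_factor
    const uint32_t paddedHeight = mcuV * ((height + mcuV - 1) / mcuV);  // 32
    std::vector<uint8_t>  plane(stride * height);
    std::vector<uint8_t*> rows(paddedHeight);
    for (uint32_t i = 0; i < paddedHeight; i++) {
        uint32_t line = std::min(i, height - 1);  // clamp into real data
        rows[i] = plane.data() + line * stride;
    }
    // rows[30] and rows[31] now alias rows[29]; the array can be handed to
    // jpeg_write_raw_data() sixteen lines at a time without special casing.
    return 0;
}
// ---------------------------------------------------------------------------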
+ * + * This is very fragile because it is duplicated computation from: + * frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp + * + */ + +/* This assumes mSupportedFormats have all been declared as supporting + * HAL_PIXEL_FORMAT_BLOB to the framework */ +Size ExternalCameraDeviceSession::getMaxJpegResolution() const { + Size ret { 0, 0 }; + for(auto & fmt : mSupportedFormats) { + if(fmt.width * fmt.height > ret.width * ret.height) { + ret = Size { fmt.width, fmt.height }; + } + } + return ret; +} + +Size ExternalCameraDeviceSession::getMaxThumbResolution() const { + Size thumbSize { 0, 0 }; + camera_metadata_ro_entry entry = + mCameraCharacteristics.find(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES); + for(uint32_t i = 0; i < entry.count; i += 2) { + Size sz { static_cast(entry.data.i32[i]), + static_cast(entry.data.i32[i+1]) }; + if(sz.width * sz.height > thumbSize.width * thumbSize.height) { + thumbSize = sz; + } + } + + if (thumbSize.width * thumbSize.height == 0) { + ALOGW("%s: non-zero thumbnail size not available", __FUNCTION__); + } + + return thumbSize; +} + + +ssize_t ExternalCameraDeviceSession::getJpegBufferSize( + uint32_t width, uint32_t height) const { + // Constant from camera3.h + const ssize_t kMinJpegBufferSize = 256 * 1024 + sizeof(CameraBlob); + // Get max jpeg size (area-wise). + if (mMaxJpegResolution.width == 0) { + ALOGE("%s: Do not have a single supported JPEG stream", + __FUNCTION__); + return BAD_VALUE; + } + + // Get max jpeg buffer size + ssize_t maxJpegBufferSize = 0; + camera_metadata_ro_entry jpegBufMaxSize = + mCameraCharacteristics.find(ANDROID_JPEG_MAX_SIZE); + if (jpegBufMaxSize.count == 0) { + ALOGE("%s: Can't find maximum JPEG size in static metadata!", + __FUNCTION__); + return BAD_VALUE; + } + maxJpegBufferSize = jpegBufMaxSize.data.i32[0]; + + if (maxJpegBufferSize <= kMinJpegBufferSize) { + ALOGE("%s: ANDROID_JPEG_MAX_SIZE (%zd) <= kMinJpegBufferSize (%zd)", + __FUNCTION__, maxJpegBufferSize, kMinJpegBufferSize); + return BAD_VALUE; + } + + // Calculate final jpeg buffer size for the given resolution. + float scaleFactor = ((float) (width * height)) / + (mMaxJpegResolution.width * mMaxJpegResolution.height); + ssize_t jpegBufferSize = scaleFactor * (maxJpegBufferSize - kMinJpegBufferSize) + + kMinJpegBufferSize; + if (jpegBufferSize > maxJpegBufferSize) { + jpegBufferSize = maxJpegBufferSize; + } + + return jpegBufferSize; +} + +int ExternalCameraDeviceSession::OutputThread::createJpegLocked( + HalStreamBuffer &halBuf, + const std::shared_ptr& req) +{ + ATRACE_CALL(); + int ret; + auto lfail = [&](auto... 
args) { + ALOGE(args...); + + return 1; + }; + auto parent = mParent.promote(); + if (parent == nullptr) { + ALOGE("%s: session has been disconnected!", __FUNCTION__); + return 1; + } + + ALOGV("%s: HAL buffer sid: %d bid: %" PRIu64 " w: %u h: %u", + __FUNCTION__, halBuf.streamId, static_cast(halBuf.bufferId), + halBuf.width, halBuf.height); + ALOGV("%s: HAL buffer fmt: %x usage: %" PRIx64 " ptr: %p", + __FUNCTION__, halBuf.format, static_cast(halBuf.usage), + halBuf.bufPtr); + ALOGV("%s: YV12 buffer %d x %d", + __FUNCTION__, + mYu12Frame->mWidth, mYu12Frame->mHeight); + + int jpegQuality, thumbQuality; + Size thumbSize; + bool outputThumbnail = true; + + if (req->setting.exists(ANDROID_JPEG_QUALITY)) { + camera_metadata_entry entry = + req->setting.find(ANDROID_JPEG_QUALITY); + jpegQuality = entry.data.u8[0]; + } else { + return lfail("%s: ANDROID_JPEG_QUALITY not set",__FUNCTION__); + } + + if (req->setting.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) { + camera_metadata_entry entry = + req->setting.find(ANDROID_JPEG_THUMBNAIL_QUALITY); + thumbQuality = entry.data.u8[0]; + } else { + return lfail( + "%s: ANDROID_JPEG_THUMBNAIL_QUALITY not set", + __FUNCTION__); + } + + if (req->setting.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) { + camera_metadata_entry entry = + req->setting.find(ANDROID_JPEG_THUMBNAIL_SIZE); + thumbSize = Size { static_cast(entry.data.i32[0]), + static_cast(entry.data.i32[1]) + }; + if (thumbSize.width == 0 && thumbSize.height == 0) { + outputThumbnail = false; + } + } else { + return lfail( + "%s: ANDROID_JPEG_THUMBNAIL_SIZE not set", __FUNCTION__); + } + + /* Cropped and scaled YU12 buffer for main and thumbnail */ + YCbCrLayout yu12Main; + Size jpegSize { halBuf.width, halBuf.height }; + + /* Compute temporary buffer sizes accounting for the following: + * thumbnail can't exceed APP1 size of 64K + * main image needs to hold APP1, headers, and at most a poorly + * compressed image */ + const ssize_t maxThumbCodeSize = 64 * 1024; + const ssize_t maxJpegCodeSize = parent->getJpegBufferSize(jpegSize.width, + jpegSize.height); + + /* Check that getJpegBufferSize did not return an error */ + if (maxJpegCodeSize < 0) { + return lfail( + "%s: getJpegBufferSize returned %zd",__FUNCTION__,maxJpegCodeSize); + } + + + /* Hold actual thumbnail and main image code sizes */ + size_t thumbCodeSize = 0, jpegCodeSize = 0; + /* Temporary thumbnail code buffer */ + std::vector thumbCode(outputThumbnail ? 
maxThumbCodeSize : 0); + + YCbCrLayout yu12Thumb; + if (outputThumbnail) { + ret = cropAndScaleThumbLocked(mYu12Frame, thumbSize, &yu12Thumb); + + if (ret != 0) { + return lfail( + "%s: crop and scale thumbnail failed!", __FUNCTION__); + } + } + + /* Scale and crop main jpeg */ + ret = cropAndScaleLocked(mYu12Frame, jpegSize, &yu12Main); + + if (ret != 0) { + return lfail("%s: crop and scale main failed!", __FUNCTION__); + } + + /* Encode the thumbnail image */ + if (outputThumbnail) { + ret = encodeJpegYU12(thumbSize, yu12Thumb, + thumbQuality, 0, 0, + &thumbCode[0], maxThumbCodeSize, thumbCodeSize); + + if (ret != 0) { + return lfail("%s: thumbnail encodeJpegYU12 failed with %d",__FUNCTION__, ret); + } + } + + /* Combine camera characteristics with request settings to form EXIF + * metadata */ + common::V1_0::helper::CameraMetadata meta(parent->mCameraCharacteristics); + meta.append(req->setting); + + /* Generate EXIF object */ + std::unique_ptr utils(ExifUtils::create()); + /* Make sure it's initialized */ + utils->initialize(); + + utils->setFromMetadata(meta, jpegSize.width, jpegSize.height); + utils->setMake(mExifMake); + utils->setModel(mExifModel); + + ret = utils->generateApp1(outputThumbnail ? &thumbCode[0] : 0, thumbCodeSize); + + if (!ret) { + return lfail("%s: generating APP1 failed", __FUNCTION__); + } + + /* Get internal buffer */ + size_t exifDataSize = utils->getApp1Length(); + const uint8_t* exifData = utils->getApp1Buffer(); + + /* Lock the HAL jpeg code buffer */ + void *bufPtr = sHandleImporter.lock( + *(halBuf.bufPtr), halBuf.usage, maxJpegCodeSize); + + if (!bufPtr) { + return lfail("%s: could not lock %zu bytes", __FUNCTION__, maxJpegCodeSize); + } + + /* Encode the main jpeg image */ + ret = encodeJpegYU12(jpegSize, yu12Main, + jpegQuality, exifData, exifDataSize, + bufPtr, maxJpegCodeSize, jpegCodeSize); + + /* TODO: Not sure this belongs here, maybe better to pass jpegCodeSize out + * and do this when returning buffer to parent */ + CameraBlob blob { CameraBlobId::JPEG, static_cast(jpegCodeSize) }; + void *blobDst = + reinterpret_cast(reinterpret_cast(bufPtr) + + maxJpegCodeSize - + sizeof(CameraBlob)); + memcpy(blobDst, &blob, sizeof(CameraBlob)); + + /* Unlock the HAL jpeg code buffer */ + int relFence = sHandleImporter.unlock(*(halBuf.bufPtr)); + if (relFence > 0) { + halBuf.acquireFence = relFence; + } + + /* Check if our JPEG actually succeeded */ + if (ret != 0) { + return lfail( + "%s: encodeJpegYU12 failed with %d",__FUNCTION__, ret); + } + + ALOGV("%s: encoded JPEG (ret:%d) with Q:%d max size: %zu", + __FUNCTION__, ret, jpegQuality, maxJpegCodeSize); + + return 0; +} + +bool ExternalCameraDeviceSession::OutputThread::threadLoop() { + std::shared_ptr req; + auto parent = mParent.promote(); + if (parent == nullptr) { + ALOGE("%s: session has been disconnected!", __FUNCTION__); + return false; + } + + // TODO: maybe we need to setup a sensor thread to dq/enq v4l frames + // regularly to prevent v4l buffer queue filled with stale buffers + // when app doesn't program a preveiw request + waitForNextRequest(&req); + if (req == nullptr) { + // No new request, wait again + return true; + } + + auto onDeviceError = [&](auto... 
args) { + ALOGE(args...); + parent->notifyError( + req->frameNumber, /*stream*/-1, ErrorCode::ERROR_DEVICE); + signalRequestDone(); + return false; + }; + + if (req->frameIn->mFourcc != V4L2_PIX_FMT_MJPEG) { + return onDeviceError("%s: do not support V4L2 format %c%c%c%c", __FUNCTION__, + req->frameIn->mFourcc & 0xFF, + (req->frameIn->mFourcc >> 8) & 0xFF, + (req->frameIn->mFourcc >> 16) & 0xFF, + (req->frameIn->mFourcc >> 24) & 0xFF); + } + + std::unique_lock lk(mBufferLock); + // Convert input V4L2 frame to YU12 of the same size + // TODO: see if we can save some computation by converting to YV12 here + uint8_t* inData; + size_t inDataSize; + req->frameIn->map(&inData, &inDataSize); + // TODO: in some special case maybe we can decode jpg directly to gralloc output? + ATRACE_BEGIN("MJPGtoI420"); + int res = libyuv::MJPGToI420( + inData, inDataSize, + static_cast(mYu12FrameLayout.y), + mYu12FrameLayout.yStride, + static_cast(mYu12FrameLayout.cb), + mYu12FrameLayout.cStride, + static_cast(mYu12FrameLayout.cr), + mYu12FrameLayout.cStride, + mYu12Frame->mWidth, mYu12Frame->mHeight, + mYu12Frame->mWidth, mYu12Frame->mHeight); + ATRACE_END(); + + if (res != 0) { + // For some webcam, the first few V4L2 frames might be malformed... + ALOGE("%s: Convert V4L2 frame to YU12 failed! res %d", __FUNCTION__, res); + lk.unlock(); + Status st = parent->processCaptureRequestError(req); + if (st != Status::OK) { + return onDeviceError("%s: failed to process capture request error!", __FUNCTION__); + } + signalRequestDone(); + return true; + } + + ALOGV("%s processing new request", __FUNCTION__); + const int kSyncWaitTimeoutMs = 500; + for (auto& halBuf : req->buffers) { + if (halBuf.acquireFence != -1) { + int ret = sync_wait(halBuf.acquireFence, kSyncWaitTimeoutMs); + if (ret) { + halBuf.fenceTimeout = true; + } else { + ::close(halBuf.acquireFence); + halBuf.acquireFence = -1; + } + } + + if (halBuf.fenceTimeout) { + continue; + } + + // Gralloc lockYCbCr the buffer + switch (halBuf.format) { + case PixelFormat::BLOB: { + int ret = createJpegLocked(halBuf, req); + + if(ret != 0) { + lk.unlock(); + return onDeviceError("%s: createJpegLocked failed with %d", + __FUNCTION__, ret); + } + } break; + case PixelFormat::YCBCR_420_888: + case PixelFormat::YV12: { + IMapper::Rect outRect {0, 0, + static_cast(halBuf.width), + static_cast(halBuf.height)}; + YCbCrLayout outLayout = sHandleImporter.lockYCbCr( + *(halBuf.bufPtr), halBuf.usage, outRect); + ALOGV("%s: outLayout y %p cb %p cr %p y_str %d c_str %d c_step %d", + __FUNCTION__, outLayout.y, outLayout.cb, outLayout.cr, + outLayout.yStride, outLayout.cStride, outLayout.chromaStep); + + // Convert to output buffer size/format + uint32_t outputFourcc = getFourCcFromLayout(outLayout); + ALOGV("%s: converting to format %c%c%c%c", __FUNCTION__, + outputFourcc & 0xFF, + (outputFourcc >> 8) & 0xFF, + (outputFourcc >> 16) & 0xFF, + (outputFourcc >> 24) & 0xFF); + + YCbCrLayout cropAndScaled; + ATRACE_BEGIN("cropAndScaleLocked"); + int ret = cropAndScaleLocked( + mYu12Frame, + Size { halBuf.width, halBuf.height }, + &cropAndScaled); + ATRACE_END(); + if (ret != 0) { + lk.unlock(); + return onDeviceError("%s: crop and scale failed!", __FUNCTION__); + } + + Size sz {halBuf.width, halBuf.height}; + ATRACE_BEGIN("formatConvertLocked"); + ret = formatConvertLocked(cropAndScaled, outLayout, sz, outputFourcc); + ATRACE_END(); + if (ret != 0) { + lk.unlock(); + return onDeviceError("%s: format coversion failed!", __FUNCTION__); + } + int relFence = 
sHandleImporter.unlock(*(halBuf.bufPtr)); + if (relFence > 0) { + halBuf.acquireFence = relFence; + } + } break; + default: + lk.unlock(); + return onDeviceError("%s: unknown output format %x", __FUNCTION__, halBuf.format); + } + } // for each buffer + mScaledYu12Frames.clear(); + + // Don't hold the lock while calling back to parent + lk.unlock(); + Status st = parent->processCaptureResult(req); + if (st != Status::OK) { + return onDeviceError("%s: failed to process capture result!", __FUNCTION__); + } + signalRequestDone(); + return true; +} + +Status ExternalCameraDeviceSession::OutputThread::allocateIntermediateBuffers( + const Size& v4lSize, const Size& thumbSize, + const hidl_vec& streams) { + std::lock_guard lk(mBufferLock); + if (mScaledYu12Frames.size() != 0) { + ALOGE("%s: intermediate buffer pool has %zu inflight buffers! (expect 0)", + __FUNCTION__, mScaledYu12Frames.size()); + return Status::INTERNAL_ERROR; + } + + // Allocating intermediate YU12 frame + if (mYu12Frame == nullptr || mYu12Frame->mWidth != v4lSize.width || + mYu12Frame->mHeight != v4lSize.height) { + mYu12Frame.clear(); + mYu12Frame = new AllocatedFrame(v4lSize.width, v4lSize.height); + int ret = mYu12Frame->allocate(&mYu12FrameLayout); + if (ret != 0) { + ALOGE("%s: allocating YU12 frame failed!", __FUNCTION__); + return Status::INTERNAL_ERROR; + } + } + + // Allocating intermediate YU12 thumbnail frame + if (mYu12ThumbFrame == nullptr || + mYu12ThumbFrame->mWidth != thumbSize.width || + mYu12ThumbFrame->mHeight != thumbSize.height) { + mYu12ThumbFrame.clear(); + mYu12ThumbFrame = new AllocatedFrame(thumbSize.width, thumbSize.height); + int ret = mYu12ThumbFrame->allocate(&mYu12ThumbFrameLayout); + if (ret != 0) { + ALOGE("%s: allocating YU12 thumb frame failed!", __FUNCTION__); + return Status::INTERNAL_ERROR; + } + } + + // Allocating scaled buffers + for (const auto& stream : streams) { + Size sz = {stream.width, stream.height}; + if (sz == v4lSize) { + continue; // Don't need an intermediate buffer same size as v4lBuffer + } + if (mIntermediateBuffers.count(sz) == 0) { + // Create new intermediate buffer + sp buf = new AllocatedFrame(stream.width, stream.height); + int ret = buf->allocate(); + if (ret != 0) { + ALOGE("%s: allocating intermediate YU12 frame %dx%d failed!", + __FUNCTION__, stream.width, stream.height); + return Status::INTERNAL_ERROR; + } + mIntermediateBuffers[sz] = buf; + } + } + + // Remove unconfigured buffers + auto it = mIntermediateBuffers.begin(); + while (it != mIntermediateBuffers.end()) { + bool configured = false; + auto sz = it->first; + for (const auto& stream : streams) { + if (stream.width == sz.width && stream.height == sz.height) { + configured = true; + break; + } + } + if (configured) { + it++; + } else { + it = mIntermediateBuffers.erase(it); + } + } + return Status::OK; +} + +Status ExternalCameraDeviceSession::OutputThread::submitRequest( + const std::shared_ptr& req) { + std::unique_lock lk(mRequestListLock); + mRequestList.push_back(req); + lk.unlock(); + mRequestCond.notify_one(); + return Status::OK; +} + +void ExternalCameraDeviceSession::OutputThread::flush() { + ATRACE_CALL(); + auto parent = mParent.promote(); + if (parent == nullptr) { + ALOGE("%s: session has been disconnected!", __FUNCTION__); + return; + } + + std::unique_lock lk(mRequestListLock); + std::list> reqs = std::move(mRequestList); + mRequestList.clear(); + if (mProcessingRequest) { + std::chrono::seconds timeout = std::chrono::seconds(kFlushWaitTimeoutSec); + auto st = 
mRequestDoneCond.wait_for(lk, timeout); + if (st == std::cv_status::timeout) { + ALOGE("%s: wait for inflight request finish timeout!", __FUNCTION__); + } + } + + ALOGV("%s: flusing inflight requests", __FUNCTION__); + lk.unlock(); + for (const auto& req : reqs) { + parent->processCaptureRequestError(req); + } +} + +void ExternalCameraDeviceSession::OutputThread::waitForNextRequest( + std::shared_ptr* out) { + ATRACE_CALL(); + if (out == nullptr) { + ALOGE("%s: out is null", __FUNCTION__); + return; + } + + std::unique_lock lk(mRequestListLock); + int waitTimes = 0; + while (mRequestList.empty()) { + if (exitPending()) { + return; + } + std::chrono::milliseconds timeout = std::chrono::milliseconds(kReqWaitTimeoutMs); + auto st = mRequestCond.wait_for(lk, timeout); + if (st == std::cv_status::timeout) { + waitTimes++; + if (waitTimes == kReqWaitTimesMax) { + // no new request, return + return; + } + } + } + *out = mRequestList.front(); + mRequestList.pop_front(); + mProcessingRequest = true; + mProcessingFrameNumer = (*out)->frameNumber; +} + +void ExternalCameraDeviceSession::OutputThread::signalRequestDone() { + std::unique_lock lk(mRequestListLock); + mProcessingRequest = false; + mProcessingFrameNumer = 0; + lk.unlock(); + mRequestDoneCond.notify_one(); +} + +void ExternalCameraDeviceSession::OutputThread::dump(int fd) { + std::lock_guard lk(mRequestListLock); + if (mProcessingRequest) { + dprintf(fd, "OutputThread processing frame %d\n", mProcessingFrameNumer); + } else { + dprintf(fd, "OutputThread not processing any frames\n"); + } + dprintf(fd, "OutputThread request list contains frame: "); + for (const auto& req : mRequestList) { + dprintf(fd, "%d, ", req->frameNumber); + } + dprintf(fd, "\n"); +} + +void ExternalCameraDeviceSession::cleanupBuffersLocked(int id) { + for (auto& pair : mCirculatingBuffers.at(id)) { + sHandleImporter.freeBuffer(pair.second); + } + mCirculatingBuffers[id].clear(); + mCirculatingBuffers.erase(id); +} + +void ExternalCameraDeviceSession::updateBufferCaches(const hidl_vec& cachesToRemove) { + Mutex::Autolock _l(mLock); + for (auto& cache : cachesToRemove) { + auto cbsIt = mCirculatingBuffers.find(cache.streamId); + if (cbsIt == mCirculatingBuffers.end()) { + // The stream could have been removed + continue; + } + CirculatingBuffers& cbs = cbsIt->second; + auto it = cbs.find(cache.bufferId); + if (it != cbs.end()) { + sHandleImporter.freeBuffer(it->second); + cbs.erase(it); + } else { + ALOGE("%s: stream %d buffer %" PRIu64 " is not cached", + __FUNCTION__, cache.streamId, cache.bufferId); + } + } +} + +bool ExternalCameraDeviceSession::isSupported(const Stream& stream) { + int32_t ds = static_cast(stream.dataSpace); + PixelFormat fmt = stream.format; + uint32_t width = stream.width; + uint32_t height = stream.height; + // TODO: check usage flags + + if (stream.streamType != StreamType::OUTPUT) { + ALOGE("%s: does not support non-output stream type", __FUNCTION__); + return false; + } + + if (stream.rotation != StreamRotation::ROTATION_0) { + ALOGE("%s: does not support stream rotation", __FUNCTION__); + return false; + } + + if (ds & Dataspace::DEPTH) { + ALOGI("%s: does not support depth output", __FUNCTION__); + return false; + } + + switch (fmt) { + case PixelFormat::BLOB: + if (ds != static_cast(Dataspace::V0_JFIF)) { + ALOGI("%s: BLOB format does not support dataSpace %x", __FUNCTION__, ds); + return false; + } + case PixelFormat::IMPLEMENTATION_DEFINED: + case PixelFormat::YCBCR_420_888: + case PixelFormat::YV12: + // TODO: check what dataspace we 
can support here. + // intentional no-ops. + break; + default: + ALOGI("%s: does not support format %x", __FUNCTION__, fmt); + return false; + } + + // Assume we can convert any V4L2 format to any of supported output format for now, i.e, + // ignoring v4l2Fmt.fourcc for now. Might need more subtle check if we support more v4l format + // in the futrue. + for (const auto& v4l2Fmt : mSupportedFormats) { + if (width == v4l2Fmt.width && height == v4l2Fmt.height) { + return true; + } + } + ALOGI("%s: resolution %dx%d is not supported", __FUNCTION__, width, height); + return false; +} + +int ExternalCameraDeviceSession::v4l2StreamOffLocked() { + if (!mV4l2Streaming) { + return OK; + } + + { + std::lock_guard lk(mV4l2BufferLock); + if (mNumDequeuedV4l2Buffers != 0) { + ALOGE("%s: there are %zu inflight V4L buffers", + __FUNCTION__, mNumDequeuedV4l2Buffers); + return -1; + } + } + mV4L2BufferCount = 0; + + // VIDIOC_STREAMOFF + v4l2_buf_type capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_STREAMOFF, &capture_type)) < 0) { + ALOGE("%s: STREAMOFF failed: %s", __FUNCTION__, strerror(errno)); + return -errno; + } + + // VIDIOC_REQBUFS: clear buffers + v4l2_requestbuffers req_buffers{}; + req_buffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + req_buffers.memory = V4L2_MEMORY_MMAP; + req_buffers.count = 0; + if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_REQBUFS, &req_buffers)) < 0) { + ALOGE("%s: REQBUFS failed: %s", __FUNCTION__, strerror(errno)); + return -errno; + } + + mV4l2Streaming = false; + return OK; +} + +int ExternalCameraDeviceSession::setV4l2FpsLocked(double fps) { + // VIDIOC_G_PARM/VIDIOC_S_PARM: set fps + v4l2_streamparm streamparm = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE }; + // The following line checks that the driver knows about framerate get/set. + int ret = TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_G_PARM, &streamparm)); + if (ret != 0) { + if (errno == -EINVAL) { + ALOGW("%s: device does not support VIDIOC_G_PARM", __FUNCTION__); + } + return -errno; + } + // Now check if the device is able to accept a capture framerate set. + if (!(streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME)) { + ALOGW("%s: device does not support V4L2_CAP_TIMEPERFRAME", __FUNCTION__); + return -EINVAL; + } + + // fps is float, approximate by a fraction. 
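// ---------------------------------------------------------------------------
// Editor's note: a tiny sketch of the fraction approximation used just below,
// with a made-up frame rate. V4L2 expresses time-per-frame as
// numerator/denominator seconds, so a target of 7.5 fps becomes 10000/75000
// (~133.3 ms per frame), and the achieved rate is denominator/numerator.
#include <cstdio>

int main() {
    const unsigned kPrecision = 10000;   // same role as kFrameRatePrecision
    const double fps = 7.5;              // hypothetical requested frame rate
    unsigned numerator = kPrecision;
    unsigned denominator = static_cast<unsigned>(fps * kPrecision);  // 75000
    double achievedFps = denominator / static_cast<double>(numerator);
    std::printf("timeperframe %u/%u -> %.2f fps\n",
                numerator, denominator, achievedFps);
    return 0;
}
// ---------------------------------------------------------------------------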
+ const int kFrameRatePrecision = 10000; + streamparm.parm.capture.timeperframe.numerator = kFrameRatePrecision; + streamparm.parm.capture.timeperframe.denominator = + (fps * kFrameRatePrecision); + + if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_S_PARM, &streamparm)) < 0) { + ALOGE("%s: failed to set framerate to %f: %s", __FUNCTION__, fps, strerror(errno)); + return -1; + } + + double retFps = streamparm.parm.capture.timeperframe.denominator / + static_cast(streamparm.parm.capture.timeperframe.numerator); + if (std::fabs(fps - retFps) > 1.0) { + ALOGE("%s: expect fps %f, got %f instead", __FUNCTION__, fps, retFps); + return -1; + } + mV4l2StreamingFps = fps; + return 0; +} + +int ExternalCameraDeviceSession::configureV4l2StreamLocked( + const SupportedV4L2Format& v4l2Fmt, double requestFps) { + ATRACE_CALL(); + int ret = v4l2StreamOffLocked(); + if (ret != OK) { + ALOGE("%s: stop v4l2 streaming failed: ret %d", __FUNCTION__, ret); + return ret; + } + + // VIDIOC_S_FMT w/h/fmt + v4l2_format fmt; + fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + fmt.fmt.pix.width = v4l2Fmt.width; + fmt.fmt.pix.height = v4l2Fmt.height; + fmt.fmt.pix.pixelformat = v4l2Fmt.fourcc; + ret = TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_S_FMT, &fmt)); + if (ret < 0) { + int numAttempt = 0; + while (ret < 0) { + ALOGW("%s: VIDIOC_S_FMT failed, wait 33ms and try again", __FUNCTION__); + usleep(IOCTL_RETRY_SLEEP_US); // sleep and try again + ret = TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_S_FMT, &fmt)); + if (numAttempt == MAX_RETRY) { + break; + } + numAttempt++; + } + if (ret < 0) { + ALOGE("%s: S_FMT ioctl failed: %s", __FUNCTION__, strerror(errno)); + return -errno; + } + } + + if (v4l2Fmt.width != fmt.fmt.pix.width || v4l2Fmt.height != fmt.fmt.pix.height || + v4l2Fmt.fourcc != fmt.fmt.pix.pixelformat) { + ALOGE("%s: S_FMT expect %c%c%c%c %dx%d, got %c%c%c%c %dx%d instead!", __FUNCTION__, + v4l2Fmt.fourcc & 0xFF, + (v4l2Fmt.fourcc >> 8) & 0xFF, + (v4l2Fmt.fourcc >> 16) & 0xFF, + (v4l2Fmt.fourcc >> 24) & 0xFF, + v4l2Fmt.width, v4l2Fmt.height, + fmt.fmt.pix.pixelformat & 0xFF, + (fmt.fmt.pix.pixelformat >> 8) & 0xFF, + (fmt.fmt.pix.pixelformat >> 16) & 0xFF, + (fmt.fmt.pix.pixelformat >> 24) & 0xFF, + fmt.fmt.pix.width, fmt.fmt.pix.height); + return -EINVAL; + } + uint32_t bufferSize = fmt.fmt.pix.sizeimage; + ALOGI("%s: V4L2 buffer size is %d", __FUNCTION__, bufferSize); + uint32_t expectedMaxBufferSize = kMaxBytesPerPixel * fmt.fmt.pix.width * fmt.fmt.pix.height; + if ((bufferSize == 0) || (bufferSize > expectedMaxBufferSize)) { + ALOGE("%s: V4L2 buffer size: %u looks invalid. Expected maximum size: %u", __FUNCTION__, + bufferSize, expectedMaxBufferSize); + return -EINVAL; + } + mMaxV4L2BufferSize = bufferSize; + + const double kDefaultFps = 30.0; + double fps = 1000.0; + if (requestFps != 0.0) { + fps = requestFps; + } else { + double maxFps = -1.0; + // Try to pick the slowest fps that is at least 30 + for (const auto& fr : v4l2Fmt.frameRates) { + double f = fr.getDouble(); + if (maxFps < f) { + maxFps = f; + } + if (f >= kDefaultFps && f < fps) { + fps = f; + } + } + if (fps == 1000.0) { + fps = maxFps; + } + } + + int fpsRet = setV4l2FpsLocked(fps); + if (fpsRet != 0 && fpsRet != -EINVAL) { + ALOGE("%s: set fps failed: %s", __FUNCTION__, strerror(fpsRet)); + return fpsRet; + } + + uint32_t v4lBufferCount = (fps >= kDefaultFps) ? 
+ mCfg.numVideoBuffers : mCfg.numStillBuffers; + // VIDIOC_REQBUFS: create buffers + v4l2_requestbuffers req_buffers{}; + req_buffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + req_buffers.memory = V4L2_MEMORY_MMAP; + req_buffers.count = v4lBufferCount; + if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_REQBUFS, &req_buffers)) < 0) { + ALOGE("%s: VIDIOC_REQBUFS failed: %s", __FUNCTION__, strerror(errno)); + return -errno; + } + + // Driver can indeed return more buffer if it needs more to operate + if (req_buffers.count < v4lBufferCount) { + ALOGE("%s: VIDIOC_REQBUFS expected %d buffers, got %d instead", + __FUNCTION__, v4lBufferCount, req_buffers.count); + return NO_MEMORY; + } + + // VIDIOC_QUERYBUF: get buffer offset in the V4L2 fd + // VIDIOC_QBUF: send buffer to driver + mV4L2BufferCount = req_buffers.count; + for (uint32_t i = 0; i < req_buffers.count; i++) { + v4l2_buffer buffer = { + .type = V4L2_BUF_TYPE_VIDEO_CAPTURE, + .index = i, + .memory = V4L2_MEMORY_MMAP}; + + if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QUERYBUF, &buffer)) < 0) { + ALOGE("%s: QUERYBUF %d failed: %s", __FUNCTION__, i, strerror(errno)); + return -errno; + } + + if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QBUF, &buffer)) < 0) { + ALOGE("%s: QBUF %d failed: %s", __FUNCTION__, i, strerror(errno)); + return -errno; + } + } + + // VIDIOC_STREAMON: start streaming + v4l2_buf_type capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + ret = TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_STREAMON, &capture_type)); + if (ret < 0) { + int numAttempt = 0; + while (ret < 0) { + ALOGW("%s: VIDIOC_STREAMON failed, wait 33ms and try again", __FUNCTION__); + usleep(IOCTL_RETRY_SLEEP_US); // sleep 100 ms and try again + ret = TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_STREAMON, &capture_type)); + if (numAttempt == MAX_RETRY) { + break; + } + numAttempt++; + } + if (ret < 0) { + ALOGE("%s: VIDIOC_STREAMON ioctl failed: %s", __FUNCTION__, strerror(errno)); + return -errno; + } + } + + // Swallow first few frames after streamOn to account for bad frames from some devices + for (int i = 0; i < kBadFramesAfterStreamOn; i++) { + v4l2_buffer buffer{}; + buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buffer.memory = V4L2_MEMORY_MMAP; + if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_DQBUF, &buffer)) < 0) { + ALOGE("%s: DQBUF fails: %s", __FUNCTION__, strerror(errno)); + return -errno; + } + + if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QBUF, &buffer)) < 0) { + ALOGE("%s: QBUF index %d fails: %s", __FUNCTION__, buffer.index, strerror(errno)); + return -errno; + } + } + + ALOGI("%s: start V4L2 streaming %dx%d@%ffps", + __FUNCTION__, v4l2Fmt.width, v4l2Fmt.height, fps); + mV4l2StreamingFmt = v4l2Fmt; + mV4l2Streaming = true; + return OK; +} + +sp ExternalCameraDeviceSession::dequeueV4l2FrameLocked(/*out*/nsecs_t* shutterTs) { + ATRACE_CALL(); + sp ret = nullptr; + + if (shutterTs == nullptr) { + ALOGE("%s: shutterTs must not be null!", __FUNCTION__); + return ret; + } + + { + std::unique_lock lk(mV4l2BufferLock); + if (mNumDequeuedV4l2Buffers == mV4L2BufferCount) { + int waitRet = waitForV4L2BufferReturnLocked(lk); + if (waitRet != 0) { + return ret; + } + } + } + + ATRACE_BEGIN("VIDIOC_DQBUF"); + v4l2_buffer buffer{}; + buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buffer.memory = V4L2_MEMORY_MMAP; + if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_DQBUF, &buffer)) < 0) { + ALOGE("%s: DQBUF fails: %s", __FUNCTION__, strerror(errno)); + return ret; + } + ATRACE_END(); + + if (buffer.index >= mV4L2BufferCount) { + 
ALOGE("%s: Invalid buffer id: %d", __FUNCTION__, buffer.index); + return ret; + } + + if (buffer.flags & V4L2_BUF_FLAG_ERROR) { + ALOGE("%s: v4l2 buf error! buf flag 0x%x", __FUNCTION__, buffer.flags); + // TODO: try to dequeue again + } + + if (buffer.bytesused > mMaxV4L2BufferSize) { + ALOGE("%s: v4l2 buffer bytes used: %u maximum %u", __FUNCTION__, buffer.bytesused, + mMaxV4L2BufferSize); + return ret; + } + + if (buffer.flags & V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC) { + // Ideally we should also check for V4L2_BUF_FLAG_TSTAMP_SRC_SOE, but + // even V4L2_BUF_FLAG_TSTAMP_SRC_EOF is better than capture a timestamp now + *shutterTs = static_cast(buffer.timestamp.tv_sec)*1000000000LL + + buffer.timestamp.tv_usec * 1000LL; + } else { + *shutterTs = systemTime(SYSTEM_TIME_MONOTONIC); + } + + { + std::lock_guard lk(mV4l2BufferLock); + mNumDequeuedV4l2Buffers++; + } + return new V4L2Frame( + mV4l2StreamingFmt.width, mV4l2StreamingFmt.height, mV4l2StreamingFmt.fourcc, + buffer.index, mV4l2Fd.get(), buffer.bytesused, buffer.m.offset); +} + +void ExternalCameraDeviceSession::enqueueV4l2Frame(const sp& frame) { + ATRACE_CALL(); + frame->unmap(); + ATRACE_BEGIN("VIDIOC_QBUF"); + v4l2_buffer buffer{}; + buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buffer.memory = V4L2_MEMORY_MMAP; + buffer.index = frame->mBufferIndex; + if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QBUF, &buffer)) < 0) { + ALOGE("%s: QBUF index %d fails: %s", __FUNCTION__, + frame->mBufferIndex, strerror(errno)); + return; + } + ATRACE_END(); + + { + std::lock_guard lk(mV4l2BufferLock); + mNumDequeuedV4l2Buffers--; + } + mV4L2BufferReturned.notify_one(); +} + +Status ExternalCameraDeviceSession::configureStreams( + const V3_2::StreamConfiguration& config, V3_3::HalStreamConfiguration* out) { + ATRACE_CALL(); + if (config.operationMode != StreamConfigurationMode::NORMAL_MODE) { + ALOGE("%s: unsupported operation mode: %d", __FUNCTION__, config.operationMode); + return Status::ILLEGAL_ARGUMENT; + } + + if (config.streams.size() == 0) { + ALOGE("%s: cannot configure zero stream", __FUNCTION__); + return Status::ILLEGAL_ARGUMENT; + } + + int numProcessedStream = 0; + int numStallStream = 0; + for (const auto& stream : config.streams) { + // Check if the format/width/height combo is supported + if (!isSupported(stream)) { + return Status::ILLEGAL_ARGUMENT; + } + if (stream.format == PixelFormat::BLOB) { + numStallStream++; + } else { + numProcessedStream++; + } + } + + if (numProcessedStream > kMaxProcessedStream) { + ALOGE("%s: too many processed streams (expect <= %d, got %d)", __FUNCTION__, + kMaxProcessedStream, numProcessedStream); + return Status::ILLEGAL_ARGUMENT; + } + + if (numStallStream > kMaxStallStream) { + ALOGE("%s: too many stall streams (expect <= %d, got %d)", __FUNCTION__, + kMaxStallStream, numStallStream); + return Status::ILLEGAL_ARGUMENT; + } + + Status status = initStatus(); + if (status != Status::OK) { + return status; + } + + + { + std::lock_guard lk(mInflightFramesLock); + if (!mInflightFrames.empty()) { + ALOGE("%s: trying to configureStreams while there are still %zu inflight frames!", + __FUNCTION__, mInflightFrames.size()); + return Status::INTERNAL_ERROR; + } + } + + Mutex::Autolock _l(mLock); + // Add new streams + for (const auto& stream : config.streams) { + if (mStreamMap.count(stream.id) == 0) { + mStreamMap[stream.id] = stream; + mCirculatingBuffers.emplace(stream.id, CirculatingBuffers{}); + } + } + + // Cleanup removed streams + for(auto it = mStreamMap.begin(); it != mStreamMap.end();) { + int id 
= it->first; + bool found = false; + for (const auto& stream : config.streams) { + if (id == stream.id) { + found = true; + break; + } + } + if (!found) { + // Unmap all buffers of deleted stream + cleanupBuffersLocked(id); + it = mStreamMap.erase(it); + } else { + ++it; + } + } + + // Now select a V4L2 format to produce all output streams + float desiredAr = (mCroppingType == VERTICAL) ? kMaxAspectRatio : kMinAspectRatio; + uint32_t maxDim = 0; + for (const auto& stream : config.streams) { + float aspectRatio = ASPECT_RATIO(stream); + ALOGI("%s: request stream %dx%d", __FUNCTION__, stream.width, stream.height); + if ((mCroppingType == VERTICAL && aspectRatio < desiredAr) || + (mCroppingType == HORIZONTAL && aspectRatio > desiredAr)) { + desiredAr = aspectRatio; + } + + // The dimension that's not cropped + uint32_t dim = (mCroppingType == VERTICAL) ? stream.width : stream.height; + if (dim > maxDim) { + maxDim = dim; + } + } + // Find the smallest format that matches the desired aspect ratio and is wide/high enough + SupportedV4L2Format v4l2Fmt {.width = 0, .height = 0}; + for (const auto& fmt : mSupportedFormats) { + uint32_t dim = (mCroppingType == VERTICAL) ? fmt.width : fmt.height; + if (dim >= maxDim) { + float aspectRatio = ASPECT_RATIO(fmt); + if (isAspectRatioClose(aspectRatio, desiredAr)) { + v4l2Fmt = fmt; + // since mSupportedFormats is sorted by width then height, the first matching fmt + // will be the smallest one with matching aspect ratio + break; + } + } + } + if (v4l2Fmt.width == 0) { + // Cannot find exact good aspect ratio candidate, try to find a close one + for (const auto& fmt : mSupportedFormats) { + uint32_t dim = (mCroppingType == VERTICAL) ? fmt.width : fmt.height; + if (dim >= maxDim) { + float aspectRatio = ASPECT_RATIO(fmt); + if ((mCroppingType == VERTICAL && aspectRatio < desiredAr) || + (mCroppingType == HORIZONTAL && aspectRatio > desiredAr)) { + v4l2Fmt = fmt; + break; + } + } + } + } + + if (v4l2Fmt.width == 0) { + ALOGE("%s: unable to find a resolution matching (%s at least %d, aspect ratio %f)" + , __FUNCTION__, (mCroppingType == VERTICAL) ? 
"width" : "height", + maxDim, desiredAr); + return Status::ILLEGAL_ARGUMENT; + } + + if (configureV4l2StreamLocked(v4l2Fmt) != 0) { + ALOGE("V4L configuration failed!, format:%c%c%c%c, w %d, h %d", + v4l2Fmt.fourcc & 0xFF, + (v4l2Fmt.fourcc >> 8) & 0xFF, + (v4l2Fmt.fourcc >> 16) & 0xFF, + (v4l2Fmt.fourcc >> 24) & 0xFF, + v4l2Fmt.width, v4l2Fmt.height); + return Status::INTERNAL_ERROR; + } + + Size v4lSize = {v4l2Fmt.width, v4l2Fmt.height}; + Size thumbSize { 0, 0 }; + camera_metadata_ro_entry entry = + mCameraCharacteristics.find(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES); + for(uint32_t i = 0; i < entry.count; i += 2) { + Size sz { static_cast(entry.data.i32[i]), + static_cast(entry.data.i32[i+1]) }; + if(sz.width * sz.height > thumbSize.width * thumbSize.height) { + thumbSize = sz; + } + } + + if (thumbSize.width * thumbSize.height == 0) { + ALOGE("%s: non-zero thumbnail size not available", __FUNCTION__); + return Status::INTERNAL_ERROR; + } + + status = mOutputThread->allocateIntermediateBuffers(v4lSize, + mMaxThumbResolution, config.streams); + if (status != Status::OK) { + ALOGE("%s: allocating intermediate buffers failed!", __FUNCTION__); + return status; + } + + out->streams.resize(config.streams.size()); + for (size_t i = 0; i < config.streams.size(); i++) { + out->streams[i].overrideDataSpace = config.streams[i].dataSpace; + out->streams[i].v3_2.id = config.streams[i].id; + // TODO: double check should we add those CAMERA flags + mStreamMap[config.streams[i].id].usage = + out->streams[i].v3_2.producerUsage = config.streams[i].usage | + BufferUsage::CPU_WRITE_OFTEN | + BufferUsage::CAMERA_OUTPUT; + out->streams[i].v3_2.consumerUsage = 0; + out->streams[i].v3_2.maxBuffers = mV4L2BufferCount; + + switch (config.streams[i].format) { + case PixelFormat::BLOB: + case PixelFormat::YCBCR_420_888: + case PixelFormat::YV12: // Used by SurfaceTexture + // No override + out->streams[i].v3_2.overrideFormat = config.streams[i].format; + break; + case PixelFormat::IMPLEMENTATION_DEFINED: + // Override based on VIDEO or not + out->streams[i].v3_2.overrideFormat = + (config.streams[i].usage & BufferUsage::VIDEO_ENCODER) ? 
+ PixelFormat::YCBCR_420_888 : PixelFormat::YV12; + // Save overridden formt in mStreamMap + mStreamMap[config.streams[i].id].format = out->streams[i].v3_2.overrideFormat; + break; + default: + ALOGE("%s: unsupported format 0x%x", __FUNCTION__, config.streams[i].format); + return Status::ILLEGAL_ARGUMENT; + } + } + + mFirstRequest = true; + return Status::OK; +} + +bool ExternalCameraDeviceSession::isClosed() { + Mutex::Autolock _l(mLock); + return mClosed; +} + +#define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0])) +#define UPDATE(md, tag, data, size) \ +do { \ + if ((md).update((tag), (data), (size))) { \ + ALOGE("Update " #tag " failed!"); \ + return BAD_VALUE; \ + } \ +} while (0) + +status_t ExternalCameraDeviceSession::initDefaultRequests() { + ::android::hardware::camera::common::V1_0::helper::CameraMetadata md; + + const uint8_t aberrationMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF; + UPDATE(md, ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &aberrationMode, 1); + + const int32_t exposureCompensation = 0; + UPDATE(md, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &exposureCompensation, 1); + + const uint8_t videoStabilizationMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF; + UPDATE(md, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &videoStabilizationMode, 1); + + const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO; + UPDATE(md, ANDROID_CONTROL_AWB_MODE, &awbMode, 1); + + const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON; + UPDATE(md, ANDROID_CONTROL_AE_MODE, &aeMode, 1); + + const uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE; + UPDATE(md, ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger, 1); + + const uint8_t afMode = ANDROID_CONTROL_AF_MODE_AUTO; + UPDATE(md, ANDROID_CONTROL_AF_MODE, &afMode, 1); + + const uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE; + UPDATE(md, ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1); + + const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_DISABLED; + UPDATE(md, ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1); + + const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF; + UPDATE(md, ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1); + + const uint8_t flashMode = ANDROID_FLASH_MODE_OFF; + UPDATE(md, ANDROID_FLASH_MODE, &flashMode, 1); + + const int32_t thumbnailSize[] = {240, 180}; + UPDATE(md, ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2); + + const uint8_t jpegQuality = 90; + UPDATE(md, ANDROID_JPEG_QUALITY, &jpegQuality, 1); + UPDATE(md, ANDROID_JPEG_THUMBNAIL_QUALITY, &jpegQuality, 1); + + const int32_t jpegOrientation = 0; + UPDATE(md, ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1); + + const uint8_t oisMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF; + UPDATE(md, ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &oisMode, 1); + + const uint8_t nrMode = ANDROID_NOISE_REDUCTION_MODE_OFF; + UPDATE(md, ANDROID_NOISE_REDUCTION_MODE, &nrMode, 1); + + const int32_t testPatternModes = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF; + UPDATE(md, ANDROID_SENSOR_TEST_PATTERN_MODE, &testPatternModes, 1); + + const uint8_t fdMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF; + UPDATE(md, ANDROID_STATISTICS_FACE_DETECT_MODE, &fdMode, 1); + + const uint8_t hotpixelMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF; + UPDATE(md, ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotpixelMode, 1); + + bool support30Fps = false; + int32_t maxFps = std::numeric_limits::min(); + for (const auto& supportedFormat : mSupportedFormats) { + for (const auto& fr : supportedFormat.frameRates) { + int32_t framerateInt = static_cast(fr.getDouble()); + if (maxFps < framerateInt) 
{ + maxFps = framerateInt; + } + if (framerateInt == 30) { + support30Fps = true; + break; + } + } + if (support30Fps) { + break; + } + } + int32_t defaultFramerate = support30Fps ? 30 : maxFps; + int32_t defaultFpsRange[] = {defaultFramerate / 2, defaultFramerate}; + UPDATE(md, ANDROID_CONTROL_AE_TARGET_FPS_RANGE, defaultFpsRange, ARRAY_SIZE(defaultFpsRange)); + + uint8_t antibandingMode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO; + UPDATE(md, ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibandingMode, 1); + + const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO; + UPDATE(md, ANDROID_CONTROL_MODE, &controlMode, 1); + + auto requestTemplates = hidl_enum_range(); + for (RequestTemplate type : requestTemplates) { + ::android::hardware::camera::common::V1_0::helper::CameraMetadata mdCopy = md; + uint8_t intent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW; + switch (type) { + case RequestTemplate::PREVIEW: + intent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW; + break; + case RequestTemplate::STILL_CAPTURE: + intent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE; + break; + case RequestTemplate::VIDEO_RECORD: + intent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD; + break; + case RequestTemplate::VIDEO_SNAPSHOT: + intent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT; + break; + default: + ALOGV("%s: unsupported RequestTemplate type %d", __FUNCTION__, type); + continue; + } + UPDATE(mdCopy, ANDROID_CONTROL_CAPTURE_INTENT, &intent, 1); + + camera_metadata_t* rawMd = mdCopy.release(); + CameraMetadata hidlMd; + hidlMd.setToExternal( + (uint8_t*) rawMd, get_camera_metadata_size(rawMd)); + mDefaultRequests[type] = hidlMd; + free_camera_metadata(rawMd); + } + + return OK; +} + +status_t ExternalCameraDeviceSession::fillCaptureResult( + common::V1_0::helper::CameraMetadata &md, nsecs_t timestamp) { + // android.control + // For USB camera, we don't know the AE state. Set the state to converged to + // indicate the frame should be good to use. Then apps don't have to wait the + // AE state. + const uint8_t aeState = ANDROID_CONTROL_AE_STATE_CONVERGED; + UPDATE(md, ANDROID_CONTROL_AE_STATE, &aeState, 1); + + const uint8_t ae_lock = ANDROID_CONTROL_AE_LOCK_OFF; + UPDATE(md, ANDROID_CONTROL_AE_LOCK, &ae_lock, 1); + + bool afTrigger = false; + { + std::lock_guard lk(mAfTriggerLock); + afTrigger = mAfTrigger; + if (md.exists(ANDROID_CONTROL_AF_TRIGGER)) { + camera_metadata_entry entry = md.find(ANDROID_CONTROL_AF_TRIGGER); + if (entry.data.u8[0] == ANDROID_CONTROL_AF_TRIGGER_START) { + mAfTrigger = afTrigger = true; + } else if (entry.data.u8[0] == ANDROID_CONTROL_AF_TRIGGER_CANCEL) { + mAfTrigger = afTrigger = false; + } + } + } + + // For USB camera, the USB camera handles everything and we don't have control + // over AF. We only simply fake the AF metadata based on the request + // received here. + uint8_t afState; + if (afTrigger) { + afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED; + } else { + afState = ANDROID_CONTROL_AF_STATE_INACTIVE; + } + UPDATE(md, ANDROID_CONTROL_AF_STATE, &afState, 1); + + // Set AWB state to converged to indicate the frame should be good to use. 
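// ---------------------------------------------------------------------------
// Editor's note: the AF handling a few lines above is effectively the tiny
// state machine sketched here (names invented for illustration). A START
// trigger latches the faked "focused and locked" state, CANCEL returns to
// "inactive", and any other trigger value leaves the state unchanged.
#include <cassert>
#include <cstdint>

enum class FakeAfTrigger : uint8_t { IDLE, START, CANCEL };
enum class FakeAfState   : uint8_t { INACTIVE, FOCUSED_LOCKED };

FakeAfState updateFakeAf(FakeAfState current, FakeAfTrigger trigger) {
    switch (trigger) {
        case FakeAfTrigger::START:  return FakeAfState::FOCUSED_LOCKED;
        case FakeAfTrigger::CANCEL: return FakeAfState::INACTIVE;
        default:                    return current;   // IDLE keeps prior state
    }
}

int main() {
    FakeAfState s = FakeAfState::INACTIVE;
    s = updateFakeAf(s, FakeAfTrigger::START);   assert(s == FakeAfState::FOCUSED_LOCKED);
    s = updateFakeAf(s, FakeAfTrigger::IDLE);    assert(s == FakeAfState::FOCUSED_LOCKED);
    s = updateFakeAf(s, FakeAfTrigger::CANCEL);  assert(s == FakeAfState::INACTIVE);
    return 0;
}
// ---------------------------------------------------------------------------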
+ const uint8_t awbState = ANDROID_CONTROL_AWB_STATE_CONVERGED; + UPDATE(md, ANDROID_CONTROL_AWB_STATE, &awbState, 1); + + const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF; + UPDATE(md, ANDROID_CONTROL_AWB_LOCK, &awbLock, 1); + + camera_metadata_ro_entry active_array_size = + mCameraCharacteristics.find(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE); + + if (active_array_size.count == 0) { + ALOGE("%s: cannot find active array size!", __FUNCTION__); + return -EINVAL; + } + + const uint8_t flashState = ANDROID_FLASH_STATE_UNAVAILABLE; + UPDATE(md, ANDROID_FLASH_STATE, &flashState, 1); + + // This means pipeline latency of X frame intervals. The maximum number is 4. + const uint8_t requestPipelineMaxDepth = 4; + UPDATE(md, ANDROID_REQUEST_PIPELINE_DEPTH, &requestPipelineMaxDepth, 1); + + // android.scaler + const int32_t crop_region[] = { + active_array_size.data.i32[0], active_array_size.data.i32[1], + active_array_size.data.i32[2], active_array_size.data.i32[3], + }; + UPDATE(md, ANDROID_SCALER_CROP_REGION, crop_region, ARRAY_SIZE(crop_region)); + + // android.sensor + UPDATE(md, ANDROID_SENSOR_TIMESTAMP, ×tamp, 1); + + // android.statistics + const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF; + UPDATE(md, ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMapMode, 1); + + const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE; + UPDATE(md, ANDROID_STATISTICS_SCENE_FLICKER, &sceneFlicker, 1); + + return OK; +} + +#undef ARRAY_SIZE +#undef UPDATE + +} // namespace implementation +} // namespace V3_4 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android diff --git a/camera/device/3.4/default/ExternalCameraUtils.cpp b/camera/device/3.4/default/ExternalCameraUtils.cpp new file mode 100644 index 0000000000000000000000000000000000000000..2e2f73b362a13c0f93c4d1d616b36bd3e6929d64 --- /dev/null +++ b/camera/device/3.4/default/ExternalCameraUtils.cpp @@ -0,0 +1,295 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#define LOG_TAG "ExtCamUtils@3.4" +//#define LOG_NDEBUG 0 +#include + +#include +#include +#include +#include "ExternalCameraUtils.h" +#include "tinyxml2.h" // XML parsing + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_4 { +namespace implementation { + +V4L2Frame::V4L2Frame( + uint32_t w, uint32_t h, uint32_t fourcc, + int bufIdx, int fd, uint32_t dataSize, uint64_t offset) : + mWidth(w), mHeight(h), mFourcc(fourcc), + mBufferIndex(bufIdx), mFd(fd), mDataSize(dataSize), mOffset(offset) {} + +int V4L2Frame::map(uint8_t** data, size_t* dataSize) { + if (data == nullptr || dataSize == nullptr) { + ALOGI("%s: V4L2 buffer map bad argument: data %p, dataSize %p", + __FUNCTION__, data, dataSize); + return -EINVAL; + } + + std::lock_guard lk(mLock); + if (!mMapped) { + void* addr = mmap(NULL, mDataSize, PROT_READ, MAP_SHARED, mFd, mOffset); + if (addr == MAP_FAILED) { + ALOGE("%s: V4L2 buffer map failed: %s", __FUNCTION__, strerror(errno)); + return -EINVAL; + } + mData = static_cast(addr); + mMapped = true; + } + *data = mData; + *dataSize = mDataSize; + ALOGV("%s: V4L map FD %d, data %p size %zu", __FUNCTION__, mFd, mData, mDataSize); + return 0; +} + +int V4L2Frame::unmap() { + std::lock_guard lk(mLock); + if (mMapped) { + ALOGV("%s: V4L unmap data %p size %zu", __FUNCTION__, mData, mDataSize); + if (munmap(mData, mDataSize) != 0) { + ALOGE("%s: V4L2 buffer unmap failed: %s", __FUNCTION__, strerror(errno)); + return -EINVAL; + } + mMapped = false; + } + return 0; +} + +V4L2Frame::~V4L2Frame() { + unmap(); +} + +AllocatedFrame::AllocatedFrame( + uint32_t w, uint32_t h) : + mWidth(w), mHeight(h), mFourcc(V4L2_PIX_FMT_YUV420) {}; + +AllocatedFrame::~AllocatedFrame() {} + +int AllocatedFrame::allocate(YCbCrLayout* out) { + std::lock_guard lk(mLock); + if ((mWidth % 2) || (mHeight % 2)) { + ALOGE("%s: bad dimension %dx%d (not multiple of 2)", __FUNCTION__, mWidth, mHeight); + return -EINVAL; + } + + uint32_t dataSize = mWidth * mHeight * 3 / 2; // YUV420 + if (mData.size() != dataSize) { + mData.resize(dataSize); + } + + if (out != nullptr) { + out->y = mData.data(); + out->yStride = mWidth; + uint8_t* cbStart = mData.data() + mWidth * mHeight; + uint8_t* crStart = cbStart + mWidth * mHeight / 4; + out->cb = cbStart; + out->cr = crStart; + out->cStride = mWidth / 2; + out->chromaStep = 1; + } + return 0; +} + +int AllocatedFrame::getLayout(YCbCrLayout* out) { + IMapper::Rect noCrop = {0, 0, + static_cast(mWidth), + static_cast(mHeight)}; + return getCroppedLayout(noCrop, out); +} + +int AllocatedFrame::getCroppedLayout(const IMapper::Rect& rect, YCbCrLayout* out) { + if (out == nullptr) { + ALOGE("%s: null out", __FUNCTION__); + return -1; + } + + std::lock_guard lk(mLock); + if ((rect.left + rect.width) > static_cast(mWidth) || + (rect.top + rect.height) > static_cast(mHeight) || + (rect.left % 2) || (rect.top % 2) || (rect.width % 2) || (rect.height % 2)) { + ALOGE("%s: bad rect left %d top %d w %d h %d", __FUNCTION__, + rect.left, rect.top, rect.width, rect.height); + return -1; + } + + out->y = mData.data() + mWidth * rect.top + rect.left; + out->yStride = mWidth; + uint8_t* cbStart = mData.data() + mWidth * mHeight; + uint8_t* crStart = cbStart + mWidth * mHeight / 4; + out->cb = cbStart + mWidth * rect.top / 4 + rect.left / 2; + out->cr = crStart + mWidth * rect.top / 4 + rect.left / 2; + out->cStride = mWidth / 2; + out->chromaStep = 1; + return 0; +} + +bool isAspectRatioClose(float ar1, float ar2) { + const float 
kAspectRatioMatchThres = 0.025f; // This threshold is good enough to distinguish + // 4:3/16:9/20:9 + // 1.33 / 1.78 / 2 + return (std::abs(ar1 - ar2) < kAspectRatioMatchThres); +} + +double SupportedV4L2Format::FrameRate::getDouble() const { + return durationDenominator / static_cast(durationNumerator); +} + +} // namespace implementation +} // namespace V3_4 +} // namespace device + + +namespace external { +namespace common { + +namespace { + const int kDefaultJpegBufSize = 5 << 20; // 5MB + const int kDefaultNumVideoBuffer = 4; + const int kDefaultNumStillBuffer = 2; +} // anonymous namespace + +const char* ExternalCameraConfig::kDefaultCfgPath = "/vendor/etc/external_camera_config.xml"; + +ExternalCameraConfig ExternalCameraConfig::loadFromCfg(const char* cfgPath) { + using namespace tinyxml2; + ExternalCameraConfig ret; + + XMLDocument configXml; + XMLError err = configXml.LoadFile(cfgPath); + if (err != XML_SUCCESS) { + ALOGE("%s: Unable to load external camera config file '%s'. Error: %s", + __FUNCTION__, cfgPath, XMLDocument::ErrorIDToName(err)); + return ret; + } else { + ALOGI("%s: load external camera config succeed!", __FUNCTION__); + } + + XMLElement *extCam = configXml.FirstChildElement("ExternalCamera"); + if (extCam == nullptr) { + ALOGI("%s: no external camera config specified", __FUNCTION__); + return ret; + } + + XMLElement *providerCfg = extCam->FirstChildElement("Provider"); + if (providerCfg == nullptr) { + ALOGI("%s: no external camera provider config specified", __FUNCTION__); + return ret; + } + + XMLElement *ignore = providerCfg->FirstChildElement("ignore"); + if (ignore == nullptr) { + ALOGI("%s: no internal ignored device specified", __FUNCTION__); + return ret; + } + + XMLElement *id = ignore->FirstChildElement("id"); + while (id != nullptr) { + const char* text = id->GetText(); + if (text != nullptr) { + ret.mInternalDevices.insert(text); + ALOGI("%s: device %s will be ignored by external camera provider", + __FUNCTION__, text); + } + id = id->NextSiblingElement("id"); + } + + XMLElement *deviceCfg = extCam->FirstChildElement("Device"); + if (deviceCfg == nullptr) { + ALOGI("%s: no external camera device config specified", __FUNCTION__); + return ret; + } + + XMLElement *jpegBufSz = deviceCfg->FirstChildElement("MaxJpegBufferSize"); + if (jpegBufSz == nullptr) { + ALOGI("%s: no max jpeg buffer size specified", __FUNCTION__); + } else { + ret.maxJpegBufSize = jpegBufSz->UnsignedAttribute("bytes", /*Default*/kDefaultJpegBufSize); + } + + XMLElement *numVideoBuf = deviceCfg->FirstChildElement("NumVideoBuffers"); + if (numVideoBuf == nullptr) { + ALOGI("%s: no num video buffers specified", __FUNCTION__); + } else { + ret.numVideoBuffers = + numVideoBuf->UnsignedAttribute("count", /*Default*/kDefaultNumVideoBuffer); + } + + XMLElement *numStillBuf = deviceCfg->FirstChildElement("NumStillBuffers"); + if (numStillBuf == nullptr) { + ALOGI("%s: no num still buffers specified", __FUNCTION__); + } else { + ret.numStillBuffers = + numStillBuf->UnsignedAttribute("count", /*Default*/kDefaultNumStillBuffer); + } + + XMLElement *fpsList = deviceCfg->FirstChildElement("FpsList"); + if (fpsList == nullptr) { + ALOGI("%s: no fps list specified", __FUNCTION__); + } else { + std::vector limits; + XMLElement *row = fpsList->FirstChildElement("Limit"); + while (row != nullptr) { + FpsLimitation prevLimit {{0, 0}, 1000.0}; + FpsLimitation limit; + limit.size = { + row->UnsignedAttribute("width", /*Default*/0), + row->UnsignedAttribute("height", /*Default*/0)}; + limit.fpsUpperBound 
= row->DoubleAttribute("fpsBound", /*Default*/1000.0); + if (limit.size.width <= prevLimit.size.width || + limit.size.height <= prevLimit.size.height || + limit.fpsUpperBound >= prevLimit.fpsUpperBound) { + ALOGE("%s: FPS limit list must have increasing size and decreasing fps!" + " Prev %dx%d@%f, Current %dx%d@%f", __FUNCTION__, + prevLimit.size.width, prevLimit.size.height, prevLimit.fpsUpperBound, + limit.size.width, limit.size.height, limit.fpsUpperBound); + return ret; + } + limits.push_back(limit); + row = row->NextSiblingElement("Limit"); + } + ret.fpsLimits = limits; + } + + ALOGI("%s: external camera cfg loaded: maxJpgBufSize %d," + " num video buffers %d, num still buffers %d", + __FUNCTION__, ret.maxJpegBufSize, + ret.numVideoBuffers, ret.numStillBuffers); + for (const auto& limit : ret.fpsLimits) { + ALOGI("%s: fpsLimitList: %dx%d@%f", __FUNCTION__, + limit.size.width, limit.size.height, limit.fpsUpperBound); + } + return ret; +} + +ExternalCameraConfig::ExternalCameraConfig() : + maxJpegBufSize(kDefaultJpegBufSize), + numVideoBuffers(kDefaultNumVideoBuffer), + numStillBuffers(kDefaultNumStillBuffer) { + fpsLimits.push_back({/*Size*/{ 640, 480}, /*FPS upper bound*/30.0}); + fpsLimits.push_back({/*Size*/{1280, 720}, /*FPS upper bound*/7.5}); + fpsLimits.push_back({/*Size*/{1920, 1080}, /*FPS upper bound*/5.0}); +} + + +} // namespace common +} // namespace external +} // namespace camera +} // namespace hardware +} // namespace android diff --git a/camera/device/3.4/default/OWNERS b/camera/device/3.4/default/OWNERS new file mode 100644 index 0000000000000000000000000000000000000000..18acfee14555ac44b0fb66f42f150160ea75add1 --- /dev/null +++ b/camera/device/3.4/default/OWNERS @@ -0,0 +1,6 @@ +cychen@google.com +epeev@google.com +etalvala@google.com +shuzhenwang@google.com +yinchiayeh@google.com +zhijunhe@google.com diff --git a/camera/device/3.4/default/convert.cpp b/camera/device/3.4/default/convert.cpp new file mode 100644 index 0000000000000000000000000000000000000000..f12230ca2ba2b6fb29c0a53c1170d07c932c8beb --- /dev/null +++ b/camera/device/3.4/default/convert.cpp @@ -0,0 +1,59 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "android.hardware.camera.device@3.4-convert-impl" +#include + +#include +#include "include/convert.h" + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_4 { +namespace implementation { + +using ::android::hardware::graphics::common::V1_0::Dataspace; +using ::android::hardware::graphics::common::V1_0::PixelFormat; +using ::android::hardware::camera::device::V3_2::BufferUsageFlags; + +void convertToHidl(const Camera3Stream* src, HalStream* dst) { + V3_3::implementation::convertToHidl(src, &dst->v3_3); + dst->physicalCameraId = src->physical_camera_id; +} + +void convertToHidl(const camera3_stream_configuration_t& src, HalStreamConfiguration* dst) { + dst->streams.resize(src.num_streams); + for (uint32_t i = 0; i < src.num_streams; i++) { + convertToHidl(static_cast(src.streams[i]), &dst->streams[i]); + } + return; +} + +void convertFromHidl(const Stream &src, Camera3Stream* dst) { + V3_2::implementation::convertFromHidl(src.v3_2, dst); + // Initialize physical_camera_id + dst->physical_camera_id = nullptr; + return; +} + +} // namespace implementation +} // namespace V3_4 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android diff --git a/camera/device/3.4/default/include/convert.h b/camera/device/3.4/default/include/convert.h new file mode 100644 index 0000000000000000000000000000000000000000..e8e39513b5527cb14885ca99ac62c9d1e4e485b4 --- /dev/null +++ b/camera/device/3.4/default/include/convert.h @@ -0,0 +1,46 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef HARDWARE_INTERFACES_CAMERA_DEVICE_V3_4_DEFAULT_INCLUDE_CONVERT_H_ +#define HARDWARE_INTERFACES_CAMERA_DEVICE_V3_4_DEFAULT_INCLUDE_CONVERT_H_ + +#include +#include "hardware/camera3.h" +#include "../../3.3/default/include/convert.h" + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_4 { +namespace implementation { + +using ::android::hardware::camera::device::V3_2::implementation::Camera3Stream; + +void convertToHidl(const Camera3Stream* src, HalStream* dst); + +void convertToHidl(const camera3_stream_configuration_t& src, HalStreamConfiguration* dst); + +void convertFromHidl(const Stream &src, Camera3Stream* dst); + +} // namespace implementation +} // namespace V3_4 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // HARDWARE_INTERFACES_CAMERA_DEVICE_V3_4_DEFAULT_INCLUDE_CONVERT_H_ diff --git a/camera/device/3.4/default/include/device_v3_4_impl/CameraDeviceSession.h b/camera/device/3.4/default/include/device_v3_4_impl/CameraDeviceSession.h new file mode 100644 index 0000000000000000000000000000000000000000..5d6a112e377aa109c82715b643f919b4dfbc34e8 --- /dev/null +++ b/camera/device/3.4/default/include/device_v3_4_impl/CameraDeviceSession.h @@ -0,0 +1,190 @@ +/* + * Copyright (C) 2017-2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_CAMERADEVICE3SESSION_H +#define ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_CAMERADEVICE3SESSION_H + +#include +#include +#include +#include <../../3.3/default/CameraDeviceSession.h> +#include <../../3.3/default/include/convert.h> +#include +#include +#include +#include +#include +#include +#include +#include "CameraMetadata.h" +#include "HandleImporter.h" +#include "hardware/camera3.h" +#include "hardware/camera_common.h" +#include "utils/Mutex.h" + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_4 { +namespace implementation { + +using namespace ::android::hardware::camera::device; +using ::android::hardware::camera::device::V3_2::CaptureRequest; +using ::android::hardware::camera::device::V3_2::StreamType; +using ::android::hardware::camera::device::V3_4::StreamConfiguration; +using ::android::hardware::camera::device::V3_4::HalStreamConfiguration; +using ::android::hardware::camera::device::V3_4::ICameraDeviceSession; +using ::android::hardware::camera::device::V3_4::ICameraDeviceCallback; +using ::android::hardware::camera::common::V1_0::Status; +using ::android::hardware::camera::common::V1_0::helper::HandleImporter; +using ::android::hardware::kSynchronizedReadWrite; +using ::android::hardware::MessageQueue; +using ::android::hardware::MQDescriptorSync; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; +using ::android::Mutex; + +struct CameraDeviceSession : public V3_3::implementation::CameraDeviceSession { + + CameraDeviceSession(camera3_device_t*, + const camera_metadata_t* deviceInfo, + const sp&); + virtual ~CameraDeviceSession(); + + virtual sp getInterface() override { + return new TrampolineSessionInterface_3_4(this); + } + +protected: + // Methods from v3.3 and earlier will trampoline to inherited implementation + + Return configureStreams_3_4( + const StreamConfiguration& requestedConfiguration, + ICameraDeviceSession::configureStreams_3_4_cb _hidl_cb); + + bool preProcessConfigurationLocked_3_4( + const StreamConfiguration& requestedConfiguration, + camera3_stream_configuration_t *stream_list /*out*/, + hidl_vec *streams /*out*/); + void postProcessConfigurationLocked_3_4(const StreamConfiguration& requestedConfiguration); + + Return processCaptureRequest_3_4( + const hidl_vec& requests, + const hidl_vec& cachesToRemove, + ICameraDeviceSession::processCaptureRequest_3_4_cb _hidl_cb); + Status processOneCaptureRequest_3_4(const V3_4::CaptureRequest& request); + + std::map mPhysicalCameraIdMap; + + static V3_2::implementation::callbacks_process_capture_result_t sProcessCaptureResult_3_4; + static V3_2::implementation::callbacks_notify_t sNotify_3_4; + + class ResultBatcher_3_4 : public V3_3::implementation::CameraDeviceSession::ResultBatcher { + public: + ResultBatcher_3_4(const sp& callback); + void processCaptureResult_3_4(CaptureResult& result); + private: + void freeReleaseFences_3_4(hidl_vec&); + void processOneCaptureResult_3_4(CaptureResult& result); + void invokeProcessCaptureResultCallback_3_4(hidl_vec &results, + bool tryWriteFmq); + + sp mCallback_3_4; + } mResultBatcher_3_4; + + // Whether this camera device session is created with version 3.4 callback. + bool mHasCallback_3_4; + + // Physical camera ids for the logical multi-camera. Empty if this + // is not a logical multi-camera. 
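+    // (Presumably populated from the device's logical multi-camera static
+    // metadata; used when validating the physicalCameraId fields carried by
+    // @3.4 streams and per-physical-camera capture settings. This note is an
+    // editorial assumption, not text from the change itself.)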
+ std::unordered_set mPhysicalCameraIds; +private: + + struct TrampolineSessionInterface_3_4 : public ICameraDeviceSession { + TrampolineSessionInterface_3_4(sp parent) : + mParent(parent) {} + + virtual Return constructDefaultRequestSettings( + V3_2::RequestTemplate type, + V3_3::ICameraDeviceSession::constructDefaultRequestSettings_cb _hidl_cb) override { + return mParent->constructDefaultRequestSettings(type, _hidl_cb); + } + + virtual Return configureStreams( + const V3_2::StreamConfiguration& requestedConfiguration, + V3_3::ICameraDeviceSession::configureStreams_cb _hidl_cb) override { + return mParent->configureStreams(requestedConfiguration, _hidl_cb); + } + + virtual Return processCaptureRequest_3_4(const hidl_vec& requests, + const hidl_vec& cachesToRemove, + ICameraDeviceSession::processCaptureRequest_3_4_cb _hidl_cb) override { + return mParent->processCaptureRequest_3_4(requests, cachesToRemove, _hidl_cb); + } + + virtual Return processCaptureRequest(const hidl_vec& requests, + const hidl_vec& cachesToRemove, + V3_3::ICameraDeviceSession::processCaptureRequest_cb _hidl_cb) override { + return mParent->processCaptureRequest(requests, cachesToRemove, _hidl_cb); + } + + virtual Return getCaptureRequestMetadataQueue( + V3_3::ICameraDeviceSession::getCaptureRequestMetadataQueue_cb _hidl_cb) override { + return mParent->getCaptureRequestMetadataQueue(_hidl_cb); + } + + virtual Return getCaptureResultMetadataQueue( + V3_3::ICameraDeviceSession::getCaptureResultMetadataQueue_cb _hidl_cb) override { + return mParent->getCaptureResultMetadataQueue(_hidl_cb); + } + + virtual Return flush() override { + return mParent->flush(); + } + + virtual Return close() override { + return mParent->close(); + } + + virtual Return configureStreams_3_3( + const V3_2::StreamConfiguration& requestedConfiguration, + configureStreams_3_3_cb _hidl_cb) override { + return mParent->configureStreams_3_3(requestedConfiguration, _hidl_cb); + } + + virtual Return configureStreams_3_4( + const StreamConfiguration& requestedConfiguration, + configureStreams_3_4_cb _hidl_cb) override { + return mParent->configureStreams_3_4(requestedConfiguration, _hidl_cb); + } + + private: + sp mParent; + }; +}; + +} // namespace implementation +} // namespace V3_4 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_CAMERADEVICE3SESSION_H diff --git a/camera/device/3.4/default/include/device_v3_4_impl/CameraDevice_3_4.h b/camera/device/3.4/default/include/device_v3_4_impl/CameraDevice_3_4.h new file mode 100644 index 0000000000000000000000000000000000000000..95ee20e35d546096aa8ba3c7cb30531b0560f8de --- /dev/null +++ b/camera/device/3.4/default/include/device_v3_4_impl/CameraDevice_3_4.h @@ -0,0 +1,75 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_CAMERADEVICE_H +#define ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_CAMERADEVICE_H + +#include "utils/Mutex.h" +#include "CameraModule.h" +#include "CameraMetadata.h" +#include "CameraDeviceSession.h" +#include <../../3.2/default/CameraDevice_3_2.h> + +#include +#include +#include + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_4 { +namespace implementation { + +using namespace ::android::hardware::camera::device; +using ::android::hardware::camera::common::V1_0::helper::CameraModule; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; + +/* + * The camera device HAL implementation is opened lazily (via the open call) + */ +struct CameraDevice : public V3_2::implementation::CameraDevice { + + // Called by provider HAL. + // Provider HAL must ensure the uniqueness of CameraDevice object per cameraId, or there could + // be multiple CameraDevice trying to access the same physical camera. Also, provider will have + // to keep track of all CameraDevice objects in order to notify CameraDevice when the underlying + // camera is detached. + // Delegates nearly all work to CameraDevice_3_2 + CameraDevice(sp module, + const std::string& cameraId, + const SortedVector>& cameraDeviceNames); + ~CameraDevice(); + +protected: + virtual sp createSession(camera3_device_t*, + const camera_metadata_t* deviceInfo, + const sp&) override; + +}; + +} // namespace implementation +} // namespace V3_4 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_CAMERADEVICE_H diff --git a/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDeviceSession.h b/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDeviceSession.h new file mode 100644 index 0000000000000000000000000000000000000000..0b94c112222fcfc676615692a495e08a3d826ba8 --- /dev/null +++ b/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDeviceSession.h @@ -0,0 +1,442 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMERADEVICE3SESSION_H +#define ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMERADEVICE3SESSION_H + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "CameraMetadata.h" +#include "HandleImporter.h" +#include "Exif.h" +#include "utils/KeyedVector.h" +#include "utils/Mutex.h" +#include "utils/Thread.h" +#include "android-base/unique_fd.h" +#include "ExternalCameraUtils.h" + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_4 { +namespace implementation { + +using ::android::hardware::camera::device::V3_2::BufferCache; +using ::android::hardware::camera::device::V3_2::BufferStatus; +using ::android::hardware::camera::device::V3_2::CameraMetadata; +using ::android::hardware::camera::device::V3_2::CaptureRequest; +using ::android::hardware::camera::device::V3_2::CaptureResult; +using ::android::hardware::camera::device::V3_2::ErrorCode; +using ::android::hardware::camera::device::V3_2::ICameraDeviceCallback; +using ::android::hardware::camera::device::V3_2::MsgType; +using ::android::hardware::camera::device::V3_2::NotifyMsg; +using ::android::hardware::camera::device::V3_2::RequestTemplate; +using ::android::hardware::camera::device::V3_2::Stream; +using ::android::hardware::camera::device::V3_4::StreamConfiguration; +using ::android::hardware::camera::device::V3_2::StreamConfigurationMode; +using ::android::hardware::camera::device::V3_2::StreamRotation; +using ::android::hardware::camera::device::V3_2::StreamType; +using ::android::hardware::camera::device::V3_2::DataspaceFlags; +using ::android::hardware::camera::device::V3_2::CameraBlob; +using ::android::hardware::camera::device::V3_2::CameraBlobId; +using ::android::hardware::camera::device::V3_4::HalStreamConfiguration; +using ::android::hardware::camera::device::V3_4::ICameraDeviceSession; +using ::android::hardware::camera::common::V1_0::Status; +using ::android::hardware::camera::common::V1_0::helper::HandleImporter; +using ::android::hardware::camera::common::V1_0::helper::ExifUtils; +using ::android::hardware::camera::external::common::ExternalCameraConfig; +using ::android::hardware::camera::external::common::Size; +using ::android::hardware::camera::external::common::SizeHasher; +using ::android::hardware::graphics::common::V1_0::BufferUsage; +using ::android::hardware::graphics::common::V1_0::Dataspace; +using ::android::hardware::graphics::common::V1_0::PixelFormat; +using ::android::hardware::kSynchronizedReadWrite; +using ::android::hardware::MessageQueue; +using ::android::hardware::MQDescriptorSync; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; +using ::android::Mutex; +using ::android::base::unique_fd; + +struct ExternalCameraDeviceSession : public virtual RefBase { + + ExternalCameraDeviceSession(const sp&, + const ExternalCameraConfig& cfg, + const std::vector& sortedFormats, + const CroppingType& croppingType, + const common::V1_0::helper::CameraMetadata& chars, + const std::string& cameraId, + unique_fd v4l2Fd); + virtual ~ExternalCameraDeviceSession(); + // Call by CameraDevice to dump active device states + void dumpState(const native_handle_t*); + // Caller must use this method to check if CameraDeviceSession ctor failed + bool isInitFailed() { return mInitFail; } + bool isClosed(); + + // Retrieve the HIDL interface, split into 
its own class to avoid inheritance issues when + // dealing with minor version revs and simultaneous implementation and interface inheritance + virtual sp getInterface() { + return new TrampolineSessionInterface_3_4(this); + } + + static const int kMaxProcessedStream = 2; + static const int kMaxStallStream = 1; + static const uint32_t kMaxBytesPerPixel = 2; + +protected: + + // Methods from ::android::hardware::camera::device::V3_2::ICameraDeviceSession follow + + Return constructDefaultRequestSettings( + RequestTemplate, + ICameraDeviceSession::constructDefaultRequestSettings_cb _hidl_cb); + + Return configureStreams( + const V3_2::StreamConfiguration&, + ICameraDeviceSession::configureStreams_cb); + + Return getCaptureRequestMetadataQueue( + ICameraDeviceSession::getCaptureRequestMetadataQueue_cb); + + Return getCaptureResultMetadataQueue( + ICameraDeviceSession::getCaptureResultMetadataQueue_cb); + + Return processCaptureRequest( + const hidl_vec&, + const hidl_vec&, + ICameraDeviceSession::processCaptureRequest_cb); + + Return flush(); + Return close(); + + Return configureStreams_3_3( + const V3_2::StreamConfiguration&, + ICameraDeviceSession::configureStreams_3_3_cb); + + Return configureStreams_3_4( + const V3_4::StreamConfiguration& requestedConfiguration, + ICameraDeviceSession::configureStreams_3_4_cb _hidl_cb); + + Return processCaptureRequest_3_4( + const hidl_vec& requests, + const hidl_vec& cachesToRemove, + ICameraDeviceSession::processCaptureRequest_3_4_cb _hidl_cb); + +protected: + struct HalStreamBuffer { + int32_t streamId; + uint64_t bufferId; + uint32_t width; + uint32_t height; + PixelFormat format; + V3_2::BufferUsageFlags usage; + buffer_handle_t* bufPtr; + int acquireFence; + bool fenceTimeout; + }; + + struct HalRequest { + uint32_t frameNumber; + common::V1_0::helper::CameraMetadata setting; + sp frameIn; + nsecs_t shutterTs; + std::vector buffers; + }; + + Status constructDefaultRequestSettingsRaw(RequestTemplate type, + V3_2::CameraMetadata *outMetadata); + + bool initialize(); + Status initStatus() const; + status_t initDefaultRequests(); + status_t fillCaptureResult(common::V1_0::helper::CameraMetadata& md, nsecs_t timestamp); + Status configureStreams(const V3_2::StreamConfiguration&, V3_3::HalStreamConfiguration* out); + // fps = 0.0 means default, which is + // slowest fps that is at least 30, or fastest fps if 30 is not supported + int configureV4l2StreamLocked(const SupportedV4L2Format& fmt, double fps = 0.0); + int v4l2StreamOffLocked(); + int setV4l2FpsLocked(double fps); + + // TODO: change to unique_ptr for better tracking + sp dequeueV4l2FrameLocked(/*out*/nsecs_t* shutterTs); // Called with mLock hold + void enqueueV4l2Frame(const sp&); + + // Check if input Stream is one of supported stream setting on this device + bool isSupported(const Stream&); + + // Validate and import request's output buffers and acquire fence + Status importRequest( + const CaptureRequest& request, + hidl_vec& allBufPtrs, + hidl_vec& allFences); + static void cleanupInflightFences( + hidl_vec& allFences, size_t numFences); + void cleanupBuffersLocked(int id); + void updateBufferCaches(const hidl_vec& cachesToRemove); + + Status processOneCaptureRequest(const CaptureRequest& request); + + Status processCaptureResult(std::shared_ptr&); + Status processCaptureRequestError(const std::shared_ptr&); + void notifyShutter(uint32_t frameNumber, nsecs_t shutterTs); + void notifyError(uint32_t frameNumber, int32_t streamId, ErrorCode ec); + void invokeProcessCaptureResultCallback( + 
hidl_vec &results, bool tryWriteFmq); + static void freeReleaseFences(hidl_vec&); + + Size getMaxJpegResolution() const; + Size getMaxThumbResolution() const; + + ssize_t getJpegBufferSize(uint32_t width, uint32_t height) const; + + int waitForV4L2BufferReturnLocked(std::unique_lock& lk); + + class OutputThread : public android::Thread { + public: + OutputThread(wp parent, CroppingType); + ~OutputThread(); + + Status allocateIntermediateBuffers( + const Size& v4lSize, const Size& thumbSize, + const hidl_vec& streams); + Status submitRequest(const std::shared_ptr&); + void flush(); + void dump(int fd); + virtual bool threadLoop() override; + + void setExifMakeModel(const std::string& make, const std::string& model); + private: + static const uint32_t FLEX_YUV_GENERIC = static_cast('F') | + static_cast('L') << 8 | static_cast('E') << 16 | + static_cast('X') << 24; + // returns FLEX_YUV_GENERIC for formats other than YV12/YU12/NV12/NV21 + static uint32_t getFourCcFromLayout(const YCbCrLayout&); + static int getCropRect( + CroppingType ct, const Size& inSize, const Size& outSize, IMapper::Rect* out); + + static const int kFlushWaitTimeoutSec = 3; // 3 sec + static const int kReqWaitTimeoutMs = 33; // 33ms + static const int kReqWaitTimesMax = 90; // 33ms * 90 ~= 3 sec + + void waitForNextRequest(std::shared_ptr* out); + void signalRequestDone(); + + int cropAndScaleLocked( + sp& in, const Size& outSize, + YCbCrLayout* out); + + int cropAndScaleThumbLocked( + sp& in, const Size& outSize, + YCbCrLayout* out); + + int formatConvertLocked(const YCbCrLayout& in, const YCbCrLayout& out, + Size sz, uint32_t format); + + static int encodeJpegYU12(const Size &inSz, + const YCbCrLayout& inLayout, int jpegQuality, + const void *app1Buffer, size_t app1Size, + void *out, size_t maxOutSize, + size_t &actualCodeSize); + + int createJpegLocked(HalStreamBuffer &halBuf, const std::shared_ptr& req); + + const wp mParent; + const CroppingType mCroppingType; + + mutable std::mutex mRequestListLock; // Protect acccess to mRequestList, + // mProcessingRequest and mProcessingFrameNumer + std::condition_variable mRequestCond; // signaled when a new request is submitted + std::condition_variable mRequestDoneCond; // signaled when a request is done processing + std::list> mRequestList; + bool mProcessingRequest = false; + uint32_t mProcessingFrameNumer = 0; + + // V4L2 frameIn + // (MJPG decode)-> mYu12Frame + // (Scale)-> mScaledYu12Frames + // (Format convert) -> output gralloc frames + mutable std::mutex mBufferLock; // Protect access to intermediate buffers + sp mYu12Frame; + sp mYu12ThumbFrame; + std::unordered_map, SizeHasher> mIntermediateBuffers; + std::unordered_map, SizeHasher> mScaledYu12Frames; + YCbCrLayout mYu12FrameLayout; + YCbCrLayout mYu12ThumbFrameLayout; + + std::string mExifMake; + std::string mExifModel; + }; + + // Protect (most of) HIDL interface methods from synchronized-entering + mutable Mutex mInterfaceLock; + + mutable Mutex mLock; // Protect all private members except otherwise noted + const sp mCallback; + const ExternalCameraConfig& mCfg; + const common::V1_0::helper::CameraMetadata mCameraCharacteristics; + const std::vector mSupportedFormats; + const CroppingType mCroppingType; + const std::string& mCameraId; + + // Not protected by mLock, this is almost a const. 
+ // Setup in constructor, reset in close() after OutputThread is joined + unique_fd mV4l2Fd; + + // device is closed either + // - closed by user + // - init failed + // - camera disconnected + bool mClosed = false; + bool mInitFail = false; + bool mFirstRequest = false; + common::V1_0::helper::CameraMetadata mLatestReqSetting; + + bool mV4l2Streaming = false; + SupportedV4L2Format mV4l2StreamingFmt; + double mV4l2StreamingFps = 0.0; + size_t mV4L2BufferCount = 0; + + static const int kBufferWaitTimeoutSec = 3; // TODO: handle long exposure (or not allowing) + std::mutex mV4l2BufferLock; // protect the buffer count and condition below + std::condition_variable mV4L2BufferReturned; + size_t mNumDequeuedV4l2Buffers = 0; + uint32_t mMaxV4L2BufferSize = 0; + + // Not protected by mLock (but might be used when mLock is locked) + sp mOutputThread; + + // Stream ID -> Camera3Stream cache + std::unordered_map mStreamMap; + + std::mutex mInflightFramesLock; // protect mInflightFrames + std::unordered_set mInflightFrames; + + // buffers currently circulating between HAL and camera service + // key: bufferId sent via HIDL interface + // value: imported buffer_handle_t + // Buffer will be imported during processCaptureRequest and will be freed + // when the its stream is deleted or camera device session is closed + typedef std::unordered_map CirculatingBuffers; + // Stream ID -> circulating buffers map + std::map mCirculatingBuffers; + + std::mutex mAfTriggerLock; // protect mAfTrigger + bool mAfTrigger = false; + + static HandleImporter sHandleImporter; + + /* Beginning of members not changed after initialize() */ + using RequestMetadataQueue = MessageQueue; + std::unique_ptr mRequestMetadataQueue; + using ResultMetadataQueue = MessageQueue; + std::shared_ptr mResultMetadataQueue; + + // Protect against invokeProcessCaptureResultCallback() + Mutex mProcessCaptureResultLock; + + std::unordered_map mDefaultRequests; + + const Size mMaxThumbResolution; + const Size mMaxJpegResolution; + /* End of members not changed after initialize() */ + +private: + + struct TrampolineSessionInterface_3_4 : public ICameraDeviceSession { + TrampolineSessionInterface_3_4(sp parent) : + mParent(parent) {} + + virtual Return constructDefaultRequestSettings( + RequestTemplate type, + V3_3::ICameraDeviceSession::constructDefaultRequestSettings_cb _hidl_cb) override { + return mParent->constructDefaultRequestSettings(type, _hidl_cb); + } + + virtual Return configureStreams( + const V3_2::StreamConfiguration& requestedConfiguration, + V3_3::ICameraDeviceSession::configureStreams_cb _hidl_cb) override { + return mParent->configureStreams(requestedConfiguration, _hidl_cb); + } + + virtual Return processCaptureRequest(const hidl_vec& requests, + const hidl_vec& cachesToRemove, + V3_3::ICameraDeviceSession::processCaptureRequest_cb _hidl_cb) override { + return mParent->processCaptureRequest(requests, cachesToRemove, _hidl_cb); + } + + virtual Return getCaptureRequestMetadataQueue( + V3_3::ICameraDeviceSession::getCaptureRequestMetadataQueue_cb _hidl_cb) override { + return mParent->getCaptureRequestMetadataQueue(_hidl_cb); + } + + virtual Return getCaptureResultMetadataQueue( + V3_3::ICameraDeviceSession::getCaptureResultMetadataQueue_cb _hidl_cb) override { + return mParent->getCaptureResultMetadataQueue(_hidl_cb); + } + + virtual Return flush() override { + return mParent->flush(); + } + + virtual Return close() override { + return mParent->close(); + } + + virtual Return configureStreams_3_3( + const 
V3_2::StreamConfiguration& requestedConfiguration, + configureStreams_3_3_cb _hidl_cb) override { + return mParent->configureStreams_3_3(requestedConfiguration, _hidl_cb); + } + + virtual Return configureStreams_3_4( + const V3_4::StreamConfiguration& requestedConfiguration, + configureStreams_3_4_cb _hidl_cb) override { + return mParent->configureStreams_3_4(requestedConfiguration, _hidl_cb); + } + + virtual Return processCaptureRequest_3_4(const hidl_vec& requests, + const hidl_vec& cachesToRemove, + ICameraDeviceSession::processCaptureRequest_3_4_cb _hidl_cb) override { + return mParent->processCaptureRequest_3_4(requests, cachesToRemove, _hidl_cb); + } + + private: + sp mParent; + }; +}; + +} // namespace implementation +} // namespace V3_4 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMERADEVICE3SESSION_H diff --git a/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDevice_3_4.h b/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDevice_3_4.h new file mode 100644 index 0000000000000000000000000000000000000000..a52f0e4ccb769e734b37f40827a934fdcfc8f3fb --- /dev/null +++ b/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraDevice_3_4.h @@ -0,0 +1,123 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMERADEVICE_H +#define ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMERADEVICE_H + +#include "utils/Mutex.h" +#include "CameraMetadata.h" + +#include +#include +#include +#include "ExternalCameraDeviceSession.h" + +namespace android { +namespace hardware { +namespace camera { +namespace device { +namespace V3_4 { +namespace implementation { + +using namespace ::android::hardware::camera::device; +using ::android::hardware::camera::device::V3_2::ICameraDevice; +using ::android::hardware::camera::device::V3_2::ICameraDeviceCallback; +using ::android::hardware::camera::common::V1_0::CameraResourceCost; +using ::android::hardware::camera::common::V1_0::TorchMode; +using ::android::hardware::camera::common::V1_0::Status; +using ::android::hardware::camera::external::common::ExternalCameraConfig; +using ::android::hardware::camera::external::common::Size; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; + +/* + * The camera device HAL implementation is opened lazily (via the open call) + */ +struct ExternalCameraDevice : public ICameraDevice { + + // Called by external camera provider HAL. + // Provider HAL must ensure the uniqueness of CameraDevice object per cameraId, or there could + // be multiple CameraDevice trying to access the same physical camera. Also, provider will have + // to keep track of all CameraDevice objects in order to notify CameraDevice when the underlying + // camera is detached. 
+ ExternalCameraDevice(const std::string& cameraId, const ExternalCameraConfig& cfg); + ~ExternalCameraDevice(); + + // Caller must use this method to check if CameraDevice ctor failed + bool isInitFailed(); + + /* Methods from ::android::hardware::camera::device::V3_2::ICameraDevice follow. */ + // The following method can be called without opening the actual camera device + Return getResourceCost(getResourceCost_cb _hidl_cb) override; + + Return getCameraCharacteristics(getCameraCharacteristics_cb _hidl_cb) override; + + Return setTorchMode(TorchMode) override; + + // Open the device HAL and also return a default capture session + Return open(const sp&, open_cb) override; + + // Forward the dump call to the opened session, or do nothing + Return dumpState(const ::android::hardware::hidl_handle&) override; + /* End of Methods from ::android::hardware::camera::device::V3_2::ICameraDevice */ + +protected: + // Init supported w/h/format/fps in mSupportedFormats. Caller still owns fd + void initSupportedFormatsLocked(int fd); + + status_t initCameraCharacteristics(); + // Init non-device dependent keys + status_t initDefaultCharsKeys(::android::hardware::camera::common::V1_0::helper::CameraMetadata*); + // Init camera control chars keys. Caller still owns fd + status_t initCameraControlsCharsKeys(int fd, + ::android::hardware::camera::common::V1_0::helper::CameraMetadata*); + // Init camera output configuration related keys. Caller still owns fd + status_t initOutputCharsKeys(int fd, + ::android::hardware::camera::common::V1_0::helper::CameraMetadata*); + + static void getFrameRateList(int fd, double fpsUpperBound, SupportedV4L2Format* format); + + // Get candidate supported formats list of input cropping type. + static std::vector getCandidateSupportedFormatsLocked( + int fd, CroppingType cropType, + const std::vector& fpsLimits); + // Trim supported format list by the cropping type. Also sort output formats by width/height + static void trimSupportedFormats(CroppingType cropType, + /*inout*/std::vector* pFmts); + + Mutex mLock; + bool mInitFailed = false; + std::string mCameraId; + const ExternalCameraConfig& mCfg; + std::vector mSupportedFormats; + CroppingType mCroppingType; + + wp mSession = nullptr; + + ::android::hardware::camera::common::V1_0::helper::CameraMetadata mCameraCharacteristics; +}; + +} // namespace implementation +} // namespace V3_4 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMERADEVICE_H diff --git a/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraUtils.h b/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraUtils.h new file mode 100644 index 0000000000000000000000000000000000000000..37e7cfb32e56635844717478e3342a3f658fcb55 --- /dev/null +++ b/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraUtils.h @@ -0,0 +1,163 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMUTIL_H +#define ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMUTIL_H + +#include +#include "utils/LightRefBase.h" +#include +#include +#include +#include + +using android::hardware::graphics::mapper::V2_0::IMapper; +using android::hardware::graphics::mapper::V2_0::YCbCrLayout; + +namespace android { +namespace hardware { +namespace camera { + +namespace external { +namespace common { + +struct Size { + uint32_t width; + uint32_t height; + + bool operator==(const Size& other) const { + return (width == other.width && height == other.height); + } +}; + +struct SizeHasher { + size_t operator()(const Size& sz) const { + size_t result = 1; + result = 31 * result + sz.width; + result = 31 * result + sz.height; + return result; + } +}; + +struct ExternalCameraConfig { + static const char* kDefaultCfgPath; + static ExternalCameraConfig loadFromCfg(const char* cfgPath = kDefaultCfgPath); + + // List of internal V4L2 video nodes external camera HAL must ignore. + std::unordered_set mInternalDevices; + + // Maximal size of a JPEG buffer, in bytes + uint32_t maxJpegBufSize; + + // Maximum Size that can sustain 30fps streaming + Size maxVideoSize; + + // Size of v4l2 buffer queue when streaming <= kMaxVideoSize + uint32_t numVideoBuffers; + + // Size of v4l2 buffer queue when streaming > kMaxVideoSize + uint32_t numStillBuffers; + + struct FpsLimitation { + Size size; + double fpsUpperBound; + }; + std::vector fpsLimits; + +private: + ExternalCameraConfig(); +}; + +} // common +} // external + +namespace device { +namespace V3_4 { +namespace implementation { + +struct SupportedV4L2Format { + uint32_t width; + uint32_t height; + uint32_t fourcc; + // All supported frame rate for this w/h/fourcc combination + struct FrameRate { + uint32_t durationNumerator; // frame duration numerator. Ex: 1 + uint32_t durationDenominator; // frame duration denominator. Ex: 30 + double getDouble() const; // FrameRate in double. Ex: 30.0 + }; + std::vector frameRates; +}; + +// A class provide access to a dequeued V4L2 frame buffer (mostly in MJPG format) +// Also contains necessary information to enqueue the buffer back to V4L2 buffer queue +class V4L2Frame : public virtual VirtualLightRefBase { +public: + V4L2Frame(uint32_t w, uint32_t h, uint32_t fourcc, int bufIdx, int fd, + uint32_t dataSize, uint64_t offset); + ~V4L2Frame() override; + const uint32_t mWidth; + const uint32_t mHeight; + const uint32_t mFourcc; + const int mBufferIndex; // for later enqueue + int map(uint8_t** data, size_t* dataSize); + int unmap(); +private: + std::mutex mLock; + const int mFd; // used for mmap but doesn't claim ownership + const size_t mDataSize; + const uint64_t mOffset; // used for mmap + uint8_t* mData = nullptr; + bool mMapped = false; +}; + +// A RAII class representing a CPU allocated YUV frame used as intermeidate buffers +// when generating output images. +class AllocatedFrame : public virtual VirtualLightRefBase { +public: + AllocatedFrame(uint32_t w, uint32_t h); // TODO: use Size? 
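+    // Backing store is a single contiguous YU12 (I420) buffer of
+    // mWidth * mHeight * 3 / 2 bytes: the Y plane first (stride mWidth),
+    // followed by the Cb plane and then the Cr plane (stride mWidth / 2 each,
+    // chromaStep 1). allocate() sizes mData accordingly and requires even
+    // dimensions; getCroppedLayout() additionally requires an even, in-bounds rect.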
+ ~AllocatedFrame() override; + const uint32_t mWidth; + const uint32_t mHeight; + const uint32_t mFourcc; // Only support YU12 format for now + int allocate(YCbCrLayout* out = nullptr); + int getLayout(YCbCrLayout* out); + int getCroppedLayout(const IMapper::Rect&, YCbCrLayout* out); // return non-zero for bad input +private: + std::mutex mLock; + std::vector mData; +}; + +enum CroppingType { + HORIZONTAL = 0, + VERTICAL = 1 +}; + +// Aspect ratio is defined as width/height here and ExternalCameraDevice +// will guarantee all supported sizes has width >= height (so aspect ratio >= 1.0) +#define ASPECT_RATIO(sz) (static_cast((sz).width) / (sz).height) +const float kMaxAspectRatio = std::numeric_limits::max(); +const float kMinAspectRatio = 1.f; + +bool isAspectRatioClose(float ar1, float ar2); + +} // namespace implementation +} // namespace V3_4 +} // namespace device +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_CAMERA_DEVICE_V3_4_EXTCAMUTIL_H diff --git a/camera/device/3.4/types.hal b/camera/device/3.4/types.hal new file mode 100644 index 0000000000000000000000000000000000000000..bf2b3fcedc7ad1e16ae5c0b29b04faae7ed27e6c --- /dev/null +++ b/camera/device/3.4/types.hal @@ -0,0 +1,290 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.camera.device@3.4; + +import @3.2::RequestTemplate; +import @3.2::StreamConfigurationMode; +import @3.2::Stream; +import @3.3::HalStream; +import @3.2::CameraMetadata; +import @3.2::CaptureRequest; +import @3.2::CaptureResult; + +/** + * Stream: + * + * A descriptor for a single camera input or output stream. A stream is defined + * by the framework by its buffer resolution and format, and additionally by the + * HAL with the gralloc usage flags and the maximum in-flight buffer count. + * + * This version extends the @3.2 Stream with the physicalCameraId and bufferSize field. + */ +struct Stream { + /** + * The definition of Stream from the prior version + */ + @3.2::Stream v3_2; + + /** + * The physical camera id this stream belongs to. + * + * If the camera device is not a logical multi camera, or if the camera is a logical + * multi camera but the stream is not a physical output stream, this field will point to a + * 0-length string. + * + * A logical multi camera is a camera device backed by multiple physical cameras that + * are also exposed to the application. And for a logical multi camera, a physical output + * stream is an output stream specifically requested on an underlying physical camera. + * + * A logical camera is a camera device backed by multiple physical camera + * devices. And a physical stream is a stream specifically requested on a + * underlying physical camera device. + * + * For an input stream, this field is guaranteed to be a 0-length string. + * + * When not empty, this field is the field of one of the full-qualified device + * instance names returned by getCameraIdList(). 
+ */ + string physicalCameraId; + + /** + * The size of a buffer from this Stream, in bytes. + * + * For non PixelFormat::BLOB formats, this entry must be 0 and HAL should use + * android.hardware.graphics.mapper lockYCbCr API to get buffer layout. + * + * For BLOB format with dataSpace Dataspace::DEPTH, this must be zero and and HAL must + * determine the buffer size based on ANDROID_DEPTH_MAX_DEPTH_SAMPLES. + * + * For BLOB format with dataSpace Dataspace::JFIF, this must be non-zero and represent the + * maximal size HAL can lock using android.hardware.graphics.mapper lock API. + * + */ + uint32_t bufferSize; +}; + +/** + * StreamConfiguration: + * + * Identical to @3.2::StreamConfiguration, except that it contains session + * parameters, and the streams vector contains @3.4::Stream. + */ +struct StreamConfiguration { + /** + * An array of camera stream pointers, defining the input/output + * configuration for the camera HAL device. + */ + vec streams; + + /** + * The definition of operation mode from prior version. + * + */ + StreamConfigurationMode operationMode; + + /** + * Session wide camera parameters. + * + * The session parameters contain the initial values of any request keys that were + * made available via ANDROID_REQUEST_AVAILABLE_SESSION_KEYS. The Hal implementation + * can advertise any settings that can potentially introduce unexpected delays when + * their value changes during active process requests. Typical examples are + * parameters that trigger time-consuming HW re-configurations or internal camera + * pipeline updates. The field is optional, clients can choose to ignore it and avoid + * including any initial settings. If parameters are present, then hal must examine + * their values and configure the internal camera pipeline accordingly. + */ + CameraMetadata sessionParams; +}; + +/** + * HalStream: + * + * The camera HAL's response to each requested stream configuration. + * + * This version extends the @3.3 HalStream with the physicalCameraId + * field + */ +struct HalStream { + /** + * The definition of HalStream from the prior version. + */ + @3.3::HalStream v3_3; + + /** + * The physical camera id the current Hal stream belongs to. + * + * If current camera device isn't a logical camera, or the Hal stream isn't + * from a physical camera of the logical camera, this must be an empty + * string. + * + * A logical camera is a camera device backed by multiple physical camera + * devices. + * + * When not empty, this field is the field of one of the full-qualified device + * instance names returned by getCameraIdList(). + */ + string physicalCameraId; +}; + +/** + * HalStreamConfiguration: + * + * Identical to @3.3::HalStreamConfiguration, except that it contains @3.4::HalStream entries. + * + */ +struct HalStreamConfiguration { + vec streams; +}; + +/** + * PhysicalCameraSetting: + * + * Individual camera settings for logical camera backed by multiple physical devices. + * Clients are allowed to pass separate settings for each physical device that has + * corresponding configured HalStream and the respective stream id is present in the + * output buffers of the capture request. + */ +struct PhysicalCameraSetting { + /** + * If non-zero, read settings from request queue instead + * (see ICameraDeviceSession.getCaptureRequestMetadataQueue). + * If zero, read settings from .settings field. + */ + uint64_t fmqSettingsSize; + + /** + * Contains the physical device camera id. Any settings passed by client here + * should be applied for this physical device. 
In case the physical id is invalid or + * it is not present among the last configured streams, Hal should fail the process + * request and return Status::ILLEGAL_ARGUMENT. + */ + string physicalCameraId; + + /** + * If fmqSettingsSize is zero, the settings buffer contains the capture and + * processing parameters for the physical device with id 'physicalCameraId'. + * As a special case, an empty settings buffer indicates that the + * settings are identical to the most-recently submitted capture request. + * An empty buffer cannot be used as the first submitted request after + * a configureStreams() call. + * + * This field must be used if fmqSettingsSize is zero. It must not be used + * if fmqSettingsSize is non-zero. + */ + CameraMetadata settings; +}; + +/** + * CaptureRequest: + * + * A single request for image capture/buffer reprocessing, sent to the Camera + * HAL device by the framework in processCaptureRequest(). + * + * The request contains the settings to be used for this capture, and the set of + * output buffers to write the resulting image data in. It may optionally + * contain an input buffer, in which case the request is for reprocessing that + * input buffer instead of capturing a new image with the camera sensor. The + * capture is identified by the frameNumber. + * + * In response, the camera HAL device must send a CaptureResult + * structure asynchronously to the framework, using the processCaptureResult() + * callback. + * + * Identical to @3.2::CaptureRequest, except that it contains @3.4::physCamSettings vector. + * + */ +struct CaptureRequest { + /** + * The definition of CaptureRequest from prior version. + */ + @3.2::CaptureRequest v3_2; + + /** + * A vector containing individual camera settings for logical camera backed by multiple physical + * devices. In case the vector is empty, Hal should use the settings field in 'v3_2'. The + * individual settings should only be honored for physical devices that have respective Hal + * stream. Physical devices that have a corresponding Hal stream but don't have attached + * settings here should use the settings field in 'v3_2'. + * If any of the physical settings in the array are applied on one or more devices, then the + * visual effect on any Hal streams attached to the logical camera is undefined. + */ + vec physicalCameraSettings; +}; + +/** + * PhysicalCameraMetadata: + * + * Individual camera metadata for a physical camera as part of a logical + * multi-camera. Camera HAL should return one such metadata for each physical + * camera being requested on. + */ +struct PhysicalCameraMetadata { + /** + * If non-zero, read metadata from result metadata queue instead + * (see ICameraDeviceSession.getCaptureResultMetadataQueue). + * If zero, read metadata from .metadata field. + */ + uint64_t fmqMetadataSize; + + /** + * Contains the physical device camera id. As long as the corresponding + * processCaptureRequest requests on a particular physical camera stream, + * the metadata for that physical camera should be generated for the capture + * result. */ + string physicalCameraId; + + /** + * If fmqMetadataSize is zero, the metadata buffer contains the metadata + * for the physical device with physicalCameraId. + * + * The v3_2 CaptureResult metadata is read first from the FMQ, followed by + * the physical cameras' metadata starting from index 0. + */ + CameraMetadata metadata; +}; + +/** + * CaptureResult: + * + * Identical to @3.2::CaptureResult, except that it contains a list of + * physical camera metadata. 
+ * + * Physical camera metadata needs to be generated if and only if a + * request is pending on a stream from that physical camera. For example, + * if the processCaptureRequest call doesn't request on physical camera + * streams, the physicalCameraMetadata field of the CaptureResult being returned + * should be an 0-size vector. If the processCaptureRequest call requests on + * streams from one of the physical camera, the physicalCameraMetadata field + * should contain one metadata describing the capture from that physical camera. + * + * For a CaptureResult that contains physical camera metadata, its + * partialResult field must be android.request.partialResultCount. In other + * words, the physicalCameraMetadata must only be contained in a final capture + * result. + */ +struct CaptureResult { + /** + * The definition of CaptureResult from the prior version. + */ + @3.2::CaptureResult v3_2; + + /** + * The physical metadata for logical multi-camera. + */ + vec physicalCameraMetadata; +}; diff --git a/camera/device/README.md b/camera/device/README.md index 9f607816d8b68ff6390e58005bce0fe226db7f8b..3709cb89ba95050158cfb183795fdaa07cf99eb3 100644 --- a/camera/device/README.md +++ b/camera/device/README.md @@ -87,3 +87,11 @@ A minor revision to the ICameraDevice.hal@3.2. supported in the legacy camera HAL. Added in Android 8.1. + +### ICameraDevice.hal@3.4: + +A minor revision to the ICameraDevice.hal@3.3. + + - Adds support for session parameters during stream configuration. + +Added in Android 9 diff --git a/camera/metadata/3.2/docs.html b/camera/metadata/3.2/docs.html deleted file mode 100644 index 004ecaecf369b1ea27b013af504e8bcfc6891d1d..0000000000000000000000000000000000000000 --- a/camera/metadata/3.2/docs.html +++ /dev/null @@ -1,27340 +0,0 @@ - - - - - - - Android Camera HAL3.4 Properties - - - - - - - - -

[deleted camera/metadata/3.2/docs.html: generated "Android Camera HAL3.2 Properties" reference (Table of Contents and Properties sections); the remaining 27,340 lines of HTML markup are elided]
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Property Name | Type | Description | Units | Range | Tags
colorCorrection
controls
Property Name | Type | Description | Units | Range | Tags
android.colorCorrection.mode | byte | [public] | [full]
    -
  • - TRANSFORM_MATRIX -

    Use the android.colorCorrection.transform matrix -and android.colorCorrection.gains to do color conversion.

    -

    All advanced white balance adjustments (not specified -by our white balance pipeline) must be disabled.

    -

    If AWB is enabled with android.control.awbMode != OFF, then -TRANSFORM_MATRIX is ignored. The camera device will override -this value to either FAST or HIGH_QUALITY.

    -
  • -
  • - FAST -

    Color correction processing must not slow down -capture rate relative to sensor raw output.

    -

    Advanced white balance adjustments above and beyond -the specified white balance pipeline may be applied.

    -

    If AWB is enabled with android.control.awbMode != OFF, then -the camera device uses the last frame's AWB values -(or defaults if AWB has never been run).

    -
  • -
  • - HIGH_QUALITY -

    Color correction processing operates at improved -quality but the capture rate might be reduced (relative to sensor -raw output rate)

    -

    Advanced white balance adjustments above and beyond -the specified white balance pipeline may be applied.

    -

    If AWB is enabled with android.control.awbMode != OFF, then -the camera device uses the last frame's AWB values -(or defaults if AWB has never been run).

    -
  • -
- -
-

The mode control selects how the image data is converted from the -sensor's native color into linear sRGB color.

-
- -
Details
-

When auto-white balance (AWB) is enabled with android.control.awbMode, this -control is overridden by the AWB routine. When AWB is disabled, the -application controls how the color mapping is performed.

-

We define the expected processing pipeline below. For consistency -across devices, this is always the case with TRANSFORM_MATRIX.

-

When either FAST or HIGH_QUALITY is used, the camera device may do additional processing but android.colorCorrection.gains and android.colorCorrection.transform will still be provided by the camera device (in the results) and be roughly correct.

-

Switching to TRANSFORM_MATRIX and using the data provided from -FAST or HIGH_QUALITY will yield a picture with the same white point -as what was produced by the camera device in the earlier frame.

-

The expected processing pipeline is as follows:

-

White balance processing pipeline

-

The white balance is encoded by two values, a 4-channel white-balance -gain vector (applied in the Bayer domain), and a 3x3 color transform -matrix (applied after demosaic).

-

The 4-channel white-balance gains are defined as:

-
android.colorCorrection.gains = [ R G_even G_odd B ]
-
-

where G_even is the gain for green pixels on even rows of the -output, and G_odd is the gain for green pixels on the odd rows. -These may be identical for a given camera device implementation; if -the camera device does not support a separate gain for even/odd green -channels, it will use the G_even value, and write G_odd equal to -G_even in the output result metadata.

-

The matrices for color transforms are defined as a 9-entry vector:

-
android.colorCorrection.transform = [ I0 I1 I2 I3 I4 I5 I6 I7 I8 ]
-
-

which define a transform from input sensor colors, P_in = [ r g b ], -to output linear sRGB, P_out = [ r' g' b' ],

-

with colors as follows:

-
r' = I0r + I1g + I2b
g' = I3r + I4g + I5b
b' = I6r + I7g + I8b
-
-

Both the input and output value ranges must match. Overflow/underflow -values are clipped to fit within the range.

-
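As a concrete illustration of the pipeline above, the following is a minimal C++ sketch, assuming normalized [0.0, 1.0] float samples; the struct and function names are illustrative only and are not part of any HAL interface:

#include <algorithm>
#include <array>

// Gains in android.colorCorrection.gains order: [R G_even G_odd B].
struct WbGains { float r; float gEven; float gOdd; float b; };

// Bayer-domain step: scale a raw sample by the gain for its color channel,
// then clip to the valid range.
inline float applyWbGain(float sample, const WbGains& g,
                         bool isRed, bool isBlue, bool evenRow) {
    const float gain = isRed ? g.r : isBlue ? g.b : (evenRow ? g.gEven : g.gOdd);
    return std::clamp(sample * gain, 0.0f, 1.0f);
}

// Post-demosaic step: apply the 3x3 android.colorCorrection.transform
// (row-major) to P_in = [r g b], clipping overflow/underflow as described above.
inline std::array<float, 3> applyColorTransform(const std::array<float, 9>& I,
                                                const std::array<float, 3>& in) {
    std::array<float, 3> out = {
        I[0] * in[0] + I[1] * in[1] + I[2] * in[2],   // r'
        I[3] * in[0] + I[4] * in[1] + I[5] * in[2],   // g'
        I[6] * in[0] + I[7] * in[1] + I[8] * in[2],   // b'
    };
    for (float& v : out) v = std::clamp(v, 0.0f, 1.0f);
    return out;
}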
HAL Implementation Details
-

HAL must support both FAST and HIGH_QUALITY if color correction control is available -on the camera device, but the underlying implementation can be the same for both modes. -That is, if the highest quality implementation on the camera device does not slow down -capture rate, then FAST and HIGH_QUALITY should generate the same output.

-
android.colorCorrection.transform | rational x 3 x 3 | [public as colorSpaceTransform] | [full]
3x3 rational matrix in row-major order
- - -
-

A color transform matrix to use to transform -from sensor RGB color space to output linear sRGB color space.

-
- Unitless scale factors - -
Details
-

This matrix is either set by the camera device when the request -android.colorCorrection.mode is not TRANSFORM_MATRIX, or -directly by the application in the request when the -android.colorCorrection.mode is TRANSFORM_MATRIX.

-

In the latter case, the camera device may round the matrix to account for precision issues; the final rounded matrix should be reported back in this matrix result metadata. The transform should keep the magnitude of the output color values within [0, 1.0] (assuming input color values are within the normalized range [0, 1.0]), or clipping may occur.

-

The valid range of each matrix element varies on different devices, but -values within [-1.5, 3.0] are guaranteed not to be clipped.

-
android.colorCorrection.gains | float x 4 | [public as rggbChannelVector] | [full]
A 1D array of floats for 4 color channel gains
- - -
-

Gains applying to Bayer raw color channels for -white-balance.

-
- Unitless gain factors - -
Details
-

These per-channel gains are either set by the camera device -when the request android.colorCorrection.mode is not -TRANSFORM_MATRIX, or directly by the application in the -request when the android.colorCorrection.mode is -TRANSFORM_MATRIX.

-

The gains in the result metadata are the gains actually -applied by the camera device to the current frame.

-

The valid range of gains varies on different devices, but gains -between [1.0, 3.0] are guaranteed not to be clipped. Even if a given -device allows gains below 1.0, this is usually not recommended because -this can create color artifacts.

-
HAL Implementation Details
-

The 4-channel white-balance gains are defined in -the order of [R G_even G_odd B], where G_even is the gain -for green pixels on even rows of the output, and G_odd -is the gain for green pixels on the odd rows.

-

If a HAL does not support a separate gain for even/odd green -channels, it must use the G_even value, and write -G_odd equal to G_even in the output result metadata.

-
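A tiny sketch of the rule above, assuming the implementation computes only a single green gain; the helper name and float[4] layout (in [R G_even G_odd B] order) are illustrative:

#include <array>

// If a separate odd-row green gain is not supported, report G_odd equal to G_even.
std::array<float, 4> toResultGains(float redGain, float greenGain, float blueGain) {
    return {redGain, greenGain, greenGain, blueGain};  // [R, G_even, G_odd, B]
}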
android.colorCorrection.aberrationMode | byte | [public] | [legacy]
    -
  • - OFF -

    No aberration correction is applied.

    -
  • -
  • - FAST -

    Aberration correction will not slow down capture rate -relative to sensor raw output.

    -
  • -
  • - HIGH_QUALITY -

    Aberration correction operates at improved quality but the capture rate might be -reduced (relative to sensor raw output rate)

    -
  • -
- -
-

Mode of operation for the chromatic aberration correction algorithm.

-
- -

android.colorCorrection.availableAberrationModes

-
Details
-

Chromatic (color) aberration is caused by the fact that different wavelengths of light -can not focus on the same point after exiting from the lens. This metadata defines -the high level control of chromatic aberration correction algorithm, which aims to -minimize the chromatic artifacts that may occur along the object boundaries in an -image.

-

FAST/HIGH_QUALITY both mean that camera device determined aberration -correction will be applied. HIGH_QUALITY mode indicates that the camera device will -use the highest-quality aberration correction algorithms, even if it slows down -capture rate. FAST means the camera device will not slow down capture rate when -applying aberration correction.

-

LEGACY devices will always be in FAST mode.

-
dynamic
Property Name | Type | Description | Units | Range | Tags
android.colorCorrection.mode | byte | [public] | [full]
    -
  • - TRANSFORM_MATRIX -

    Use the android.colorCorrection.transform matrix -and android.colorCorrection.gains to do color conversion.

    -

    All advanced white balance adjustments (not specified -by our white balance pipeline) must be disabled.

    -

    If AWB is enabled with android.control.awbMode != OFF, then -TRANSFORM_MATRIX is ignored. The camera device will override -this value to either FAST or HIGH_QUALITY.

    -
  • -
  • - FAST -

    Color correction processing must not slow down -capture rate relative to sensor raw output.

    -

    Advanced white balance adjustments above and beyond -the specified white balance pipeline may be applied.

    -

    If AWB is enabled with android.control.awbMode != OFF, then -the camera device uses the last frame's AWB values -(or defaults if AWB has never been run).

    -
  • -
  • - HIGH_QUALITY -

    Color correction processing operates at improved -quality but the capture rate might be reduced (relative to sensor -raw output rate)

    -

    Advanced white balance adjustments above and beyond -the specified white balance pipeline may be applied.

    -

    If AWB is enabled with android.control.awbMode != OFF, then -the camera device uses the last frame's AWB values -(or defaults if AWB has never been run).

    -
  • -
- -
-

The mode control selects how the image data is converted from the -sensor's native color into linear sRGB color.

-
- -
Details
-

When auto-white balance (AWB) is enabled with android.control.awbMode, this -control is overridden by the AWB routine. When AWB is disabled, the -application controls how the color mapping is performed.

-

We define the expected processing pipeline below. For consistency -across devices, this is always the case with TRANSFORM_MATRIX.

-

When either FAST or HIGH_QUALITY is used, the camera device may do additional processing but android.colorCorrection.gains and android.colorCorrection.transform will still be provided by the camera device (in the results) and be roughly correct.

-

Switching to TRANSFORM_MATRIX and using the data provided from -FAST or HIGH_QUALITY will yield a picture with the same white point -as what was produced by the camera device in the earlier frame.

-

The expected processing pipeline is as follows:

-

White balance processing pipeline

-

The white balance is encoded by two values, a 4-channel white-balance -gain vector (applied in the Bayer domain), and a 3x3 color transform -matrix (applied after demosaic).

-

The 4-channel white-balance gains are defined as:

-
android.colorCorrection.gains = [ R G_even G_odd B ]
-
-

where G_even is the gain for green pixels on even rows of the -output, and G_odd is the gain for green pixels on the odd rows. -These may be identical for a given camera device implementation; if -the camera device does not support a separate gain for even/odd green -channels, it will use the G_even value, and write G_odd equal to -G_even in the output result metadata.

-

The matrices for color transforms are defined as a 9-entry vector:

-
android.colorCorrection.transform = [ I0 I1 I2 I3 I4 I5 I6 I7 I8 ]
-
-

which define a transform from input sensor colors, P_in = [ r g b ], -to output linear sRGB, P_out = [ r' g' b' ],

-

with colors as follows:

-
r' = I0r + I1g + I2b
g' = I3r + I4g + I5b
b' = I6r + I7g + I8b
-
-

Both the input and output value ranges must match. Overflow/underflow -values are clipped to fit within the range.

-
HAL Implementation Details
-

HAL must support both FAST and HIGH_QUALITY if color correction control is available -on the camera device, but the underlying implementation can be the same for both modes. -That is, if the highest quality implementation on the camera device does not slow down -capture rate, then FAST and HIGH_QUALITY should generate the same output.

-
android.colorCorrection.transform | rational x 3 x 3 | [public as colorSpaceTransform] | [full]
3x3 rational matrix in row-major order
- - -
-

A color transform matrix to use to transform -from sensor RGB color space to output linear sRGB color space.

-
- Unitless scale factors - -
Details
-

This matrix is either set by the camera device when the request -android.colorCorrection.mode is not TRANSFORM_MATRIX, or -directly by the application in the request when the -android.colorCorrection.mode is TRANSFORM_MATRIX.

-

In the latter case, the camera device may round the matrix to account for precision issues; the final rounded matrix should be reported back in this matrix result metadata. The transform should keep the magnitude of the output color values within [0, 1.0] (assuming input color values are within the normalized range [0, 1.0]), or clipping may occur.

-

The valid range of each matrix element varies on different devices, but -values within [-1.5, 3.0] are guaranteed not to be clipped.

-
android.colorCorrection.gains | float x 4 | [public as rggbChannelVector] | [full]
A 1D array of floats for 4 color channel gains
- - -
-

Gains applying to Bayer raw color channels for -white-balance.

-
- Unitless gain factors - -
Details
-

These per-channel gains are either set by the camera device -when the request android.colorCorrection.mode is not -TRANSFORM_MATRIX, or directly by the application in the -request when the android.colorCorrection.mode is -TRANSFORM_MATRIX.

-

The gains in the result metadata are the gains actually -applied by the camera device to the current frame.

-

The valid range of gains varies on different devices, but gains -between [1.0, 3.0] are guaranteed not to be clipped. Even if a given -device allows gains below 1.0, this is usually not recommended because -this can create color artifacts.

-
HAL Implementation Details
-

The 4-channel white-balance gains are defined in -the order of [R G_even G_odd B], where G_even is the gain -for green pixels on even rows of the output, and G_odd -is the gain for green pixels on the odd rows.

-

If a HAL does not support a separate gain for even/odd green -channels, it must use the G_even value, and write -G_odd equal to G_even in the output result metadata.

-
android.colorCorrection.aberrationMode | byte | [public] | [legacy]
    -
  • - OFF -

    No aberration correction is applied.

    -
  • -
  • - FAST -

    Aberration correction will not slow down capture rate -relative to sensor raw output.

    -
  • -
  • - HIGH_QUALITY -

    Aberration correction operates at improved quality but the capture rate might be -reduced (relative to sensor raw output rate)

    -
  • -
- -
-

Mode of operation for the chromatic aberration correction algorithm.

-
- -

android.colorCorrection.availableAberrationModes

-
Details
-

Chromatic (color) aberration is caused by the fact that different wavelengths of light -can not focus on the same point after exiting from the lens. This metadata defines -the high level control of chromatic aberration correction algorithm, which aims to -minimize the chromatic artifacts that may occur along the object boundaries in an -image.

-

FAST/HIGH_QUALITY both mean that camera device determined aberration -correction will be applied. HIGH_QUALITY mode indicates that the camera device will -use the highest-quality aberration correction algorithms, even if it slows down -capture rate. FAST means the camera device will not slow down capture rate when -applying aberration correction.

-

LEGACY devices will always be in FAST mode.

-
static
Property Name | Type | Description | Units | Range | Tags
android.colorCorrection.availableAberrationModes | byte x n | [public as enumList] | [legacy]
list of enums
- - -
-

List of aberration correction modes for android.colorCorrection.aberrationMode that are -supported by this camera device.

-
- -

Any value listed in android.colorCorrection.aberrationMode

-
Details
-

This key lists the valid modes for android.colorCorrection.aberrationMode. If no -aberration correction modes are available for a device, this list will solely include -OFF mode. All camera devices will support either OFF or FAST mode.

-

Camera devices that support the MANUAL_POST_PROCESSING capability will always list -OFF mode. This includes all FULL level devices.

-

LEGACY devices will always only support FAST mode.

-
HAL Implementation Details
-

HAL must support both FAST and HIGH_QUALITY if chromatic aberration control is available -on the camera device, but the underlying implementation can be the same for both modes. -That is, if the highest quality implementation on the camera device does not slow down -capture rate, then FAST and HIGH_QUALITY will generate the same output.

-
control
controls
Property Name | Type | Description | Units | Range | Tags
android.control.aeAntibandingMode | byte | [public] | [legacy]
    -
  • - OFF -

    The camera device will not adjust exposure duration to -avoid banding problems.

    -
  • -
  • - 50HZ -

    The camera device will adjust exposure duration to -avoid banding problems with 50Hz illumination sources.

    -
  • -
  • - 60HZ -

    The camera device will adjust exposure duration to -avoid banding problems with 60Hz illumination -sources.

    -
  • -
  • - AUTO -

    The camera device will automatically adapt its antibanding routine to the current illumination condition. This is the default mode if AUTO is available on a given camera device.

    -
  • -
- -
-

The desired setting for the camera device's auto-exposure -algorithm's antibanding compensation.

-
- -

android.control.aeAvailableAntibandingModes

-
Details
-

Some kinds of lighting fixtures, such as some fluorescent -lights, flicker at the rate of the power supply frequency -(60Hz or 50Hz, depending on country). While this is -typically not noticeable to a person, it can be visible to -a camera device. If a camera sets its exposure time to the -wrong value, the flicker may become visible in the -viewfinder as flicker or in a final captured image, as a -set of variable-brightness bands across the image.

-

Therefore, the auto-exposure routines of camera devices -include antibanding routines that ensure that the chosen -exposure value will not cause such banding. The choice of -exposure time depends on the rate of flicker, which the -camera device can detect automatically, or the expected -rate can be selected by the application using this -control.

-

A given camera device may not support all of the possible -options for the antibanding mode. The -android.control.aeAvailableAntibandingModes key contains -the available modes for a given camera device.

-

AUTO mode is the default if it is available on a given camera device. When AUTO mode is not available, the default will be either 50HZ or 60HZ, and both 50HZ and 60HZ will be available.

-

If manual exposure control is enabled (by setting -android.control.aeMode or android.control.mode to OFF), -then this setting has no effect, and the application must -ensure it selects exposure times that do not cause banding -issues. The android.statistics.sceneFlicker key can assist -the application in this.

-
HAL Implementation Details
-

For all capture request templates, this field must be set -to AUTO if AUTO mode is available. If AUTO is not available, -the default must be either 50HZ or 60HZ, and both 50HZ and -60HZ must be available.

-

If manual exposure control is enabled (by setting -android.control.aeMode or android.control.mode to OFF), -then the exposure values provided by the application must not be -adjusted for antibanding.

-
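For illustration, a minimal sketch of the template-default rule above; the enum and helper name are assumptions, not part of the HAL API:

#include <algorithm>
#include <vector>

enum class AeAntibandingMode { OFF, HZ_50, HZ_60, AUTO };

AeAntibandingMode defaultAntibandingMode(const std::vector<AeAntibandingMode>& available) {
    auto has = [&](AeAntibandingMode m) {
        return std::find(available.begin(), available.end(), m) != available.end();
    };
    if (has(AeAntibandingMode::AUTO)) return AeAntibandingMode::AUTO;
    // If AUTO is unavailable, both 50HZ and 60HZ must be advertised; either is a valid default.
    return has(AeAntibandingMode::HZ_50) ? AeAntibandingMode::HZ_50 : AeAntibandingMode::HZ_60;
}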
android.control.aeExposureCompensation | int32 | [public] | [legacy]

Adjustment to auto-exposure (AE) target image -brightness.

-
- Compensation steps - -

android.control.aeCompensationRange

-
Details
-

The adjustment is measured as a count of steps, with the -step size defined by android.control.aeCompensationStep and the -allowed range by android.control.aeCompensationRange.

-

For example, if the exposure value (EV) step is 0.333, '6' -will mean an exposure compensation of +2 EV; -3 will mean an -exposure compensation of -1 EV. One EV represents a doubling -of image brightness. Note that this control will only be -effective if android.control.aeMode != OFF. This control -will take effect even when android.control.aeLock == true.

-
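The step arithmetic above can be made concrete with a small sketch (the 1/3 EV step is just the example value from the text):

#include <cmath>
#include <cstdio>

int main() {
    const double stepSize = 1.0 / 3.0;      // android.control.aeCompensationStep (example value)
    const int compensation = 6;             // android.control.aeExposureCompensation
    const double ev = compensation * stepSize;          // 6 * 0.333... = +2 EV
    const double brightnessScale = std::pow(2.0, ev);   // one EV doubles brightness, so +2 EV is 4x
    std::printf("EV adjustment: %+.2f, brightness scale: %.2fx\n", ev, brightnessScale);
    return 0;
}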

In the event of exposure compensation value being changed, camera device -may take several frames to reach the newly requested exposure target. -During that time, android.control.aeState field will be in the SEARCHING -state. Once the new exposure target is reached, android.control.aeState will -change from SEARCHING to either CONVERGED, LOCKED (if AE lock is enabled), or -FLASH_REQUIRED (if the scene is too dark for still capture).

-
android.control.aeLock | byte | [public as boolean] | [legacy]
    -
  • - OFF -

    Auto-exposure lock is disabled; the AE algorithm -is free to update its parameters.

    -
  • -
  • - ON -

    Auto-exposure lock is enabled; the AE algorithm -must not update the exposure and sensitivity parameters -while the lock is active.

    -

    android.control.aeExposureCompensation setting changes -will still take effect while auto-exposure is locked.

    -

    Some rare LEGACY devices may not support -this, in which case the value will always be overridden to OFF.

    -
  • -
- -
-

Whether auto-exposure (AE) is currently locked to its latest -calculated values.

-
- -
Details
-

When set to true (ON), the AE algorithm is locked to its latest parameters, -and will not change exposure settings until the lock is set to false (OFF).

-

Note that even when AE is locked, the flash may be fired if -the android.control.aeMode is ON_AUTO_FLASH / -ON_ALWAYS_FLASH / ON_AUTO_FLASH_REDEYE.

-

When android.control.aeExposureCompensation is changed, even if the AE lock -is ON, the camera device will still adjust its exposure value.

-

If AE precapture is triggered (see android.control.aePrecaptureTrigger) -when AE is already locked, the camera device will not change the exposure time -(android.sensor.exposureTime) and sensitivity (android.sensor.sensitivity) -parameters. The flash may be fired if the android.control.aeMode -is ON_AUTO_FLASH/ON_AUTO_FLASH_REDEYE and the scene is too dark. If the -android.control.aeMode is ON_ALWAYS_FLASH, the scene may become overexposed. -Similarly, AE precapture trigger CANCEL has no effect when AE is already locked.

-

When an AE precapture sequence is triggered, AE unlock will not be able to unlock the AE if AE is locked by the camera device internally during the precapture metering sequence. In other words, submitting requests with AE unlock has no effect on an ongoing precapture metering sequence. Otherwise, the precapture metering sequence will never succeed in a sequence of preview requests where AE lock is always set to false.

-

Since the camera device has a pipeline of in-flight requests, the settings that -get locked do not necessarily correspond to the settings that were present in the -latest capture result received from the camera device, since additional captures -and AE updates may have occurred even before the result was sent out. If an -application is switching between automatic and manual control and wishes to eliminate -any flicker during the switch, the following procedure is recommended:

-
    -
  1. Starting in auto-AE mode:
  2. -
  3. Lock AE
  4. -
  5. Wait for the first result to be output that has the AE locked
  6. -
  7. Copy exposure settings from that result into a request, set the request to manual AE
  8. -
  9. Submit the capture request, proceed to run manual AE as desired.
  10. -
-

See android.control.aeState for AE lock related state transition details.

-
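The auto-to-manual switch procedure listed above can be sketched as follows; the CaptureRequest/CaptureResult structs are hypothetical stand-ins defined here for illustration, not a real HAL or camera2 API:

#include <cstdint>

struct CaptureRequest {
    bool aeLock = false;          // android.control.aeLock
    bool manualAe = false;        // stands in for android.control.aeMode == OFF
    int64_t exposureTimeNs = 0;   // android.sensor.exposureTime
    int32_t sensitivity = 0;      // android.sensor.sensitivity
};

struct CaptureResult {
    bool aeLocked = false;
    int64_t exposureTimeNs = 0;
    int32_t sensitivity = 0;
};

// Steps 1-2: starting in auto-AE mode, lock AE.
CaptureRequest lockAe(CaptureRequest req) { req.aeLock = true; return req; }

// Steps 3-4: once a result arrives with AE locked, copy its exposure settings into a
// manual-AE request so the switch does not produce a visible exposure jump.
CaptureRequest toManualAe(const CaptureResult& lockedResult) {
    CaptureRequest req;
    req.manualAe = true;
    req.exposureTimeNs = lockedResult.exposureTimeNs;
    req.sensitivity = lockedResult.sensitivity;
    return req;   // Step 5: submit this request and run manual AE from here on.
}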
android.control.aeMode | byte | [public] | [legacy]
    -
  • - OFF -

    The camera device's autoexposure routine is disabled.

    -

    The application-selected android.sensor.exposureTime, -android.sensor.sensitivity and -android.sensor.frameDuration are used by the camera -device, along with android.flash.* fields, if there's -a flash unit for this camera device.

    -

    Note that auto-white balance (AWB) and auto-focus (AF) -behavior is device dependent when AE is in OFF mode. -To have consistent behavior across different devices, -it is recommended to either set AWB and AF to OFF mode -or lock AWB and AF before setting AE to OFF. -See android.control.awbMode, android.control.afMode, -android.control.awbLock, and android.control.afTrigger -for more details.

    -

    LEGACY devices do not support the OFF mode and will -override attempts to use this value to ON.

    -
  • -
  • - ON -

    The camera device's autoexposure routine is active, -with no flash control.

    -

    The application's values for -android.sensor.exposureTime, -android.sensor.sensitivity, and -android.sensor.frameDuration are ignored. The -application has control over the various -android.flash.* fields.

    -
  • -
  • - ON_AUTO_FLASH -

    Like ON, except that the camera device also controls -the camera's flash unit, firing it in low-light -conditions.

    -

    The flash may be fired during a precapture sequence -(triggered by android.control.aePrecaptureTrigger) and -may be fired for captures for which the -android.control.captureIntent field is set to -STILL_CAPTURE

    -
  • -
  • - ON_ALWAYS_FLASH -

    Like ON, except that the camera device also controls -the camera's flash unit, always firing it for still -captures.

    -

    The flash may be fired during a precapture sequence -(triggered by android.control.aePrecaptureTrigger) and -will always be fired for captures for which the -android.control.captureIntent field is set to -STILL_CAPTURE

    -
  • -
  • - ON_AUTO_FLASH_REDEYE -

    Like ON_AUTO_FLASH, but with automatic red eye -reduction.

    -

    If deemed necessary by the camera device, a red eye -reduction flash will fire during the precapture -sequence.

    -
  • -
- -
-

The desired mode for the camera device's -auto-exposure routine.

-
- -

android.control.aeAvailableModes

-
Details
-

This control is only effective if android.control.mode is -AUTO.

-

When set to any of the ON modes, the camera device's -auto-exposure routine is enabled, overriding the -application's selected exposure time, sensor sensitivity, -and frame duration (android.sensor.exposureTime, -android.sensor.sensitivity, and -android.sensor.frameDuration). If one of the FLASH modes -is selected, the camera device's flash unit controls are -also overridden.

-

The FLASH modes are only available if the camera device -has a flash unit (android.flash.info.available is true).

-

If flash TORCH mode is desired, this field must be set to -ON or OFF, and android.flash.mode set to TORCH.

-

When set to any of the ON modes, the values chosen by the -camera device auto-exposure routine for the overridden -fields for a given capture will be available in its -CaptureResult.

-
android.control.aeRegions | int32 x 5 x area_count | [public as meteringRectangle]

List of metering areas to use for auto-exposure adjustment.

-
- Pixel coordinates within android.sensor.info.activeArraySize - -

Coordinates must be between [(0,0), (width, height)) of -android.sensor.info.activeArraySize

-
Details
-

Not available if android.control.maxRegionsAe is 0. -Otherwise will always be present.

-

The maximum number of regions supported by the device is determined by the value -of android.control.maxRegionsAe.

-

The coordinate system is based on the active pixel array, -with (0,0) being the top-left pixel in the active pixel array, and -(android.sensor.info.activeArraySize.width - 1, -android.sensor.info.activeArraySize.height - 1) being the -bottom-right pixel in the active pixel array.

-

The weight must be within [0, 1000], and represents a weight -for every pixel in the area. This means that a large metering area -with the same weight as a smaller area will have more effect in -the metering result. Metering areas can partially overlap and the -camera device will add the weights in the overlap region.

-

The weights are relative to weights of other exposure metering regions, so if only one -region is used, all non-zero weights will have the same effect. A region with 0 -weight is ignored.

-

If all regions have 0 weight, then no specific metering area needs to be used by the -camera device.

-

If the metering region is outside the used android.scaler.cropRegion returned in -capture result metadata, the camera device will ignore the sections outside the crop -region and output only the intersection rectangle as the metering region in the result -metadata. If the region is entirely outside the crop region, it will be ignored and -not reported in the result metadata.

-
HAL Implementation Details
-

The HAL-level representation of MeteringRectangle[] is an int[5 * area_count]. Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight). The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and ymax.

-
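A sketch of the HAL-level layout and crop-region clipping described above; the Rect type and helper names are illustrative assumptions:

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <optional>
#include <vector>

struct Rect { int32_t xmin, ymin, xmax, ymax; };   // inclusive on xmin/ymin, exclusive on xmax/ymax

struct MeteringRegion { Rect rect; int32_t weight; };   // weight in [0, 1000]

// Parse int[5 * area_count]: every five elements are (xmin, ymin, xmax, ymax, weight).
std::vector<MeteringRegion> parseRegions(const int32_t* data, size_t areaCount) {
    std::vector<MeteringRegion> out;
    for (size_t i = 0; i < areaCount; ++i) {
        const int32_t* r = data + 5 * i;
        out.push_back({{r[0], r[1], r[2], r[3]}, r[4]});
    }
    return out;
}

// Regions partly outside the crop region are reported as the intersection rectangle;
// regions entirely outside it are dropped from the result metadata.
std::optional<Rect> clipToCrop(const Rect& region, const Rect& crop) {
    Rect r{std::max(region.xmin, crop.xmin), std::max(region.ymin, crop.ymin),
           std::min(region.xmax, crop.xmax), std::min(region.ymax, crop.ymax)};
    if (r.xmin >= r.xmax || r.ymin >= r.ymax) return std::nullopt;
    return r;
}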
android.control.aeTargetFpsRange | int32 x 2 | [public as rangeInt] | [legacy]

Range over which the auto-exposure routine can -adjust the capture frame rate to maintain good -exposure.

-
- Frames per second (FPS) - -

Any of the entries in android.control.aeAvailableTargetFpsRanges

-
Details
-

Only constrains auto-exposure (AE) algorithm, not -manual control of android.sensor.exposureTime and -android.sensor.frameDuration.

-
android.control.aePrecaptureTrigger | byte | [public] | [limited]
    -
  • - IDLE -

    The trigger is idle.

    -
  • -
  • - START -

    The precapture metering sequence will be started -by the camera device.

    -

    The exact effect of the precapture trigger depends on -the current AE mode and state.

    -
  • -
  • - CANCEL -

    The camera device will cancel any currently active or completed -precapture metering sequence, the auto-exposure routine will return to its -initial state.

    -
  • -
- -
-

Whether the camera device will trigger a precapture -metering sequence when it processes this request.

-
- -
Details
-

This entry is normally set to IDLE, or is not -included at all in the request settings. When included and -set to START, the camera device will trigger the auto-exposure (AE) -precapture metering sequence.

-

When set to CANCEL, the camera device will cancel any active -precapture metering trigger, and return to its initial AE state. -If a precapture metering sequence is already completed, and the camera -device has implicitly locked the AE for subsequent still capture, the -CANCEL trigger will unlock the AE and return to its initial AE state.

-

The precapture sequence should be triggered before starting a -high-quality still capture for final metering decisions to -be made, and for firing pre-capture flash pulses to estimate -scene brightness and required final capture flash power, when -the flash is enabled.

-

Normally, this entry should be set to START for only a -single request, and the application should wait until the -sequence completes before starting a new one.

-

When a precapture metering sequence is finished, the camera device -may lock the auto-exposure routine internally to be able to accurately expose the -subsequent still capture image (android.control.captureIntent == STILL_CAPTURE). -For this case, the AE may not resume normal scan if no subsequent still capture is -submitted. To ensure that the AE routine restarts normal scan, the application should -submit a request with android.control.aeLock == true, followed by a request -with android.control.aeLock == false, if the application decides not to submit a -still capture request after the precapture sequence completes. Alternatively, for -API level 23 or newer devices, the CANCEL can be used to unlock the camera device -internally locked AE if the application doesn't submit a still capture request after -the AE precapture trigger. Note that, the CANCEL was added in API level 23, and must not -be used in devices that have earlier API levels.

-

The exact effect of auto-exposure (AE) precapture trigger -depends on the current AE mode and state; see -android.control.aeState for AE precapture state transition -details.

-

On LEGACY-level devices, the precapture trigger is not supported; -capturing a high-resolution JPEG image will automatically trigger a -precapture sequence before the high-resolution capture, including -potentially firing a pre-capture flash.

-

Using the precapture trigger and the auto-focus trigger android.control.afTrigger simultaneously is allowed. However, since these triggers often require cooperation between the auto-focus and auto-exposure routines (for example, the flash may need to be enabled for a focus sweep), the camera device may delay acting on a later trigger until the previous trigger has been fully handled. This may lead to longer intervals between the trigger and changes to android.control.aeState indicating the start of the precapture sequence, for example.

-

If both the precapture and the auto-focus trigger are activated on the same request, then -the camera device will complete them in the optimal order for that device.

-
HAL Implementation Details
-

The HAL must support triggering the AE precapture trigger while an AF trigger is active -(and vice versa), or at the same time as the AF trigger. It is acceptable for the HAL to -treat these as two consecutive triggers, for example handling the AF trigger and then the -AE trigger. Or the HAL may choose to optimize the case with both triggers fired at once, -to minimize the latency for converging both focus and exposure/flash usage.

-
android.control.afMode | byte | [public] | [legacy]
    -
  • - OFF -

    The auto-focus routine does not control the lens; -android.lens.focusDistance is controlled by the -application.

    -
  • -
  • - AUTO -

    Basic automatic focus mode.

    -

    In this mode, the lens does not move unless -the autofocus trigger action is called. When that trigger -is activated, AF will transition to ACTIVE_SCAN, then to -the outcome of the scan (FOCUSED or NOT_FOCUSED).

    -

    Always supported if lens is not fixed focus.

    -

    Use android.lens.info.minimumFocusDistance to determine if lens -is fixed-focus.

    -

    Triggering AF_CANCEL resets the lens position to default, -and sets the AF state to INACTIVE.

    -
  • -
  • - MACRO -

    Close-up focusing mode.

    -

    In this mode, the lens does not move unless the -autofocus trigger action is called. When that trigger is -activated, AF will transition to ACTIVE_SCAN, then to -the outcome of the scan (FOCUSED or NOT_FOCUSED). This -mode is optimized for focusing on objects very close to -the camera.

    -

    When that trigger is activated, AF will transition to -ACTIVE_SCAN, then to the outcome of the scan (FOCUSED or -NOT_FOCUSED). Triggering cancel AF resets the lens -position to default, and sets the AF state to -INACTIVE.

    -
  • -
  • - CONTINUOUS_VIDEO -

    In this mode, the AF algorithm modifies the lens -position continually to attempt to provide a -constantly-in-focus image stream.

    -

    The focusing behavior should be suitable for good quality -video recording; typically this means slower focus -movement and no overshoots. When the AF trigger is not -involved, the AF algorithm should start in INACTIVE state, -and then transition into PASSIVE_SCAN and PASSIVE_FOCUSED -states as appropriate. When the AF trigger is activated, -the algorithm should immediately transition into -AF_FOCUSED or AF_NOT_FOCUSED as appropriate, and lock the -lens position until a cancel AF trigger is received.

    -

    Once cancel is received, the algorithm should transition -back to INACTIVE and resume passive scan. Note that this -behavior is not identical to CONTINUOUS_PICTURE, since an -ongoing PASSIVE_SCAN must immediately be -canceled.

    -
  • -
  • - CONTINUOUS_PICTURE -

    In this mode, the AF algorithm modifies the lens -position continually to attempt to provide a -constantly-in-focus image stream.

    -

    The focusing behavior should be suitable for still image -capture; typically this means focusing as fast as -possible. When the AF trigger is not involved, the AF -algorithm should start in INACTIVE state, and then -transition into PASSIVE_SCAN and PASSIVE_FOCUSED states as -appropriate as it attempts to maintain focus. When the AF -trigger is activated, the algorithm should finish its -PASSIVE_SCAN if active, and then transition into -AF_FOCUSED or AF_NOT_FOCUSED as appropriate, and lock the -lens position until a cancel AF trigger is received.

    -

    When the AF cancel trigger is activated, the algorithm -should transition back to INACTIVE and then act as if it -has just been started.

    -
  • -
  • - EDOF -

    Extended depth of field (digital focus) mode.

    -

    The camera device will produce images with an extended -depth of field automatically; no special focusing -operations need to be done before taking a picture.

    -

    AF triggers are ignored, and the AF state will always be -INACTIVE.

    -
  • -
- -
-

Whether auto-focus (AF) is currently enabled, and what -mode it is set to.

-
- -

android.control.afAvailableModes

-
Details
-

Only effective if android.control.mode = AUTO and the lens is not fixed focus -(i.e. android.lens.info.minimumFocusDistance > 0). Also note that -when android.control.aeMode is OFF, the behavior of AF is device -dependent. It is recommended to lock AF by using android.control.afTrigger before -setting android.control.aeMode to OFF, or set AF mode to OFF when AE is OFF.

-

If the lens is controlled by the camera device auto-focus algorithm, -the camera device will report the current AF status in android.control.afState -in result metadata.

-
HAL Implementation Details
-

When afMode is AUTO or MACRO, the lens must not move until an AF trigger is sent in a -request (android.control.afTrigger == START). After an AF trigger, the afState will end -up with either FOCUSED_LOCKED or NOT_FOCUSED_LOCKED state (see -android.control.afState for detailed state transitions), which indicates that the lens is -locked and will not move. If camera movement (e.g. tilting camera) causes the lens to move -after the lens is locked, the HAL must compensate this movement appropriately such that -the same focal plane remains in focus.

-

When afMode is one of the continuous auto-focus modes, the HAL is free to start an AF scan whenever it's not locked. When the lens is locked after an AF trigger (see android.control.afState for detailed state transitions), the HAL should maintain the same lock behavior as above.

-

When afMode is OFF, the application controls focus manually. The accuracy of the -focus distance control depends on the android.lens.info.focusDistanceCalibration. -However, the lens must not move regardless of the camera movement for any focus distance -manual control.

-

To put this in concrete terms, if the camera has lens elements which may move based on -camera orientation or motion (e.g. due to gravity), then the HAL must drive the lens to -remain in a fixed position invariant to the camera's orientation or motion, for example, -by using accelerometer measurements in the lens control logic. This is a typical issue -that will arise on camera modules with open-loop VCMs.

-
android.control.afRegions | int32 x 5 x area_count | [public as meteringRectangle]

List of metering areas to use for auto-focus.

-
- Pixel coordinates within android.sensor.info.activeArraySize - -

Coordinates must be between [(0,0), (width, height)) of -android.sensor.info.activeArraySize

-
Details
-

Not available if android.control.maxRegionsAf is 0. -Otherwise will always be present.

-

The maximum number of focus areas supported by the device is determined by the value -of android.control.maxRegionsAf.

-

The coordinate system is based on the active pixel array, -with (0,0) being the top-left pixel in the active pixel array, and -(android.sensor.info.activeArraySize.width - 1, -android.sensor.info.activeArraySize.height - 1) being the -bottom-right pixel in the active pixel array.

-

The weight must be within [0, 1000], and represents a weight -for every pixel in the area. This means that a large metering area -with the same weight as a smaller area will have more effect in -the metering result. Metering areas can partially overlap and the -camera device will add the weights in the overlap region.

-

The weights are relative to weights of other metering regions, so if only one region -is used, all non-zero weights will have the same effect. A region with 0 weight is -ignored.

-

If all regions have 0 weight, then no specific metering area needs to be used by the -camera device.

-

If the metering region is outside the used android.scaler.cropRegion returned in -capture result metadata, the camera device will ignore the sections outside the crop -region and output only the intersection rectangle as the metering region in the result -metadata. If the region is entirely outside the crop region, it will be ignored and -not reported in the result metadata.

-
HAL Implementation Details
-

The HAL-level representation of MeteringRectangle[] is an int[5 * area_count]. Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight). The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and ymax.

-
- android.control.afTrigger - - byte - - [public] - - - [legacy] - - - -
    -
  • - IDLE -

    The trigger is idle.

    -
  • -
  • - START -

    Autofocus will trigger now.

    -
  • -
  • - CANCEL -

    Autofocus will return to its initial -state, and cancel any currently active trigger.

    -
  • -
- -
-

Whether the camera device will trigger autofocus for this request.

-
- -
Details
-

This entry is normally set to IDLE, or is not -included at all in the request settings.

-

When included and set to START, the camera device will trigger the -autofocus algorithm. If autofocus is disabled, this trigger has no effect.

-

When set to CANCEL, the camera device will cancel any active trigger, -and return to its initial AF state.

-

Generally, applications should set this entry to START or CANCEL for only a single capture, and then return it to IDLE (or not set at all). Specifying START for multiple captures in a row means restarting the AF operation over and over again.
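
As an illustration of that pattern, a minimal application-side sketch (the session, builder, callback, and handler objects are assumed to exist already) that fires the trigger for one capture and then returns the repeating request to IDLE:

import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.os.Handler;

final class AfTriggerExample {
    /** Fires a single AF trigger on top of an already-running repeating preview request. */
    static void triggerAutofocus(CameraCaptureSession session,
                                 CaptureRequest.Builder builder,
                                 CameraCaptureSession.CaptureCallback callback,
                                 Handler handler) throws CameraAccessException {
        // One capture with the trigger set to START ...
        builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
        session.capture(builder.build(), callback, handler);

        // ... then return to IDLE so subsequent requests do not restart the AF scan.
        builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
        session.setRepeatingRequest(builder.build(), callback, handler);
    }
}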

-

See android.control.afState for what the trigger means for each AF mode.

-

Using the autofocus trigger and the precapture trigger android.control.aePrecaptureTrigger simultaneously is allowed. However, since these triggers often require cooperation between the auto-focus and auto-exposure routines (for example, the flash may need to be enabled for a focus sweep), the camera device may delay acting on a later trigger until the previous trigger has been fully handled. This may lead to longer intervals between the trigger and changes to android.control.afState, for example.

-
HAL Implementation Details
-

The HAL must support triggering the AF trigger while an AE precapture trigger is active -(and vice versa), or at the same time as the AE trigger. It is acceptable for the HAL to -treat these as two consecutive triggers, for example handling the AF trigger and then the -AE trigger. Or the HAL may choose to optimize the case with both triggers fired at once, -to minimize the latency for converging both focus and exposure/flash usage.

-
- android.control.awbLock - - byte - - [public as boolean] - - - [legacy] - - - -
    -
  • - OFF -

    Auto-white balance lock is disabled; the AWB -algorithm is free to update its parameters if in AUTO -mode.

    -
  • -
  • - ON -

    Auto-white balance lock is enabled; the AWB -algorithm will not update its parameters while the lock -is active.

    -
  • -
- -
-

Whether auto-white balance (AWB) is currently locked to its -latest calculated values.

-
- -
Details
-

When set to true (ON), the AWB algorithm is locked to its latest parameters, -and will not change color balance settings until the lock is set to false (OFF).

-

Since the camera device has a pipeline of in-flight requests, the settings that get locked do not necessarily correspond to the settings that were present in the latest capture result received from the camera device, since additional captures and AWB updates may have occurred even before the result was sent out. If an application is switching between automatic and manual control and wishes to eliminate any flicker during the switch, the following procedure is recommended (a minimal code sketch follows the list):

-
  1. Starting in auto-AWB mode:
  2. Lock AWB
  3. Wait for the first result to be output that has the AWB locked
  4. Copy AWB settings from that result into a request, set the request to manual AWB
  5. Submit the capture request, proceed to run manual AWB as desired.
-
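
A minimal sketch of this procedure on the application side; the class is hypothetical, error handling is omitted, and it assumes the request builder was created for an auto-AWB preview:

import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;

final class AwbLockHandoff extends CameraCaptureSession.CaptureCallback {
    private final CaptureRequest.Builder builder;
    private boolean handedOff = false;

    AwbLockHandoff(CaptureRequest.Builder builder) {
        this.builder = builder;
        // Step 2: request the AWB lock while still in auto-AWB mode.
        builder.set(CaptureRequest.CONTROL_AWB_LOCK, true);
    }

    @Override
    public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
                                   TotalCaptureResult result) {
        Integer awbState = result.get(CaptureResult.CONTROL_AWB_STATE);
        if (handedOff || awbState == null
                || awbState != CameraMetadata.CONTROL_AWB_STATE_LOCKED) {
            return; // Step 3: wait for the first result with AWB locked.
        }
        handedOff = true;
        // Step 4: copy the locked AWB settings into a manual-AWB request.
        builder.set(CaptureRequest.CONTROL_AWB_MODE, CameraMetadata.CONTROL_AWB_MODE_OFF);
        builder.set(CaptureRequest.COLOR_CORRECTION_MODE,
                CameraMetadata.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
        builder.set(CaptureRequest.COLOR_CORRECTION_GAINS,
                result.get(CaptureResult.COLOR_CORRECTION_GAINS));
        builder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM,
                result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM));
        // Step 5: submitting builder.build() from here on runs manual AWB.
    }
}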

Note that AWB lock is only meaningful when -android.control.awbMode is in the AUTO mode; in other modes, -AWB is already fixed to a specific setting.

-

Some LEGACY devices may not support ON; the value is then overridden to OFF.

-
- android.control.awbMode - - byte - - [public] - - - [legacy] - - - -
    -
  • - OFF -

    The camera device's auto-white balance routine is disabled.

    -

    The application-selected color transform matrix -(android.colorCorrection.transform) and gains -(android.colorCorrection.gains) are used by the camera -device for manual white balance control.

    -
  • -
  • - AUTO -

    The camera device's auto-white balance routine is active.

    -

    The application's values for android.colorCorrection.transform -and android.colorCorrection.gains are ignored. -For devices that support the MANUAL_POST_PROCESSING capability, the -values used by the camera device for the transform and gains -will be available in the capture result for this request.

    -
  • -
  • - INCANDESCENT -

    The camera device's auto-white balance routine is disabled; -the camera device uses incandescent light as the assumed scene -illumination for white balance.

    -

    While the exact white balance transforms are up to the -camera device, they will approximately match the CIE -standard illuminant A.

    -

    The application's values for android.colorCorrection.transform -and android.colorCorrection.gains are ignored. -For devices that support the MANUAL_POST_PROCESSING capability, the -values used by the camera device for the transform and gains -will be available in the capture result for this request.

    -
  • -
  • - FLUORESCENT -

    The camera device's auto-white balance routine is disabled; -the camera device uses fluorescent light as the assumed scene -illumination for white balance.

    -

    While the exact white balance transforms are up to the -camera device, they will approximately match the CIE -standard illuminant F2.

    -

    The application's values for android.colorCorrection.transform -and android.colorCorrection.gains are ignored. -For devices that support the MANUAL_POST_PROCESSING capability, the -values used by the camera device for the transform and gains -will be available in the capture result for this request.

    -
  • -
  • - WARM_FLUORESCENT -

    The camera device's auto-white balance routine is disabled; -the camera device uses warm fluorescent light as the assumed scene -illumination for white balance.

    -

    While the exact white balance transforms are up to the -camera device, they will approximately match the CIE -standard illuminant F4.

    -

    The application's values for android.colorCorrection.transform -and android.colorCorrection.gains are ignored. -For devices that support the MANUAL_POST_PROCESSING capability, the -values used by the camera device for the transform and gains -will be available in the capture result for this request.

    -
  • -
  • - DAYLIGHT -

    The camera device's auto-white balance routine is disabled; -the camera device uses daylight light as the assumed scene -illumination for white balance.

    -

    While the exact white balance transforms are up to the -camera device, they will approximately match the CIE -standard illuminant D65.

    -

    The application's values for android.colorCorrection.transform -and android.colorCorrection.gains are ignored. -For devices that support the MANUAL_POST_PROCESSING capability, the -values used by the camera device for the transform and gains -will be available in the capture result for this request.

    -
  • -
  • - CLOUDY_DAYLIGHT -

    The camera device's auto-white balance routine is disabled; -the camera device uses cloudy daylight light as the assumed scene -illumination for white balance.

    -

    The application's values for android.colorCorrection.transform -and android.colorCorrection.gains are ignored. -For devices that support the MANUAL_POST_PROCESSING capability, the -values used by the camera device for the transform and gains -will be available in the capture result for this request.

    -
  • -
  • - TWILIGHT -

    The camera device's auto-white balance routine is disabled; -the camera device uses twilight light as the assumed scene -illumination for white balance.

    -

    The application's values for android.colorCorrection.transform -and android.colorCorrection.gains are ignored. -For devices that support the MANUAL_POST_PROCESSING capability, the -values used by the camera device for the transform and gains -will be available in the capture result for this request.

    -
  • -
  • - SHADE -

    The camera device's auto-white balance routine is disabled; -the camera device uses shade light as the assumed scene -illumination for white balance.

    -

    The application's values for android.colorCorrection.transform -and android.colorCorrection.gains are ignored. -For devices that support the MANUAL_POST_PROCESSING capability, the -values used by the camera device for the transform and gains -will be available in the capture result for this request.

    -
  • -
- -
-

Whether auto-white balance (AWB) is currently setting the color -transform fields, and what its illumination target -is.

-
- -

android.control.awbAvailableModes

-
Details
-

This control is only effective if android.control.mode is AUTO.

-

When set to the ON mode, the camera device's auto-white balance routine is enabled, overriding the application's selected android.colorCorrection.transform, android.colorCorrection.gains and android.colorCorrection.mode. Note that when android.control.aeMode is OFF, the behavior of AWB is device dependent. It is recommended to also set AWB mode to OFF or lock AWB by using android.control.awbLock before setting AE mode to OFF.

-

When set to the OFF mode, the camera device's auto-white balance -routine is disabled. The application manually controls the white -balance by android.colorCorrection.transform, android.colorCorrection.gains -and android.colorCorrection.mode.

-

When set to any other modes, the camera device's auto-white -balance routine is disabled. The camera device uses each -particular illumination target for white balance -adjustment. The application's values for -android.colorCorrection.transform, -android.colorCorrection.gains and -android.colorCorrection.mode are ignored.

-
- android.control.awbRegions - - int32 - x - - - 5 x area_count - - [public as meteringRectangle] - - - - - - - -

List of metering areas to use for auto-white-balance illuminant -estimation.

-
- Pixel coordinates within android.sensor.info.activeArraySize - -

Coordinates must be between [(0,0), (width, height)) of -android.sensor.info.activeArraySize

-
Details
-

Not available if android.control.maxRegionsAwb is 0. -Otherwise will always be present.

-

The maximum number of regions supported by the device is determined by the value -of android.control.maxRegionsAwb.

-

The coordinate system is based on the active pixel array, -with (0,0) being the top-left pixel in the active pixel array, and -(android.sensor.info.activeArraySize.width - 1, -android.sensor.info.activeArraySize.height - 1) being the -bottom-right pixel in the active pixel array.

-

The weight must range from 0 to 1000, and represents a weight -for every pixel in the area. This means that a large metering area -with the same weight as a smaller area will have more effect in -the metering result. Metering areas can partially overlap and the -camera device will add the weights in the overlap region.

-

The weights are relative to weights of other white balance metering regions, so if -only one region is used, all non-zero weights will have the same effect. A region with -0 weight is ignored.

-

If all regions have 0 weight, then no specific metering area needs to be used by the -camera device.

-

If the metering region is outside the used android.scaler.cropRegion returned in -capture result metadata, the camera device will ignore the sections outside the crop -region and output only the intersection rectangle as the metering region in the result -metadata. If the region is entirely outside the crop region, it will be ignored and -not reported in the result metadata.

-
HAL Implementation Details
-

The HAL level representation of MeteringRectangle[] is a -int[5 * area_count]. -Every five elements represent a metering region of -(xmin, ymin, xmax, ymax, weight). -The rectangle is defined to be inclusive on xmin and ymin, but -exclusive on xmax and ymax.

-
- android.control.captureIntent - - byte - - [public] - - - [legacy] - - - -
    -
  • - CUSTOM -

    The goal of this request doesn't fall into the other -categories. The camera device will default to preview-like -behavior.

    -
  • -
  • - PREVIEW -

    This request is for a preview-like use case.

    -

    The precapture trigger may be used to start off a metering -w/flash sequence.

    -
  • -
  • - STILL_CAPTURE -

    This request is for a still capture-type -use case.

    -

    If the flash unit is under automatic control, it may fire as needed.

    -
  • -
  • - VIDEO_RECORD -

    This request is for a video recording -use case.

    -
  • -
  • - VIDEO_SNAPSHOT -

    This request is for a video snapshot (still -image while recording video) use case.

    -

    The camera device should take the highest-quality image -possible (given the other settings) without disrupting the -frame rate of video recording.

    -
  • -
  • - ZERO_SHUTTER_LAG -

    This request is for a ZSL usecase; the -application will stream full-resolution images and -reprocess one or several later for a final -capture.

    -
  • -
  • - MANUAL -

    This request is for manual capture use case where -the applications want to directly control the capture parameters.

    -

    For example, the application may wish to manually control -android.sensor.exposureTime, android.sensor.sensitivity, etc.

    -
  • -
- -
-

Information to the camera device 3A (auto-exposure, -auto-focus, auto-white balance) routines about the purpose -of this capture, to help the camera device to decide optimal 3A -strategy.

-
- -
Details
-

This control (except for MANUAL) is only effective if -android.control.mode != OFF and any 3A routine is active.

-

ZERO_SHUTTER_LAG will be supported if android.request.availableCapabilities contains PRIVATE_REPROCESSING or YUV_REPROCESSING. MANUAL will be supported if android.request.availableCapabilities contains MANUAL_SENSOR. Other intent values are always supported.
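
For illustration, a small application-side sketch (hypothetical helper names) that checks those capabilities before selecting the MANUAL or ZERO_SHUTTER_LAG intents:

import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraMetadata;

final class CaptureIntentSupport {
    static boolean supportsManualIntent(CameraCharacteristics chars) {
        return hasCapability(chars, CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
    }

    static boolean supportsZslIntent(CameraCharacteristics chars) {
        return hasCapability(chars, CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING)
                || hasCapability(chars, CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
    }

    private static boolean hasCapability(CameraCharacteristics chars, int capability) {
        int[] caps = chars.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
        if (caps == null) return false;
        for (int c : caps) {
            if (c == capability) return true;
        }
        return false;
    }
}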

-
- android.control.effectMode - - byte - - [public] - - - [legacy] - - - -
    -
  • - OFF -

    No color effect will be applied.

    -
  • -
  • - MONO - [optional] -

    A "monocolor" effect where the image is mapped into -a single color.

    -

    This will typically be grayscale.

    -
  • -
  • - NEGATIVE - [optional] -

    A "photo-negative" effect where the image's colors -are inverted.

    -
  • -
  • - SOLARIZE - [optional] -

    A "solarisation" effect (Sabattier effect) where the -image is wholly or partially reversed in -tone.

    -
  • -
  • - SEPIA - [optional] -

    A "sepia" effect where the image is mapped into warm -gray, red, and brown tones.

    -
  • -
  • - POSTERIZE - [optional] -

    A "posterization" effect where the image uses -discrete regions of tone rather than a continuous -gradient of tones.

    -
  • -
  • - WHITEBOARD - [optional] -

    A "whiteboard" effect where the image is typically displayed -as regions of white, with black or grey details.

    -
  • -
  • - BLACKBOARD - [optional] -

    A "blackboard" effect where the image is typically displayed -as regions of black, with white or grey details.

    -
  • -
  • - AQUA - [optional] -

    An "aqua" effect where a blue hue is added to the image.

    -
  • -
- -
-

A special color effect to apply.

-
- -

android.control.availableEffects

-
Details
-

When this mode is set, a color effect will be applied -to images produced by the camera device. The interpretation -and implementation of these color effects is left to the -implementor of the camera device, and should not be -depended on to be consistent (or present) across all -devices.

-
- android.control.mode - - byte - - [public] - - - [legacy] - - - -
    -
  • - OFF -

    Full application control of pipeline.

    -

    All control by the device's metering and focusing (3A) -routines is disabled, and no other settings in -android.control.* have any effect, except that -android.control.captureIntent may be used by the camera -device to select post-processing values for processing -blocks that do not allow for manual control, or are not -exposed by the camera API.

    -

    However, the camera device's 3A routines may continue to -collect statistics and update their internal state so that -when control is switched to AUTO mode, good control values -can be immediately applied.

    -
  • -
  • - AUTO -

    Use settings for each individual 3A routine.

    -

    Manual control of capture parameters is disabled. All -controls in android.control.* besides sceneMode take -effect.

    -
  • -
  • - USE_SCENE_MODE - [optional] -

    Use a specific scene mode.

    -

    Enabling this disables control.aeMode, control.awbMode and control.afMode controls; the camera device will ignore those settings while USE_SCENE_MODE is active (except for the FACE_PRIORITY scene mode). Other control entries are still active. This setting can only be used if scene mode is supported (i.e. android.control.availableSceneModes contains some modes other than DISABLED).

    -
  • -
  • - OFF_KEEP_STATE - [optional] -

    Same as OFF mode, except that this capture will not be -used by camera device background auto-exposure, auto-white balance and -auto-focus algorithms (3A) to update their statistics.

    -

    Specifically, the 3A routines are locked to the last -values set from a request with AUTO, OFF, or -USE_SCENE_MODE, and any statistics or state updates -collected from manual captures with OFF_KEEP_STATE will be -discarded by the camera device.

    -
  • -
- -
-

Overall mode of 3A (auto-exposure, auto-white-balance, auto-focus) control -routines.

-
- -

android.control.availableModes

-
Details
-

This is a top-level 3A control switch. When set to OFF, all 3A control -by the camera device is disabled. The application must set the fields for -capture parameters itself.

-

When set to AUTO, the individual algorithm controls in -android.control.* are in effect, such as android.control.afMode.

-

When set to USE_SCENE_MODE, the individual controls in -android.control.* are mostly disabled, and the camera device implements -one of the scene mode settings (such as ACTION, SUNSET, or PARTY) -as it wishes. The camera device scene mode 3A settings are provided by -capture results.

-

When set to OFF_KEEP_STATE, it is similar to OFF mode; the only difference is that this frame will not be used by the camera device's background 3A statistics update, as if this frame is never captured. This mode can be used in the scenario where the application doesn't want a 3A manual control capture to affect the subsequent auto 3A capture results.

-
- android.control.sceneMode - - byte - - [public] - - - [legacy] - - - -
    -
  • - DISABLED - 0 -

    Indicates that no scene modes are set for a given capture request.

    -
  • -
  • - FACE_PRIORITY -

    If face detection support exists, use face -detection data for auto-focus, auto-white balance, and -auto-exposure routines.

    -

    If face detection statistics are disabled -(i.e. android.statistics.faceDetectMode is set to OFF), -this should still operate correctly (but will not return -face detection statistics to the framework).

    -

    Unlike the other scene modes, android.control.aeMode, -android.control.awbMode, and android.control.afMode -remain active when FACE_PRIORITY is set.

    -
  • -
  • - ACTION - [optional] -

    Optimized for photos of quickly moving objects.

    -

    Similar to SPORTS.

    -
  • -
  • - PORTRAIT - [optional] -

    Optimized for still photos of people.

    -
  • -
  • - LANDSCAPE - [optional] -

    Optimized for photos of distant macroscopic objects.

    -
  • -
  • - NIGHT - [optional] -

    Optimized for low-light settings.

    -
  • -
  • - NIGHT_PORTRAIT - [optional] -

    Optimized for still photos of people in low-light -settings.

    -
  • -
  • - THEATRE - [optional] -

    Optimized for dim, indoor settings where flash must -remain off.

    -
  • -
  • - BEACH - [optional] -

    Optimized for bright, outdoor beach settings.

    -
  • -
  • - SNOW - [optional] -

    Optimized for bright, outdoor settings containing snow.

    -
  • -
  • - SUNSET - [optional] -

    Optimized for scenes of the setting sun.

    -
  • -
  • - STEADYPHOTO - [optional] -

    Optimized to avoid blurry photos due to small amounts of -device motion (for example: due to hand shake).

    -
  • -
  • - FIREWORKS - [optional] -

    Optimized for nighttime photos of fireworks.

    -
  • -
  • - SPORTS - [optional] -

    Optimized for photos of quickly moving people.

    -

    Similar to ACTION.

    -
  • -
  • - PARTY - [optional] -

    Optimized for dim, indoor settings with multiple moving -people.

    -
  • -
  • - CANDLELIGHT - [optional] -

    Optimized for dim settings where the main light source -is a flame.

    -
  • -
  • - BARCODE - [optional] -

    Optimized for accurately capturing a photo of a barcode, for use by camera applications that wish to read the barcode value.

    -
  • -
  • - HIGH_SPEED_VIDEO - [deprecated] - [optional] -

    This is deprecated, please use CameraDevice#createConstrainedHighSpeedCaptureSession -and CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList -for high speed video recording.

    -

    Optimized for high speed video recording (frame rate >=60fps) use case.

    -

    The supported high speed video sizes and fps ranges are specified in android.control.availableHighSpeedVideoConfigurations. To get desired output frame rates, the application is only allowed to select video size and fps range combinations listed in this static metadata. The fps range can be controlled via android.control.aeTargetFpsRange.

    -

    In this mode, the camera device will override aeMode, awbMode, and afMode to -ON, ON, and CONTINUOUS_VIDEO, respectively. All post-processing block mode -controls will be overridden to be FAST. Therefore, no manual control of capture -and post-processing parameters is possible. All other controls operate the -same as when android.control.mode == AUTO. This means that all other -android.control.* fields continue to work, such as

    - -

    Outside of android.control.*, the following controls will work:

    - -

    For the high speed recording use case, the actual maximum supported frame rate may be lower than what the camera can output, depending on the destination Surfaces for the image data. For example, if the destination surface is from a video encoder, the application needs to check if the video encoder is capable of supporting the high frame rate for a given video size, or it will end up with a lower recording frame rate. If the destination surface is from a preview window, the preview frame rate will be bounded by the screen refresh rate.

    -

    The camera device will only support up to 2 output high speed streams -(processed non-stalling format defined in android.request.maxNumOutputStreams) -in this mode. This control will be effective only if all of below conditions are true:

    - -

    When the above conditions are NOT satisfied, the controls of this mode and android.control.aeTargetFpsRange will be ignored by the camera device, the camera device will fall back to android.control.mode == AUTO, and the returned capture result metadata will give the fps range chosen by the camera device.

    -

    Switching into or out of this mode may trigger some camera ISP/sensor reconfigurations, which may introduce extra latency. It is recommended that the application avoid unnecessary scene mode switches as much as possible.

    -
  • -
  • - HDR - [optional] -

    Turn on a device-specific high dynamic range (HDR) mode.

    -

    In this scene mode, the camera device captures images that keep a larger range of scene illumination levels visible in the final image. For example, when taking a picture of an object in front of a bright window, both the object and the scene through the window may be visible when using HDR mode, while in normal AUTO mode, one or the other may be poorly exposed. As a tradeoff, HDR mode generally takes much longer to capture a single image, has no user control, and may have other artifacts depending on the HDR method used.

    -

    Therefore, HDR captures operate at a much slower rate -than regular captures.

    -

    In this mode, on LIMITED or FULL devices, when a request -is made with a android.control.captureIntent of -STILL_CAPTURE, the camera device will capture an image -using a high dynamic range capture technique. On LEGACY -devices, captures that target a JPEG-format output will -be captured with HDR, and the capture intent is not -relevant.

    -

    The HDR capture may involve the device capturing a burst -of images internally and combining them into one, or it -may involve the device using specialized high dynamic -range capture hardware. In all cases, a single image is -produced in response to a capture request submitted -while in HDR mode.

    -

    Since substantial post-processing is generally needed to -produce an HDR image, only YUV, PRIVATE, and JPEG -outputs are supported for LIMITED/FULL device HDR -captures, and only JPEG outputs are supported for LEGACY -HDR captures. Using a RAW output for HDR capture is not -supported.

    -

    Some devices may also support always-on HDR, which -applies HDR processing at full frame rate. For these -devices, intents other than STILL_CAPTURE will also -produce an HDR output with no frame rate impact compared -to normal operation, though the quality may be lower -than for STILL_CAPTURE intents.

    -

    If SCENE_MODE_HDR is used with unsupported output types -or capture intents, the images captured will be as if -the SCENE_MODE was not enabled at all.

    -
  • -
  • - FACE_PRIORITY_LOW_LIGHT - [optional] - [hidden] -

    Same as FACE_PRIORITY scene mode, except that the camera -device will choose higher sensitivity values (android.sensor.sensitivity) -under low light conditions.

    -

    The camera device may be tuned to expose the images in a reduced sensitivity range to produce the best quality images. For example, if the android.sensor.info.sensitivityRange gives a range of [100, 1600], the camera device auto-exposure routine tuning process may limit the actual exposure sensitivity range to [100, 1200] to ensure that the noise level isn't excessive in order to preserve the image quality. Under this situation, the image under low light may be under-exposed when the sensor max exposure time (bounded by the android.control.aeTargetFpsRange when android.control.aeMode is one of the ON_* modes) and effective max sensitivity are reached. This scene mode allows the camera device auto-exposure routine to increase the sensitivity up to the max sensitivity specified by android.sensor.info.sensitivityRange when the scene is too dark and the max exposure time is reached. The captured images may be noisier compared with the images captured in normal FACE_PRIORITY mode; therefore, it is recommended that the application only use this scene mode when it is capable of reducing the noise level of the captured images.

    -

    Unlike the other scene modes, android.control.aeMode, -android.control.awbMode, and android.control.afMode -remain active when FACE_PRIORITY_LOW_LIGHT is set.

    -
  • -
  • - DEVICE_CUSTOM_START - [optional] - [hidden] - 100 -

    Scene mode values within the range of -[DEVICE_CUSTOM_START, DEVICE_CUSTOM_END] are reserved for device specific -customized scene modes.

    -
  • -
  • - DEVICE_CUSTOM_END - [optional] - [hidden] - 127 -

    Scene mode values within the range of -[DEVICE_CUSTOM_START, DEVICE_CUSTOM_END] are reserved for device specific -customized scene modes.

    -
  • -
- -
-

Control for which scene mode is currently active.

-
- -

android.control.availableSceneModes

-
Details
-

Scene modes are custom camera modes optimized for a certain set of conditions and -capture settings.

-

This is the mode that is active when android.control.mode == USE_SCENE_MODE. Aside from FACE_PRIORITY, these modes will disable android.control.aeMode, android.control.awbMode, and android.control.afMode while in use.

-

The interpretation and implementation of these scene modes is left -to the implementor of the camera device. Their behavior will not be -consistent across all devices, and any given device may only implement -a subset of these modes.

-
HAL Implementation Details
-

HAL implementations that include scene modes are expected to provide -the per-scene settings to use for android.control.aeMode, -android.control.awbMode, and android.control.afMode in -android.control.sceneModeOverrides.

-

For HIGH_SPEED_VIDEO mode, if it is included in android.control.availableSceneModes, the HAL must list the supported video sizes and fps ranges in android.control.availableHighSpeedVideoConfigurations. For a given size, e.g. 1280x720, if the HAL has two different sensor configurations for normal streaming mode and high speed streaming, when this scene mode is set/reset in a sequence of capture requests, the HAL may have to switch between different sensor modes. This mode is deprecated in HAL3.3; to support high speed video recording, please implement android.control.availableHighSpeedVideoConfigurations and the CONSTRAINED_HIGH_SPEED_VIDEO capability defined in android.request.availableCapabilities.

-
- android.control.videoStabilizationMode - - byte - - [public] - - - [legacy] - - - -
    -
  • - OFF -

    Video stabilization is disabled.

    -
  • -
  • - ON -

    Video stabilization is enabled.

    -
  • -
- -
-

Whether video stabilization is -active.

-
- -
Details
-

Video stabilization automatically warps images from -the camera in order to stabilize motion between consecutive frames.

-

If enabled, video stabilization can modify the -android.scaler.cropRegion to keep the video stream stabilized.

-

Switching between different video stabilization modes may take several frames to initialize; the camera device will report the current mode in capture result metadata. For example, when "ON" mode is requested, the video stabilization modes in the first several capture results may still be "OFF", and it will become "ON" when the initialization is done.
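
A minimal sketch of that interaction: request ON and then confirm from the capture result when stabilization has actually engaged (hypothetical helper; illustration only):

import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;

final class StabilizationCheck {
    static void requestStabilization(CaptureRequest.Builder builder) {
        builder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
                CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON);
    }

    /** The first few results may still report OFF while stabilization initializes. */
    static boolean isStabilized(CaptureResult result) {
        Integer mode = result.get(CaptureResult.CONTROL_VIDEO_STABILIZATION_MODE);
        return mode != null && mode == CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON;
    }
}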

-

In addition, not all recording sizes or frame rates may be supported for -stabilization by a device that reports stabilization support. It is guaranteed -that an output targeting a MediaRecorder or MediaCodec will be stabilized if -the recording resolution is less than or equal to 1920 x 1080 (width less than -or equal to 1920, height less than or equal to 1080), and the recording -frame rate is less than or equal to 30fps. At other sizes, the CaptureResult -android.control.videoStabilizationMode field will return -OFF if the recording output is not stabilized, or if there are no output -Surface types that can be stabilized.

-

If a camera device supports both this mode and OIS -(android.lens.opticalStabilizationMode), turning both modes on may -produce undesirable interaction, so it is recommended not to enable -both at the same time.

-
- android.control.postRawSensitivityBoost - - int32 - - [public] - - - - - - - -

The amount of additional sensitivity boost applied to output images -after RAW sensor data is captured.

-
- ISO arithmetic units, the same as android.sensor.sensitivity - -

android.control.postRawSensitivityBoostRange

-
Details
-

Some camera devices support additional digital sensitivity boosting in the camera processing pipeline after the sensor RAW image is captured. Such a boost will be applied to YUV/JPEG format output images but will have no effect on RAW output formats like RAW_SENSOR, RAW10, RAW12 or RAW_OPAQUE.

-

This key will be null for devices that do not support any RAW format outputs. For devices that do support RAW format outputs, this key will always be present, and if a device does not support post RAW sensitivity boost, it will list 100 in this key.

-

If the camera device cannot apply the exact boost requested, it will reduce the -boost to the nearest supported value. -The final boost value used will be available in the output capture result.

-

For devices that support post RAW sensitivity boost, the YUV/JPEG output images of such a device will have a total sensitivity of android.sensor.sensitivity * android.control.postRawSensitivityBoost / 100. The sensitivity of RAW format images will always be android.sensor.sensitivity.

-

This control is only effective if android.control.aeMode or android.control.mode is set to OFF; otherwise the auto-exposure algorithm will override this value.
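
To make the arithmetic above concrete, a small sketch (illustration only; the helper is hypothetical) of the effective YUV/JPEG sensitivity:

final class PostRawBoost {
    /**
     * Effective sensitivity of YUV/JPEG outputs:
     * android.sensor.sensitivity * android.control.postRawSensitivityBoost / 100.
     * RAW outputs keep android.sensor.sensitivity unchanged.
     */
    static int effectiveYuvSensitivity(int sensorSensitivity, int postRawBoost) {
        return sensorSensitivity * postRawBoost / 100;
    }
}
// Example: a sensitivity of 100 with a boost of 200 behaves like ISO 200 for YUV/JPEG outputs.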

-
static
Property NameTypeDescriptionUnitsRangeTags
- android.control.aeAvailableAntibandingModes - - byte - x - - - n - - [public as enumList] - - - [legacy] - - -
list of enums
- - -
-

List of auto-exposure antibanding modes for android.control.aeAntibandingMode that are -supported by this camera device.

-
- -

Any value listed in android.control.aeAntibandingMode

-
Details
-

Not all of the auto-exposure anti-banding modes may be -supported by a given camera device. This field lists the -valid anti-banding modes that the application may request -for this camera device with the -android.control.aeAntibandingMode control.

-
- android.control.aeAvailableModes - - byte - x - - - n - - [public as enumList] - - - [legacy] - - -
list of enums
- - -
-

List of auto-exposure modes for android.control.aeMode that are supported by this camera -device.

-
- -

Any value listed in android.control.aeMode

-
Details
-

Not all the auto-exposure modes may be supported by a -given camera device, especially if no flash unit is -available. This entry lists the valid modes for -android.control.aeMode for this camera device.

-

All camera devices support ON, and all camera devices with flash -units support ON_AUTO_FLASH and ON_ALWAYS_FLASH.

-

FULL mode camera devices always support OFF mode, -which enables application control of camera exposure time, -sensitivity, and frame duration.

-

LEGACY mode camera devices never support OFF mode. -LIMITED mode devices support OFF if they support the MANUAL_SENSOR -capability.

-
- android.control.aeAvailableTargetFpsRanges - - int32 - x - - - 2 x n - - [public as rangeInt] - - - [legacy] - - -
list of pairs of frame rates
- - -
-

List of frame rate ranges for android.control.aeTargetFpsRange supported by -this camera device.

-
- Frames per second (FPS) - -
Details
-

For devices at the LEGACY level or above:

- -

For devices at the LIMITED level or above:

-
    -
  • For YUV_420_888 burst capture use case, this list will always include (min, max) -and (max, max) where min <= 15 and max = the maximum output frame rate of the -maximum YUV_420_888 output size.
  • -
-
- android.control.aeCompensationRange - - int32 - x - - - 2 - - [public as rangeInt] - - - [legacy] - - - - - -

Maximum and minimum exposure compensation values for -android.control.aeExposureCompensation, in counts of android.control.aeCompensationStep, -that are supported by this camera device.

-
- -

Range [0,0] indicates that exposure compensation is not supported.

-

For LIMITED and FULL devices, the range must satisfy the following requirements if exposure compensation is supported (range != [0, 0]):

-

Min. exposure compensation * android.control.aeCompensationStep <= -2 EV

-

Max. exposure compensation * android.control.aeCompensationStep >= 2 EV

-

LEGACY devices may support a smaller range than this.

-
- android.control.aeCompensationStep - - rational - - [public] - - - [legacy] - - - - - -

Smallest step by which the exposure compensation -can be changed.

-
- Exposure Value (EV) - -
Details
-

This is the unit for android.control.aeExposureCompensation. For example, if this key has -a value of 1/2, then a setting of -2 for android.control.aeExposureCompensation means -that the target EV offset for the auto-exposure routine is -1 EV.

-

One unit of EV compensation changes the brightness of the captured image by a factor of two. +1 EV doubles the image brightness, while -1 EV halves the image brightness.
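
A small sketch of that conversion using the Rational step reported by the device (hypothetical helper; illustration only):

import android.hardware.camera2.CameraCharacteristics;
import android.util.Rational;

final class ExposureCompensation {
    /** Target EV offset = compensation index * aeCompensationStep. */
    static double targetEvOffset(CameraCharacteristics chars, int compensationIndex) {
        Rational step = chars.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP);
        // e.g. step = 1/2 and index = -2 gives a target offset of -1 EV.
        return step == null ? 0.0 : compensationIndex * step.doubleValue();
    }
}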

-
HAL Implementation Details
-

This must be less than or equal to 1/2.

-
- android.control.afAvailableModes - - byte - x - - - n - - [public as enumList] - - - [legacy] - - -
List of enums
- - -
-

List of auto-focus (AF) modes for android.control.afMode that are -supported by this camera device.

-
- -

Any value listed in android.control.afMode

-
Details
-

Not all the auto-focus modes may be supported by a -given camera device. This entry lists the valid modes for -android.control.afMode for this camera device.

-

All LIMITED and FULL mode camera devices will support OFF mode, and all -camera devices with adjustable focuser units -(android.lens.info.minimumFocusDistance > 0) will support AUTO mode.

-

LEGACY devices will support OFF mode only if they support -focusing to infinity (by also setting android.lens.focusDistance to -0.0f).

-
- android.control.availableEffects - - byte - x - - - n - - [public as enumList] - - - [legacy] - - -
List of enums (android.control.effectMode).
- - -
-

List of color effects for android.control.effectMode that are supported by this camera -device.

-
- -

Any value listed in android.control.effectMode

-
Details
-

This list contains the color effect modes that can be applied to -images produced by the camera device. -Implementations are not expected to be consistent across all devices. -If no color effect modes are available for a device, this will only list -OFF.

-

A color effect will only be applied if -android.control.mode != OFF. OFF is always included in this list.

-

This control has no effect on the operation of other control routines such -as auto-exposure, white balance, or focus.

-
- android.control.availableSceneModes - - byte - x - - - n - - [public as enumList] - - - [legacy] - - -
List of enums (android.control.sceneMode).
- - -
-

List of scene modes for android.control.sceneMode that are supported by this camera -device.

-
- -

Any value listed in android.control.sceneMode

-
Details
-

This list contains scene modes that can be set for the camera device. -Only scene modes that have been fully implemented for the -camera device may be included here. Implementations are not expected -to be consistent across all devices.

-

If no scene modes are supported by the camera device, this -will be set to DISABLED. Otherwise DISABLED will not be listed.

-

FACE_PRIORITY is always listed if face detection is supported (i.e. android.statistics.info.maxFaceCount > 0).

-
- android.control.availableVideoStabilizationModes - - byte - x - - - n - - [public as enumList] - - - [legacy] - - -
List of enums.
- - -
-

List of video stabilization modes for android.control.videoStabilizationMode -that are supported by this camera device.

-
- -

Any value listed in android.control.videoStabilizationMode

-
Details
-

OFF will always be listed.

-
- android.control.awbAvailableModes - - byte - x - - - n - - [public as enumList] - - - [legacy] - - -
List of enums
- - -
-

List of auto-white-balance modes for android.control.awbMode that are supported by this -camera device.

-
- -

Any value listed in android.control.awbMode

-
Details
-

Not all the auto-white-balance modes may be supported by a -given camera device. This entry lists the valid modes for -android.control.awbMode for this camera device.

-

All camera devices will support ON mode.

-

Camera devices that support the MANUAL_POST_PROCESSING capability will always support OFF mode, which enables application control of white balance, by using android.colorCorrection.transform and android.colorCorrection.gains (android.colorCorrection.mode must be set to TRANSFORM_MATRIX). This includes all FULL mode camera devices.
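
As an illustration of that requirement, a minimal application-side sketch of switching to manual white balance (hypothetical helper; the gains and transform values are supplied by the caller):

import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.ColorSpaceTransform;
import android.hardware.camera2.params.RggbChannelVector;

final class ManualWhiteBalance {
    static void applyManualWb(CaptureRequest.Builder builder,
                              RggbChannelVector gains, ColorSpaceTransform transform) {
        // AWB must be OFF and the color correction mode must be TRANSFORM_MATRIX
        // for the application-supplied gains/transform to take effect.
        builder.set(CaptureRequest.CONTROL_AWB_MODE, CameraMetadata.CONTROL_AWB_MODE_OFF);
        builder.set(CaptureRequest.COLOR_CORRECTION_MODE,
                CameraMetadata.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
        builder.set(CaptureRequest.COLOR_CORRECTION_GAINS, gains);
        builder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM, transform);
    }
}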

-
- android.control.maxRegions - - int32 - x - - - 3 - - [ndk_public] - - - [legacy] - - - - - -

List of the maximum number of regions that can be used for metering in auto-exposure (AE), auto-white balance (AWB), and auto-focus (AF); this corresponds to the maximum number of elements in android.control.aeRegions, android.control.awbRegions, and android.control.afRegions.

-
- -

Value must be >= 0 for each element. For full-capability devices -this value must be >= 1 for AE and AF. The order of the elements is: -(AE, AWB, AF).

-
- android.control.maxRegionsAe - - int32 - - [java_public] - - [synthetic] - - [legacy] - - - - - -

The maximum number of metering regions that can be used by the auto-exposure (AE) -routine.

-
- -

Value will be >= 0. For FULL-capability devices, this -value will be >= 1.

-
Details
-

This corresponds to the maximum allowed number of elements in android.control.aeRegions.

-
HAL Implementation Details
-

This entry is private to the framework. Fill in -maxRegions to have this entry be automatically populated.

-
- android.control.maxRegionsAwb - - int32 - - [java_public] - - [synthetic] - - [legacy] - - - - - -

The maximum number of metering regions that can be used by the auto-white balance (AWB) -routine.

-
- -

Value will be >= 0.

-
Details
-

This corresponds to the maximum allowed number of elements in android.control.awbRegions.

-
HAL Implementation Details
-

This entry is private to the framework. Fill in -maxRegions to have this entry be automatically populated.

-
- android.control.maxRegionsAf - - int32 - - [java_public] - - [synthetic] - - [legacy] - - - - - -

The maximum number of metering regions that can be used by the auto-focus (AF) routine.

-
- -

Value will be >= 0. For FULL-capability devices, this -value will be >= 1.

-
Details
-

This corresponds to the maximum allowed number of elements in android.control.afRegions.

-
HAL Implementation Details
-

This entry is private to the framework. Fill in -maxRegions to have this entry be automatically populated.

-
- android.control.sceneModeOverrides - - byte - x - - - 3 x length(availableSceneModes) - - [system] - - - [limited] - - - - - -

Ordered list of auto-exposure, auto-white balance, and auto-focus -settings to use with each available scene mode.

-
- -

For each available scene mode, the list must contain three -entries containing the android.control.aeMode, -android.control.awbMode, and android.control.afMode values used -by the camera device. The entry order is (aeMode, awbMode, afMode) -where aeMode has the lowest index position.

-
Details
-

When a scene mode is enabled, the camera device is expected -to override android.control.aeMode, android.control.awbMode, -and android.control.afMode with its preferred settings for -that scene mode.

-

The order of this list matches that of availableSceneModes, with 3 entries for each mode. The overrides listed for FACE_PRIORITY and FACE_PRIORITY_LOW_LIGHT (if supported) are ignored, since for those modes the application-set android.control.aeMode, android.control.awbMode, and android.control.afMode values are used instead, matching the behavior when android.control.mode is set to AUTO. It is recommended that the FACE_PRIORITY and FACE_PRIORITY_LOW_LIGHT (if supported) overrides be set to 0.

-

For example, if availableSceneModes contains -(FACE_PRIORITY, ACTION, NIGHT), then the camera framework -expects sceneModeOverrides to have 9 entries formatted like: -(0, 0, 0, ON_AUTO_FLASH, AUTO, CONTINUOUS_PICTURE, -ON_AUTO_FLASH, INCANDESCENT, AUTO).

-
HAL Implementation Details
-

To maintain backward compatibility, this list will be made available -in the static metadata of the camera service. The camera service will -use these values to set android.control.aeMode, -android.control.awbMode, and android.control.afMode when using a scene -mode other than FACE_PRIORITY and FACE_PRIORITY_LOW_LIGHT (if supported).

-
- android.control.availableHighSpeedVideoConfigurations - - int32 - x - - - 5 x n - - [hidden as highSpeedVideoConfiguration] - - - [limited] - - - - - -

List of available high speed video size, fps range and max batch size configurations -supported by the camera device, in the format of (width, height, fps_min, fps_max, batch_size_max).

-
- -

For each configuration, the fps_max >= 120fps.

-
Details
-

When CONSTRAINED_HIGH_SPEED_VIDEO is supported in android.request.availableCapabilities, -this metadata will list the supported high speed video size, fps range and max batch size -configurations. All the sizes listed in this configuration will be a subset of the sizes -reported by StreamConfigurationMap#getOutputSizes -for processed non-stalling formats.

-

For the high speed video use case, the application must -select the video size and fps range from this metadata to configure the recording and -preview streams and setup the recording requests. For example, if the application intends -to do high speed recording, it can select the maximum size reported by this metadata to -configure output streams. Once the size is selected, application can filter this metadata -by selected size and get the supported fps ranges, and use these fps ranges to setup the -recording requests. Note that for the use case of multiple output streams, application -must select one unique size from this metadata to use (e.g., preview and recording streams -must have the same size). Otherwise, the high speed capture session creation will fail.

-

The min and max fps will be multiples of 30fps.

-

High speed video streaming places significant performance pressure on camera hardware; to achieve efficient high speed streaming, the camera device may have to aggregate multiple frames together and process them as a batch, where the request controls are the same for all the frames in the batch. Max batch size indicates the maximum possible number of frames the camera device will group together for this high speed stream configuration. This max batch size will be used to generate a high speed recording request list by CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList. The max batch size for each configuration will satisfy the following conditions:

-
  • Each max batch size will be a divisor of its corresponding fps_max / 30. For example, if max_fps is 300, max batch size will only be 1, 2, 5, or 10.
  • The camera device may choose a smaller internal batch size for each configuration, but the actual batch size will be a divisor of the max batch size. For example, if the max batch size is 8, the actual batch size used by the camera device will only be 1, 2, 4, or 8.
  • The max batch size in each configuration entry must be no larger than 32.
-

The camera device doesn't have to support batch mode to achieve high speed video recording; in that case, batch_size_max will be reported as 1 in each configuration entry.
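
The constraints above can be summarized in a small validation sketch; the helper is hypothetical and only restates the rules, it is not part of any framework or HAL API:

final class HighSpeedConfigCheck {
    /** Checks one (width, height, fps_min, fps_max, batch_size_max) entry against the rules above. */
    static boolean isValidEntry(int fpsMin, int fpsMax, int batchSizeMax) {
        if (fpsMax < 120) return false;                          // every configuration must reach >= 120fps
        if (fpsMin % 30 != 0 || fpsMax % 30 != 0) return false;  // min/max fps are multiples of 30fps
        if (batchSizeMax < 1 || batchSizeMax > 32) return false; // batch size is capped at 32
        // batch_size_max must divide fps_max / 30 (e.g. fps_max 300 allows 1, 2, 5, or 10).
        return (fpsMax / 30) % batchSizeMax == 0;
    }
}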

-

The fps ranges in this configuration list can only be used to create requests that are submitted to a high speed camera capture session created by CameraDevice#createConstrainedHighSpeedCaptureSession. The fps ranges reported in this metadata must not be used to set up capture requests for a normal capture session, or a request error will result.

-
HAL Implementation Details
-

All the sizes listed in this configuration will be a subset of the sizes reported by -android.scaler.availableStreamConfigurations for processed non-stalling output formats. -Note that for all high speed video configurations, HAL must be able to support a minimum -of two streams, though the application might choose to configure just one stream.

-

The HAL may support multiple sensor modes for high speed outputs, for example, a 120fps sensor mode for 120fps recording and a 240fps sensor mode for 240fps recording. The application usually starts preview first, then starts recording. To avoid, as much as possible, stutter caused by a sensor mode switch when recording starts, the application may want to ensure the same sensor mode is used for preview and recording. Therefore, the HAL must advertise the variable fps range [30, fps_max] for each fixed fps range in this configuration list. For example, if the HAL advertises [120, 120] and [240, 240], the HAL must also advertise [30, 120] and [30, 240] for those configurations. In doing so, if the application intends to do 120fps recording, it can select [30, 120] to start preview, and [120, 120] to start recording. For these variable fps ranges, it's up to the HAL to decide the actual fps values that are suitable for smooth preview streaming. If the HAL sees different max_fps values that fall into different sensor modes in a sequence of requests, the HAL must switch sensor modes as quickly as possible to minimize the stutter caused by the mode switch.

-
- android.control.aeLockAvailable - - byte - - [public as boolean] - - - [legacy] - - - -
    -
  • - FALSE -
  • -
  • - TRUE -
  • -
- -
-

Whether the camera device supports android.control.aeLock

-
- -
Details
-

Devices with MANUAL_SENSOR capability or BURST_CAPTURE capability will always -list true. This includes FULL devices.

-
- android.control.awbLockAvailable - - byte - - [public as boolean] - - - [legacy] - - - -
    -
  • - FALSE -
  • -
  • - TRUE -
  • -
- -
-

Whether the camera device supports android.control.awbLock

-
- -
Details
-

Devices with MANUAL_POST_PROCESSING capability or BURST_CAPTURE capability will -always list true. This includes FULL devices.

-
- android.control.availableModes - - byte - x - - - n - - [public as enumList] - - - [legacy] - - -
List of enums (android.control.mode).
- - -
-

List of control modes for android.control.mode that are supported by this camera -device.

-
- -

Any value listed in android.control.mode

-
Details
-

This list contains control modes that can be set for the camera device. LEGACY mode devices will always support AUTO mode. LIMITED and FULL devices will always support OFF and AUTO modes.

-
- android.control.postRawSensitivityBoostRange - - int32 - x - - - 2 - - [public as rangeInt] - - - - -
Range of supported post RAW sensitivity boosts
- - -
-

Range of boosts for android.control.postRawSensitivityBoost supported -by this camera device.

-
- ISO arithmetic units, the same as android.sensor.sensitivity - -
Details
-

Devices that support post RAW sensitivity boost will advertise the android.control.postRawSensitivityBoost key for controlling post RAW sensitivity boost.

-

This key will be null for devices that do not support any RAW format outputs. For devices that do support RAW format outputs, this key will always be present, and if a device does not support post RAW sensitivity boost, it will list (100, 100) in this key.

-
HAL Implementation Details
-

This key is added in HAL3.4. For HAL3.3 or earlier devices, the camera framework will generate this key as (100, 100) if the device supports any RAW output format. All HAL3.4 and above devices should list this key if the device supports any RAW output format.

-
dynamic
Property NameTypeDescriptionUnitsRangeTags
- android.control.aePrecaptureId - - int32 - - [system] - - - - [deprecated] - - - - -

The ID sent with the latest -CAMERA2_TRIGGER_PRECAPTURE_METERING call

-
- -

Deprecated. Do not use.

-
Details
-

Must be 0 if no CAMERA2_TRIGGER_PRECAPTURE_METERING trigger has been received yet by the HAL. Always updated even if the AE algorithm ignores the trigger.

-
- android.control.aeAntibandingMode - - byte - - [public] - - - [legacy] - - - -
    -
  • - OFF -

    The camera device will not adjust exposure duration to -avoid banding problems.

    -
  • -
  • - 50HZ -

    The camera device will adjust exposure duration to -avoid banding problems with 50Hz illumination sources.

    -
  • -
  • - 60HZ -

    The camera device will adjust exposure duration to -avoid banding problems with 60Hz illumination -sources.

    -
  • -
  • - AUTO -

    The camera device will automatically adapt its antibanding routine to the current illumination condition. This is the default mode if AUTO is available on a given camera device.

    -
  • -
- -
-

The desired setting for the camera device's auto-exposure -algorithm's antibanding compensation.

-
- -

android.control.aeAvailableAntibandingModes

-
Details
-

Some kinds of lighting fixtures, such as some fluorescent -lights, flicker at the rate of the power supply frequency -(60Hz or 50Hz, depending on country). While this is -typically not noticeable to a person, it can be visible to -a camera device. If a camera sets its exposure time to the -wrong value, the flicker may become visible in the -viewfinder as flicker or in a final captured image, as a -set of variable-brightness bands across the image.

-

Therefore, the auto-exposure routines of camera devices -include antibanding routines that ensure that the chosen -exposure value will not cause such banding. The choice of -exposure time depends on the rate of flicker, which the -camera device can detect automatically, or the expected -rate can be selected by the application using this -control.

-

A given camera device may not support all of the possible -options for the antibanding mode. The -android.control.aeAvailableAntibandingModes key contains -the available modes for a given camera device.

-

AUTO mode is the default if it is available on a given camera device. When AUTO mode is not available, the default will be either 50HZ or 60HZ, and both 50HZ and 60HZ will be available.

-

If manual exposure control is enabled (by setting -android.control.aeMode or android.control.mode to OFF), -then this setting has no effect, and the application must -ensure it selects exposure times that do not cause banding -issues. The android.statistics.sceneFlicker key can assist -the application in this.

-
HAL Implementation Details
-

For all capture request templates, this field must be set -to AUTO if AUTO mode is available. If AUTO is not available, -the default must be either 50HZ or 60HZ, and both 50HZ and -60HZ must be available.

-

If manual exposure control is enabled (by setting -android.control.aeMode or android.control.mode to OFF), -then the exposure values provided by the application must not be -adjusted for antibanding.

-
- android.control.aeExposureCompensation - - int32 - - [public] - - - [legacy] - - - - - -

Adjustment to auto-exposure (AE) target image -brightness.

-
- Compensation steps - -

android.control.aeCompensationRange

-
Details
-

The adjustment is measured as a count of steps, with the -step size defined by android.control.aeCompensationStep and the -allowed range by android.control.aeCompensationRange.

-

For example, if the exposure value (EV) step is 0.333, '6' -will mean an exposure compensation of +2 EV; -3 will mean an -exposure compensation of -1 EV. One EV represents a doubling -of image brightness. Note that this control will only be -effective if android.control.aeMode != OFF. This control -will take effect even when android.control.aeLock == true.

-

When the exposure compensation value is changed, the camera device may take several frames to reach the newly requested exposure target. During that time, the android.control.aeState field will be in the SEARCHING state. Once the new exposure target is reached, android.control.aeState will change from SEARCHING to either CONVERGED, LOCKED (if AE lock is enabled), or FLASH_REQUIRED (if the scene is too dark for still capture).
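
A minimal application-side sketch of applying a compensation step and then watching android.control.aeState until the new target is reached (hypothetical helper; illustration only):

import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;

final class AeCompensationExample {
    static void setCompensation(CaptureRequest.Builder builder, int steps) {
        // Only effective when android.control.aeMode != OFF; still applied even with AE lock ON.
        builder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, steps);
    }

    /** Returns true once AE has settled on the new target. */
    static boolean hasReachedTarget(CaptureResult result) {
        Integer state = result.get(CaptureResult.CONTROL_AE_STATE);
        return state != null
                && (state == CameraMetadata.CONTROL_AE_STATE_CONVERGED
                        || state == CameraMetadata.CONTROL_AE_STATE_LOCKED
                        || state == CameraMetadata.CONTROL_AE_STATE_FLASH_REQUIRED);
    }
}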

-
- android.control.aeLock - - byte - - [public as boolean] - - - [legacy] - - - -
    -
  • - OFF -

    Auto-exposure lock is disabled; the AE algorithm -is free to update its parameters.

    -
  • -
  • - ON -

    Auto-exposure lock is enabled; the AE algorithm -must not update the exposure and sensitivity parameters -while the lock is active.

    -

    android.control.aeExposureCompensation setting changes -will still take effect while auto-exposure is locked.

    -

    Some rare LEGACY devices may not support -this, in which case the value will always be overridden to OFF.

    -
  • -
- -
-

Whether auto-exposure (AE) is currently locked to its latest -calculated values.

-
- -
Details
-

When set to true (ON), the AE algorithm is locked to its latest parameters, -and will not change exposure settings until the lock is set to false (OFF).

-

Note that even when AE is locked, the flash may be fired if -the android.control.aeMode is ON_AUTO_FLASH / -ON_ALWAYS_FLASH / ON_AUTO_FLASH_REDEYE.

-

When android.control.aeExposureCompensation is changed, even if the AE lock -is ON, the camera device will still adjust its exposure value.

-

If AE precapture is triggered (see android.control.aePrecaptureTrigger) -when AE is already locked, the camera device will not change the exposure time -(android.sensor.exposureTime) and sensitivity (android.sensor.sensitivity) -parameters. The flash may be fired if the android.control.aeMode -is ON_AUTO_FLASH/ON_AUTO_FLASH_REDEYE and the scene is too dark. If the -android.control.aeMode is ON_ALWAYS_FLASH, the scene may become overexposed. -Similarly, AE precapture trigger CANCEL has no effect when AE is already locked.

-

When an AE precapture sequence is triggered, AE unlock will not be able to unlock the AE if AE is locked by the camera device internally during the precapture metering sequence. In other words, submitting requests with AE unlock has no effect for an ongoing precapture metering sequence. Otherwise, the precapture metering sequence will never succeed in a sequence of preview requests where AE lock is always set to false.

-

Since the camera device has a pipeline of in-flight requests, the settings that -get locked do not necessarily correspond to the settings that were present in the -latest capture result received from the camera device, since additional captures -and AE updates may have occurred even before the result was sent out. If an -application is switching between automatic and manual control and wishes to eliminate -any flicker during the switch, the following procedure is recommended:

-
    -
  1. Starting in auto-AE mode:
  2. -
  3. Lock AE
  4. -
  5. Wait for the first result to be output that has the AE locked
  6. -
  7. Copy exposure settings from that result into a request, set the request to manual AE
  8. -
  9. Submit the capture request, proceed to run manual AE as desired.
  10. -
-
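A minimal framework-level (Camera2) sketch of the procedure above, assuming an open CameraDevice `device`, an active CameraCaptureSession `session`, an output `previewSurface`, and a `lockedResult` that already reports AE as locked; none of these names come from this document:

    // Copy the AE-locked exposure values into a manual-AE repeating request.
    CaptureRequest.Builder manual =
            device.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
    manual.addTarget(previewSurface);
    manual.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_OFF);
    manual.set(CaptureRequest.SENSOR_EXPOSURE_TIME,
            lockedResult.get(CaptureResult.SENSOR_EXPOSURE_TIME));
    manual.set(CaptureRequest.SENSOR_SENSITIVITY,
            lockedResult.get(CaptureResult.SENSOR_SENSITIVITY));
    manual.set(CaptureRequest.SENSOR_FRAME_DURATION,
            lockedResult.get(CaptureResult.SENSOR_FRAME_DURATION));
    session.setRepeatingRequest(manual.build(), /*callback*/ null, /*handler*/ null);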

See android.control.aeState for AE lock related state transition details.

-
- android.control.aeMode - - byte - - [public] - - - [legacy] - - - -
    -
  • - OFF -

    The camera device's autoexposure routine is disabled.

    -

    The application-selected android.sensor.exposureTime, -android.sensor.sensitivity and -android.sensor.frameDuration are used by the camera -device, along with android.flash.* fields, if there's -a flash unit for this camera device.

    -

    Note that auto-white balance (AWB) and auto-focus (AF) -behavior is device dependent when AE is in OFF mode. -To have consistent behavior across different devices, -it is recommended to either set AWB and AF to OFF mode -or lock AWB and AF before setting AE to OFF. -See android.control.awbMode, android.control.afMode, -android.control.awbLock, and android.control.afTrigger -for more details.

    -

    LEGACY devices do not support the OFF mode and will -override attempts to use this value to ON.

    -
  • -
  • - ON -

    The camera device's autoexposure routine is active, -with no flash control.

    -

    The application's values for -android.sensor.exposureTime, -android.sensor.sensitivity, and -android.sensor.frameDuration are ignored. The -application has control over the various -android.flash.* fields.

    -
  • -
  • - ON_AUTO_FLASH -

    Like ON, except that the camera device also controls -the camera's flash unit, firing it in low-light -conditions.

    -

The flash may be fired during a precapture sequence (triggered by android.control.aePrecaptureTrigger) and may be fired for captures for which the android.control.captureIntent field is set to STILL_CAPTURE.

    -
  • -
  • - ON_ALWAYS_FLASH -

    Like ON, except that the camera device also controls -the camera's flash unit, always firing it for still -captures.

    -

The flash may be fired during a precapture sequence (triggered by android.control.aePrecaptureTrigger) and will always be fired for captures for which the android.control.captureIntent field is set to STILL_CAPTURE.

    -
  • -
  • - ON_AUTO_FLASH_REDEYE -

    Like ON_AUTO_FLASH, but with automatic red eye -reduction.

    -

    If deemed necessary by the camera device, a red eye -reduction flash will fire during the precapture -sequence.

    -
  • -
- -
-

The desired mode for the camera device's -auto-exposure routine.

-
- -

android.control.aeAvailableModes

-
Details
-

This control is only effective if android.control.mode is -AUTO.

-

When set to any of the ON modes, the camera device's -auto-exposure routine is enabled, overriding the -application's selected exposure time, sensor sensitivity, -and frame duration (android.sensor.exposureTime, -android.sensor.sensitivity, and -android.sensor.frameDuration). If one of the FLASH modes -is selected, the camera device's flash unit controls are -also overridden.

-

The FLASH modes are only available if the camera device -has a flash unit (android.flash.info.available is true).

-

If flash TORCH mode is desired, this field must be set to -ON or OFF, and android.flash.mode set to TORCH.

-

When set to any of the ON modes, the values chosen by the -camera device auto-exposure routine for the overridden -fields for a given capture will be available in its -CaptureResult.

-
- android.control.aeRegions - - int32 - x - - - 5 x area_count - - [public as meteringRectangle] - - - - - - - -

List of metering areas to use for auto-exposure adjustment.

-
- Pixel coordinates within android.sensor.info.activeArraySize - -

Coordinates must be between [(0,0), (width, height)) of -android.sensor.info.activeArraySize

-
Details
-

Not available if android.control.maxRegionsAe is 0. -Otherwise will always be present.

-

The maximum number of regions supported by the device is determined by the value -of android.control.maxRegionsAe.

-

The coordinate system is based on the active pixel array, -with (0,0) being the top-left pixel in the active pixel array, and -(android.sensor.info.activeArraySize.width - 1, -android.sensor.info.activeArraySize.height - 1) being the -bottom-right pixel in the active pixel array.

-

The weight must be within [0, 1000], and represents a weight -for every pixel in the area. This means that a large metering area -with the same weight as a smaller area will have more effect in -the metering result. Metering areas can partially overlap and the -camera device will add the weights in the overlap region.

-

The weights are relative to weights of other exposure metering regions, so if only one -region is used, all non-zero weights will have the same effect. A region with 0 -weight is ignored.

-

If all regions have 0 weight, then no specific metering area needs to be used by the -camera device.

-

If the metering region is outside the used android.scaler.cropRegion returned in -capture result metadata, the camera device will ignore the sections outside the crop -region and output only the intersection rectangle as the metering region in the result -metadata. If the region is entirely outside the crop region, it will be ignored and -not reported in the result metadata.

-
HAL Implementation Details
-

The HAL level representation of MeteringRectangle[] is an int[5 * area_count]. Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight). The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and ymax.
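A hedged sketch of that packing; the helper below is illustrative only and not part of any interface, and it assumes the framework MeteringRectangle class:

    // Pack MeteringRectangle[] into the int[5 * area_count] layout described above:
    // (xmin, ymin, xmax, ymax, weight), with xmax/ymax exclusive.
    static int[] packRegions(android.hardware.camera2.params.MeteringRectangle[] regions) {
        int[] packed = new int[5 * regions.length];
        for (int i = 0; i < regions.length; i++) {
            android.hardware.camera2.params.MeteringRectangle r = regions[i];
            packed[5 * i]     = r.getX();                 // inclusive xmin
            packed[5 * i + 1] = r.getY();                 // inclusive ymin
            packed[5 * i + 2] = r.getX() + r.getWidth();  // exclusive xmax
            packed[5 * i + 3] = r.getY() + r.getHeight(); // exclusive ymax
            packed[5 * i + 4] = r.getMeteringWeight();    // 0..1000
        }
        return packed;
    }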

-
- android.control.aeTargetFpsRange - - int32 - x - - - 2 - - [public as rangeInt] - - - [legacy] - - - - - -

Range over which the auto-exposure routine can -adjust the capture frame rate to maintain good -exposure.

-
- Frames per second (FPS) - -

Any of the entries in android.control.aeAvailableTargetFpsRanges

-
Details
-

Only constrains auto-exposure (AE) algorithm, not -manual control of android.sensor.exposureTime and -android.sensor.frameDuration.
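For illustration only, a framework-level sketch; `builder` is an assumed CaptureRequest.Builder, and the range chosen must actually come from android.control.aeAvailableTargetFpsRanges:

    // Ask AE to hold a fixed 30 fps frame rate for video-like use cases.
    builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE,
            new android.util.Range<Integer>(30, 30));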

-
- android.control.aePrecaptureTrigger - - byte - - [public] - - - [limited] - - - -
    -
  • - IDLE -

    The trigger is idle.

    -
  • -
  • - START -

    The precapture metering sequence will be started -by the camera device.

    -

    The exact effect of the precapture trigger depends on -the current AE mode and state.

    -
  • -
  • - CANCEL -

    The camera device will cancel any currently active or completed -precapture metering sequence, the auto-exposure routine will return to its -initial state.

    -
  • -
- -
-

Whether the camera device will trigger a precapture -metering sequence when it processes this request.

-
- -
Details
-

This entry is normally set to IDLE, or is not -included at all in the request settings. When included and -set to START, the camera device will trigger the auto-exposure (AE) -precapture metering sequence.

-

When set to CANCEL, the camera device will cancel any active -precapture metering trigger, and return to its initial AE state. -If a precapture metering sequence is already completed, and the camera -device has implicitly locked the AE for subsequent still capture, the -CANCEL trigger will unlock the AE and return to its initial AE state.

-

The precapture sequence should be triggered before starting a -high-quality still capture for final metering decisions to -be made, and for firing pre-capture flash pulses to estimate -scene brightness and required final capture flash power, when -the flash is enabled.

-

Normally, this entry should be set to START for only a -single request, and the application should wait until the -sequence completes before starting a new one.
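A hedged framework-level sketch of that one-shot usage; `device`, `session`, `previewSurface`, `captureCallback`, and `handler` are assumed names, not part of this document:

    // Fire the AE precapture trigger in a single request; the repeating
    // preview request leaves the trigger at IDLE (or unset).
    CaptureRequest.Builder precapture =
            device.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
    precapture.addTarget(previewSurface);
    precapture.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
            CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_START);
    session.capture(precapture.build(), captureCallback, handler);
    // captureCallback then watches CaptureResult.CONTROL_AE_STATE for
    // PRECAPTURE followed by CONVERGED or FLASH_REQUIRED before taking the still.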

-

When a precapture metering sequence is finished, the camera device may lock the auto-exposure routine internally to be able to accurately expose the subsequent still capture image (android.control.captureIntent == STILL_CAPTURE). For this case, the AE may not resume normal scan if no subsequent still capture is submitted. To ensure that the AE routine restarts normal scan, the application should submit a request with android.control.aeLock == true, followed by a request with android.control.aeLock == false, if the application decides not to submit a still capture request after the precapture sequence completes. Alternatively, for API level 23 or newer devices, the CANCEL can be used to unlock the camera device internally locked AE if the application doesn't submit a still capture request after the AE precapture trigger. Note that CANCEL was added in API level 23, and must not be used in devices that have earlier API levels.

-

The exact effect of auto-exposure (AE) precapture trigger -depends on the current AE mode and state; see -android.control.aeState for AE precapture state transition -details.

-

On LEGACY-level devices, the precapture trigger is not supported; -capturing a high-resolution JPEG image will automatically trigger a -precapture sequence before the high-resolution capture, including -potentially firing a pre-capture flash.

-

Using the precapture trigger and the auto-focus trigger android.control.afTrigger simultaneously is allowed. However, since these triggers often require cooperation between the auto-focus and auto-exposure routines (for example, the flash may need to be enabled for a focus sweep), the camera device may delay acting on a later trigger until the previous trigger has been fully handled. This may lead to longer intervals between the trigger and changes to android.control.aeState indicating the start of the precapture sequence, for example.

-

If both the precapture and the auto-focus trigger are activated on the same request, then -the camera device will complete them in the optimal order for that device.

-
HAL Implementation Details
-

The HAL must support triggering the AE precapture trigger while an AF trigger is active -(and vice versa), or at the same time as the AF trigger. It is acceptable for the HAL to -treat these as two consecutive triggers, for example handling the AF trigger and then the -AE trigger. Or the HAL may choose to optimize the case with both triggers fired at once, -to minimize the latency for converging both focus and exposure/flash usage.

-
- android.control.aeState - - byte - - [public] - - - [limited] - - - -
    -
  • - INACTIVE -

    AE is off or recently reset.

    -

    When a camera device is opened, it starts in -this state. This is a transient state, the camera device may skip reporting -this state in capture result.

    -
  • -
  • - SEARCHING -

    AE doesn't yet have a good set of control values -for the current scene.

    -

    This is a transient state, the camera device may skip -reporting this state in capture result.

    -
  • -
  • - CONVERGED -

    AE has a good set of control values for the -current scene.

    -
  • -
  • - LOCKED -

    AE has been locked.

    -
  • -
  • - FLASH_REQUIRED -

    AE has a good set of control values, but flash -needs to be fired for good quality still -capture.

    -
  • -
  • - PRECAPTURE -

    AE has been asked to do a precapture sequence -and is currently executing it.

    -

    Precapture can be triggered through setting -android.control.aePrecaptureTrigger to START. Currently -active and completed (if it causes camera device internal AE lock) precapture -metering sequence can be canceled through setting -android.control.aePrecaptureTrigger to CANCEL.

    -

    Once PRECAPTURE completes, AE will transition to CONVERGED -or FLASH_REQUIRED as appropriate. This is a transient -state, the camera device may skip reporting this state in -capture result.

    -
  • -
- -
-

Current state of the auto-exposure (AE) algorithm.

-
- -
Details
-

Switching between or enabling AE modes (android.control.aeMode) always -resets the AE state to INACTIVE. Similarly, switching between android.control.mode, -or android.control.sceneMode if android.control.mode == USE_SCENE_MODE resets all -the algorithm states to INACTIVE.

-

The camera device can do several state transitions between two results, if it is -allowed by the state transition table. For example: INACTIVE may never actually be -seen in a result.

-

The state in the result is the state for this image (in sync with this image): if -AE state becomes CONVERGED, then the image data associated with this result should -be good to use.

-

Below are state transition tables for different AE modes.

State | Transition Cause | New State | Notes
INACTIVE |  | INACTIVE | Camera device auto exposure algorithm is disabled
-

When android.control.aeMode is AE_MODE_ON_*:

State | Transition Cause | New State | Notes
INACTIVE | Camera device initiates AE scan | SEARCHING | Values changing
INACTIVE | android.control.aeLock is ON | LOCKED | Values locked
SEARCHING | Camera device finishes AE scan | CONVERGED | Good values, not changing
SEARCHING | Camera device finishes AE scan | FLASH_REQUIRED | Converged but too dark w/o flash
SEARCHING | android.control.aeLock is ON | LOCKED | Values locked
CONVERGED | Camera device initiates AE scan | SEARCHING | Values changing
CONVERGED | android.control.aeLock is ON | LOCKED | Values locked
FLASH_REQUIRED | Camera device initiates AE scan | SEARCHING | Values changing
FLASH_REQUIRED | android.control.aeLock is ON | LOCKED | Values locked
LOCKED | android.control.aeLock is OFF | SEARCHING | Values not good after unlock
LOCKED | android.control.aeLock is OFF | CONVERGED | Values good after unlock
LOCKED | android.control.aeLock is OFF | FLASH_REQUIRED | Exposure good, but too dark
PRECAPTURE | Sequence done. android.control.aeLock is OFF | CONVERGED | Ready for high-quality capture
PRECAPTURE | Sequence done. android.control.aeLock is ON | LOCKED | Ready for high-quality capture
LOCKED | aeLock is ON and aePrecaptureTrigger is START | LOCKED | Precapture trigger is ignored when AE is already locked
LOCKED | aeLock is ON and aePrecaptureTrigger is CANCEL | LOCKED | Precapture trigger is ignored when AE is already locked
Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is START | PRECAPTURE | Start AE precapture metering sequence
Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is CANCEL | INACTIVE | Currently active precapture metering sequence is canceled
-

For the above table, the camera device may skip reporting any state changes that happen -without application intervention (i.e. mode switch, trigger, locking). Any state that -can be skipped in that manner is called a transient state.

-

For example, for above AE modes (AE_MODE_ON_*), in addition to the state transitions -listed in above table, it is also legal for the camera device to skip one or more -transient states between two results. See below table for examples:

State | Transition Cause | New State | Notes
INACTIVE | Camera device finished AE scan | CONVERGED | Values are already good, transient states are skipped by camera device.
Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is START, sequence done | FLASH_REQUIRED | Converged but too dark w/o flash after a precapture sequence, transient states are skipped by camera device.
Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is START, sequence done | CONVERGED | Converged after a precapture sequence, transient states are skipped by camera device.
Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is CANCEL, converged | FLASH_REQUIRED | Converged but too dark w/o flash after a precapture sequence is canceled, transient states are skipped by camera device.
Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is CANCEL, converged | CONVERGED | Converged after a precapture sequence is canceled, transient states are skipped by camera device.
CONVERGED | Camera device finished AE scan | FLASH_REQUIRED | Converged but too dark w/o flash after a new scan, transient states are skipped by camera device.
FLASH_REQUIRED | Camera device finished AE scan | CONVERGED | Converged after a new scan, transient states are skipped by camera device.
-
- android.control.afMode - - byte - - [public] - - - [legacy] - - - -
    -
  • - OFF -

    The auto-focus routine does not control the lens; -android.lens.focusDistance is controlled by the -application.

    -
  • -
  • - AUTO -

    Basic automatic focus mode.

    -

    In this mode, the lens does not move unless -the autofocus trigger action is called. When that trigger -is activated, AF will transition to ACTIVE_SCAN, then to -the outcome of the scan (FOCUSED or NOT_FOCUSED).

    -

    Always supported if lens is not fixed focus.

    -

    Use android.lens.info.minimumFocusDistance to determine if lens -is fixed-focus.

    -

    Triggering AF_CANCEL resets the lens position to default, -and sets the AF state to INACTIVE.

    -
  • -
  • - MACRO -

    Close-up focusing mode.

    -

    In this mode, the lens does not move unless the -autofocus trigger action is called. When that trigger is -activated, AF will transition to ACTIVE_SCAN, then to -the outcome of the scan (FOCUSED or NOT_FOCUSED). This -mode is optimized for focusing on objects very close to -the camera.

    -

    When that trigger is activated, AF will transition to -ACTIVE_SCAN, then to the outcome of the scan (FOCUSED or -NOT_FOCUSED). Triggering cancel AF resets the lens -position to default, and sets the AF state to -INACTIVE.

    -
  • -
  • - CONTINUOUS_VIDEO -

    In this mode, the AF algorithm modifies the lens -position continually to attempt to provide a -constantly-in-focus image stream.

    -

    The focusing behavior should be suitable for good quality -video recording; typically this means slower focus -movement and no overshoots. When the AF trigger is not -involved, the AF algorithm should start in INACTIVE state, -and then transition into PASSIVE_SCAN and PASSIVE_FOCUSED -states as appropriate. When the AF trigger is activated, -the algorithm should immediately transition into -AF_FOCUSED or AF_NOT_FOCUSED as appropriate, and lock the -lens position until a cancel AF trigger is received.

    -

    Once cancel is received, the algorithm should transition -back to INACTIVE and resume passive scan. Note that this -behavior is not identical to CONTINUOUS_PICTURE, since an -ongoing PASSIVE_SCAN must immediately be -canceled.

    -
  • -
  • - CONTINUOUS_PICTURE -

    In this mode, the AF algorithm modifies the lens -position continually to attempt to provide a -constantly-in-focus image stream.

    -

    The focusing behavior should be suitable for still image -capture; typically this means focusing as fast as -possible. When the AF trigger is not involved, the AF -algorithm should start in INACTIVE state, and then -transition into PASSIVE_SCAN and PASSIVE_FOCUSED states as -appropriate as it attempts to maintain focus. When the AF -trigger is activated, the algorithm should finish its -PASSIVE_SCAN if active, and then transition into -AF_FOCUSED or AF_NOT_FOCUSED as appropriate, and lock the -lens position until a cancel AF trigger is received.

    -

    When the AF cancel trigger is activated, the algorithm -should transition back to INACTIVE and then act as if it -has just been started.

    -
  • -
  • - EDOF -

    Extended depth of field (digital focus) mode.

    -

    The camera device will produce images with an extended -depth of field automatically; no special focusing -operations need to be done before taking a picture.

    -

    AF triggers are ignored, and the AF state will always be -INACTIVE.

    -
  • -
- -
-

Whether auto-focus (AF) is currently enabled, and what -mode it is set to.

-
- -

android.control.afAvailableModes

-
Details
-

Only effective if android.control.mode = AUTO and the lens is not fixed focus -(i.e. android.lens.info.minimumFocusDistance > 0). Also note that -when android.control.aeMode is OFF, the behavior of AF is device -dependent. It is recommended to lock AF by using android.control.afTrigger before -setting android.control.aeMode to OFF, or set AF mode to OFF when AE is OFF.

-

If the lens is controlled by the camera device auto-focus algorithm, -the camera device will report the current AF status in android.control.afState -in result metadata.

-
HAL Implementation Details
-

When afMode is AUTO or MACRO, the lens must not move until an AF trigger is sent in a request (android.control.afTrigger == START). After an AF trigger, the afState will end up with either FOCUSED_LOCKED or NOT_FOCUSED_LOCKED state (see android.control.afState for detailed state transitions), which indicates that the lens is locked and will not move. If camera movement (e.g. tilting the camera) causes the lens to move after the lens is locked, the HAL must compensate for this movement appropriately such that the same focal plane remains in focus.

-

When afMode is one of the continuous auto focus modes, the HAL is free to start an AF scan whenever it's not locked. When the lens is locked after an AF trigger (see android.control.afState for detailed state transitions), the HAL should maintain the same lock behavior as above.

-

When afMode is OFF, the application controls focus manually. The accuracy of the -focus distance control depends on the android.lens.info.focusDistanceCalibration. -However, the lens must not move regardless of the camera movement for any focus distance -manual control.

-

To put this in concrete terms, if the camera has lens elements which may move based on -camera orientation or motion (e.g. due to gravity), then the HAL must drive the lens to -remain in a fixed position invariant to the camera's orientation or motion, for example, -by using accelerometer measurements in the lens control logic. This is a typical issue -that will arise on camera modules with open-loop VCMs.

-
- android.control.afRegions - - int32 - x - - - 5 x area_count - - [public as meteringRectangle] - - - - - - - -

List of metering areas to use for auto-focus.

-
- Pixel coordinates within android.sensor.info.activeArraySize - -

Coordinates must be between [(0,0), (width, height)) of -android.sensor.info.activeArraySize

-
Details
-

Not available if android.control.maxRegionsAf is 0. -Otherwise will always be present.

-

The maximum number of focus areas supported by the device is determined by the value -of android.control.maxRegionsAf.

-

The coordinate system is based on the active pixel array, -with (0,0) being the top-left pixel in the active pixel array, and -(android.sensor.info.activeArraySize.width - 1, -android.sensor.info.activeArraySize.height - 1) being the -bottom-right pixel in the active pixel array.

-

The weight must be within [0, 1000], and represents a weight -for every pixel in the area. This means that a large metering area -with the same weight as a smaller area will have more effect in -the metering result. Metering areas can partially overlap and the -camera device will add the weights in the overlap region.

-

The weights are relative to weights of other metering regions, so if only one region -is used, all non-zero weights will have the same effect. A region with 0 weight is -ignored.

-

If all regions have 0 weight, then no specific metering area needs to be used by the -camera device.

-

If the metering region is outside the used android.scaler.cropRegion returned in -capture result metadata, the camera device will ignore the sections outside the crop -region and output only the intersection rectangle as the metering region in the result -metadata. If the region is entirely outside the crop region, it will be ignored and -not reported in the result metadata.

-
HAL Implementation Details
-

The HAL level representation of MeteringRectangle[] is an int[5 * area_count]. Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight). The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and ymax.

-
- android.control.afTrigger - - byte - - [public] - - - [legacy] - - - -
    -
  • - IDLE -

    The trigger is idle.

    -
  • -
  • - START -

    Autofocus will trigger now.

    -
  • -
  • - CANCEL -

    Autofocus will return to its initial -state, and cancel any currently active trigger.

    -
  • -
- -
-

Whether the camera device will trigger autofocus for this request.

-
- -
Details
-

This entry is normally set to IDLE, or is not -included at all in the request settings.

-

When included and set to START, the camera device will trigger the -autofocus algorithm. If autofocus is disabled, this trigger has no effect.

-

When set to CANCEL, the camera device will cancel any active trigger, -and return to its initial AF state.

-

Generally, applications should set this entry to START or CANCEL for only a -single capture, and then return it to IDLE (or not set at all). Specifying -START for multiple captures in a row means restarting the AF operation over -and over again.
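A hedged framework-level sketch of that one-shot pattern; `device`, `session`, `previewSurface`, `afCallback`, and `handler` are assumed names, not part of this document:

    // Send AF_TRIGGER START in a single request while the repeating request
    // keeps the trigger idle, then wait for a locked AF state.
    CaptureRequest.Builder afStart =
            device.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
    afStart.addTarget(previewSurface);
    afStart.set(CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_AUTO);
    afStart.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
    session.capture(afStart.build(), afCallback, handler);
    // afCallback waits for CaptureResult.CONTROL_AF_STATE to become
    // FOCUSED_LOCKED or NOT_FOCUSED_LOCKED before proceeding.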

-

See android.control.afState for what the trigger means for each AF mode.

-

Using the autofocus trigger and the precapture trigger android.control.aePrecaptureTrigger simultaneously is allowed. However, since these triggers often require cooperation between the auto-focus and auto-exposure routines (for example, the flash may need to be enabled for a focus sweep), the camera device may delay acting on a later trigger until the previous trigger has been fully handled. This may lead to longer intervals between the trigger and changes to android.control.afState, for example.

-
HAL Implementation Details
-

The HAL must support triggering the AF trigger while an AE precapture trigger is active -(and vice versa), or at the same time as the AE trigger. It is acceptable for the HAL to -treat these as two consecutive triggers, for example handling the AF trigger and then the -AE trigger. Or the HAL may choose to optimize the case with both triggers fired at once, -to minimize the latency for converging both focus and exposure/flash usage.

-
- android.control.afState - - byte - - [public] - - - [legacy] - - - -
    -
  • - INACTIVE -

    AF is off or has not yet tried to scan/been asked -to scan.

    -

    When a camera device is opened, it starts in this -state. This is a transient state, the camera device may -skip reporting this state in capture -result.

    -
  • -
  • - PASSIVE_SCAN -

AF is currently performing an AF scan initiated by the camera device in a continuous autofocus mode.

    -

    Only used by CONTINUOUS_* AF modes. This is a transient -state, the camera device may skip reporting this state in -capture result.

    -
  • -
  • - PASSIVE_FOCUSED -

    AF currently believes it is in focus, but may -restart scanning at any time.

    -

    Only used by CONTINUOUS_* AF modes. This is a transient -state, the camera device may skip reporting this state in -capture result.

    -
  • -
  • - ACTIVE_SCAN -

    AF is performing an AF scan because it was -triggered by AF trigger.

    -

    Only used by AUTO or MACRO AF modes. This is a transient -state, the camera device may skip reporting this state in -capture result.

    -
  • -
  • - FOCUSED_LOCKED -

    AF believes it is focused correctly and has locked -focus.

    -

    This state is reached only after an explicit START AF trigger has been -sent (android.control.afTrigger), when good focus has been obtained.

    -

    The lens will remain stationary until the AF mode (android.control.afMode) is changed or -a new AF trigger is sent to the camera device (android.control.afTrigger).

    -
  • -
  • - NOT_FOCUSED_LOCKED -

    AF has failed to focus successfully and has locked -focus.

    -

    This state is reached only after an explicit START AF trigger has been -sent (android.control.afTrigger), when good focus cannot be obtained.

    -

    The lens will remain stationary until the AF mode (android.control.afMode) is changed or -a new AF trigger is sent to the camera device (android.control.afTrigger).

    -
  • -
  • - PASSIVE_UNFOCUSED -

    AF finished a passive scan without finding focus, -and may restart scanning at any time.

    -

    Only used by CONTINUOUS_* AF modes. This is a transient state, the camera -device may skip reporting this state in capture result.

    -

    LEGACY camera devices do not support this state. When a passive -scan has finished, it will always go to PASSIVE_FOCUSED.

    -
  • -
- -
-

Current state of auto-focus (AF) algorithm.

-
- -
Details
-

Switching between or enabling AF modes (android.control.afMode) always -resets the AF state to INACTIVE. Similarly, switching between android.control.mode, -or android.control.sceneMode if android.control.mode == USE_SCENE_MODE resets all -the algorithm states to INACTIVE.

-

The camera device can do several state transitions between two results, if it is -allowed by the state transition table. For example: INACTIVE may never actually be -seen in a result.

-

The state in the result is the state for this image (in sync with this image): if -AF state becomes FOCUSED, then the image data associated with this result should -be sharp.

-

Below are state transition tables for different AF modes.

-

When android.control.afMode is AF_MODE_OFF or AF_MODE_EDOF:

State | Transition Cause | New State | Notes
INACTIVE |  | INACTIVE | Never changes
-

When android.control.afMode is AF_MODE_AUTO or AF_MODE_MACRO:

State | Transition Cause | New State | Notes
INACTIVE | AF_TRIGGER | ACTIVE_SCAN | Start AF sweep, Lens now moving
ACTIVE_SCAN | AF sweep done | FOCUSED_LOCKED | Focused, Lens now locked
ACTIVE_SCAN | AF sweep done | NOT_FOCUSED_LOCKED | Not focused, Lens now locked
ACTIVE_SCAN | AF_CANCEL | INACTIVE | Cancel/reset AF, Lens now locked
FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Cancel/reset AF
FOCUSED_LOCKED | AF_TRIGGER | ACTIVE_SCAN | Start new sweep, Lens now moving
NOT_FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Cancel/reset AF
NOT_FOCUSED_LOCKED | AF_TRIGGER | ACTIVE_SCAN | Start new sweep, Lens now moving
Any state | Mode change | INACTIVE |
-

For the above table, the camera device may skip reporting any state changes that happen -without application intervention (i.e. mode switch, trigger, locking). Any state that -can be skipped in that manner is called a transient state.

-

For example, for these AF modes (AF_MODE_AUTO and AF_MODE_MACRO), in addition to the -state transitions listed in above table, it is also legal for the camera device to skip -one or more transient states between two results. See below table for examples:

State | Transition Cause | New State | Notes
INACTIVE | AF_TRIGGER | FOCUSED_LOCKED | Focus is already good or good after a scan, lens is now locked.
INACTIVE | AF_TRIGGER | NOT_FOCUSED_LOCKED | Focus failed after a scan, lens is now locked.
FOCUSED_LOCKED | AF_TRIGGER | FOCUSED_LOCKED | Focus is already good or good after a scan, lens is now locked.
NOT_FOCUSED_LOCKED | AF_TRIGGER | FOCUSED_LOCKED | Focus is good after a scan, lens is now locked.
-

When android.control.afMode is AF_MODE_CONTINUOUS_VIDEO:

State | Transition Cause | New State | Notes
INACTIVE | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving
INACTIVE | AF_TRIGGER | NOT_FOCUSED_LOCKED | AF state query, Lens now locked
PASSIVE_SCAN | Camera device completes current scan | PASSIVE_FOCUSED | End AF scan, Lens now locked
PASSIVE_SCAN | Camera device fails current scan | PASSIVE_UNFOCUSED | End AF scan, Lens now locked
PASSIVE_SCAN | AF_TRIGGER | FOCUSED_LOCKED | Immediate transition, if focus is good. Lens now locked
PASSIVE_SCAN | AF_TRIGGER | NOT_FOCUSED_LOCKED | Immediate transition, if focus is bad. Lens now locked
PASSIVE_SCAN | AF_CANCEL | INACTIVE | Reset lens position, Lens now locked
PASSIVE_FOCUSED | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving
PASSIVE_UNFOCUSED | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving
PASSIVE_FOCUSED | AF_TRIGGER | FOCUSED_LOCKED | Immediate transition, lens now locked
PASSIVE_UNFOCUSED | AF_TRIGGER | NOT_FOCUSED_LOCKED | Immediate transition, lens now locked
FOCUSED_LOCKED | AF_TRIGGER | FOCUSED_LOCKED | No effect
FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Restart AF scan
NOT_FOCUSED_LOCKED | AF_TRIGGER | NOT_FOCUSED_LOCKED | No effect
NOT_FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Restart AF scan
-

When android.control.afMode is AF_MODE_CONTINUOUS_PICTURE:

State | Transition Cause | New State | Notes
INACTIVE | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving
INACTIVE | AF_TRIGGER | NOT_FOCUSED_LOCKED | AF state query, Lens now locked
PASSIVE_SCAN | Camera device completes current scan | PASSIVE_FOCUSED | End AF scan, Lens now locked
PASSIVE_SCAN | Camera device fails current scan | PASSIVE_UNFOCUSED | End AF scan, Lens now locked
PASSIVE_SCAN | AF_TRIGGER | FOCUSED_LOCKED | Eventual transition once the focus is good. Lens now locked
PASSIVE_SCAN | AF_TRIGGER | NOT_FOCUSED_LOCKED | Eventual transition if cannot find focus. Lens now locked
PASSIVE_SCAN | AF_CANCEL | INACTIVE | Reset lens position, Lens now locked
PASSIVE_FOCUSED | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving
PASSIVE_UNFOCUSED | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving
PASSIVE_FOCUSED | AF_TRIGGER | FOCUSED_LOCKED | Immediate trans. Lens now locked
PASSIVE_UNFOCUSED | AF_TRIGGER | NOT_FOCUSED_LOCKED | Immediate trans. Lens now locked
FOCUSED_LOCKED | AF_TRIGGER | FOCUSED_LOCKED | No effect
FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Restart AF scan
NOT_FOCUSED_LOCKED | AF_TRIGGER | NOT_FOCUSED_LOCKED | No effect
NOT_FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Restart AF scan
-

When switching between AF_MODE_CONTINUOUS_* (CAF modes) and AF_MODE_AUTO/AF_MODE_MACRO (AUTO modes), the initial INACTIVE or PASSIVE_SCAN states may be skipped by the camera device. When a trigger is included in a mode switch request, the trigger will be evaluated in the context of the new mode in the request. See below table for examples:

State | Transition Cause | New State | Notes
Any state | CAF-->AUTO mode switch | INACTIVE | Mode switch without trigger, initial state must be INACTIVE
Any state | CAF-->AUTO mode switch with AF_TRIGGER | trigger-reachable states from INACTIVE | Mode switch with trigger, INACTIVE is skipped
Any state | AUTO-->CAF mode switch | passively reachable states from INACTIVE | Mode switch without trigger, passive transient state is skipped
-
- android.control.afTriggerId - - int32 - - [system] - - - - [deprecated] - - - - -

The ID sent with the latest -CAMERA2_TRIGGER_AUTOFOCUS call

-
- -

Deprecated. Do not use.

-
Details
-

Must be 0 if no CAMERA2_TRIGGER_AUTOFOCUS trigger -received yet by HAL. Always updated even if AF algorithm -ignores the trigger

-
- android.control.awbLock - - byte - - [public as boolean] - - - [legacy] - - - -
    -
  • - OFF -

    Auto-white balance lock is disabled; the AWB -algorithm is free to update its parameters if in AUTO -mode.

    -
  • -
  • - ON -

    Auto-white balance lock is enabled; the AWB -algorithm will not update its parameters while the lock -is active.

    -
  • -
- -
-

Whether auto-white balance (AWB) is currently locked to its -latest calculated values.

-
- -
Details
-

When set to true (ON), the AWB algorithm is locked to its latest parameters, -and will not change color balance settings until the lock is set to false (OFF).

-

Since the camera device has a pipeline of in-flight requests, the settings that -get locked do not necessarily correspond to the settings that were present in the -latest capture result received from the camera device, since additional captures -and AWB updates may have occurred even before the result was sent out. If an -application is switching between automatic and manual control and wishes to eliminate -any flicker during the switch, the following procedure is recommended:

-
    -
  1. Starting in auto-AWB mode:
  2. -
  3. Lock AWB
  4. -
  5. Wait for the first result to be output that has the AWB locked
  6. -
  7. Copy AWB settings from that result into a request, set the request to manual AWB
  8. -
  9. Submit the capture request, proceed to run manual AWB as desired.
  10. -
-

Note that AWB lock is only meaningful when -android.control.awbMode is in the AUTO mode; in other modes, -AWB is already fixed to a specific setting.

-

Some LEGACY devices may not support ON; the value is then overridden to OFF.

-
- android.control.awbMode - - byte - - [public] - - - [legacy] - - - -
    -
  • - OFF -

    The camera device's auto-white balance routine is disabled.

    -

    The application-selected color transform matrix -(android.colorCorrection.transform) and gains -(android.colorCorrection.gains) are used by the camera -device for manual white balance control.

    -
  • -
  • - AUTO -

    The camera device's auto-white balance routine is active.

    -

    The application's values for android.colorCorrection.transform -and android.colorCorrection.gains are ignored. -For devices that support the MANUAL_POST_PROCESSING capability, the -values used by the camera device for the transform and gains -will be available in the capture result for this request.

    -
  • -
  • - INCANDESCENT -

    The camera device's auto-white balance routine is disabled; -the camera device uses incandescent light as the assumed scene -illumination for white balance.

    -

    While the exact white balance transforms are up to the -camera device, they will approximately match the CIE -standard illuminant A.

    -

    The application's values for android.colorCorrection.transform -and android.colorCorrection.gains are ignored. -For devices that support the MANUAL_POST_PROCESSING capability, the -values used by the camera device for the transform and gains -will be available in the capture result for this request.

    -
  • -
  • - FLUORESCENT -

    The camera device's auto-white balance routine is disabled; -the camera device uses fluorescent light as the assumed scene -illumination for white balance.

    -

    While the exact white balance transforms are up to the -camera device, they will approximately match the CIE -standard illuminant F2.

    -

    The application's values for android.colorCorrection.transform -and android.colorCorrection.gains are ignored. -For devices that support the MANUAL_POST_PROCESSING capability, the -values used by the camera device for the transform and gains -will be available in the capture result for this request.

    -
  • -
  • - WARM_FLUORESCENT -

    The camera device's auto-white balance routine is disabled; -the camera device uses warm fluorescent light as the assumed scene -illumination for white balance.

    -

    While the exact white balance transforms are up to the -camera device, they will approximately match the CIE -standard illuminant F4.

    -

    The application's values for android.colorCorrection.transform -and android.colorCorrection.gains are ignored. -For devices that support the MANUAL_POST_PROCESSING capability, the -values used by the camera device for the transform and gains -will be available in the capture result for this request.

    -
  • -
  • - DAYLIGHT -

    The camera device's auto-white balance routine is disabled; -the camera device uses daylight light as the assumed scene -illumination for white balance.

    -

    While the exact white balance transforms are up to the -camera device, they will approximately match the CIE -standard illuminant D65.

    -

    The application's values for android.colorCorrection.transform -and android.colorCorrection.gains are ignored. -For devices that support the MANUAL_POST_PROCESSING capability, the -values used by the camera device for the transform and gains -will be available in the capture result for this request.

    -
  • -
  • - CLOUDY_DAYLIGHT -

    The camera device's auto-white balance routine is disabled; -the camera device uses cloudy daylight light as the assumed scene -illumination for white balance.

    -

    The application's values for android.colorCorrection.transform -and android.colorCorrection.gains are ignored. -For devices that support the MANUAL_POST_PROCESSING capability, the -values used by the camera device for the transform and gains -will be available in the capture result for this request.

    -
  • -
  • - TWILIGHT -

    The camera device's auto-white balance routine is disabled; -the camera device uses twilight light as the assumed scene -illumination for white balance.

    -

    The application's values for android.colorCorrection.transform -and android.colorCorrection.gains are ignored. -For devices that support the MANUAL_POST_PROCESSING capability, the -values used by the camera device for the transform and gains -will be available in the capture result for this request.

    -
  • -
  • - SHADE -

    The camera device's auto-white balance routine is disabled; -the camera device uses shade light as the assumed scene -illumination for white balance.

    -

    The application's values for android.colorCorrection.transform -and android.colorCorrection.gains are ignored. -For devices that support the MANUAL_POST_PROCESSING capability, the -values used by the camera device for the transform and gains -will be available in the capture result for this request.

    -
  • -
- -
-

Whether auto-white balance (AWB) is currently setting the color -transform fields, and what its illumination target -is.

-
- -

android.control.awbAvailableModes

-
Details
-

This control is only effective if android.control.mode is AUTO.

-

When set to the AUTO mode, the camera device's auto-white balance routine is enabled, overriding the application's selected android.colorCorrection.transform, android.colorCorrection.gains and android.colorCorrection.mode. Note that when android.control.aeMode is OFF, the behavior of AWB is device dependent. It is recommended to also set AWB mode to OFF or lock AWB by using android.control.awbLock before setting AE mode to OFF.

-

When set to the OFF mode, the camera device's auto-white balance -routine is disabled. The application manually controls the white -balance by android.colorCorrection.transform, android.colorCorrection.gains -and android.colorCorrection.mode.
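A hedged framework-level sketch of that manual path, assuming a CaptureRequest.Builder `builder`, a precomputed ColorSpaceTransform `transform`, and a device that supports MANUAL_POST_PROCESSING; the gain values shown are illustrative only:

    // Disable AWB and supply the white balance transform and gains directly.
    builder.set(CaptureRequest.CONTROL_AWB_MODE, CameraMetadata.CONTROL_AWB_MODE_OFF);
    builder.set(CaptureRequest.COLOR_CORRECTION_MODE,
            CameraMetadata.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
    builder.set(CaptureRequest.COLOR_CORRECTION_GAINS,
            new android.hardware.camera2.params.RggbChannelVector(2.0f, 1.0f, 1.0f, 1.8f));
    builder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM, transform);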

-

When set to any other modes, the camera device's auto-white -balance routine is disabled. The camera device uses each -particular illumination target for white balance -adjustment. The application's values for -android.colorCorrection.transform, -android.colorCorrection.gains and -android.colorCorrection.mode are ignored.

-
- android.control.awbRegions - - int32 - x - - - 5 x area_count - - [public as meteringRectangle] - - - - - - - -

List of metering areas to use for auto-white-balance illuminant -estimation.

-
- Pixel coordinates within android.sensor.info.activeArraySize - -

Coordinates must be between [(0,0), (width, height)) of -android.sensor.info.activeArraySize

-
Details
-

Not available if android.control.maxRegionsAwb is 0. -Otherwise will always be present.

-

The maximum number of regions supported by the device is determined by the value -of android.control.maxRegionsAwb.

-

The coordinate system is based on the active pixel array, -with (0,0) being the top-left pixel in the active pixel array, and -(android.sensor.info.activeArraySize.width - 1, -android.sensor.info.activeArraySize.height - 1) being the -bottom-right pixel in the active pixel array.

-

The weight must range from 0 to 1000, and represents a weight -for every pixel in the area. This means that a large metering area -with the same weight as a smaller area will have more effect in -the metering result. Metering areas can partially overlap and the -camera device will add the weights in the overlap region.

-

The weights are relative to weights of other white balance metering regions, so if -only one region is used, all non-zero weights will have the same effect. A region with -0 weight is ignored.

-

If all regions have 0 weight, then no specific metering area needs to be used by the -camera device.

-

If the metering region is outside the used android.scaler.cropRegion returned in -capture result metadata, the camera device will ignore the sections outside the crop -region and output only the intersection rectangle as the metering region in the result -metadata. If the region is entirely outside the crop region, it will be ignored and -not reported in the result metadata.

-
HAL Implementation Details
-

The HAL level representation of MeteringRectangle[] is an int[5 * area_count]. Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight). The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and ymax.

-
- android.control.captureIntent - - byte - - [public] - - - [legacy] - - - -
    -
  • - CUSTOM -

    The goal of this request doesn't fall into the other -categories. The camera device will default to preview-like -behavior.

    -
  • -
  • - PREVIEW -

    This request is for a preview-like use case.

    -

    The precapture trigger may be used to start off a metering -w/flash sequence.

    -
  • -
  • - STILL_CAPTURE -

    This request is for a still capture-type -use case.

    -

    If the flash unit is under automatic control, it may fire as needed.

    -
  • -
  • - VIDEO_RECORD -

    This request is for a video recording -use case.

    -
  • -
  • - VIDEO_SNAPSHOT -

    This request is for a video snapshot (still -image while recording video) use case.

    -

    The camera device should take the highest-quality image -possible (given the other settings) without disrupting the -frame rate of video recording.

    -
  • -
  • - ZERO_SHUTTER_LAG -

    This request is for a ZSL usecase; the -application will stream full-resolution images and -reprocess one or several later for a final -capture.

    -
  • -
  • - MANUAL -

    This request is for manual capture use case where -the applications want to directly control the capture parameters.

    -

    For example, the application may wish to manually control -android.sensor.exposureTime, android.sensor.sensitivity, etc.

    -
  • -
- -
-

Information to the camera device 3A (auto-exposure, -auto-focus, auto-white balance) routines about the purpose -of this capture, to help the camera device to decide optimal 3A -strategy.

-
- -
Details
-

This control (except for MANUAL) is only effective if -android.control.mode != OFF and any 3A routine is active.

-

ZERO_SHUTTER_LAG will be supported if android.request.availableCapabilities -contains PRIVATE_REPROCESSING or YUV_REPROCESSING. MANUAL will be supported if -android.request.availableCapabilities contains MANUAL_SENSOR. Other intent values are -always supported.

-
- android.control.awbState - - byte - - [public] - - - [limited] - - - -
    -
  • - INACTIVE -

    AWB is not in auto mode, or has not yet started metering.

    -

    When a camera device is opened, it starts in this -state. This is a transient state, the camera device may -skip reporting this state in capture -result.

    -
  • -
  • - SEARCHING -

    AWB doesn't yet have a good set of control -values for the current scene.

    -

    This is a transient state, the camera device -may skip reporting this state in capture result.

    -
  • -
  • - CONVERGED -

    AWB has a good set of control values for the -current scene.

    -
  • -
  • - LOCKED -

    AWB has been locked.

    -
  • -
- -
-

Current state of auto-white balance (AWB) algorithm.

-
- -
Details
-

Switching between or enabling AWB modes (android.control.awbMode) always -resets the AWB state to INACTIVE. Similarly, switching between android.control.mode, -or android.control.sceneMode if android.control.mode == USE_SCENE_MODE resets all -the algorithm states to INACTIVE.

-

The camera device can do several state transitions between two results, if it is -allowed by the state transition table. So INACTIVE may never actually be seen in -a result.

-

The state in the result is the state for this image (in sync with this image): if -AWB state becomes CONVERGED, then the image data associated with this result should -be good to use.

-

Below are state transition tables for different AWB modes.

-

When android.control.awbMode != AWB_MODE_AUTO:

State | Transition Cause | New State | Notes
INACTIVE |  | INACTIVE | Camera device auto white balance algorithm is disabled
-

When android.control.awbMode is AWB_MODE_AUTO:

State | Transition Cause | New State | Notes
INACTIVE | Camera device initiates AWB scan | SEARCHING | Values changing
INACTIVE | android.control.awbLock is ON | LOCKED | Values locked
SEARCHING | Camera device finishes AWB scan | CONVERGED | Good values, not changing
SEARCHING | android.control.awbLock is ON | LOCKED | Values locked
CONVERGED | Camera device initiates AWB scan | SEARCHING | Values changing
CONVERGED | android.control.awbLock is ON | LOCKED | Values locked
LOCKED | android.control.awbLock is OFF | SEARCHING | Values not good after unlock
-

For the above table, the camera device may skip reporting any state changes that happen -without application intervention (i.e. mode switch, trigger, locking). Any state that -can be skipped in that manner is called a transient state.

-

For example, for this AWB mode (AWB_MODE_AUTO), in addition to the state transitions -listed in above table, it is also legal for the camera device to skip one or more -transient states between two results. See below table for examples:

State | Transition Cause | New State | Notes
INACTIVE | Camera device finished AWB scan | CONVERGED | Values are already good, transient states are skipped by camera device.
LOCKED | android.control.awbLock is OFF | CONVERGED | Values good after unlock, transient states are skipped by camera device.
-
- android.control.effectMode - - byte - - [public] - - - [legacy] - - - -
    -
  • - OFF -

    No color effect will be applied.

    -
  • -
  • - MONO - [optional] -

    A "monocolor" effect where the image is mapped into -a single color.

    -

    This will typically be grayscale.

    -
  • -
  • - NEGATIVE - [optional] -

    A "photo-negative" effect where the image's colors -are inverted.

    -
  • -
  • - SOLARIZE - [optional] -

    A "solarisation" effect (Sabattier effect) where the -image is wholly or partially reversed in -tone.

    -
  • -
  • - SEPIA - [optional] -

    A "sepia" effect where the image is mapped into warm -gray, red, and brown tones.

    -
  • -
  • - POSTERIZE - [optional] -

    A "posterization" effect where the image uses -discrete regions of tone rather than a continuous -gradient of tones.

    -
  • -
  • - WHITEBOARD - [optional] -

    A "whiteboard" effect where the image is typically displayed -as regions of white, with black or grey details.

    -
  • -
  • - BLACKBOARD - [optional] -

    A "blackboard" effect where the image is typically displayed -as regions of black, with white or grey details.

    -
  • -
  • - AQUA - [optional] -

    An "aqua" effect where a blue hue is added to the image.

    -
  • -
- -
-

A special color effect to apply.

-
- -

android.control.availableEffects

-
Details
-

When this mode is set, a color effect will be applied -to images produced by the camera device. The interpretation -and implementation of these color effects is left to the -implementor of the camera device, and should not be -depended on to be consistent (or present) across all -devices.

-
- android.control.mode - - byte - - [public] - - - [legacy] - - - -
    -
  • - OFF -

    Full application control of pipeline.

    -

    All control by the device's metering and focusing (3A) -routines is disabled, and no other settings in -android.control.* have any effect, except that -android.control.captureIntent may be used by the camera -device to select post-processing values for processing -blocks that do not allow for manual control, or are not -exposed by the camera API.

    -

    However, the camera device's 3A routines may continue to -collect statistics and update their internal state so that -when control is switched to AUTO mode, good control values -can be immediately applied.

    -
  • -
  • - AUTO -

    Use settings for each individual 3A routine.

    -

    Manual control of capture parameters is disabled. All -controls in android.control.* besides sceneMode take -effect.

    -
  • -
  • - USE_SCENE_MODE - [optional] -

    Use a specific scene mode.

    -

    Enabling this disables control.aeMode, control.awbMode and -control.afMode controls; the camera device will ignore -those settings while USE_SCENE_MODE is active (except for -FACE_PRIORITY scene mode). Other control entries are still active. -This setting can only be used if scene mode is supported (i.e. -android.control.availableSceneModes -contain some modes other than DISABLED).

    -
  • -
  • - OFF_KEEP_STATE - [optional] -

    Same as OFF mode, except that this capture will not be -used by camera device background auto-exposure, auto-white balance and -auto-focus algorithms (3A) to update their statistics.

    -

    Specifically, the 3A routines are locked to the last -values set from a request with AUTO, OFF, or -USE_SCENE_MODE, and any statistics or state updates -collected from manual captures with OFF_KEEP_STATE will be -discarded by the camera device.

    -
  • -
- -
-

Overall mode of 3A (auto-exposure, auto-white-balance, auto-focus) control -routines.

-
- -

android.control.availableModes

-
Details
-

This is a top-level 3A control switch. When set to OFF, all 3A control -by the camera device is disabled. The application must set the fields for -capture parameters itself.

-

When set to AUTO, the individual algorithm controls in -android.control.* are in effect, such as android.control.afMode.

-

When set to USE_SCENE_MODE, the individual controls in -android.control.* are mostly disabled, and the camera device implements -one of the scene mode settings (such as ACTION, SUNSET, or PARTY) -as it wishes. The camera device scene mode 3A settings are provided by -capture results.

-

When set to OFF_KEEP_STATE, it is similar to OFF mode; the only difference is that this frame will not be used by the camera device's background 3A statistics update, as if this frame were never captured. This mode can be used in the scenario where the application doesn't want a 3A manual control capture to affect the subsequent auto 3A capture results.

-
- android.control.sceneMode - - byte - - [public] - - - [legacy] - - - -
    -
  • - DISABLED - 0 -

    Indicates that no scene modes are set for a given capture request.

    -
  • -
  • - FACE_PRIORITY -

    If face detection support exists, use face -detection data for auto-focus, auto-white balance, and -auto-exposure routines.

    -

    If face detection statistics are disabled -(i.e. android.statistics.faceDetectMode is set to OFF), -this should still operate correctly (but will not return -face detection statistics to the framework).

    -

    Unlike the other scene modes, android.control.aeMode, -android.control.awbMode, and android.control.afMode -remain active when FACE_PRIORITY is set.

    -
  • -
  • - ACTION - [optional] -

    Optimized for photos of quickly moving objects.

    -

    Similar to SPORTS.

    -
  • -
  • - PORTRAIT - [optional] -

    Optimized for still photos of people.

    -
  • -
  • - LANDSCAPE - [optional] -

    Optimized for photos of distant macroscopic objects.

    -
  • -
  • - NIGHT - [optional] -

    Optimized for low-light settings.

    -
  • -
  • - NIGHT_PORTRAIT - [optional] -

    Optimized for still photos of people in low-light -settings.

    -
  • -
  • - THEATRE - [optional] -

    Optimized for dim, indoor settings where flash must -remain off.

    -
  • -
  • - BEACH - [optional] -

    Optimized for bright, outdoor beach settings.

    -
  • -
  • - SNOW - [optional] -

    Optimized for bright, outdoor settings containing snow.

    -
  • -
  • - SUNSET - [optional] -

    Optimized for scenes of the setting sun.

    -
  • -
  • - STEADYPHOTO - [optional] -

    Optimized to avoid blurry photos due to small amounts of -device motion (for example: due to hand shake).

    -
  • -
  • - FIREWORKS - [optional] -

    Optimized for nighttime photos of fireworks.

    -
  • -
  • - SPORTS - [optional] -

    Optimized for photos of quickly moving people.

    -

    Similar to ACTION.

    -
  • -
  • - PARTY - [optional] -

    Optimized for dim, indoor settings with multiple moving -people.

    -
  • -
  • - CANDLELIGHT - [optional] -

    Optimized for dim settings where the main light source -is a flame.

    -
  • -
  • - BARCODE - [optional] -

    Optimized for accurately capturing a photo of a barcode for use by camera applications that wish to read the barcode value.

    -
  • -
  • - HIGH_SPEED_VIDEO - [deprecated] - [optional] -

    This is deprecated; please use CameraDevice#createConstrainedHighSpeedCaptureSession and CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList for high speed video recording.

    -

    Optimized for high speed video recording (frame rate >=60fps) use case.

    -

    The supported high speed video sizes and fps ranges are specified in android.control.availableHighSpeedVideoConfigurations. To get the desired output frame rates, the application is only allowed to select video size and fps range combinations listed in this static metadata. The fps range can be controlled via android.control.aeTargetFpsRange.

    -

    In this mode, the camera device will override aeMode, awbMode, and afMode to -ON, ON, and CONTINUOUS_VIDEO, respectively. All post-processing block mode -controls will be overridden to be FAST. Therefore, no manual control of capture -and post-processing parameters is possible. All other controls operate the -same as when android.control.mode == AUTO. This means that all other -android.control.* fields continue to work, such as

    - -

    Outside of android.control.*, the following controls will work:

    - -

    For the high speed recording use case, the actual maximum supported frame rate may be lower than what the camera can output, depending on the destination Surfaces for the image data. For example, if the destination surface is from a video encoder, the application needs to check if the video encoder is capable of supporting the high frame rate for a given video size, or it will end up with a lower recording frame rate. If the destination surface is from a preview window, the preview frame rate will be bounded by the screen refresh rate.

    -

    The camera device will only support up to 2 output high speed streams (processed non-stalling format defined in android.request.maxNumOutputStreams) in this mode. This control will be effective only if all of the below conditions are true:

    - -

    When the above conditions are NOT satisfied, the controls of this mode and android.control.aeTargetFpsRange will be ignored by the camera device, the camera device will fall back to android.control.mode == AUTO, and the returned capture result metadata will give the fps range chosen by the camera device.

    -

    Switching into or out of this mode may trigger some camera ISP/sensor reconfigurations, which may introduce extra latency. It is recommended that the application avoid unnecessary scene mode switches as much as possible.

    -
  • -
  • - HDR - [optional] -

    Turn on a device-specific high dynamic range (HDR) mode.

    -

    In this scene mode, the camera device captures images that keep a larger range of scene illumination levels visible in the final image. For example, when taking a picture of an object in front of a bright window, both the object and the scene through the window may be visible when using HDR mode, while in normal AUTO mode, one or the other may be poorly exposed. As a tradeoff, HDR mode generally takes much longer to capture a single image, has no user control, and may have other artifacts depending on the HDR method used.

    -

    Therefore, HDR captures operate at a much slower rate -than regular captures.

    -

    In this mode, on LIMITED or FULL devices, when a request -is made with a android.control.captureIntent of -STILL_CAPTURE, the camera device will capture an image -using a high dynamic range capture technique. On LEGACY -devices, captures that target a JPEG-format output will -be captured with HDR, and the capture intent is not -relevant.

    -

    The HDR capture may involve the device capturing a burst -of images internally and combining them into one, or it -may involve the device using specialized high dynamic -range capture hardware. In all cases, a single image is -produced in response to a capture request submitted -while in HDR mode.

    -

    Since substantial post-processing is generally needed to -produce an HDR image, only YUV, PRIVATE, and JPEG -outputs are supported for LIMITED/FULL device HDR -captures, and only JPEG outputs are supported for LEGACY -HDR captures. Using a RAW output for HDR capture is not -supported.

    -

    Some devices may also support always-on HDR, which -applies HDR processing at full frame rate. For these -devices, intents other than STILL_CAPTURE will also -produce an HDR output with no frame rate impact compared -to normal operation, though the quality may be lower -than for STILL_CAPTURE intents.

    -

    If SCENE_MODE_HDR is used with unsupported output types -or capture intents, the images captured will be as if -the SCENE_MODE was not enabled at all.

    -
  • -
  • - FACE_PRIORITY_LOW_LIGHT - [optional] - [hidden] -

    Same as FACE_PRIORITY scene mode, except that the camera -device will choose higher sensitivity values (android.sensor.sensitivity) -under low light conditions.

    -

    The camera device may be tuned to expose the images in a reduced sensitivity range to produce the best quality images. For example, if the android.sensor.info.sensitivityRange gives a range of [100, 1600], the camera device auto-exposure routine tuning process may limit the actual exposure sensitivity range to [100, 1200] to ensure that the noise level isn't excessive in order to preserve the image quality. In this situation, the image under low light may be under-exposed when the sensor max exposure time (bounded by the android.control.aeTargetFpsRange when android.control.aeMode is one of the ON_* modes) and effective max sensitivity are reached. This scene mode allows the camera device auto-exposure routine to increase the sensitivity up to the max sensitivity specified by android.sensor.info.sensitivityRange when the scene is too dark and the max exposure time is reached. The captured images may be noisier compared with the images captured in normal FACE_PRIORITY mode; therefore, it is recommended that the application only use this scene mode when it is capable of reducing the noise level of the captured images.

    -

    Unlike the other scene modes, android.control.aeMode, -android.control.awbMode, and android.control.afMode -remain active when FACE_PRIORITY_LOW_LIGHT is set.

    -
  • -
  • - DEVICE_CUSTOM_START - [optional] - [hidden] - 100 -

    Scene mode values within the range of -[DEVICE_CUSTOM_START, DEVICE_CUSTOM_END] are reserved for device specific -customized scene modes.

    -
  • -
  • - DEVICE_CUSTOM_END - [optional] - [hidden] - 127 -

    Scene mode values within the range of -[DEVICE_CUSTOM_START, DEVICE_CUSTOM_END] are reserved for device specific -customized scene modes.

    -
  • -
- -
-

Control for which scene mode is currently active.

-
- -

android.control.availableSceneModes

-
Details
-

Scene modes are custom camera modes optimized for a certain set of conditions and -capture settings.

-

This is the mode that is active when android.control.mode == USE_SCENE_MODE. Aside from FACE_PRIORITY, these modes will disable android.control.aeMode, android.control.awbMode, and android.control.afMode while in use.

-

The interpretation and implementation of these scene modes is left -to the implementor of the camera device. Their behavior will not be -consistent across all devices, and any given device may only implement -a subset of these modes.

-
HAL Implementation Details
-

HAL implementations that include scene modes are expected to provide -the per-scene settings to use for android.control.aeMode, -android.control.awbMode, and android.control.afMode in -android.control.sceneModeOverrides.

-

For HIGH_SPEED_VIDEO mode, if it is included in android.control.availableSceneModes, the HAL must list the supported video sizes and fps ranges in android.control.availableHighSpeedVideoConfigurations. For a given size, e.g. 1280x720, if the HAL has two different sensor configurations for normal streaming mode and high speed streaming, then when this scene mode is set/reset in a sequence of capture requests, the HAL may have to switch between different sensor modes. This mode is deprecated in HAL3.3; to support high speed video recording, please implement android.control.availableHighSpeedVideoConfigurations and the CONSTRAINED_HIGH_SPEED_VIDEO capability defined in android.request.availableCapabilities.

-
- android.control.videoStabilizationMode - - byte - - [public] - - - [legacy] - - - -
    -
  • - OFF -

    Video stabilization is disabled.

    -
  • -
  • - ON -

    Video stabilization is enabled.

    -
  • -
- -
-

Whether video stabilization is -active.

-
- -
Details
-

Video stabilization automatically warps images from -the camera in order to stabilize motion between consecutive frames.

-

If enabled, video stabilization can modify the -android.scaler.cropRegion to keep the video stream stabilized.

-

Switching between different video stabilization modes may take several frames to initialize; the camera device will report the current mode in capture result metadata. For example, when "ON" mode is requested, the video stabilization modes in the first several capture results may still be "OFF", and it will become "ON" when the initialization is done.

-

In addition, not all recording sizes or frame rates may be supported for -stabilization by a device that reports stabilization support. It is guaranteed -that an output targeting a MediaRecorder or MediaCodec will be stabilized if -the recording resolution is less than or equal to 1920 x 1080 (width less than -or equal to 1920, height less than or equal to 1080), and the recording -frame rate is less than or equal to 30fps. At other sizes, the CaptureResult -android.control.videoStabilizationMode field will return -OFF if the recording output is not stabilized, or if there are no output -Surface types that can be stabilized.

-

If a camera device supports both this mode and OIS -(android.lens.opticalStabilizationMode), turning both modes on may -produce undesirable interaction, so it is recommended not to enable -both at the same time.

-
- android.control.postRawSensitivityBoost - - int32 - - [public] - - - - - - - -

The amount of additional sensitivity boost applied to output images -after RAW sensor data is captured.

-
- ISO arithmetic units, the same as android.sensor.sensitivity - -

android.control.postRawSensitivityBoostRange

-
Details
-

Some camera devices support additional digital sensitivity boosting in the camera processing pipeline after the sensor RAW image is captured. Such a boost will be applied to YUV/JPEG format output images but will not have an effect on RAW output formats like RAW_SENSOR, RAW10, RAW12 or RAW_OPAQUE.

-

This key will be null for devices that do not support any RAW format outputs. For devices that do support RAW format outputs, this key will always be present, and if a device does not support post RAW sensitivity boost, it will list 100 in this key.

-

If the camera device cannot apply the exact boost requested, it will reduce the -boost to the nearest supported value. -The final boost value used will be available in the output capture result.

-

For devices that support post RAW sensitivity boost, the YUV/JPEG output images of such a device will have a total sensitivity of android.sensor.sensitivity * android.control.postRawSensitivityBoost / 100. The sensitivity of RAW format images will always be android.sensor.sensitivity.

-

This control is only effective if android.control.aeMode or android.control.mode is set to -OFF; otherwise the auto-exposure algorithm will override this value.
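For a concrete sense of the arithmetic above, the following sketch (names and values are illustrative, assuming the device lists a boost range wider than [100, 100]) requests a boost of 200 on top of ISO 100, giving an effective YUV/JPEG sensitivity of ISO 200 while RAW stays at ISO 100:

private void setManualSensitivityWithBoost(CaptureRequest.Builder builder) {
    // Manual exposure must be active for the boost to take effect
    builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
    builder.set(CaptureRequest.SENSOR_SENSITIVITY, 100);                  // RAW/analog ISO 100
    builder.set(CaptureRequest.CONTROL_POST_RAW_SENSITIVITY_BOOST, 200);  // ISO arithmetic units
    // Effective YUV/JPEG sensitivity: 100 * 200 / 100 = ISO 200
}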

-
demosaic
controls
Property NameTypeDescriptionUnitsRangeTags
- android.demosaic.mode - - byte - - [system] - - - - - -
    -
  • - FAST -

    Minimal or no slowdown of frame rate compared to -Bayer RAW output.

    -
  • -
  • - HIGH_QUALITY -

    Improved processing quality but the frame rate might be slowed down -relative to raw output.

    -
  • -
- -
-

Controls the quality of the demosaicing -processing.

-
- -
edge
controls
Property NameTypeDescriptionUnitsRangeTags
- android.edge.mode - - byte - - [public] - - - [full] - - - -
    -
  • - OFF -

    No edge enhancement is applied.

    -
  • -
  • - FAST -

    Apply edge enhancement at a quality level that does not slow down frame rate -relative to sensor output. It may be the same as OFF if edge enhancement will -slow down frame rate relative to sensor.

    -
  • -
  • - HIGH_QUALITY -

    Apply high-quality edge enhancement, at a cost of possibly reduced output frame rate.

    -
  • -
  • - ZERO_SHUTTER_LAG - [optional] -

    Edge enhancement is applied at different levels for different output streams, -based on resolution. Streams at maximum recording resolution (see CameraDevice#createCaptureSession) or below have -edge enhancement applied, while higher-resolution streams have no edge enhancement -applied. The level of edge enhancement for low-resolution streams is tuned so that -frame rate is not impacted, and the quality is equal to or better than FAST (since it -is only applied to lower-resolution outputs, quality may improve from FAST).

    -

    This mode is intended to be used by applications operating in a zero-shutter-lag mode -with YUV or PRIVATE reprocessing, where the application continuously captures -high-resolution intermediate buffers into a circular buffer, from which a final image is -produced via reprocessing when a user takes a picture. For such a use case, the -high-resolution buffers must not have edge enhancement applied to maximize efficiency of -preview and to avoid double-applying enhancement when reprocessed, while low-resolution -buffers (used for recording or preview, generally) need edge enhancement applied for -reasonable preview quality.

    -

    This mode is guaranteed to be supported by devices that support either the -YUV_REPROCESSING or PRIVATE_REPROCESSING capabilities -(android.request.availableCapabilities lists either of those capabilities) and it will -be the default mode for CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.

    -
  • -
- -
-

Operation mode for edge -enhancement.

-
- -

android.edge.availableEdgeModes

-
Details
-

Edge enhancement improves sharpness and details in the captured image. OFF means -no enhancement will be applied by the camera device.

-

FAST/HIGH_QUALITY both mean camera device determined enhancement -will be applied. HIGH_QUALITY mode indicates that the -camera device will use the highest-quality enhancement algorithms, -even if it slows down capture rate. FAST means the camera device will -not slow down capture rate when applying edge enhancement. FAST may be the same as OFF if -edge enhancement will slow down capture rate. Every output stream will have a similar -amount of enhancement applied.

-

ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular -buffer of high-resolution images during preview and reprocess image(s) from that buffer -into a final capture when triggered by the user. In this mode, the camera device applies -edge enhancement to low-resolution streams (below maximum recording resolution) to -maximize preview quality, but does not apply edge enhancement to high-resolution streams, -since those will be reprocessed later if necessary.

-

For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera -device will apply FAST/HIGH_QUALITY YUV-domain edge enhancement, respectively. -The camera device may adjust its internal edge enhancement parameters for best -image quality based on the android.reprocess.effectiveExposureFactor, if it is set.

-
HAL Implementation Details
-

For YUV_REPROCESSING, the HAL can use android.reprocess.effectiveExposureFactor to adjust the internal edge enhancement reduction parameters appropriately to get the best quality images.

-
- android.edge.strength - - byte - - [system] - - - - - - - -

Control the amount of edge enhancement -applied to the images

-
- 1-10; 10 is maximum sharpening - -
static
Property NameTypeDescriptionUnitsRangeTags
- android.edge.availableEdgeModes - - byte - x - - - n - - [public as enumList] - - - [full] - - -
list of enums
- - -
-

List of edge enhancement modes for android.edge.mode that are supported by this camera -device.

-
- -

Any value listed in android.edge.mode

-
Details
-

Full-capability camera devices must always support OFF; camera devices that support -YUV_REPROCESSING or PRIVATE_REPROCESSING will list ZERO_SHUTTER_LAG; all devices will -list FAST.

-
HAL Implementation Details
-

HAL must support both FAST and HIGH_QUALITY if edge enhancement control is available -on the camera device, but the underlying implementation can be the same for both modes. -That is, if the highest quality implementation on the camera device does not slow down -capture rate, then FAST and HIGH_QUALITY will generate the same output.

-
dynamic
Property NameTypeDescriptionUnitsRangeTags
- android.edge.mode - - byte - - [public] - - - [full] - - - -
    -
  • - OFF -

    No edge enhancement is applied.

    -
  • -
  • - FAST -

    Apply edge enhancement at a quality level that does not slow down frame rate -relative to sensor output. It may be the same as OFF if edge enhancement will -slow down frame rate relative to sensor.

    -
  • -
  • - HIGH_QUALITY -

    Apply high-quality edge enhancement, at a cost of possibly reduced output frame rate.

    -
  • -
  • - ZERO_SHUTTER_LAG - [optional] -

    Edge enhancement is applied at different levels for different output streams, -based on resolution. Streams at maximum recording resolution (see CameraDevice#createCaptureSession) or below have -edge enhancement applied, while higher-resolution streams have no edge enhancement -applied. The level of edge enhancement for low-resolution streams is tuned so that -frame rate is not impacted, and the quality is equal to or better than FAST (since it -is only applied to lower-resolution outputs, quality may improve from FAST).

    -

    This mode is intended to be used by applications operating in a zero-shutter-lag mode -with YUV or PRIVATE reprocessing, where the application continuously captures -high-resolution intermediate buffers into a circular buffer, from which a final image is -produced via reprocessing when a user takes a picture. For such a use case, the -high-resolution buffers must not have edge enhancement applied to maximize efficiency of -preview and to avoid double-applying enhancement when reprocessed, while low-resolution -buffers (used for recording or preview, generally) need edge enhancement applied for -reasonable preview quality.

    -

    This mode is guaranteed to be supported by devices that support either the -YUV_REPROCESSING or PRIVATE_REPROCESSING capabilities -(android.request.availableCapabilities lists either of those capabilities) and it will -be the default mode for CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.

    -
  • -
- -
-

Operation mode for edge -enhancement.

-
- -

android.edge.availableEdgeModes

-
Details
-

Edge enhancement improves sharpness and details in the captured image. OFF means -no enhancement will be applied by the camera device.

-

FAST/HIGH_QUALITY both mean camera device determined enhancement -will be applied. HIGH_QUALITY mode indicates that the -camera device will use the highest-quality enhancement algorithms, -even if it slows down capture rate. FAST means the camera device will -not slow down capture rate when applying edge enhancement. FAST may be the same as OFF if -edge enhancement will slow down capture rate. Every output stream will have a similar -amount of enhancement applied.

-

ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular -buffer of high-resolution images during preview and reprocess image(s) from that buffer -into a final capture when triggered by the user. In this mode, the camera device applies -edge enhancement to low-resolution streams (below maximum recording resolution) to -maximize preview quality, but does not apply edge enhancement to high-resolution streams, -since those will be reprocessed later if necessary.

-

For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera -device will apply FAST/HIGH_QUALITY YUV-domain edge enhancement, respectively. -The camera device may adjust its internal edge enhancement parameters for best -image quality based on the android.reprocess.effectiveExposureFactor, if it is set.

-
HAL Implementation Details
-

For YUV_REPROCESSING, the HAL can use android.reprocess.effectiveExposureFactor to adjust the internal edge enhancement reduction parameters appropriately to get the best quality images.

-
flash
controls
Property NameTypeDescriptionUnitsRangeTags
- android.flash.firingPower - - byte - - [system] - - - - - - - -

Power for flash firing/torch

-
- 10 is max power; 0 is no flash. Linear - -

0 - 10

-
Details
-

Power for snapshot may use a different scale than -for torch mode. Only one entry for torch mode will be -used

-
- android.flash.firingTime - - int64 - - [system] - - - - - - - -

Firing time of flash relative to start of -exposure

-
- nanoseconds - -

0-(exposure time-flash duration)

-
Details
-

Clamped to (0, exposure time - flash -duration).

-
- android.flash.mode - - byte - - [public] - - - [legacy] - - - -
    -
  • - OFF -

    Do not fire the flash for this capture.

    -
  • -
  • - SINGLE -

    If the flash is available and charged, fire flash -for this capture.

    -
  • -
  • - TORCH -

    Transition flash to continuously on.

    -
  • -
- -
-

The desired mode for the camera device's flash control.

-
- -
Details
-

This control is only effective when a flash unit is available (android.flash.info.available == true).

-

When this control is used, the android.control.aeMode must be set to ON or OFF. -Otherwise, the camera device auto-exposure related flash control (ON_AUTO_FLASH, -ON_ALWAYS_FLASH, or ON_AUTO_FLASH_REDEYE) will override this control.

-

When set to OFF, the camera device will not fire flash for this capture.

-

When set to SINGLE, the camera device will fire the flash regardless of the camera device's auto-exposure routine's result. When used in the still capture case, this control should be used along with the auto-exposure (AE) precapture metering sequence (android.control.aePrecaptureTrigger); otherwise, the image may be incorrectly exposed.

-

When set to TORCH, the flash will be on continuously. This mode can be used -for use cases such as preview, auto-focus assist, still capture, or video recording.

-

The flash status will be reported by android.flash.state in the capture result metadata.
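A minimal sketch (illustrative only; the surface name and template choice are assumptions) of building a still-capture request that forces a single flash fire while keeping auto-exposure in a mode that does not override android.flash.mode:

private CaptureRequest buildSingleFlashStill(CameraDevice device, Surface jpegSurface)
        throws CameraAccessException {
    CaptureRequest.Builder builder =
            device.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
    builder.addTarget(jpegSurface);
    // AE must be ON or OFF so that an ON_AUTO_FLASH-style mode does not take over flash control
    builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
    builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_SINGLE);
    // In practice a precapture sequence (CONTROL_AE_PRECAPTURE_TRIGGER_START) is run first
    return builder.build();
}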

-
static
Property NameTypeDescriptionUnitsRangeTags
- android.flash.info.available - - byte - - [public as boolean] - - - [legacy] - - - -
    -
  • - FALSE -
  • -
  • - TRUE -
  • -
- -
-

Whether this camera device has a -flash unit.

-
- -
Details
-

Will be false if no flash is available.

-

If there is no flash unit, none of the flash controls do -anything.

-
- android.flash.info.chargeDuration - - int64 - - [system] - - - - - - - -

Time taken before flash can fire -again

-
- nanoseconds - -

0-1e9

-
Details
-

1 second too long/too short for recharge? Should -this be power-dependent?

-
- android.flash.colorTemperature - - byte - - [system] - - - - - - - -

The x,y whitepoint of the -flash

-
- pair of floats - -

0-1 for both

-
- android.flash.maxEnergy - - byte - - [system] - - - - - - - -

Max energy output of the flash for a full -power single flash

-
- lumen-seconds - -

>= 0

-
dynamic
Property NameTypeDescriptionUnitsRangeTags
- android.flash.firingPower - - byte - - [system] - - - - - - - -

Power for flash firing/torch

-
- 10 is max power; 0 is no flash. Linear - -

0 - 10

-
Details
-

Power for snapshot may use a different scale than -for torch mode. Only one entry for torch mode will be -used

-
- android.flash.firingTime - - int64 - - [system] - - - - - - - -

Firing time of flash relative to start of -exposure

-
- nanoseconds - -

0-(exposure time-flash duration)

-
Details
-

Clamped to (0, exposure time - flash -duration).

-
- android.flash.mode - - byte - - [public] - - - [legacy] - - - -
    -
  • - OFF -

    Do not fire the flash for this capture.

    -
  • -
  • - SINGLE -

    If the flash is available and charged, fire flash -for this capture.

    -
  • -
  • - TORCH -

    Transition flash to continuously on.

    -
  • -
- -
-

The desired mode for the camera device's flash control.

-
- -
Details
-

This control is only effective when a flash unit is available (android.flash.info.available == true).

-

When this control is used, the android.control.aeMode must be set to ON or OFF. -Otherwise, the camera device auto-exposure related flash control (ON_AUTO_FLASH, -ON_ALWAYS_FLASH, or ON_AUTO_FLASH_REDEYE) will override this control.

-

When set to OFF, the camera device will not fire flash for this capture.

-

When set to SINGLE, the camera device will fire the flash regardless of the camera device's auto-exposure routine's result. When used in the still capture case, this control should be used along with the auto-exposure (AE) precapture metering sequence (android.control.aePrecaptureTrigger); otherwise, the image may be incorrectly exposed.

-

When set to TORCH, the flash will be on continuously. This mode can be used -for use cases such as preview, auto-focus assist, still capture, or video recording.

-

The flash status will be reported by android.flash.state in the capture result metadata.

-
- android.flash.state - - byte - - [public] - - - [limited] - - - -
    -
  • - UNAVAILABLE -

    No flash on camera.

    -
  • -
  • - CHARGING -

    Flash is charging and cannot be fired.

    -
  • -
  • - READY -

    Flash is ready to fire.

    -
  • -
  • - FIRED -

    Flash fired for this capture.

    -
  • -
  • - PARTIAL -

    Flash partially illuminated this frame.

    -

    This is usually due to the next or previous frame having -the flash fire, and the flash spilling into this capture -due to hardware limitations.

    -
  • -
- -
-

Current state of the flash -unit.

-
- -
Details
-

When the camera device doesn't have flash unit -(i.e. android.flash.info.available == false), this state will always be UNAVAILABLE. -Other states indicate the current flash status.

-

In certain conditions, this will be available on LEGACY devices:

- -

In all other conditions the state will not be available on -LEGACY devices (i.e. it will be null).

-
hotPixel
controls
Property NameTypeDescriptionUnitsRangeTags
- android.hotPixel.mode - - byte - - [public] - - - - - -
    -
  • - OFF -

    No hot pixel correction is applied.

    -

    The frame rate must not be reduced relative to sensor raw output -for this option.

    -

    The hotpixel map may be returned in android.statistics.hotPixelMap.

    -
  • -
  • - FAST -

    Hot pixel correction is applied, without reducing frame -rate relative to sensor raw output.

    -

    The hotpixel map may be returned in android.statistics.hotPixelMap.

    -
  • -
  • - HIGH_QUALITY -

    High-quality hot pixel correction is applied, at a cost -of possibly reduced frame rate relative to sensor raw output.

    -

    The hotpixel map may be returned in android.statistics.hotPixelMap.

    -
  • -
- -
-

Operational mode for hot pixel correction.

-
- -

android.hotPixel.availableHotPixelModes

-
Details
-

Hotpixel correction interpolates out, or otherwise removes, pixels -that do not accurately measure the incoming light (i.e. pixels that -are stuck at an arbitrary value or are oversensitive).

-
static
Property NameTypeDescriptionUnitsRangeTags
- android.hotPixel.availableHotPixelModes - - byte - x - - - n - - [public as enumList] - - - - -
list of enums
- - -
-

List of hot pixel correction modes for android.hotPixel.mode that are supported by this -camera device.

-
- -

Any value listed in android.hotPixel.mode

-
Details
-

FULL mode camera devices will always support FAST.

-
HAL Implementation Details
-

To avoid performance issues, there will be significantly fewer hot -pixels than actual pixels on the camera sensor. -HAL must support both FAST and HIGH_QUALITY if hot pixel correction control is available -on the camera device, but the underlying implementation can be the same for both modes. -That is, if the highest quality implementation on the camera device does not slow down -capture rate, then FAST and HIGH_QUALITY will generate the same output.

-
dynamic
Property NameTypeDescriptionUnitsRangeTags
- android.hotPixel.mode - - byte - - [public] - - - - - -
    -
  • - OFF -

    No hot pixel correction is applied.

    -

    The frame rate must not be reduced relative to sensor raw output -for this option.

    -

    The hotpixel map may be returned in android.statistics.hotPixelMap.

    -
  • -
  • - FAST -

    Hot pixel correction is applied, without reducing frame -rate relative to sensor raw output.

    -

    The hotpixel map may be returned in android.statistics.hotPixelMap.

    -
  • -
  • - HIGH_QUALITY -

    High-quality hot pixel correction is applied, at a cost -of possibly reduced frame rate relative to sensor raw output.

    -

    The hotpixel map may be returned in android.statistics.hotPixelMap.

    -
  • -
- -
-

Operational mode for hot pixel correction.

-
- -

android.hotPixel.availableHotPixelModes

-
Details
-

Hotpixel correction interpolates out, or otherwise removes, pixels -that do not accurately measure the incoming light (i.e. pixels that -are stuck at an arbitrary value or are oversensitive).

-
jpeg
controls
Property NameTypeDescriptionUnitsRangeTags
- android.jpeg.gpsLocation - - byte - - [java_public as location] - - [synthetic] - - [legacy] - - - - - -

A location object to use when generating image GPS metadata.

-
- -
Details
-

Setting a location object in a request will include the GPS coordinates of the location -into any JPEG images captured based on the request. These coordinates can then be -viewed by anyone who receives the JPEG image.

-
- android.jpeg.gpsCoordinates - - double - x - - - 3 - - [ndk_public] - - - [legacy] - - -
latitude, longitude, altitude. First two in degrees, the third in meters
- - -
-

GPS coordinates to include in output JPEG -EXIF.

-
- -

(-180 - 180], [-90,90], [-inf, inf]

-
- android.jpeg.gpsProcessingMethod - - byte - - [ndk_public as string] - - - [legacy] - - - - - -

32 characters describing GPS algorithm to -include in EXIF.

-
- UTF-8 null-terminated string - -
- android.jpeg.gpsTimestamp - - int64 - - [ndk_public] - - - [legacy] - - - - - -

Time GPS fix was made to include in -EXIF.

-
- UTC in seconds since January 1, 1970 - -
- android.jpeg.orientation - - int32 - - [public] - - - [legacy] - - - - - -

The orientation for a JPEG image.

-
- Degrees in multiples of 90 - -

0, 90, 180, 270

-
Details
-

The clockwise rotation angle in degrees, relative to the orientation of the camera, that the JPEG picture needs to be rotated by, to be viewed upright.

-

Camera devices may either encode this value into the JPEG EXIF header, or -rotate the image data to match this orientation. When the image data is rotated, -the thumbnail data will also be rotated.

-

Note that this orientation is relative to the orientation of the camera sensor, given -by android.sensor.orientation.

-

To translate from the device orientation given by the Android sensor APIs, the following -sample code may be used:

-
private int getJpegOrientation(CameraCharacteristics c, int deviceOrientation) {
    if (deviceOrientation == android.view.OrientationEventListener.ORIENTATION_UNKNOWN) return 0;
    int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);

    // Round device orientation to a multiple of 90
    deviceOrientation = (deviceOrientation + 45) / 90 * 90;

    // Reverse device orientation for front-facing cameras
    boolean facingFront = c.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT;
    if (facingFront) deviceOrientation = -deviceOrientation;

    // Calculate desired JPEG orientation relative to camera orientation to make
    // the image upright relative to the device orientation
    int jpegOrientation = (sensorOrientation + deviceOrientation + 360) % 360;

    return jpegOrientation;
}
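A hypothetical usage of the helper above when building a still-capture request (the captureBuilder and characteristics names are assumptions, not part of the sample):

// Apply the computed rotation to the JPEG output of a capture request
int rotation = getJpegOrientation(characteristics, deviceOrientation);
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, rotation);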
-
-
- android.jpeg.quality - - byte - - [public] - - - [legacy] - - - - - -

Compression quality of the final JPEG -image.

-
- -

1-100; larger is higher quality

-
Details
-

85-95 is the typical usage range.

-
- android.jpeg.thumbnailQuality - - byte - - [public] - - - [legacy] - - - - - -

Compression quality of JPEG -thumbnail.

-
- -

1-100; larger is higher quality

-
- android.jpeg.thumbnailSize - - int32 - x - - - 2 - - [public as size] - - - [legacy] - - - - - -

Resolution of embedded JPEG thumbnail.

-
- -

android.jpeg.availableThumbnailSizes

-
Details
-

When set to (0, 0), the JPEG EXIF will not contain a thumbnail, but the captured JPEG will still be a valid image.

-

For best results, when issuing a request for a JPEG image, the thumbnail size selected -should have the same aspect ratio as the main JPEG output.

-

If the thumbnail image aspect ratio differs from the JPEG primary image aspect ratio, the camera device creates the thumbnail by cropping it from the primary image. For example, if the primary image has a 4:3 aspect ratio and the thumbnail image has a 16:9 aspect ratio, the primary image will be cropped vertically (letterbox) to generate the thumbnail image. The thumbnail image will always have a smaller Field Of View (FOV) than the primary image when aspect ratios differ.

-

When an android.jpeg.orientation of non-zero degree is requested, -the camera device will handle thumbnail rotation in one of the following ways:

-
    -
  • Set the EXIF orientation flag and keep jpeg and thumbnail image data unrotated.
  • -
  • Rotate the jpeg and thumbnail image data and not set the EXIF orientation flag. In this case, LIMITED or FULL hardware level devices will report the rotated thumbnail size in the capture result, so the width and height will be interchanged if a 90 or 270 degree orientation is requested. LEGACY devices will always report the unrotated thumbnail size.
  • -
-
HAL Implementation Details
-

The HAL must not squeeze or stretch the downscaled primary image to generate the thumbnail. The cropping must be done on the primary jpeg image rather than the sensor active array. The stream cropping rule specified by "S5. Cropping" in camera3.h doesn't apply to the thumbnail image cropping.

-
static
Property NameTypeDescriptionUnitsRangeTags
- android.jpeg.availableThumbnailSizes - - int32 - x - - - 2 x n - - [public as size] - - - [legacy] - - - - - -

List of JPEG thumbnail sizes for android.jpeg.thumbnailSize supported by this -camera device.

-
- -
Details
-

This list will include at least one non-zero resolution, plus (0,0) for indicating no -thumbnail should be generated.

-

The following conditions will be satisfied for this size list:

-
    -
  • The sizes will be sorted by increasing pixel area (width x height). -If several resolutions have the same area, they will be sorted by increasing width.
  • -
  • The aspect ratio of the largest thumbnail size will be same as the -aspect ratio of largest JPEG output size in android.scaler.availableStreamConfigurations. -The largest size is defined as the size that has the largest pixel area -in a given size list.
  • -
  • Each output JPEG size in android.scaler.availableStreamConfigurations will have at least -one corresponding size that has the same aspect ratio in availableThumbnailSizes, -and vice versa.
  • -
  • All non-(0, 0) sizes will have non-zero widths and heights.
  • -
-
- android.jpeg.maxSize - - int32 - - [system] - - - - - - - -

Maximum size in bytes for the compressed -JPEG buffer

-
- -

Must be large enough to fit any JPEG produced by -the camera

-
Details
-

This is used for sizing the gralloc buffers for -JPEG

-
dynamic
Property NameTypeDescriptionUnitsRangeTags
- android.jpeg.gpsLocation - - byte - - [java_public as location] - - [synthetic] - - [legacy] - - - - - -

A location object to use when generating image GPS metadata.

-
- -
Details
-

Setting a location object in a request will include the GPS coordinates of the location -into any JPEG images captured based on the request. These coordinates can then be -viewed by anyone who receives the JPEG image.

-
- android.jpeg.gpsCoordinates - - double - x - - - 3 - - [ndk_public] - - - [legacy] - - -
latitude, longitude, altitude. First two in degrees, the third in meters
- - -
-

GPS coordinates to include in output JPEG -EXIF.

-
- -

(-180 - 180], [-90,90], [-inf, inf]

-
- android.jpeg.gpsProcessingMethod - - byte - - [ndk_public as string] - - - [legacy] - - - - - -

32 characters describing GPS algorithm to -include in EXIF.

-
- UTF-8 null-terminated string - -
- android.jpeg.gpsTimestamp - - int64 - - [ndk_public] - - - [legacy] - - - - - -

Time GPS fix was made to include in -EXIF.

-
- UTC in seconds since January 1, 1970 - -
- android.jpeg.orientation - - int32 - - [public] - - - [legacy] - - - - - -

The orientation for a JPEG image.

-
- Degrees in multiples of 90 - -

0, 90, 180, 270

-
Details
-

The clockwise rotation angle in degrees, relative to the orientation of the camera, that the JPEG picture needs to be rotated by, to be viewed upright.

-

Camera devices may either encode this value into the JPEG EXIF header, or -rotate the image data to match this orientation. When the image data is rotated, -the thumbnail data will also be rotated.

-

Note that this orientation is relative to the orientation of the camera sensor, given -by android.sensor.orientation.

-

To translate from the device orientation given by the Android sensor APIs, the following -sample code may be used:

-
private int getJpegOrientation(CameraCharacteristics c, int deviceOrientation) {
    if (deviceOrientation == android.view.OrientationEventListener.ORIENTATION_UNKNOWN) return 0;
    int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);

    // Round device orientation to a multiple of 90
    deviceOrientation = (deviceOrientation + 45) / 90 * 90;

    // Reverse device orientation for front-facing cameras
    boolean facingFront = c.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT;
    if (facingFront) deviceOrientation = -deviceOrientation;

    // Calculate desired JPEG orientation relative to camera orientation to make
    // the image upright relative to the device orientation
    int jpegOrientation = (sensorOrientation + deviceOrientation + 360) % 360;

    return jpegOrientation;
}
-
-
- android.jpeg.quality - - byte - - [public] - - - [legacy] - - - - - -

Compression quality of the final JPEG -image.

-
- -

1-100; larger is higher quality

-
Details
-

85-95 is the typical usage range.

-
- android.jpeg.size - - int32 - - [system] - - - - - - - -

The size of the compressed JPEG image, in -bytes

-
- -

>= 0

-
Details
-

If no JPEG output is produced for the request, -this must be 0.

-

Otherwise, this describes the real size of the compressed -JPEG image placed in the output stream. More specifically, -if android.jpeg.maxSize = 1000000, and a specific capture -has android.jpeg.size = 500000, then the output buffer from -the JPEG stream will be 1000000 bytes, of which the first -500000 make up the real data.

-
- android.jpeg.thumbnailQuality - - byte - - [public] - - - [legacy] - - - - - -

Compression quality of JPEG -thumbnail.

-
- -

1-100; larger is higher quality

-
- android.jpeg.thumbnailSize - - int32 - x - - - 2 - - [public as size] - - - [legacy] - - - - - -

Resolution of embedded JPEG thumbnail.

-
- -

android.jpeg.availableThumbnailSizes

-
Details
-

When set to (0, 0), the JPEG EXIF will not contain a thumbnail, but the captured JPEG will still be a valid image.

-

For best results, when issuing a request for a JPEG image, the thumbnail size selected -should have the same aspect ratio as the main JPEG output.

-

If the thumbnail image aspect ratio differs from the JPEG primary image aspect ratio, the camera device creates the thumbnail by cropping it from the primary image. For example, if the primary image has a 4:3 aspect ratio and the thumbnail image has a 16:9 aspect ratio, the primary image will be cropped vertically (letterbox) to generate the thumbnail image. The thumbnail image will always have a smaller Field Of View (FOV) than the primary image when aspect ratios differ.

-

When an android.jpeg.orientation of non-zero degree is requested, -the camera device will handle thumbnail rotation in one of the following ways:

-
    -
  • Set the EXIF orientation flag and keep jpeg and thumbnail image data unrotated.
  • -
  • Rotate the jpeg and thumbnail image data and not set the EXIF orientation flag. In this case, LIMITED or FULL hardware level devices will report the rotated thumbnail size in the capture result, so the width and height will be interchanged if a 90 or 270 degree orientation is requested. LEGACY devices will always report the unrotated thumbnail size.
  • -
-
HAL Implementation Details
-

The HAL must not squeeze or stretch the downscaled primary image to generate the thumbnail. The cropping must be done on the primary jpeg image rather than the sensor active array. The stream cropping rule specified by "S5. Cropping" in camera3.h doesn't apply to the thumbnail image cropping.

-
lens
controls
Property NameTypeDescriptionUnitsRangeTags
- android.lens.aperture - - float - - [public] - - - [full] - - - - - -

The desired lens aperture size, as a ratio of lens focal length to the -effective aperture diameter.

-
- The f-number (f/N) - -

android.lens.info.availableApertures

-
Details
-

Setting this value is only supported on the camera devices that have a variable -aperture lens.

-

When this is supported and android.control.aeMode is OFF, -this can be set along with android.sensor.exposureTime, -android.sensor.sensitivity, and android.sensor.frameDuration -to achieve manual exposure control.

-

The requested aperture value may take several frames to reach the -requested value; the camera device will report the current (intermediate) -aperture size in capture result metadata while the aperture is changing. -While the aperture is still changing, android.lens.state will be set to MOVING.

-

When this is supported and android.control.aeMode is one of -the ON modes, this will be overridden by the camera device -auto-exposure algorithm, the overridden values are then provided -back to the user in the corresponding result.

-
- android.lens.filterDensity - - float - - [public] - - - [full] - - - - - -

The desired setting for the lens neutral density filter(s).

-
- Exposure Value (EV) - -

android.lens.info.availableFilterDensities

-
Details
-

This control will not be supported on most camera devices.

-

Lens filters are typically used to lower the amount of light the sensor is exposed to (measured in steps of EV). As used here, an EV step is the standard logarithmic representation, which is non-negative and inversely proportional to the amount of light hitting the sensor. For example, setting this to 0 would result in no reduction of the incoming light, and setting this to 2 would mean that the filter is set to reduce incoming light by two stops (allowing 1/4 of the prior amount of light to the sensor).

-

It may take several frames before the lens filter density changes -to the requested value. While the filter density is still changing, -android.lens.state will be set to MOVING.

-
- android.lens.focalLength - - float - - [public] - - - [legacy] - - - - - -

The desired lens focal length; used for optical zoom.

-
- Millimeters - -

android.lens.info.availableFocalLengths

-
Details
-

This setting controls the physical focal length of the camera -device's lens. Changing the focal length changes the field of -view of the camera device, and is usually used for optical zoom.

-

Like android.lens.focusDistance and android.lens.aperture, this -setting won't be applied instantaneously, and it may take several -frames before the lens can change to the requested focal length. -While the focal length is still changing, android.lens.state will -be set to MOVING.

-

Optical zoom will not be supported on most devices.

-
- android.lens.focusDistance - - float - - [public] - - - [full] - - - - - -

Desired distance to plane of sharpest focus, -measured from frontmost surface of the lens.

-
- See android.lens.info.focusDistanceCalibration for details - -

>= 0

-
Details
-

This control can be used for setting manual focus, on devices that support -the MANUAL_SENSOR capability and have a variable-focus lens (see -android.lens.info.minimumFocusDistance).

-

A value of 0.0f means infinity focus. The value set will be clamped to -[0.0f, android.lens.info.minimumFocusDistance].

-

Like android.lens.focalLength, this setting won't be applied -instantaneously, and it may take several frames before the lens -can move to the requested focus distance. While the lens is still moving, -android.lens.state will be set to MOVING.

-

LEGACY devices support at most setting this to 0.0f -for infinity focus.
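For illustration, a sketch (assuming the device advertises MANUAL_SENSOR and a non-zero minimum focus distance; names are illustrative) of requesting manual focus at roughly 0.5 m, i.e. 1 / 0.5 m = 2.0 diopters:

private void setManualFocus(CaptureRequest.Builder builder, CameraCharacteristics c) {
    float minFocusDistance = c.get(CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE);
    // Clamp to [0, minimumFocusDistance] as described above
    float requested = Math.min(2.0f, minFocusDistance);
    builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_OFF);
    builder.set(CaptureRequest.LENS_FOCUS_DISTANCE, requested);
}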

-
- android.lens.opticalStabilizationMode - - byte - - [public] - - - [limited] - - - -
    -
  • - OFF -

    Optical stabilization is unavailable.

    -
  • -
  • - ON - [optional] -

    Optical stabilization is enabled.

    -
  • -
- -
-

Sets whether the camera device uses optical image stabilization (OIS) -when capturing images.

-
- -

android.lens.info.availableOpticalStabilization

-
Details
-

OIS is used to compensate for motion blur due to small -movements of the camera during capture. Unlike digital image -stabilization (android.control.videoStabilizationMode), OIS -makes use of mechanical elements to stabilize the camera -sensor, and thus allows for longer exposure times before -camera shake becomes apparent.

-

Switching between different optical stabilization modes may take several frames to initialize; the camera device will report the current mode in capture result metadata. For example, when "ON" mode is requested, the optical stabilization modes in the first several capture results may still be "OFF", and it will become "ON" when the initialization is done.

-

If a camera device supports both OIS and digital image stabilization -(android.control.videoStabilizationMode), turning both modes on may produce undesirable -interaction, so it is recommended not to enable both at the same time.

-

Not all devices will support OIS; see -android.lens.info.availableOpticalStabilization for -available controls.

-
static
Property NameTypeDescriptionUnitsRangeTags
- android.lens.info.availableApertures - - float - x - - - n - - [public] - - - [full] - - - - - -

List of aperture size values for android.lens.aperture that are -supported by this camera device.

-
- The aperture f-number - -
Details
-

If the camera device doesn't support a variable lens aperture, -this list will contain only one value, which is the fixed aperture size.

-

If the camera device supports a variable aperture, the aperture values -in this list will be sorted in ascending order.

-
- android.lens.info.availableFilterDensities - - float - x - - - n - - [public] - - - [full] - - - - - -

List of neutral density filter values for -android.lens.filterDensity that are supported by this camera device.

-
- Exposure value (EV) - -

Values are >= 0

-
Details
-

If a neutral density filter is not supported by this camera device, -this list will contain only 0. Otherwise, this list will include every -filter density supported by the camera device, in ascending order.

-
- android.lens.info.availableFocalLengths - - float - x - - - n - - [public] - - - [legacy] - - -
The list of available focal lengths
- - -
-

List of focal lengths for android.lens.focalLength that are supported by this camera -device.

-
- Millimeters - -

Values are > 0

-
Details
-

If optical zoom is not supported, this list will only contain -a single value corresponding to the fixed focal length of the -device. Otherwise, this list will include every focal length supported -by the camera device, in ascending order.

-
- android.lens.info.availableOpticalStabilization - - byte - x - - - n - - [public as enumList] - - - [limited] - - -
list of enums
- - -
-

List of optical image stabilization (OIS) modes for -android.lens.opticalStabilizationMode that are supported by this camera device.

-
- -

Any value listed in android.lens.opticalStabilizationMode

-
Details
-

If OIS is not supported by a given camera device, this list will -contain only OFF.

-
- android.lens.info.hyperfocalDistance - - float - - [public] - - - [limited] - - - - - -

Hyperfocal distance for this lens.

-
- See android.lens.info.focusDistanceCalibration for details - -

If lens is fixed focus, >= 0. If lens has focuser unit, the value is -within (0.0f, android.lens.info.minimumFocusDistance]

-
Details
-

If the lens is not fixed focus, the camera device will report this -field when android.lens.info.focusDistanceCalibration is APPROXIMATE or CALIBRATED.

-
- android.lens.info.minimumFocusDistance - - float - - [public] - - - [limited] - - - - - -

Shortest distance from frontmost surface -of the lens that can be brought into sharp focus.

-
- See android.lens.info.focusDistanceCalibration for details - -

>= 0

-
Details
-

If the lens is fixed-focus, this will be -0.

-
HAL Implementation Details
-

Mandatory for FULL devices; LIMITED devices must always set this value to 0 for fixed-focus lenses, and may omit the minimum focus distance otherwise.

-

This field is also mandatory for all devices advertising -the MANUAL_SENSOR capability.

-
- android.lens.info.shadingMapSize - - int32 - x - - - 2 - - [ndk_public as size] - - - [full] - - -
width and height (N, M) of lens shading map provided by the camera device.
- - -
-

Dimensions of lens shading map.

-
- -

Both values >= 1

-
Details
-

The map should be on the order of 30-40 rows and columns, and -must be smaller than 64x64.

-
- android.lens.info.focusDistanceCalibration - - byte - - [public] - - - [limited] - - - -
    -
  • - UNCALIBRATED -

    The lens focus distance is not accurate, and the units used for -android.lens.focusDistance do not correspond to any physical units.

    -

    Setting the lens to the same focus distance on separate occasions may -result in a different real focus distance, depending on factors such -as the orientation of the device, the age of the focusing mechanism, -and the device temperature. The focus distance value will still be -in the range of [0, android.lens.info.minimumFocusDistance], where 0 -represents the farthest focus.

    -
  • -
  • - APPROXIMATE -

    The lens focus distance is measured in diopters.

    -

    However, setting the lens to the same focus distance -on separate occasions may result in a different real -focus distance, depending on factors such as the -orientation of the device, the age of the focusing -mechanism, and the device temperature.

    -
  • -
  • - CALIBRATED -

    The lens focus distance is measured in diopters, and -is calibrated.

    -

    The lens mechanism is calibrated so that setting the -same focus distance is repeatable on multiple -occasions with good accuracy, and the focus distance -corresponds to the real physical distance to the plane -of best focus.

    -
  • -
- -
-

The lens focus distance calibration quality.

-
- -
Details
-

The lens focus distance calibration quality determines the reliability of -focus related metadata entries, i.e. android.lens.focusDistance, -android.lens.focusRange, android.lens.info.hyperfocalDistance, and -android.lens.info.minimumFocusDistance.

-

APPROXIMATE and CALIBRATED devices report the focus metadata in -units of diopters (1/meter), so 0.0f represents focusing at infinity, -and increasing positive numbers represent focusing closer and closer -to the camera device. The focus distance control also uses diopters -on these devices.

-

UNCALIBRATED devices do not use units that are directly comparable -to any real physical measurement, but 0.0f still represents farthest -focus, and android.lens.info.minimumFocusDistance represents the -nearest focus the device can achieve.
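As a small worked example (illustrative helper, not part of the metadata definition), converting an APPROXIMATE/CALIBRATED focus distance from diopters to meters:

private static float diopterToMeters(float diopters) {
    // 0 diopters means infinity focus; e.g. 2.0 diopters corresponds to 1 / 2.0 = 0.5 m
    return diopters > 0f ? 1.0f / diopters : Float.POSITIVE_INFINITY;
}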

-
HAL Implementation Details
-

For devices that advertise APPROXIMATE quality or higher, a focus distance of 0 diopters (infinity focus) must work. When autofocus is disabled (android.control.afMode == OFF) and the lens focus distance is set to 0 diopters (android.lens.focusDistance == 0), the lens will move to focus at infinity and is stably focused at infinity even if the device tilts. It may take the lens some time to move; during the move the lens state should be MOVING and the output diopter value should be changing toward 0.

-
- android.lens.facing - - byte - - [public] - - - [legacy] - - - -
    -
  • - FRONT -

    The camera device faces the same direction as the device's screen.

    -
  • -
  • - BACK -

    The camera device faces the opposite direction as the device's screen.

    -
  • -
  • - EXTERNAL -

    The camera device is an external camera, and has no fixed facing relative to the -device's screen.

    -
  • -
- -
-

Direction the camera faces relative to -device screen.

-
- -
- android.lens.poseRotation - - float - x - - - 4 - - [public] - - - - - - - -

The orientation of the camera relative to the sensor -coordinate system.

-
- - Quaternion coefficients - - -
Details
-

The four coefficients that describe the quaternion -rotation from the Android sensor coordinate system to a -camera-aligned coordinate system where the X-axis is -aligned with the long side of the image sensor, the Y-axis -is aligned with the short side of the image sensor, and -the Z-axis is aligned with the optical axis of the sensor.

-

To convert from the quaternion coefficients (x,y,z,w) -to the axis of rotation (a_x, a_y, a_z) and rotation -amount theta, the following formulas can be used:

-
theta = 2 * acos(w)
a_x = x / sin(theta/2)
a_y = y / sin(theta/2)
a_z = z / sin(theta/2)
-
-

To create a 3x3 rotation matrix that applies the rotation -defined by this quaternion, the following matrix can be -used:

-
R = [ 1 - 2y^2 - 2z^2,       2xy - 2zw,       2xz + 2yw,
            2xy + 2zw, 1 - 2x^2 - 2z^2,       2yz - 2xw,
            2xz - 2yw,       2yz + 2xw, 1 - 2x^2 - 2y^2 ]
-
-

This matrix can then be used to apply the rotation to a column vector point with

-

p' = Rp

-

where p is in the device sensor coordinate system, and p' is in the camera-oriented coordinate system.
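A sketch that follows the formulas above (helper names are illustrative): build the 3x3 rotation matrix R from the pose-rotation quaternion (x, y, z, w) and apply p' = Rp to a point in the sensor coordinate system:

private static float[] rotateByPose(float[] q /* {x, y, z, w} */, float[] p) {
    float x = q[0], y = q[1], z = q[2], w = q[3];
    float[][] r = {
            {1 - 2*y*y - 2*z*z, 2*x*y - 2*z*w,     2*x*z + 2*y*w},
            {2*x*y + 2*z*w,     1 - 2*x*x - 2*z*z, 2*y*z - 2*x*w},
            {2*x*z - 2*y*w,     2*y*z + 2*x*w,     1 - 2*x*x - 2*y*y},
    };
    float[] out = new float[3];
    for (int i = 0; i < 3; i++) {
        out[i] = r[i][0] * p[0] + r[i][1] * p[1] + r[i][2] * p[2];  // p' = R p
    }
    return out;
}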

-
- android.lens.poseTranslation - - float - x - - - 3 - - [public] - - - - - - - -

Position of the camera optical center.

-
- Meters - -
Details
-

The position of the camera device's lens optical center, -as a three-dimensional vector (x,y,z), relative to the -optical center of the largest camera device facing in the -same direction as this camera, in the Android sensor coordinate -axes. Note that only the axis definitions are shared with -the sensor coordinate system, but not the origin.

-

If this device is the largest or only camera device with a -given facing, then this position will be (0, 0, 0); a -camera device with a lens optical center located 3 cm from -the main sensor along the +X axis (to the right from the -user's perspective) will report (0.03, 0, 0).

-

To transform pixel coordinates between two cameras facing the same direction, first the source camera android.lens.radialDistortion must be corrected for. Then the source camera android.lens.intrinsicCalibration needs to be applied, followed by the android.lens.poseRotation of the source camera, the translation of the source camera relative to the destination camera, the android.lens.poseRotation of the destination camera, and finally the inverse of android.lens.intrinsicCalibration of the destination camera. This obtains a radial-distortion-free coordinate in the destination camera pixel coordinates.

-

To compare this against a real image from the destination -camera, the destination camera image then needs to be -corrected for radial distortion before comparison or -sampling.

-
- android.lens.intrinsicCalibration - - float - x - - - 5 - - [public] - - - - - - - -

The parameters for this camera device's intrinsic -calibration.

-
- - Pixels in the - android.sensor.info.preCorrectionActiveArraySize - coordinate system. - - -
Details
-

The five calibration parameters that describe the -transform from camera-centric 3D coordinates to sensor -pixel coordinates:

-
[f_x, f_y, c_x, c_y, s]
-
-

Where f_x and f_y are the horizontal and vertical -focal lengths, [c_x, c_y] is the position of the optical -axis, and s is a skew parameter for the sensor plane not -being aligned with the lens plane.

-

These are typically used within a transformation matrix K:

-
K = [ f_x,   s, c_x,
-       0, f_y, c_y,
-       0    0,   1 ]
-
-

which can then be combined with the camera pose rotation R and translation t (android.lens.poseRotation and android.lens.poseTranslation, respectively) to calculate the complete transform from world coordinates to pixel coordinates:

-
P = [ K 0   * [ R t
-     0 1 ]     0 1 ]
-
-

and with p_w being a point in the world coordinate system -and p_s being a point in the camera active pixel array -coordinate system, and with the mapping including the -homogeneous division by z:

-
 p_h = (x_h, y_h, z_h) = P p_w
-p_s = p_h / z_h
-
-

so [x_s, y_s] is the pixel coordinates of the world -point, z_s = 1, and w_s is a measurement of disparity -(depth) in pixel coordinates.

-

Note that the coordinate system for this transform is the -android.sensor.info.preCorrectionActiveArraySize system, -where (0,0) is the top-left of the -preCorrectionActiveArraySize rectangle. Once the pose and -intrinsic calibration transforms have been applied to a -world point, then the android.lens.radialDistortion -transform needs to be applied, and the result adjusted to -be in the android.sensor.info.activeArraySize coordinate -system (where (0, 0) is the top-left of the -activeArraySize rectangle), to determine the final pixel -coordinate of the world point for processed (non-RAW) -output buffers.
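For illustration only (the struct and function names below are hypothetical, not taken from camera3.h), this sketch applies the intrinsic calibration K to a point already expressed in the camera coordinate frame, including the homogeneous division by z_h:

#include <array>

// Illustrative sketch: project a camera-frame point (X, Y, Z) into
// preCorrectionActiveArraySize pixel coordinates using the five
// android.lens.intrinsicCalibration parameters [f_x, f_y, c_x, c_y, s].
struct Pixel { float x, y; };

Pixel projectCameraPoint(const std::array<float, 5>& calib,
                         float X, float Y, float Z) {
    const float f_x = calib[0], f_y = calib[1];
    const float c_x = calib[2], c_y = calib[3];
    const float s   = calib[4];
    // p_h = K * [X Y Z]^T, then homogeneous division by z_h (here z_h == Z).
    const float x_h = f_x * X + s * Y + c_x * Z;
    const float y_h = f_y * Y + c_y * Z;
    const float z_h = Z;
    return { x_h / z_h, y_h / z_h };
}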

-
- android.lens.radialDistortion - - float - x - - - 6 - - [public] - - - - - - - -

The correction coefficients to correct for this camera device's -radial and tangential lens distortion.

-
- - Unitless coefficients. - - -
Details
-

Four radial distortion coefficients [kappa_0, kappa_1, kappa_2, -kappa_3] and two tangential distortion coefficients -[kappa_4, kappa_5] that can be used to correct the -lens's geometric distortion with the mapping equations:

-
 x_c = x_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
-       kappa_4 * (2 * x_i * y_i) + kappa_5 * ( r^2 + 2 * x_i^2 )
- y_c = y_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
-       kappa_5 * (2 * x_i * y_i) + kappa_4 * ( r^2 + 2 * y_i^2 )
-
-

Here, [x_c, y_c] are the coordinates to sample in the -input image that correspond to the pixel values in the -corrected image at the coordinate [x_i, y_i]:

-
 correctedImage(x_i, y_i) = sample_at(x_c, y_c, inputImage)
-
-

The pixel coordinates are defined in a normalized -coordinate system related to the -android.lens.intrinsicCalibration calibration fields. -Both [x_i, y_i] and [x_c, y_c] have (0,0) at the -lens optical center [c_x, c_y]. The maximum magnitudes -of both x and y coordinates are normalized to be 1 at the -edge further from the optical center, so the range -for both dimensions is -1 <= x <= 1.

-

Finally, r represents the radial distance from the -optical center, r^2 = x_i^2 + y_i^2, and its magnitude -is therefore no larger than |r| <= sqrt(2).

-

The distortion model used is the Brown-Conrady model.
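A minimal sketch of the mapping above (names are illustrative): given normalized, optical-center-relative coordinates (x_i, y_i) in the corrected image, compute the sample location (x_c, y_c) in the distorted input image from the six radialDistortion coefficients:

#include <array>

// Illustrative sketch of the mapping equations above.
struct Coord { float x, y; };

Coord distort(const std::array<float, 6>& kappa, float x_i, float y_i) {
    const float r2 = x_i * x_i + y_i * y_i;
    const float radial = kappa[0] + kappa[1] * r2 + kappa[2] * r2 * r2 +
                         kappa[3] * r2 * r2 * r2;
    const float x_c = x_i * radial +
                      kappa[4] * (2 * x_i * y_i) + kappa[5] * (r2 + 2 * x_i * x_i);
    const float y_c = y_i * radial +
                      kappa[5] * (2 * x_i * y_i) + kappa[4] * (r2 + 2 * y_i * y_i);
    return {x_c, y_c};
}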

-
dynamic
Property NameTypeDescriptionUnitsRangeTags
- android.lens.aperture - - float - - [public] - - - [full] - - - - - -

The desired lens aperture size, as a ratio of lens focal length to the -effective aperture diameter.

-
- The f-number (f/N) - -

android.lens.info.availableApertures

-
Details
-

Setting this value is only supported on the camera devices that have a variable -aperture lens.

-

When this is supported and android.control.aeMode is OFF, -this can be set along with android.sensor.exposureTime, -android.sensor.sensitivity, and android.sensor.frameDuration -to achieve manual exposure control.

-

The requested aperture value may take several frames to reach the -requested value; the camera device will report the current (intermediate) -aperture size in capture result metadata while the aperture is changing. -While the aperture is still changing, android.lens.state will be set to MOVING.

-

When this is supported and android.control.aeMode is one of the ON modes, this will be overridden by the camera device auto-exposure algorithm; the overridden values are then provided back to the user in the corresponding result.

-
- android.lens.filterDensity - - float - - [public] - - - [full] - - - - - -

The desired setting for the lens neutral density filter(s).

-
- Exposure Value (EV) - -

android.lens.info.availableFilterDensities

-
Details
-

This control will not be supported on most camera devices.

-

Lens filters are typically used to lower the amount of light the sensor is exposed to (measured in steps of EV). As used here, an EV step is the standard logarithmic representation, which is non-negative and inversely proportional to the amount of light hitting the sensor. For example, setting this to 0 would result in no reduction of the incoming light, and setting this to 2 would mean that the filter is set to reduce incoming light by two stops (allowing 1/4 of the prior amount of light to the sensor).
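As a worked illustration (hypothetical helper, not part of the HAL API), a filter density of N EV transmits 2^-N of the incoming light, so 2 EV transmits 1/4:

#include <cmath>

// Illustrative sketch: fraction of light transmitted for a neutral density
// filter setting expressed in EV steps (e.g. 2 EV -> 0.25).
float transmittedFraction(float filterDensityEv) {
    return std::exp2(-filterDensityEv);
}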

-

It may take several frames before the lens filter density changes -to the requested value. While the filter density is still changing, -android.lens.state will be set to MOVING.

-
- android.lens.focalLength - - float - - [public] - - - [legacy] - - - - - -

The desired lens focal length; used for optical zoom.

-
- Millimeters - -

android.lens.info.availableFocalLengths

-
Details
-

This setting controls the physical focal length of the camera -device's lens. Changing the focal length changes the field of -view of the camera device, and is usually used for optical zoom.

-

Like android.lens.focusDistance and android.lens.aperture, this -setting won't be applied instantaneously, and it may take several -frames before the lens can change to the requested focal length. -While the focal length is still changing, android.lens.state will -be set to MOVING.

-

Optical zoom will not be supported on most devices.

-
- android.lens.focusDistance - - float - - [public] - - - [full] - - - - - -

Desired distance to plane of sharpest focus, -measured from frontmost surface of the lens.

-
- See android.lens.info.focusDistanceCalibration for details - -

>= 0

-
Details
-

Should be zero for fixed-focus cameras.

-
- android.lens.focusRange - - float - x - - - 2 - - [public as pairFloatFloat] - - - [limited] - - -
Range of scene distances that are in focus
- - -
-

The range of scene distances that are in -sharp focus (depth of field).

-
- A pair of focus distances in diopters: (near, - far); see android.lens.info.focusDistanceCalibration for details. - -

>=0

-
Details
-

If variable focus is not supported, the camera device can still report a fixed depth of field range.

-
- android.lens.opticalStabilizationMode - - byte - - [public] - - - [limited] - - - -
    -
  • - OFF -

    Optical stabilization is unavailable.

    -
  • -
  • - ON - [optional] -

    Optical stabilization is enabled.

    -
  • -
- -
-

Sets whether the camera device uses optical image stabilization (OIS) -when capturing images.

-
- -

android.lens.info.availableOpticalStabilization

-
Details
-

OIS is used to compensate for motion blur due to small -movements of the camera during capture. Unlike digital image -stabilization (android.control.videoStabilizationMode), OIS -makes use of mechanical elements to stabilize the camera -sensor, and thus allows for longer exposure times before -camera shake becomes apparent.

-

Switching between different optical stabilization modes may take several frames to initialize; the camera device will report the current mode in capture result metadata. For example, when "ON" mode is requested, the optical stabilization modes in the first several capture results may still be "OFF", and they will become "ON" once the initialization is done.

-

If a camera device supports both OIS and digital image stabilization -(android.control.videoStabilizationMode), turning both modes on may produce undesirable -interaction, so it is recommended not to enable both at the same time.

-

Not all devices will support OIS; see -android.lens.info.availableOpticalStabilization for -available controls.

-
- android.lens.state - - byte - - [public] - - - [limited] - - - - - - -

Current lens status.

-
- -
Details
-

For lens parameters android.lens.focalLength, android.lens.focusDistance, -android.lens.filterDensity and android.lens.aperture, when changes are requested, -they may take several frames to reach the requested values. This state indicates -the current status of the lens parameters.

-

When the state is STATIONARY, the lens parameters are not changing. This could be either because the parameters are all fixed, or because the lens has had enough time to reach the most recently-requested values. If none of these lens parameters are changeable for a camera device, as listed below:

- -

Then this state will always be STATIONARY.

-

When the state is MOVING, it indicates that at least one of the lens parameters -is changing.

-
- android.lens.poseRotation - - float - x - - - 4 - - [public] - - - - - - - -

The orientation of the camera relative to the sensor -coordinate system.

-
- - Quaternion coefficients - - -
Details
-

The four coefficients that describe the quaternion -rotation from the Android sensor coordinate system to a -camera-aligned coordinate system where the X-axis is -aligned with the long side of the image sensor, the Y-axis -is aligned with the short side of the image sensor, and -the Z-axis is aligned with the optical axis of the sensor.

-

To convert from the quaternion coefficients (x,y,z,w) -to the axis of rotation (a_x, a_y, a_z) and rotation -amount theta, the following formulas can be used:

-
 theta = 2 * acos(w)
-a_x = x / sin(theta/2)
-a_y = y / sin(theta/2)
-a_z = z / sin(theta/2)
-
-

To create a 3x3 rotation matrix that applies the rotation -defined by this quaternion, the following matrix can be -used:

-
R = [ 1 - 2y^2 - 2z^2,       2xy - 2zw,       2xz + 2yw,
-           2xy + 2zw, 1 - 2x^2 - 2z^2,       2yz - 2xw,
-           2xz - 2yw,       2yz + 2xw, 1 - 2x^2 - 2y^2 ]
-
-

This matrix can then be used to apply the rotation to a - column vector point with

-

p' = Rp

-

where p is in the device sensor coordinate system, and - p' is in the camera-oriented coordinate system.

-
- android.lens.poseTranslation - - float - x - - - 3 - - [public] - - - - - - - -

Position of the camera optical center.

-
- Meters - -
Details
-

The position of the camera device's lens optical center, -as a three-dimensional vector (x,y,z), relative to the -optical center of the largest camera device facing in the -same direction as this camera, in the Android sensor coordinate -axes. Note that only the axis definitions are shared with -the sensor coordinate system, but not the origin.

-

If this device is the largest or only camera device with a -given facing, then this position will be (0, 0, 0); a -camera device with a lens optical center located 3 cm from -the main sensor along the +X axis (to the right from the -user's perspective) will report (0.03, 0, 0).

-

To transform pixel coordinates between two cameras facing the same direction, first the source camera android.lens.radialDistortion must be corrected for. Then the source camera android.lens.intrinsicCalibration needs to be applied, followed by the android.lens.poseRotation of the source camera, the translation of the source camera relative to the destination camera, the android.lens.poseRotation of the destination camera, and finally the inverse of android.lens.intrinsicCalibration of the destination camera. This obtains a radial-distortion-free coordinate in the destination camera pixel coordinates.

-

To compare this against a real image from the destination -camera, the destination camera image then needs to be -corrected for radial distortion before comparison or -sampling.

-
- android.lens.intrinsicCalibration - - float - x - - - 5 - - [public] - - - - - - - -

The parameters for this camera device's intrinsic -calibration.

-
- - Pixels in the - android.sensor.info.preCorrectionActiveArraySize - coordinate system. - - -
Details
-

The five calibration parameters that describe the -transform from camera-centric 3D coordinates to sensor -pixel coordinates:

-
[f_x, f_y, c_x, c_y, s]
-
-

Where f_x and f_y are the horizontal and vertical -focal lengths, [c_x, c_y] is the position of the optical -axis, and s is a skew parameter for the sensor plane not -being aligned with the lens plane.

-

These are typically used within a transformation matrix K:

-
K = [ f_x,   s, c_x,
-       0, f_y, c_y,
-       0    0,   1 ]
-
-

which can then be combined with the camera pose rotation R and translation t (android.lens.poseRotation and android.lens.poseTranslation, respectively) to calculate the complete transform from world coordinates to pixel coordinates:

-
P = [ K 0   * [ R t
-     0 1 ]     0 1 ]
-
-

and with p_w being a point in the world coordinate system -and p_s being a point in the camera active pixel array -coordinate system, and with the mapping including the -homogeneous division by z:

-
 p_h = (x_h, y_h, z_h) = P p_w
-p_s = p_h / z_h
-
-

so [x_s, y_s] is the pixel coordinates of the world -point, z_s = 1, and w_s is a measurement of disparity -(depth) in pixel coordinates.

-

Note that the coordinate system for this transform is the -android.sensor.info.preCorrectionActiveArraySize system, -where (0,0) is the top-left of the -preCorrectionActiveArraySize rectangle. Once the pose and -intrinsic calibration transforms have been applied to a -world point, then the android.lens.radialDistortion -transform needs to be applied, and the result adjusted to -be in the android.sensor.info.activeArraySize coordinate -system (where (0, 0) is the top-left of the -activeArraySize rectangle), to determine the final pixel -coordinate of the world point for processed (non-RAW) -output buffers.

-
- android.lens.radialDistortion - - float - x - - - 6 - - [public] - - - - - - - -

The correction coefficients to correct for this camera device's -radial and tangential lens distortion.

-
- - Unitless coefficients. - - -
Details
-

Four radial distortion coefficients [kappa_0, kappa_1, kappa_2, -kappa_3] and two tangential distortion coefficients -[kappa_4, kappa_5] that can be used to correct the -lens's geometric distortion with the mapping equations:

-
 x_c = x_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
-       kappa_4 * (2 * x_i * y_i) + kappa_5 * ( r^2 + 2 * x_i^2 )
- y_c = y_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
-       kappa_5 * (2 * x_i * y_i) + kappa_4 * ( r^2 + 2 * y_i^2 )
-
-

Here, [x_c, y_c] are the coordinates to sample in the -input image that correspond to the pixel values in the -corrected image at the coordinate [x_i, y_i]:

-
 correctedImage(x_i, y_i) = sample_at(x_c, y_c, inputImage)
-
-

The pixel coordinates are defined in a normalized -coordinate system related to the -android.lens.intrinsicCalibration calibration fields. -Both [x_i, y_i] and [x_c, y_c] have (0,0) at the -lens optical center [c_x, c_y]. The maximum magnitudes -of both x and y coordinates are normalized to be 1 at the -edge further from the optical center, so the range -for both dimensions is -1 <= x <= 1.

-

Finally, r represents the radial distance from the -optical center, r^2 = x_i^2 + y_i^2, and its magnitude -is therefore no larger than |r| <= sqrt(2).

-

The distortion model used is the Brown-Conrady model.

-
noiseReduction
controls
Property NameTypeDescriptionUnitsRangeTags
- android.noiseReduction.mode - - byte - - [public] - - - [full] - - - -
    -
  • - OFF -

    No noise reduction is applied.

    -
  • -
  • - FAST -

    Noise reduction is applied without reducing frame rate relative to sensor -output. It may be the same as OFF if noise reduction will reduce frame rate -relative to sensor.

    -
  • -
  • - HIGH_QUALITY -

    High-quality noise reduction is applied, at the cost of possibly reduced frame -rate relative to sensor output.

    -
  • -
  • - MINIMAL - [optional] -

    MINIMAL noise reduction is applied without reducing frame rate relative to -sensor output.

    -
  • -
  • - ZERO_SHUTTER_LAG - [optional] -

    Noise reduction is applied at different levels for different output streams, -based on resolution. Streams at maximum recording resolution (see CameraDevice#createCaptureSession) or below have noise -reduction applied, while higher-resolution streams have MINIMAL (if supported) or no -noise reduction applied (if MINIMAL is not supported.) The degree of noise reduction -for low-resolution streams is tuned so that frame rate is not impacted, and the quality -is equal to or better than FAST (since it is only applied to lower-resolution outputs, -quality may improve from FAST).

    -

    This mode is intended to be used by applications operating in a zero-shutter-lag mode -with YUV or PRIVATE reprocessing, where the application continuously captures -high-resolution intermediate buffers into a circular buffer, from which a final image is -produced via reprocessing when a user takes a picture. For such a use case, the -high-resolution buffers must not have noise reduction applied to maximize efficiency of -preview and to avoid over-applying noise filtering when reprocessing, while -low-resolution buffers (used for recording or preview, generally) need noise reduction -applied for reasonable preview quality.

    -

    This mode is guaranteed to be supported by devices that support either the -YUV_REPROCESSING or PRIVATE_REPROCESSING capabilities -(android.request.availableCapabilities lists either of those capabilities) and it will -be the default mode for CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.

    -
  • -
- -
-

Mode of operation for the noise reduction algorithm.

-
- -

android.noiseReduction.availableNoiseReductionModes

-
Details
-

The noise reduction algorithm attempts to improve image quality by removing -excessive noise added by the capture process, especially in dark conditions.

-

OFF means no noise reduction will be applied by the camera device, for both raw and -YUV domain.

-

MINIMAL means that only sensor raw domain basic noise reduction is enabled, to remove demosaicing or other processing artifacts. For YUV_REPROCESSING, MINIMAL is the same as OFF. This mode is optional and may not be supported by all devices. The application should check android.noiseReduction.availableNoiseReductionModes before using it.

-

FAST/HIGH_QUALITY both mean camera device determined noise filtering -will be applied. HIGH_QUALITY mode indicates that the camera device -will use the highest-quality noise filtering algorithms, -even if it slows down capture rate. FAST means the camera device will not -slow down capture rate when applying noise filtering. FAST may be the same as MINIMAL if -MINIMAL is listed, or the same as OFF if any noise filtering will slow down capture rate. -Every output stream will have a similar amount of enhancement applied.

-

ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular -buffer of high-resolution images during preview and reprocess image(s) from that buffer -into a final capture when triggered by the user. In this mode, the camera device applies -noise reduction to low-resolution streams (below maximum recording resolution) to maximize -preview quality, but does not apply noise reduction to high-resolution streams, since -those will be reprocessed later if necessary.

-

For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera device -will apply FAST/HIGH_QUALITY YUV domain noise reduction, respectively. The camera device -may adjust the noise reduction parameters for best image quality based on the -android.reprocess.effectiveExposureFactor if it is set.

-
HAL Implementation Details
-

For YUV_REPROCESSING, the HAL can use android.reprocess.effectiveExposureFactor to adjust the internal noise reduction parameters appropriately to get the best quality images.

-
- android.noiseReduction.strength - - byte - - [system] - - - - - - - -

Control the amount of noise reduction -applied to the images

-
- 1-10; 10 is max noise reduction - -

1 - 10

-
static
Property NameTypeDescriptionUnitsRangeTags
- android.noiseReduction.availableNoiseReductionModes - - byte - x - - - n - - [public as enumList] - - - [limited] - - -
list of enums
- - -
-

List of noise reduction modes for android.noiseReduction.mode that are supported -by this camera device.

-
- -

Any value listed in android.noiseReduction.mode

-
Details
-

Full-capability camera devices will always support OFF and FAST.

-

Camera devices that support YUV_REPROCESSING or PRIVATE_REPROCESSING will support -ZERO_SHUTTER_LAG.

-

Legacy-capability camera devices will only support FAST mode.

-
HAL Implementation Details
-

HAL must support both FAST and HIGH_QUALITY if noise reduction control is available -on the camera device, but the underlying implementation can be the same for both modes. -That is, if the highest quality implementation on the camera device does not slow down -capture rate, then FAST and HIGH_QUALITY will generate the same output.

-
dynamic
Property NameTypeDescriptionUnitsRangeTags
- android.noiseReduction.mode - - byte - - [public] - - - [full] - - - -
    -
  • - OFF -

    No noise reduction is applied.

    -
  • -
  • - FAST -

    Noise reduction is applied without reducing frame rate relative to sensor -output. It may be the same as OFF if noise reduction will reduce frame rate -relative to sensor.

    -
  • -
  • - HIGH_QUALITY -

    High-quality noise reduction is applied, at the cost of possibly reduced frame -rate relative to sensor output.

    -
  • -
  • - MINIMAL - [optional] -

    MINIMAL noise reduction is applied without reducing frame rate relative to -sensor output.

    -
  • -
  • - ZERO_SHUTTER_LAG - [optional] -

    Noise reduction is applied at different levels for different output streams, -based on resolution. Streams at maximum recording resolution (see CameraDevice#createCaptureSession) or below have noise -reduction applied, while higher-resolution streams have MINIMAL (if supported) or no -noise reduction applied (if MINIMAL is not supported.) The degree of noise reduction -for low-resolution streams is tuned so that frame rate is not impacted, and the quality -is equal to or better than FAST (since it is only applied to lower-resolution outputs, -quality may improve from FAST).

    -

    This mode is intended to be used by applications operating in a zero-shutter-lag mode -with YUV or PRIVATE reprocessing, where the application continuously captures -high-resolution intermediate buffers into a circular buffer, from which a final image is -produced via reprocessing when a user takes a picture. For such a use case, the -high-resolution buffers must not have noise reduction applied to maximize efficiency of -preview and to avoid over-applying noise filtering when reprocessing, while -low-resolution buffers (used for recording or preview, generally) need noise reduction -applied for reasonable preview quality.

    -

    This mode is guaranteed to be supported by devices that support either the -YUV_REPROCESSING or PRIVATE_REPROCESSING capabilities -(android.request.availableCapabilities lists either of those capabilities) and it will -be the default mode for CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.

    -
  • -
- -
-

Mode of operation for the noise reduction algorithm.

-
- -

android.noiseReduction.availableNoiseReductionModes

-
Details
-

The noise reduction algorithm attempts to improve image quality by removing -excessive noise added by the capture process, especially in dark conditions.

-

OFF means no noise reduction will be applied by the camera device, for both raw and -YUV domain.

-

MINIMAL means that only sensor raw domain basic noise reduction is enabled, to remove demosaicing or other processing artifacts. For YUV_REPROCESSING, MINIMAL is the same as OFF. This mode is optional and may not be supported by all devices. The application should check android.noiseReduction.availableNoiseReductionModes before using it.

-

FAST/HIGH_QUALITY both mean camera device determined noise filtering -will be applied. HIGH_QUALITY mode indicates that the camera device -will use the highest-quality noise filtering algorithms, -even if it slows down capture rate. FAST means the camera device will not -slow down capture rate when applying noise filtering. FAST may be the same as MINIMAL if -MINIMAL is listed, or the same as OFF if any noise filtering will slow down capture rate. -Every output stream will have a similar amount of enhancement applied.

-

ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular -buffer of high-resolution images during preview and reprocess image(s) from that buffer -into a final capture when triggered by the user. In this mode, the camera device applies -noise reduction to low-resolution streams (below maximum recording resolution) to maximize -preview quality, but does not apply noise reduction to high-resolution streams, since -those will be reprocessed later if necessary.

-

For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera device -will apply FAST/HIGH_QUALITY YUV domain noise reduction, respectively. The camera device -may adjust the noise reduction parameters for best image quality based on the -android.reprocess.effectiveExposureFactor if it is set.

-
HAL Implementation Details
-

For YUV_REPROCESSING, the HAL can use android.reprocess.effectiveExposureFactor to adjust the internal noise reduction parameters appropriately to get the best quality images.

-
quirks
static
Property NameTypeDescriptionUnitsRangeTags
- android.quirks.meteringCropRegion - - byte - - [system] - - - - [deprecated] - - - - -

If set to 1, the camera service does not scale 'normalized' coordinates with respect to the crop region. This applies to metering input (a{e,f,wb}Region) and output (face rectangles).

-
- -

Deprecated. Do not use.

-
Details
-

Normalized coordinates refer to those in the -(-1000,1000) range mentioned in the -android.hardware.Camera API.

-

HAL implementations should instead always use and emit -sensor array-relative coordinates for all region data. Does -not need to be listed in static metadata. Support will be -removed in future versions of camera service.

-
- android.quirks.triggerAfWithAuto - - byte - - [system] - - - - [deprecated] - - - - -

If set to 1, then the camera service always switches to FOCUS_MODE_AUTO before issuing an AF trigger.

-
- -

Deprecated. Do not use.

-
Details
-

HAL implementations should implement AF trigger -modes for AUTO, MACRO, CONTINUOUS_FOCUS, and -CONTINUOUS_PICTURE modes instead of using this flag. Does -not need to be listed in static metadata. Support will be -removed in future versions of camera service

-
- android.quirks.useZslFormat - - byte - - [system] - - - - [deprecated] - - - - -

If set to 1, the camera service uses -CAMERA2_PIXEL_FORMAT_ZSL instead of -HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED for the zero -shutter lag stream

-
- -

Deprecated. Do not use.

-
Details
-

HAL implementations should use gralloc usage flags -to determine that a stream will be used for -zero-shutter-lag, instead of relying on an explicit -format setting. Does not need to be listed in static -metadata. Support will be removed in future versions of -camera service.

-
- android.quirks.usePartialResult - - byte - - [hidden] - - - - [deprecated] - - - - -

If set to 1, the HAL will always split result -metadata for a single capture into multiple buffers, -returned using multiple process_capture_result calls.

-
- -

Deprecated. Do not use.

-
Details
-

Does not need to be listed in static -metadata. Support for partial results will be reworked in -future versions of camera service. This quirk will stop -working at that point; DO NOT USE without careful -consideration of future support.

-
HAL Implementation Details
-

Refer to camera3_capture_result::partial_result -for information on how to implement partial results.

-
dynamic
Property NameTypeDescriptionUnitsRangeTags
- android.quirks.partialResult - - byte - - [hidden as boolean] - - - - [deprecated] - - -
    -
  • - FINAL -

    The last or only metadata result buffer -for this capture.

    -
  • -
  • - PARTIAL -

    A partial buffer of result metadata for this -capture. More result buffers for this capture will be sent -by the camera device, the last of which will be marked -FINAL.

    -
  • -
- -
-

Whether a result given to the framework is the -final one for the capture, or only a partial that contains a -subset of the full set of dynamic metadata -values.

-
- -

Deprecated. Do not use.

-

Optional. Default value is FINAL.

-
Details
-

The entries in the result metadata buffers for a -single capture may not overlap, except for this entry. The -FINAL buffers must retain FIFO ordering relative to the -requests that generate them, so the FINAL buffer for frame 3 must -always be sent to the framework after the FINAL buffer for frame 2, and -before the FINAL buffer for frame 4. PARTIAL buffers may be returned -in any order relative to other frames, but all PARTIAL buffers for a given -capture must arrive before the FINAL buffer for that capture. This entry may -only be used by the camera device if quirks.usePartialResult is set to 1.

-
HAL Implementation Details
-

Refer to camera3_capture_result::partial_result -for information on how to implement partial results.

-
request
controls
Property NameTypeDescriptionUnitsRangeTags
- android.request.frameCount - - int32 - - [system] - - - - [deprecated] - - - - -

A frame counter set by the framework. Must -be maintained unchanged in output frame. This value monotonically -increases with every new result (that is, each new result has a unique -frameCount value).

-
- incrementing integer - -

Deprecated. Do not use.

-

Any int.

-
- android.request.id - - int32 - - [hidden] - - - - - - - -

An application-specified ID for the current -request. Must be maintained unchanged in output -frame

-
- arbitrary integer assigned by application - -

Any int

-
- android.request.inputStreams - - int32 - x - - - n - - [system] - - - - [deprecated] - - - - -

List which camera reprocess stream is used -for the source of reprocessing data.

-
- List of camera reprocess stream IDs - -

Deprecated. Do not use.

-

Typically only one entry is allowed, and it must be a valid reprocess stream ID.

-
Details
-

Only meaningful when android.request.type == -REPROCESS. Ignored otherwise

-
- android.request.metadataMode - - byte - - [system] - - - - - -
    -
  • - NONE -

    No metadata should be produced on output, except -for application-bound buffer data. If no -application-bound streams exist, no frame should be -placed in the output frame queue. If such streams -exist, a frame should be placed on the output queue -with null metadata but with the necessary output buffer -information. Timestamp information should still be -included with any output stream buffers

    -
  • -
  • - FULL -

    All metadata should be produced. Statistics will -only be produced if they are separately -enabled

    -
  • -
- -
-

How much metadata to produce on -output

-
- -
- android.request.outputStreams - - int32 - x - - - n - - [system] - - - - [deprecated] - - - - -

Lists which camera output streams image data -from this capture must be sent to

-
- List of camera stream IDs - -

Deprecated. Do not use.

-

List must only include streams that have been -created

-
Details
-

If no output streams are listed, then the image -data should simply be discarded. The image data must -still be captured for metadata and statistics production, -and the lens and flash must operate as requested.

-
- android.request.type - - byte - - [system] - - - - [deprecated] - - -
    -
  • - CAPTURE -

    Capture a new image from the imaging hardware, -and process it according to the -settings

    -
  • -
  • - REPROCESS -

    Process previously captured data; the -android.request.inputStreams parameter determines the -source reprocessing stream. TODO: Mark dynamic metadata -needed for reprocessing with [RP]

    -
  • -
- -
-

The type of the request; either CAPTURE or -REPROCESS. For HAL3, this tag is redundant.

-
- -

Deprecated. Do not use.

-
static
Property NameTypeDescriptionUnitsRangeTags
- android.request.maxNumOutputStreams - - int32 - x - - - 3 - - [ndk_public] - - - [legacy] - - - - - -

The maximum numbers of different types of output streams -that can be configured and used simultaneously by a camera device.

-
- -

For processed (and stalling) format streams, >= 1.

-

For Raw format (either stalling or non-stalling) streams, >= 0.

-

For processed (but not stalling) format streams, >= 3 -for FULL mode devices (android.info.supportedHardwareLevel == FULL); ->= 2 for LIMITED mode devices (android.info.supportedHardwareLevel == LIMITED).

-
Details
-

This is a 3 element tuple that contains the max number of output simultaneous -streams for raw sensor, processed (but not stalling), and processed (and stalling) -formats respectively. For example, assuming that JPEG is typically a processed and -stalling stream, if max raw sensor format output stream number is 1, max YUV streams -number is 3, and max JPEG stream number is 2, then this tuple should be (1, 3, 2).

-

This lists the upper bound of the number of output streams supported by the camera device. Using more streams simultaneously may require more hardware and CPU resources that will consume more power. The image format for an output stream can be any supported format provided by android.scaler.availableStreamConfigurations. The formats defined in android.scaler.availableStreamConfigurations can be categorized into the 3 stream types below (see the sketch after this list):

-
    -
  • Processed (but stalling): any non-RAW format with a stallDurations > 0. - Typically JPEG format.
  • -
  • Raw formats: RAW_SENSOR, RAW10, or RAW12.
  • -
  • Processed (but not-stalling): any non-RAW format without a stall duration. - Typically YUV_420_888, - NV21, or - YV12.
  • -
-
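A hypothetical sketch (the struct and field names are illustrative, not from the HAL headers) of how the 3-element tuple could be interpreted:

#include <cstdint>

// Illustrative sketch: interpret the android.request.maxNumOutputStreams tuple.
struct MaxOutputStreams {
    int32_t raw;                  // RAW_SENSOR / RAW10 / RAW12 streams
    int32_t processedNonStalling; // e.g. YUV_420_888, NV21, YV12
    int32_t processedStalling;    // e.g. JPEG (BLOB)
};

MaxOutputStreams parseMaxNumOutputStreams(const int32_t tuple[3]) {
    // Example from the text above: (1, 3, 2) means at most 1 RAW stream,
    // 3 processed non-stalling streams, and 2 stalling (JPEG) streams at once.
    return {tuple[0], tuple[1], tuple[2]};
}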
- android.request.maxNumOutputRaw - - int32 - - [java_public] - - [synthetic] - - [legacy] - - - - - -

The maximum numbers of different types of output streams -that can be configured and used simultaneously by a camera device -for any RAW formats.

-
- -

>= 0

-
Details
-

This value contains the max number of output simultaneous -streams from the raw sensor.

-

This lists the upper bound of the number of output streams supported by -the camera device. Using more streams simultaneously may require more hardware and -CPU resources that will consume more power. The image format for this kind of an output stream can -be any RAW and supported format provided by android.scaler.streamConfigurationMap.

-

In particular, a RAW format is typically one of:

- -

LEGACY mode devices (android.info.supportedHardwareLevel == LEGACY) -never support raw streams.

-
- android.request.maxNumOutputProc - - int32 - - [java_public] - - [synthetic] - - [legacy] - - - - - -

The maximum numbers of different types of output streams -that can be configured and used simultaneously by a camera device -for any processed (but not-stalling) formats.

-
- -

>= 3 -for FULL mode devices (android.info.supportedHardwareLevel == FULL); ->= 2 for LIMITED mode devices (android.info.supportedHardwareLevel == LIMITED).

-
Details
-

This value contains the max number of output simultaneous -streams for any processed (but not-stalling) formats.

-

This lists the upper bound of the number of output streams supported by -the camera device. Using more streams simultaneously may require more hardware and -CPU resources that will consume more power. The image format for this kind of an output stream can -be any non-RAW and supported format provided by android.scaler.streamConfigurationMap.

-

Processed (but not-stalling) is defined as any non-RAW format without a stall duration. -Typically:

- -

For full guarantees, query StreamConfigurationMap#getOutputStallDuration with a -processed format -- it will return 0 for a non-stalling stream.

-

LEGACY devices will support at least 2 processing/non-stalling streams.

-
- android.request.maxNumOutputProcStalling - - int32 - - [java_public] - - [synthetic] - - [legacy] - - - - - -

The maximum numbers of different types of output streams -that can be configured and used simultaneously by a camera device -for any processed (and stalling) formats.

-
- -

>= 1

-
Details
-

This value contains the max number of output simultaneous streams for any processed (and stalling) formats.

-

This lists the upper bound of the number of output streams supported by -the camera device. Using more streams simultaneously may require more hardware and -CPU resources that will consume more power. The image format for this kind of an output stream can -be any non-RAW and supported format provided by android.scaler.streamConfigurationMap.

-

A processed and stalling format is defined as any non-RAW format with a stallDurations -> 0. Typically only the JPEG format is a -stalling format.

-

For full guarantees, query StreamConfigurationMap#getOutputStallDuration with a -processed format -- it will return a non-0 value for a stalling stream.

-

LEGACY devices will support up to 1 processing/stalling stream.

-
- android.request.maxNumReprocessStreams - - int32 - x - - - 1 - - [system] - - - - [deprecated] - - - - -

How many reprocessing streams of any type -can be allocated at the same time.

-
- -

Deprecated. Do not use.

-

>= 0

-
Details
-

Only used by HAL2.x.

-

When set to 0, it means no reprocess stream is supported.

-
- android.request.maxNumInputStreams - - int32 - - [public] - - - [full] - - - - - -

The maximum numbers of any type of input streams -that can be configured and used simultaneously by a camera device.

-
- -

0 or 1.

-
Details
-

When set to 0, it means no input stream is supported.

-

The image format for an input stream can be any supported format returned by StreamConfigurationMap#getInputFormats. When using an input stream, there must be at least one output stream configured to receive the reprocessed images.

-

When an input stream and some output streams are used in a reprocessing request, -only the input buffer will be used to produce these output stream buffers, and a -new sensor image will not be captured.

-

For example, for the Zero Shutter Lag (ZSL) still capture use case, the input stream image format will be PRIVATE, and the associated output stream image format should be JPEG.

-
HAL Implementation Details
-

For the reprocessing flow and controls, see -hardware/libhardware/include/hardware/camera3.h Section 10 for more details.

-
- android.request.pipelineMaxDepth - - byte - - [public] - - - [legacy] - - - - - -

Specifies the number of maximum pipeline stages a frame -has to go through from when it's exposed to when it's available -to the framework.

-
- -
Details
-

A typical minimum value for this is 2 (one stage to expose, -one stage to readout) from the sensor. The ISP then usually adds -its own stages to do custom HW processing. Further stages may be -added by SW processing.

-

Depending on what settings are used (e.g. YUV, JPEG) and what -processing is enabled (e.g. face detection), the actual pipeline -depth (specified by android.request.pipelineDepth) may be less than -the max pipeline depth.

-

A pipeline depth of X stages is equivalent to a pipeline latency of -X frame intervals.

-

This value will normally be 8 or less, however, for high speed capture session, -the max pipeline depth will be up to 8 x size of high speed capture request list.
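Illustrative only (hypothetical helper): a pipeline depth of X stages corresponds to a latency of X frame intervals, e.g. 8 stages at a 33 ms frame duration is roughly 267 ms.

#include <cstdint>

// Illustrative sketch: pipeline latency implied by a given pipeline depth.
int64_t pipelineLatencyNs(uint8_t pipelineDepth, int64_t frameDurationNs) {
    return static_cast<int64_t>(pipelineDepth) * frameDurationNs;
}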

-
HAL Implementation Details
-

This value should be 4 or less, except for the high speed recording session, where the max batch sizes may be larger than 1.

-
- android.request.partialResultCount - - int32 - - [public] - - - - - - - -

Defines how many sub-components -a result will be composed of.

-
- -

>= 1

-
Details
-

In order to combat the pipeline latency, partial results -may be delivered to the application layer from the camera device as -soon as they are available.

-

Optional; defaults to 1. A value of 1 means that partial -results are not supported, and only the final TotalCaptureResult will -be produced by the camera device.

-

A typical use case for this might be: after requesting an -auto-focus (AF) lock the new AF state might be available 50% -of the way through the pipeline. The camera device could -then immediately dispatch this state via a partial result to -the application, and the rest of the metadata via later -partial results.

-
- android.request.availableCapabilities - - byte - x - - - n - - [public] - - - [legacy] - - - - - - -

List of capabilities that this camera device -advertises as fully supporting.

-
- -
Details
-

A capability is a contract that the camera device makes in order -to be able to satisfy one or more use cases.

-

Listing a capability guarantees that the whole set of features -required to support a common use will all be available.

-

Using a subset of the functionality provided by an unsupported -capability may be possible on a specific camera device implementation; -to do this query each of android.request.availableRequestKeys, -android.request.availableResultKeys, -android.request.availableCharacteristicsKeys.

-

The following capabilities are guaranteed to be available on -android.info.supportedHardwareLevel == FULL devices:

-
    -
  • MANUAL_SENSOR
  • -
  • MANUAL_POST_PROCESSING
  • -
-

Other capabilities may be available on either FULL or LIMITED -devices, but the application should query this key to be sure.

-
HAL Implementation Details
-

Additional constraint details per-capability will be available -in the Compatibility Test Suite.

-

Minimum baseline requirements required for the -BACKWARD_COMPATIBLE capability are not explicitly listed. -Instead refer to "BC" tags and the camera CTS tests in the -android.hardware.camera2.cts package.

-

Listed controls that can be either request or result (e.g. -android.sensor.exposureTime) must be available both in the -request and the result in order to be considered to be -capability-compliant.

-

For example, if the HAL claims to support MANUAL control, -then exposure time must be configurable via the request and -the actual exposure applied must be available via -the result.

-

If MANUAL_SENSOR is omitted, the HAL may choose to omit the -android.scaler.availableMinFrameDurations static property entirely.

-

For PRIVATE_REPROCESSING and YUV_REPROCESSING capabilities, see -hardware/libhardware/include/hardware/camera3.h Section 10 for more information.

-

Devices that support the MANUAL_SENSOR capability must support the -CAMERA3_TEMPLATE_MANUAL template defined in camera3.h.

-

Devices that support the PRIVATE_REPROCESSING capability or the -YUV_REPROCESSING capability must support the -CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template defined in camera3.h.

-

For DEPTH_OUTPUT, the depth-format keys -android.depth.availableDepthStreamConfigurations, -android.depth.availableDepthMinFrameDurations, -android.depth.availableDepthStallDurations must be available, in -addition to the other keys explicitly mentioned in the DEPTH_OUTPUT -enum notes. The entry android.depth.maxDepthSamples must be available -if the DEPTH_POINT_CLOUD format is supported (HAL pixel format BLOB, dataspace -DEPTH).

-
- android.request.availableRequestKeys - - int32 - x - - - n - - [ndk_public] - - - [legacy] - - - - - -

A list of all keys that the camera device has available -to use with CaptureRequest.

-
- -
Details
-

Attempting to set a key into a CaptureRequest that is not -listed here will result in an invalid request and will be rejected -by the camera device.

-

This field can be used to query the feature set of a camera device -at a more granular level than capabilities. This is especially -important for optional keys that are not listed under any capability -in android.request.availableCapabilities.

-
HAL Implementation Details
-

Vendor tags must not be listed here. Use the vendor tag metadata -extensions C api instead (refer to camera3.h for more details).

-

Setting/getting vendor tags will be checked against the metadata -vendor extensions API and not against this field.

-

The HAL must not consume any request tags that are not listed either -here or in the vendor tag list.

-

The public camera2 API will always make the vendor tags visible -via -CameraCharacteristics#getAvailableCaptureRequestKeys.

-
- android.request.availableResultKeys - - int32 - x - - - n - - [ndk_public] - - - [legacy] - - - - - -

A list of all keys that the camera device has available -to use with CaptureResult.

-
- -
Details
-

Attempting to get a key from a CaptureResult that is not -listed here will always return a null value. Getting a key from -a CaptureResult that is listed here will generally never return a null -value.

-

The following keys may return null unless they are enabled:

- -

(Those sometimes-null keys will nevertheless be listed here -if they are available.)

-

This field can be used to query the feature set of a camera device -at a more granular level than capabilities. This is especially -important for optional keys that are not listed under any capability -in android.request.availableCapabilities.

-
HAL Implementation Details
-

Tags listed here must always have an entry in the result metadata, -even if that size is 0 elements. Only array-type tags (e.g. lists, -matrices, strings) are allowed to have 0 elements.

-

Vendor tags must not be listed here. Use the vendor tag metadata -extensions C api instead (refer to camera3.h for more details).

-

Setting/getting vendor tags will be checked against the metadata -vendor extensions API and not against this field.

-

The HAL must not produce any result tags that are not listed either -here or in the vendor tag list.

-

The public camera2 API will always make the vendor tags visible via CameraCharacteristics#getAvailableCaptureResultKeys.

-
- android.request.availableCharacteristicsKeys - - int32 - x - - - n - - [ndk_public] - - - [legacy] - - - - - -

A list of all keys that the camera device has available -to use with CameraCharacteristics.

-
- -
Details
-

This entry follows the same rules as -android.request.availableResultKeys (except that it applies for -CameraCharacteristics instead of CaptureResult). See above for more -details.

-
HAL Implementation Details
-

Keys listed here must always have an entry in the static info metadata, -even if that size is 0 elements. Only array-type tags (e.g. lists, -matrices, strings) are allowed to have 0 elements.

-

Vendor tags must not be listed here. Use the vendor tag metadata -extensions C api instead (refer to camera3.h for more details).

-

Setting/getting vendor tags will be checked against the metadata -vendor extensions API and not against this field.

-

The HAL must not have any tags in its static info that are not listed -either here or in the vendor tag list.

-

The public camera2 API will always make the vendor tags visible -via CameraCharacteristics#getKeys.

-
dynamic
Property NameTypeDescriptionUnitsRangeTags
- android.request.frameCount - - int32 - - [hidden] - - - - [deprecated] - - - - -

A frame counter set by the framework. This value monotonically -increases with every new result (that is, each new result has a unique -frameCount value).

-
- count of frames - -

Deprecated. Do not use.

-

> 0

-
Details
-

Reset on release()

-
- android.request.id - - int32 - - [hidden] - - - - - - - -

An application-specified ID for the current -request. Must be maintained unchanged in output -frame

-
- arbitrary integer assigned by application - -

Any int

-
- android.request.metadataMode - - byte - - [system] - - - - - -
    -
  • - NONE -

    No metadata should be produced on output, except -for application-bound buffer data. If no -application-bound streams exist, no frame should be -placed in the output frame queue. If such streams -exist, a frame should be placed on the output queue -with null metadata but with the necessary output buffer -information. Timestamp information should still be -included with any output stream buffers

    -
  • -
  • - FULL -

    All metadata should be produced. Statistics will -only be produced if they are separately -enabled

    -
  • -
- -
-

How much metadata to produce on -output

-
- -
- android.request.outputStreams - - int32 - x - - - n - - [system] - - - - [deprecated] - - - - -

Lists which camera output streams image data -from this capture must be sent to

-
- List of camera stream IDs - -

Deprecated. Do not use.

-

List must only include streams that have been -created

-
Details
-

If no output streams are listed, then the image -data should simply be discarded. The image data must -still be captured for metadata and statistics production, -and the lens and flash must operate as requested.

-
- android.request.pipelineDepth - - byte - - [public] - - - [legacy] - - - - - -

Specifies the number of pipeline stages the frame went -through from when it was exposed to when the final completed result -was available to the framework.

-
- -

<= android.request.pipelineMaxDepth

-
Details
-

Depending on what settings are used in the request, and -what streams are configured, the data may undergo less processing, -and some pipeline stages skipped.

-

See android.request.pipelineMaxDepth for more details.

-
HAL Implementation Details
-

This value must always represent the accurate count of how many -pipeline stages were actually used.

-
scaler
controls
Property NameTypeDescriptionUnitsRangeTags
- android.scaler.cropRegion - - int32 - x - - - 4 - - [public as rectangle] - - - [legacy] - - - - - -

The desired region of the sensor to read out for this capture.

-
- Pixel coordinates relative to - android.sensor.info.activeArraySize - -
Details
-

This control can be used to implement digital zoom.

-

The crop region coordinate system is based off -android.sensor.info.activeArraySize, with (0, 0) being the -top-left corner of the sensor active array.

-

Output streams use this rectangle to produce their output, -cropping to a smaller region if necessary to maintain the -stream's aspect ratio, then scaling the sensor input to -match the output's configured resolution.

-

The crop region is applied after the RAW to other color -space (e.g. YUV) conversion. Since raw streams -(e.g. RAW16) don't have the conversion stage, they are not -croppable. The crop region will be ignored by raw streams.

-

For non-raw streams, any additional per-stream cropping will -be done to maximize the final pixel area of the stream.

-

For example, if the crop region is set to a 4:3 aspect -ratio, then 4:3 streams will use the exact crop -region. 16:9 streams will further crop vertically -(letterbox).

-

Conversely, if the crop region is set to a 16:9, then 4:3 -outputs will crop horizontally (pillarbox), and 16:9 -streams will match exactly. These additional crops will -be centered within the crop region.

-

The width and height of the crop region cannot -be set to be smaller than -floor( activeArraySize.width / android.scaler.availableMaxDigitalZoom ) and -floor( activeArraySize.height / android.scaler.availableMaxDigitalZoom ), respectively.

-

The camera device may adjust the crop region to account -for rounding and other hardware requirements; the final -crop region used will be included in the output capture -result.
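As a rough, non-normative sketch (the Rect type and helper name are illustrative, and the active array is assumed to start at (0, 0)), a requested crop region could be clamped to the minimum size implied by android.scaler.availableMaxDigitalZoom like this:

#include <algorithm>
#include <cmath>
#include <cstdint>

// Illustrative sketch: clamp a requested crop region to the minimum size
// allowed by maxDigitalZoom, relative to android.sensor.info.activeArraySize.
struct Rect { int32_t left, top, width, height; };

Rect clampCropRegion(Rect requested, Rect activeArray, float maxDigitalZoom) {
    const int32_t minW =
        static_cast<int32_t>(std::floor(activeArray.width / maxDigitalZoom));
    const int32_t minH =
        static_cast<int32_t>(std::floor(activeArray.height / maxDigitalZoom));
    requested.width  = std::max(requested.width,  minW);
    requested.height = std::max(requested.height, minH);
    // Keep the (possibly grown) region inside the active array.
    requested.left = std::max(
        std::min(requested.left, activeArray.width - requested.width), 0);
    requested.top = std::max(
        std::min(requested.top, activeArray.height - requested.height), 0);
    return requested;
}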

-
HAL Implementation Details
-

The output streams must maintain square pixels at all -times, no matter what the relative aspect ratios of the -crop region and the stream are. Negative values for -corner are allowed for raw output if full pixel array is -larger than active pixel array. Width and height may be -rounded to nearest larger supportable width, especially -for raw output, where only a few fixed scales may be -possible.

-

For a set of output streams configured, if the sensor output is cropped to a smaller size than the active array size, the HAL needs to follow the cropping rules below:

-
    -
  • -

The HAL needs to handle the cropRegion as if the sensor crop size were the effective active array size. More specifically, the HAL must transform the requested cropRegion from android.sensor.info.activeArraySize to the sensor cropped pixel area size in this way (a sketch follows after this list):

    -
      -
1. Translate the requested cropRegion w.r.t. the left top corner of the sensor cropped pixel area by (tx, ty), where ty = sensorCrop.top * (sensorCrop.height / activeArraySize.height) and tx = sensorCrop.left * (sensorCrop.width / activeArraySize.width). The (sensorCrop.top, sensorCrop.left) coordinate is based off android.sensor.info.activeArraySize.
    2. -
3. Scale the width and height of the requested cropRegion with scaling factors of sensorCrop.width/activeArraySize.width and sensorCrop.height/activeArraySize.height, respectively. Once this new cropRegion is calculated, the HAL must use this region to crop the image with regard to the sensor crop size (effective active array size). The HAL still needs to follow the general cropping rule for this new cropRegion and the effective active array size.
    4. -
    -
  • -
  • -

The HAL must report the cropRegion with regard to android.sensor.info.activeArraySize. The HAL needs to convert the new cropRegion generated above back to the full active array size coordinates. The reported cropRegion may be slightly different from the requested cropRegion, since the HAL may adjust the crop region to account for rounding, conversion error, or other hardware limitations.

    -
  • -
-
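A rough sketch of the two steps above (illustrative only; exact rounding, sign conventions, and clamping are implementation-defined, and Rect is the hypothetical type from the earlier crop-region sketch):

// Illustrative sketch: map a requested cropRegion (relative to activeArraySize)
// into the coordinates of a smaller sensor crop, per the steps listed above.
Rect mapCropToSensorCrop(Rect request, Rect sensorCrop, Rect activeArray) {
    const float sx = static_cast<float>(sensorCrop.width)  / activeArray.width;
    const float sy = static_cast<float>(sensorCrop.height) / activeArray.height;
    // Step 1: translation of the requested region.
    const float tx = sensorCrop.left * sx;
    const float ty = sensorCrop.top  * sy;
    // Step 2: scale the region by the sensor-crop-to-active-array ratios.
    Rect out;
    out.left   = static_cast<int32_t>(request.left - tx);
    out.top    = static_cast<int32_t>(request.top  - ty);
    out.width  = static_cast<int32_t>(request.width  * sx);
    out.height = static_cast<int32_t>(request.height * sy);
    return out;
}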

HAL2.x uses only (x, y, width)

-
static
Property NameTypeDescriptionUnitsRangeTags
- android.scaler.availableFormats - - int32 - x - - - n - - [hidden as imageFormat] - - - - [deprecated] - - -
    -
  • - RAW16 - [optional] - 0x20 -

    RAW16 is a standard, cross-platform format for raw image -buffers with 16-bit pixels.

    -

    Buffers of this format are typically expected to have a -Bayer Color Filter Array (CFA) layout, which is given in -android.sensor.info.colorFilterArrangement. Sensors with -CFAs that are not representable by a format in -android.sensor.info.colorFilterArrangement should not -use this format.

    -

    Buffers of this format will also follow the constraints given for -RAW_OPAQUE buffers, but with relaxed performance constraints.

    -

    This format is intended to give users access to the full contents -of the buffers coming directly from the image sensor prior to any -cropping or scaling operations, and all coordinate systems for -metadata used for this format are relative to the size of the -active region of the image sensor before any geometric distortion -correction has been applied (i.e. -android.sensor.info.preCorrectionActiveArraySize). Supported -dimensions for this format are limited to the full dimensions of -the sensor (e.g. either android.sensor.info.pixelArraySize or -android.sensor.info.preCorrectionActiveArraySize will be the -only supported output size).

    -

    See android.scaler.availableInputOutputFormatsMap for -the full set of performance guarantees.

    -
  • -
  • - RAW_OPAQUE - [optional] - 0x24 -

    RAW_OPAQUE (or -RAW_PRIVATE -as referred in public API) is a format for raw image buffers -coming from an image sensor.

    -

    The actual structure of buffers of this format is -platform-specific, but must follow several constraints:

    -
      -
    1. No image post-processing operations may have been applied to -buffers of this type. These buffers contain raw image data coming -directly from the image sensor.
    2. -
    3. If a buffer of this format is passed to the camera device for -reprocessing, the resulting images will be identical to the images -produced if the buffer had come directly from the sensor and was -processed with the same settings.
    4. -
    -

    The intended use for this format is to allow access to the native -raw format buffers coming directly from the camera sensor without -any additional conversions or decrease in framerate.

    -

    See android.scaler.availableInputOutputFormatsMap for the full set of -performance guarantees.

    -
  • -
  • - YV12 - [optional] - 0x32315659 -

    YCrCb 4:2:0 Planar

    -
  • -
  • - YCrCb_420_SP - [optional] - 0x11 -

    NV21

    -
  • -
  • - IMPLEMENTATION_DEFINED - 0x22 -

    System internal format, not application-accessible

    -
  • -
  • - YCbCr_420_888 - 0x23 -

    Flexible YUV420 Format

    -
  • -
  • - BLOB - 0x21 -

    JPEG format

    -
  • -
- -
-

The list of image formats that are supported by this -camera device for output streams.

-
- -

Deprecated. Do not use.

-
Details
-

All camera devices will support JPEG and YUV_420_888 formats.

-

When set to YUV_420_888, application can access the YUV420 data directly.

-
HAL Implementation Details
-

These format values are from HAL_PIXEL_FORMAT_* in -system/core/include/system/graphics.h.

-

When IMPLEMENTATION_DEFINED is used, the platform gralloc module will select a format based on the usage flags provided by the camera HAL device and the other endpoint of the stream. It is usually used by preview and recording streams, where the application doesn't need to access the image data.

-

YCbCr_420_888 format must be supported by the HAL. When an image stream -needs CPU/application direct access, this format will be used.

-

The BLOB format must be supported by the HAL. This is used for the JPEG stream.

-

A RAW_OPAQUE buffer should contain only pixel data. It is strongly -recommended that any information used by the camera device when -processing images is fully expressed by the result metadata -for that image buffer.

-
- android.scaler.availableJpegMinDurations - - int64 - x - - - n - - [hidden] - - - - [deprecated] - - - - -

The minimum frame duration that is supported -for each resolution in android.scaler.availableJpegSizes.

-
- Nanoseconds - -

Deprecated. Do not use.

-

TODO: Remove property.

-
Details
-

This corresponds to the minimum steady-state frame duration when only -that JPEG stream is active and captured in a burst, with all -processing (typically in android.*.mode) set to FAST.

-

When multiple streams are configured, the minimum -frame duration will be >= max(individual stream min -durations)

-
- android.scaler.availableJpegSizes - - int32 - x - - - n x 2 - - [hidden as size] - - - - [deprecated] - - - - -

The JPEG resolutions that are supported by this camera device.

-
- -

Deprecated. Do not use.

-

TODO: Remove property.

-
Details
-

The resolutions are listed as (width, height) pairs. All camera devices will support -sensor maximum resolution (defined by android.sensor.info.activeArraySize).

-
HAL Implementation Details
-

The HAL must include sensor maximum resolution -(defined by android.sensor.info.activeArraySize), -and should include half/quarter of sensor maximum resolution.

-
- android.scaler.availableMaxDigitalZoom - - float - - [public] - - - [legacy] - - - - - -

The maximum ratio between both active area width -and crop region width, and active area height and -crop region height, for android.scaler.cropRegion.

-
- Zoom scale factor - -

>=1

-
Details
-

This represents the maximum amount of zooming possible by -the camera device, or equivalently, the minimum cropping -window size.

-

Crop regions that have a width or height that is smaller -than this ratio allows will be rounded up to the minimum -allowed size by the camera device.

-
- android.scaler.availableProcessedMinDurations - - int64 - x - - - n - - [hidden] - - - - [deprecated] - - - - -

For each available processed output size (defined in -android.scaler.availableProcessedSizes), this property lists the -minimum supportable frame duration for that size.

-
- Nanoseconds - -

Deprecated. Do not use.

-
Details
-

This should correspond to the frame duration when only that processed -stream is active, with all processing (typically in android.*.mode) -set to FAST.

-

When multiple streams are configured, the minimum frame duration will -be >= max(individual stream min durations).

-
- android.scaler.availableProcessedSizes - - int32 - x - - - n x 2 - - [hidden as size] - - - - [deprecated] - - - - -

The resolutions available for use with -processed output streams, such as YV12, NV12, and -platform opaque YUV/RGB streams to the GPU or video -encoders.

-
- -

Deprecated. Do not use.

-
Details
-

The resolutions are listed as (width, height) pairs.

-

For a given use case, the actual maximum supported resolution -may be lower than what is listed here, depending on the destination -Surface for the image data. For example, for recording video, -the video encoder chosen may have a maximum size limit (e.g. 1080p) -smaller than what the camera (e.g. maximum resolution is 3264x2448) -can provide.

-

Please reference the documentation for the image data destination to -check if it limits the maximum size for image data.

-
HAL Implementation Details
-

For FULL capability devices (android.info.supportedHardwareLevel == FULL), the HAL must include all JPEG sizes listed in android.scaler.availableJpegSizes and each of the resolutions below if it is smaller than or equal to the sensor maximum resolution (if they are not listed in JPEG sizes already):

-
    -
  • 240p (320 x 240)
  • -
  • 480p (640 x 480)
  • -
  • 720p (1280 x 720)
  • -
  • 1080p (1920 x 1080)
  • -
-

For LIMITED capability devices (android.info.supportedHardwareLevel == LIMITED), -the HAL only has to list up to the maximum video size supported by the devices.

-
- android.scaler.availableRawMinDurations - - int64 - x - - - n - - [system] - - - - [deprecated] - - - - -

For each available raw output size (defined in -android.scaler.availableRawSizes), this property lists the minimum -supportable frame duration for that size.

-
- Nanoseconds - -

Deprecated. Do not use.

-
Details
-

Should correspond to the frame duration when only the raw stream is -active.

-

When multiple streams are configured, the minimum -frame duration will be >= max(individual stream min -durations)

-
- android.scaler.availableRawSizes - - int32 - x - - - n x 2 - - [system as size] - - - - [deprecated] - - - - -

The resolutions available for use with raw -sensor output streams, listed as width, -height

-
- -

Deprecated. Do not use.

-
- android.scaler.availableInputOutputFormatsMap - - int32 - - [hidden as reprocessFormatsMap] - - - - - - - -

The mapping of image formats that are supported by this -camera device for input streams, to their corresponding output formats.

-
- -
Details
-

All camera devices with at least 1 -android.request.maxNumInputStreams will have at least one -available input format.

-

The camera device will support the following map of formats, -if its dependent capability (android.request.availableCapabilities) is supported:

Input Format            | Output Format            | Capability
ImageFormat#PRIVATE     | ImageFormat#JPEG         | PRIVATE_REPROCESSING
ImageFormat#PRIVATE     | ImageFormat#YUV_420_888  | PRIVATE_REPROCESSING
ImageFormat#YUV_420_888 | ImageFormat#JPEG         | YUV_REPROCESSING
ImageFormat#YUV_420_888 | ImageFormat#YUV_420_888  | YUV_REPROCESSING
-

PRIVATE refers to a device-internal format that is not directly application-visible. A -PRIVATE input surface can be acquired by ImageReader#newInstance -with ImageFormat#PRIVATE as the format.

-

For a PRIVATE_REPROCESSING-capable camera device, using the PRIVATE format as either input or output will never hurt the maximum frame rate (i.e. getOutputStallDuration(ImageFormat.PRIVATE, size) is always 0).

-

Attempting to configure an input stream with output streams not -listed as available in this map is not valid.

-
HAL Implementation Details
-

For the formats, see system/core/include/system/graphics.h for a definition -of the image format enumerations. The PRIVATE format refers to the -HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED format. The HAL could determine -the actual format by using the gralloc usage flags. -For ZSL use case in particular, the HAL could choose appropriate format (partially -processed YUV or RAW based format) by checking the format and GRALLOC_USAGE_HW_CAMERA_ZSL. -See camera3.h for more details.

-

This value is encoded as a variable-size array-of-arrays. -The inner array always contains [format, length, ...] where -... has length elements. An inner array is followed by another -inner array if the total metadata entry size hasn't yet been exceeded.

-

A code sample to read/write this encoding (with a device that -supports reprocessing IMPLEMENTATION_DEFINED to YUV_420_888, and JPEG, -and reprocessing YUV_420_888 to YUV_420_888 and JPEG):

-
// reading
int32_t* contents = &entry.i32[0];
for (size_t i = 0; i < entry.count; ) {
    int32_t format = contents[i++];
    int32_t length = contents[i++];
    int32_t output_formats[length];
    memcpy(&output_formats[0], &contents[i],
           length * sizeof(int32_t));
    i += length;
}

// writing (static example, PRIVATE_REPROCESSING + YUV_REPROCESSING)
int32_t contents[] = {
  IMPLEMENTATION_DEFINED, 2, YUV_420_888, BLOB,
  YUV_420_888, 2, YUV_420_888, BLOB,
};
update_camera_metadata_entry(metadata, index, &contents[0],
      sizeof(contents)/sizeof(contents[0]), &updated_entry);
-
-

If the HAL claims to support any of the capabilities listed in the -above details, then it must also support all the input-output -combinations listed for that capability. It can optionally support -additional formats if it so chooses.

-
- android.scaler.availableStreamConfigurations - - int32 - x - - - n x 4 - - [ndk_public as streamConfiguration] - - - [legacy] - - - -
    -
  • - OUTPUT -
  • -
  • - INPUT -
  • -
- -
-

The available stream configurations that this -camera device supports -(i.e. format, width, height, output/input stream).

-
- -
Details
-

The configurations are listed as (format, width, height, input?) -tuples.

-

For a given use case, the actual maximum supported resolution -may be lower than what is listed here, depending on the destination -Surface for the image data. For example, for recording video, -the video encoder chosen may have a maximum size limit (e.g. 1080p) -smaller than what the camera (e.g. maximum resolution is 3264x2448) -can provide.

-

Please reference the documentation for the image data destination to -check if it limits the maximum size for image data.

-

Not all output formats may be supported in a configuration with -an input stream of a particular format. For more details, see -android.scaler.availableInputOutputFormatsMap.

-

The following table describes the minimum required output stream -configurations based on the hardware level -(android.info.supportedHardwareLevel):

Format                 | Size                                                               | Hardware Level | Notes
JPEG                   | android.sensor.info.activeArraySize                                | Any            |
JPEG                   | 1920x1080 (1080p)                                                  | Any            | if 1080p <= activeArraySize
JPEG                   | 1280x720 (720p)                                                    | Any            | if 720p <= activeArraySize
JPEG                   | 640x480 (480p)                                                     | Any            | if 480p <= activeArraySize
JPEG                   | 320x240 (240p)                                                     | Any            | if 240p <= activeArraySize
YUV_420_888            | all output sizes available for JPEG                                | FULL           |
YUV_420_888            | all output sizes available for JPEG, up to the maximum video size  | LIMITED        |
IMPLEMENTATION_DEFINED | same as YUV_420_888                                                | Any            |
-

Refer to android.request.availableCapabilities for additional -mandatory stream configurations on a per-capability basis.

-
HAL Implementation Details
-

It is recommended (but not mandatory) to also include half/quarter -of sensor maximum resolution for JPEG formats (regardless of hardware -level).

-

(The following is a rewording of the above required table):

-

For the JPEG format, the sizes may be restricted by the conditions below:

-
    -
  • The HAL may choose the aspect ratio of each Jpeg size to be one of well known ones -(e.g. 4:3, 16:9, 3:2 etc.). If the sensor maximum resolution -(defined by android.sensor.info.activeArraySize) has an aspect ratio other than these, -it does not have to be included in the supported JPEG sizes.
  • -
  • Some hardware JPEG encoders may have pixel boundary alignment requirements, such as -the dimensions being a multiple of 16.
  • -
-

Therefore, the maximum JPEG size may be smaller than the sensor maximum resolution. However, the largest JPEG size must be as close as possible to the sensor maximum resolution given the above constraints; after aspect ratio adjustments, any additional size reduction due to other issues must be less than 3% in area. For example, if the sensor maximum resolution is 3280x2464, the maximum JPEG size has a 4:3 aspect ratio, and the JPEG encoder alignment requirement is 16, then the maximum JPEG size will be 3264x2448.
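The 3264x2448 figure in the example above can be derived as follows; this is an illustrative calculation, not a required algorithm:

// Largest exact-4:3 size with both dimensions 16-aligned that fits in a
// 3280x2464 sensor. With width = 4t and height = 3t, 16-alignment of both
// dimensions requires t to be a multiple of 16, i.e. width = 64k, height = 48k.
int32_t sensorW = 3280, sensorH = 2464;
int32_t k = sensorW / 64;                  // 51
if (sensorH / 48 < k) k = sensorH / 48;    // still 51
int32_t jpegW = 64 * k;                    // 3264
int32_t jpegH = 48 * k;                    // 2448
// Area reduction: 1 - (3264*2448)/(3280*2464) is about 1.1%, within the 3% limit.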

-

For FULL capability devices (android.info.supportedHardwareLevel == FULL), -the HAL must include all YUV_420_888 sizes that have JPEG sizes listed -here as output streams.

-

It must also include each of the resolutions below if it is smaller than or equal to the sensor maximum resolution (for both YUV_420_888 and JPEG formats), as output streams:

-
    -
  • 240p (320 x 240)
  • -
  • 480p (640 x 480)
  • -
  • 720p (1280 x 720)
  • -
  • 1080p (1920 x 1080)
  • -
-

For LIMITED capability devices -(android.info.supportedHardwareLevel == LIMITED), -the HAL only has to list up to the maximum video size -supported by the device.

-

Regardless of hardware level, every output resolution available for -YUV_420_888 must also be available for IMPLEMENTATION_DEFINED.

-

This supersedes the following fields, which are now deprecated:

-
    -
  • availableFormats
  • -
  • available[Processed,Raw,Jpeg]Sizes
  • -
-
- android.scaler.availableMinFrameDurations - - int64 - x - - - 4 x n - - [ndk_public as streamConfigurationDuration] - - - [legacy] - - - - - -

This lists the minimum frame duration for each -format/size combination.

-
- (format, width, height, ns) x n - -
Details
-

This should correspond to the frame duration when only that -stream is active, with all processing (typically in android.*.mode) -set to either OFF or FAST.

-

When multiple streams are used in a request, the minimum frame -duration will be max(individual stream min durations).

-

The minimum frame duration of a stream (of a particular format, size) -is the same regardless of whether the stream is input or output.

-

See android.sensor.frameDuration and -android.scaler.availableStallDurations for more details about -calculating the max frame rate.

-

(Keep in sync with -StreamConfigurationMap#getOutputMinFrameDuration)

-
- android.scaler.availableStallDurations - - int64 - x - - - 4 x n - - [ndk_public as streamConfigurationDuration] - - - [legacy] - - - - - -

This lists the maximum stall duration for each -output format/size combination.

-
- (format, width, height, ns) x n - -
Details
-

A stall duration is how much extra time would get added -to the normal minimum frame duration for a repeating request -that has streams with non-zero stall.

-

For example, consider JPEG captures which have the following -characteristics:

-
    -
  • JPEG streams act like processed YUV streams in requests for which -they are not included; in requests in which they are directly -referenced, they act as JPEG streams. This is because supporting a -JPEG stream requires the underlying YUV data to always be ready for -use by a JPEG encoder, but the encoder will only be used (and impact -frame duration) on requests that actually reference a JPEG stream.
  • -
  • The JPEG processor can run concurrently to the rest of the camera -pipeline, but cannot process more than 1 capture at a time.
  • -
-

In other words, using a repeating YUV request would result -in a steady frame rate (let's say it's 30 FPS). If a single -JPEG request is submitted periodically, the frame rate will stay -at 30 FPS (as long as we wait for the previous JPEG to return each -time). If we try to submit a repeating YUV + JPEG request, then -the frame rate will drop from 30 FPS.

-

In general, submitting a new request with a non-0 stall time -stream will not cause a frame rate drop unless there are still -outstanding buffers for that stream from previous requests.

-

Submitting a repeating request with a set of streams (call it S) results in a minimum frame duration equal to the normal minimum frame duration corresponding to S plus the maximum stall duration for S.
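A minimal sketch of that rule, assuming the per-stream minimum and stall durations for S have already been looked up (the array and function names are illustrative):

#include <stddef.h>
#include <stdint.h>

// Steady-state frame duration of a repeating request whose streams all appear
// in every request: max per-stream minimum duration plus max stall duration.
static int64_t repeating_frame_duration(const int64_t *minDurations,
                                        const int64_t *stallDurations, size_t n) {
    int64_t minDur = 0, maxStall = 0;
    for (size_t i = 0; i < n; i++) {
        if (minDurations[i] > minDur) minDur = minDurations[i];
        if (stallDurations[i] > maxStall) maxStall = stallDurations[i];
    }
    return minDur + maxStall;  // nanoseconds
}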

-

If interleaving requests with and without a stall duration, -a request will stall by the maximum of the remaining times -for each can-stall stream with outstanding buffers.

-

This means that a stalling request will not have an exposure start -until the stall has completed.

-

This should correspond to the stall duration when only that stream is -active, with all processing (typically in android.*.mode) set to FAST -or OFF. Setting any of the processing modes to HIGH_QUALITY -effectively results in an indeterminate stall duration for all -streams in a request (the regular stall calculation rules are -ignored).

-

The following formats may always have a stall duration:

- -

The following formats will never have a stall duration:

- -

All other formats may or may not have an allowed stall duration on -a per-capability basis; refer to android.request.availableCapabilities -for more details.

-

See android.sensor.frameDuration for more information about -calculating the max frame rate (absent stalls).

-

(Keep up to date with -StreamConfigurationMap#getOutputStallDuration )

-
HAL Implementation Details
-

If possible, it is recommended that all non-JPEG formats -(such as RAW16) should not have a stall duration. RAW10, RAW12, RAW_OPAQUE -and IMPLEMENTATION_DEFINED must not have stall durations.

-
- android.scaler.streamConfigurationMap - - int32 - - [java_public as streamConfigurationMap] - - [synthetic] - - [legacy] - - - - - -

The available stream configurations that this -camera device supports; also includes the minimum frame durations -and the stall durations for each format/size combination.

-
- -
Details
-

All camera devices will support sensor maximum resolution (defined by -android.sensor.info.activeArraySize) for the JPEG format.

-

For a given use case, the actual maximum supported resolution -may be lower than what is listed here, depending on the destination -Surface for the image data. For example, for recording video, -the video encoder chosen may have a maximum size limit (e.g. 1080p) -smaller than what the camera (e.g. maximum resolution is 3264x2448) -can provide.

-

Please reference the documentation for the image data destination to -check if it limits the maximum size for image data.

-

The following table describes the minimum required output stream -configurations based on the hardware level -(android.info.supportedHardwareLevel):

Format                  | Size                                                               | Hardware Level | Notes
ImageFormat#JPEG        | android.sensor.info.activeArraySize (*1)                           | Any            |
ImageFormat#JPEG        | 1920x1080 (1080p)                                                  | Any            | if 1080p <= activeArraySize
ImageFormat#JPEG        | 1280x720 (720p)                                                    | Any            | if 720p <= activeArraySize
ImageFormat#JPEG        | 640x480 (480p)                                                     | Any            | if 480p <= activeArraySize
ImageFormat#JPEG        | 320x240 (240p)                                                     | Any            | if 240p <= activeArraySize
ImageFormat#YUV_420_888 | all output sizes available for JPEG                                | FULL           |
ImageFormat#YUV_420_888 | all output sizes available for JPEG, up to the maximum video size  | LIMITED        |
ImageFormat#PRIVATE     | same as YUV_420_888                                                | Any            |
-

Refer to android.request.availableCapabilities and CameraDevice#createCaptureSession for additional mandatory -stream configurations on a per-capability basis.

-

*1: For the JPEG format, the sizes may be restricted by the conditions below:

-
    -
  • The HAL may choose the aspect ratio of each Jpeg size to be one of well known ones -(e.g. 4:3, 16:9, 3:2 etc.). If the sensor maximum resolution -(defined by android.sensor.info.activeArraySize) has an aspect ratio other than these, -it does not have to be included in the supported JPEG sizes.
  • -
  • Some hardware JPEG encoders may have pixel boundary alignment requirements, such as -the dimensions being a multiple of 16. -Therefore, the maximum JPEG size may be smaller than sensor maximum resolution. -However, the largest JPEG size will be as close as possible to the sensor maximum -resolution given above constraints. It is required that after aspect ratio adjustments, -additional size reduction due to other issues must be less than 3% in area. For example, -if the sensor maximum resolution is 3280x2464, if the maximum JPEG size has aspect -ratio 4:3, and the JPEG encoder alignment requirement is 16, the maximum JPEG size will be -3264x2448.
  • -
-
HAL Implementation Details
-

Do not set this property directly -(it is synthetic and will not be available at the HAL layer); -set the android.scaler.availableStreamConfigurations instead.

-

Not all output formats may be supported in a configuration with -an input stream of a particular format. For more details, see -android.scaler.availableInputOutputFormatsMap.

-

It is recommended (but not mandatory) to also include half/quarter -of sensor maximum resolution for JPEG formats (regardless of hardware -level).

-

(The following is a rewording of the above required table):

-

The HAL must include sensor maximum resolution (defined by -android.sensor.info.activeArraySize).

-

For FULL capability devices (android.info.supportedHardwareLevel == FULL), -the HAL must include all YUV_420_888 sizes that have JPEG sizes listed -here as output streams.

-

It must also include each of the resolutions below if it is smaller than or equal to the sensor maximum resolution (for both YUV_420_888 and JPEG formats), as output streams:

-
    -
  • 240p (320 x 240)
  • -
  • 480p (640 x 480)
  • -
  • 720p (1280 x 720)
  • -
  • 1080p (1920 x 1080)
  • -
-

For LIMITED capability devices -(android.info.supportedHardwareLevel == LIMITED), -the HAL only has to list up to the maximum video size -supported by the device.

-

Regardless of hardware level, every output resolution available for -YUV_420_888 must also be available for IMPLEMENTATION_DEFINED.

-

This supersedes the following fields, which are now deprecated:

-
    -
  • availableFormats
  • -
  • available[Processed,Raw,Jpeg]Sizes
  • -
-
- android.scaler.croppingType - - byte - - [public] - - - [legacy] - - - -
    -
  • - CENTER_ONLY -

    The camera device only supports centered crop regions.

    -
  • -
  • - FREEFORM -

    The camera device supports arbitrarily chosen crop regions.

    -
  • -
- -
-

The crop type that this camera device supports.

-
- -
Details
-

When passing a non-centered crop region (android.scaler.cropRegion) to a camera -device that only supports CENTER_ONLY cropping, the camera device will move the -crop region to the center of the sensor active array (android.sensor.info.activeArraySize) -and keep the crop region width and height unchanged. The camera device will return the -final used crop region in metadata result android.scaler.cropRegion.
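A sketch of that recentering behavior; the struct and function names are illustrative, and coordinates are relative to the active array with (0, 0) at its top-left, as for android.scaler.cropRegion:

#include <stdint.h>

typedef struct { int32_t left, top, width, height; } rect_t;  /* illustrative */

/* CENTER_ONLY devices keep the requested width/height but move the crop
 * region to the center of the active array. */
static rect_t center_crop(rect_t req, int32_t activeW, int32_t activeH) {
    rect_t out = req;
    out.left = (activeW - req.width)  / 2;
    out.top  = (activeH - req.height) / 2;
    return out;
}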

-

Camera devices that support FREEFORM cropping will support any crop region that -is inside of the active array. The camera device will apply the same crop region and -return the final used crop region in capture result metadata android.scaler.cropRegion.

-

LEGACY capability devices will only support CENTER_ONLY cropping.

-
dynamic
Property Name | Type | Description | Units | Range | Tags
- android.scaler.cropRegion - - int32 - x - - - 4 - - [public as rectangle] - - - [legacy] - - - - - -

The desired region of the sensor to read out for this capture.

-
- Pixel coordinates relative to - android.sensor.info.activeArraySize - -
Details
-

This control can be used to implement digital zoom.

-

The crop region coordinate system is based off -android.sensor.info.activeArraySize, with (0, 0) being the -top-left corner of the sensor active array.

-

Output streams use this rectangle to produce their output, -cropping to a smaller region if necessary to maintain the -stream's aspect ratio, then scaling the sensor input to -match the output's configured resolution.

-

The crop region is applied after the RAW to other color -space (e.g. YUV) conversion. Since raw streams -(e.g. RAW16) don't have the conversion stage, they are not -croppable. The crop region will be ignored by raw streams.

-

For non-raw streams, any additional per-stream cropping will -be done to maximize the final pixel area of the stream.

-

For example, if the crop region is set to a 4:3 aspect -ratio, then 4:3 streams will use the exact crop -region. 16:9 streams will further crop vertically -(letterbox).

-

Conversely, if the crop region is set to a 16:9, then 4:3 -outputs will crop horizontally (pillarbox), and 16:9 -streams will match exactly. These additional crops will -be centered within the crop region.

-

The width and height of the crop region cannot -be set to be smaller than -floor( activeArraySize.width / android.scaler.availableMaxDigitalZoom ) and -floor( activeArraySize.height / android.scaler.availableMaxDigitalZoom ), respectively.
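For example, a minimal sketch of the smallest legal crop dimensions implied by android.scaler.availableMaxDigitalZoom (the function name and the sample values are illustrative):

#include <math.h>
#include <stdint.h>

// Smallest crop width/height permitted by the maximum digital zoom ratio.
static void min_crop_size(int32_t activeW, int32_t activeH, float maxZoom,
                          int32_t *minW, int32_t *minH) {
    *minW = (int32_t) floorf(activeW / maxZoom);
    *minH = (int32_t) floorf(activeH / maxZoom);
}
// e.g. a 4000x3000 active array with maxZoom = 4.0 yields a 1000x750 minimum crop.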

-

The camera device may adjust the crop region to account -for rounding and other hardware requirements; the final -crop region used will be included in the output capture -result.

-
HAL Implementation Details
-

The output streams must maintain square pixels at all times, no matter what the relative aspect ratios of the crop region and the stream are. Negative values for the crop region corners are allowed for raw output if the full pixel array is larger than the active pixel array. Width and height may be rounded to the nearest larger supportable size, especially for raw output, where only a few fixed scales may be possible.

-

For a set of output streams configured, if the sensor output is cropped to a smaller size than the active array size, the HAL needs to follow the cropping rules below:

-
    -
  • -

    The HAL needs to handle the cropRegion as if the sensor crop size were the effective active array size. More specifically, the HAL must transform the requested cropRegion from android.sensor.info.activeArraySize to the sensor cropped pixel area size in this way:

    -
      -
    1. Translate the requested cropRegion w.r.t. the top-left corner of the sensor cropped pixel area by (tx, ty), where ty = sensorCrop.top * (sensorCrop.height / activeArraySize.height) and tx = sensorCrop.left * (sensorCrop.width / activeArraySize.width). The (sensorCrop.top, sensorCrop.left) coordinate is based off android.sensor.info.activeArraySize.
    2. -
    3. Scale the width and height of the requested cropRegion with scaling factors of sensorCrop.width/activeArraySize.width and sensorCrop.height/activeArraySize.height respectively. Once this new cropRegion is calculated, the HAL must use this region to crop the image with regard to the sensor crop size (effective active array size). The HAL still needs to follow the general cropping rule for this new cropRegion and effective active array size.
    4. -
    -
  • -
  • -

    The HAL must report the cropRegion with regard to android.sensor.info.activeArraySize. The HAL needs to convert the new cropRegion generated above back w.r.t. the full active array size. The reported cropRegion may be slightly different from the requested cropRegion, since the HAL may adjust the crop region to account for rounding, conversion error, or other hardware limitations.

    -
  • -
-

HAL2.x uses only (x, y, width)

-
sensor
controls
Property Name | Type | Description | Units | Range | Tags
- android.sensor.exposureTime - - int64 - - [public] - - - [full] - - - - - -

Duration each pixel is exposed to -light.

-
- Nanoseconds - -

android.sensor.info.exposureTimeRange

-
Details
-

If the sensor can't expose this exact duration, it will shorten the -duration exposed to the nearest possible value (rather than expose longer). -The final exposure time used will be available in the output capture result.

-

This control is only effective if android.control.aeMode or android.control.mode is set to -OFF; otherwise the auto-exposure algorithm will override this value.

-
- android.sensor.frameDuration - - int64 - - [public] - - - [full] - - - - - -

Duration from start of frame exposure to -start of next frame exposure.

-
- Nanoseconds - -

See android.sensor.info.maxFrameDuration, -android.scaler.streamConfigurationMap. The duration -is capped to max(duration, exposureTime + overhead).

-
Details
-

The maximum frame rate that can be supported by a camera subsystem is -a function of many factors:

-
    -
  • Requested resolutions of output image streams
  • -
  • Availability of binning / skipping modes on the imager
  • -
  • The bandwidth of the imager interface
  • -
  • The bandwidth of the various ISP processing blocks
  • -
-

Since these factors can vary greatly between different ISPs and -sensors, the camera abstraction tries to represent the bandwidth -restrictions with as simple a model as possible.

-

The model presented has the following characteristics:

-
    -
  • The image sensor is always configured to output the smallest -resolution possible given the application's requested output stream -sizes. The smallest resolution is defined as being at least as large -as the largest requested output stream size; the camera pipeline must -never digitally upsample sensor data when the crop region covers the -whole sensor. In general, this means that if only small output stream -resolutions are configured, the sensor can provide a higher frame -rate.
  • -
  • Since any request may use any or all the currently configured -output streams, the sensor and ISP must be configured to support -scaling a single capture to all the streams at the same time. This -means the camera pipeline must be ready to produce the largest -requested output size without any delay. Therefore, the overall -frame rate of a given configured stream set is governed only by the -largest requested stream resolution.
  • -
  • Using more than one output stream in a request does not affect the -frame duration.
  • -
  • Certain format-streams may need to do additional background processing -before data is consumed/produced by that stream. These processors -can run concurrently to the rest of the camera pipeline, but -cannot process more than 1 capture at a time.
  • -
-

The necessary information for the application, given the model above, -is provided via the android.scaler.streamConfigurationMap field using -StreamConfigurationMap#getOutputMinFrameDuration. -These are used to determine the maximum frame rate / minimum frame -duration that is possible for a given stream configuration.

-

Specifically, the application can use the following rules to determine the minimum frame duration it can request from the camera device (a short sketch follows the list):

-
    -
  1. Let the set of currently configured input/output streams -be called S.
  2. -
  3. Find the minimum frame durations for each stream in S, by looking -it up in android.scaler.streamConfigurationMap using StreamConfigurationMap#getOutputMinFrameDuration -(with its respective size/format). Let this set of frame durations be -called F.
  4. -
  5. For any given request R, the minimum frame duration allowed -for R is the maximum out of all values in F. Let the streams -used in R be called S_r.
  6. -
-
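As a sketch of the last rule above, assuming the per-stream minimum durations F have already been looked up (the function name is illustrative):

#include <stddef.h>
#include <stdint.h>

// Minimum frame duration allowed for a request R: the maximum over the
// per-stream minimum frame durations F of the streams used in R.
static int64_t request_min_frame_duration(const int64_t *F, size_t n) {
    int64_t d = 0;
    for (size_t i = 0; i < n; i++) {
        if (F[i] > d) d = F[i];
    }
    return d;  // nanoseconds
}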

If none of the streams in S_r have a stall time (listed in StreamConfigurationMap#getOutputStallDuration -using its respective size/format), then the frame duration in F -determines the steady state frame rate that the application will get -if it uses R as a repeating request. Let this special kind of -request be called Rsimple.

-

A repeating request Rsimple can be occasionally interleaved -by a single capture of a new request Rstall (which has at least -one in-use stream with a non-0 stall time) and if Rstall has the -same minimum frame duration this will not cause a frame rate loss -if all buffers from the previous Rstall have already been -delivered.

-

For more details about stalling, see -StreamConfigurationMap#getOutputStallDuration.

-

This control is only effective if android.control.aeMode or android.control.mode is set to -OFF; otherwise the auto-exposure algorithm will override this value.

-
HAL Implementation Details
-

For more details about stalling, see -android.scaler.availableStallDurations.

-
- android.sensor.sensitivity - - int32 - - [public] - - - [full] - - - - - -

The amount of gain applied to sensor data -before processing.

-
- ISO arithmetic units - -

android.sensor.info.sensitivityRange

-
Details
-

The sensitivity is the standard ISO sensitivity value, -as defined in ISO 12232:2006.

-

The sensitivity must be within android.sensor.info.sensitivityRange, and if it is less than android.sensor.maxAnalogSensitivity, the camera device is guaranteed to use only analog amplification for applying the gain.

-

If the camera device cannot apply the exact sensitivity -requested, it will reduce the gain to the nearest supported -value. The final sensitivity used will be available in the -output capture result.

-

This control is only effective if android.control.aeMode or android.control.mode is set to -OFF; otherwise the auto-exposure algorithm will override this value.

-
HAL Implementation Details
-

ISO 12232:2006 REI method is acceptable.

-
- android.sensor.testPatternData - - int32 - x - - - 4 - - [public] - - - - - - - -

A pixel [R, G_even, G_odd, B] that supplies the test pattern -when android.sensor.testPatternMode is SOLID_COLOR.

-
- -
Details
-

Each color channel is treated as an unsigned 32-bit integer. -The camera device then uses the most significant X bits -that correspond to how many bits are in its Bayer raw sensor -output.

-

For example, a sensor with RAW10 Bayer output would use the -10 most significant bits from each color channel.
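For instance, with illustrative values (this snippet just demonstrates the bit selection described above):

#include <stdint.h>

// A RAW10 sensor keeps the 10 most significant bits of each 32-bit channel
// value from android.sensor.testPatternData.
uint32_t channel = 0xFFFFFFFFu;            /* 100% intensity */
uint32_t raw10   = channel >> (32 - 10);   /* 0x3FF, full scale in 10 bits */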

-
HAL Implementation Details
- -
- android.sensor.testPatternMode - - int32 - - [public] - - - - - -
    -
  • - OFF -

    No test pattern mode is used, and the camera -device returns captures from the image sensor.

    -

    This is the default if the key is not set.

    -
  • -
  • - SOLID_COLOR -

    Each pixel in [R, G_even, G_odd, B] is replaced by its -respective color channel provided in -android.sensor.testPatternData.

    -

    For example:

    -
    android.testPatternData = [0, 0xFFFFFFFF, 0xFFFFFFFF, 0]
    -
    -

    All green pixels are 100% green. All red/blue pixels are black.

    -
    android.testPatternData = [0xFFFFFFFF, 0, 0xFFFFFFFF, 0]
    -
    -

    All red pixels are 100% red. Only the odd green pixels -are 100% green. All blue pixels are 100% black.

    -
  • -
  • - COLOR_BARS -

    All pixel data is replaced with an 8-bar color pattern.

    -

    The vertical bars (left-to-right) are as follows:

    -
      -
    • 100% white
    • -
    • yellow
    • -
    • cyan
    • -
    • green
    • -
    • magenta
    • -
    • red
    • -
    • blue
    • -
    • black
    • -
    -

    In general the image would look like the following:

    -
    W Y C G M R B K
    W Y C G M R B K
    W Y C G M R B K
    W Y C G M R B K
    W Y C G M R B K
    . . . . . . . .
    . . . . . . . .
    . . . . . . . .

    (B = Blue, K = Black)
    -
    -

    Each bar should take up 1/8 of the sensor pixel array width. -When this is not possible, the bar size should be rounded -down to the nearest integer and the pattern can repeat -on the right side.

    -

    Each bar's height must always take up the full sensor -pixel array height.

    -

    Each pixel in this test pattern must be set to either -0% intensity or 100% intensity.

    -
  • -
  • - COLOR_BARS_FADE_TO_GRAY -

    The test pattern is similar to COLOR_BARS, except that -each bar should start at its specified color at the top, -and fade to gray at the bottom.

    -

    Furthermore each bar is further subdivided into a left and -right half. The left half should have a smooth gradient, -and the right half should have a quantized gradient.

    -

    In particular, the right half should consist of blocks of the same color, each 1/16th of the active sensor pixel array width wide.

    -

    The least significant bits in the quantized gradient should -be copied from the most significant bits of the smooth gradient.

    -

    The height of each bar should always be a multiple of 128. -When this is not the case, the pattern should repeat at the bottom -of the image.

    -
  • -
  • - PN9 -

    All pixel data is replaced by a pseudo-random sequence -generated from a PN9 512-bit sequence (typically implemented -in hardware with a linear feedback shift register).

    -

    The generator should be reset at the beginning of each frame, -and thus each subsequent raw frame with this test pattern should -be exactly the same as the last.

    -
  • -
  • - CUSTOM1 - 256 -

    The first custom test pattern. All custom patterns that are -available only on this camera device are at least this numeric -value.

    -

    All of the custom test patterns will be static -(that is the raw image must not vary from frame to frame).

    -
  • -
- -
-

When enabled, the sensor sends a test pattern instead of -doing a real exposure from the camera.

-
- -

android.sensor.availableTestPatternModes

-
Details
-

When a test pattern is enabled, all manual sensor controls specified -by android.sensor.* will be ignored. All other controls should -work as normal.

-

For example, if manual flash is enabled, flash firing should still -occur (and that the test pattern remain unmodified, since the flash -would not actually affect it).

-

Defaults to OFF.

-
HAL Implementation Details
-

All test patterns are specified in the Bayer domain.

-

The HAL may choose to substitute test patterns from the sensor -with test patterns from on-device memory. In that case, it should be -indistinguishable to the ISP whether the data came from the -sensor interconnect bus (such as CSI2) or memory.

-
static
Property Name | Type | Description | Units | Range | Tags
- android.sensor.info.activeArraySize - - int32 - x - - - 4 - - [public as rectangle] - - - [legacy] - - -
Four ints defining the active pixel rectangle
- - -
-

The area of the image sensor which corresponds to active pixels after any geometric -distortion correction has been applied.

-
- Pixel coordinates on the image sensor - -
Details
-

This is the rectangle representing the size of the active region of the sensor (i.e. -the region that actually receives light from the scene) after any geometric correction -has been applied, and should be treated as the maximum size in pixels of any of the -image output formats aside from the raw formats.

-

This rectangle is defined relative to the full pixel array; (0,0) is the top-left of -the full pixel array, and the size of the full pixel array is given by -android.sensor.info.pixelArraySize.

-

The coordinate system for most other keys that list pixel coordinates, including -android.scaler.cropRegion, is defined relative to the active array rectangle given in -this field, with (0, 0) being the top-left of this rectangle.

-

The active array may be smaller than the full pixel array, since the full array may -include black calibration pixels or other inactive regions, and geometric correction -resulting in scaling or cropping may have been applied.

-
HAL Implementation Details
-

This array contains (xmin, ymin, width, height). The (xmin, ymin) must be >= (0, 0). The (width, height) must be <= android.sensor.info.pixelArraySize.

-
- android.sensor.info.sensitivityRange - - int32 - x - - - 2 - - [public as rangeInt] - - - [full] - - -
Range of supported sensitivities
- - -
-

Range of sensitivities for android.sensor.sensitivity supported by this -camera device.

-
- -

Min <= 100, Max >= 800

-
Details
-

The values are the standard ISO sensitivity values, -as defined in ISO 12232:2006.

-
- android.sensor.info.colorFilterArrangement - - byte - - [public] - - - [full] - - - -
    -
  • - RGGB -
  • -
  • - GRBG -
  • -
  • - GBRG -
  • -
  • - BGGR -
  • -
  • - RGB -

    Sensor is not Bayer; output has 3 16-bit -values for each pixel, instead of just 1 16-bit value -per pixel.

    -
  • -
- -
-

The arrangement of color filters on sensor; -represents the colors in the top-left 2x2 section of -the sensor, in reading order.

-
- -
- android.sensor.info.exposureTimeRange - - int64 - x - - - 2 - - [public as rangeLong] - - - [full] - - -
nanoseconds
- - -
-

The range of image exposure times for android.sensor.exposureTime supported -by this camera device.

-
- Nanoseconds - -

The minimum exposure time will be less than 100 us. For FULL -capability devices (android.info.supportedHardwareLevel == FULL), -the maximum exposure time will be greater than 100ms.

-
HAL Implementation Details
-

For FULL capability devices (android.info.supportedHardwareLevel == FULL), -The maximum of the range SHOULD be at least 1 second (1e9), MUST be at least -100ms.

-
- android.sensor.info.maxFrameDuration - - int64 - - [public] - - - [full] - - - - - -

The maximum possible frame duration (minimum frame rate) for android.sensor.frameDuration that is supported by this camera device.

-
- Nanoseconds - -

For FULL capability devices -(android.info.supportedHardwareLevel == FULL), at least 100ms.

-
Details
-

Attempting to use frame durations beyond the maximum will result in the frame -duration being clipped to the maximum. See that control for a full definition of frame -durations.

-

Refer to StreamConfigurationMap#getOutputMinFrameDuration -for the minimum frame duration values.

-
HAL Implementation Details
-

For FULL capability devices (android.info.supportedHardwareLevel == FULL), -The maximum of the range SHOULD be at least -1 second (1e9), MUST be at least 100ms (100e6).

-

android.sensor.info.maxFrameDuration must be greater or -equal to the android.sensor.info.exposureTimeRange max -value (since exposure time overrides frame duration).

-

Available minimum frame durations for JPEG must be no greater -than that of the YUV_420_888/IMPLEMENTATION_DEFINED -minimum frame durations (for that respective size).

-

Since JPEG processing is considered offline and can take longer than -a single uncompressed capture, refer to -android.scaler.availableStallDurations -for details about encoding this scenario.

-
- android.sensor.info.physicalSize - - float - x - - - 2 - - [public as sizeF] - - - [legacy] - - -
width x height
- - -
-

The physical dimensions of the full pixel -array.

-
- Millimeters - -
Details
-

This is the physical size of the sensor pixel -array defined by android.sensor.info.pixelArraySize.

-
HAL Implementation Details
-

Needed for FOV calculation for old API

-
- android.sensor.info.pixelArraySize - - int32 - x - - - 2 - - [public as size] - - - [legacy] - - - - - -

Dimensions of the full pixel array, possibly -including black calibration pixels.

-
- Pixels - -
Details
-

The pixel count of the full pixel array of the image sensor, which covers -android.sensor.info.physicalSize area. This represents the full pixel dimensions of -the raw buffers produced by this sensor.

-

If a camera device supports raw sensor formats, either this or -android.sensor.info.preCorrectionActiveArraySize is the maximum dimensions for the raw -output formats listed in android.scaler.streamConfigurationMap (this depends on -whether or not the image sensor returns buffers containing pixels that are not -part of the active array region for blacklevel calibration or other purposes).

-

Some parts of the full pixel array may not receive light from the scene, -or be otherwise inactive. The android.sensor.info.preCorrectionActiveArraySize key -defines the rectangle of active pixels that will be included in processed image -formats.

-
- android.sensor.info.whiteLevel - - int32 - - [public] - - - - - - - -

Maximum raw value output by sensor.

-
- -

> 255 (8-bit output)

-
Details
-

This specifies the fully-saturated encoding level for the raw -sample values from the sensor. This is typically caused by the -sensor becoming highly non-linear or clipping. The minimum for -each channel is specified by the offset in the -android.sensor.blackLevelPattern key.

-

The white level is typically determined either by sensor bit depth -(8-14 bits is expected), or by the point where the sensor response -becomes too non-linear to be useful. The default value for this is -maximum representable value for a 16-bit raw sample (2^16 - 1).

-

The white level values of captured images may vary for different -capture settings (e.g., android.sensor.sensitivity). This key -represents a coarse approximation for such case. It is recommended -to use android.sensor.dynamicWhiteLevel for captures when supported -by the camera device, which provides more accurate white level values.

-
HAL Implementation Details
-

The full bit depth of the sensor must be available in the raw data, -so the value for linear sensors should not be significantly lower -than maximum raw value supported, i.e. 2^(sensor bits per pixel).

-
- android.sensor.info.timestampSource - - byte - - [public] - - - [legacy] - - - -
    -
  • - UNKNOWN -

    Timestamps from android.sensor.timestamp are in nanoseconds and monotonic, -but can not be compared to timestamps from other subsystems -(e.g. accelerometer, gyro etc.), or other instances of the same or different -camera devices in the same system. Timestamps between streams and results for -a single camera instance are comparable, and the timestamps for all buffers -and the result metadata generated by a single capture are identical.

    -
  • -
  • - REALTIME -

    Timestamps from android.sensor.timestamp are in the same timebase as -SystemClock#elapsedRealtimeNanos, -and they can be compared to other timestamps using that base.

    -
  • -
- -
-

The time base source for sensor capture start timestamps.

-
- -
Details
-

The timestamps provided for captures are always in nanoseconds and monotonic, but may not be based on a time source that can be compared to other system time sources.

-

This characteristic defines the source for the timestamps, and therefore whether they -can be compared against other system time sources/timestamps.

-
HAL Implementation Details
-

For camera devices that implement UNKNOWN, the camera framework expects the timestamp source to be SYSTEM_TIME_MONOTONIC. For camera devices that implement REALTIME, the camera framework expects the timestamp source to be SYSTEM_TIME_BOOTTIME. See system/core/include/utils/Timers.h for the definitions of SYSTEM_TIME_MONOTONIC and SYSTEM_TIME_BOOTTIME. Note that the HAL must follow the above expectation; otherwise video recording might suffer unexpected behavior.

-

Also, camera devices that implement REALTIME must pass the ITS sensor fusion test, which checks the alignment between camera timestamps and gyro sensor timestamps.

-
- android.sensor.info.lensShadingApplied - - byte - - [public as boolean] - - - - - -
    -
  • - FALSE -
  • -
  • - TRUE -
  • -
- -
-

Whether the RAW images output from this camera device are subject to -lens shading correction.

-
- -
Details
-

If TRUE, all images produced by the camera device in the RAW image formats will -have lens shading correction already applied to it. If FALSE, the images will -not be adjusted for lens shading correction. -See android.request.maxNumOutputRaw for a list of RAW image formats.

-

This key will be null for all devices that do not report this information. Devices with RAW capability will always report this information in this key.

-
- android.sensor.info.preCorrectionActiveArraySize - - int32 - x - - - 4 - - [public as rectangle] - - - [legacy] - - -
Four ints defining the active pixel rectangle
- - -
-

The area of the image sensor which corresponds to active pixels prior to the -application of any geometric distortion correction.

-
- Pixel coordinates on the image sensor - -
Details
-

This is the rectangle representing the size of the active region of the sensor (i.e. -the region that actually receives light from the scene) before any geometric correction -has been applied, and should be treated as the active region rectangle for any of the -raw formats. All metadata associated with raw processing (e.g. the lens shading -correction map, and radial distortion fields) treats the top, left of this rectangle as -the origin, (0,0).

-

The size of this region determines the maximum field of view and the maximum number of -pixels that an image from this sensor can contain, prior to the application of -geometric distortion correction. The effective maximum pixel dimensions of a -post-distortion-corrected image is given by the android.sensor.info.activeArraySize -field, and the effective maximum field of view for a post-distortion-corrected image -can be calculated by applying the geometric distortion correction fields to this -rectangle, and cropping to the rectangle given in android.sensor.info.activeArraySize.

-

E.g. to calculate position of a pixel, (x,y), in a processed YUV output image with the -dimensions in android.sensor.info.activeArraySize given the position of a pixel, -(x', y'), in the raw pixel array with dimensions give in -android.sensor.info.pixelArraySize:

-
    -
  1. Choose a pixel (x', y') within the active array region of the raw buffer given in -android.sensor.info.preCorrectionActiveArraySize, otherwise this pixel is considered -to be outside of the FOV, and will not be shown in the processed output image.
  2. -
  3. Apply geometric distortion correction to get the post-distortion pixel coordinate, -(x_i, y_i). When applying geometric correction metadata, note that metadata for raw -buffers is defined relative to the top, left of the -android.sensor.info.preCorrectionActiveArraySize rectangle.
  4. -
  5. If the resulting corrected pixel coordinate is within the region given in -android.sensor.info.activeArraySize, then the position of this pixel in the -processed output image buffer is (x_i - activeArray.left, y_i - activeArray.top), -when the top, left coordinate of that buffer is treated as (0, 0).
  6. -
-

Thus, for pixel x',y' = (25, 25) on a sensor where android.sensor.info.pixelArraySize -is (100,100), android.sensor.info.preCorrectionActiveArraySize is (10, 10, 100, 100), -android.sensor.info.activeArraySize is (20, 20, 80, 80), and the geometric distortion -correction doesn't change the pixel coordinate, the resulting pixel selected in -pixel coordinates would be x,y = (25, 25) relative to the top,left of the raw buffer -with dimensions given in android.sensor.info.pixelArraySize, and would be (5, 5) -relative to the top,left of post-processed YUV output buffer with dimensions given in -android.sensor.info.activeArraySize.
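The worked example above, written out as a small C sketch; the struct and variable names are illustrative, and the geometric distortion correction is treated as a no-op:

#include <stdint.h>

typedef struct { int32_t left, top, width, height; } rect_t;  /* illustrative */

static void example(void) {
    rect_t activeArray = { 20, 20, 80, 80 };   /* android.sensor.info.activeArraySize */
    int32_t xr = 25, yr = 25;                  /* pixel in the raw buffer (pre-correction coords) */
    /* Identity distortion correction: (x_i, y_i) == (xr, yr). */
    int32_t xo = xr - activeArray.left;        /* 5 */
    int32_t yo = yr - activeArray.top;         /* 5 */
    (void) xo; (void) yo;                      /* position in the processed output buffer */
}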

-

The currently supported fields that correct for geometric distortion are:

-
    -
  1. android.lens.radialDistortion.
  2. -
-

If all of the geometric distortion fields are no-ops, this rectangle will be the same -as the post-distortion-corrected rectangle given in -android.sensor.info.activeArraySize.

-

This rectangle is defined relative to the full pixel array; (0,0) is the top-left of -the full pixel array, and the size of the full pixel array is given by -android.sensor.info.pixelArraySize.

-

The pre-correction active array may be smaller than the full pixel array, since the -full array may include black calibration pixels or other inactive regions.

-
HAL Implementation Details
-

This array contains (xmin, ymin, width, height). The (xmin, ymin) must be >= (0, 0). The (width, height) must be <= android.sensor.info.pixelArraySize.

-

If omitted by the HAL implementation, the camera framework will assume that this is -the same as the post-correction active array region given in -android.sensor.info.activeArraySize.

-
- android.sensor.referenceIlluminant1 - - byte - - [public] - - - - - -
    -
  • - DAYLIGHT - 1 -
  • -
  • - FLUORESCENT - 2 -
  • -
  • - TUNGSTEN - 3 -

    Incandescent light

    -
  • -
  • - FLASH - 4 -
  • -
  • - FINE_WEATHER - 9 -
  • -
  • - CLOUDY_WEATHER - 10 -
  • -
  • - SHADE - 11 -
  • -
  • - DAYLIGHT_FLUORESCENT - 12 -

    D 5700 - 7100K

    -
  • -
  • - DAY_WHITE_FLUORESCENT - 13 -

    N 4600 - 5400K

    -
  • -
  • - COOL_WHITE_FLUORESCENT - 14 -

    W 3900 - 4500K

    -
  • -
  • - WHITE_FLUORESCENT - 15 -

    WW 3200 - 3700K

    -
  • -
  • - STANDARD_A - 17 -
  • -
  • - STANDARD_B - 18 -
  • -
  • - STANDARD_C - 19 -
  • -
  • - D55 - 20 -
  • -
  • - D65 - 21 -
  • -
  • - D75 - 22 -
  • -
  • - D50 - 23 -
  • -
  • - ISO_STUDIO_TUNGSTEN - 24 -
  • -
- -
-

The standard reference illuminant used as the scene light source when -calculating the android.sensor.colorTransform1, -android.sensor.calibrationTransform1, and -android.sensor.forwardMatrix1 matrices.

-
- -
Details
-

The values in this key correspond to the values defined for the EXIF LightSource tag. These illuminants are standard light sources that are often used when calibrating camera devices.

-

If this key is present, then android.sensor.colorTransform1, -android.sensor.calibrationTransform1, and -android.sensor.forwardMatrix1 will also be present.

-

Some devices may choose to provide a second set of calibration -information for improved quality, including -android.sensor.referenceIlluminant2 and its corresponding matrices.

-
HAL Implementation Details
-

The first reference illuminant (android.sensor.referenceIlluminant1) -and corresponding matrices must be present to support the RAW capability -and DNG output.

-

When producing raw images with a color profile that has only been -calibrated against a single light source, it is valid to omit -android.sensor.referenceIlluminant2 along with the -android.sensor.colorTransform2, android.sensor.calibrationTransform2, -and android.sensor.forwardMatrix2 matrices.

-

If only android.sensor.referenceIlluminant1 is included, it should be -chosen so that it is representative of typical scene lighting. In -general, D50 or DAYLIGHT will be chosen for this case.

-

If both android.sensor.referenceIlluminant1 and -android.sensor.referenceIlluminant2 are included, they should be -chosen to represent the typical range of scene lighting conditions. -In general, low color temperature illuminant such as Standard-A will -be chosen for the first reference illuminant and a higher color -temperature illuminant such as D65 will be chosen for the second -reference illuminant.

-
- android.sensor.referenceIlluminant2 - - byte - - [public] - - - - - - - -

The standard reference illuminant used as the scene light source when -calculating the android.sensor.colorTransform2, -android.sensor.calibrationTransform2, and -android.sensor.forwardMatrix2 matrices.

-
- -

Any value listed in android.sensor.referenceIlluminant1

-
Details
-

See android.sensor.referenceIlluminant1 for more details.

-

If this key is present, then android.sensor.colorTransform2, -android.sensor.calibrationTransform2, and -android.sensor.forwardMatrix2 will also be present.

-
- android.sensor.calibrationTransform1 - - rational - x - - - 3 x 3 - - [public as colorSpaceTransform] - - - - -
3x3 matrix in row-major-order
- - -
-

A per-device calibration transform matrix that maps from the -reference sensor colorspace to the actual device sensor colorspace.

-
- -
Details
-

This matrix is used to correct for per-device variations in the -sensor colorspace, and is used for processing raw buffer data.

-

The matrix is expressed as a 3x3 matrix in row-major-order, and -contains a per-device calibration transform that maps colors -from reference sensor color space (i.e. the "golden module" -colorspace) into this camera device's native sensor color -space under the first reference illuminant -(android.sensor.referenceIlluminant1).

-
- android.sensor.calibrationTransform2 - - rational - x - - - 3 x 3 - - [public as colorSpaceTransform] - - - - -
3x3 matrix in row-major-order
- - -
-

A per-device calibration transform matrix that maps from the -reference sensor colorspace to the actual device sensor colorspace -(this is the colorspace of the raw buffer data).

-
- -
Details
-

This matrix is used to correct for per-device variations in the -sensor colorspace, and is used for processing raw buffer data.

-

The matrix is expressed as a 3x3 matrix in row-major-order, and -contains a per-device calibration transform that maps colors -from reference sensor color space (i.e. the "golden module" -colorspace) into this camera device's native sensor color -space under the second reference illuminant -(android.sensor.referenceIlluminant2).

-

This matrix will only be present if the second reference -illuminant is present.

-
- android.sensor.colorTransform1 - - rational - x - - - 3 x 3 - - [public as colorSpaceTransform] - - - - -
3x3 matrix in row-major-order
- - -
-

A matrix that transforms color values from CIE XYZ color space to -reference sensor color space.

-
- -
Details
-

This matrix is used to convert from the standard CIE XYZ color -space to the reference sensor colorspace, and is used when processing -raw buffer data.

-

The matrix is expressed as a 3x3 matrix in row-major-order, and -contains a color transform matrix that maps colors from the CIE -XYZ color space to the reference sensor color space (i.e. the -"golden module" colorspace) under the first reference illuminant -(android.sensor.referenceIlluminant1).

-

The white points chosen in both the reference sensor color space -and the CIE XYZ colorspace when calculating this transform will -match the standard white point for the first reference illuminant -(i.e. no chromatic adaptation will be applied by this transform).

-
- android.sensor.colorTransform2 - - rational - x - - - 3 x 3 - - [public as colorSpaceTransform] - - - - -
3x3 matrix in row-major-order
- - -
-

A matrix that transforms color values from CIE XYZ color space to -reference sensor color space.

-
- -
Details
-

This matrix is used to convert from the standard CIE XYZ color -space to the reference sensor colorspace, and is used when processing -raw buffer data.

-

The matrix is expressed as a 3x3 matrix in row-major-order, and -contains a color transform matrix that maps colors from the CIE -XYZ color space to the reference sensor color space (i.e. the -"golden module" colorspace) under the second reference illuminant -(android.sensor.referenceIlluminant2).

-

The white points chosen in both the reference sensor color space -and the CIE XYZ colorspace when calculating this transform will -match the standard white point for the second reference illuminant -(i.e. no chromatic adaptation will be applied by this transform).

-

This matrix will only be present if the second reference -illuminant is present.

-
- android.sensor.forwardMatrix1 - - rational - x - - - 3 x 3 - - [public as colorSpaceTransform] - - - - -
3x3 matrix in row-major-order
- - -
-

A matrix that transforms white balanced camera colors from the reference -sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.

-
- -
Details
-

This matrix is used to convert to the standard CIE XYZ colorspace, and -is used when processing raw buffer data.

-

This matrix is expressed as a 3x3 matrix in row-major-order, and contains -a color transform matrix that maps white balanced colors from the -reference sensor color space to the CIE XYZ color space with a D50 white -point.

-

Under the first reference illuminant (android.sensor.referenceIlluminant1) -this matrix is chosen so that the standard white point for this reference -illuminant in the reference sensor colorspace is mapped to D50 in the -CIE XYZ colorspace.

-
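To make the matrix convention concrete, here is a minimal C++ sketch (illustrative only; the function name is not part of any HAL or framework API) that applies a 3x3 row-major forward matrix to a white-balanced sensor RGB triple to obtain CIE XYZ values. The rational metadata entries are assumed to have already been converted to doubles (numerator / denominator).

#include <array>

// Apply a 3x3 row-major matrix m to a 3-vector v (for example, a
// white-balanced sensor RGB value), producing m * v (for example,
// CIE XYZ with a D50 white point when m is forwardMatrix1).
std::array<double, 3> applyMatrix3x3(const std::array<double, 9>& m,
                                     const std::array<double, 3>& v) {
    std::array<double, 3> out{};
    for (int row = 0; row < 3; ++row) {
        out[row] = m[row * 3 + 0] * v[0] +
                   m[row * 3 + 1] * v[1] +
                   m[row * 3 + 2] * v[2];
    }
    return out;
}

The same row-major convention applies to the calibration and color transform matrices listed earlier; only the source and destination color spaces differ.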
- android.sensor.forwardMatrix2 - - rational - x - - - 3 x 3 - - [public as colorSpaceTransform] - - - - -
3x3 matrix in row-major-order
- - -
-

A matrix that transforms white balanced camera colors from the reference -sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.

-
- -
Details
-

This matrix is used to convert to the standard CIE XYZ colorspace, and -is used when processing raw buffer data.

-

This matrix is expressed as a 3x3 matrix in row-major-order, and contains -a color transform matrix that maps white balanced colors from the -reference sensor color space to the CIE XYZ color space with a D50 white -point.

-

Under the second reference illuminant (android.sensor.referenceIlluminant2) -this matrix is chosen so that the standard white point for this reference -illuminant in the reference sensor colorspace is mapped to D50 in the -CIE XYZ colorspace.

-

This matrix will only be present if the second reference -illuminant is present.

-
- android.sensor.baseGainFactor - - rational - - [system] - - - - - - - -

Gain factor from electrons to raw units when -ISO=100

-
- -
- android.sensor.blackLevelPattern - - int32 - x - - - 4 - - [public as blackLevelPattern] - - - - -
2x2 raw count block
- - -
-

A fixed black level offset for each of the color filter arrangement -(CFA) mosaic channels.

-
- -

>= 0 for each.

-
Details
-

This key specifies the zero light value for each of the CFA mosaic -channels in the camera sensor. The maximal value output by the -sensor is represented by the value in android.sensor.info.whiteLevel.

-

The values are given in the same order as channels listed for the CFA -layout key (see android.sensor.info.colorFilterArrangement), i.e. the -nth value given corresponds to the black level offset for the nth -color channel listed in the CFA.

-

The black level values of captured images may vary for different capture settings (e.g., android.sensor.sensitivity). This key represents a coarse approximation for such cases. It is recommended to use android.sensor.dynamicBlackLevel or pixels from android.sensor.opticalBlackRegions directly for captures when supported by the camera device, as these provide more accurate black level values. For raw capture in particular, it is recommended to use pixels from android.sensor.opticalBlackRegions to calculate black level values for each frame.

-
HAL Implementation Details
-

The values are given in row-column scan order, with the first value -corresponding to the element of the CFA in row=0, column=0.

-
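As an illustration of that scan order (a sketch only; the helper name is hypothetical and not part of the HAL), the offset for a raw pixel at absolute sensor coordinates (row, column) can be looked up from the 4-entry pattern by the parity of its position within the 2x2 CFA block:

#include <cstdint>

// blackLevel holds the 4 android.sensor.blackLevelPattern values in
// row-column scan order: index 0 is the CFA element at row=0, column=0.
int32_t blackLevelForPixel(const int32_t blackLevel[4],
                           uint32_t row, uint32_t col) {
    return blackLevel[(row & 1) * 2 + (col & 1)];
}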
- android.sensor.maxAnalogSensitivity - - int32 - - [public] - - - [full] - - - - - -

Maximum sensitivity that is implemented -purely through analog gain.

-
- -
Details
-

For android.sensor.sensitivity values less than or -equal to this, all applied gain must be analog. For -values above this, the gain applied can be a mix of analog and -digital.

-
- android.sensor.orientation - - int32 - - [public] - - - [legacy] - - - - - -

Clockwise angle through which the output image needs to be rotated to be -upright on the device screen in its native orientation.

-
- Degrees of clockwise rotation; always a multiple of - 90 - -

0, 90, 180, 270

-
Details
-

Also defines the direction of rolling shutter readout, which is from top to bottom in -the sensor's coordinate system.

-
- android.sensor.profileHueSatMapDimensions - - int32 - x - - - 3 - - [system] - - - - -
Number of samples for hue, saturation, and value
- - -
-

The number of input samples for each dimension of -android.sensor.profileHueSatMap.

-
- -

Hue >= 1, -Saturation >= 2, -Value >= 1

-
Details
-

The number of input samples for the hue, saturation, and value -dimension of android.sensor.profileHueSatMap. The order of the -dimensions given is hue, saturation, value; where hue is the 0th -element.

-
- android.sensor.availableTestPatternModes - - int32 - x - - - n - - [public] - - - - -
list of enums
- - -
-

List of sensor test pattern modes for android.sensor.testPatternMode -supported by this camera device.

-
- -

Any value listed in android.sensor.testPatternMode

-
Details
-

Defaults to OFF, and always includes OFF if defined.

-
HAL Implementation Details
-

All custom modes must be >= CUSTOM1.

-
- android.sensor.opticalBlackRegions - - int32 - x - - - 4 x num_regions - - [public as rectangle] - - - - - - - -

List of disjoint rectangles indicating the sensor -optically shielded black pixel regions.

-
- -
Details
-

In most camera sensors, the active array is surrounded by some optically shielded pixel areas. By blocking light, these pixels provide a reliable black reference for black level compensation in the active array region.

-

This key provides a list of disjoint rectangles specifying the -regions of optically shielded (with metal shield) black pixel -regions if the camera device is capable of reading out these black -pixels in the output raw images. In comparison to the fixed black -level values reported by android.sensor.blackLevelPattern, this key -may provide a more accurate way for the application to calculate -black level of each captured raw images.

-

When this key is reported, the android.sensor.dynamicBlackLevel and -android.sensor.dynamicWhiteLevel will also be reported.

-
HAL Implementation Details
-

This array contains (xmin, ymin, width, height). The (xmin, ymin) -must be >= (0,0) and <= -android.sensor.info.pixelArraySize. The (width, height) must be -<= android.sensor.info.pixelArraySize. Each region must be -outside the region reported by -android.sensor.info.preCorrectionActiveArraySize.

-

The HAL must report the minimal number of disjoint regions for the optically shielded black pixel regions. For example, if a region can be covered by one rectangle, the HAL must not split this region into multiple rectangles.

-
- android.sensor.opaqueRawSize - - int32 - x - - - n x 3 - - [system] - - - - - - - -

Size in bytes for all the listed opaque RAW buffer sizes

-
- -

Must be large enough to fit the opaque RAW of corresponding size produced by -the camera

-
Details
-

These configurations are listed as (width, height, size_in_bytes) tuples. This is used for sizing the gralloc buffers for opaque RAW buffers. All RAW_OPAQUE output stream configurations listed in android.scaler.availableStreamConfigurations will have a corresponding tuple in this key.

-
HAL Implementation Details
-

This key is added in HAL3.4. For HAL3.4 or above: devices advertising RAW_OPAQUE format output must list this key. For HAL3.3 or earlier devices: if RAW_OPAQUE output is advertised, the camera framework will derive this key by assuming each pixel takes two bytes and no padding bytes between rows.

-
dynamic
Property NameTypeDescriptionUnitsRangeTags
- android.sensor.exposureTime - - int64 - - [public] - - - [full] - - - - - -

Duration each pixel is exposed to -light.

-
- Nanoseconds - -

android.sensor.info.exposureTimeRange

-
Details
-

If the sensor can't expose this exact duration, it will shorten the -duration exposed to the nearest possible value (rather than expose longer). -The final exposure time used will be available in the output capture result.

-

This control is only effective if android.control.aeMode or android.control.mode is set to -OFF; otherwise the auto-exposure algorithm will override this value.

-
- android.sensor.frameDuration - - int64 - - [public] - - - [full] - - - - - -

Duration from start of frame exposure to -start of next frame exposure.

-
- Nanoseconds - -

See android.sensor.info.maxFrameDuration, -android.scaler.streamConfigurationMap. The duration -is capped to max(duration, exposureTime + overhead).

-
Details
-

The maximum frame rate that can be supported by a camera subsystem is -a function of many factors:

-
    -
  • Requested resolutions of output image streams
  • -
  • Availability of binning / skipping modes on the imager
  • -
  • The bandwidth of the imager interface
  • -
  • The bandwidth of the various ISP processing blocks
  • -
-

Since these factors can vary greatly between different ISPs and -sensors, the camera abstraction tries to represent the bandwidth -restrictions with as simple a model as possible.

-

The model presented has the following characteristics:

-
    -
  • The image sensor is always configured to output the smallest -resolution possible given the application's requested output stream -sizes. The smallest resolution is defined as being at least as large -as the largest requested output stream size; the camera pipeline must -never digitally upsample sensor data when the crop region covers the -whole sensor. In general, this means that if only small output stream -resolutions are configured, the sensor can provide a higher frame -rate.
  • -
  • Since any request may use any or all the currently configured -output streams, the sensor and ISP must be configured to support -scaling a single capture to all the streams at the same time. This -means the camera pipeline must be ready to produce the largest -requested output size without any delay. Therefore, the overall -frame rate of a given configured stream set is governed only by the -largest requested stream resolution.
  • -
  • Using more than one output stream in a request does not affect the -frame duration.
  • -
  • Certain format-streams may need to do additional background processing -before data is consumed/produced by that stream. These processors -can run concurrently to the rest of the camera pipeline, but -cannot process more than 1 capture at a time.
  • -
-

The necessary information for the application, given the model above, -is provided via the android.scaler.streamConfigurationMap field using -StreamConfigurationMap#getOutputMinFrameDuration. -These are used to determine the maximum frame rate / minimum frame -duration that is possible for a given stream configuration.

-

Specifically, the application can use the following rules to -determine the minimum frame duration it can request from the camera -device:

-
    -
  1. Let the set of currently configured input/output streams -be called S.
  2. -
  3. Find the minimum frame durations for each stream in S, by looking -it up in android.scaler.streamConfigurationMap using StreamConfigurationMap#getOutputMinFrameDuration -(with its respective size/format). Let this set of frame durations be -called F.
  4. -
  5. For any given request R, the minimum frame duration allowed -for R is the maximum out of all values in F. Let the streams -used in R be called S_r.
  6. -
-
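Restating those rules as code, a minimal sketch (illustrative only; the per-stream durations would come from StreamConfigurationMap#getOutputMinFrameDuration for each stream in S_r):

#include <algorithm>
#include <cstdint>
#include <vector>

// Per the last rule above: the minimum frame duration allowed for a request
// R is the maximum of the minimum frame durations (set F) of the streams
// that R actually uses (S_r), given here in nanoseconds.
int64_t minFrameDurationForRequest(const std::vector<int64_t>& streamMinDurationsNs) {
    int64_t result = 0;
    for (int64_t d : streamMinDurationsNs) {
        result = std::max(result, d);
    }
    return result;
}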

If none of the streams in S_r have a stall time (listed in StreamConfigurationMap#getOutputStallDuration -using its respective size/format), then the frame duration in F -determines the steady state frame rate that the application will get -if it uses R as a repeating request. Let this special kind of -request be called Rsimple.

-

A repeating request Rsimple can be occasionally interleaved -by a single capture of a new request Rstall (which has at least -one in-use stream with a non-0 stall time) and if Rstall has the -same minimum frame duration this will not cause a frame rate loss -if all buffers from the previous Rstall have already been -delivered.

-

For more details about stalling, see -StreamConfigurationMap#getOutputStallDuration.

-

This control is only effective if android.control.aeMode or android.control.mode is set to -OFF; otherwise the auto-exposure algorithm will override this value.

-
HAL Implementation Details
-

For more details about stalling, see -android.scaler.availableStallDurations.

-
- android.sensor.sensitivity - - int32 - - [public] - - - [full] - - - - - -

The amount of gain applied to sensor data -before processing.

-
- ISO arithmetic units - -

android.sensor.info.sensitivityRange

-
Details
-

The sensitivity is the standard ISO sensitivity value, -as defined in ISO 12232:2006.

-

The sensitivity must be within android.sensor.info.sensitivityRange, and if it is less than android.sensor.maxAnalogSensitivity, the camera device is guaranteed to use only analog amplification for applying the gain.

-

If the camera device cannot apply the exact sensitivity -requested, it will reduce the gain to the nearest supported -value. The final sensitivity used will be available in the -output capture result.

-

This control is only effective if android.control.aeMode or android.control.mode is set to -OFF; otherwise the auto-exposure algorithm will override this value.

-
HAL Implementation Details
-

ISO 12232:2006 REI method is acceptable.

-
- android.sensor.timestamp - - int64 - - [public] - - - [legacy] - - - - - -

Time at start of exposure of first -row of the image sensor active array, in nanoseconds.

-
- Nanoseconds - -

> 0

-
Details
-

The timestamps are also included in all image -buffers produced for the same capture, and will be identical -on all the outputs.

-

When android.sensor.info.timestampSource == UNKNOWN, -the timestamps measure time since an unspecified starting point, -and are monotonically increasing. They can be compared with the -timestamps for other captures from the same camera device, but are -not guaranteed to be comparable to any other time source.

-

When android.sensor.info.timestampSource == REALTIME, the -timestamps measure time in the same timebase as SystemClock#elapsedRealtimeNanos, and they can -be compared to other timestamps from other subsystems that -are using that base.

-

For reprocessing, the timestamp will match the start of exposure of -the input image, i.e. the -timestamp in the TotalCaptureResult that was used to create the -reprocess capture request.

-
HAL Implementation Details
-

All timestamps must be in reference to the kernel's -CLOCK_BOOTTIME monotonic clock, which properly accounts for -time spent asleep. This allows for synchronization with -sensors that continue to operate while the system is -otherwise asleep.

-

If android.sensor.info.timestampSource == REALTIME, the timestamp must be synchronized with the timestamps from other sensor subsystems that are using the same timebase.

-

For reprocessing, the input image's start of exposure can be looked up -with android.sensor.timestamp from the metadata included in the -capture request.

-
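For reference, a minimal sketch of reading a timestamp in the required timebase on Linux/Android (standard POSIX clock_gettime usage, not HAL-specific code; CLOCK_BOOTTIME must be provided by the kernel):

#include <cstdint>
#include <ctime>

// Read the kernel's CLOCK_BOOTTIME clock, which keeps counting across
// suspend, and convert it to nanoseconds to match android.sensor.timestamp.
int64_t bootTimeNanos() {
    struct timespec ts;
    clock_gettime(CLOCK_BOOTTIME, &ts);
    return static_cast<int64_t>(ts.tv_sec) * 1000000000LL + ts.tv_nsec;
}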
- android.sensor.temperature - - float - - [system] - - - - - - - -

The temperature of the sensor, sampled at the time -exposure began for this frame.

-

The thermal diode being queried should be inside the sensor PCB, or -somewhere close to it.

-
- Celsius - -

Optional. This value is missing if no temperature is available.

-
- android.sensor.neutralColorPoint - - rational - x - - - 3 - - [public] - - - - - - - -

The estimated camera neutral color in the native sensor colorspace at -the time of capture.

-
- -
Details
-

This value gives the neutral color point encoded as an RGB value in the -native sensor color space. The neutral color point indicates the -currently estimated white point of the scene illumination. It can be -used to interpolate between the provided color transforms when -processing raw sensor data.

-

The order of the values is R, G, B; where R is in the lowest index.

-
- android.sensor.noiseProfile - - double - x - - - 2 x CFA Channels - - [public as pairDoubleDouble] - - - - -
Pairs of noise model coefficients
- - -
-

Noise model coefficients for each CFA mosaic channel.

-
- -
Details
-

This key contains two noise model coefficients for each CFA channel -corresponding to the sensor amplification (S) and sensor readout -noise (O). These are given as pairs of coefficients for each channel -in the same order as channels listed for the CFA layout key -(see android.sensor.info.colorFilterArrangement). This is -represented as an array of Pair<Double, Double>, where -the first member of the Pair at index n is the S coefficient and the -second member is the O coefficient for the nth color channel in the CFA.

-

These coefficients are used in a two parameter noise model to describe -the amount of noise present in the image for each CFA channel. The -noise model used here is:

-

N(x) = sqrt(Sx + O)

-

Where x represents the recorded signal of a CFA channel normalized to the range [0, 1], and S and O are the noise model coefficients for that channel.

-

A more detailed description of the noise model can be found in the -Adobe DNG specification for the NoiseProfile tag.

-
HAL Implementation Details
-

For a CFA layout of RGGB, the list of coefficients would be given as -an array of doubles S0,O0,S1,O1,..., where S0 and O0 are the coefficients -for the red channel, S1 and O1 are the coefficients for the first green -channel, etc.

-
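Putting the model and the coefficient ordering together, a minimal sketch (illustrative only; the function name is hypothetical) that evaluates the expected noise for a normalized signal level in a given CFA channel:

#include <cmath>
#include <vector>

// coefficients holds the android.sensor.noiseProfile values flattened as
// S0, O0, S1, O1, ... in CFA channel order (e.g. R, Gr, Gb, B for RGGB).
// x is the recorded signal of that channel, normalized to [0, 1].
double expectedNoise(const std::vector<double>& coefficients,
                     int cfaChannel, double x) {
    const double s = coefficients[2 * cfaChannel];      // amplification (S)
    const double o = coefficients[2 * cfaChannel + 1];  // readout noise (O)
    return std::sqrt(s * x + o);                        // N(x) = sqrt(Sx + O)
}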
- android.sensor.profileHueSatMap - - float - x - - - hue_samples x saturation_samples x value_samples x 3 - - [system] - - - - -
Mapping for hue, saturation, and value
- - -
-

A mapping containing a hue shift, saturation scale, and value scale -for each pixel.

-
- - The hue shift is given in degrees; saturation and value scale factors are - unitless and are between 0 and 1 inclusive - - -
Details
-

hue_samples, saturation_samples, and value_samples are given in -android.sensor.profileHueSatMapDimensions.

-

Each entry of this map contains three floats corresponding to the -hue shift, saturation scale, and value scale, respectively; where the -hue shift has the lowest index. The map entries are stored in the key -in nested loop order, with the value divisions in the outer loop, the -hue divisions in the middle loop, and the saturation divisions in the -inner loop. All zero input saturation entries are required to have a -value scale factor of 1.0.

-
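Spelling out that storage order (a sketch only; the helper is hypothetical), the flat float offset of the entry for a given (hue, saturation, value) sample index is:

#include <cstddef>

// Entries are stored with the value divisions in the outer loop, hue in the
// middle loop, and saturation in the inner loop; each entry holds 3 floats
// (hue shift, saturation scale, value scale), hue shift at the lowest index.
size_t hueSatMapOffset(int hueSamples, int satSamples, int h, int s, int v) {
    const size_t entry =
        (static_cast<size_t>(v) * hueSamples + h) * satSamples + s;
    return entry * 3;
}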
- android.sensor.profileToneCurve - - float - x - - - samples x 2 - - [system] - - - - -
Samples defining a spline for a tone-mapping curve
- - -
-

A list of x,y samples defining a tone-mapping curve for gamma adjustment.

-
- -

Each sample has an input range of [0, 1] and an output range of -[0, 1]. The first sample is required to be (0, 0), and the last -sample is required to be (1, 1).

-
Details
-

This key contains a default tone curve that can be applied while -processing the image as a starting point for user adjustments. -The curve is specified as a list of value pairs in linear gamma. -The curve is interpolated using a cubic spline.

-
- android.sensor.greenSplit - - float - - [public] - - - - - - - -

The worst-case divergence between Bayer green channels.

-
- -

>= 0

-
Details
-

This value is an estimate of the worst case split between the -Bayer green channels in the red and blue rows in the sensor color -filter array.

-

The green split is calculated as follows:

-
    -
  1. A 5x5 pixel (or larger) window W within the active sensor array is -chosen. The term 'pixel' here is taken to mean a group of 4 Bayer -mosaic channels (R, Gr, Gb, B). The location and size of the window -chosen is implementation defined, and should be chosen to provide a -green split estimate that is both representative of the entire image -for this camera sensor, and can be calculated quickly.
  2. -
  3. The arithmetic mean of the green channels from the red -rows (mean_Gr) within W is computed.
  4. -
  5. The arithmetic mean of the green channels from the blue -rows (mean_Gb) within W is computed.
  6. -
  7. The maximum ratio R of the two means is computed as follows: -R = max((mean_Gr + 1)/(mean_Gb + 1), (mean_Gb + 1)/(mean_Gr + 1))
  8. -
-

The ratio R is the green split divergence reported for this property, -which represents how much the green channels differ in the mosaic -pattern. This value is typically used to determine the treatment of -the green mosaic channels when demosaicing.

-

The green split value can be roughly interpreted as follows:

-
    -
  • R < 1.03 is a negligible split (<3% divergence).
  • -
  • 1.03 <= R <= 1.20 will require some software correction to avoid demosaic errors (3-20% divergence).
  • -
  • R > 1.20 will require strong software correction to produce a usable image (>20% divergence).
  • -
-
HAL Implementation Details
-

The green split given may be a static value based on prior -characterization of the camera sensor using the green split -calculation method given here over a large, representative, sample -set of images. Other methods of calculation that produce equivalent -results, and can be interpreted in the same manner, may be used.

-
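Putting the calculation steps above together, a minimal sketch (illustrative only; choosing the window W and computing the two means is implementation defined) of the final ratio:

#include <algorithm>

// meanGr and meanGb are the arithmetic means of the green samples from the
// red rows and the blue rows, respectively, within the chosen window W.
double greenSplit(double meanGr, double meanGb) {
    return std::max((meanGr + 1.0) / (meanGb + 1.0),
                    (meanGb + 1.0) / (meanGr + 1.0));
}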
- android.sensor.testPatternData - - int32 - x - - - 4 - - [public] - - - - - - - -

A pixel [R, G_even, G_odd, B] that supplies the test pattern -when android.sensor.testPatternMode is SOLID_COLOR.

-
- -
Details
-

Each color channel is treated as an unsigned 32-bit integer. -The camera device then uses the most significant X bits -that correspond to how many bits are in its Bayer raw sensor -output.

-

For example, a sensor with RAW10 Bayer output would use the -10 most significant bits from each color channel.

-
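As a sketch of that truncation (illustrative only; the helper name is hypothetical), the value driven onto a raw pixel of a given bit depth keeps only the top bits of the 32-bit channel value:

#include <cstdint>

// channelValue is one entry of android.sensor.testPatternData, treated as an
// unsigned 32-bit integer; rawBits is the sensor's raw bit depth (1..32),
// e.g. 10 for RAW10. Only the most significant rawBits bits are kept.
uint32_t solidColorSample(uint32_t channelValue, int rawBits) {
    return channelValue >> (32 - rawBits);
}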
HAL Implementation Details
- -
- android.sensor.testPatternMode - - int32 - - [public] - - - - - -
    -
  • - OFF -

    No test pattern mode is used, and the camera -device returns captures from the image sensor.

    -

    This is the default if the key is not set.

    -
  • -
  • - SOLID_COLOR -

    Each pixel in [R, G_even, G_odd, B] is replaced by its -respective color channel provided in -android.sensor.testPatternData.

    -

    For example:

    -
    android.testPatternData = [0, 0xFFFFFFFF, 0xFFFFFFFF, 0]
    -
    -

    All green pixels are 100% green. All red/blue pixels are black.

    -
    android.testPatternData = [0xFFFFFFFF, 0, 0xFFFFFFFF, 0]
    -
    -

    All red pixels are 100% red. Only the odd green pixels -are 100% green. All blue pixels are 100% black.

    -
  • -
  • - COLOR_BARS -

    All pixel data is replaced with an 8-bar color pattern.

    -

    The vertical bars (left-to-right) are as follows:

    -
      -
    • 100% white
    • -
    • yellow
    • -
    • cyan
    • -
    • green
    • -
    • magenta
    • -
    • red
    • -
    • blue
    • -
    • black
    • -
    -

    In general the image would look like the following:

    -
    W Y C G M R B K
    -W Y C G M R B K
    -W Y C G M R B K
    -W Y C G M R B K
    -W Y C G M R B K
    -. . . . . . . .
    -. . . . . . . .
    -. . . . . . . .
    -
    -(B = Blue, K = Black)
    -
    -

    Each bar should take up 1/8 of the sensor pixel array width. -When this is not possible, the bar size should be rounded -down to the nearest integer and the pattern can repeat -on the right side.

    -

    Each bar's height must always take up the full sensor -pixel array height.

    -

    Each pixel in this test pattern must be set to either -0% intensity or 100% intensity.

    -
  • -
  • - COLOR_BARS_FADE_TO_GRAY -

    The test pattern is similar to COLOR_BARS, except that -each bar should start at its specified color at the top, -and fade to gray at the bottom.

    -

    Furthermore each bar is further subdivided into a left and -right half. The left half should have a smooth gradient, -and the right half should have a quantized gradient.

    -

    In particular, the right half should consist of blocks of the same color, each 1/16th of the active sensor pixel array width.

    -

    The least significant bits in the quantized gradient should -be copied from the most significant bits of the smooth gradient.

    -

    The height of each bar should always be a multiple of 128. -When this is not the case, the pattern should repeat at the bottom -of the image.

    -
  • -
  • - PN9 -

    All pixel data is replaced by a pseudo-random sequence -generated from a PN9 512-bit sequence (typically implemented -in hardware with a linear feedback shift register).

    -

    The generator should be reset at the beginning of each frame, -and thus each subsequent raw frame with this test pattern should -be exactly the same as the last.

    -
  • -
  • - CUSTOM1 - 256 -

    The first custom test pattern. All custom patterns that are -available only on this camera device are at least this numeric -value.

    -

    All of the custom test patterns will be static -(that is the raw image must not vary from frame to frame).

    -
  • -
- -
-

When enabled, the sensor sends a test pattern instead of -doing a real exposure from the camera.

-
- -

android.sensor.availableTestPatternModes

-
Details
-

When a test pattern is enabled, all manual sensor controls specified -by android.sensor.* will be ignored. All other controls should -work as normal.

-

For example, if manual flash is enabled, flash firing should still occur (and the test pattern should remain unmodified, since the flash would not actually affect it).

-

Defaults to OFF.

-
HAL Implementation Details
-

All test patterns are specified in the Bayer domain.

-

The HAL may choose to substitute test patterns from the sensor -with test patterns from on-device memory. In that case, it should be -indistinguishable to the ISP whether the data came from the -sensor interconnect bus (such as CSI2) or memory.

-
- android.sensor.rollingShutterSkew - - int64 - - [public] - - - [limited] - - - - - -

Duration between the start of first row exposure -and the start of last row exposure.

-
- Nanoseconds - -

>= 0 and < -StreamConfigurationMap#getOutputMinFrameDuration.

-
Details
-

This is the exposure time skew between the first and last -row exposure start times. The first row and the last row are -the first and last rows inside of the -android.sensor.info.activeArraySize.

-

For typical camera sensors that use rolling shutters, this is also equivalent -to the frame readout time.

-
HAL Implementation Details
-

The HAL must report 0 if the sensor is using global shutter, where all pixels begin -exposure at the same time.

-
- android.sensor.dynamicBlackLevel - - float - x - - - 4 - - [public] - - - - -
2x2 raw count block
- - -
-

A per-frame dynamic black level offset for each of the color filter -arrangement (CFA) mosaic channels.

-
- -

>= 0 for each.

-
Details
-

Camera sensor black levels may vary dramatically for different -capture settings (e.g. android.sensor.sensitivity). The fixed black -level reported by android.sensor.blackLevelPattern may be too -inaccurate to represent the actual value on a per-frame basis. The -camera device internal pipeline relies on reliable black level values -to process the raw images appropriately. To get the best image -quality, the camera device may choose to estimate the per frame black -level values either based on optically shielded black regions -(android.sensor.opticalBlackRegions) or its internal model.

-

This key reports the camera device estimated per-frame zero light -value for each of the CFA mosaic channels in the camera sensor. The -android.sensor.blackLevelPattern may only represent a coarse -approximation of the actual black level values. This value is the -black level used in camera device internal image processing pipeline -and generally more accurate than the fixed black level values. -However, since they are estimated values by the camera device, they -may not be as accurate as the black level values calculated from the -optical black pixels reported by android.sensor.opticalBlackRegions.

-

The values are given in the same order as channels listed for the CFA -layout key (see android.sensor.info.colorFilterArrangement), i.e. the -nth value given corresponds to the black level offset for the nth -color channel listed in the CFA.

-

This key will be available if android.sensor.opticalBlackRegions is -available or the camera device advertises this key via -CameraCharacteristics#getAvailableCaptureResultKeys.

-
HAL Implementation Details
-

The values are given in row-column scan order, with the first value -corresponding to the element of the CFA in row=0, column=0.

-
- android.sensor.dynamicWhiteLevel - - int32 - - [public] - - - - - - - -

Maximum raw value output by sensor for this frame.

-
- -

>= 0

-
Details
-

Since the android.sensor.blackLevelPattern may change for different -capture settings (e.g., android.sensor.sensitivity), the white -level will change accordingly. This key is similar to -android.sensor.info.whiteLevel, but specifies the camera device -estimated white level for each frame.

-

This key will be available if android.sensor.opticalBlackRegions is -available or the camera device advertises this key via -CameraCharacteristics#getAvailableCaptureRequestKeys.

-
HAL Implementation Details
-

The full bit depth of the sensor must be available in the raw data, so the value for linear sensors should not be significantly lower than the maximum raw value supported, i.e. 2^(sensor bits per pixel).

-
shading
controls
Property NameTypeDescriptionUnitsRangeTags
- android.shading.mode - - byte - - [public] - - - [full] - - - -
    -
  • - OFF -

    No lens shading correction is applied.

    -
  • -
  • - FAST -

    Apply lens shading corrections, without slowing -frame rate relative to sensor raw output

    -
  • -
  • - HIGH_QUALITY -

    Apply high-quality lens shading correction, at the -cost of possibly reduced frame rate.

    -
  • -
- -
-

Quality of lens shading correction applied -to the image data.

-
- -

android.shading.availableModes

-
Details
-

When set to OFF mode, no lens shading correction will be applied by the -camera device, and an identity lens shading map data will be provided -if android.statistics.lensShadingMapMode == ON. For example, for lens -shading map with size of [ 4, 3 ], -the output android.statistics.lensShadingCorrectionMap for this case will be an identity -map shown below:

-
[ 1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
- 1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
- 1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
- 1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
- 1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
- 1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0 ]
-
-

When set to other modes, lens shading correction will be applied by the camera -device. Applications can request lens shading map data by setting -android.statistics.lensShadingMapMode to ON, and then the camera device will provide lens -shading map data in android.statistics.lensShadingCorrectionMap; the returned shading map -data will be the one applied by the camera device for this capture request.

-

The shading map data may depend on the auto-exposure (AE) and AWB statistics, therefore the reliability of the map data may be affected by the AE and AWB algorithms. When AE and AWB are in AUTO modes (android.control.aeMode != OFF and android.control.awbMode != OFF), to get the best results it is recommended that applications wait for AE and AWB to converge before using the returned shading map data.

-
- android.shading.strength - - byte - - [system] - - - - - - - -

Control the amount of shading correction -applied to the images

-
- unitless: 1-10; 10 is full shading - compensation - -
dynamic
Property NameTypeDescriptionUnitsRangeTags
- android.shading.mode - - byte - - [public] - - - [full] - - - -
    -
  • - OFF -

    No lens shading correction is applied.

    -
  • -
  • - FAST -

    Apply lens shading corrections, without slowing -frame rate relative to sensor raw output

    -
  • -
  • - HIGH_QUALITY -

    Apply high-quality lens shading correction, at the -cost of possibly reduced frame rate.

    -
  • -
- -
-

Quality of lens shading correction applied -to the image data.

-
- -

android.shading.availableModes

-
Details
-

When set to OFF mode, no lens shading correction will be applied by the -camera device, and an identity lens shading map data will be provided -if android.statistics.lensShadingMapMode == ON. For example, for lens -shading map with size of [ 4, 3 ], -the output android.statistics.lensShadingCorrectionMap for this case will be an identity -map shown below:

-
[ 1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
- 1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
- 1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
- 1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
- 1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
- 1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0 ]
-
-

When set to other modes, lens shading correction will be applied by the camera -device. Applications can request lens shading map data by setting -android.statistics.lensShadingMapMode to ON, and then the camera device will provide lens -shading map data in android.statistics.lensShadingCorrectionMap; the returned shading map -data will be the one applied by the camera device for this capture request.

-

The shading map data may depend on the auto-exposure (AE) and AWB statistics, therefore the reliability of the map data may be affected by the AE and AWB algorithms. When AE and AWB are in AUTO modes (android.control.aeMode != OFF and android.control.awbMode != OFF), to get the best results it is recommended that applications wait for AE and AWB to converge before using the returned shading map data.

-
static
Property NameTypeDescriptionUnitsRangeTags
- android.shading.availableModes - - byte - x - - - n - - [public as enumList] - - - [legacy] - - -
List of enums (android.shading.mode).
- - -
-

List of lens shading modes for android.shading.mode that are supported by this camera device.

-
- -

Any value listed in android.shading.mode

-
Details
-

This list contains lens shading modes that can be set for the camera device. -Camera devices that support the MANUAL_POST_PROCESSING capability will always -list OFF and FAST mode. This includes all FULL level devices. -LEGACY devices will always only support FAST mode.

-
HAL Implementation Details
-

HAL must support both FAST and HIGH_QUALITY if lens shading correction control is -available on the camera device, but the underlying implementation can be the same for -both modes. That is, if the highest quality implementation on the camera device does not -slow down capture rate, then FAST and HIGH_QUALITY will generate the same output.

-
statistics
controls
Property NameTypeDescriptionUnitsRangeTags
- android.statistics.faceDetectMode - - byte - - [public] - - - [legacy] - - - -
    -
  • - OFF -

    Do not include face detection statistics in capture -results.

    -
  • -
  • - SIMPLE - [optional] -

    Return face rectangle and confidence values only.

    -
  • -
  • - FULL - [optional] -

    Return all face -metadata.

    -

    In this mode, face rectangles, scores, landmarks, and face IDs are all valid.

    -
  • -
- -
-

Operating mode for the face detector -unit.

-
- -

android.statistics.info.availableFaceDetectModes

-
Details
-

Whether face detection is enabled, and whether it -should output just the basic fields or the full set of -fields.

-
HAL Implementation Details
-

SIMPLE mode must fill in android.statistics.faceRectangles and -android.statistics.faceScores. -FULL mode must also fill in android.statistics.faceIds, and -android.statistics.faceLandmarks.

-
- android.statistics.histogramMode - - byte - - [system as boolean] - - - - - -
    -
  • - OFF -
  • -
  • - ON -
  • -
- -
-

Operating mode for histogram -generation

-
- -
- android.statistics.sharpnessMapMode - - byte - - [system as boolean] - - - - - -
    -
  • - OFF -
  • -
  • - ON -
  • -
- -
-

Operating mode for sharpness map -generation

-
- -
- android.statistics.hotPixelMapMode - - byte - - [public as boolean] - - - - - -
    -
  • - OFF -

    Hot pixel map production is disabled.

    -
  • -
  • - ON -

    Hot pixel map production is enabled.

    -
  • -
- -
-

Operating mode for hot pixel map generation.

-
- -

android.statistics.info.availableHotPixelMapModes

-
Details
-

If set to true, a hot pixel map is returned in android.statistics.hotPixelMap. -If set to false, no hot pixel map will be returned.

-
- android.statistics.lensShadingMapMode - - byte - - [public] - - - [full] - - - -
    -
  • - OFF -

    Do not include a lens shading map in the capture result.

    -
  • -
  • - ON -

    Include a lens shading map in the capture result.

    -
  • -
- -
-

Whether the camera device will output the lens -shading map in output result metadata.

-
- -

android.statistics.info.availableLensShadingMapModes

-
Details
-

When set to ON, -android.statistics.lensShadingMap will be provided in -the output result metadata.

-

ON is always supported on devices with the RAW capability.

-
static
Property NameTypeDescriptionUnitsRangeTags
- android.statistics.info.availableFaceDetectModes - - byte - x - - - n - - [public as enumList] - - - [legacy] - - -
List of enums from android.statistics.faceDetectMode
- - -
-

List of face detection modes for android.statistics.faceDetectMode that are -supported by this camera device.

-
- -

Any value listed in android.statistics.faceDetectMode

-
Details
-

OFF is always supported.

-
- android.statistics.info.histogramBucketCount - - int32 - - [system] - - - - - - - -

Number of histogram buckets -supported

-
- -

>= 64

-
- android.statistics.info.maxFaceCount - - int32 - - [public] - - - [legacy] - - - - - -

The maximum number of simultaneously detectable -faces.

-
- -

0 for cameras without available face detection; otherwise: ->=4 for LIMITED or FULL hwlevel devices or ->0 for LEGACY devices.

-
- android.statistics.info.maxHistogramCount - - int32 - - [system] - - - - - - - -

Maximum value possible for a histogram -bucket

-
- -
- android.statistics.info.maxSharpnessMapValue - - int32 - - [system] - - - - - - - -

Maximum value possible for a sharpness map -region.

-
- -
- android.statistics.info.sharpnessMapSize - - int32 - x - - - 2 - - [system as size] - - - - -
width x height
- - -
-

Dimensions of the sharpness -map

-
- -

Must be at least 32 x 32

-
- android.statistics.info.availableHotPixelMapModes - - byte - x - - - n - - [public as boolean] - - - - -
list of enums
- - -
-

List of hot pixel map output modes for android.statistics.hotPixelMapMode that are -supported by this camera device.

-
- -

Any value listed in android.statistics.hotPixelMapMode

-
Details
-

If no hotpixel map output is available for this camera device, this will contain only -false.

-

ON is always supported on devices with the RAW capability.

-
- android.statistics.info.availableLensShadingMapModes - - byte - x - - - n - - [public as enumList] - - - - -
list of enums
- - -
-

List of lens shading map output modes for android.statistics.lensShadingMapMode that -are supported by this camera device.

-
- -

Any value listed in android.statistics.lensShadingMapMode

-
Details
-

If no lens shading map output is available for this camera device, this key will -contain only OFF.

-

ON is always supported on devices with the RAW capability. -LEGACY mode devices will always only support OFF.

-
dynamic
Property NameTypeDescriptionUnitsRangeTags
- android.statistics.faceDetectMode - - byte - - [public] - - - [legacy] - - - -
    -
  • - OFF -

    Do not include face detection statistics in capture -results.

    -
  • -
  • - SIMPLE - [optional] -

    Return face rectangle and confidence values only.

    -
  • -
  • - FULL - [optional] -

    Return all face -metadata.

    -

    In this mode, face rectangles, scores, landmarks, and face IDs are all valid.

    -
  • -
- -
-

Operating mode for the face detector -unit.

-
- -

android.statistics.info.availableFaceDetectModes

-
Details
-

Whether face detection is enabled, and whether it -should output just the basic fields or the full set of -fields.

-
HAL Implementation Details
-

SIMPLE mode must fill in android.statistics.faceRectangles and -android.statistics.faceScores. -FULL mode must also fill in android.statistics.faceIds, and -android.statistics.faceLandmarks.

-
- android.statistics.faceIds - - int32 - x - - - n - - [ndk_public] - - - [legacy] - - - - - -

List of unique IDs for detected faces.

-
- -
Details
-

Each detected face is given a unique ID that is valid for as long as the face is visible -to the camera device. A face that leaves the field of view and later returns may be -assigned a new ID.

-

Only available if android.statistics.faceDetectMode == FULL

-
- android.statistics.faceLandmarks - - int32 - x - - - n x 6 - - [ndk_public] - - - [legacy] - - -
(leftEyeX, leftEyeY, rightEyeX, rightEyeY, mouthX, mouthY)
- - -
-

List of landmarks for detected -faces.

-
- -
Details
-

The coordinate system is that of android.sensor.info.activeArraySize, with -(0, 0) being the top-left pixel of the active array.

-

Only available if android.statistics.faceDetectMode == FULL

-
- android.statistics.faceRectangles - - int32 - x - - - n x 4 - - [ndk_public as rectangle] - - - [legacy] - - -
(xmin, ymin, xmax, ymax). (0,0) is top-left of active pixel area
- - -
-

List of the bounding rectangles for detected -faces.

-
- -
Details
-

The coordinate system is that of android.sensor.info.activeArraySize, with -(0, 0) being the top-left pixel of the active array.

-

Only available if android.statistics.faceDetectMode != OFF

-
- android.statistics.faceScores - - byte - x - - - n - - [ndk_public] - - - [legacy] - - - - - -

List of the face confidence scores for -detected faces

-
- -

1-100

-
Details
-

Only available if android.statistics.faceDetectMode != OFF.

-
HAL Implementation Details
-

The value should be meaningful (for example, setting 100 at -all times is illegal).

-
- android.statistics.faces - - int32 - x - - - n - - [java_public as face] - - [synthetic] - - [legacy] - - - - - -

List of the faces detected through camera face detection -in this capture.

-
- -
Details
-

Only available if android.statistics.faceDetectMode != OFF.

-
- android.statistics.histogram - - int32 - x - - - n x 3 - - [system] - - - - -
count of pixels for each color channel that fall into each histogram bucket, scaled to be between 0 and maxHistogramCount
- - -
-

A 3-channel histogram based on the raw -sensor data

-
- -
Details
-

The k'th bucket (0-based) covers the input range (with w = android.sensor.info.whiteLevel) of [ k * w/N, (k + 1) * w / N ). If only a monochrome histogram is supported, all channels should have the same data.

-
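Equivalently, a sketch of the bucket index for a single raw sample (illustrative only):

#include <algorithm>
#include <cstdint>

// For N buckets and white level w, a sample p in [0, w] falls into bucket
// floor(p * N / w), clamped so that p == w still lands in the last bucket.
int histogramBucket(int64_t p, int64_t w, int n) {
    return static_cast<int>(std::min<int64_t>(p * n / w, n - 1));
}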
- android.statistics.histogramMode - - byte - - [system as boolean] - - - - - -
    -
  • - OFF -
  • -
  • - ON -
  • -
- -
-

Operating mode for histogram -generation

-
- -
- android.statistics.sharpnessMap - - int32 - x - - - n x m x 3 - - [system] - - - - -
estimated sharpness for each region of the input image. Normalized to be between 0 and maxSharpnessMapValue. Higher values mean sharper (better focused)
- - -
-

A 3-channel sharpness map, based on the raw -sensor data

-
- -
Details
-

If only a monochrome sharpness map is supported, -all channels should have the same data

-
- android.statistics.sharpnessMapMode - - byte - - [system as boolean] - - - - - -
    -
  • - OFF -
  • -
  • - ON -
  • -
- -
-

Operating mode for sharpness map -generation

-
- -
- android.statistics.lensShadingCorrectionMap - - byte - - [java_public as lensShadingMap] - - - [full] - - - - - -

The shading map is a low-resolution floating-point map -that lists the coefficients used to correct for vignetting, for each -Bayer color channel.

-
- -

Each gain factor is >= 1

-
Details
-

The map provided here is the same map that is used by the camera device to -correct both color shading and vignetting for output non-RAW images.

-

When there is no lens shading correction applied to RAW -output images (android.sensor.info.lensShadingApplied == -false), this map is the complete lens shading correction -map; when there is some lens shading correction applied to -the RAW output image (android.sensor.info.lensShadingApplied== true), this map reports the remaining lens shading -correction map that needs to be applied to get shading -corrected images that match the camera device's output for -non-RAW formats.

-

For a complete shading correction map, the least shaded -section of the image will have a gain factor of 1; all -other sections will have gains above 1.

-

When android.colorCorrection.mode = TRANSFORM_MATRIX, the map -will take into account the colorCorrection settings.

-

The shading map is for the entire active pixel array, and is not -affected by the crop region specified in the request. Each shading map -entry is the value of the shading compensation map over a specific -pixel on the sensor. Specifically, with a (N x M) resolution shading -map, and an active pixel array size (W x H), shading map entry -(x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at -pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels. -The map is assumed to be bilinearly interpolated between the sample points.

-

The channel order is [R, Geven, Godd, B], where Geven is the green -channel for the even rows of a Bayer pattern, and Godd is the odd rows. -The shading map is stored in a fully interleaved format.

-

The shading map will generally have on the order of 30-40 rows and columns, -and will be smaller than 64x64.

-

As an example, given a very small map defined as:

-
width,height = [ 4, 3 ]
-values =
-[ 1.3, 1.2, 1.15, 1.2,  1.2, 1.2, 1.15, 1.2,
-    1.1, 1.2, 1.2, 1.2,  1.3, 1.2, 1.3, 1.3,
-  1.2, 1.2, 1.25, 1.1,  1.1, 1.1, 1.1, 1.0,
-    1.0, 1.0, 1.0, 1.0,  1.2, 1.3, 1.25, 1.2,
-  1.3, 1.2, 1.2, 1.3,   1.2, 1.15, 1.1, 1.2,
-    1.2, 1.1, 1.0, 1.2,  1.3, 1.15, 1.2, 1.3 ]
-
-

The low-resolution scaling map images for each channel are -(displayed using nearest-neighbor interpolation):

-

Red lens shading map -Green (even rows) lens shading map -Green (odd rows) lens shading map -Blue lens shading map

-

As a visualization only, inverting the full-color map to recover an -image of a gray wall (using bicubic interpolation for visual quality) as captured by the sensor gives:

-

Image of a uniform white wall (inverse shading map)

-
- android.statistics.lensShadingMap - - float - x - - - 4 x n x m - - [ndk_public] - - - [full] - - -
2D array of float gain factors per channel to correct lens shading
- - -
-

The shading map is a low-resolution floating-point map -that lists the coefficients used to correct for vignetting and color shading, -for each Bayer color channel of RAW image data.

-
- -

Each gain factor is >= 1

-
Details
-

The map provided here is the same map that is used by the camera device to -correct both color shading and vignetting for output non-RAW images.

-

When there is no lens shading correction applied to RAW -output images (android.sensor.info.lensShadingApplied == -false), this map is the complete lens shading correction -map; when there is some lens shading correction applied to -the RAW output image (android.sensor.info.lensShadingApplied== true), this map reports the remaining lens shading -correction map that needs to be applied to get shading -corrected images that match the camera device's output for -non-RAW formats.

-

For a complete shading correction map, the least shaded -section of the image will have a gain factor of 1; all -other sections will have gains above 1.

-

When android.colorCorrection.mode = TRANSFORM_MATRIX, the map -will take into account the colorCorrection settings.

-

The shading map is for the entire active pixel array, and is not -affected by the crop region specified in the request. Each shading map -entry is the value of the shading compensation map over a specific -pixel on the sensor. Specifically, with a (N x M) resolution shading -map, and an active pixel array size (W x H), shading map entry -(x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at -pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels. -The map is assumed to be bilinearly interpolated between the sample points.

-

The channel order is [R, Geven, Godd, B], where Geven is the green -channel for the even rows of a Bayer pattern, and Godd is the odd rows. -The shading map is stored in a fully interleaved format, and its size -is provided in the camera static metadata by android.lens.info.shadingMapSize.

-

The shading map will generally have on the order of 30-40 rows and columns, -and will be smaller than 64x64.

-

As an example, given a very small map defined as:

-
android.lens.info.shadingMapSize = [ 4, 3 ]
-android.statistics.lensShadingMap =
-[ 1.3, 1.2, 1.15, 1.2,  1.2, 1.2, 1.15, 1.2,
-    1.1, 1.2, 1.2, 1.2,  1.3, 1.2, 1.3, 1.3,
-  1.2, 1.2, 1.25, 1.1,  1.1, 1.1, 1.1, 1.0,
-    1.0, 1.0, 1.0, 1.0,  1.2, 1.3, 1.25, 1.2,
-  1.3, 1.2, 1.2, 1.3,   1.2, 1.15, 1.1, 1.2,
-    1.2, 1.1, 1.0, 1.2,  1.3, 1.15, 1.2, 1.3 ]
-
-

The low-resolution scaling map images for each channel are -(displayed using nearest-neighbor interpolation):

-

Red lens shading map -Green (even rows) lens shading map -Green (odd rows) lens shading map -Blue lens shading map

-

As a visualization only, inverting the full-color map to recover an -image of a gray wall (using bicubic interpolation for visual quality) -as captured by the sensor gives:

-

Image of a uniform white wall (inverse shading map)

-

Note that the RAW image data might be subject to lens shading correction not reported on this map. Query android.sensor.info.lensShadingApplied to see if the RAW image data has been subject to lens shading correction. If android.sensor.info.lensShadingApplied is TRUE, the RAW image data is subject to partial or full lens shading correction. In the case that full lens shading correction is applied to RAW images, the gain factor map reported in this key will contain all 1.0 gains. In other words, the map reported in this key is the remaining lens shading that needs to be applied on the RAW image to get images without lens shading artifacts. See android.request.maxNumOutputRaw for a list of RAW image formats.

-
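To make the sampling convention above concrete, a minimal sketch (illustrative only; assumes N, M, W, and H are all greater than 1) that bilinearly interpolates the gain for one channel at an active-array pixel:

#include <algorithm>
#include <vector>

// map holds the android.statistics.lensShadingMap values, fully interleaved
// as 4 floats per sample point in [R, Geven, Godd, B] order, on an N x M
// grid of sample points covering a W x H active pixel array.
float shadingGainAt(const std::vector<float>& map, int n, int m,
                    int w, int h, int channel, int px, int py) {
    // Invert the mapping given above: sample (x, y) sits at pixel
    // (((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y), so pixel (px, py) has
    // fractional grid coordinates (px*(N-1)/(W-1), py*(M-1)/(H-1)).
    const float gx = px * float(n - 1) / float(w - 1);
    const float gy = py * float(m - 1) / float(h - 1);
    const int x0 = static_cast<int>(gx), y0 = static_cast<int>(gy);
    const int x1 = std::min(x0 + 1, n - 1), y1 = std::min(y0 + 1, m - 1);
    const float fx = gx - x0, fy = gy - y0;
    auto at = [&](int x, int y) { return map[(y * n + x) * 4 + channel]; };
    // Bilinear interpolation between the four surrounding sample points.
    const float top = at(x0, y0) * (1.0f - fx) + at(x1, y0) * fx;
    const float bottom = at(x0, y1) * (1.0f - fx) + at(x1, y1) * fx;
    return top * (1.0f - fy) + bottom * fy;
}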
HAL Implementation Details
-

The lens shading map calculation may depend on exposure and white balance statistics. When AE and AWB are in AUTO modes (android.control.aeMode != OFF and android.control.awbMode != OFF), the HAL may have all the information it needs to generate the most accurate lens shading map. When AE or AWB is in manual mode (android.control.aeMode == OFF or android.control.awbMode == OFF), the shading map may be adversely impacted by manual exposure or white balance parameters. To avoid generating unreliable shading map data, the HAL may choose to lock the shading map with the latest known good map generated when AE and AWB were in AUTO modes.

-
- android.statistics.predictedColorGains - - float - x - - - 4 - - [hidden] - - - - [deprecated] - -
A 1D array of floats for 4 color channel gains
- - -
-

The best-fit color channel gains calculated -by the camera device's statistics units for the current output frame.

-
- -

Deprecated. Do not use.

-
Details
-

This may be different than the gains used for this frame, -since statistics processing on data from a new frame -typically completes after the transform has already been -applied to that frame.

-

The 4 channel gains are defined in Bayer domain, -see android.colorCorrection.gains for details.

-

This value should always be calculated by the auto-white balance (AWB) block, -regardless of the android.control.* current values.

-
- android.statistics.predictedColorTransform - - rational - x - - - 3 x 3 - - [hidden] - - - - [deprecated] - -
3x3 rational matrix in row-major order
- - -
-

The best-fit color transform matrix estimate -calculated by the camera device's statistics units for the current -output frame.

-
- -

Deprecated. Do not use.

-
Details
-

The camera device will provide the estimate from its -statistics unit on the white balance transforms to use -for the next frame. These are the values the camera device believes -are the best fit for the current output frame. This may -be different than the transform used for this frame, since -statistics processing on data from a new frame typically -completes after the transform has already been applied to -that frame.

-

These estimates must be provided for all frames, even if -capture settings and color transforms are set by the application.

-

This value should always be calculated by the auto-white balance (AWB) block, -regardless of the android.control.* current values.

-
- android.statistics.sceneFlicker - - byte - - [public] - - - [full] - - - -
    -
  • - NONE -

    The camera device does not detect any flickering illumination -in the current scene.

    -
  • -
  • - 50HZ -

    The camera device detects illumination flickering at 50Hz -in the current scene.

    -
  • -
  • - 60HZ -

    The camera device detects illumination flickering at 60Hz -in the current scene.

    -
  • -
- -
-

The camera device estimated scene illumination lighting -frequency.

-
- -
Details
-

Many light sources, such as most fluorescent lights, flicker at a rate -that depends on the local utility power standards. This flicker must be -accounted for by auto-exposure routines to avoid artifacts in captured images. -The camera device uses this entry to tell the application what the scene -illuminant frequency is.

-

When manual exposure control is enabled -(android.control.aeMode == OFF or android.control.mode == -OFF), the android.control.aeAntibandingMode doesn't perform -antibanding, and the application can ensure it selects -exposure times that do not cause banding issues by looking -into this metadata field. See -android.control.aeAntibandingMode for more details.

-

Reports NONE if there doesn't appear to be flickering illumination.

-
- android.statistics.hotPixelMapMode - - byte - - [public as boolean] - - - - - -
    -
  • - OFF -

    Hot pixel map production is disabled.

    -
  • -
  • - ON -

    Hot pixel map production is enabled.

    -
  • -
- -
-

Operating mode for hot pixel map generation.

-
- -

android.statistics.info.availableHotPixelMapModes

-
Details
-

If set to true, a hot pixel map is returned in android.statistics.hotPixelMap. -If set to false, no hot pixel map will be returned.

-
- android.statistics.hotPixelMap - - int32 - x - - - 2 x n - - [public as point] - - - - -
list of coordinates based on android.sensor.pixelArraySize
- - -
-

List of (x, y) coordinates of hot/defective pixels on the sensor.

-
- -

n <= number of pixels on the sensor. -The (x, y) coordinates must be bounded by -android.sensor.info.pixelArraySize.

-
Details
-

A coordinate (x, y) must lie between (0, 0), and -(width - 1, height - 1) (inclusive), which are the top-left and -bottom-right of the pixel array, respectively. The width and -height dimensions are given in android.sensor.info.pixelArraySize. -This may include hot pixels that lie outside of the active array -bounds given by android.sensor.info.activeArraySize.

-
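A minimal sketch of the bound described above (the method name is assumed purely for illustration; width and height come from android.sensor.info.pixelArraySize):

// Illustrative sketch: a hot-pixel coordinate must lie between (0, 0)
// and (width - 1, height - 1), inclusive.
static boolean isValidHotPixelCoordinate(int x, int y, int width, int height) {
    return x >= 0 && x <= width - 1 && y >= 0 && y <= height - 1;
}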
HAL Implementation Details
-

A hotpixel map contains the coordinates of pixels on the camera sensor that do not report valid values (usually due to defects in the camera sensor). This includes pixels that are stuck at certain values, or have a response that does not accurately encode the incoming light from the scene.

-

To avoid performance issues, there should be significantly fewer hot -pixels than actual pixels on the camera sensor.

-
- android.statistics.lensShadingMapMode - - byte - - [public] - - - [full] - - - -
    -
  • - OFF -

    Do not include a lens shading map in the capture result.

    -
  • -
  • - ON -

    Include a lens shading map in the capture result.

    -
  • -
- -
-

Whether the camera device will output the lens -shading map in output result metadata.

-
- -

android.statistics.info.availableLensShadingMapModes

-
Details
-

When set to ON, -android.statistics.lensShadingMap will be provided in -the output result metadata.

-

ON is always supported on devices with the RAW capability.

-
tonemap
controls
Property NameTypeDescriptionUnitsRangeTags
- android.tonemap.curveBlue - - float - x - - - n x 2 - - [ndk_public] - - - [full] - - -
1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints.
- - -
-

Tonemapping / contrast / gamma curve for the blue -channel, to use when android.tonemap.mode is -CONTRAST_CURVE.

-
- -
Details
-

See android.tonemap.curveRed for more details.

-
- android.tonemap.curveGreen - - float - x - - - n x 2 - - [ndk_public] - - - [full] - - -
1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints.
- - -
-

Tonemapping / contrast / gamma curve for the green -channel, to use when android.tonemap.mode is -CONTRAST_CURVE.

-
- -
Details
-

See android.tonemap.curveRed for more details.

-
- android.tonemap.curveRed - - float - x - - - n x 2 - - [ndk_public] - - - [full] - - -
1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints.
- - -
-

Tonemapping / contrast / gamma curve for the red -channel, to use when android.tonemap.mode is -CONTRAST_CURVE.

-
- -

0-1 on both input and output coordinates, normalized -as a floating-point value such that 0 == black and 1 == white.

-
Details
-

Each channel's curve is defined by an array of control points:

-
android.tonemap.curveRed =
-  [ P0in, P0out, P1in, P1out, P2in, P2out, P3in, P3out, ..., PNin, PNout ]
-2 <= N <= android.tonemap.maxCurvePoints
-

These are sorted in order of increasing Pin; it is -required that input values 0.0 and 1.0 are included in the list to -define a complete mapping. For input values between control points, -the camera device must linearly interpolate between the control -points.

-
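As a purely illustrative sketch (not part of the HAL interface; the method name and flattened-array layout are assumptions), the required linear interpolation between control points could look like this in Java:

// Illustrative sketch: evaluate a flattened tonemap curve
// [P0in, P0out, P1in, P1out, ...] at a normalized input value,
// using linear interpolation between adjacent control points.
static float evaluateTonemapCurve(float[] curve, float in) {
    for (int i = 0; i + 3 < curve.length; i += 2) {
        float in0 = curve[i], out0 = curve[i + 1];
        float in1 = curve[i + 2], out1 = curve[i + 3];
        if (in >= in0 && in <= in1) {
            float t = (in1 == in0) ? 0.0f : (in - in0) / (in1 - in0);
            return out0 + t * (out1 - out0);
        }
    }
    // Inputs 0.0 and 1.0 are required to be present, so this is only
    // reached for out-of-range inputs; clamp to the curve endpoints.
    return (in <= curve[0]) ? curve[1] : curve[curve.length - 1];
}

For example, evaluateTonemapCurve(new float[] { 0f, 0f, 1.0f, 1.0f }, 0.25f) returns 0.25 for the linear mapping shown below.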

Each curve can have an independent number of points, and the number -of points can be less than max (that is, the request doesn't have to -always provide a curve with number of points equivalent to -android.tonemap.maxCurvePoints).

-

A few examples, and their corresponding graphical mappings; these -only specify the red channel and the precision is limited to 4 -digits, for conciseness.

-

Linear mapping:

-
android.tonemap.curveRed = [ 0, 0, 1.0, 1.0 ]
-
-

Linear mapping curve

-

Invert mapping:

-
android.tonemap.curveRed = [ 0, 1.0, 1.0, 0 ]
-
-

Inverting mapping curve

-

Gamma 1/2.2 mapping, with 16 control points:

-
android.tonemap.curveRed = [
-  0.0000, 0.0000, 0.0667, 0.2920, 0.1333, 0.4002, 0.2000, 0.4812,
-  0.2667, 0.5484, 0.3333, 0.6069, 0.4000, 0.6594, 0.4667, 0.7072,
-  0.5333, 0.7515, 0.6000, 0.7928, 0.6667, 0.8317, 0.7333, 0.8685,
-  0.8000, 0.9035, 0.8667, 0.9370, 0.9333, 0.9691, 1.0000, 1.0000 ]
-
-

Gamma = 1/2.2 tonemapping curve

-

Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:

-
android.tonemap.curveRed = [
-  0.0000, 0.0000, 0.0667, 0.2864, 0.1333, 0.4007, 0.2000, 0.4845,
-  0.2667, 0.5532, 0.3333, 0.6125, 0.4000, 0.6652, 0.4667, 0.7130,
-  0.5333, 0.7569, 0.6000, 0.7977, 0.6667, 0.8360, 0.7333, 0.8721,
-  0.8000, 0.9063, 0.8667, 0.9389, 0.9333, 0.9701, 1.0000, 1.0000 ]
-
-

sRGB tonemapping curve

-
HAL Implementation Details
-

For good quality of mapping, at least 128 control points are -preferred.

-

A typical use case of this would be a gamma-1/2.2 curve, with as many -control points used as are available.

-
- android.tonemap.curve - - float - - [java_public as tonemapCurve] - - [synthetic] - - [full] - - - - - -

Tonemapping / contrast / gamma curve to use when android.tonemap.mode -is CONTRAST_CURVE.

-
- -
Details
-

The tonemapCurve consists of three curves, one each for the red, green, and blue channels. The following example uses the red channel; the same logic applies to the green and blue channels. Each channel's curve is defined by an array of control points:

-
curveRed =
-  [ P0(in, out), P1(in, out), P2(in, out), P3(in, out), ..., PN(in, out) ]
-2 <= N <= android.tonemap.maxCurvePoints
-

These are sorted in order of increasing Pin; it is always -guaranteed that input values 0.0 and 1.0 are included in the list to -define a complete mapping. For input values between control points, -the camera device must linearly interpolate between the control -points.

-

Each curve can have an independent number of points, and the number -of points can be less than max (that is, the request doesn't have to -always provide a curve with number of points equivalent to -android.tonemap.maxCurvePoints).

-

A few examples, and their corresponding graphical mappings; these -only specify the red channel and the precision is limited to 4 -digits, for conciseness.

-

Linear mapping:

-
curveRed = [ (0, 0), (1.0, 1.0) ]
-
-

Linear mapping curve

-

Invert mapping:

-
curveRed = [ (0, 1.0), (1.0, 0) ]
-
-

Inverting mapping curve

-

Gamma 1/2.2 mapping, with 16 control points:

-
curveRed = [
-  (0.0000, 0.0000), (0.0667, 0.2920), (0.1333, 0.4002), (0.2000, 0.4812),
-  (0.2667, 0.5484), (0.3333, 0.6069), (0.4000, 0.6594), (0.4667, 0.7072),
-  (0.5333, 0.7515), (0.6000, 0.7928), (0.6667, 0.8317), (0.7333, 0.8685),
-  (0.8000, 0.9035), (0.8667, 0.9370), (0.9333, 0.9691), (1.0000, 1.0000) ]
-
-

Gamma = 1/2.2 tonemapping curve

-

Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:

-
curveRed = [
-  (0.0000, 0.0000), (0.0667, 0.2864), (0.1333, 0.4007), (0.2000, 0.4845),
-  (0.2667, 0.5532), (0.3333, 0.6125), (0.4000, 0.6652), (0.4667, 0.7130),
-  (0.5333, 0.7569), (0.6000, 0.7977), (0.6667, 0.8360), (0.7333, 0.8721),
-  (0.8000, 0.9063), (0.8667, 0.9389), (0.9333, 0.9701), (1.0000, 1.0000) ]
-
-

sRGB tonemapping curve

-
HAL Implementation Details
-

This entry is created by the framework from the curveRed, curveGreen and -curveBlue entries.

-
- android.tonemap.mode - - byte - - [public] - - - [full] - - - -
    -
  • - CONTRAST_CURVE -

    Use the tone mapping curve specified in -the android.tonemap.curve* entries.

    -

    All color enhancement and tonemapping must be disabled, except -for applying the tonemapping curve specified by -android.tonemap.curve.

    -

    Must not slow down frame rate relative to raw -sensor output.

    -
  • -
  • - FAST -

    Advanced gamma mapping and color enhancement may be applied, without -reducing frame rate compared to raw sensor output.

    -
  • -
  • - HIGH_QUALITY -

    High-quality gamma mapping and color enhancement will be applied, at -the cost of possibly reduced frame rate compared to raw sensor output.

    -
  • -
  • - GAMMA_VALUE -

    Use the gamma value specified in android.tonemap.gamma to perform tonemapping.

    -

    All color enhancement and tonemapping must be disabled, except -for applying the tonemapping curve specified by android.tonemap.gamma.

    -

    Must not slow down frame rate relative to raw sensor output.

    -
  • -
  • - PRESET_CURVE -

    Use the preset tonemapping curve specified in android.tonemap.presetCurve to perform tonemapping.

    -

    All color enhancement and tonemapping must be disabled, except -for applying the tonemapping curve specified by -android.tonemap.presetCurve.

    -

    Must not slow down frame rate relative to raw sensor output.

    -
  • -
- -
-

High-level global contrast/gamma/tonemapping control.

-
- -

android.tonemap.availableToneMapModes

-
Details
-

When switching to an application-defined contrast curve by setting -android.tonemap.mode to CONTRAST_CURVE, the curve is defined -per-channel with a set of (in, out) points that specify the -mapping from input high-bit-depth pixel value to the output -low-bit-depth value. Since the actual pixel ranges of both input -and output may change depending on the camera pipeline, the values -are specified by normalized floating-point numbers.

-

More-complex color mapping operations such as 3D color look-up -tables, selective chroma enhancement, or other non-linear color -transforms will be disabled when android.tonemap.mode is -CONTRAST_CURVE.

-

When using either FAST or HIGH_QUALITY, the camera device will -emit its own tonemap curve in android.tonemap.curve. -These values are always available, and as close as possible to the -actually used nonlinear/nonglobal transforms.

-

If a request is sent with CONTRAST_CURVE with the camera device's -provided curve in FAST or HIGH_QUALITY, the image's tonemap will be -roughly the same.

-
- android.tonemap.gamma - - float - - [public] - - - - - - - -

Tonemapping curve to use when android.tonemap.mode is -GAMMA_VALUE

-
- -
Details
-

The tonemap curve will be defined by the following formula: OUT = pow(IN, 1.0 / gamma), where IN and OUT are the input and output pixel values scaled to the range [0.0, 1.0], pow is the power function, and gamma is the gamma value specified by this key.

-
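As a minimal illustrative sketch (the method name is an assumption, not part of any API), the formula above maps to:

// Illustrative sketch: apply the GAMMA_VALUE tonemap formula
// OUT = pow(IN, 1.0 / gamma) to one normalized [0.0, 1.0] pixel value.
static float applyGammaTonemap(float in, float gamma) {
    return (float) Math.pow(in, 1.0 / gamma);
}

With gamma == 2.2, an input of 0.5 maps to roughly 0.73, matching the brightening behavior of the 1/2.2 curves shown elsewhere in this section.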

The same curve will be applied to all color channels. The camera device -may clip the input gamma value to its supported range. The actual applied -value will be returned in capture result.

-

The valid range of the gamma value varies across devices, but values within [1.0, 5.0] are guaranteed not to be clipped.

-
- android.tonemap.presetCurve - - byte - - [public] - - - - - -
    -
  • - SRGB -

    Tonemapping curve is defined by sRGB

    -
  • -
  • - REC709 -

    Tonemapping curve is defined by ITU-R BT.709

    -
  • -
- -
-

Tonemapping curve to use when android.tonemap.mode is -PRESET_CURVE

-
- -
Details
-

The tonemap curve will be defined by the specified standard.

-

sRGB (approximated by 16 control points):

-

sRGB tonemapping curve

-

Rec. 709 (approximated by 16 control points):

-

Rec. 709 tonemapping curve

-

Note that the figures above show a 16-control-point approximation of the preset curves. Camera devices may apply a different approximation to the curve.

-
static
Property NameTypeDescriptionUnitsRangeTags
- android.tonemap.maxCurvePoints - - int32 - - [public] - - - [full] - - - - - -

Maximum number of supported points in the -tonemap curve that can be used for android.tonemap.curve.

-
- -
Details
-

If the actual number of points provided by the application (in android.tonemap.curve*) is -less than this maximum, the camera device will resample the curve to its internal -representation, using linear interpolation.

-

The output curves in the result metadata may have a different number -of points than the input curves, and will represent the actual -hardware curves used as closely as possible when linearly interpolated.

-
HAL Implementation Details
-

This value must be at least 64; a value of at least 128 is recommended.

-
- android.tonemap.availableToneMapModes - - byte - x - - - n - - [public as enumList] - - - [full] - - -
list of enums
- - -
-

List of tonemapping modes for android.tonemap.mode that are supported by this camera -device.

-
- -

Any value listed in android.tonemap.mode

-
Details
-

Camera devices that support the MANUAL_POST_PROCESSING capability will always contain -at least one of below mode combinations:

-
    -
  • CONTRAST_CURVE, FAST and HIGH_QUALITY
  • -
  • GAMMA_VALUE, PRESET_CURVE, FAST and HIGH_QUALITY
  • -
-

This includes all FULL level devices.

-
HAL Implementation Details
-

HAL must support both FAST and HIGH_QUALITY if automatic tonemap control is available -on the camera device, but the underlying implementation can be the same for both modes. -That is, if the highest quality implementation on the camera device does not slow down -capture rate, then FAST and HIGH_QUALITY will generate the same output.

-
dynamic
Property NameTypeDescriptionUnitsRangeTags
- android.tonemap.curveBlue - - float - x - - - n x 2 - - [ndk_public] - - - [full] - - -
1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints.
- - -
-

Tonemapping / contrast / gamma curve for the blue -channel, to use when android.tonemap.mode is -CONTRAST_CURVE.

-
- -
Details
-

See android.tonemap.curveRed for more details.

-
- android.tonemap.curveGreen - - float - x - - - n x 2 - - [ndk_public] - - - [full] - - -
1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints.
- - -
-

Tonemapping / contrast / gamma curve for the green -channel, to use when android.tonemap.mode is -CONTRAST_CURVE.

-
- -
Details
-

See android.tonemap.curveRed for more details.

-
- android.tonemap.curveRed - - float - x - - - n x 2 - - [ndk_public] - - - [full] - - -
1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints.
- - -
-

Tonemapping / contrast / gamma curve for the red -channel, to use when android.tonemap.mode is -CONTRAST_CURVE.

-
- -

0-1 on both input and output coordinates, normalized -as a floating-point value such that 0 == black and 1 == white.

-
Details
-

Each channel's curve is defined by an array of control points:

-
android.tonemap.curveRed =
-  [ P0in, P0out, P1in, P1out, P2in, P2out, P3in, P3out, ..., PNin, PNout ]
-2 <= N <= android.tonemap.maxCurvePoints
-

These are sorted in order of increasing Pin; it is -required that input values 0.0 and 1.0 are included in the list to -define a complete mapping. For input values between control points, -the camera device must linearly interpolate between the control -points.

-

Each curve can have an independent number of points, and the number -of points can be less than max (that is, the request doesn't have to -always provide a curve with number of points equivalent to -android.tonemap.maxCurvePoints).

-

A few examples, and their corresponding graphical mappings; these -only specify the red channel and the precision is limited to 4 -digits, for conciseness.

-

Linear mapping:

-
android.tonemap.curveRed = [ 0, 0, 1.0, 1.0 ]
-
-

Linear mapping curve

-

Invert mapping:

-
android.tonemap.curveRed = [ 0, 1.0, 1.0, 0 ]
-
-

Inverting mapping curve

-

Gamma 1/2.2 mapping, with 16 control points:

-
android.tonemap.curveRed = [
-  0.0000, 0.0000, 0.0667, 0.2920, 0.1333, 0.4002, 0.2000, 0.4812,
-  0.2667, 0.5484, 0.3333, 0.6069, 0.4000, 0.6594, 0.4667, 0.7072,
-  0.5333, 0.7515, 0.6000, 0.7928, 0.6667, 0.8317, 0.7333, 0.8685,
-  0.8000, 0.9035, 0.8667, 0.9370, 0.9333, 0.9691, 1.0000, 1.0000 ]
-
-

Gamma = 1/2.2 tonemapping curve

-

Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:

-
android.tonemap.curveRed = [
-  0.0000, 0.0000, 0.0667, 0.2864, 0.1333, 0.4007, 0.2000, 0.4845,
-  0.2667, 0.5532, 0.3333, 0.6125, 0.4000, 0.6652, 0.4667, 0.7130,
-  0.5333, 0.7569, 0.6000, 0.7977, 0.6667, 0.8360, 0.7333, 0.8721,
-  0.8000, 0.9063, 0.8667, 0.9389, 0.9333, 0.9701, 1.0000, 1.0000 ]
-
-

sRGB tonemapping curve

-
HAL Implementation Details
-

For good quality of mapping, at least 128 control points are -preferred.

-

A typical use case of this would be a gamma-1/2.2 curve, with as many -control points used as are available.

-
- android.tonemap.curve - - float - - [java_public as tonemapCurve] - - [synthetic] - - [full] - - - - - -

Tonemapping / contrast / gamma curve to use when android.tonemap.mode -is CONTRAST_CURVE.

-
- -
Details
-

The tonemapCurve consists of three curves, one each for the red, green, and blue channels. The following example uses the red channel; the same logic applies to the green and blue channels. Each channel's curve is defined by an array of control points:

-
curveRed =
-  [ P0(in, out), P1(in, out), P2(in, out), P3(in, out), ..., PN(in, out) ]
-2 <= N <= android.tonemap.maxCurvePoints
-

These are sorted in order of increasing Pin; it is always -guaranteed that input values 0.0 and 1.0 are included in the list to -define a complete mapping. For input values between control points, -the camera device must linearly interpolate between the control -points.

-

Each curve can have an independent number of points, and the number -of points can be less than max (that is, the request doesn't have to -always provide a curve with number of points equivalent to -android.tonemap.maxCurvePoints).

-

A few examples, and their corresponding graphical mappings; these -only specify the red channel and the precision is limited to 4 -digits, for conciseness.

-

Linear mapping:

-
curveRed = [ (0, 0), (1.0, 1.0) ]
-
-

Linear mapping curve

-

Invert mapping:

-
curveRed = [ (0, 1.0), (1.0, 0) ]
-
-

Inverting mapping curve

-

Gamma 1/2.2 mapping, with 16 control points:

-
curveRed = [
-  (0.0000, 0.0000), (0.0667, 0.2920), (0.1333, 0.4002), (0.2000, 0.4812),
-  (0.2667, 0.5484), (0.3333, 0.6069), (0.4000, 0.6594), (0.4667, 0.7072),
-  (0.5333, 0.7515), (0.6000, 0.7928), (0.6667, 0.8317), (0.7333, 0.8685),
-  (0.8000, 0.9035), (0.8667, 0.9370), (0.9333, 0.9691), (1.0000, 1.0000) ]
-
-

Gamma = 1/2.2 tonemapping curve

-

Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:

-
curveRed = [
-  (0.0000, 0.0000), (0.0667, 0.2864), (0.1333, 0.4007), (0.2000, 0.4845),
-  (0.2667, 0.5532), (0.3333, 0.6125), (0.4000, 0.6652), (0.4667, 0.7130),
-  (0.5333, 0.7569), (0.6000, 0.7977), (0.6667, 0.8360), (0.7333, 0.8721),
-  (0.8000, 0.9063), (0.8667, 0.9389), (0.9333, 0.9701), (1.0000, 1.0000) ]
-
-

sRGB tonemapping curve

-
HAL Implementation Details
-

This entry is created by the framework from the curveRed, curveGreen and -curveBlue entries.

-
- android.tonemap.mode - - byte - - [public] - - - [full] - - - -
    -
  • - CONTRAST_CURVE -

    Use the tone mapping curve specified in -the android.tonemap.curve* entries.

    -

    All color enhancement and tonemapping must be disabled, except -for applying the tonemapping curve specified by -android.tonemap.curve.

    -

    Must not slow down frame rate relative to raw -sensor output.

    -
  • -
  • - FAST -

    Advanced gamma mapping and color enhancement may be applied, without -reducing frame rate compared to raw sensor output.

    -
  • -
  • - HIGH_QUALITY -

    High-quality gamma mapping and color enhancement will be applied, at -the cost of possibly reduced frame rate compared to raw sensor output.

    -
  • -
  • - GAMMA_VALUE -

    Use the gamma value specified in android.tonemap.gamma to perform tonemapping.

    -

    All color enhancement and tonemapping must be disabled, except -for applying the tonemapping curve specified by android.tonemap.gamma.

    -

    Must not slow down frame rate relative to raw sensor output.

    -
  • -
  • - PRESET_CURVE -

    Use the preset tonemapping curve specified in android.tonemap.presetCurve to perform tonemapping.

    -

    All color enhancement and tonemapping must be disabled, except -for applying the tonemapping curve specified by -android.tonemap.presetCurve.

    -

    Must not slow down frame rate relative to raw sensor output.

    -
  • -
- -
-

High-level global contrast/gamma/tonemapping control.

-
- -

android.tonemap.availableToneMapModes

-
Details
-

When switching to an application-defined contrast curve by setting -android.tonemap.mode to CONTRAST_CURVE, the curve is defined -per-channel with a set of (in, out) points that specify the -mapping from input high-bit-depth pixel value to the output -low-bit-depth value. Since the actual pixel ranges of both input -and output may change depending on the camera pipeline, the values -are specified by normalized floating-point numbers.

-

More-complex color mapping operations such as 3D color look-up -tables, selective chroma enhancement, or other non-linear color -transforms will be disabled when android.tonemap.mode is -CONTRAST_CURVE.

-

When using either FAST or HIGH_QUALITY, the camera device will -emit its own tonemap curve in android.tonemap.curve. -These values are always available, and as close as possible to the -actually used nonlinear/nonglobal transforms.

-

If a request is sent with CONTRAST_CURVE with the camera device's -provided curve in FAST or HIGH_QUALITY, the image's tonemap will be -roughly the same.

-
- android.tonemap.gamma - - float - - [public] - - - - - - - -

Tonemapping curve to use when android.tonemap.mode is -GAMMA_VALUE

-
- -
Details
-

The tonemap curve will be defined by the following formula: OUT = pow(IN, 1.0 / gamma), where IN and OUT are the input and output pixel values scaled to the range [0.0, 1.0], pow is the power function, and gamma is the gamma value specified by this key.

-

The same curve will be applied to all color channels. The camera device -may clip the input gamma value to its supported range. The actual applied -value will be returned in capture result.

-

The valid range of the gamma value varies across devices, but values within [1.0, 5.0] are guaranteed not to be clipped.

-
- android.tonemap.presetCurve - - byte - - [public] - - - - - -
    -
  • - SRGB -

    Tonemapping curve is defined by sRGB

    -
  • -
  • - REC709 -

    Tonemapping curve is defined by ITU-R BT.709

    -
  • -
- -
-

Tonemapping curve to use when android.tonemap.mode is -PRESET_CURVE

-
- -
Details
-

The tonemap curve will be defined by the specified standard.

-

sRGB (approximated by 16 control points):

-

sRGB tonemapping curve

-

Rec. 709 (approximated by 16 control points):

-

Rec. 709 tonemapping curve

-

Note that the figures above show a 16-control-point approximation of the preset curves. Camera devices may apply a different approximation to the curve.

-
led
controls
Property NameTypeDescriptionUnitsRangeTags
- android.led.transmit - - byte - - [hidden as boolean] - - - - - -
    -
  • - OFF -
  • -
  • - ON -
  • -
- -
-

This LED is nominally used to indicate to the user -that the camera is powered on and may be streaming images back to the -Application Processor. In certain rare circumstances, the OS may -disable this when video is processed locally and not transmitted to -any untrusted applications.

-

In particular, the LED must always be on when the data could be -transmitted off the device. The LED should always be on whenever -data is stored locally on the device.

-

The LED may be off if a trusted application is using the data in a way that doesn't violate the above rules.

-
- -
dynamic
Property NameTypeDescriptionUnitsRangeTags
- android.led.transmit - - byte - - [hidden as boolean] - - - - - -
    -
  • - OFF -
  • -
  • - ON -
  • -
- -
-

This LED is nominally used to indicate to the user -that the camera is powered on and may be streaming images back to the -Application Processor. In certain rare circumstances, the OS may -disable this when video is processed locally and not transmitted to -any untrusted applications.

-

In particular, the LED must always be on when the data could be -transmitted off the device. The LED should always be on whenever -data is stored locally on the device.

-

The LED may be off if a trusted application is using the data in a way that doesn't violate the above rules.

-
- -
static
Property NameTypeDescriptionUnitsRangeTags
- android.led.availableLeds - - byte - x - - - n - - [hidden] - - - - - - - - -

A list of camera LEDs that are available on this system.

-
- -
info
static
Property NameTypeDescriptionUnitsRangeTags
- android.info.supportedHardwareLevel - - byte - - [public] - - - [legacy] - - - -
    -
  • - LIMITED -

    This camera device does not have enough capabilities to qualify as a FULL device or -better.

    -

    Only the stream configurations listed in the LEGACY and LIMITED tables in the -createCaptureSession documentation are guaranteed to be supported.

    -

    All LIMITED devices support the BACKWARDS_COMPATIBLE capability, indicating basic -support for color image capture. The only exception is that the device may -alternatively support only the DEPTH_OUTPUT capability, if it can only output depth -measurements and not color images.

    -

    LIMITED devices and above require the use of android.control.aePrecaptureTrigger -to lock exposure metering (and calculate flash power, for cameras with flash) before -capturing a high-quality still image.

    -

    A LIMITED device that only lists the BACKWARDS_COMPATIBLE capability is only -required to support full-automatic operation and post-processing (OFF is not -supported for android.control.aeMode, android.control.afMode, or -android.control.awbMode)

    -

    Additional capabilities may optionally be supported by a LIMITED-level device, and -can be checked for in android.request.availableCapabilities.

    -
  • -
  • - FULL -

    This camera device is capable of supporting advanced imaging applications.

    -

    The stream configurations listed in the FULL, LEGACY and LIMITED tables in the -createCaptureSession documentation are guaranteed to be supported.

    -

    A FULL device will support the capabilities below:

    - -

    Note: -Pre-API level 23, FULL devices also supported arbitrary cropping region -(android.scaler.croppingType == FREEFORM); this requirement was relaxed in API level -23, and FULL devices may only support CENTERED cropping.

    -
  • -
  • - LEGACY -

    This camera device is running in backward compatibility mode.

    -

    Only the stream configurations listed in the LEGACY table in the createCaptureSession -documentation are supported.

    -

    A LEGACY device does not support per-frame control, manual sensor control, manual post-processing, or arbitrary cropping regions, and has relaxed performance constraints. No additional capabilities beyond BACKWARD_COMPATIBLE will ever be listed by a LEGACY device in android.request.availableCapabilities.

    -

    In addition, the android.control.aePrecaptureTrigger is not functional on LEGACY -devices. Instead, every request that includes a JPEG-format output target is treated -as triggering a still capture, internally executing a precapture trigger. This may -fire the flash for flash power metering during precapture, and then fire the flash -for the final capture, if a flash is available on the device and the AE mode is set to -enable the flash.

    -
  • -
  • - 3 -

    This camera device is capable of YUV reprocessing and RAW data capture, in addition to -FULL-level capabilities.

    -

    The stream configurations listed in the LEVEL_3, RAW, FULL, LEGACY and -LIMITED tables in the createCaptureSession -documentation are guaranteed to be supported.

    -

    The following additional capabilities are guaranteed to be supported:

    -
    -
  • -
- -
-

Generally classifies the overall set of the camera device functionality.

-
- -
Details
-

The supported hardware level is a high-level description of the camera device's -capabilities, summarizing several capabilities into one field. Each level adds additional -features to the previous one, and is always a strict superset of the previous level. -The ordering is LEGACY < LIMITED < FULL < LEVEL_3.

-

Starting from LEVEL_3, the level enumerations are guaranteed to be in increasing -numerical value as well. To check if a given device is at least at a given hardware level, -the following code snippet can be used:

-
// Returns true if the device supports the required hardware level, or better.
-boolean isHardwareLevelSupported(CameraCharacteristics c, int requiredLevel) {
-    int deviceLevel = c.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
-    if (deviceLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
-        return requiredLevel == deviceLevel;
-    }
-    // deviceLevel is not LEGACY, can use numerical sort
-    return requiredLevel <= deviceLevel;
-}
-
-

At a high level, the levels are:

-
    -
  • LEGACY devices operate in a backwards-compatibility mode for older - Android devices, and have very limited capabilities.
  • -
  • LIMITED devices represent the - baseline feature set, and may also include additional capabilities that are - subsets of FULL.
  • -
  • FULL devices additionally support per-frame manual control of sensor, flash, lens and - post-processing settings, and image capture at a high rate.
  • -
  • LEVEL_3 devices additionally support YUV reprocessing and RAW image capture, along - with additional output stream configurations.
  • -
-

See the individual level enums for full descriptions of the supported capabilities. The -android.request.availableCapabilities entry describes the device's capabilities at a -finer-grain level, if needed. In addition, many controls have their available settings or -ranges defined in individual CameraCharacteristics entries.

-

Some features are not part of any particular hardware level or capability and must be -queried separately. These include:

- -
HAL Implementation Details
-

The camera 3 HAL device can implement one of three possible operational modes: LIMITED, FULL, and LEVEL_3.

-

FULL support or better is expected from new higher-end devices. Limited -mode has hardware requirements roughly in line with those for a camera HAL device v1 -implementation, and is expected from older or inexpensive devices. Each level is a strict -superset of the previous level, and they share the same essential operational flow.

-

For full details refer to "S3. Operational Modes" in camera3.h

-

Camera HAL3+ must not implement LEGACY mode. It is there for backwards compatibility in -the android.hardware.camera2 user-facing API only on HALv1 devices, and is implemented -by the camera framework code.

-
blackLevel
controls
Property NameTypeDescriptionUnitsRangeTags
- android.blackLevel.lock - - byte - - [public as boolean] - - - [full] - - - -
    -
  • - OFF -
  • -
  • - ON -
  • -
- -
-

Whether black-level compensation is locked -to its current values, or is free to vary.

-
- -
Details
-

When set to true (ON), the values used for black-level -compensation will not change until the lock is set to -false (OFF).

-

Since changes to certain capture parameters (such as -exposure time) may require resetting of black level -compensation, the camera device must report whether setting -the black level lock was successful in the output result -metadata.

-

For example, if a sequence of requests is as follows:

-
    -
  • Request 1: Exposure = 10ms, Black level lock = OFF
  • -
  • Request 2: Exposure = 10ms, Black level lock = ON
  • -
  • Request 3: Exposure = 10ms, Black level lock = ON
  • -
  • Request 4: Exposure = 20ms, Black level lock = ON
  • -
  • Request 5: Exposure = 20ms, Black level lock = ON
  • -
  • Request 6: Exposure = 20ms, Black level lock = ON
  • -
-

And the exposure change in Request 4 requires the camera -device to reset the black level offsets, then the output -result metadata is expected to be:

-
    -
  • Result 1: Exposure = 10ms, Black level lock = OFF
  • -
  • Result 2: Exposure = 10ms, Black level lock = ON
  • -
  • Result 3: Exposure = 10ms, Black level lock = ON
  • -
  • Result 4: Exposure = 20ms, Black level lock = OFF
  • -
  • Result 5: Exposure = 20ms, Black level lock = ON
  • -
  • Result 6: Exposure = 20ms, Black level lock = ON
  • -
-

This indicates to the application that on frame 4, black -levels were reset due to exposure value changes, and pixel -values may not be consistent across captures.

-
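A minimal sketch of how an application could detect such a reset from the sequence above (names are illustrative only, not part of any API):

// Illustrative sketch: the lock was overridden (black levels were reset)
// when the request asked for ON but the result reports OFF.
static boolean blackLevelWasReset(boolean requestedLockOn, boolean resultLockOn) {
    return requestedLockOn && !resultLockOn;
}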

The camera device will maintain the lock to the extent -possible, only overriding the lock to OFF when changes to -other request parameters require a black level recalculation -or reset.

-
HAL Implementation Details
-

If for some reason black level locking is no longer possible -(for example, the analog gain has changed, which forces -black level offsets to be recalculated), then the HAL must -override this request (and it must report 'OFF' when this -does happen) until the next capture for which locking is -possible again.

-
dynamic
Property NameTypeDescriptionUnitsRangeTags
- android.blackLevel.lock - - byte - - [public as boolean] - - - [full] - - - -
    -
  • - OFF -
  • -
  • - ON -
  • -
- -
-

Whether black-level compensation is locked -to its current values, or is free to vary.

-
- -
Details
-

Whether the black level offset was locked for this frame. Should be -ON if android.blackLevel.lock was ON in the capture request, unless -a change in other capture settings forced the camera device to -perform a black level reset.

-
HAL Implementation Details
-

If for some reason black level locking is no longer possible -(for example, the analog gain has changed, which forces -black level offsets to be recalculated), then the HAL must -override this request (and it must report 'OFF' when this -does happen) until the next capture for which locking is -possible again.

-
sync
dynamic
Property NameTypeDescriptionUnitsRangeTags
- android.sync.frameNumber - - int64 - - [ndk_public] - - - [legacy] - - - -
    -
  • - CONVERGING - -1 -

    The current result is not yet fully synchronized to any request.

    -

    Synchronization is in progress, and reading metadata from this -result may include a mix of data that have taken effect since the -last synchronization time.

    -

    In some future result, within android.sync.maxLatency frames, this value will update to the actual frame number the result is guaranteed to be synchronized to (as long as the request settings remain constant).

    -
  • -
  • - UNKNOWN - -2 -

    The current result's synchronization status is unknown.

    -

    The result may have already converged, or it may be in -progress. Reading from this result may include some mix -of settings from past requests.

    -

    After a settings change, the new settings will eventually all -take effect for the output buffers and results. However, this -value will not change when that happens. Altering settings -rapidly may provide outcomes using mixes of settings from recent -requests.

    -

    This value is intended primarily for backwards compatibility with -the older camera implementations (for android.hardware.Camera).

    -
  • -
- -
-

The frame number corresponding to the last request -with which the output result (metadata + buffers) has been fully -synchronized.

-
- -

Either a non-negative value corresponding to a -frame_number, or one of the two enums (CONVERGING / UNKNOWN).

-
Details
-

When a request is submitted to the camera device, there is usually a -delay of several frames before the controls get applied. A camera -device may either choose to account for this delay by implementing a -pipeline and carefully submit well-timed atomic control updates, or -it may start streaming control changes that span over several frame -boundaries.

-

In the latter case, whenever a request's settings change relative to -the previous submitted request, the full set of changes may take -multiple frame durations to fully take effect. Some settings may -take effect sooner (in less frame durations) than others.

-

While a set of control changes are being propagated, this value -will be CONVERGING.

-

Once it is fully known that a set of control changes have been -finished propagating, and the resulting updated control settings -have been read back by the camera device, this value will be set -to a non-negative frame number (corresponding to the request to -which the results have synchronized to).

-

Older camera device implementations may not have a way to detect -when all camera controls have been applied, and will always set this -value to UNKNOWN.

-

FULL capability devices will always have this value set to the -frame number of the request corresponding to this result.

-

Further details:

-
    -
  • Whenever a request differs from the last request, any future -results not yet returned may have this value set to CONVERGING (this -could include any in-progress captures not yet returned by the camera -device, for more details see pipeline considerations below).
  • -
  • Submitting a series of multiple requests that differ from the -previous request (e.g. r1, r2, r3 s.t. r1 != r2 != r3) -moves the new synchronization frame to the last non-repeating -request (using the smallest frame number from the contiguous list of -repeating requests).
  • -
  • Submitting the same request repeatedly will not change this value -to CONVERGING, if it was already a non-negative value.
  • -
  • When this value changes to non-negative, that means that all of the -metadata controls from the request have been applied, all of the -metadata controls from the camera device have been read to the -updated values (into the result), and all of the graphics buffers -corresponding to this result are also synchronized to the request.
  • -
-

Pipeline considerations:

-

Submitting a request with updated controls relative to the previously -submitted requests may also invalidate the synchronization state -of all the results corresponding to currently in-flight requests.

-

In other words, results for this current request and up to -android.request.pipelineMaxDepth prior requests may have their -android.sync.frameNumber change to CONVERGING.

-
HAL Implementation Details
-

Using UNKNOWN here is illegal unless android.sync.maxLatency -is also UNKNOWN.

-

FULL capability devices should simply set this value to the -frame_number of the request this result corresponds to.

-
static
Property NameTypeDescriptionUnitsRangeTags
- android.sync.maxLatency - - int32 - - [public] - - - [legacy] - - - -
    -
  • - PER_FRAME_CONTROL - 0 -

    Every frame has the requests immediately applied.

    -

    Changing controls over multiple requests one after another will -produce results that have those controls applied atomically -each frame.

    -

    All FULL capability devices will have this as their maxLatency.

    -
  • -
  • - UNKNOWN - -1 -

    Each new frame has some subset (potentially the entire set) -of the past requests applied to the camera settings.

    -

    By submitting a series of identical requests, the camera device -will eventually have the camera settings applied, but it is -unknown when that exact point will be.

    -

    All LEGACY capability devices will have this as their maxLatency.

    -
  • -
- -
-

The maximum number of frames that can occur after a request -(different than the previous) has been submitted, and before the -result's state becomes synchronized.

-
- Frame counts - -

A positive value, PER_FRAME_CONTROL, or UNKNOWN.

-
Details
-

This defines the maximum distance (in number of metadata results), -between the frame number of the request that has new controls to apply -and the frame number of the result that has all the controls applied.

-

In other words this acts as an upper boundary for how many frames -must occur before the camera device knows for a fact that the new -submitted camera settings have been applied in outgoing frames.

-
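As a small illustrative sketch (names assumed), the bounded distance can be computed from the frame number of the request that changed settings and the frame number of the first result that reports that request in android.sync.frameNumber:

// Illustrative sketch: the observed sync distance is measured from the
// frame number of the request that changed settings to the frame number
// of the first result whose android.sync.frameNumber reports that request;
// this distance never exceeds android.sync.maxLatency.
static long observedSyncDistance(long firstSyncedResultFrameNumber,
                                 long changedRequestFrameNumber) {
    return firstSyncedResultFrameNumber - changedRequestFrameNumber;
}

In the worked example under HAL Implementation Details below, observedSyncDistance(4, 2) == 2, matching a maxLatency of 2.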
HAL Implementation Details
-

For example if maxLatency was 2,

-
initial request = X (repeating)
-request1 = X
-request2 = Y
-request3 = Y
-request4 = Y
-
-where requestN has frameNumber N, and the first of the repeating
-initial requests has frameNumber F (and F < 1).
-
-initial result = X' + { android.sync.frameNumber == F }
-result1 = X' + { android.sync.frameNumber == F }
-result2 = X' + { android.sync.frameNumber == CONVERGING }
-result3 = X' + { android.sync.frameNumber == CONVERGING }
-result4 = X' + { android.sync.frameNumber == 2 }
-
-where resultN has frameNumber N.
-
-

Since result4 has a frameNumber == 4 and -android.sync.frameNumber == 2, the distance is clearly -4 - 2 = 2.

-

Use frame_count from camera3_request_t instead of -android.request.frameCount or -CaptureResult#getFrameNumber.

-

LIMITED devices are strongly encouraged to use a non-negative -value. If UNKNOWN is used here then app developers do not have a way -to know when sensor settings have been applied.

-
reprocess
controls
Property NameTypeDescriptionUnitsRangeTags
- android.reprocess.effectiveExposureFactor - - float - - [java_public] - - - [limited] - - - - - -

The amount of exposure time increase factor applied to the original output -frame by the application processing before sending for reprocessing.

-
- Relative exposure time increase factor. - -

>= 1.0

-
Details
-

This is optional, and will be supported if the camera device supports YUV_REPROCESSING -capability (android.request.availableCapabilities contains YUV_REPROCESSING).

-

For some YUV reprocessing use cases, the application may choose to filter the original -output frames to effectively reduce the noise to the same level as a frame that was -captured with longer exposure time. To be more specific, assuming the original captured -images were captured with a sensitivity of S and an exposure time of T, the model in -the camera device is that the amount of noise in the image would be approximately what -would be expected if the original capture parameters had been a sensitivity of -S/effectiveExposureFactor and an exposure time of T*effectiveExposureFactor, rather -than S and T respectively. If the captured images were processed by the application -before being sent for reprocessing, then the application may have used image processing -algorithms and/or multi-frame image fusion to reduce the noise in the -application-processed images (input images). By using the effectiveExposureFactor -control, the application can communicate to the camera device the actual noise level -improvement in the application-processed image. With this information, the camera -device can select appropriate noise reduction and edge enhancement parameters to avoid -excessive noise reduction (android.noiseReduction.mode) and insufficient edge -enhancement (android.edge.mode) being applied to the reprocessed frames.

-

For example, for multi-frame image fusion use case, the application may fuse -multiple output frames together to a final frame for reprocessing. When N image are -fused into 1 image for reprocessing, the exposure time increase factor could be up to -square root of N (based on a simple photon shot noise model). The camera device will -adjust the reprocessing noise reduction and edge enhancement parameters accordingly to -produce the best quality images.

-
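A minimal sketch of the photon-shot-noise model mentioned above (the helper name is an assumption for illustration):

// Illustrative sketch: averaging N statistically independent frames into
// one reprocess input reduces noise roughly as if exposure time had been
// increased by sqrt(N), so the factor can be set up to that value.
static float effectiveExposureFactorForFusion(int fusedFrameCount) {
    return (float) Math.sqrt(fusedFrameCount);
}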

This is a relative factor; 1.0 indicates the application hasn't processed the input buffer in a way that affects its effective exposure time.

-

This control is only effective for YUV reprocessing capture request. For noise -reduction reprocessing, it is only effective when android.noiseReduction.mode != OFF. -Similarly, for edge enhancement reprocessing, it is only effective when -android.edge.mode != OFF.

-
dynamic
Property NameTypeDescriptionUnitsRangeTags
- android.reprocess.effectiveExposureFactor - - float - - [java_public] - - - [limited] - - - - - -

The amount of exposure time increase factor applied to the original output -frame by the application processing before sending for reprocessing.

-
- Relative exposure time increase factor. - -

>= 1.0

-
Details
-

This is optional, and will be supported if the camera device supports YUV_REPROCESSING -capability (android.request.availableCapabilities contains YUV_REPROCESSING).

-

For some YUV reprocessing use cases, the application may choose to filter the original -output frames to effectively reduce the noise to the same level as a frame that was -captured with longer exposure time. To be more specific, assuming the original captured -images were captured with a sensitivity of S and an exposure time of T, the model in -the camera device is that the amount of noise in the image would be approximately what -would be expected if the original capture parameters had been a sensitivity of -S/effectiveExposureFactor and an exposure time of T*effectiveExposureFactor, rather -than S and T respectively. If the captured images were processed by the application -before being sent for reprocessing, then the application may have used image processing -algorithms and/or multi-frame image fusion to reduce the noise in the -application-processed images (input images). By using the effectiveExposureFactor -control, the application can communicate to the camera device the actual noise level -improvement in the application-processed image. With this information, the camera -device can select appropriate noise reduction and edge enhancement parameters to avoid -excessive noise reduction (android.noiseReduction.mode) and insufficient edge -enhancement (android.edge.mode) being applied to the reprocessed frames.

-

For example, for multi-frame image fusion use case, the application may fuse -multiple output frames together to a final frame for reprocessing. When N image are -fused into 1 image for reprocessing, the exposure time increase factor could be up to -square root of N (based on a simple photon shot noise model). The camera device will -adjust the reprocessing noise reduction and edge enhancement parameters accordingly to -produce the best quality images.

-

This is a relative factor; 1.0 indicates the application hasn't processed the input buffer in a way that affects its effective exposure time.

-

This control is only effective for YUV reprocessing capture request. For noise -reduction reprocessing, it is only effective when android.noiseReduction.mode != OFF. -Similarly, for edge enhancement reprocessing, it is only effective when -android.edge.mode != OFF.

-
static
Property NameTypeDescriptionUnitsRangeTags
- android.reprocess.maxCaptureStall - - int32 - - [java_public] - - - [limited] - - - - - -

The maximal camera capture pipeline stall (in unit of frame count) introduced by a -reprocess capture request.

-
- Number of frames. - -

<= 4

-
Details
-

This key describes the maximal interference that one reprocess (input) request can introduce to the camera device's simultaneous streaming of regular (output) capture requests, including repeating requests.

-

When a reprocessing capture request is submitted while a camera output repeating request (e.g. preview) is being served by the camera device, it may preempt the camera capture pipeline for at least one frame duration so that the camera device is unable to process the following capture request in time for the next sensor start-of-exposure boundary. When this happens, the application may observe a capture time gap (longer than one frame duration) between adjacent capture output frames, which usually exhibits as a preview glitch if the repeating request output targets include a preview surface. This key gives the worst-case number of frame stalls introduced by one reprocess request with any format/size combination.

-

If this key reports 0, it means a reprocess request doesn't introduce any glitch to the -ongoing camera repeating request outputs, as if this reprocess request is never issued.

-

This key is supported if the camera device supports PRIVATE or YUV reprocessing ( -i.e. android.request.availableCapabilities contains PRIVATE_REPROCESSING or -YUV_REPROCESSING).

-
depth
static
Property NameTypeDescriptionUnitsRangeTags
- android.depth.maxDepthSamples - - int32 - - [system] - - - [limited] - - - - - -

Maximum number of points that a depth point cloud may contain.

-
- -
Details
-

If a camera device supports outputting depth range data in the form of a depth point -cloud (ImageFormat#DEPTH_POINT_CLOUD), this is the maximum -number of points an output buffer may contain.

-

Any given buffer may contain between 0 and maxDepthSamples points, inclusive. -If output in the depth point cloud format is not supported, this entry will -not be defined.

-
- android.depth.availableDepthStreamConfigurations - - int32 - x - - - n x 4 - - [ndk_public as streamConfiguration] - - - [limited] - - - -
    -
  • - OUTPUT -
  • -
  • - INPUT -
  • -
- -
-

The available depth dataspace stream -configurations that this camera device supports -(i.e. format, width, height, output/input stream).

-
- -
Details
-

These are output stream configurations for use with -dataSpace HAL_DATASPACE_DEPTH. The configurations are -listed as (format, width, height, input?) tuples.

-

Only devices that support depth output for at least -the HAL_PIXEL_FORMAT_Y16 dense depth map may include -this entry.

-

A device that also supports the HAL_PIXEL_FORMAT_BLOB -sparse depth point cloud must report a single entry for -the format in this list as (HAL_PIXEL_FORMAT_BLOB, -android.depth.maxDepthSamples, 1, OUTPUT) in addition to -the entries for HAL_PIXEL_FORMAT_Y16.

-
- android.depth.availableDepthMinFrameDurations - - int64 - x - - - 4 x n - - [ndk_public as streamConfigurationDuration] - - - [limited] - - - - - -

This lists the minimum frame duration for each -format/size combination for depth output formats.

-
- (format, width, height, ns) x n - -
Details
-

This should correspond to the frame duration when only that -stream is active, with all processing (typically in android.*.mode) -set to either OFF or FAST.

-

When multiple streams are used in a request, the minimum frame -duration will be max(individual stream min durations).

-
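A minimal sketch of that rule (names assumed for illustration):

// Illustrative sketch: with multiple streams in one request, the request's
// minimum frame duration is the maximum of the per-stream minimum durations.
static long requestMinFrameDurationNs(long[] streamMinDurationsNs) {
    long result = 0;
    for (long d : streamMinDurationsNs) {
        result = Math.max(result, d);
    }
    return result;
}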

The minimum frame duration of a stream (of a particular format, size) -is the same regardless of whether the stream is input or output.

-

See android.sensor.frameDuration and -android.scaler.availableStallDurations for more details about -calculating the max frame rate.

-

(Keep in sync with StreamConfigurationMap#getOutputMinFrameDuration)

-
- android.depth.availableDepthStallDurations - - int64 - x - - - 4 x n - - [ndk_public as streamConfigurationDuration] - - - [limited] - - - - - -

This lists the maximum stall duration for each -output format/size combination for depth streams.

-
- (format, width, height, ns) x n - -
Details
-

A stall duration is how much extra time would get added -to the normal minimum frame duration for a repeating request -that has streams with non-zero stall.

-

This functions similarly to -android.scaler.availableStallDurations for depth -streams.

-

All depth output stream formats may have a nonzero stall -duration.

-
- android.depth.depthIsExclusive - - byte - - [public as boolean] - - - [limited] - - - -
    -
  • - FALSE -
  • -
  • - TRUE -
  • -
- -
-

Indicates whether a capture request may target both a -DEPTH16 / DEPTH_POINT_CLOUD output, and normal color outputs (such as -YUV_420_888, JPEG, or RAW) simultaneously.

-
- -
Details
-

If TRUE, including both depth and color outputs in a single -capture request is not supported. An application must interleave color -and depth requests. If FALSE, a single request can target both types -of output.

-

Typically, this restriction exists on camera devices that -need to emit a specific pattern or wavelength of light to -measure depth values, which causes the color image to be -corrupted during depth measurement.

-
- -
-

Tags

- -
- - [ top ] - - - diff --git a/camera/metadata/3.2/types.hal b/camera/metadata/3.2/types.hal index 17d1d5e9ca1650c8c918c5923903473827d47a55..67b4e447d51823646a3038728c140d7f19d78b98 100644 --- a/camera/metadata/3.2/types.hal +++ b/camera/metadata/3.2/types.hal @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016 The Android Open Source Project + * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,13 +14,17 @@ * limitations under the License. */ +/* + * Autogenerated from camera metadata definitions in + * /system/media/camera/docs/metadata_definitions.xml + * *** DO NOT EDIT BY HAND *** + */ + package android.hardware.camera.metadata@3.2; /** * Top level hierarchy definitions for camera metadata. *_INFO sections are for * the static metadata that can be retrived without opening the camera device. - * New sections must be added right before ANDROID_SECTION_COUNT to maintain - * existing enumerations. */ enum CameraMetadataSection : uint32_t { ANDROID_COLOR_CORRECTION, @@ -82,7 +86,7 @@ enum CameraMetadataSection : uint32_t { }; /** - * Hierarchy positions in enum space. All vendor extension tags must be + * Hierarchy positions in enum space. All vendor extension sections must be * defined with tag >= VENDOR_SECTION_START */ enum CameraMetadataSectionStart : uint32_t { @@ -143,1175 +147,2325 @@ enum CameraMetadataSectionStart : uint32_t { }; /** - * Main enum for defining camera metadata tags. New entries must always go - * before the section _END tag to preserve existing enumeration values. In - * addition, the name and type of the tag needs to be added to - * system/media/camera/src/camera_metadata_tag_info.c + * Main enumeration for defining camera metadata tags added in this revision + * + *

Partial documentation is included for each tag; for complete documentation, reference + * '/system/media/camera/docs/docs.html' in the corresponding Android source tree.

*/ enum CameraMetadataTag : uint32_t { + /** android.colorCorrection.mode [dynamic, enum, public] + * + *

The mode control selects how the image data is converted from the + * sensor's native color into linear sRGB color.

+ */ ANDROID_COLOR_CORRECTION_MODE = CameraMetadataSectionStart:ANDROID_COLOR_CORRECTION_START, + /** android.colorCorrection.transform [dynamic, rational[], public] + * + *

A color transform matrix to use to transform + * from sensor RGB color space to output linear sRGB color space.

+ */ ANDROID_COLOR_CORRECTION_TRANSFORM, + /** android.colorCorrection.gains [dynamic, float[], public] + * + *

Gains applying to Bayer raw color channels for + * white-balance.

+ */ ANDROID_COLOR_CORRECTION_GAINS, + /** android.colorCorrection.aberrationMode [dynamic, enum, public] + * + *

Mode of operation for the chromatic aberration correction algorithm.

+ */ ANDROID_COLOR_CORRECTION_ABERRATION_MODE, + /** android.colorCorrection.availableAberrationModes [static, byte[], public] + * + *

List of aberration correction modes for ANDROID_COLOR_CORRECTION_ABERRATION_MODE that are + * supported by this camera device.

+ * + * @see ANDROID_COLOR_CORRECTION_ABERRATION_MODE + */ ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES, ANDROID_COLOR_CORRECTION_END, + /** android.control.aeAntibandingMode [dynamic, enum, public] + * + *

The desired setting for the camera device's auto-exposure + * algorithm's antibanding compensation.

+ */ ANDROID_CONTROL_AE_ANTIBANDING_MODE = CameraMetadataSectionStart:ANDROID_CONTROL_START, + /** android.control.aeExposureCompensation [dynamic, int32, public] + * + *

Adjustment to auto-exposure (AE) target image + * brightness.

+ */ ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, + /** android.control.aeLock [dynamic, enum, public] + * + *

Whether auto-exposure (AE) is currently locked to its latest + * calculated values.

+ */ ANDROID_CONTROL_AE_LOCK, + /** android.control.aeMode [dynamic, enum, public] + * + *

The desired mode for the camera device's + * auto-exposure routine.

+ */ ANDROID_CONTROL_AE_MODE, + /** android.control.aeRegions [dynamic, int32[], public] + * + *

List of metering areas to use for auto-exposure adjustment.

+ */ ANDROID_CONTROL_AE_REGIONS, + /** android.control.aeTargetFpsRange [dynamic, int32[], public] + * + *

Range over which the auto-exposure routine can + * adjust the capture frame rate to maintain good + * exposure.

+ */ ANDROID_CONTROL_AE_TARGET_FPS_RANGE, + /** android.control.aePrecaptureTrigger [dynamic, enum, public] + * + *

Whether the camera device will trigger a precapture + * metering sequence when it processes this request.

+ */ ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, + /** android.control.afMode [dynamic, enum, public] + * + *

Whether auto-focus (AF) is currently enabled, and what + * mode it is set to.

+ */ ANDROID_CONTROL_AF_MODE, + /** android.control.afRegions [dynamic, int32[], public] + * + *

List of metering areas to use for auto-focus.

+ */ ANDROID_CONTROL_AF_REGIONS, + /** android.control.afTrigger [dynamic, enum, public] + * + *

Whether the camera device will trigger autofocus for this request.

+ */ ANDROID_CONTROL_AF_TRIGGER, + /** android.control.awbLock [dynamic, enum, public] + * + *

Whether auto-white balance (AWB) is currently locked to its + * latest calculated values.

+ */ ANDROID_CONTROL_AWB_LOCK, + /** android.control.awbMode [dynamic, enum, public] + * + *

Whether auto-white balance (AWB) is currently setting the color + * transform fields, and what its illumination target + * is.

+ */ ANDROID_CONTROL_AWB_MODE, + /** android.control.awbRegions [dynamic, int32[], public] + * + *

List of metering areas to use for auto-white-balance illuminant + * estimation.

+ */ ANDROID_CONTROL_AWB_REGIONS, + /** android.control.captureIntent [dynamic, enum, public] + * + *

Information to the camera device 3A (auto-exposure, + * auto-focus, auto-white balance) routines about the purpose + * of this capture, to help the camera device to decide optimal 3A + * strategy.

+ */ ANDROID_CONTROL_CAPTURE_INTENT, + /** android.control.effectMode [dynamic, enum, public] + * + *

A special color effect to apply.

+ */ ANDROID_CONTROL_EFFECT_MODE, + /** android.control.mode [dynamic, enum, public] + * + *

Overall mode of 3A (auto-exposure, auto-white-balance, auto-focus) control + * routines.

+ */ ANDROID_CONTROL_MODE, + /** android.control.sceneMode [dynamic, enum, public] + * + *

Control for which scene mode is currently active.

+ */ ANDROID_CONTROL_SCENE_MODE, + /** android.control.videoStabilizationMode [dynamic, enum, public] + * + *

Whether video stabilization is + * active.

+ */ ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, + /** android.control.aeAvailableAntibandingModes [static, byte[], public] + * + *

List of auto-exposure antibanding modes for ANDROID_CONTROL_AE_ANTIBANDING_MODE that are + * supported by this camera device.

+ * + * @see ANDROID_CONTROL_AE_ANTIBANDING_MODE + */ ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, + /** android.control.aeAvailableModes [static, byte[], public] + * + *

List of auto-exposure modes for ANDROID_CONTROL_AE_MODE that are supported by this camera + * device.

+ * + * @see ANDROID_CONTROL_AE_MODE + */ ANDROID_CONTROL_AE_AVAILABLE_MODES, + /** android.control.aeAvailableTargetFpsRanges [static, int32[], public] + * + *

List of frame rate ranges for ANDROID_CONTROL_AE_TARGET_FPS_RANGE supported by + * this camera device.

+ * + * @see ANDROID_CONTROL_AE_TARGET_FPS_RANGE + */ ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, + /** android.control.aeCompensationRange [static, int32[], public] + * + *

Maximum and minimum exposure compensation values for + * ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, in counts of ANDROID_CONTROL_AE_COMPENSATION_STEP, + * that are supported by this camera device.

+ * + * @see ANDROID_CONTROL_AE_COMPENSATION_STEP + * @see ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION + */ ANDROID_CONTROL_AE_COMPENSATION_RANGE, + /** android.control.aeCompensationStep [static, rational, public] + * + *

Smallest step by which the exposure compensation + * can be changed.

+ */ ANDROID_CONTROL_AE_COMPENSATION_STEP, + /** android.control.afAvailableModes [static, byte[], public] + * + *

List of auto-focus (AF) modes for ANDROID_CONTROL_AF_MODE that are + * supported by this camera device.

+ * + * @see ANDROID_CONTROL_AF_MODE + */ ANDROID_CONTROL_AF_AVAILABLE_MODES, + /** android.control.availableEffects [static, byte[], public] + * + *

List of color effects for ANDROID_CONTROL_EFFECT_MODE that are supported by this camera + * device.

+ * + * @see ANDROID_CONTROL_EFFECT_MODE + */ ANDROID_CONTROL_AVAILABLE_EFFECTS, + /** android.control.availableSceneModes [static, byte[], public] + * + *

List of scene modes for ANDROID_CONTROL_SCENE_MODE that are supported by this camera + * device.

+ * + * @see ANDROID_CONTROL_SCENE_MODE + */ ANDROID_CONTROL_AVAILABLE_SCENE_MODES, + /** android.control.availableVideoStabilizationModes [static, byte[], public] + * + *

List of video stabilization modes for ANDROID_CONTROL_VIDEO_STABILIZATION_MODE + * that are supported by this camera device.

+ * + * @see ANDROID_CONTROL_VIDEO_STABILIZATION_MODE + */ ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, + /** android.control.awbAvailableModes [static, byte[], public] + * + *

List of auto-white-balance modes for ANDROID_CONTROL_AWB_MODE that are supported by this + * camera device.

+ * + * @see ANDROID_CONTROL_AWB_MODE + */ ANDROID_CONTROL_AWB_AVAILABLE_MODES, + /** android.control.maxRegions [static, int32[], ndk_public] + * + *

List of the maximum number of regions that can be used for metering in + * auto-exposure (AE), auto-white balance (AWB), and auto-focus (AF); + * this corresponds to the maximum number of elements in + * ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AWB_REGIONS, + * and ANDROID_CONTROL_AF_REGIONS.

+ * + * @see ANDROID_CONTROL_AE_REGIONS + * @see ANDROID_CONTROL_AF_REGIONS + * @see ANDROID_CONTROL_AWB_REGIONS + */ ANDROID_CONTROL_MAX_REGIONS, + /** android.control.sceneModeOverrides [static, byte[], system] + * + *

Ordered list of auto-exposure, auto-white balance, and auto-focus + * settings to use with each available scene mode.

+ */ ANDROID_CONTROL_SCENE_MODE_OVERRIDES, + /** android.control.aePrecaptureId [dynamic, int32, system] + * + *

The ID sent with the latest + * CAMERA2_TRIGGER_PRECAPTURE_METERING call

+ */ ANDROID_CONTROL_AE_PRECAPTURE_ID, + /** android.control.aeState [dynamic, enum, public] + * + *

Current state of the auto-exposure (AE) algorithm.

+ */ ANDROID_CONTROL_AE_STATE, + /** android.control.afState [dynamic, enum, public] + * + *

Current state of auto-focus (AF) algorithm.

+ */ ANDROID_CONTROL_AF_STATE, + /** android.control.afTriggerId [dynamic, int32, system] + * + *

The ID sent with the latest + * CAMERA2_TRIGGER_AUTOFOCUS call

+ */ ANDROID_CONTROL_AF_TRIGGER_ID, + /** android.control.awbState [dynamic, enum, public] + * + *

Current state of auto-white balance (AWB) algorithm.

+ */ ANDROID_CONTROL_AWB_STATE, + /** android.control.availableHighSpeedVideoConfigurations [static, int32[], hidden] + * + *

List of available high speed video size, fps range and max batch size configurations + * supported by the camera device, in the format of (width, height, fps_min, fps_max, batch_size_max).

+ */ ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS, + /** android.control.aeLockAvailable [static, enum, public] + * + *

Whether the camera device supports ANDROID_CONTROL_AE_LOCK

+ * + * @see ANDROID_CONTROL_AE_LOCK + */ ANDROID_CONTROL_AE_LOCK_AVAILABLE, + /** android.control.awbLockAvailable [static, enum, public] + * + *

Whether the camera device supports ANDROID_CONTROL_AWB_LOCK

+ * + * @see ANDROID_CONTROL_AWB_LOCK + */ ANDROID_CONTROL_AWB_LOCK_AVAILABLE, + /** android.control.availableModes [static, byte[], public] + * + *

List of control modes for ANDROID_CONTROL_MODE that are supported by this camera + * device.

+ * + * @see ANDROID_CONTROL_MODE + */ ANDROID_CONTROL_AVAILABLE_MODES, + /** android.control.postRawSensitivityBoostRange [static, int32[], public] + * + *

Range of boosts for ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST supported + * by this camera device.

+ * + * @see ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST + */ ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE, + /** android.control.postRawSensitivityBoost [dynamic, int32, public] + * + *

The amount of additional sensitivity boost applied to output images + * after RAW sensor data is captured.

+ */ ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, + /** android.control.enableZsl [dynamic, enum, public] + * + *

Allow camera device to enable zero-shutter-lag mode for requests with + * ANDROID_CONTROL_CAPTURE_INTENT == STILL_CAPTURE.

+ * + * @see ANDROID_CONTROL_CAPTURE_INTENT + */ ANDROID_CONTROL_ENABLE_ZSL, ANDROID_CONTROL_END, + /** android.demosaic.mode [controls, enum, system] + * + *

Controls the quality of the demosaicing + * processing.

+ */ ANDROID_DEMOSAIC_MODE = CameraMetadataSectionStart:ANDROID_DEMOSAIC_START, ANDROID_DEMOSAIC_END, + /** android.edge.mode [dynamic, enum, public] + * + *

Operation mode for edge + * enhancement.

+ */ ANDROID_EDGE_MODE = CameraMetadataSectionStart:ANDROID_EDGE_START, + /** android.edge.strength [controls, byte, system] + * + *

Control the amount of edge enhancement + * applied to the images

+ */ ANDROID_EDGE_STRENGTH, + /** android.edge.availableEdgeModes [static, byte[], public] + * + *

List of edge enhancement modes for ANDROID_EDGE_MODE that are supported by this camera + * device.

+ * + * @see ANDROID_EDGE_MODE + */ ANDROID_EDGE_AVAILABLE_EDGE_MODES, ANDROID_EDGE_END, + /** android.flash.firingPower [dynamic, byte, system] + * + *

Power for flash firing/torch

+ */ ANDROID_FLASH_FIRING_POWER = CameraMetadataSectionStart:ANDROID_FLASH_START, + /** android.flash.firingTime [dynamic, int64, system] + * + *

Firing time of flash relative to start of + * exposure

+ */ ANDROID_FLASH_FIRING_TIME, + /** android.flash.mode [dynamic, enum, public] + * + *

The desired mode for the camera device's flash control.

+ */ ANDROID_FLASH_MODE, + /** android.flash.colorTemperature [static, byte, system] + * + *

The x,y whitepoint of the + * flash

+ */ ANDROID_FLASH_COLOR_TEMPERATURE, + /** android.flash.maxEnergy [static, byte, system] + * + *

Max energy output of the flash for a full + * power single flash

+ */ ANDROID_FLASH_MAX_ENERGY, + /** android.flash.state [dynamic, enum, public] + * + *

Current state of the flash + * unit.

+ */ ANDROID_FLASH_STATE, ANDROID_FLASH_END, + /** android.flash.info.available [static, enum, public] + * + *

Whether this camera device has a + * flash unit.

+ */ ANDROID_FLASH_INFO_AVAILABLE = CameraMetadataSectionStart:ANDROID_FLASH_INFO_START, + /** android.flash.info.chargeDuration [static, int64, system] + * + *

Time taken before flash can fire + * again

+ */ ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_FLASH_INFO_END, + /** android.hotPixel.mode [dynamic, enum, public] + * + *

Operational mode for hot pixel correction.

+ */ ANDROID_HOT_PIXEL_MODE = CameraMetadataSectionStart:ANDROID_HOT_PIXEL_START, + /** android.hotPixel.availableHotPixelModes [static, byte[], public] + * + *

List of hot pixel correction modes for ANDROID_HOT_PIXEL_MODE that are supported by this + * camera device.

+ * + * @see ANDROID_HOT_PIXEL_MODE + */ ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES, ANDROID_HOT_PIXEL_END, + /** android.jpeg.gpsCoordinates [dynamic, double[], ndk_public] + * + *

GPS coordinates to include in output JPEG + * EXIF.

+ */ ANDROID_JPEG_GPS_COORDINATES = CameraMetadataSectionStart:ANDROID_JPEG_START, + /** android.jpeg.gpsProcessingMethod [dynamic, byte, ndk_public] + * + *

32 characters describing GPS algorithm to + * include in EXIF.

+ */ ANDROID_JPEG_GPS_PROCESSING_METHOD, + /** android.jpeg.gpsTimestamp [dynamic, int64, ndk_public] + * + *

Time GPS fix was made to include in + * EXIF.

+ */ ANDROID_JPEG_GPS_TIMESTAMP, + /** android.jpeg.orientation [dynamic, int32, public] + * + *

The orientation for a JPEG image.

+ */ ANDROID_JPEG_ORIENTATION, + /** android.jpeg.quality [dynamic, byte, public] + * + *

Compression quality of the final JPEG + * image.

+ */ ANDROID_JPEG_QUALITY, + /** android.jpeg.thumbnailQuality [dynamic, byte, public] + * + *

Compression quality of JPEG + * thumbnail.

+ */ ANDROID_JPEG_THUMBNAIL_QUALITY, + /** android.jpeg.thumbnailSize [dynamic, int32[], public] + * + *

Resolution of embedded JPEG thumbnail.

+ */ ANDROID_JPEG_THUMBNAIL_SIZE, + /** android.jpeg.availableThumbnailSizes [static, int32[], public] + * + *

List of JPEG thumbnail sizes for ANDROID_JPEG_THUMBNAIL_SIZE supported by this + * camera device.

+ * + * @see ANDROID_JPEG_THUMBNAIL_SIZE + */ ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, + /** android.jpeg.maxSize [static, int32, system] + * + *

Maximum size in bytes for the compressed + * JPEG buffer

+ */ ANDROID_JPEG_MAX_SIZE, + /** android.jpeg.size [dynamic, int32, system] + * + *

The size of the compressed JPEG image, in + * bytes

+ */ ANDROID_JPEG_SIZE, ANDROID_JPEG_END, + /** android.lens.aperture [dynamic, float, public] + * + *

The desired lens aperture size, as a ratio of lens focal length to the + * effective aperture diameter.

+ */ ANDROID_LENS_APERTURE = CameraMetadataSectionStart:ANDROID_LENS_START, + /** android.lens.filterDensity [dynamic, float, public] + * + *

The desired setting for the lens neutral density filter(s).

+ */ ANDROID_LENS_FILTER_DENSITY, + /** android.lens.focalLength [dynamic, float, public] + * + *

The desired lens focal length; used for optical zoom.

+ */ ANDROID_LENS_FOCAL_LENGTH, + /** android.lens.focusDistance [dynamic, float, public] + * + *

Desired distance to plane of sharpest focus, + * measured from frontmost surface of the lens.

+ */ ANDROID_LENS_FOCUS_DISTANCE, + /** android.lens.opticalStabilizationMode [dynamic, enum, public] + * + *

Sets whether the camera device uses optical image stabilization (OIS) + * when capturing images.

+ */ ANDROID_LENS_OPTICAL_STABILIZATION_MODE, + /** android.lens.facing [static, enum, public] + * + *

Direction the camera faces relative to + * device screen.

+ */ ANDROID_LENS_FACING, + /** android.lens.poseRotation [dynamic, float[], public] + * + *

The orientation of the camera relative to the sensor + * coordinate system.

+ */ ANDROID_LENS_POSE_ROTATION, + /** android.lens.poseTranslation [dynamic, float[], public] + * + *

Position of the camera optical center.

+ */ ANDROID_LENS_POSE_TRANSLATION, + /** android.lens.focusRange [dynamic, float[], public] + * + *

The range of scene distances that are in + * sharp focus (depth of field).

+ */ ANDROID_LENS_FOCUS_RANGE, + /** android.lens.state [dynamic, enum, public] + * + *

Current lens status.

+ */ ANDROID_LENS_STATE, + /** android.lens.intrinsicCalibration [dynamic, float[], public] + * + *

The parameters for this camera device's intrinsic + * calibration.

+ */ ANDROID_LENS_INTRINSIC_CALIBRATION, + /** android.lens.radialDistortion [dynamic, float[], public] + * + *

The correction coefficients to correct for this camera device's + * radial and tangential lens distortion.

+ */ ANDROID_LENS_RADIAL_DISTORTION, ANDROID_LENS_END, + /** android.lens.info.availableApertures [static, float[], public] + * + *

List of aperture size values for ANDROID_LENS_APERTURE that are + * supported by this camera device.

+ * + * @see ANDROID_LENS_APERTURE + */ ANDROID_LENS_INFO_AVAILABLE_APERTURES = CameraMetadataSectionStart:ANDROID_LENS_INFO_START, + /** android.lens.info.availableFilterDensities [static, float[], public] + * + *

List of neutral density filter values for + * ANDROID_LENS_FILTER_DENSITY that are supported by this camera device.

+ * + * @see ANDROID_LENS_FILTER_DENSITY + */ ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES, + /** android.lens.info.availableFocalLengths [static, float[], public] + * + *

List of focal lengths for ANDROID_LENS_FOCAL_LENGTH that are supported by this camera + * device.

+ * + * @see ANDROID_LENS_FOCAL_LENGTH + */ ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, + /** android.lens.info.availableOpticalStabilization [static, byte[], public] + * + *

List of optical image stabilization (OIS) modes for + * ANDROID_LENS_OPTICAL_STABILIZATION_MODE that are supported by this camera device.

+ * + * @see ANDROID_LENS_OPTICAL_STABILIZATION_MODE + */ ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION, + /** android.lens.info.hyperfocalDistance [static, float, public] + * + *

Hyperfocal distance for this lens.

+ */ ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, + /** android.lens.info.minimumFocusDistance [static, float, public] + * + *

Shortest distance from frontmost surface + * of the lens that can be brought into sharp focus.

+ */ ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, + /** android.lens.info.shadingMapSize [static, int32[], ndk_public] + * + *

Dimensions of lens shading map.

+ */ ANDROID_LENS_INFO_SHADING_MAP_SIZE, + /** android.lens.info.focusDistanceCalibration [static, enum, public] + * + *

The lens focus distance calibration quality.

+ */ ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION, ANDROID_LENS_INFO_END, + /** android.noiseReduction.mode [dynamic, enum, public] + * + *

Mode of operation for the noise reduction algorithm.

+ */ ANDROID_NOISE_REDUCTION_MODE = CameraMetadataSectionStart:ANDROID_NOISE_REDUCTION_START, + /** android.noiseReduction.strength [controls, byte, system] + * + *

Control the amount of noise reduction + * applied to the images

+ */ ANDROID_NOISE_REDUCTION_STRENGTH, + /** android.noiseReduction.availableNoiseReductionModes [static, byte[], public] + * + *

List of noise reduction modes for ANDROID_NOISE_REDUCTION_MODE that are supported + * by this camera device.

+ * + * @see ANDROID_NOISE_REDUCTION_MODE + */ ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES, ANDROID_NOISE_REDUCTION_END, + /** android.quirks.meteringCropRegion [static, byte, system] + * + *

If set to 1, the camera service does not + * scale 'normalized' coordinates with respect to the crop + * region. This applies to metering input (a{e,f,wb}Region + * and output (face rectangles).

+ */ ANDROID_QUIRKS_METERING_CROP_REGION = CameraMetadataSectionStart:ANDROID_QUIRKS_START, + /** android.quirks.triggerAfWithAuto [static, byte, system] + * + *

If set to 1, then the camera service always + * switches to FOCUS_MODE_AUTO before issuing a AF + * trigger.

+ */ ANDROID_QUIRKS_TRIGGER_AF_WITH_AUTO, + /** android.quirks.useZslFormat [static, byte, system] + * + *

If set to 1, the camera service uses + * CAMERA2_PIXEL_FORMAT_ZSL instead of + * HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED for the zero + * shutter lag stream

+ */ ANDROID_QUIRKS_USE_ZSL_FORMAT, + /** android.quirks.usePartialResult [static, byte, hidden] + * + *

If set to 1, the HAL will always split result + * metadata for a single capture into multiple buffers, + * returned using multiple process_capture_result calls.

+ */ ANDROID_QUIRKS_USE_PARTIAL_RESULT, + /** android.quirks.partialResult [dynamic, enum, hidden] + * + *

Whether a result given to the framework is the + * final one for the capture, or only a partial that contains a + * subset of the full set of dynamic metadata + * values.

+ */ ANDROID_QUIRKS_PARTIAL_RESULT, ANDROID_QUIRKS_END, + /** android.request.frameCount [dynamic, int32, hidden] + * + *

A frame counter set by the framework. This value monotonically + * increases with every new result (that is, each new result has a unique + * frameCount value).

+ */ ANDROID_REQUEST_FRAME_COUNT = CameraMetadataSectionStart:ANDROID_REQUEST_START, + /** android.request.id [dynamic, int32, hidden] + * + *

An application-specified ID for the current + * request. Must be maintained unchanged in output + * frame

+ */ ANDROID_REQUEST_ID, + /** android.request.inputStreams [controls, int32[], system] + * + *

List which camera reprocess stream is used + * for the source of reprocessing data.

+ */ ANDROID_REQUEST_INPUT_STREAMS, + /** android.request.metadataMode [dynamic, enum, system] + * + *

How much metadata to produce on + * output

+ */ ANDROID_REQUEST_METADATA_MODE, + /** android.request.outputStreams [dynamic, int32[], system] + * + *

Lists which camera output streams image data + * from this capture must be sent to

+ */ ANDROID_REQUEST_OUTPUT_STREAMS, + /** android.request.type [controls, enum, system] + * + *

The type of the request; either CAPTURE or + * REPROCESS. For legacy HAL3, this tag is redundant.

+ */ ANDROID_REQUEST_TYPE, + /** android.request.maxNumOutputStreams [static, int32[], ndk_public] + * + *

The maximum numbers of different types of output streams + * that can be configured and used simultaneously by a camera device.

+ */ ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, + /** android.request.maxNumReprocessStreams [static, int32[], system] + * + *

How many reprocessing streams of any type + * can be allocated at the same time.

+ */ ANDROID_REQUEST_MAX_NUM_REPROCESS_STREAMS, + /** android.request.maxNumInputStreams [static, int32, java_public] + * + *

The maximum numbers of any type of input streams + * that can be configured and used simultaneously by a camera device.

+ */ ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS, + /** android.request.pipelineDepth [dynamic, byte, public] + * + *

Specifies the number of pipeline stages the frame went + * through from when it was exposed to when the final completed result + * was available to the framework.

+ */ ANDROID_REQUEST_PIPELINE_DEPTH, + /** android.request.pipelineMaxDepth [static, byte, public] + * + *

Specifies the number of maximum pipeline stages a frame + * has to go through from when it's exposed to when it's available + * to the framework.

+ */ ANDROID_REQUEST_PIPELINE_MAX_DEPTH, + /** android.request.partialResultCount [static, int32, public] + * + *

Defines how many sub-components + * a result will be composed of.

+ */ ANDROID_REQUEST_PARTIAL_RESULT_COUNT, + /** android.request.availableCapabilities [static, enum[], public] + * + *

List of capabilities that this camera device + * advertises as fully supporting.

+ */ ANDROID_REQUEST_AVAILABLE_CAPABILITIES, + /** android.request.availableRequestKeys [static, int32[], ndk_public] + * + *

A list of all keys that the camera device has available + * to use with {@link ACaptureRequest }.

+ */ ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, + /** android.request.availableResultKeys [static, int32[], ndk_public] + * + *

A list of all keys that the camera device has available to use with {@link ACameraCaptureSession_captureCallback_result }.

+ */ ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, + /** android.request.availableCharacteristicsKeys [static, int32[], ndk_public] + * + *

A list of all keys that the camera device has available to use with {@link ACameraManager_getCameraCharacteristics }.

+ */ ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_END, + /** android.scaler.cropRegion [dynamic, int32[], public] + * + *

The desired region of the sensor to read out for this capture.

+ */ ANDROID_SCALER_CROP_REGION = CameraMetadataSectionStart:ANDROID_SCALER_START, + /** android.scaler.availableFormats [static, enum[], hidden] + * + *

The list of image formats that are supported by this + * camera device for output streams.

+ */ ANDROID_SCALER_AVAILABLE_FORMATS, + /** android.scaler.availableJpegMinDurations [static, int64[], hidden] + * + *

The minimum frame duration that is supported + * for each resolution in ANDROID_SCALER_AVAILABLE_JPEG_SIZES.

+ * + * @see ANDROID_SCALER_AVAILABLE_JPEG_SIZES + */ ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS, + /** android.scaler.availableJpegSizes [static, int32[], hidden] + * + *

The JPEG resolutions that are supported by this camera device.

+ */ ANDROID_SCALER_AVAILABLE_JPEG_SIZES, + /** android.scaler.availableMaxDigitalZoom [static, float, public] + * + *

The maximum ratio between both active area width + * and crop region width, and active area height and + * crop region height, for ANDROID_SCALER_CROP_REGION.

+ * + * @see ANDROID_SCALER_CROP_REGION + */ ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, + /** android.scaler.availableProcessedMinDurations [static, int64[], hidden] + * + *

For each available processed output size (defined in + * ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES), this property lists the + * minimum supportable frame duration for that size.

+ * + * @see ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES + */ ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS, + /** android.scaler.availableProcessedSizes [static, int32[], hidden] + * + *

The resolutions available for use with + * processed output streams, such as YV12, NV12, and + * platform opaque YUV/RGB streams to the GPU or video + * encoders.

+ */ ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES, + /** android.scaler.availableRawMinDurations [static, int64[], system] + * + *

For each available raw output size (defined in + * ANDROID_SCALER_AVAILABLE_RAW_SIZES), this property lists the minimum + * supportable frame duration for that size.

+ * + * @see ANDROID_SCALER_AVAILABLE_RAW_SIZES + */ ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS, + /** android.scaler.availableRawSizes [static, int32[], system] + * + *

The resolutions available for use with raw + * sensor output streams, listed as width, + * height

+ */ ANDROID_SCALER_AVAILABLE_RAW_SIZES, + /** android.scaler.availableInputOutputFormatsMap [static, int32, hidden] + * + *

The mapping of image formats that are supported by this + * camera device for input streams, to their corresponding output formats.

+ */ ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP, + /** android.scaler.availableStreamConfigurations [static, enum[], ndk_public] + * + *

The available stream configurations that this + * camera device supports + * (i.e. format, width, height, output/input stream).

+ */ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, + /** android.scaler.availableMinFrameDurations [static, int64[], ndk_public] + * + *

This lists the minimum frame duration for each + * format/size combination.

+ */ ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, + /** android.scaler.availableStallDurations [static, int64[], ndk_public] + * + *

This lists the maximum stall duration for each + * output format/size combination.

+ */ ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, + /** android.scaler.croppingType [static, enum, public] + * + *

The crop type that this camera device supports.

+ */ ANDROID_SCALER_CROPPING_TYPE, ANDROID_SCALER_END, + /** android.sensor.exposureTime [dynamic, int64, public] + * + *

Duration each pixel is exposed to + * light.

+ */ ANDROID_SENSOR_EXPOSURE_TIME = CameraMetadataSectionStart:ANDROID_SENSOR_START, + /** android.sensor.frameDuration [dynamic, int64, public] + * + *

Duration from start of frame exposure to + * start of next frame exposure.

+ */ ANDROID_SENSOR_FRAME_DURATION, + /** android.sensor.sensitivity [dynamic, int32, public] + * + *

The amount of gain applied to sensor data + * before processing.

+ */ ANDROID_SENSOR_SENSITIVITY, + /** android.sensor.referenceIlluminant1 [static, enum, public] + * + *

The standard reference illuminant used as the scene light source when + * calculating the ANDROID_SENSOR_COLOR_TRANSFORM1, + * ANDROID_SENSOR_CALIBRATION_TRANSFORM1, and + * ANDROID_SENSOR_FORWARD_MATRIX1 matrices.

+ * + * @see ANDROID_SENSOR_CALIBRATION_TRANSFORM1 + * @see ANDROID_SENSOR_COLOR_TRANSFORM1 + * @see ANDROID_SENSOR_FORWARD_MATRIX1 + */ ANDROID_SENSOR_REFERENCE_ILLUMINANT1, + /** android.sensor.referenceIlluminant2 [static, byte, public] + * + *

The standard reference illuminant used as the scene light source when + * calculating the ANDROID_SENSOR_COLOR_TRANSFORM2, + * ANDROID_SENSOR_CALIBRATION_TRANSFORM2, and + * ANDROID_SENSOR_FORWARD_MATRIX2 matrices.

+ * + * @see ANDROID_SENSOR_CALIBRATION_TRANSFORM2 + * @see ANDROID_SENSOR_COLOR_TRANSFORM2 + * @see ANDROID_SENSOR_FORWARD_MATRIX2 + */ ANDROID_SENSOR_REFERENCE_ILLUMINANT2, + /** android.sensor.calibrationTransform1 [static, rational[], public] + * + *

A per-device calibration transform matrix that maps from the + * reference sensor colorspace to the actual device sensor colorspace.

+ */ ANDROID_SENSOR_CALIBRATION_TRANSFORM1, + /** android.sensor.calibrationTransform2 [static, rational[], public] + * + *

A per-device calibration transform matrix that maps from the + * reference sensor colorspace to the actual device sensor colorspace + * (this is the colorspace of the raw buffer data).

+ */ ANDROID_SENSOR_CALIBRATION_TRANSFORM2, + /** android.sensor.colorTransform1 [static, rational[], public] + * + *

A matrix that transforms color values from CIE XYZ color space to + * reference sensor color space.

+ */ ANDROID_SENSOR_COLOR_TRANSFORM1, + /** android.sensor.colorTransform2 [static, rational[], public] + * + *

A matrix that transforms color values from CIE XYZ color space to + * reference sensor color space.

+ */ ANDROID_SENSOR_COLOR_TRANSFORM2, + /** android.sensor.forwardMatrix1 [static, rational[], public] + * + *

A matrix that transforms white balanced camera colors from the reference + * sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.

+ */ ANDROID_SENSOR_FORWARD_MATRIX1, + /** android.sensor.forwardMatrix2 [static, rational[], public] + * + *

A matrix that transforms white balanced camera colors from the reference + * sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.

+ */ ANDROID_SENSOR_FORWARD_MATRIX2, + /** android.sensor.baseGainFactor [static, rational, system] + * + *

Gain factor from electrons to raw units when + * ISO=100

+ */ ANDROID_SENSOR_BASE_GAIN_FACTOR, + /** android.sensor.blackLevelPattern [static, int32[], public] + * + *

A fixed black level offset for each of the color filter arrangement + * (CFA) mosaic channels.

+ */ ANDROID_SENSOR_BLACK_LEVEL_PATTERN, + /** android.sensor.maxAnalogSensitivity [static, int32, public] + * + *

Maximum sensitivity that is implemented + * purely through analog gain.

+ */ ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY, + /** android.sensor.orientation [static, int32, public] + * + *

Clockwise angle through which the output image needs to be rotated to be + * upright on the device screen in its native orientation.

+ */ ANDROID_SENSOR_ORIENTATION, + /** android.sensor.profileHueSatMapDimensions [static, int32[], system] + * + *

The number of input samples for each dimension of + * ANDROID_SENSOR_PROFILE_HUE_SAT_MAP.

+ * + * @see ANDROID_SENSOR_PROFILE_HUE_SAT_MAP + */ ANDROID_SENSOR_PROFILE_HUE_SAT_MAP_DIMENSIONS, + /** android.sensor.timestamp [dynamic, int64, public] + * + *

Time at start of exposure of first + * row of the image sensor active array, in nanoseconds.

+ */ ANDROID_SENSOR_TIMESTAMP, + /** android.sensor.temperature [dynamic, float, system] + * + *

The temperature of the sensor, sampled at the time + * exposure began for this frame.

+ *

The thermal diode being queried should be inside the sensor PCB, or + * somewhere close to it.

+ */ ANDROID_SENSOR_TEMPERATURE, + /** android.sensor.neutralColorPoint [dynamic, rational[], public] + * + *

The estimated camera neutral color in the native sensor colorspace at + * the time of capture.

+ */ ANDROID_SENSOR_NEUTRAL_COLOR_POINT, + /** android.sensor.noiseProfile [dynamic, double[], public] + * + *

Noise model coefficients for each CFA mosaic channel.

+ */ ANDROID_SENSOR_NOISE_PROFILE, + /** android.sensor.profileHueSatMap [dynamic, float[], system] + * + *

A mapping containing a hue shift, saturation scale, and value scale + * for each pixel.

+ */ ANDROID_SENSOR_PROFILE_HUE_SAT_MAP, + /** android.sensor.profileToneCurve [dynamic, float[], system] + * + *

A list of x,y samples defining a tone-mapping curve for gamma adjustment.

+ */ ANDROID_SENSOR_PROFILE_TONE_CURVE, + /** android.sensor.greenSplit [dynamic, float, public] + * + *

The worst-case divergence between Bayer green channels.

+ */ ANDROID_SENSOR_GREEN_SPLIT, + /** android.sensor.testPatternData [dynamic, int32[], public] + * + *

A pixel [R, G_even, G_odd, B] that supplies the test pattern + * when ANDROID_SENSOR_TEST_PATTERN_MODE is SOLID_COLOR.

+ * + * @see ANDROID_SENSOR_TEST_PATTERN_MODE + */ ANDROID_SENSOR_TEST_PATTERN_DATA, + /** android.sensor.testPatternMode [dynamic, enum, public] + * + *

When enabled, the sensor sends a test pattern instead of + * doing a real exposure from the camera.

+ */ ANDROID_SENSOR_TEST_PATTERN_MODE, + /** android.sensor.availableTestPatternModes [static, int32[], public] + * + *

List of sensor test pattern modes for ANDROID_SENSOR_TEST_PATTERN_MODE + * supported by this camera device.

+ * + * @see ANDROID_SENSOR_TEST_PATTERN_MODE + */ ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, + /** android.sensor.rollingShutterSkew [dynamic, int64, public] + * + *

Duration between the start of first row exposure + * and the start of last row exposure.

+ */ ANDROID_SENSOR_ROLLING_SHUTTER_SKEW, + /** android.sensor.opticalBlackRegions [static, int32[], public] + * + *

List of disjoint rectangles indicating the sensor + * optically shielded black pixel regions.

+ */ ANDROID_SENSOR_OPTICAL_BLACK_REGIONS, + /** android.sensor.dynamicBlackLevel [dynamic, float[], public] + * + *

A per-frame dynamic black level offset for each of the color filter + * arrangement (CFA) mosaic channels.

+ */ ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, + /** android.sensor.dynamicWhiteLevel [dynamic, int32, public] + * + *

Maximum raw value output by sensor for this frame.

+ */ ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL, + /** android.sensor.opaqueRawSize [static, int32[], system] + * + *

Size in bytes for all the listed opaque RAW buffer sizes

+ */ ANDROID_SENSOR_OPAQUE_RAW_SIZE, ANDROID_SENSOR_END, + /** android.sensor.info.activeArraySize [static, int32[], public] + * + *

The area of the image sensor which corresponds to active pixels after any geometric + * distortion correction has been applied.

+ */ ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE = CameraMetadataSectionStart:ANDROID_SENSOR_INFO_START, + /** android.sensor.info.sensitivityRange [static, int32[], public] + * + *

Range of sensitivities for ANDROID_SENSOR_SENSITIVITY supported by this + * camera device.

+ * + * @see ANDROID_SENSOR_SENSITIVITY + */ ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, + /** android.sensor.info.colorFilterArrangement [static, enum, public] + * + *

The arrangement of color filters on sensor; + * represents the colors in the top-left 2x2 section of + * the sensor, in reading order.

+ */ ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, + /** android.sensor.info.exposureTimeRange [static, int64[], public] + * + *

The range of image exposure times for ANDROID_SENSOR_EXPOSURE_TIME supported + * by this camera device.

+ * + * @see ANDROID_SENSOR_EXPOSURE_TIME + */ ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, + /** android.sensor.info.maxFrameDuration [static, int64, public] + * + *

The maximum possible frame duration (minimum frame rate) for + * ANDROID_SENSOR_FRAME_DURATION that is supported by this camera device.

+ * + * @see ANDROID_SENSOR_FRAME_DURATION + */ ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, + /** android.sensor.info.physicalSize [static, float[], public] + * + *

The physical dimensions of the full pixel + * array.

+ */ ANDROID_SENSOR_INFO_PHYSICAL_SIZE, + /** android.sensor.info.pixelArraySize [static, int32[], public] + * + *

Dimensions of the full pixel array, possibly + * including black calibration pixels.

+ */ ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, + /** android.sensor.info.whiteLevel [static, int32, public] + * + *

Maximum raw value output by sensor.

+ */ ANDROID_SENSOR_INFO_WHITE_LEVEL, + /** android.sensor.info.timestampSource [static, enum, public] + * + *

The time base source for sensor capture start timestamps.

+ */ ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, + /** android.sensor.info.lensShadingApplied [static, enum, public] + * + *

Whether the RAW images output from this camera device are subject to + * lens shading correction.

+ */ ANDROID_SENSOR_INFO_LENS_SHADING_APPLIED, + /** android.sensor.info.preCorrectionActiveArraySize [static, int32[], public] + * + *

The area of the image sensor which corresponds to active pixels prior to the + * application of any geometric distortion correction.

+ */ ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, ANDROID_SENSOR_INFO_END, + /** android.shading.mode [dynamic, enum, public] + * + *

Quality of lens shading correction applied + * to the image data.

+ */ ANDROID_SHADING_MODE = CameraMetadataSectionStart:ANDROID_SHADING_START, + /** android.shading.strength [controls, byte, system] + * + *

Control the amount of shading correction + * applied to the images

+ */ ANDROID_SHADING_STRENGTH, + /** android.shading.availableModes [static, byte[], public] + * + *

List of lens shading modes for ANDROID_SHADING_MODE that are supported by this camera device.

+ * + * @see ANDROID_SHADING_MODE + */ ANDROID_SHADING_AVAILABLE_MODES, ANDROID_SHADING_END, + /** android.statistics.faceDetectMode [dynamic, enum, public] + * + *

Operating mode for the face detector + * unit.

+ */ ANDROID_STATISTICS_FACE_DETECT_MODE = CameraMetadataSectionStart:ANDROID_STATISTICS_START, + /** android.statistics.histogramMode [dynamic, enum, system] + * + *

Operating mode for histogram + * generation

+ */ ANDROID_STATISTICS_HISTOGRAM_MODE, + /** android.statistics.sharpnessMapMode [dynamic, enum, system] + * + *

Operating mode for sharpness map + * generation

+ */ ANDROID_STATISTICS_SHARPNESS_MAP_MODE, + /** android.statistics.hotPixelMapMode [dynamic, enum, public] + * + *

Operating mode for hot pixel map generation.

+ */ ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, + /** android.statistics.faceIds [dynamic, int32[], ndk_public] + * + *

List of unique IDs for detected faces.

+ */ ANDROID_STATISTICS_FACE_IDS, + /** android.statistics.faceLandmarks [dynamic, int32[], ndk_public] + * + *

List of landmarks for detected + * faces.

+ */ ANDROID_STATISTICS_FACE_LANDMARKS, + /** android.statistics.faceRectangles [dynamic, int32[], ndk_public] + * + *

List of the bounding rectangles for detected + * faces.

+ */ ANDROID_STATISTICS_FACE_RECTANGLES, + /** android.statistics.faceScores [dynamic, byte[], ndk_public] + * + *

List of the face confidence scores for + * detected faces

+ */ ANDROID_STATISTICS_FACE_SCORES, + /** android.statistics.histogram [dynamic, int32[], system] + * + *

A 3-channel histogram based on the raw + * sensor data

+ */ ANDROID_STATISTICS_HISTOGRAM, + /** android.statistics.sharpnessMap [dynamic, int32[], system] + * + *

A 3-channel sharpness map, based on the raw + * sensor data

+ */ ANDROID_STATISTICS_SHARPNESS_MAP, + /** android.statistics.lensShadingCorrectionMap [dynamic, byte, java_public] + * + *

The shading map is a low-resolution floating-point map + * that lists the coefficients used to correct for vignetting, for each + * Bayer color channel.

+ */ ANDROID_STATISTICS_LENS_SHADING_CORRECTION_MAP, + /** android.statistics.lensShadingMap [dynamic, float[], ndk_public] + * + *

The shading map is a low-resolution floating-point map + * that lists the coefficients used to correct for vignetting and color shading, + * for each Bayer color channel of RAW image data.

+ */ ANDROID_STATISTICS_LENS_SHADING_MAP, + /** android.statistics.predictedColorGains [dynamic, float[], hidden] + * + *

The best-fit color channel gains calculated + * by the camera device's statistics units for the current output frame.

+ */ ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, + /** android.statistics.predictedColorTransform [dynamic, rational[], hidden] + * + *

The best-fit color transform matrix estimate + * calculated by the camera device's statistics units for the current + * output frame.

+ */ ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM, + /** android.statistics.sceneFlicker [dynamic, enum, public] + * + *

The camera device estimated scene illumination lighting + * frequency.

+ */ ANDROID_STATISTICS_SCENE_FLICKER, + /** android.statistics.hotPixelMap [dynamic, int32[], public] + * + *

List of (x, y) coordinates of hot/defective pixels on the sensor.

+ */ ANDROID_STATISTICS_HOT_PIXEL_MAP, + /** android.statistics.lensShadingMapMode [dynamic, enum, public] + * + *

Whether the camera device will output the lens + * shading map in output result metadata.

+ */ ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_STATISTICS_END, - ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES = - CameraMetadataSectionStart:ANDROID_STATISTICS_INFO_START, - + /** android.statistics.info.availableFaceDetectModes [static, byte[], public] + * + *

List of face detection modes for ANDROID_STATISTICS_FACE_DETECT_MODE that are + * supported by this camera device.

+ * + * @see ANDROID_STATISTICS_FACE_DETECT_MODE + */ + ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES = CameraMetadataSectionStart:ANDROID_STATISTICS_INFO_START, + + /** android.statistics.info.histogramBucketCount [static, int32, system] + * + *

Number of histogram buckets + * supported

+ */ ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT, + /** android.statistics.info.maxFaceCount [static, int32, public] + * + *

The maximum number of simultaneously detectable + * faces.

+ */ ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, + /** android.statistics.info.maxHistogramCount [static, int32, system] + * + *

Maximum value possible for a histogram + * bucket

+ */ ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT, + /** android.statistics.info.maxSharpnessMapValue [static, int32, system] + * + *

Maximum value possible for a sharpness map + * region.

+ */ ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE, + /** android.statistics.info.sharpnessMapSize [static, int32[], system] + * + *

Dimensions of the sharpness + * map

+ */ ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, + /** android.statistics.info.availableHotPixelMapModes [static, byte[], public] + * + *

List of hot pixel map output modes for ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE that are + * supported by this camera device.

+ * + * @see ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE + */ ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES, + /** android.statistics.info.availableLensShadingMapModes [static, byte[], public] + * + *

List of lens shading map output modes for ANDROID_STATISTICS_LENS_SHADING_MAP_MODE that + * are supported by this camera device.

+ * + * @see ANDROID_STATISTICS_LENS_SHADING_MAP_MODE + */ ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES, ANDROID_STATISTICS_INFO_END, + /** android.tonemap.curveBlue [dynamic, float[], ndk_public] + * + *

Tonemapping / contrast / gamma curve for the blue + * channel, to use when ANDROID_TONEMAP_MODE is + * CONTRAST_CURVE.

+ * + * @see ANDROID_TONEMAP_MODE + */ ANDROID_TONEMAP_CURVE_BLUE = CameraMetadataSectionStart:ANDROID_TONEMAP_START, + /** android.tonemap.curveGreen [dynamic, float[], ndk_public] + * + *

Tonemapping / contrast / gamma curve for the green + * channel, to use when ANDROID_TONEMAP_MODE is + * CONTRAST_CURVE.

+ * + * @see ANDROID_TONEMAP_MODE + */ ANDROID_TONEMAP_CURVE_GREEN, + /** android.tonemap.curveRed [dynamic, float[], ndk_public] + * + *

Tonemapping / contrast / gamma curve for the red + * channel, to use when ANDROID_TONEMAP_MODE is + * CONTRAST_CURVE.

+ * + * @see ANDROID_TONEMAP_MODE + */ ANDROID_TONEMAP_CURVE_RED, + /** android.tonemap.mode [dynamic, enum, public] + * + *

High-level global contrast/gamma/tonemapping control.

+ */ ANDROID_TONEMAP_MODE, + /** android.tonemap.maxCurvePoints [static, int32, public] + * + *

Maximum number of supported points in the + * tonemap curve that can be used for ANDROID_TONEMAP_CURVE.

+ * + * @see ANDROID_TONEMAP_CURVE + */ ANDROID_TONEMAP_MAX_CURVE_POINTS, + /** android.tonemap.availableToneMapModes [static, byte[], public] + * + *

List of tonemapping modes for ANDROID_TONEMAP_MODE that are supported by this camera + * device.

+ * + * @see ANDROID_TONEMAP_MODE + */ ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES, + /** android.tonemap.gamma [dynamic, float, public] + * + *

Tonemapping curve to use when ANDROID_TONEMAP_MODE is + * GAMMA_VALUE

+ * + * @see ANDROID_TONEMAP_MODE + */ ANDROID_TONEMAP_GAMMA, + /** android.tonemap.presetCurve [dynamic, enum, public] + * + *

Tonemapping curve to use when ANDROID_TONEMAP_MODE is + * PRESET_CURVE

+ * + * @see ANDROID_TONEMAP_MODE + */ ANDROID_TONEMAP_PRESET_CURVE, ANDROID_TONEMAP_END, + /** android.led.transmit [dynamic, enum, hidden] + * + *

This LED is nominally used to indicate to the user + * that the camera is powered on and may be streaming images back to the + * Application Processor. In certain rare circumstances, the OS may + * disable this when video is processed locally and not transmitted to + * any untrusted applications.

+ *

In particular, the LED must always be on when the data could be + * transmitted off the device. The LED should always be on whenever + * data is stored locally on the device.

+ *

The LED may be off if a trusted application is using the data that + * doesn't violate the above rules.

+ */ ANDROID_LED_TRANSMIT = CameraMetadataSectionStart:ANDROID_LED_START, + /** android.led.availableLeds [static, enum[], hidden] + * + *

A list of camera LEDs that are available on this system.

+ */ ANDROID_LED_AVAILABLE_LEDS, ANDROID_LED_END, + /** android.info.supportedHardwareLevel [static, enum, public] + * + *

Generally classifies the overall set of the camera device functionality.

+ */ ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL = CameraMetadataSectionStart:ANDROID_INFO_START, ANDROID_INFO_END, + /** android.blackLevel.lock [dynamic, enum, public] + * + *

Whether black-level compensation is locked + * to its current values, or is free to vary.

+ */ ANDROID_BLACK_LEVEL_LOCK = CameraMetadataSectionStart:ANDROID_BLACK_LEVEL_START, ANDROID_BLACK_LEVEL_END, + /** android.sync.frameNumber [dynamic, enum, ndk_public] + * + *

The frame number corresponding to the last request + * with which the output result (metadata + buffers) has been fully + * synchronized.

+ */ ANDROID_SYNC_FRAME_NUMBER = CameraMetadataSectionStart:ANDROID_SYNC_START, + /** android.sync.maxLatency [static, enum, public] + * + *

The maximum number of frames that can occur after a request + * (different than the previous) has been submitted, and before the + * result's state becomes synchronized.

+ */ ANDROID_SYNC_MAX_LATENCY, ANDROID_SYNC_END, + /** android.reprocess.effectiveExposureFactor [dynamic, float, java_public] + * + *

The amount of exposure time increase factor applied to the original output + * frame by the application processing before sending for reprocessing.

+ */ ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR = CameraMetadataSectionStart:ANDROID_REPROCESS_START, + /** android.reprocess.maxCaptureStall [static, int32, java_public] + * + *

The maximal camera capture pipeline stall (in unit of frame count) introduced by a + * reprocess capture request.

+ */ ANDROID_REPROCESS_MAX_CAPTURE_STALL, ANDROID_REPROCESS_END, + /** android.depth.maxDepthSamples [static, int32, system] + * + *

Maximum number of points that a depth point cloud may contain.

+ */ ANDROID_DEPTH_MAX_DEPTH_SAMPLES = CameraMetadataSectionStart:ANDROID_DEPTH_START, + /** android.depth.availableDepthStreamConfigurations [static, enum[], ndk_public] + * + *

The available depth dataspace stream + * configurations that this camera device supports + * (i.e. format, width, height, output/input stream).

+ */ ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, + /** android.depth.availableDepthMinFrameDurations [static, int64[], ndk_public] + * + *

This lists the minimum frame duration for each + * format/size combination for depth output formats.

+ */ ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS, + /** android.depth.availableDepthStallDurations [static, int64[], ndk_public] + * + *

This lists the maximum stall duration for each + * output format/size combination for depth streams.

+ */ ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS, + /** android.depth.depthIsExclusive [static, enum, public] + * + *

Indicates whether a capture request may target both a + * DEPTH16 / DEPTH_POINT_CLOUD output, and normal color outputs (such as + * YUV_420_888, JPEG, or RAW) simultaneously.

+ */ ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, ANDROID_DEPTH_END, }; -/** +/* * Enumeration definitions for the various entries that need them */ + +/** android.colorCorrection.mode enumeration values + * @see ANDROID_COLOR_CORRECTION_MODE + */ enum CameraMetadataEnumAndroidColorCorrectionMode : uint32_t { ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX, - ANDROID_COLOR_CORRECTION_MODE_FAST, - ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY, - }; +/** android.colorCorrection.aberrationMode enumeration values + * @see ANDROID_COLOR_CORRECTION_ABERRATION_MODE + */ enum CameraMetadataEnumAndroidColorCorrectionAberrationMode : uint32_t { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF, - ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST, - ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY, - }; +/** android.control.aeAntibandingMode enumeration values + * @see ANDROID_CONTROL_AE_ANTIBANDING_MODE + */ enum CameraMetadataEnumAndroidControlAeAntibandingMode : uint32_t { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, - ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, - ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, - ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, - }; +/** android.control.aeLock enumeration values + * @see ANDROID_CONTROL_AE_LOCK + */ enum CameraMetadataEnumAndroidControlAeLock : uint32_t { ANDROID_CONTROL_AE_LOCK_OFF, - ANDROID_CONTROL_AE_LOCK_ON, - }; +/** android.control.aeMode enumeration values + * @see ANDROID_CONTROL_AE_MODE + */ enum CameraMetadataEnumAndroidControlAeMode : uint32_t { ANDROID_CONTROL_AE_MODE_OFF, - ANDROID_CONTROL_AE_MODE_ON, - ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, - ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, - ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, - }; +/** android.control.aePrecaptureTrigger enumeration values + * @see ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER + */ enum CameraMetadataEnumAndroidControlAePrecaptureTrigger : uint32_t { ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE, - ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START, - ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL, - }; +/** android.control.afMode enumeration values + * @see ANDROID_CONTROL_AF_MODE + */ enum CameraMetadataEnumAndroidControlAfMode : uint32_t { ANDROID_CONTROL_AF_MODE_OFF, - ANDROID_CONTROL_AF_MODE_AUTO, - ANDROID_CONTROL_AF_MODE_MACRO, - ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, - ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, - ANDROID_CONTROL_AF_MODE_EDOF, - }; +/** android.control.afTrigger enumeration values + * @see ANDROID_CONTROL_AF_TRIGGER + */ enum CameraMetadataEnumAndroidControlAfTrigger : uint32_t { ANDROID_CONTROL_AF_TRIGGER_IDLE, - ANDROID_CONTROL_AF_TRIGGER_START, - ANDROID_CONTROL_AF_TRIGGER_CANCEL, - }; +/** android.control.awbLock enumeration values + * @see ANDROID_CONTROL_AWB_LOCK + */ enum CameraMetadataEnumAndroidControlAwbLock : uint32_t { ANDROID_CONTROL_AWB_LOCK_OFF, - ANDROID_CONTROL_AWB_LOCK_ON, - }; +/** android.control.awbMode enumeration values + * @see ANDROID_CONTROL_AWB_MODE + */ enum CameraMetadataEnumAndroidControlAwbMode : uint32_t { ANDROID_CONTROL_AWB_MODE_OFF, - ANDROID_CONTROL_AWB_MODE_AUTO, - ANDROID_CONTROL_AWB_MODE_INCANDESCENT, - ANDROID_CONTROL_AWB_MODE_FLUORESCENT, - ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT, - ANDROID_CONTROL_AWB_MODE_DAYLIGHT, - ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, - ANDROID_CONTROL_AWB_MODE_TWILIGHT, - ANDROID_CONTROL_AWB_MODE_SHADE, - }; +/** android.control.captureIntent enumeration values + * @see ANDROID_CONTROL_CAPTURE_INTENT + */ enum CameraMetadataEnumAndroidControlCaptureIntent : uint32_t { ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM, - 
ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW, - ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE, - ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD, - ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT, - ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG, - ANDROID_CONTROL_CAPTURE_INTENT_MANUAL, - }; +/** android.control.effectMode enumeration values + * @see ANDROID_CONTROL_EFFECT_MODE + */ enum CameraMetadataEnumAndroidControlEffectMode : uint32_t { ANDROID_CONTROL_EFFECT_MODE_OFF, - ANDROID_CONTROL_EFFECT_MODE_MONO, - ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, - ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, - ANDROID_CONTROL_EFFECT_MODE_SEPIA, - ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, - ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, - ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, - ANDROID_CONTROL_EFFECT_MODE_AQUA, - }; +/** android.control.mode enumeration values + * @see ANDROID_CONTROL_MODE + */ enum CameraMetadataEnumAndroidControlMode : uint32_t { ANDROID_CONTROL_MODE_OFF, - ANDROID_CONTROL_MODE_AUTO, - ANDROID_CONTROL_MODE_USE_SCENE_MODE, - ANDROID_CONTROL_MODE_OFF_KEEP_STATE, - }; +/** android.control.sceneMode enumeration values + * @see ANDROID_CONTROL_SCENE_MODE + */ enum CameraMetadataEnumAndroidControlSceneMode : uint32_t { - ANDROID_CONTROL_SCENE_MODE_DISABLED = 0, - + ANDROID_CONTROL_SCENE_MODE_DISABLED = 0, ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, - ANDROID_CONTROL_SCENE_MODE_ACTION, - ANDROID_CONTROL_SCENE_MODE_PORTRAIT, - ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, - ANDROID_CONTROL_SCENE_MODE_NIGHT, - ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, - ANDROID_CONTROL_SCENE_MODE_THEATRE, - ANDROID_CONTROL_SCENE_MODE_BEACH, - ANDROID_CONTROL_SCENE_MODE_SNOW, - ANDROID_CONTROL_SCENE_MODE_SUNSET, - ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, - ANDROID_CONTROL_SCENE_MODE_FIREWORKS, - ANDROID_CONTROL_SCENE_MODE_SPORTS, - ANDROID_CONTROL_SCENE_MODE_PARTY, - ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, - ANDROID_CONTROL_SCENE_MODE_BARCODE, - ANDROID_CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO, - ANDROID_CONTROL_SCENE_MODE_HDR, - ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY_LOW_LIGHT, - - ANDROID_CONTROL_SCENE_MODE_DEVICE_CUSTOM_START = 100, - - ANDROID_CONTROL_SCENE_MODE_DEVICE_CUSTOM_END = 127, - + ANDROID_CONTROL_SCENE_MODE_DEVICE_CUSTOM_START = 100, + ANDROID_CONTROL_SCENE_MODE_DEVICE_CUSTOM_END = 127, }; +/** android.control.videoStabilizationMode enumeration values + * @see ANDROID_CONTROL_VIDEO_STABILIZATION_MODE + */ enum CameraMetadataEnumAndroidControlVideoStabilizationMode : uint32_t { ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF, - ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON, - }; +/** android.control.aeState enumeration values + * @see ANDROID_CONTROL_AE_STATE + */ enum CameraMetadataEnumAndroidControlAeState : uint32_t { ANDROID_CONTROL_AE_STATE_INACTIVE, - ANDROID_CONTROL_AE_STATE_SEARCHING, - ANDROID_CONTROL_AE_STATE_CONVERGED, - ANDROID_CONTROL_AE_STATE_LOCKED, - ANDROID_CONTROL_AE_STATE_FLASH_REQUIRED, - ANDROID_CONTROL_AE_STATE_PRECAPTURE, - }; +/** android.control.afState enumeration values + * @see ANDROID_CONTROL_AF_STATE + */ enum CameraMetadataEnumAndroidControlAfState : uint32_t { ANDROID_CONTROL_AF_STATE_INACTIVE, - ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN, - ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED, - ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN, - ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED, - ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED, - ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED, - }; +/** android.control.awbState enumeration values + * @see ANDROID_CONTROL_AWB_STATE + */ enum CameraMetadataEnumAndroidControlAwbState : uint32_t { 
ANDROID_CONTROL_AWB_STATE_INACTIVE, - ANDROID_CONTROL_AWB_STATE_SEARCHING, - ANDROID_CONTROL_AWB_STATE_CONVERGED, - ANDROID_CONTROL_AWB_STATE_LOCKED, - }; +/** android.control.aeLockAvailable enumeration values + * @see ANDROID_CONTROL_AE_LOCK_AVAILABLE + */ enum CameraMetadataEnumAndroidControlAeLockAvailable : uint32_t { ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE, - ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE, - }; +/** android.control.awbLockAvailable enumeration values + * @see ANDROID_CONTROL_AWB_LOCK_AVAILABLE + */ enum CameraMetadataEnumAndroidControlAwbLockAvailable : uint32_t { ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE, - ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE, - }; +/** android.control.enableZsl enumeration values + * @see ANDROID_CONTROL_ENABLE_ZSL + */ enum CameraMetadataEnumAndroidControlEnableZsl : uint32_t { ANDROID_CONTROL_ENABLE_ZSL_FALSE, - ANDROID_CONTROL_ENABLE_ZSL_TRUE, - }; +/** android.demosaic.mode enumeration values + * @see ANDROID_DEMOSAIC_MODE + */ enum CameraMetadataEnumAndroidDemosaicMode : uint32_t { ANDROID_DEMOSAIC_MODE_FAST, - ANDROID_DEMOSAIC_MODE_HIGH_QUALITY, - }; +/** android.edge.mode enumeration values + * @see ANDROID_EDGE_MODE + */ enum CameraMetadataEnumAndroidEdgeMode : uint32_t { ANDROID_EDGE_MODE_OFF, - ANDROID_EDGE_MODE_FAST, - ANDROID_EDGE_MODE_HIGH_QUALITY, - ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG, - }; +/** android.flash.mode enumeration values + * @see ANDROID_FLASH_MODE + */ enum CameraMetadataEnumAndroidFlashMode : uint32_t { ANDROID_FLASH_MODE_OFF, - ANDROID_FLASH_MODE_SINGLE, - ANDROID_FLASH_MODE_TORCH, - }; +/** android.flash.state enumeration values + * @see ANDROID_FLASH_STATE + */ enum CameraMetadataEnumAndroidFlashState : uint32_t { ANDROID_FLASH_STATE_UNAVAILABLE, - ANDROID_FLASH_STATE_CHARGING, - ANDROID_FLASH_STATE_READY, - ANDROID_FLASH_STATE_FIRED, - ANDROID_FLASH_STATE_PARTIAL, - }; +/** android.flash.info.available enumeration values + * @see ANDROID_FLASH_INFO_AVAILABLE + */ enum CameraMetadataEnumAndroidFlashInfoAvailable : uint32_t { ANDROID_FLASH_INFO_AVAILABLE_FALSE, - ANDROID_FLASH_INFO_AVAILABLE_TRUE, - }; +/** android.hotPixel.mode enumeration values + * @see ANDROID_HOT_PIXEL_MODE + */ enum CameraMetadataEnumAndroidHotPixelMode : uint32_t { ANDROID_HOT_PIXEL_MODE_OFF, - ANDROID_HOT_PIXEL_MODE_FAST, - ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY, - }; +/** android.lens.opticalStabilizationMode enumeration values + * @see ANDROID_LENS_OPTICAL_STABILIZATION_MODE + */ enum CameraMetadataEnumAndroidLensOpticalStabilizationMode : uint32_t { ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF, - ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON, - }; +/** android.lens.facing enumeration values + * @see ANDROID_LENS_FACING + */ enum CameraMetadataEnumAndroidLensFacing : uint32_t { ANDROID_LENS_FACING_FRONT, - ANDROID_LENS_FACING_BACK, - ANDROID_LENS_FACING_EXTERNAL, - }; +/** android.lens.state enumeration values + * @see ANDROID_LENS_STATE + */ enum CameraMetadataEnumAndroidLensState : uint32_t { ANDROID_LENS_STATE_STATIONARY, - ANDROID_LENS_STATE_MOVING, - }; +/** android.lens.info.focusDistanceCalibration enumeration values + * @see ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION + */ enum CameraMetadataEnumAndroidLensInfoFocusDistanceCalibration : uint32_t { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED, - ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE, - ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED, - }; +/** android.noiseReduction.mode enumeration values + * @see ANDROID_NOISE_REDUCTION_MODE + */ enum 
CameraMetadataEnumAndroidNoiseReductionMode : uint32_t { ANDROID_NOISE_REDUCTION_MODE_OFF, - ANDROID_NOISE_REDUCTION_MODE_FAST, - ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY, - ANDROID_NOISE_REDUCTION_MODE_MINIMAL, - ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG, - }; +/** android.quirks.partialResult enumeration values + * @see ANDROID_QUIRKS_PARTIAL_RESULT + */ enum CameraMetadataEnumAndroidQuirksPartialResult : uint32_t { ANDROID_QUIRKS_PARTIAL_RESULT_FINAL, - ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL, - }; +/** android.request.metadataMode enumeration values + * @see ANDROID_REQUEST_METADATA_MODE + */ enum CameraMetadataEnumAndroidRequestMetadataMode : uint32_t { ANDROID_REQUEST_METADATA_MODE_NONE, - ANDROID_REQUEST_METADATA_MODE_FULL, - }; +/** android.request.type enumeration values + * @see ANDROID_REQUEST_TYPE + */ enum CameraMetadataEnumAndroidRequestType : uint32_t { ANDROID_REQUEST_TYPE_CAPTURE, - ANDROID_REQUEST_TYPE_REPROCESS, - }; +/** android.request.availableCapabilities enumeration values + * @see ANDROID_REQUEST_AVAILABLE_CAPABILITIES + */ enum CameraMetadataEnumAndroidRequestAvailableCapabilities : uint32_t { ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE, - ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR, - ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING, - ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW, - ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING, - ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS, - ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE, - ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING, - ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT, - ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO, - }; +/** android.scaler.availableFormats enumeration values + * @see ANDROID_SCALER_AVAILABLE_FORMATS + */ enum CameraMetadataEnumAndroidScalerAvailableFormats : uint32_t { - ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 = 0x20, - - ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE = 0x24, - - ANDROID_SCALER_AVAILABLE_FORMATS_YV12 = 0x32315659, - - ANDROID_SCALER_AVAILABLE_FORMATS_YCrCb_420_SP = 0x11, - - ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED = 0x22, - - ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888 = 0x23, - - ANDROID_SCALER_AVAILABLE_FORMATS_BLOB = 0x21, - + ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 = 0x20, + ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE = 0x24, + ANDROID_SCALER_AVAILABLE_FORMATS_YV12 = 0x32315659, + ANDROID_SCALER_AVAILABLE_FORMATS_YCrCb_420_SP = 0x11, + ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED = 0x22, + ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888 = 0x23, + ANDROID_SCALER_AVAILABLE_FORMATS_BLOB = 0x21, }; +/** android.scaler.availableStreamConfigurations enumeration values + * @see ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS + */ enum CameraMetadataEnumAndroidScalerAvailableStreamConfigurations : uint32_t { ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT, - ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT, - }; +/** android.scaler.croppingType enumeration values + * @see ANDROID_SCALER_CROPPING_TYPE + */ enum CameraMetadataEnumAndroidScalerCroppingType : uint32_t { ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY, - ANDROID_SCALER_CROPPING_TYPE_FREEFORM, - }; +/** android.sensor.referenceIlluminant1 enumeration values + * @see ANDROID_SENSOR_REFERENCE_ILLUMINANT1 + */ enum CameraMetadataEnumAndroidSensorReferenceIlluminant1 : uint32_t { - ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT = 1, - - ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT = 2, 
- - ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN = 3, - - ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLASH = 4, - - ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER = 9, - - ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER = 10, - - ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE = 11, - - ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT = 12, - - ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT = 13, - + ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT = 1, + ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT = 2, + ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN = 3, + ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLASH = 4, + ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER = 9, + ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER = 10, + ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE = 11, + ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT = 12, + ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT = 13, ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT = 14, - - ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT = 15, - - ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A = 17, - - ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_B = 18, - - ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_C = 19, - - ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55 = 20, - - ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65 = 21, - - ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75 = 22, - - ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50 = 23, - - ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN = 24, - -}; - + ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT = 15, + ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A = 17, + ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_B = 18, + ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_C = 19, + ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55 = 20, + ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65 = 21, + ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75 = 22, + ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50 = 23, + ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN = 24, +}; + +/** android.sensor.testPatternMode enumeration values + * @see ANDROID_SENSOR_TEST_PATTERN_MODE + */ enum CameraMetadataEnumAndroidSensorTestPatternMode : uint32_t { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, - ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, - ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, - ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, - ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, - - ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1 = 256, - + ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1 = 256, }; +/** android.sensor.info.colorFilterArrangement enumeration values + * @see ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT + */ enum CameraMetadataEnumAndroidSensorInfoColorFilterArrangement : uint32_t { ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB, - ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG, - ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG, - ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR, - ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGB, - }; +/** android.sensor.info.timestampSource enumeration values + * @see ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE + */ enum CameraMetadataEnumAndroidSensorInfoTimestampSource : uint32_t { ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN, - ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME, - }; +/** android.sensor.info.lensShadingApplied enumeration values + * @see ANDROID_SENSOR_INFO_LENS_SHADING_APPLIED + */ enum CameraMetadataEnumAndroidSensorInfoLensShadingApplied : uint32_t { ANDROID_SENSOR_INFO_LENS_SHADING_APPLIED_FALSE, - 
ANDROID_SENSOR_INFO_LENS_SHADING_APPLIED_TRUE, - }; +/** android.shading.mode enumeration values + * @see ANDROID_SHADING_MODE + */ enum CameraMetadataEnumAndroidShadingMode : uint32_t { ANDROID_SHADING_MODE_OFF, - ANDROID_SHADING_MODE_FAST, - ANDROID_SHADING_MODE_HIGH_QUALITY, - }; +/** android.statistics.faceDetectMode enumeration values + * @see ANDROID_STATISTICS_FACE_DETECT_MODE + */ enum CameraMetadataEnumAndroidStatisticsFaceDetectMode : uint32_t { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, - ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, - ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, - }; +/** android.statistics.histogramMode enumeration values + * @see ANDROID_STATISTICS_HISTOGRAM_MODE + */ enum CameraMetadataEnumAndroidStatisticsHistogramMode : uint32_t { ANDROID_STATISTICS_HISTOGRAM_MODE_OFF, - ANDROID_STATISTICS_HISTOGRAM_MODE_ON, - }; +/** android.statistics.sharpnessMapMode enumeration values + * @see ANDROID_STATISTICS_SHARPNESS_MAP_MODE + */ enum CameraMetadataEnumAndroidStatisticsSharpnessMapMode : uint32_t { ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF, - ANDROID_STATISTICS_SHARPNESS_MAP_MODE_ON, - }; +/** android.statistics.hotPixelMapMode enumeration values + * @see ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE + */ enum CameraMetadataEnumAndroidStatisticsHotPixelMapMode : uint32_t { ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF, - ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_ON, - }; +/** android.statistics.sceneFlicker enumeration values + * @see ANDROID_STATISTICS_SCENE_FLICKER + */ enum CameraMetadataEnumAndroidStatisticsSceneFlicker : uint32_t { ANDROID_STATISTICS_SCENE_FLICKER_NONE, - ANDROID_STATISTICS_SCENE_FLICKER_50HZ, - ANDROID_STATISTICS_SCENE_FLICKER_60HZ, - }; +/** android.statistics.lensShadingMapMode enumeration values + * @see ANDROID_STATISTICS_LENS_SHADING_MAP_MODE + */ enum CameraMetadataEnumAndroidStatisticsLensShadingMapMode : uint32_t { ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF, - ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON, - }; +/** android.tonemap.mode enumeration values + * @see ANDROID_TONEMAP_MODE + */ enum CameraMetadataEnumAndroidTonemapMode : uint32_t { ANDROID_TONEMAP_MODE_CONTRAST_CURVE, - ANDROID_TONEMAP_MODE_FAST, - ANDROID_TONEMAP_MODE_HIGH_QUALITY, - ANDROID_TONEMAP_MODE_GAMMA_VALUE, - ANDROID_TONEMAP_MODE_PRESET_CURVE, - }; +/** android.tonemap.presetCurve enumeration values + * @see ANDROID_TONEMAP_PRESET_CURVE + */ enum CameraMetadataEnumAndroidTonemapPresetCurve : uint32_t { ANDROID_TONEMAP_PRESET_CURVE_SRGB, - ANDROID_TONEMAP_PRESET_CURVE_REC709, - }; +/** android.led.transmit enumeration values + * @see ANDROID_LED_TRANSMIT + */ enum CameraMetadataEnumAndroidLedTransmit : uint32_t { ANDROID_LED_TRANSMIT_OFF, - ANDROID_LED_TRANSMIT_ON, - }; +/** android.led.availableLeds enumeration values + * @see ANDROID_LED_AVAILABLE_LEDS + */ enum CameraMetadataEnumAndroidLedAvailableLeds : uint32_t { ANDROID_LED_AVAILABLE_LEDS_TRANSMIT, - }; +/** android.info.supportedHardwareLevel enumeration values + * @see ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL + */ enum CameraMetadataEnumAndroidInfoSupportedHardwareLevel : uint32_t { ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED, - ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL, - ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY, - ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3, - }; +/** android.blackLevel.lock enumeration values + * @see ANDROID_BLACK_LEVEL_LOCK + */ enum CameraMetadataEnumAndroidBlackLevelLock : uint32_t { ANDROID_BLACK_LEVEL_LOCK_OFF, - ANDROID_BLACK_LEVEL_LOCK_ON, - }; +/** android.sync.frameNumber enumeration values + * 
@see ANDROID_SYNC_FRAME_NUMBER + */ enum CameraMetadataEnumAndroidSyncFrameNumber : uint32_t { - ANDROID_SYNC_FRAME_NUMBER_CONVERGING = -1, - - ANDROID_SYNC_FRAME_NUMBER_UNKNOWN = -2, - + ANDROID_SYNC_FRAME_NUMBER_CONVERGING = -1, + ANDROID_SYNC_FRAME_NUMBER_UNKNOWN = -2, }; +/** android.sync.maxLatency enumeration values + * @see ANDROID_SYNC_MAX_LATENCY + */ enum CameraMetadataEnumAndroidSyncMaxLatency : uint32_t { - ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL = 0, - - ANDROID_SYNC_MAX_LATENCY_UNKNOWN = -1, - + ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL = 0, + ANDROID_SYNC_MAX_LATENCY_UNKNOWN = -1, }; +/** android.depth.availableDepthStreamConfigurations enumeration values + * @see ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS + */ enum CameraMetadataEnumAndroidDepthAvailableDepthStreamConfigurations : uint32_t { ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT, - ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_INPUT, - }; +/** android.depth.depthIsExclusive enumeration values + * @see ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE + */ enum CameraMetadataEnumAndroidDepthDepthIsExclusive : uint32_t { ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE, - ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_TRUE, - }; diff --git a/camera/metadata/3.3/Android.bp b/camera/metadata/3.3/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..ad6f141cb8200480ab6979afa05f90a1d15b5a67 --- /dev/null +++ b/camera/metadata/3.3/Android.bp @@ -0,0 +1,31 @@ +// This file is autogenerated by hidl-gen -Landroidbp. + +hidl_interface { + name: "android.hardware.camera.metadata@3.3", + root: "android.hardware", + vndk: { + enabled: true, + }, + srcs: [ + "types.hal", + ], + interfaces: [ + "android.hardware.camera.metadata@3.2", + ], + types: [ + "CameraMetadataEnumAndroidControlAeMode", + "CameraMetadataEnumAndroidControlAfSceneChange", + "CameraMetadataEnumAndroidControlCaptureIntent", + "CameraMetadataEnumAndroidDistortionCorrectionMode", + "CameraMetadataEnumAndroidInfoSupportedHardwareLevel", + "CameraMetadataEnumAndroidLensPoseReference", + "CameraMetadataEnumAndroidLogicalMultiCameraSensorSyncType", + "CameraMetadataEnumAndroidRequestAvailableCapabilities", + "CameraMetadataEnumAndroidStatisticsOisDataMode", + "CameraMetadataSection", + "CameraMetadataSectionStart", + "CameraMetadataTag", + ], + gen_java: true, +} + diff --git a/camera/metadata/3.3/types.hal b/camera/metadata/3.3/types.hal new file mode 100644 index 0000000000000000000000000000000000000000..04edfe9fb41e2f2c0e7476fad54fac55b885996e --- /dev/null +++ b/camera/metadata/3.3/types.hal @@ -0,0 +1,267 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/* + * Autogenerated from camera metadata definitions in + * /system/media/camera/docs/metadata_definitions.xml + * *** DO NOT EDIT BY HAND *** + */ + +package android.hardware.camera.metadata@3.3; + +/* Include definitions from all prior minor HAL metadata revisions */ +import android.hardware.camera.metadata@3.2; + +/** + * Top level hierarchy definitions for camera metadata. *_INFO sections are for + * the static metadata that can be retrieved without opening the camera device. + */ +enum CameraMetadataSection : @3.2::CameraMetadataSection { + ANDROID_LOGICAL_MULTI_CAMERA = + android.hardware.camera.metadata@3.2::CameraMetadataSection:ANDROID_SECTION_COUNT, + + ANDROID_DISTORTION_CORRECTION, + + ANDROID_SECTION_COUNT_3_3, + + VENDOR_SECTION_3_3 = 0x8000, + +}; + +/** + * Hierarchy positions in enum space. All vendor extension sections must be + * defined with tag >= VENDOR_SECTION_START + */ +enum CameraMetadataSectionStart : android.hardware.camera.metadata@3.2::CameraMetadataSectionStart { + ANDROID_LOGICAL_MULTI_CAMERA_START = CameraMetadataSection:ANDROID_LOGICAL_MULTI_CAMERA << 16, + + ANDROID_DISTORTION_CORRECTION_START = CameraMetadataSection:ANDROID_DISTORTION_CORRECTION << 16, + + VENDOR_SECTION_START_3_3 = CameraMetadataSection:VENDOR_SECTION_3_3 << 16, + +}; + +/** + * Main enumeration for defining camera metadata tags added in this revision + * + *

Partial documentation is included for each tag; for complete documentation, reference + * '/system/media/camera/docs/docs.html' in the corresponding Android source tree.
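[Editor's aside, not part of the autogenerated file] The section arithmetic defined above means every tag value carries its section index in the upper 16 bits, with per-tag offsets in the low 16 bits. A minimal C++ sketch of that layout (helper names are hypothetical):

#include <cstdint>

// Hypothetical helpers mirroring the "<< 16" section-start layout shown above.
constexpr uint32_t kSectionShift = 16;
constexpr uint32_t sectionOf(uint32_t tag)       { return tag >> kSectionShift; }
constexpr uint32_t offsetInSection(uint32_t tag) { return tag & 0xFFFFu; }

// For example, ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS is declared below as the first
// tag at ANDROID_LOGICAL_MULTI_CAMERA_START, so its offsetInSection(...) is 0.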

+ */ +enum CameraMetadataTag : @3.2::CameraMetadataTag { + /** android.control.afSceneChange [dynamic, enum, public] + * + *

Whether a significant scene change is detected within the currently-set AF + * region(s).

+ */ + ANDROID_CONTROL_AF_SCENE_CHANGE = android.hardware.camera.metadata@3.2::CameraMetadataTag:ANDROID_CONTROL_END, + + ANDROID_CONTROL_END_3_3, + + /** android.lens.poseReference [static, enum, public] + * + *

The origin for ANDROID_LENS_POSE_TRANSLATION.

+ * + * @see ANDROID_LENS_POSE_TRANSLATION + */ + ANDROID_LENS_POSE_REFERENCE = android.hardware.camera.metadata@3.2::CameraMetadataTag:ANDROID_LENS_END, + + /** android.lens.distortion [dynamic, float[], public] + * + *

The correction coefficients to correct for this camera device's + * radial and tangential lens distortion.

+ *

Replaces the deprecated ANDROID_LENS_RADIAL_DISTORTION field, which was + * inconsistently defined.

+ * + * @see ANDROID_LENS_RADIAL_DISTORTION + */ + ANDROID_LENS_DISTORTION, + + ANDROID_LENS_END_3_3, + + /** android.request.availableSessionKeys [static, int32[], ndk_public] + * + *

A subset of the available request keys that the camera device + * can pass as part of the capture session initialization.

+ */ + ANDROID_REQUEST_AVAILABLE_SESSION_KEYS = android.hardware.camera.metadata@3.2::CameraMetadataTag:ANDROID_REQUEST_END, + + /** android.request.availablePhysicalCameraRequestKeys [static, int32[], hidden] + * + *

A subset of the available request keys that can be overridden for + * physical devices backing a logical multi-camera.

+ */ + ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS, + + ANDROID_REQUEST_END_3_3, + + /** android.statistics.oisDataMode [dynamic, enum, public] + * + *

A control for selecting whether OIS position information is included in output + * result metadata.

+ */ + ANDROID_STATISTICS_OIS_DATA_MODE = android.hardware.camera.metadata@3.2::CameraMetadataTag:ANDROID_STATISTICS_END, + + /** android.statistics.oisTimestamps [dynamic, int64[], ndk_public] + * + *

An array of timestamps of OIS samples, in nanoseconds.

+ */ + ANDROID_STATISTICS_OIS_TIMESTAMPS, + + /** android.statistics.oisXShifts [dynamic, float[], ndk_public] + * + *

An array of shifts of OIS samples, in x direction.

+ */ + ANDROID_STATISTICS_OIS_X_SHIFTS, + + /** android.statistics.oisYShifts [dynamic, float[], ndk_public] + * + *

An array of shifts of OIS samples, in y direction.
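[Editor's aside, not part of the patch] The three OIS entries above are naturally consumed as parallel, per-sample arrays: index i of the timestamp, x-shift, and y-shift arrays describes the same OIS sample. A hedged C++ sketch of pairing them (assumes equal-length arrays; the helper is hypothetical):

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <vector>

struct OisSample {
    int64_t timestampNs;  // ANDROID_STATISTICS_OIS_TIMESTAMPS[i]
    float xShift;         // ANDROID_STATISTICS_OIS_X_SHIFTS[i]
    float yShift;         // ANDROID_STATISTICS_OIS_Y_SHIFTS[i]
};

std::vector<OisSample> zipOisSamples(const std::vector<int64_t>& ts,
                                     const std::vector<float>& xs,
                                     const std::vector<float>& ys) {
    const size_t n = std::min({ts.size(), xs.size(), ys.size()});
    std::vector<OisSample> samples;
    samples.reserve(n);
    for (size_t i = 0; i < n; ++i) {
        samples.push_back({ts[i], xs[i], ys[i]});
    }
    return samples;
}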

+ */ + ANDROID_STATISTICS_OIS_Y_SHIFTS, + + ANDROID_STATISTICS_END_3_3, + + /** android.statistics.info.availableOisDataModes [static, byte[], public] + * + *

List of OIS data output modes for ANDROID_STATISTICS_OIS_DATA_MODE that + * are supported by this camera device.

+ * + * @see ANDROID_STATISTICS_OIS_DATA_MODE + */ + ANDROID_STATISTICS_INFO_AVAILABLE_OIS_DATA_MODES = android.hardware.camera.metadata@3.2::CameraMetadataTag:ANDROID_STATISTICS_INFO_END, + + ANDROID_STATISTICS_INFO_END_3_3, + + /** android.info.version [static, byte, public] + * + *

A short string for manufacturer version information about the camera device, such as + * ISP hardware, sensors, etc.

+ */ + ANDROID_INFO_VERSION = android.hardware.camera.metadata@3.2::CameraMetadataTag:ANDROID_INFO_END, + + ANDROID_INFO_END_3_3, + + /** android.logicalMultiCamera.physicalIds [static, byte[], hidden] + * + *

String containing the ids of the underlying physical cameras.
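[Editor's aside, not part of the patch] A hedged sketch of splitting this entry on the framework side, assuming the ids are concatenated NUL-terminated strings (e.g. "2\04\0" for physical cameras "2" and "4"); the helper name and the encoding detail are assumptions, not taken from this patch:

#include <cstddef>
#include <cstdint>
#include <string>
#include <vector>

std::vector<std::string> splitPhysicalIds(const uint8_t* data, size_t count) {
    std::vector<std::string> ids;
    size_t start = 0;
    for (size_t i = 0; i < count; ++i) {
        if (data[i] == '\0') {  // each id is assumed to be NUL-terminated
            ids.emplace_back(reinterpret_cast<const char*>(data) + start, i - start);
            start = i + 1;
        }
    }
    return ids;
}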

+ */ + ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS = CameraMetadataSectionStart:ANDROID_LOGICAL_MULTI_CAMERA_START, + + /** android.logicalMultiCamera.sensorSyncType [static, enum, public] + * + *

The accuracy of frame timestamp synchronization between physical cameras.

+ */ + ANDROID_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE, + + ANDROID_LOGICAL_MULTI_CAMERA_END_3_3, + + /** android.distortionCorrection.mode [dynamic, enum, public] + * + *

Mode of operation for the lens distortion correction block.

+ */ + ANDROID_DISTORTION_CORRECTION_MODE = CameraMetadataSectionStart:ANDROID_DISTORTION_CORRECTION_START, + + /** android.distortionCorrection.availableModes [static, byte[], public] + * + *

List of distortion correction modes for ANDROID_DISTORTION_CORRECTION_MODE that are + * supported by this camera device.

+ * + * @see ANDROID_DISTORTION_CORRECTION_MODE + */ + ANDROID_DISTORTION_CORRECTION_AVAILABLE_MODES, + + ANDROID_DISTORTION_CORRECTION_END_3_3, + +}; + +/* + * Enumeration definitions for the various entries that need them + */ + +/** android.control.aeMode enumeration values added since v3.2 + * @see ANDROID_CONTROL_AE_MODE + */ +enum CameraMetadataEnumAndroidControlAeMode : + @3.2::CameraMetadataEnumAndroidControlAeMode { + ANDROID_CONTROL_AE_MODE_ON_EXTERNAL_FLASH, +}; + +/** android.control.captureIntent enumeration values added since v3.2 + * @see ANDROID_CONTROL_CAPTURE_INTENT + */ +enum CameraMetadataEnumAndroidControlCaptureIntent : + @3.2::CameraMetadataEnumAndroidControlCaptureIntent { + ANDROID_CONTROL_CAPTURE_INTENT_MOTION_TRACKING, +}; + +/** android.control.afSceneChange enumeration values + * @see ANDROID_CONTROL_AF_SCENE_CHANGE + */ +enum CameraMetadataEnumAndroidControlAfSceneChange : uint32_t { + ANDROID_CONTROL_AF_SCENE_CHANGE_NOT_DETECTED, + ANDROID_CONTROL_AF_SCENE_CHANGE_DETECTED, +}; + +/** android.lens.poseReference enumeration values + * @see ANDROID_LENS_POSE_REFERENCE + */ +enum CameraMetadataEnumAndroidLensPoseReference : uint32_t { + ANDROID_LENS_POSE_REFERENCE_PRIMARY_CAMERA, + ANDROID_LENS_POSE_REFERENCE_GYROSCOPE, +}; + +/** android.request.availableCapabilities enumeration values added since v3.2 + * @see ANDROID_REQUEST_AVAILABLE_CAPABILITIES + */ +enum CameraMetadataEnumAndroidRequestAvailableCapabilities : + @3.2::CameraMetadataEnumAndroidRequestAvailableCapabilities { + ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MOTION_TRACKING, + ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA, + ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME, +}; + +/** android.statistics.oisDataMode enumeration values + * @see ANDROID_STATISTICS_OIS_DATA_MODE + */ +enum CameraMetadataEnumAndroidStatisticsOisDataMode : uint32_t { + ANDROID_STATISTICS_OIS_DATA_MODE_OFF, + ANDROID_STATISTICS_OIS_DATA_MODE_ON, +}; + +/** android.info.supportedHardwareLevel enumeration values added since v3.2 + * @see ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL + */ +enum CameraMetadataEnumAndroidInfoSupportedHardwareLevel : + @3.2::CameraMetadataEnumAndroidInfoSupportedHardwareLevel { + ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL, +}; + +/** android.logicalMultiCamera.sensorSyncType enumeration values + * @see ANDROID_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE + */ +enum CameraMetadataEnumAndroidLogicalMultiCameraSensorSyncType : uint32_t { + ANDROID_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE_APPROXIMATE, + ANDROID_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE_CALIBRATED, +}; + +/** android.distortionCorrection.mode enumeration values + * @see ANDROID_DISTORTION_CORRECTION_MODE + */ +enum CameraMetadataEnumAndroidDistortionCorrectionMode : uint32_t { + ANDROID_DISTORTION_CORRECTION_MODE_OFF, + ANDROID_DISTORTION_CORRECTION_MODE_FAST, + ANDROID_DISTORTION_CORRECTION_MODE_HIGH_QUALITY, +}; diff --git a/camera/provider/2.4/ICameraProvider.hal b/camera/provider/2.4/ICameraProvider.hal index 3015b7d9e1e7c2b5f108ede47687d82a9cab73f4..8773bc08198a55ce62740f761125da69c42ec114 100644 --- a/camera/provider/2.4/ICameraProvider.hal +++ b/camera/provider/2.4/ICameraProvider.hal @@ -35,22 +35,24 @@ import android.hardware.camera.device@3.2::ICameraDevice; * where * - / is the provider HAL HIDL version, * - is the type of devices this provider knows about, such as - * "internal", "legacy", "usb", or "remote" + * "internal", "legacy", "external", "remote" etc. 
The camera framework + * must not differentiate or change its behavior based on the specific type. * - is a non-negative integer starting from 0 to disambiguate * between multiple HALs of the same type. * * The "legacy" type is only used for passthrough legacy HAL mode, and must * not be used by a standalone binderized HAL. * * - * The device instance names enumerated by the provider must be of the form + * The device instance names enumerated by the provider in getCameraIdList() or + * ICameraProviderCallback::cameraDeviceStatusChange() must be of the form * "device@.//" where * / is the HIDL version of the interface. is either a small * incrementing integer for "internal" device types, with 0 being the main * back-facing camera and 1 being the main front-facing camera, if they exist. - * Or, for external devices such as type "usb", a unique serial number that can - * be used to identify the device reliably when it is disconnected and - * reconnected. Multiple providers may not enumerate the same device ID. + * Or, for external devices, a unique serial number (if possible) that can be + * used to identify the device reliably when it is disconnected and reconnected. * + * Multiple providers must not enumerate the same device ID. */ interface ICameraProvider { @@ -97,7 +99,7 @@ interface ICameraProvider { getVendorTags() generates (Status status, vec sections); /** - * getCameraDeviceList: + * getCameraIdList: * * Returns the list of internal camera device interfaces known to this * camera provider. These devices can then be accessed via the hardware diff --git a/camera/provider/2.4/default/Android.bp b/camera/provider/2.4/default/Android.bp index c0b35912f74736525f2d634bcb9fe6bcb2ab61ce..ae24d7814302166d983dd75153a1c4d56e2663de 100644 --- a/camera/provider/2.4/default/Android.bp +++ b/camera/provider/2.4/default/Android.bp @@ -3,7 +3,8 @@ cc_library_shared { defaults: ["hidl_defaults"], proprietary: true, relative_install_path: "hw", - srcs: ["CameraProvider.cpp"], + srcs: ["CameraProvider.cpp", + "ExternalCameraProvider.cpp"], shared_libs: [ "libhidlbase", "libhidltransport", @@ -12,9 +13,12 @@ cc_library_shared { "android.hardware.camera.device@1.0", "android.hardware.camera.device@3.2", "android.hardware.camera.device@3.3", + "android.hardware.camera.device@3.4", "camera.device@1.0-impl", "camera.device@3.2-impl", "camera.device@3.3-impl", + "camera.device@3.4-impl", + "camera.device@3.4-external-impl", "android.hardware.camera.provider@2.4", "android.hardware.camera.common@1.0", "android.hardware.graphics.mapper@2.0", @@ -22,11 +26,16 @@ cc_library_shared { "android.hidl.memory@1.0", "liblog", "libhardware", - "libcamera_metadata" + "libcamera_metadata", + "libtinyxml2" + ], + header_libs: [ + "camera.device@3.4-impl_headers", + "camera.device@3.4-external-impl_headers" ], static_libs: [ - "android.hardware.camera.common@1.0-helper" - ] + "android.hardware.camera.common@1.0-helper", + ], } cc_binary { @@ -37,6 +46,53 @@ cc_binary { srcs: ["service.cpp"], compile_multilib: "32", init_rc: ["android.hardware.camera.provider@2.4-service.rc"], + shared_libs: [ + "libhidlbase", + "libhidltransport", + "libbinder", + "liblog", + "libutils", + "android.hardware.camera.device@1.0", + "android.hardware.camera.device@3.2", + "android.hardware.camera.device@3.3", + "android.hardware.camera.device@3.4", + "android.hardware.camera.provider@2.4", + "android.hardware.camera.common@1.0", + ], +} + + +cc_binary { + name: "android.hardware.camera.provider@2.4-service_64", + defaults: ["hidl_defaults"], +
proprietary: true, + relative_install_path: "hw", + srcs: ["service.cpp"], + compile_multilib: "64", + init_rc: ["android.hardware.camera.provider@2.4-service_64.rc"], + shared_libs: [ + "libhidlbase", + "libhidltransport", + "libbinder", + "liblog", + "libutils", + "android.hardware.camera.device@1.0", + "android.hardware.camera.device@3.2", + "android.hardware.camera.device@3.3", + "android.hardware.camera.device@3.4", + "android.hardware.camera.provider@2.4", + "android.hardware.camera.common@1.0", + ], +} + +cc_binary { + name: "android.hardware.camera.provider@2.4-external-service", + defaults: ["hidl_defaults"], + proprietary: true, + relative_install_path: "hw", + srcs: ["external-service.cpp"], + compile_multilib: "32", + init_rc: ["android.hardware.camera.provider@2.4-external-service.rc"], shared_libs: [ "libhidlbase", "libhidltransport", diff --git a/camera/provider/2.4/default/CameraProvider.cpp b/camera/provider/2.4/default/CameraProvider.cpp index 8c5af8e26c2982c3dc935a8e45a3256c22096c42..63139390cbb4389997c9d5f3081179cfafbe7489 100644 --- a/camera/provider/2.4/default/CameraProvider.cpp +++ b/camera/provider/2.4/default/CameraProvider.cpp @@ -19,8 +19,10 @@ #include #include "CameraProvider.h" +#include "ExternalCameraProvider.h" #include "CameraDevice_1_0.h" #include "CameraDevice_3_3.h" +#include "CameraDevice_3_4.h" #include #include #include @@ -35,10 +37,12 @@ namespace implementation { namespace { const char *kLegacyProviderName = "legacy/0"; +const char *kExternalProviderName = "external/0"; // "device@/legacy/" const std::regex kDeviceNameRE("device@([0-9]+\\.[0-9]+)/legacy/(.+)"); const char *kHAL3_2 = "3.2"; const char *kHAL3_3 = "3.3"; +const char *kHAL3_4 = "3.4"; const char *kHAL1_0 = "1.0"; const int kMaxCameraDeviceNameLen = 128; const int kMaxCameraIdLen = 16; @@ -238,12 +242,16 @@ std::string CameraProvider::getHidlDeviceName( if (deviceVersion != CAMERA_DEVICE_API_VERSION_1_0 && deviceVersion != CAMERA_DEVICE_API_VERSION_3_2 && deviceVersion != CAMERA_DEVICE_API_VERSION_3_3 && - deviceVersion != CAMERA_DEVICE_API_VERSION_3_4 ) { + deviceVersion != CAMERA_DEVICE_API_VERSION_3_4 && + deviceVersion != CAMERA_DEVICE_API_VERSION_3_5) { return hidl_string(""); } bool isV1 = deviceVersion == CAMERA_DEVICE_API_VERSION_1_0; int versionMajor = isV1 ? 1 : 3; int versionMinor = isV1 ? 
0 : mPreferredHal3MinorVersion; + if (deviceVersion == CAMERA_DEVICE_API_VERSION_3_5) { + versionMinor = 4; + } char deviceName[kMaxCameraDeviceNameLen]; snprintf(deviceName, sizeof(deviceName), "device@%d.%d/legacy/%s", versionMajor, versionMinor, cameraId.c_str()); @@ -290,7 +298,8 @@ bool CameraProvider::initialize() { return true; } - mPreferredHal3MinorVersion = property_get_int32("ro.camera.wrapper.hal3TrebleMinorVersion", 3); + mPreferredHal3MinorVersion = + property_get_int32("ro.vendor.camera.wrapper.hal3TrebleMinorVersion", 3); ALOGV("Preferred HAL 3 minor version is %d", mPreferredHal3MinorVersion); switch(mPreferredHal3MinorVersion) { case 2: @@ -299,7 +308,8 @@ bool CameraProvider::initialize() { break; default: ALOGW("Unknown minor camera device HAL version %d in property " - "'camera.wrapper.hal3TrebleMinorVersion', defaulting to 3", mPreferredHal3MinorVersion); + "'camera.wrapper.hal3TrebleMinorVersion', defaulting to 3", + mPreferredHal3MinorVersion); mPreferredHal3MinorVersion = 3; } @@ -347,6 +357,7 @@ int CameraProvider::checkCameraVersion(int id, camera_info info) { case CAMERA_DEVICE_API_VERSION_3_2: case CAMERA_DEVICE_API_VERSION_3_3: case CAMERA_DEVICE_API_VERSION_3_4: + case CAMERA_DEVICE_API_VERSION_3_5: // in support break; case CAMERA_DEVICE_API_VERSION_2_0: @@ -535,10 +546,27 @@ Return CameraProvider::getCameraDeviceInterface_V3_x( return Void(); } + sp device; + if (deviceVersion == kHAL3_4) { + ALOGV("Constructing v3.4 camera device"); + sp deviceImpl = + new android::hardware::camera::device::V3_4::implementation::CameraDevice( + mModule, cameraId, mCameraDeviceNames); + if (deviceImpl == nullptr || deviceImpl->isInitFailed()) { + ALOGE("%s: camera device %s init failed!", __FUNCTION__, cameraId.c_str()); + device = nullptr; + _hidl_cb(Status::INTERNAL_ERROR, nullptr); + return Void(); + } + + device = deviceImpl; + _hidl_cb (Status::OK, device); + return Void(); + } + // Since some Treble HAL revisions can map to the same legacy HAL version(s), we default // to the newest possible Treble HAL revision, but allow for override if needed via // system property. 
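[Editor's aside, illustrative only, not part of this patch] The mapping implemented above, pulled out as a standalone helper to make the 3.5-to-3.4 wrapping explicit. It assumes the CAMERA_DEVICE_API_VERSION_* constants from <hardware/camera_common.h> and the preferred HAL3 minor version (default 3, overridable via ro.vendor.camera.wrapper.hal3TrebleMinorVersion):

#include <string>
#include <hardware/camera_common.h>

// Hypothetical standalone equivalent of the version handling in getHidlDeviceName().
static std::string legacyToHidlDeviceName(int deviceVersion, const std::string& cameraId,
                                          int preferredHal3Minor) {
    if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) {
        return "device@1.0/legacy/" + cameraId;
    }
    // Legacy 3.5 devices are exposed through the 3.4 HIDL implementation; 3.2/3.3/3.4
    // devices follow the (property-overridable) preferred minor version.
    const int minor =
        (deviceVersion == CAMERA_DEVICE_API_VERSION_3_5) ? 4 : preferredHal3Minor;
    return "device@3." + std::to_string(minor) + "/legacy/" + cameraId;
}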
- sp device; switch (mPreferredHal3MinorVersion) { case 2: { // Map legacy camera device v3 HAL to Treble camera device HAL v3.2 ALOGV("Constructing v3.2 camera device"); @@ -579,20 +607,24 @@ Return CameraProvider::getCameraDeviceInterface_V3_x( } ICameraProvider* HIDL_FETCH_ICameraProvider(const char* name) { - if (strcmp(name, kLegacyProviderName) != 0) { - return nullptr; - } - CameraProvider* provider = new CameraProvider(); - if (provider == nullptr) { - ALOGE("%s: cannot allocate camera provider!", __FUNCTION__); - return nullptr; - } - if (provider->isInitFailed()) { - ALOGE("%s: camera provider init failed!", __FUNCTION__); - delete provider; - return nullptr; + if (strcmp(name, kLegacyProviderName) == 0) { + CameraProvider* provider = new CameraProvider(); + if (provider == nullptr) { + ALOGE("%s: cannot allocate camera provider!", __FUNCTION__); + return nullptr; + } + if (provider->isInitFailed()) { + ALOGE("%s: camera provider init failed!", __FUNCTION__); + delete provider; + return nullptr; + } + return provider; + } else if (strcmp(name, kExternalProviderName) == 0) { + ExternalCameraProvider* provider = new ExternalCameraProvider(); + return provider; } - return provider; + ALOGE("%s: unknown instance name: %s", __FUNCTION__, name); + return nullptr; } } // namespace implementation diff --git a/camera/provider/2.4/default/ExternalCameraProvider.cpp b/camera/provider/2.4/default/ExternalCameraProvider.cpp new file mode 100644 index 0000000000000000000000000000000000000000..a4046d0907f3d65ea0c5332efcbae41fcaa089bc --- /dev/null +++ b/camera/provider/2.4/default/ExternalCameraProvider.cpp @@ -0,0 +1,297 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "CamPvdr@2.4-external" +//#define LOG_NDEBUG 0 +#include + +#include +#include +#include +#include +#include "ExternalCameraProvider.h" +#include "ExternalCameraDevice_3_4.h" + +namespace android { +namespace hardware { +namespace camera { +namespace provider { +namespace V2_4 { +namespace implementation { + +namespace { +// "device@/external/" +const std::regex kDeviceNameRE("device@([0-9]+\\.[0-9]+)/external/(.+)"); +const int kMaxDevicePathLen = 256; +const char* kDevicePath = "/dev/"; +constexpr char kPrefix[] = "video"; +constexpr int kPrefixLen = sizeof(kPrefix) - 1; + +bool matchDeviceName(const hidl_string& deviceName, std::string* deviceVersion, + std::string* cameraId) { + std::string deviceNameStd(deviceName.c_str()); + std::smatch sm; + if (std::regex_match(deviceNameStd, sm, kDeviceNameRE)) { + if (deviceVersion != nullptr) { + *deviceVersion = sm[1]; + } + if (cameraId != nullptr) { + *cameraId = sm[2]; + } + return true; + } + return false; +} + +} // anonymous namespace + +ExternalCameraProvider::ExternalCameraProvider() : + mCfg(ExternalCameraConfig::loadFromCfg()), + mHotPlugThread(this) { + mHotPlugThread.run("ExtCamHotPlug", PRIORITY_BACKGROUND); +} + +ExternalCameraProvider::~ExternalCameraProvider() { + mHotPlugThread.requestExit(); +} + + +Return ExternalCameraProvider::setCallback( + const sp& callback) { + { + Mutex::Autolock _l(mLock); + mCallbacks = callback; + } + if (mCallbacks == nullptr) { + return Status::OK; + } + // Send a callback for all devices to initialize + { + for (const auto& pair : mCameraStatusMap) { + mCallbacks->cameraDeviceStatusChange(pair.first, pair.second); + } + } + + return Status::OK; +} + +Return ExternalCameraProvider::getVendorTags(getVendorTags_cb _hidl_cb) { + // No vendor tag support for USB camera + hidl_vec zeroSections; + _hidl_cb(Status::OK, zeroSections); + return Void(); +} + +Return ExternalCameraProvider::getCameraIdList(getCameraIdList_cb _hidl_cb) { + // External camera HAL always report 0 camera, and extra cameras + // are just reported via cameraDeviceStatusChange callbacks + hidl_vec hidlDeviceNameList; + _hidl_cb(Status::OK, hidlDeviceNameList); + return Void(); +} + +Return ExternalCameraProvider::isSetTorchModeSupported( + isSetTorchModeSupported_cb _hidl_cb) { + // No torch mode support for USB camera + _hidl_cb (Status::OK, false); + return Void(); +} + +Return ExternalCameraProvider::getCameraDeviceInterface_V1_x( + const hidl_string&, + getCameraDeviceInterface_V1_x_cb _hidl_cb) { + // External Camera HAL does not support HAL1 + _hidl_cb(Status::OPERATION_NOT_SUPPORTED, nullptr); + return Void(); +} + +Return ExternalCameraProvider::getCameraDeviceInterface_V3_x( + const hidl_string& cameraDeviceName, + getCameraDeviceInterface_V3_x_cb _hidl_cb) { + + std::string cameraId, deviceVersion; + bool match = matchDeviceName(cameraDeviceName, &deviceVersion, &cameraId); + if (!match) { + _hidl_cb(Status::ILLEGAL_ARGUMENT, nullptr); + return Void(); + } + + if (mCameraStatusMap.count(cameraDeviceName) == 0 || + mCameraStatusMap[cameraDeviceName] != CameraDeviceStatus::PRESENT) { + _hidl_cb(Status::ILLEGAL_ARGUMENT, nullptr); + return Void(); + } + + ALOGV("Constructing v3.4 external camera device"); + sp device; + sp deviceImpl = + new device::V3_4::implementation::ExternalCameraDevice( + cameraId, mCfg); + if (deviceImpl == nullptr || deviceImpl->isInitFailed()) { + ALOGE("%s: camera device %s init failed!", __FUNCTION__, cameraId.c_str()); + device = nullptr; + _hidl_cb(Status::INTERNAL_ERROR, 
nullptr); + return Void(); + } + device = deviceImpl; + + _hidl_cb (Status::OK, device); + + return Void(); +} + +void ExternalCameraProvider::addExternalCamera(const char* devName) { + ALOGI("ExtCam: adding %s to External Camera HAL!", devName); + Mutex::Autolock _l(mLock); + std::string deviceName = std::string("device@3.4/external/") + devName; + mCameraStatusMap[deviceName] = CameraDeviceStatus::PRESENT; + if (mCallbacks != nullptr) { + mCallbacks->cameraDeviceStatusChange(deviceName, CameraDeviceStatus::PRESENT); + } +} + +void ExternalCameraProvider::deviceAdded(const char* devName) { + { + base::unique_fd fd(::open(devName, O_RDWR)); + if (fd.get() < 0) { + ALOGE("%s open v4l2 device %s failed:%s", __FUNCTION__, devName, strerror(errno)); + return; + } + + struct v4l2_capability capability; + int ret = ioctl(fd.get(), VIDIOC_QUERYCAP, &capability); + if (ret < 0) { + ALOGE("%s v4l2 QUERYCAP %s failed", __FUNCTION__, devName); + return; + } + + if (!(capability.device_caps & V4L2_CAP_VIDEO_CAPTURE)) { + ALOGW("%s device %s does not support VIDEO_CAPTURE", __FUNCTION__, devName); + return; + } + } + // See if we can initialize ExternalCameraDevice correctly + sp deviceImpl = + new device::V3_4::implementation::ExternalCameraDevice(devName, mCfg); + if (deviceImpl == nullptr || deviceImpl->isInitFailed()) { + ALOGW("%s: Attempt to init camera device %s failed!", __FUNCTION__, devName); + return; + } + deviceImpl.clear(); + + addExternalCamera(devName); + return; +} + +void ExternalCameraProvider::deviceRemoved(const char* devName) { + Mutex::Autolock _l(mLock); + std::string deviceName = std::string("device@3.4/external/") + devName; + if (mCameraStatusMap.find(deviceName) != mCameraStatusMap.end()) { + mCameraStatusMap.erase(deviceName); + if (mCallbacks != nullptr) { + mCallbacks->cameraDeviceStatusChange(deviceName, CameraDeviceStatus::NOT_PRESENT); + } + } else { + ALOGE("%s: cannot find camera device %s", __FUNCTION__, devName); + } +} + +ExternalCameraProvider::HotplugThread::HotplugThread(ExternalCameraProvider* parent) : + Thread(/*canCallJava*/false), + mParent(parent), + mInternalDevices(parent->mCfg.mInternalDevices) {} + +ExternalCameraProvider::HotplugThread::~HotplugThread() {} + +bool ExternalCameraProvider::HotplugThread::threadLoop() { + // Find existing /dev/video* devices + DIR* devdir = opendir(kDevicePath); + if(devdir == 0) { + ALOGE("%s: cannot open %s! Exiting threadloop", __FUNCTION__, kDevicePath); + return false; + } + + struct dirent* de; + while ((de = readdir(devdir)) != 0) { + // Find external v4l devices that's existing before we start watching and add them + if (!strncmp(kPrefix, de->d_name, kPrefixLen)) { + // TODO: This might reject some valid devices. Ex: internal is 33 and a device named 3 + // is added. + std::string deviceId(de->d_name + kPrefixLen); + if (mInternalDevices.count(deviceId) == 0) { + ALOGV("Non-internal v4l device %s found", de->d_name); + char v4l2DevicePath[kMaxDevicePathLen]; + snprintf(v4l2DevicePath, kMaxDevicePathLen, + "%s%s", kDevicePath, de->d_name); + mParent->deviceAdded(v4l2DevicePath); + } + } + } + closedir(devdir); + + // Watch new video devices + mINotifyFD = inotify_init(); + if (mINotifyFD < 0) { + ALOGE("%s: inotify init failed! Exiting threadloop", __FUNCTION__); + return true; + } + + mWd = inotify_add_watch(mINotifyFD, kDevicePath, IN_CREATE | IN_DELETE); + if (mWd < 0) { + ALOGE("%s: inotify add watch failed! 
Exiting threadloop", __FUNCTION__); + return true; + } + + ALOGI("%s start monitoring new V4L2 devices", __FUNCTION__); + + bool done = false; + char eventBuf[512]; + while (!done) { + int offset = 0; + int ret = read(mINotifyFD, eventBuf, sizeof(eventBuf)); + if (ret >= (int)sizeof(struct inotify_event)) { + while (offset < ret) { + struct inotify_event* event = (struct inotify_event*)&eventBuf[offset]; + if (event->wd == mWd) { + if (!strncmp(kPrefix, event->name, kPrefixLen)) { + std::string deviceId(event->name + kPrefixLen); + if (mInternalDevices.count(deviceId) == 0) { + char v4l2DevicePath[kMaxDevicePathLen]; + snprintf(v4l2DevicePath, kMaxDevicePathLen, + "%s%s", kDevicePath, event->name); + if (event->mask & IN_CREATE) { + mParent->deviceAdded(v4l2DevicePath); + } + if (event->mask & IN_DELETE) { + mParent->deviceRemoved(v4l2DevicePath); + } + } + } + } + offset += sizeof(struct inotify_event) + event->len; + } + } + } + + return true; +} + +} // namespace implementation +} // namespace V2_4 +} // namespace provider +} // namespace camera +} // namespace hardware +} // namespace android diff --git a/camera/provider/2.4/default/ExternalCameraProvider.h b/camera/provider/2.4/default/ExternalCameraProvider.h new file mode 100644 index 0000000000000000000000000000000000000000..c83cc708bf8e4464056311b71b486990f2860773 --- /dev/null +++ b/camera/provider/2.4/default/ExternalCameraProvider.h @@ -0,0 +1,109 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_CAMERA_PROVIDER_V2_4_EXTCAMERAPROVIDER_H +#define ANDROID_HARDWARE_CAMERA_PROVIDER_V2_4_EXTCAMERAPROVIDER_H + +#include +#include +#include +#include "utils/Mutex.h" +#include "utils/Thread.h" +#include +#include +#include +#include "ExternalCameraUtils.h" + +namespace android { +namespace hardware { +namespace camera { +namespace provider { +namespace V2_4 { +namespace implementation { + +using ::android::hardware::camera::common::V1_0::CameraDeviceStatus; +using ::android::hardware::camera::common::V1_0::Status; +using ::android::hardware::camera::common::V1_0::VendorTagSection; +using ::android::hardware::camera::external::common::ExternalCameraConfig; +using ::android::hardware::camera::provider::V2_4::ICameraProvider; +using ::android::hardware::camera::provider::V2_4::ICameraProviderCallback; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::hardware::hidl_vec; +using ::android::hardware::hidl_string; +using ::android::sp; +using ::android::Mutex; + +struct ExternalCameraProvider : public ICameraProvider { + ExternalCameraProvider(); + ~ExternalCameraProvider(); + + // Methods from ::android::hardware::camera::provider::V2_4::ICameraProvider follow. 
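[Editor's aside, illustrative only, not part of the header] How a name announced by addExternalCamera() above parses against the kDeviceNameRE pattern from ExternalCameraProvider.cpp; note that, as written, the provider uses the full V4L2 node path as the camera id:

#include <iostream>
#include <regex>
#include <string>

int main() {
    // Same pattern as kDeviceNameRE in ExternalCameraProvider.cpp.
    const std::regex kDeviceNameRE("device@([0-9]+\\.[0-9]+)/external/(.+)");
    // addExternalCamera("/dev/video2") announces this name via cameraDeviceStatusChange().
    const std::string name = "device@3.4/external//dev/video2";
    std::smatch sm;
    if (std::regex_match(name, sm, kDeviceNameRE)) {
        std::cout << "version=" << sm[1] << " id=" << sm[2] << "\n";  // "3.4", "/dev/video2"
    }
    return 0;
}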
+ Return setCallback(const sp& callback) override; + + Return getVendorTags(getVendorTags_cb _hidl_cb) override; + + Return getCameraIdList(getCameraIdList_cb _hidl_cb) override; + + Return isSetTorchModeSupported(isSetTorchModeSupported_cb _hidl_cb) override; + + Return getCameraDeviceInterface_V1_x( + const hidl_string&, + getCameraDeviceInterface_V1_x_cb) override; + Return getCameraDeviceInterface_V3_x( + const hidl_string&, + getCameraDeviceInterface_V3_x_cb) override; + +private: + + void addExternalCamera(const char* devName); + + void deviceAdded(const char* devName); + + void deviceRemoved(const char* devName); + + class HotplugThread : public android::Thread { + public: + HotplugThread(ExternalCameraProvider* parent); + ~HotplugThread(); + + virtual bool threadLoop() override; + + private: + ExternalCameraProvider* mParent = nullptr; + const std::unordered_set mInternalDevices; + + int mINotifyFD = -1; + int mWd = -1; + }; + + Mutex mLock; + sp mCallbacks = nullptr; + std::unordered_map mCameraStatusMap; // camera id -> status + const ExternalCameraConfig mCfg; + HotplugThread mHotPlugThread; +}; + + + +} // namespace implementation +} // namespace V2_4 +} // namespace provider +} // namespace camera +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_CAMERA_PROVIDER_V2_4_EXTCAMERAPROVIDER_H diff --git a/camera/provider/2.4/default/android.hardware.camera.provider@2.4-external-service.rc b/camera/provider/2.4/default/android.hardware.camera.provider@2.4-external-service.rc new file mode 100644 index 0000000000000000000000000000000000000000..acdb2007a50be29b7fad726e0a406e4d91b7ea93 --- /dev/null +++ b/camera/provider/2.4/default/android.hardware.camera.provider@2.4-external-service.rc @@ -0,0 +1,7 @@ +service vendor.camera-provider-2-4-ext /vendor/bin/hw/android.hardware.camera.provider@2.4-external-service + class hal + user cameraserver + group audio camera input drmrpc usb + ioprio rt 4 + capabilities SYS_NICE + writepid /dev/cpuset/camera-daemon/tasks /dev/stune/top-app/tasks diff --git a/camera/provider/2.4/default/android.hardware.camera.provider@2.4-service.rc b/camera/provider/2.4/default/android.hardware.camera.provider@2.4-service.rc index 2bf309b422f179f38765dfac76e792e5d2865e85..c9196284ee56791076812c589a9d130e32fef943 100644 --- a/camera/provider/2.4/default/android.hardware.camera.provider@2.4-service.rc +++ b/camera/provider/2.4/default/android.hardware.camera.provider@2.4-service.rc @@ -1,4 +1,4 @@ -service camera-provider-2-4 /vendor/bin/hw/android.hardware.camera.provider@2.4-service +service vendor.camera-provider-2-4 /vendor/bin/hw/android.hardware.camera.provider@2.4-service class hal user cameraserver group audio camera input drmrpc diff --git a/camera/provider/2.4/default/android.hardware.camera.provider@2.4-service_64.rc b/camera/provider/2.4/default/android.hardware.camera.provider@2.4-service_64.rc new file mode 100644 index 0000000000000000000000000000000000000000..4c721ecb882242a65f06d9466b65f37043c9d60e --- /dev/null +++ b/camera/provider/2.4/default/android.hardware.camera.provider@2.4-service_64.rc @@ -0,0 +1,7 @@ +service vendor.camera-provider-2-4 /vendor/bin/hw/android.hardware.camera.provider@2.4-service_64 + class hal + user cameraserver + group audio camera input drmrpc + ioprio rt 4 + capabilities SYS_NICE + writepid /dev/cpuset/camera-daemon/tasks /dev/stune/top-app/tasks diff --git a/camera/provider/2.4/default/external-service.cpp b/camera/provider/2.4/default/external-service.cpp new file mode 100644 index 
0000000000000000000000000000000000000000..f91aa596c444fcfb5e2d2df8a2aba000b6f2702c --- /dev/null +++ b/camera/provider/2.4/default/external-service.cpp @@ -0,0 +1,34 @@ +/* + * Copyright 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "android.hardware.camera.provider@2.4-external-service" + +#include +#include + +#include + +using android::hardware::camera::provider::V2_4::ICameraProvider; +using android::hardware::defaultPassthroughServiceImplementation; + +int main() +{ + ALOGI("External camera provider service is starting."); + // The camera HAL may communicate to other vendor components via + // /dev/vndbinder + android::ProcessState::initWithDriver("/dev/vndbinder"); + return defaultPassthroughServiceImplementation("external/0", /*maxThreads*/ 6); +} diff --git a/camera/provider/2.4/vts/functional/Android.bp b/camera/provider/2.4/vts/functional/Android.bp index 81d3de15660554b5c552169520c6e226f3f7cd06..ead4083ea1841f9e2d2436004f318807a373cf9a 100644 --- a/camera/provider/2.4/vts/functional/Android.bp +++ b/camera/provider/2.4/vts/functional/Android.bp @@ -23,6 +23,7 @@ cc_test { shared_libs: [ "libbinder", "libcamera_metadata", + "libcutils", "libfmq", "libgui", "libui", @@ -35,9 +36,13 @@ cc_test { "android.hardware.camera.device@1.0", "android.hardware.camera.device@3.2", "android.hardware.camera.device@3.3", + "android.hardware.camera.device@3.4", "android.hardware.camera.provider@2.4", + "android.hardware.graphics.allocator@2.0", "android.hardware.graphics.common@1.0", "android.hardware.graphics.mapper@2.0", + "android.hidl.allocator@1.0", "libgrallocusage", + "libhidlmemory", ], } diff --git a/camera/provider/2.4/vts/functional/VtsHalCameraProviderV2_4TargetTest.cpp b/camera/provider/2.4/vts/functional/VtsHalCameraProviderV2_4TargetTest.cpp index e4cf9af273ca7256db3d7317ef9a55cf479419ce..95c7167c3b096dcdb5c3034d994014981f375e4a 100644 --- a/camera/provider/2.4/vts/functional/VtsHalCameraProviderV2_4TargetTest.cpp +++ b/camera/provider/2.4/vts/functional/VtsHalCameraProviderV2_4TargetTest.cpp @@ -1,5 +1,5 @@ /* - * Copyright (C) 2016 The Android Open Source Project + * Copyright (C) 2016-2018 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -16,10 +16,12 @@ #define LOG_TAG "camera_hidl_hal_test" +#include #include #include #include #include +#include #include #include @@ -27,11 +29,14 @@ #include #include #include +#include +#include #include #include #include #include #include +#include #include #include #include @@ -43,9 +48,17 @@ #include #include +#include +#include +#include +#include +#include +#include + #include #include +using namespace ::android::hardware::camera::device; using ::android::hardware::Return; using ::android::hardware::Void; using ::android::hardware::hidl_handle; @@ -60,6 +73,7 @@ using ::android::BufferQueue; using ::android::BufferItemConsumer; using ::android::Surface; using ::android::hardware::graphics::common::V1_0::BufferUsage; +using ::android::hardware::graphics::common::V1_0::Dataspace; using ::android::hardware::graphics::common::V1_0::PixelFormat; using ::android::hardware::camera::common::V1_0::Status; using ::android::hardware::camera::common::V1_0::CameraDeviceStatus; @@ -77,7 +91,6 @@ using ::android::hardware::camera::device::V3_2::ICameraDeviceCallback; using ::android::hardware::camera::device::V3_2::ICameraDeviceSession; using ::android::hardware::camera::device::V3_2::NotifyMsg; using ::android::hardware::camera::device::V3_2::RequestTemplate; -using ::android::hardware::camera::device::V3_2::Stream; using ::android::hardware::camera::device::V3_2::StreamType; using ::android::hardware::camera::device::V3_2::StreamRotation; using ::android::hardware::camera::device::V3_2::StreamConfiguration; @@ -99,6 +112,9 @@ using ::android::hardware::camera::device::V1_0::FrameCallbackFlag; using ::android::hardware::camera::device::V1_0::HandleTimestampMessage; using ::android::hardware::MessageQueue; using ::android::hardware::kSynchronizedReadWrite; +using ::android::hidl::allocator::V1_0::IAllocator; +using ::android::hidl::memory::V1_0::IMemory; +using ::android::hidl::memory::V1_0::IMapper; using ResultMetadataQueue = MessageQueue; using ::android::hidl::manager::V1_0::IServiceManager; @@ -113,6 +129,7 @@ const int64_t kAutoFocusTimeoutSec = 5; const int64_t kTorchTimeoutSec = 1; const int64_t kEmptyFlushTimeoutMSec = 200; const char kDumpOutput[] = "/dev/null"; +const uint32_t kBurstFrameCount = 10; struct AvailableStream { int32_t width; @@ -128,9 +145,11 @@ struct AvailableZSLInputOutput { namespace { // "device@/legacy/" const char *kDeviceNameRE = "device@([0-9]+\\.[0-9]+)/%s/(.+)"; + const int CAMERA_DEVICE_API_VERSION_3_4 = 0x304; const int CAMERA_DEVICE_API_VERSION_3_3 = 0x303; const int CAMERA_DEVICE_API_VERSION_3_2 = 0x302; const int CAMERA_DEVICE_API_VERSION_1_0 = 0x100; + const char *kHAL3_4 = "3.4"; const char *kHAL3_3 = "3.3"; const char *kHAL3_2 = "3.2"; const char *kHAL1_0 = "1.0"; @@ -164,7 +183,9 @@ namespace { return -1; } - if (version.compare(kHAL3_3) == 0) { + if (version.compare(kHAL3_4) == 0) { + return CAMERA_DEVICE_API_VERSION_3_4; + } else if (version.compare(kHAL3_3) == 0) { return CAMERA_DEVICE_API_VERSION_3_3; } else if (version.compare(kHAL3_2) == 0) { return CAMERA_DEVICE_API_VERSION_3_2; @@ -526,12 +547,16 @@ public: } }; - struct DeviceCb : public ICameraDeviceCallback { + struct DeviceCb : public V3_4::ICameraDeviceCallback { DeviceCb(CameraHidlTest *parent) : mParent(parent) {} + Return processCaptureResult_3_4( + const hidl_vec& results) override; Return processCaptureResult(const hidl_vec& results) override; Return notify(const hidl_vec& msgs) override; private: + bool processCaptureResultLocked(const CaptureResult& results); + CameraHidlTest 
*mParent; // Parent object }; @@ -606,18 +631,36 @@ public: void setParameters( const sp<::android::hardware::camera::device::V1_0::ICameraDevice> &device, const CameraParameters &cameraParams); + void allocateGraphicBuffer(uint32_t width, uint32_t height, uint64_t usage, + PixelFormat format, hidl_handle *buffer_handle /*out*/); void waitForFrameLocked(DataCallbackMsg msgFrame, std::unique_lock &l); void openEmptyDeviceSession(const std::string &name, sp provider, sp *session /*out*/, - sp *session3_3 /*out*/, camera_metadata_t **staticMeta /*out*/); - void configurePreviewStream(const std::string &name, + void castSession(const sp &session, int32_t deviceVersion, + sp *session3_3 /*out*/, + sp *session3_4 /*out*/); + void createStreamConfiguration(const ::android::hardware::hidl_vec& streams3_2, + StreamConfigurationMode configMode, + ::android::hardware::camera::device::V3_2::StreamConfiguration *config3_2, + ::android::hardware::camera::device::V3_4::StreamConfiguration *config3_4); + + void configurePreviewStreams3_4(const std::string &name, int32_t deviceVersion, + sp provider, + const AvailableStream *previewThreshold, + const std::unordered_set& physicalIds, + sp *session3_4 /*out*/, + V3_2::Stream* previewStream /*out*/, + device::V3_4::HalStreamConfiguration *halStreamConfig /*out*/, + bool *supportsPartialResults /*out*/, + uint32_t *partialResultCount /*out*/); + void configurePreviewStream(const std::string &name, int32_t deviceVersion, sp provider, const AvailableStream *previewThreshold, sp *session /*out*/, - Stream *previewStream /*out*/, + V3_2::Stream *previewStream /*out*/, HalStreamConfiguration *halStreamConfig /*out*/, bool *supportsPartialResults /*out*/, uint32_t *partialResultCount /*out*/); @@ -625,6 +668,16 @@ public: std::vector &outputStreams, const AvailableStream *threshold = nullptr); static Status isConstrainedModeAvailable(camera_metadata_t *staticMeta); + static Status isLogicalMultiCamera(camera_metadata_t *staticMeta); + static Status getPhysicalCameraIds(camera_metadata_t *staticMeta, + std::unordered_set *physicalIds/*out*/); + static Status getSupportedKeys(camera_metadata_t *staticMeta, + uint32_t tagId, std::unordered_set *requestIDs/*out*/); + static void constructFilteredSettings(const sp& session, + const std::unordered_set& availableKeys, RequestTemplate reqTemplate, + android::hardware::camera::common::V1_0::helper::CameraMetadata* defaultSettings/*out*/, + android::hardware::camera::common::V1_0::helper::CameraMetadata* filteredSettings + /*out*/); static Status pickConstrainedModeSize(camera_metadata_t *staticMeta, AvailableStream &hfrStream); static Status isZSLModeAvailable(camera_metadata_t *staticMeta); @@ -682,6 +735,20 @@ protected: // return from HAL but framework. 
::android::Vector resultOutputBuffers; + InFlightRequest() : + shutterTimestamp(0), + errorCodeValid(false), + errorCode(ErrorCode::ERROR_BUFFER), + usePartialResult(false), + numPartialResults(0), + resultQueue(nullptr), + haveResultMetadata(false), + numBuffersLeft(0), + frameNumber(0), + partialResultCount(0), + errorStreamId(-1), + hasInputBuffer(false) {} + InFlightRequest(ssize_t numBuffers, bool hasInput, bool partialResults, uint32_t partialCount, std::shared_ptr queue = nullptr) : @@ -827,6 +894,27 @@ Return CameraHidlTest::Camera1DeviceCb::handleCallbackTimestampBatch( return Void(); } +Return CameraHidlTest::DeviceCb::processCaptureResult_3_4( + const hidl_vec& results) { + + if (nullptr == mParent) { + return Void(); + } + + bool notify = false; + std::unique_lock l(mParent->mLock); + for (size_t i = 0 ; i < results.size(); i++) { + notify = processCaptureResultLocked(results[i].v3_2); + } + + l.unlock(); + if (notify) { + mParent->mResultCondition.notify_one(); + } + + return Void(); +} + Return CameraHidlTest::DeviceCb::processCaptureResult( const hidl_vec& results) { if (nullptr == mParent) { @@ -836,129 +924,139 @@ Return CameraHidlTest::DeviceCb::processCaptureResult( bool notify = false; std::unique_lock l(mParent->mLock); for (size_t i = 0 ; i < results.size(); i++) { - uint32_t frameNumber = results[i].frameNumber; - - if ((results[i].result.size() == 0) && - (results[i].outputBuffers.size() == 0) && - (results[i].inputBuffer.buffer == nullptr) && - (results[i].fmqResultSize == 0)) { - ALOGE("%s: No result data provided by HAL for frame %d result count: %d", - __func__, frameNumber, (int) results[i].fmqResultSize); - ADD_FAILURE(); - break; - } + notify = processCaptureResultLocked(results[i]); + } - ssize_t idx = mParent->mInflightMap.indexOfKey(frameNumber); - if (::android::NAME_NOT_FOUND == idx) { - ALOGE("%s: Unexpected frame number! received: %u", - __func__, frameNumber); + l.unlock(); + if (notify) { + mParent->mResultCondition.notify_one(); + } + + return Void(); +} + +bool CameraHidlTest::DeviceCb::processCaptureResultLocked(const CaptureResult& results) { + bool notify = false; + uint32_t frameNumber = results.frameNumber; + + if ((results.result.size() == 0) && + (results.outputBuffers.size() == 0) && + (results.inputBuffer.buffer == nullptr) && + (results.fmqResultSize == 0)) { + ALOGE("%s: No result data provided by HAL for frame %d result count: %d", + __func__, frameNumber, (int) results.fmqResultSize); + ADD_FAILURE(); + return notify; + } + + ssize_t idx = mParent->mInflightMap.indexOfKey(frameNumber); + if (::android::NAME_NOT_FOUND == idx) { + ALOGE("%s: Unexpected frame number! 
received: %u", + __func__, frameNumber); + ADD_FAILURE(); + return notify; + } + + bool isPartialResult = false; + bool hasInputBufferInRequest = false; + InFlightRequest *request = mParent->mInflightMap.editValueAt(idx); + ::android::hardware::camera::device::V3_2::CameraMetadata resultMetadata; + size_t resultSize = 0; + if (results.fmqResultSize > 0) { + resultMetadata.resize(results.fmqResultSize); + if (request->resultQueue == nullptr) { ADD_FAILURE(); - break; + return notify; } - - bool isPartialResult = false; - bool hasInputBufferInRequest = false; - InFlightRequest *request = mParent->mInflightMap.editValueAt(idx); - ::android::hardware::camera::device::V3_2::CameraMetadata resultMetadata; - size_t resultSize = 0; - if (results[i].fmqResultSize > 0) { - resultMetadata.resize(results[i].fmqResultSize); - if (request->resultQueue == nullptr) { - ADD_FAILURE(); - break; - } - if (!request->resultQueue->read(resultMetadata.data(), - results[i].fmqResultSize)) { - ALOGE("%s: Frame %d: Cannot read camera metadata from fmq," - "size = %" PRIu64, __func__, frameNumber, - results[i].fmqResultSize); - ADD_FAILURE(); - break; - } - resultSize = resultMetadata.size(); - } else if (results[i].result.size() > 0) { - resultMetadata.setToExternal(const_cast( - results[i].result.data()), results[i].result.size()); - resultSize = resultMetadata.size(); - } - - if (!request->usePartialResult && (resultSize > 0) && - (results[i].partialResult != 1)) { - ALOGE("%s: Result is malformed for frame %d: partial_result %u " - "must be 1 if partial result is not supported", __func__, - frameNumber, results[i].partialResult); + if (!request->resultQueue->read(resultMetadata.data(), + results.fmqResultSize)) { + ALOGE("%s: Frame %d: Cannot read camera metadata from fmq," + "size = %" PRIu64, __func__, frameNumber, + results.fmqResultSize); ADD_FAILURE(); - break; + return notify; } + resultSize = resultMetadata.size(); + } else if (results.result.size() > 0) { + resultMetadata.setToExternal(const_cast( + results.result.data()), results.result.size()); + resultSize = resultMetadata.size(); + } - if (results[i].partialResult != 0) { - request->partialResultCount = results[i].partialResult; - } + if (!request->usePartialResult && (resultSize > 0) && + (results.partialResult != 1)) { + ALOGE("%s: Result is malformed for frame %d: partial_result %u " + "must be 1 if partial result is not supported", __func__, + frameNumber, results.partialResult); + ADD_FAILURE(); + return notify; + } - // Check if this result carries only partial metadata - if (request->usePartialResult && (resultSize > 0)) { - if ((results[i].partialResult > request->numPartialResults) || - (results[i].partialResult < 1)) { - ALOGE("%s: Result is malformed for frame %d: partial_result %u" - " must be in the range of [1, %d] when metadata is " - "included in the result", __func__, frameNumber, - results[i].partialResult, request->numPartialResults); - ADD_FAILURE(); - break; - } - request->collectedResult.append( - reinterpret_cast( - resultMetadata.data())); + if (results.partialResult != 0) { + request->partialResultCount = results.partialResult; + } - isPartialResult = - (results[i].partialResult < request->numPartialResults); + // Check if this result carries only partial metadata + if (request->usePartialResult && (resultSize > 0)) { + if ((results.partialResult > request->numPartialResults) || + (results.partialResult < 1)) { + ALOGE("%s: Result is malformed for frame %d: partial_result %u" + " must be in the range of [1, %d] when metadata 
is " + "included in the result", __func__, frameNumber, + results.partialResult, request->numPartialResults); + ADD_FAILURE(); + return notify; } + request->collectedResult.append( + reinterpret_cast( + resultMetadata.data())); + + isPartialResult = + (results.partialResult < request->numPartialResults); + } else if (resultSize > 0) { + request->collectedResult.append(reinterpret_cast( + resultMetadata.data())); + isPartialResult = false; + } - hasInputBufferInRequest = request->hasInputBuffer; - - // Did we get the (final) result metadata for this capture? - if ((resultSize > 0) && !isPartialResult) { - if (request->haveResultMetadata) { - ALOGE("%s: Called multiple times with metadata for frame %d", - __func__, frameNumber); - ADD_FAILURE(); - break; - } - request->haveResultMetadata = true; - request->collectedResult.sort(); - } + hasInputBufferInRequest = request->hasInputBuffer; - uint32_t numBuffersReturned = results[i].outputBuffers.size(); - if (results[i].inputBuffer.buffer != nullptr) { - if (hasInputBufferInRequest) { - numBuffersReturned += 1; - } else { - ALOGW("%s: Input buffer should be NULL if there is no input" - " buffer sent in the request", __func__); - } - } - request->numBuffersLeft -= numBuffersReturned; - if (request->numBuffersLeft < 0) { - ALOGE("%s: Too many buffers returned for frame %d", __func__, - frameNumber); + // Did we get the (final) result metadata for this capture? + if ((resultSize > 0) && !isPartialResult) { + if (request->haveResultMetadata) { + ALOGE("%s: Called multiple times with metadata for frame %d", + __func__, frameNumber); ADD_FAILURE(); - break; + return notify; } + request->haveResultMetadata = true; + request->collectedResult.sort(); + } - request->resultOutputBuffers.appendArray(results[i].outputBuffers.data(), - results[i].outputBuffers.size()); - // If shutter event is received notify the pending threads. - if (request->shutterTimestamp != 0) { - notify = true; + uint32_t numBuffersReturned = results.outputBuffers.size(); + if (results.inputBuffer.buffer != nullptr) { + if (hasInputBufferInRequest) { + numBuffersReturned += 1; + } else { + ALOGW("%s: Input buffer should be NULL if there is no input" + " buffer sent in the request", __func__); } } - - l.unlock(); - if (notify) { - mParent->mResultCondition.notify_one(); + request->numBuffersLeft -= numBuffersReturned; + if (request->numBuffersLeft < 0) { + ALOGE("%s: Too many buffers returned for frame %d", __func__, + frameNumber); + ADD_FAILURE(); + return notify; } - return Void(); + request->resultOutputBuffers.appendArray(results.outputBuffers.data(), + results.outputBuffers.size()); + // If shutter event is received notify the pending threads. 
+ if (request->shutterTimestamp != 0) {
+ notify = true;
+ }
+ return notify;
}
Return CameraHidlTest::DeviceCb::notify(
@@ -1004,7 +1102,7 @@ Return CameraHidlTest::DeviceCb::notify(
}
hidl_vec CameraHidlTest::getCameraDeviceNames(sp provider) {
- hidl_vec cameraDeviceNames;
+ std::vector cameraDeviceNames;
Return ret;
ret = provider->getCameraIdList(
[&](auto status, const auto& idList) {
@@ -1013,12 +1111,69 @@ hidl_vec CameraHidlTest::getCameraDeviceNames(sp p
ALOGI("Camera Id[%zu] is %s", i, idList[i].c_str());
}
ASSERT_EQ(Status::OK, status);
- cameraDeviceNames = idList;
+ for (const auto& id : idList) {
+ cameraDeviceNames.push_back(id);
+ }
});
if (!ret.isOk()) {
ADD_FAILURE();
}
- return cameraDeviceNames;
+
+ // External camera devices are reported through cameraDeviceStatusChange
+ struct ProviderCb : public ICameraProviderCallback {
+ virtual Return cameraDeviceStatusChange(
+ const hidl_string& devName,
+ CameraDeviceStatus newStatus) override {
+ ALOGI("camera device status callback name %s, status %d",
+ devName.c_str(), (int) newStatus);
+ if (newStatus == CameraDeviceStatus::PRESENT) {
+ externalCameraDeviceNames.push_back(devName);
+
+ }
+ return Void();
+ }
+
+ virtual Return torchModeStatusChange(
+ const hidl_string&, TorchModeStatus) override {
+ return Void();
+ }
+
+ std::vector externalCameraDeviceNames;
+ };
+ sp cb = new ProviderCb;
+ auto status = mProvider->setCallback(cb);
+
+ for (const auto& devName : cb->externalCameraDeviceNames) {
+ if (cameraDeviceNames.end() == std::find(
+ cameraDeviceNames.begin(), cameraDeviceNames.end(), devName)) {
+ cameraDeviceNames.push_back(devName);
+ }
+ }
+
+ hidl_vec retList(cameraDeviceNames.size());
+ for (size_t i = 0; i < cameraDeviceNames.size(); i++) {
+ retList[i] = cameraDeviceNames[i];
+ }
+ return retList;
+}
+
+// Test that devices with first_api_level >= P do not advertise device@1.0
+TEST_F(CameraHidlTest, noHal1AfterP) {
+ constexpr int32_t HAL1_PHASE_OUT_API_LEVEL = 28;
+ int32_t firstApiLevel = property_get_int32("ro.product.first_api_level", /*default*/-1);
+ if (firstApiLevel < 0) {
+ firstApiLevel = property_get_int32("ro.build.version.sdk", /*default*/-1);
+ }
+ ASSERT_GT(firstApiLevel, 0); // first_api_level must exist
+
+ if (firstApiLevel >= HAL1_PHASE_OUT_API_LEVEL) {
+ hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider);
+ for (const auto& name : cameraDeviceNames) {
+ int deviceVersion = getCameraDeviceVersion(name, mProviderType);
+ ASSERT_NE(deviceVersion, 0); // Must be a valid device version
+ ASSERT_NE(deviceVersion, CAMERA_DEVICE_API_VERSION_1_0); // Must not be device@1.0
+ }
+ }
}
// Test if ICameraProvider::isTorchModeSupported returns Status::OK
@@ -1040,9 +1195,6 @@ TEST_F(CameraHidlTest, getCameraIdList) {
ALOGI("Camera Id[%zu] is %s", i, idList[i].c_str());
}
ASSERT_EQ(Status::OK, status);
- // This is true for internal camera provider.
- // Not necessary hold for external cameras providers - ASSERT_GT(idList.size(), 0u); }); ASSERT_TRUE(ret.isOk()); } @@ -1100,6 +1252,7 @@ TEST_F(CameraHidlTest, getCameraDeviceInterface) { for (const auto& name : cameraDeviceNames) { int deviceVersion = getCameraDeviceVersion(name, mProviderType); switch (deviceVersion) { + case CAMERA_DEVICE_API_VERSION_3_4: case CAMERA_DEVICE_API_VERSION_3_3: case CAMERA_DEVICE_API_VERSION_3_2: { Return ret; @@ -1140,6 +1293,7 @@ TEST_F(CameraHidlTest, getResourceCost) { for (const auto& name : cameraDeviceNames) { int deviceVersion = getCameraDeviceVersion(name, mProviderType); switch (deviceVersion) { + case CAMERA_DEVICE_API_VERSION_3_4: case CAMERA_DEVICE_API_VERSION_3_3: case CAMERA_DEVICE_API_VERSION_3_2: { ::android::sp<::android::hardware::camera::device::V3_2::ICameraDevice> device3_x; @@ -1879,6 +2033,7 @@ TEST_F(CameraHidlTest, getCameraCharacteristics) { for (const auto& name : cameraDeviceNames) { int deviceVersion = getCameraDeviceVersion(name, mProviderType); switch (deviceVersion) { + case CAMERA_DEVICE_API_VERSION_3_4: case CAMERA_DEVICE_API_VERSION_3_3: case CAMERA_DEVICE_API_VERSION_3_2: { ::android::sp<::android::hardware::camera::device::V3_2::ICameraDevice> device3_x; @@ -1905,6 +2060,20 @@ TEST_F(CameraHidlTest, getCameraCharacteristics) { // characteristics keys we've defined. ASSERT_GT(entryCount, 0u); ALOGI("getCameraCharacteristics metadata entry count is %zu", entryCount); + + camera_metadata_ro_entry entry; + int retcode = find_camera_metadata_ro_entry(metadata, + ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, &entry); + if ((0 == retcode) && (entry.count > 0)) { + uint8_t hardwareLevel = entry.data.u8[0]; + ASSERT_TRUE( + hardwareLevel == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED || + hardwareLevel == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL || + hardwareLevel == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3 || + hardwareLevel == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL); + } else { + ADD_FAILURE() << "Get camera hardware level failed!"; + } }); ASSERT_TRUE(ret.isOk()); } @@ -1943,6 +2112,7 @@ TEST_F(CameraHidlTest, setTorchMode) { for (const auto& name : cameraDeviceNames) { int deviceVersion = getCameraDeviceVersion(name, mProviderType); switch (deviceVersion) { + case CAMERA_DEVICE_API_VERSION_3_4: case CAMERA_DEVICE_API_VERSION_3_3: case CAMERA_DEVICE_API_VERSION_3_2: { ::android::sp<::android::hardware::camera::device::V3_2::ICameraDevice> device3_x; @@ -2067,6 +2237,7 @@ TEST_F(CameraHidlTest, dumpState) { for (const auto& name : cameraDeviceNames) { int deviceVersion = getCameraDeviceVersion(name, mProviderType); switch (deviceVersion) { + case CAMERA_DEVICE_API_VERSION_3_4: case CAMERA_DEVICE_API_VERSION_3_3: case CAMERA_DEVICE_API_VERSION_3_2: { ::android::sp device3_x; @@ -2130,6 +2301,7 @@ TEST_F(CameraHidlTest, openClose) { for (const auto& name : cameraDeviceNames) { int deviceVersion = getCameraDeviceVersion(name, mProviderType); switch (deviceVersion) { + case CAMERA_DEVICE_API_VERSION_3_4: case CAMERA_DEVICE_API_VERSION_3_3: case CAMERA_DEVICE_API_VERSION_3_2: { ::android::sp<::android::hardware::camera::device::V3_2::ICameraDevice> device3_x; @@ -2152,12 +2324,14 @@ TEST_F(CameraHidlTest, openClose) { session = newSession; }); ASSERT_TRUE(ret.isOk()); - // Ensure that a device labeling itself as 3.3 can have its session interface cast - // to the 3.3 interface, and that lower versions can't be cast to it. 
- auto castResult = device::V3_3::ICameraDeviceSession::castFrom(session);
- ASSERT_TRUE(castResult.isOk());
- sp sessionV3_3 = castResult;
- if (deviceVersion == CAMERA_DEVICE_API_VERSION_3_3) {
+ // Ensure that a device labeling itself as 3.3/3.4 can have its session interface
+ // cast to the 3.3/3.4 interface, and that lower versions can't be cast to it.
+ sp sessionV3_3;
+ sp sessionV3_4;
+ castSession(session, deviceVersion, &sessionV3_3, &sessionV3_4);
+ if (deviceVersion == CAMERA_DEVICE_API_VERSION_3_4) {
+ ASSERT_TRUE(sessionV3_4.get() != nullptr);
+ } else if (deviceVersion == CAMERA_DEVICE_API_VERSION_3_3) {
ASSERT_TRUE(sessionV3_3.get() != nullptr);
} else {
ASSERT_TRUE(sessionV3_3.get() == nullptr);
@@ -2213,6 +2387,7 @@ TEST_F(CameraHidlTest, constructDefaultRequestSettings) {
for (const auto& name : cameraDeviceNames) {
int deviceVersion = getCameraDeviceVersion(name, mProviderType);
switch (deviceVersion) {
+ case CAMERA_DEVICE_API_VERSION_3_4:
case CAMERA_DEVICE_API_VERSION_3_3:
case CAMERA_DEVICE_API_VERSION_3_2: {
::android::sp<::android::hardware::camera::device::V3_2::ICameraDevice> device3_x;
@@ -2301,66 +2476,73 @@ TEST_F(CameraHidlTest, configureStreamsAvailableOutputs) {
for (const auto& name : cameraDeviceNames) {
int deviceVersion = getCameraDeviceVersion(name, mProviderType);
- switch (deviceVersion) {
- case CAMERA_DEVICE_API_VERSION_3_3:
- case CAMERA_DEVICE_API_VERSION_3_2: {
- camera_metadata_t* staticMeta;
- Return ret;
- sp session;
- sp session3_3;
- openEmptyDeviceSession(name, mProvider,
- &session /*out*/, &session3_3 /*out*/, &staticMeta /*out*/);
-
- outputStreams.clear();
- ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
- ASSERT_NE(0u, outputStreams.size());
-
- int32_t streamId = 0;
- for (auto& it : outputStreams) {
- Stream stream = {streamId,
- StreamType::OUTPUT,
- static_cast(it.width),
- static_cast(it.height),
- static_cast(it.format),
- GRALLOC1_CONSUMER_USAGE_HWCOMPOSER,
- 0,
- StreamRotation::ROTATION_0};
- ::android::hardware::hidl_vec streams = {stream};
- StreamConfiguration config = {streams, StreamConfigurationMode::NORMAL_MODE};
- if (session3_3 == nullptr) {
- ret = session->configureStreams(config,
- [streamId](Status s, HalStreamConfiguration halConfig) {
- ASSERT_EQ(Status::OK, s);
- ASSERT_EQ(1u, halConfig.streams.size());
- ASSERT_EQ(halConfig.streams[0].id, streamId);
- });
- } else {
- ret = session3_3->configureStreams_3_3(config,
- [streamId](Status s, device::V3_3::HalStreamConfiguration halConfig) {
- ASSERT_EQ(Status::OK, s);
- ASSERT_EQ(1u, halConfig.streams.size());
- ASSERT_EQ(halConfig.streams[0].v3_2.id, streamId);
- });
- }
- ASSERT_TRUE(ret.isOk());
- streamId++;
- }
+ if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) {
+ continue;
+ } else if (deviceVersion <= 0) {
+ ALOGE("%s: Unsupported device version %d", __func__, deviceVersion);
+ ADD_FAILURE();
+ return;
+ }
- free_camera_metadata(staticMeta);
- ret = session->close();
- ASSERT_TRUE(ret.isOk());
- }
- break;
- case CAMERA_DEVICE_API_VERSION_1_0: {
- //Not applicable
- }
- break;
- default: {
- ALOGE("%s: Unsupported device version %d", __func__, deviceVersion);
- ADD_FAILURE();
+ camera_metadata_t* staticMeta;
+ Return ret;
+ sp session;
+ sp session3_3;
+ sp session3_4;
+ openEmptyDeviceSession(name, mProvider,
+ &session /*out*/, &staticMeta /*out*/);
+ castSession(session, deviceVersion, &session3_3, &session3_4);
+
+ outputStreams.clear();
+ ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams));
+
ASSERT_NE(0u, outputStreams.size()); + + int32_t streamId = 0; + for (auto& it : outputStreams) { + V3_2::Stream stream3_2; + bool isJpeg = static_cast(it.format) == PixelFormat::BLOB; + stream3_2 = {streamId, + StreamType::OUTPUT, + static_cast(it.width), + static_cast(it.height), + static_cast(it.format), + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, + (isJpeg) ? static_cast(Dataspace::V0_JFIF) : 0, + StreamRotation::ROTATION_0}; + ::android::hardware::hidl_vec streams3_2 = {stream3_2}; + ::android::hardware::camera::device::V3_4::StreamConfiguration config3_4; + ::android::hardware::camera::device::V3_2::StreamConfiguration config3_2; + createStreamConfiguration(streams3_2, StreamConfigurationMode::NORMAL_MODE, + &config3_2, &config3_4); + if (session3_4 != nullptr) { + ret = session3_4->configureStreams_3_4(config3_4, + [streamId](Status s, device::V3_4::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(1u, halConfig.streams.size()); + ASSERT_EQ(halConfig.streams[0].v3_3.v3_2.id, streamId); + }); + } else if (session3_3 != nullptr) { + ret = session3_3->configureStreams_3_3(config3_2, + [streamId](Status s, device::V3_3::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(1u, halConfig.streams.size()); + ASSERT_EQ(halConfig.streams[0].v3_2.id, streamId); + }); + } else { + ret = session->configureStreams(config3_2, + [streamId](Status s, HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(1u, halConfig.streams.size()); + ASSERT_EQ(halConfig.streams[0].id, streamId); + }); } - break; + ASSERT_TRUE(ret.isOk()); + streamId++; } + + free_camera_metadata(staticMeta); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); } } @@ -2371,132 +2553,153 @@ TEST_F(CameraHidlTest, configureStreamsInvalidOutputs) { for (const auto& name : cameraDeviceNames) { int deviceVersion = getCameraDeviceVersion(name, mProviderType); - switch (deviceVersion) { - case CAMERA_DEVICE_API_VERSION_3_3: - case CAMERA_DEVICE_API_VERSION_3_2: { - camera_metadata_t* staticMeta; - Return ret; - sp session; - sp session3_3; - openEmptyDeviceSession(name, mProvider, - &session /*out*/, &session3_3 /*out*/, &staticMeta /*out*/); - - outputStreams.clear(); - ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams)); - ASSERT_NE(0u, outputStreams.size()); - - int32_t streamId = 0; - Stream stream = {streamId++, - StreamType::OUTPUT, - static_cast(0), - static_cast(0), - static_cast(outputStreams[0].format), - GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, - 0, - StreamRotation::ROTATION_0}; - ::android::hardware::hidl_vec streams = {stream}; - StreamConfiguration config = {streams, StreamConfigurationMode::NORMAL_MODE}; - if(session3_3 == nullptr) { - ret = session->configureStreams(config, - [](Status s, HalStreamConfiguration) { - ASSERT_TRUE((Status::ILLEGAL_ARGUMENT == s) || - (Status::INTERNAL_ERROR == s)); + if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) { + continue; + } else if (deviceVersion <= 0) { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + return; + } + + camera_metadata_t* staticMeta; + Return ret; + sp session; + sp session3_3; + sp session3_4; + openEmptyDeviceSession(name, mProvider, &session /*out*/, &staticMeta /*out*/); + castSession(session, deviceVersion, &session3_3, &session3_4); + + outputStreams.clear(); + ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams)); + ASSERT_NE(0u, outputStreams.size()); + + int32_t streamId = 0; + V3_2::Stream stream3_2 = 
{streamId++, + StreamType::OUTPUT, + static_cast(0), + static_cast(0), + static_cast(outputStreams[0].format), + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, + 0, + StreamRotation::ROTATION_0}; + ::android::hardware::hidl_vec streams = {stream3_2}; + ::android::hardware::camera::device::V3_4::StreamConfiguration config3_4; + ::android::hardware::camera::device::V3_2::StreamConfiguration config3_2; + createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, + &config3_2, &config3_4); + if(session3_4 != nullptr) { + ret = session3_4->configureStreams_3_4(config3_4, + [](Status s, device::V3_4::HalStreamConfiguration) { + ASSERT_TRUE((Status::ILLEGAL_ARGUMENT == s) || + (Status::INTERNAL_ERROR == s)); + }); + } else if(session3_3 != nullptr) { + ret = session3_3->configureStreams_3_3(config3_2, + [](Status s, device::V3_3::HalStreamConfiguration) { + ASSERT_TRUE((Status::ILLEGAL_ARGUMENT == s) || + (Status::INTERNAL_ERROR == s)); + }); + } else { + ret = session->configureStreams(config3_2, + [](Status s, HalStreamConfiguration) { + ASSERT_TRUE((Status::ILLEGAL_ARGUMENT == s) || + (Status::INTERNAL_ERROR == s)); + }); + } + ASSERT_TRUE(ret.isOk()); + + stream3_2 = {streamId++, + StreamType::OUTPUT, + static_cast(UINT32_MAX), + static_cast(UINT32_MAX), + static_cast(outputStreams[0].format), + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, + 0, + StreamRotation::ROTATION_0}; + streams[0] = stream3_2; + createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, + &config3_2, &config3_4); + if(session3_4 != nullptr) { + ret = session3_4->configureStreams_3_4(config3_4, [](Status s, + device::V3_4::HalStreamConfiguration) { + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + }); + } else if(session3_3 != nullptr) { + ret = session3_3->configureStreams_3_3(config3_2, [](Status s, + device::V3_3::HalStreamConfiguration) { + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + }); + } else { + ret = session->configureStreams(config3_2, [](Status s, + HalStreamConfiguration) { + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + }); + } + ASSERT_TRUE(ret.isOk()); + + for (auto& it : outputStreams) { + stream3_2 = {streamId++, + StreamType::OUTPUT, + static_cast(it.width), + static_cast(it.height), + static_cast(UINT32_MAX), + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, + 0, + StreamRotation::ROTATION_0}; + streams[0] = stream3_2; + createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, + &config3_2, &config3_4); + if(session3_4 != nullptr) { + ret = session3_4->configureStreams_3_4(config3_4, + [](Status s, device::V3_4::HalStreamConfiguration) { + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); }); - } else { - ret = session3_3->configureStreams_3_3(config, + } else if(session3_3 != nullptr) { + ret = session3_3->configureStreams_3_3(config3_2, [](Status s, device::V3_3::HalStreamConfiguration) { - ASSERT_TRUE((Status::ILLEGAL_ARGUMENT == s) || - (Status::INTERNAL_ERROR == s)); + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); }); - } - ASSERT_TRUE(ret.isOk()); + } else { + ret = session->configureStreams(config3_2, + [](Status s, HalStreamConfiguration) { + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + }); + } + ASSERT_TRUE(ret.isOk()); - stream = {streamId++, - StreamType::OUTPUT, - static_cast(UINT32_MAX), - static_cast(UINT32_MAX), - static_cast(outputStreams[0].format), - GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, - 0, - StreamRotation::ROTATION_0}; - streams[0] = stream; - config = {streams, StreamConfigurationMode::NORMAL_MODE}; - if(session3_3 == nullptr) { - ret = session->configureStreams(config, [](Status s, - 
HalStreamConfiguration) { + stream3_2 = {streamId++, + StreamType::OUTPUT, + static_cast(it.width), + static_cast(it.height), + static_cast(it.format), + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, + 0, + static_cast(UINT32_MAX)}; + streams[0] = stream3_2; + createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, + &config3_2, &config3_4); + if(session3_4 != nullptr) { + ret = session3_4->configureStreams_3_4(config3_4, + [](Status s, device::V3_4::HalStreamConfiguration) { ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); }); - } else { - ret = session3_3->configureStreams_3_3(config, [](Status s, - device::V3_3::HalStreamConfiguration) { + } else if(session3_3 != nullptr) { + ret = session3_3->configureStreams_3_3(config3_2, + [](Status s, device::V3_3::HalStreamConfiguration) { + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + }); + } else { + ret = session->configureStreams(config3_2, + [](Status s, HalStreamConfiguration) { ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); }); - } - ASSERT_TRUE(ret.isOk()); - - for (auto& it : outputStreams) { - stream = {streamId++, - StreamType::OUTPUT, - static_cast(it.width), - static_cast(it.height), - static_cast(UINT32_MAX), - GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, - 0, - StreamRotation::ROTATION_0}; - streams[0] = stream; - config = {streams, StreamConfigurationMode::NORMAL_MODE}; - if(session3_3 == nullptr) { - ret = session->configureStreams(config, - [](Status s, HalStreamConfiguration) { - ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); - }); - } else { - ret = session3_3->configureStreams_3_3(config, - [](Status s, device::V3_3::HalStreamConfiguration) { - ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); - }); - } - ASSERT_TRUE(ret.isOk()); - - stream = {streamId++, - StreamType::OUTPUT, - static_cast(it.width), - static_cast(it.height), - static_cast(it.format), - GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, - 0, - static_cast(UINT32_MAX)}; - streams[0] = stream; - config = {streams, StreamConfigurationMode::NORMAL_MODE}; - if(session3_3 == nullptr) { - ret = session->configureStreams(config, - [](Status s, HalStreamConfiguration) { - ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); - }); - } else { - ret = session3_3->configureStreams_3_3(config, - [](Status s, device::V3_3::HalStreamConfiguration) { - ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); - }); - } - ASSERT_TRUE(ret.isOk()); - } - - free_camera_metadata(staticMeta); - ret = session->close(); - ASSERT_TRUE(ret.isOk()); - } - break; - case CAMERA_DEVICE_API_VERSION_1_0: { - //Not applicable - } - break; - default: { - ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); - ADD_FAILURE(); } - break; + ASSERT_TRUE(ret.isOk()); } + + free_camera_metadata(staticMeta); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); } } @@ -2509,347 +2712,456 @@ TEST_F(CameraHidlTest, configureStreamsZSLInputOutputs) { for (const auto& name : cameraDeviceNames) { int deviceVersion = getCameraDeviceVersion(name, mProviderType); - switch (deviceVersion) { - case CAMERA_DEVICE_API_VERSION_3_3: - case CAMERA_DEVICE_API_VERSION_3_2: { - camera_metadata_t* staticMeta; - Return ret; - sp session; - sp session3_3; - openEmptyDeviceSession(name, mProvider, - &session /*out*/, &session3_3 /*out*/, &staticMeta /*out*/); + if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) { + continue; + } else if (deviceVersion <= 0) { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + return; + } - Status rc = isZSLModeAvailable(staticMeta); - if (Status::METHOD_NOT_SUPPORTED == rc) { - ret = session->close(); - 
ASSERT_TRUE(ret.isOk()); - continue; - } - ASSERT_EQ(Status::OK, rc); - - inputStreams.clear(); - ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, inputStreams)); - ASSERT_NE(0u, inputStreams.size()); - - inputOutputMap.clear(); - ASSERT_EQ(Status::OK, getZSLInputOutputMap(staticMeta, inputOutputMap)); - ASSERT_NE(0u, inputOutputMap.size()); - - int32_t streamId = 0; - for (auto& inputIter : inputOutputMap) { - AvailableStream input; - ASSERT_EQ(Status::OK, findLargestSize(inputStreams, inputIter.inputFormat, - input)); - ASSERT_NE(0u, inputStreams.size()); - - AvailableStream outputThreshold = {INT32_MAX, INT32_MAX, - inputIter.outputFormat}; - std::vector outputStreams; - ASSERT_EQ(Status::OK, - getAvailableOutputStreams(staticMeta, outputStreams, - &outputThreshold)); - for (auto& outputIter : outputStreams) { - Stream zslStream = {streamId++, - StreamType::OUTPUT, - static_cast(input.width), - static_cast(input.height), - static_cast(input.format), - GRALLOC_USAGE_HW_CAMERA_ZSL, - 0, - StreamRotation::ROTATION_0}; - Stream inputStream = {streamId++, - StreamType::INPUT, - static_cast(input.width), - static_cast(input.height), - static_cast(input.format), - 0, - 0, - StreamRotation::ROTATION_0}; - Stream outputStream = {streamId++, - StreamType::OUTPUT, - static_cast(outputIter.width), - static_cast(outputIter.height), - static_cast(outputIter.format), - GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, - 0, - StreamRotation::ROTATION_0}; - - ::android::hardware::hidl_vec streams = {inputStream, zslStream, - outputStream}; - StreamConfiguration config = {streams, - StreamConfigurationMode::NORMAL_MODE}; - if (session3_3 == nullptr) { - ret = session->configureStreams(config, - [](Status s, HalStreamConfiguration halConfig) { - ASSERT_EQ(Status::OK, s); - ASSERT_EQ(3u, halConfig.streams.size()); - }); - } else { - ret = session3_3->configureStreams_3_3(config, - [](Status s, device::V3_3::HalStreamConfiguration halConfig) { - ASSERT_EQ(Status::OK, s); - ASSERT_EQ(3u, halConfig.streams.size()); - }); - } - ASSERT_TRUE(ret.isOk()); - } + camera_metadata_t* staticMeta; + Return ret; + sp session; + sp session3_3; + sp session3_4; + openEmptyDeviceSession(name, mProvider, &session /*out*/, &staticMeta /*out*/); + castSession(session, deviceVersion, &session3_3, &session3_4); + + Status rc = isZSLModeAvailable(staticMeta); + if (Status::METHOD_NOT_SUPPORTED == rc) { + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + continue; + } + ASSERT_EQ(Status::OK, rc); + + inputStreams.clear(); + ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, inputStreams)); + ASSERT_NE(0u, inputStreams.size()); + + inputOutputMap.clear(); + ASSERT_EQ(Status::OK, getZSLInputOutputMap(staticMeta, inputOutputMap)); + ASSERT_NE(0u, inputOutputMap.size()); + + int32_t streamId = 0; + for (auto& inputIter : inputOutputMap) { + AvailableStream input; + ASSERT_EQ(Status::OK, findLargestSize(inputStreams, inputIter.inputFormat, + input)); + ASSERT_NE(0u, inputStreams.size()); + + AvailableStream outputThreshold = {INT32_MAX, INT32_MAX, + inputIter.outputFormat}; + std::vector outputStreams; + ASSERT_EQ(Status::OK, + getAvailableOutputStreams(staticMeta, outputStreams, + &outputThreshold)); + for (auto& outputIter : outputStreams) { + V3_2::Stream zslStream = {streamId++, + StreamType::OUTPUT, + static_cast(input.width), + static_cast(input.height), + static_cast(input.format), + GRALLOC_USAGE_HW_CAMERA_ZSL, + 0, + StreamRotation::ROTATION_0}; + V3_2::Stream inputStream = {streamId++, + StreamType::INPUT, + 
static_cast(input.width),
+ static_cast(input.height),
+ static_cast(input.format),
+ 0,
+ 0,
+ StreamRotation::ROTATION_0};
+ V3_2::Stream outputStream = {streamId++,
+ StreamType::OUTPUT,
+ static_cast(outputIter.width),
+ static_cast(outputIter.height),
+ static_cast(outputIter.format),
+ GRALLOC1_CONSUMER_USAGE_HWCOMPOSER,
+ 0,
+ StreamRotation::ROTATION_0};
+
+ ::android::hardware::hidl_vec streams = {inputStream, zslStream,
+ outputStream};
+ ::android::hardware::camera::device::V3_4::StreamConfiguration config3_4;
+ ::android::hardware::camera::device::V3_2::StreamConfiguration config3_2;
+ createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE,
+ &config3_2, &config3_4);
+ if (session3_4 != nullptr) {
+ ret = session3_4->configureStreams_3_4(config3_4,
+ [](Status s, device::V3_4::HalStreamConfiguration halConfig) {
+ ASSERT_EQ(Status::OK, s);
+ ASSERT_EQ(3u, halConfig.streams.size());
+ });
+ } else if (session3_3 != nullptr) {
+ ret = session3_3->configureStreams_3_3(config3_2,
+ [](Status s, device::V3_3::HalStreamConfiguration halConfig) {
+ ASSERT_EQ(Status::OK, s);
+ ASSERT_EQ(3u, halConfig.streams.size());
+ });
+ } else {
+ ret = session->configureStreams(config3_2,
+ [](Status s, HalStreamConfiguration halConfig) {
+ ASSERT_EQ(Status::OK, s);
+ ASSERT_EQ(3u, halConfig.streams.size());
+ });
}
-
- free_camera_metadata(staticMeta);
- ret = session->close();
ASSERT_TRUE(ret.isOk());
}
- break;
- case CAMERA_DEVICE_API_VERSION_1_0: {
- //Not applicable
- }
- break;
- default: {
- ALOGE("%s: Unsupported device version %d", __func__, deviceVersion);
- ADD_FAILURE();
- }
- break;
}
+
+ free_camera_metadata(staticMeta);
+ ret = session->close();
+ ASSERT_TRUE(ret.isOk());
}
}
-// Verify that all supported preview + still capture stream combinations
-// can be configured successfully.
-TEST_F(CameraHidlTest, configureStreamsPreviewStillOutputs) {
+// Check whether session parameters are supported. If HAL support for them
+// exists, then try to configure a preview stream using them.
+TEST_F(CameraHidlTest, configureStreamsWithSessionParameters) { hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); - std::vector outputBlobStreams; std::vector outputPreviewStreams; AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight, static_cast(PixelFormat::IMPLEMENTATION_DEFINED)}; - AvailableStream blobThreshold = {INT32_MAX, INT32_MAX, - static_cast(PixelFormat::BLOB)}; for (const auto& name : cameraDeviceNames) { int deviceVersion = getCameraDeviceVersion(name, mProviderType); - switch (deviceVersion) { - case CAMERA_DEVICE_API_VERSION_3_3: - case CAMERA_DEVICE_API_VERSION_3_2: { - camera_metadata_t* staticMeta; - Return ret; - sp session; - sp session3_3; - openEmptyDeviceSession(name, mProvider, - &session /*out*/, &session3_3 /*out*/, &staticMeta /*out*/); - - outputBlobStreams.clear(); - ASSERT_EQ(Status::OK, - getAvailableOutputStreams(staticMeta, outputBlobStreams, - &blobThreshold)); - ASSERT_NE(0u, outputBlobStreams.size()); - - outputPreviewStreams.clear(); - ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputPreviewStreams, - &previewThreshold)); - ASSERT_NE(0u, outputPreviewStreams.size()); - - int32_t streamId = 0; - for (auto& blobIter : outputBlobStreams) { - for (auto& previewIter : outputPreviewStreams) { - Stream previewStream = {streamId++, - StreamType::OUTPUT, - static_cast(previewIter.width), - static_cast(previewIter.height), - static_cast(previewIter.format), - GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, - 0, - StreamRotation::ROTATION_0}; - Stream blobStream = {streamId++, - StreamType::OUTPUT, - static_cast(blobIter.width), - static_cast(blobIter.height), - static_cast(blobIter.format), - GRALLOC1_CONSUMER_USAGE_CPU_READ, - 0, - StreamRotation::ROTATION_0}; - ::android::hardware::hidl_vec streams = {previewStream, - blobStream}; - StreamConfiguration config = {streams, - StreamConfigurationMode::NORMAL_MODE}; - if (session3_3 == nullptr) { - ret = session->configureStreams(config, - [](Status s, HalStreamConfiguration halConfig) { - ASSERT_EQ(Status::OK, s); - ASSERT_EQ(2u, halConfig.streams.size()); - }); - } else { - ret = session3_3->configureStreams_3_3(config, - [](Status s, device::V3_3::HalStreamConfiguration halConfig) { - ASSERT_EQ(Status::OK, s); - ASSERT_EQ(2u, halConfig.streams.size()); - }); - } - ASSERT_TRUE(ret.isOk()); - } - } + if (deviceVersion <= 0) { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + return; + } else if (deviceVersion < CAMERA_DEVICE_API_VERSION_3_4) { + continue; + } - free_camera_metadata(staticMeta); - ret = session->close(); - ASSERT_TRUE(ret.isOk()); - } - break; - case CAMERA_DEVICE_API_VERSION_1_0: { - //Not applicable - } - break; - default: { - ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); - ADD_FAILURE(); - } - break; + camera_metadata_t* staticMetaBuffer; + Return ret; + sp session; + sp session3_3; + sp session3_4; + openEmptyDeviceSession(name, mProvider, &session /*out*/, &staticMetaBuffer /*out*/); + castSession(session, deviceVersion, &session3_3, &session3_4); + ASSERT_NE(session3_4, nullptr); + + std::unordered_set availableSessionKeys; + auto rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS, + &availableSessionKeys); + ASSERT_TRUE(Status::OK == rc); + if (availableSessionKeys.empty()) { + free_camera_metadata(staticMetaBuffer); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + continue; + } + + android::hardware::camera::common::V1_0::helper::CameraMetadata 
previewRequestSettings; + android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams; + constructFilteredSettings(session, availableSessionKeys, RequestTemplate::PREVIEW, + &previewRequestSettings, &sessionParams); + if (sessionParams.isEmpty()) { + free_camera_metadata(staticMetaBuffer); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + continue; } + + ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams, + &previewThreshold)); + ASSERT_NE(0u, outputPreviewStreams.size()); + + V3_4::Stream previewStream; + previewStream.v3_2 = {0, + StreamType::OUTPUT, + static_cast(outputPreviewStreams[0].width), + static_cast(outputPreviewStreams[0].height), + static_cast(outputPreviewStreams[0].format), + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, + 0, + StreamRotation::ROTATION_0}; + ::android::hardware::hidl_vec streams = {previewStream}; + ::android::hardware::camera::device::V3_4::StreamConfiguration config; + config.streams = streams; + config.operationMode = StreamConfigurationMode::NORMAL_MODE; + const camera_metadata_t *sessionParamsBuffer = sessionParams.getAndLock(); + config.sessionParams.setToExternal( + reinterpret_cast (const_cast (sessionParamsBuffer)), + get_camera_metadata_size(sessionParamsBuffer)); + ret = session3_4->configureStreams_3_4(config, + [](Status s, device::V3_4::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(1u, halConfig.streams.size()); + }); + ASSERT_TRUE(ret.isOk()); + + free_camera_metadata(staticMetaBuffer); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); } } -// In case constrained mode is supported, test whether it can be -// configured. Additionally check for common invalid inputs when -// using this mode. -TEST_F(CameraHidlTest, configureStreamsConstrainedOutputs) { +// Verify that all supported preview + still capture stream combinations +// can be configured successfully. 
+TEST_F(CameraHidlTest, configureStreamsPreviewStillOutputs) { hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + std::vector outputBlobStreams; + std::vector outputPreviewStreams; + AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight, + static_cast(PixelFormat::IMPLEMENTATION_DEFINED)}; + AvailableStream blobThreshold = {INT32_MAX, INT32_MAX, + static_cast(PixelFormat::BLOB)}; for (const auto& name : cameraDeviceNames) { int deviceVersion = getCameraDeviceVersion(name, mProviderType); - switch (deviceVersion) { - case CAMERA_DEVICE_API_VERSION_3_3: - case CAMERA_DEVICE_API_VERSION_3_2: { - camera_metadata_t* staticMeta; - Return ret; - sp session; - sp session3_3; - openEmptyDeviceSession(name, mProvider, - &session /*out*/, &session3_3 /*out*/, &staticMeta /*out*/); + if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) { + continue; + } else if (deviceVersion <= 0) { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + return; + } - Status rc = isConstrainedModeAvailable(staticMeta); - if (Status::METHOD_NOT_SUPPORTED == rc) { - ret = session->close(); - ASSERT_TRUE(ret.isOk()); - continue; - } - ASSERT_EQ(Status::OK, rc); - - AvailableStream hfrStream; - rc = pickConstrainedModeSize(staticMeta, hfrStream); - ASSERT_EQ(Status::OK, rc); - - int32_t streamId = 0; - Stream stream = {streamId, - StreamType::OUTPUT, - static_cast(hfrStream.width), - static_cast(hfrStream.height), - static_cast(hfrStream.format), - GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER, - 0, - StreamRotation::ROTATION_0}; - ::android::hardware::hidl_vec streams = {stream}; - StreamConfiguration config = {streams, - StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE}; - if (session3_3 == nullptr) { - ret = session->configureStreams(config, - [streamId](Status s, HalStreamConfiguration halConfig) { + camera_metadata_t* staticMeta; + Return ret; + sp session; + sp session3_3; + sp session3_4; + openEmptyDeviceSession(name, mProvider, &session /*out*/, &staticMeta /*out*/); + castSession(session, deviceVersion, &session3_3, &session3_4); + + outputBlobStreams.clear(); + ASSERT_EQ(Status::OK, + getAvailableOutputStreams(staticMeta, outputBlobStreams, + &blobThreshold)); + ASSERT_NE(0u, outputBlobStreams.size()); + + outputPreviewStreams.clear(); + ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputPreviewStreams, + &previewThreshold)); + ASSERT_NE(0u, outputPreviewStreams.size()); + + int32_t streamId = 0; + for (auto& blobIter : outputBlobStreams) { + for (auto& previewIter : outputPreviewStreams) { + V3_2::Stream previewStream = {streamId++, + StreamType::OUTPUT, + static_cast(previewIter.width), + static_cast(previewIter.height), + static_cast(previewIter.format), + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, + 0, + StreamRotation::ROTATION_0}; + V3_2::Stream blobStream = {streamId++, + StreamType::OUTPUT, + static_cast(blobIter.width), + static_cast(blobIter.height), + static_cast(blobIter.format), + GRALLOC1_CONSUMER_USAGE_CPU_READ, + static_cast(Dataspace::V0_JFIF), + StreamRotation::ROTATION_0}; + ::android::hardware::hidl_vec streams = {previewStream, + blobStream}; + ::android::hardware::camera::device::V3_4::StreamConfiguration config3_4; + ::android::hardware::camera::device::V3_2::StreamConfiguration config3_2; + createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, + &config3_2, &config3_4); + if (session3_4 != nullptr) { + ret = session3_4->configureStreams_3_4(config3_4, + [](Status s, 
device::V3_4::HalStreamConfiguration halConfig) { ASSERT_EQ(Status::OK, s); - ASSERT_EQ(1u, halConfig.streams.size()); - ASSERT_EQ(halConfig.streams[0].id, streamId); + ASSERT_EQ(2u, halConfig.streams.size()); }); - } else { - ret = session3_3->configureStreams_3_3(config, - [streamId](Status s, device::V3_3::HalStreamConfiguration halConfig) { + } else if (session3_3 != nullptr) { + ret = session3_3->configureStreams_3_3(config3_2, + [](Status s, device::V3_3::HalStreamConfiguration halConfig) { ASSERT_EQ(Status::OK, s); - ASSERT_EQ(1u, halConfig.streams.size()); - ASSERT_EQ(halConfig.streams[0].v3_2.id, streamId); - }); - } - ASSERT_TRUE(ret.isOk()); - - stream = {streamId++, - StreamType::OUTPUT, - static_cast(0), - static_cast(0), - static_cast(hfrStream.format), - GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER, - 0, - StreamRotation::ROTATION_0}; - streams[0] = stream; - config = {streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE}; - if (session3_3 == nullptr) { - ret = session->configureStreams(config, - [](Status s, HalStreamConfiguration) { - ASSERT_TRUE((Status::ILLEGAL_ARGUMENT == s) || - (Status::INTERNAL_ERROR == s)); + ASSERT_EQ(2u, halConfig.streams.size()); }); } else { - ret = session3_3->configureStreams_3_3(config, - [](Status s, device::V3_3::HalStreamConfiguration) { - ASSERT_TRUE((Status::ILLEGAL_ARGUMENT == s) || - (Status::INTERNAL_ERROR == s)); + ret = session->configureStreams(config3_2, + [](Status s, HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(2u, halConfig.streams.size()); }); } ASSERT_TRUE(ret.isOk()); + } + } - stream = {streamId++, - StreamType::OUTPUT, - static_cast(UINT32_MAX), - static_cast(UINT32_MAX), - static_cast(hfrStream.format), - GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER, - 0, - StreamRotation::ROTATION_0}; - streams[0] = stream; - config = {streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE}; - if (session3_3 == nullptr) { - ret = session->configureStreams(config, - [](Status s, HalStreamConfiguration) { - ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); - }); - } else { - ret = session3_3->configureStreams_3_3(config, - [](Status s, device::V3_3::HalStreamConfiguration) { - ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); - }); - } - ASSERT_TRUE(ret.isOk()); + free_camera_metadata(staticMeta); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + } +} - stream = {streamId++, - StreamType::OUTPUT, - static_cast(hfrStream.width), - static_cast(hfrStream.height), - static_cast(UINT32_MAX), - GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER, - 0, - StreamRotation::ROTATION_0}; - streams[0] = stream; - config = {streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE}; - if (session3_3 == nullptr) { - ret = session->configureStreams(config, - [](Status s, HalStreamConfiguration) { - ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); - }); - } else { - ret = session3_3->configureStreams_3_3(config, - [](Status s, device::V3_3::HalStreamConfiguration) { - ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); - }); - } - ASSERT_TRUE(ret.isOk()); +// In case constrained mode is supported, test whether it can be +// configured. Additionally check for common invalid inputs when +// using this mode. 
+TEST_F(CameraHidlTest, configureStreamsConstrainedOutputs) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); - free_camera_metadata(staticMeta); - ret = session->close(); - ASSERT_TRUE(ret.isOk()); - } - break; - case CAMERA_DEVICE_API_VERSION_1_0: { - //Not applicable - } - break; - default: { - ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); - ADD_FAILURE(); - } - break; + for (const auto& name : cameraDeviceNames) { + int deviceVersion = getCameraDeviceVersion(name, mProviderType); + if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) { + continue; + } else if (deviceVersion <= 0) { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + return; + } + + camera_metadata_t* staticMeta; + Return ret; + sp session; + sp session3_3; + sp session3_4; + openEmptyDeviceSession(name, mProvider, &session /*out*/, &staticMeta /*out*/); + castSession(session, deviceVersion, &session3_3, &session3_4); + + Status rc = isConstrainedModeAvailable(staticMeta); + if (Status::METHOD_NOT_SUPPORTED == rc) { + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + continue; + } + ASSERT_EQ(Status::OK, rc); + + AvailableStream hfrStream; + rc = pickConstrainedModeSize(staticMeta, hfrStream); + ASSERT_EQ(Status::OK, rc); + + int32_t streamId = 0; + V3_2::Stream stream = {streamId, + StreamType::OUTPUT, + static_cast(hfrStream.width), + static_cast(hfrStream.height), + static_cast(hfrStream.format), + GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER, + 0, + StreamRotation::ROTATION_0}; + ::android::hardware::hidl_vec streams = {stream}; + ::android::hardware::camera::device::V3_4::StreamConfiguration config3_4; + ::android::hardware::camera::device::V3_2::StreamConfiguration config3_2; + createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE, + &config3_2, &config3_4); + if (session3_4 != nullptr) { + ret = session3_4->configureStreams_3_4(config3_4, + [streamId](Status s, device::V3_4::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(1u, halConfig.streams.size()); + ASSERT_EQ(halConfig.streams[0].v3_3.v3_2.id, streamId); + }); + } else if (session3_3 != nullptr) { + ret = session3_3->configureStreams_3_3(config3_2, + [streamId](Status s, device::V3_3::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(1u, halConfig.streams.size()); + ASSERT_EQ(halConfig.streams[0].v3_2.id, streamId); + }); + } else { + ret = session->configureStreams(config3_2, + [streamId](Status s, HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(1u, halConfig.streams.size()); + ASSERT_EQ(halConfig.streams[0].id, streamId); + }); + } + ASSERT_TRUE(ret.isOk()); + + stream = {streamId++, + StreamType::OUTPUT, + static_cast(0), + static_cast(0), + static_cast(hfrStream.format), + GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER, + 0, + StreamRotation::ROTATION_0}; + streams[0] = stream; + createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE, + &config3_2, &config3_4); + if (session3_4 != nullptr) { + ret = session3_4->configureStreams_3_4(config3_4, + [](Status s, device::V3_4::HalStreamConfiguration) { + ASSERT_TRUE((Status::ILLEGAL_ARGUMENT == s) || + (Status::INTERNAL_ERROR == s)); + }); + } else if (session3_3 != nullptr) { + ret = session3_3->configureStreams_3_3(config3_2, + [](Status s, device::V3_3::HalStreamConfiguration) { + ASSERT_TRUE((Status::ILLEGAL_ARGUMENT == s) || + (Status::INTERNAL_ERROR == s)); + }); + } else { + ret = 
session->configureStreams(config3_2, + [](Status s, HalStreamConfiguration) { + ASSERT_TRUE((Status::ILLEGAL_ARGUMENT == s) || + (Status::INTERNAL_ERROR == s)); + }); + } + ASSERT_TRUE(ret.isOk()); + + stream = {streamId++, + StreamType::OUTPUT, + static_cast(UINT32_MAX), + static_cast(UINT32_MAX), + static_cast(hfrStream.format), + GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER, + 0, + StreamRotation::ROTATION_0}; + streams[0] = stream; + createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE, + &config3_2, &config3_4); + if (session3_4 != nullptr) { + ret = session3_4->configureStreams_3_4(config3_4, + [](Status s, device::V3_4::HalStreamConfiguration) { + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + }); + } else if (session3_3 != nullptr) { + ret = session3_3->configureStreams_3_3(config3_2, + [](Status s, device::V3_3::HalStreamConfiguration) { + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + }); + } else { + ret = session->configureStreams(config3_2, + [](Status s, HalStreamConfiguration) { + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + }); + } + ASSERT_TRUE(ret.isOk()); + + stream = {streamId++, + StreamType::OUTPUT, + static_cast(hfrStream.width), + static_cast(hfrStream.height), + static_cast(UINT32_MAX), + GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER, + 0, + StreamRotation::ROTATION_0}; + streams[0] = stream; + createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE, + &config3_2, &config3_4); + if (session3_4 != nullptr) { + ret = session3_4->configureStreams_3_4(config3_4, + [](Status s, device::V3_4::HalStreamConfiguration) { + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + }); + } else if (session3_3 != nullptr) { + ret = session3_3->configureStreams_3_3(config3_2, + [](Status s, device::V3_3::HalStreamConfiguration) { + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + }); + } else { + ret = session->configureStreams(config3_2, + [](Status s, HalStreamConfiguration) { + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, s); + }); } + ASSERT_TRUE(ret.isOk()); + + free_camera_metadata(staticMeta); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); } } @@ -2866,82 +3178,84 @@ TEST_F(CameraHidlTest, configureStreamsVideoStillOutputs) { for (const auto& name : cameraDeviceNames) { int deviceVersion = getCameraDeviceVersion(name, mProviderType); - switch (deviceVersion) { - case CAMERA_DEVICE_API_VERSION_3_3: - case CAMERA_DEVICE_API_VERSION_3_2: { - camera_metadata_t* staticMeta; - Return ret; - sp session; - sp session3_3; - openEmptyDeviceSession(name, mProvider, - &session /*out*/, &session3_3 /*out*/, &staticMeta /*out*/); - - outputBlobStreams.clear(); - ASSERT_EQ(Status::OK, - getAvailableOutputStreams(staticMeta, outputBlobStreams, - &blobThreshold)); - ASSERT_NE(0u, outputBlobStreams.size()); - - outputVideoStreams.clear(); - ASSERT_EQ(Status::OK, - getAvailableOutputStreams(staticMeta, outputVideoStreams, - &videoThreshold)); - ASSERT_NE(0u, outputVideoStreams.size()); - - int32_t streamId = 0; - for (auto& blobIter : outputBlobStreams) { - for (auto& videoIter : outputVideoStreams) { - Stream videoStream = {streamId++, - StreamType::OUTPUT, - static_cast(videoIter.width), - static_cast(videoIter.height), - static_cast(videoIter.format), - GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER, - 0, - StreamRotation::ROTATION_0}; - Stream blobStream = {streamId++, - StreamType::OUTPUT, - static_cast(blobIter.width), - static_cast(blobIter.height), - static_cast(blobIter.format), - GRALLOC1_CONSUMER_USAGE_CPU_READ, - 0, - StreamRotation::ROTATION_0}; - 
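Each of the stream-configuration tests above now follows the same version-dispatch pattern: the stream list is built once as V3_2::Stream entries, createStreamConfiguration wraps it into both a 3.2 and a 3.4 StreamConfiguration, and the call goes to configureStreams_3_4, configureStreams_3_3, or the base configureStreams depending on which cast castSession produced. The body of that helper is outside this hunk; what follows is only a sketch of what it plausibly does, assuming each V3_4::Stream simply embeds its V3_2 counterpart in the v3_2 field and leaves physicalCameraId, bufferSize, and the 3.4 sessionParams at their defaults.

    // Sketch only, not the helper body from this patch.
    void CameraHidlTest::createStreamConfiguration(
            const hidl_vec<V3_2::Stream>& streams3_2, StreamConfigurationMode configMode,
            ::android::hardware::camera::device::V3_2::StreamConfiguration* config3_2 /*out*/,
            ::android::hardware::camera::device::V3_4::StreamConfiguration* config3_4 /*out*/) {
        hidl_vec<V3_4::Stream> streams3_4(streams3_2.size());
        size_t idx = 0;
        for (const auto& stream3_2 : streams3_2) {
            V3_4::Stream stream;
            stream.v3_2 = stream3_2;      // reuse id, type, size, format, usage, dataspace, rotation
            streams3_4[idx++] = stream;   // physicalCameraId/bufferSize stay default-initialized
        }
        config3_4->streams = streams3_4;
        config3_4->operationMode = configMode;
        // sessionParams is left empty here; configureStreamsWithSessionParameters fills it in itself.
        config3_2->streams = streams3_2;
        config3_2->operationMode = configMode;
    }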
::android::hardware::hidl_vec streams = {videoStream, blobStream}; - StreamConfiguration config = {streams, - StreamConfigurationMode::NORMAL_MODE}; - if (session3_3 == nullptr) { - ret = session->configureStreams(config, - [](Status s, HalStreamConfiguration halConfig) { - ASSERT_EQ(Status::OK, s); - ASSERT_EQ(2u, halConfig.streams.size()); - }); - } else { - ret = session3_3->configureStreams_3_3(config, - [](Status s, device::V3_3::HalStreamConfiguration halConfig) { - ASSERT_EQ(Status::OK, s); - ASSERT_EQ(2u, halConfig.streams.size()); - }); - } - ASSERT_TRUE(ret.isOk()); - } - } + if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) { + continue; + } else if (deviceVersion <= 0) { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + return; + } - free_camera_metadata(staticMeta); - ret = session->close(); + camera_metadata_t* staticMeta; + Return ret; + sp session; + sp session3_3; + sp session3_4; + openEmptyDeviceSession(name, mProvider, &session /*out*/, &staticMeta /*out*/); + castSession(session, deviceVersion, &session3_3, &session3_4); + + outputBlobStreams.clear(); + ASSERT_EQ(Status::OK, + getAvailableOutputStreams(staticMeta, outputBlobStreams, + &blobThreshold)); + ASSERT_NE(0u, outputBlobStreams.size()); + + outputVideoStreams.clear(); + ASSERT_EQ(Status::OK, + getAvailableOutputStreams(staticMeta, outputVideoStreams, + &videoThreshold)); + ASSERT_NE(0u, outputVideoStreams.size()); + + int32_t streamId = 0; + for (auto& blobIter : outputBlobStreams) { + for (auto& videoIter : outputVideoStreams) { + V3_2::Stream videoStream = {streamId++, + StreamType::OUTPUT, + static_cast(videoIter.width), + static_cast(videoIter.height), + static_cast(videoIter.format), + GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER, + 0, + StreamRotation::ROTATION_0}; + V3_2::Stream blobStream = {streamId++, + StreamType::OUTPUT, + static_cast(blobIter.width), + static_cast(blobIter.height), + static_cast(blobIter.format), + GRALLOC1_CONSUMER_USAGE_CPU_READ, + static_cast(Dataspace::V0_JFIF), + StreamRotation::ROTATION_0}; + ::android::hardware::hidl_vec streams = {videoStream, blobStream}; + ::android::hardware::camera::device::V3_4::StreamConfiguration config3_4; + ::android::hardware::camera::device::V3_2::StreamConfiguration config3_2; + createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, + &config3_2, &config3_4); + if (session3_4 != nullptr) { + ret = session3_4->configureStreams_3_4(config3_4, + [](Status s, device::V3_4::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(2u, halConfig.streams.size()); + }); + } else if (session3_3 != nullptr) { + ret = session3_3->configureStreams_3_3(config3_2, + [](Status s, device::V3_3::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(2u, halConfig.streams.size()); + }); + } else { + ret = session->configureStreams(config3_2, + [](Status s, HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(2u, halConfig.streams.size()); + }); + } ASSERT_TRUE(ret.isOk()); } - break; - case CAMERA_DEVICE_API_VERSION_1_0: { - //Not applicable - } - break; - default: { - ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); - ADD_FAILURE(); - } - break; } + + free_camera_metadata(staticMeta); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); } } @@ -2956,160 +3270,375 @@ TEST_F(CameraHidlTest, processCaptureRequestPreview) { for (const auto& name : cameraDeviceNames) { int deviceVersion = getCameraDeviceVersion(name, mProviderType); - 
switch (deviceVersion) { - case CAMERA_DEVICE_API_VERSION_3_3: - case CAMERA_DEVICE_API_VERSION_3_2: { - Stream previewStream; - HalStreamConfiguration halStreamConfig; - sp session; - bool supportsPartialResults = false; - uint32_t partialResultCount = 0; - configurePreviewStream(name, mProvider, &previewThreshold, &session /*out*/, - &previewStream /*out*/, &halStreamConfig /*out*/, - &supportsPartialResults /*out*/, - &partialResultCount /*out*/); - - std::shared_ptr resultQueue; - auto resultQueueRet = - session->getCaptureResultMetadataQueue( - [&resultQueue](const auto& descriptor) { - resultQueue = std::make_shared( - descriptor); - if (!resultQueue->isValid() || - resultQueue->availableToWrite() <= 0) { - ALOGE("%s: HAL returns empty result metadata fmq," - " not use it", __func__); - resultQueue = nullptr; - // Don't use the queue onwards. - } - }); - ASSERT_TRUE(resultQueueRet.isOk()); + if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) { + continue; + } else if (deviceVersion <= 0) { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + return; + } - InFlightRequest inflightReq = {1, false, supportsPartialResults, - partialResultCount, resultQueue}; + V3_2::Stream previewStream; + HalStreamConfiguration halStreamConfig; + sp session; + bool supportsPartialResults = false; + uint32_t partialResultCount = 0; + configurePreviewStream(name, deviceVersion, mProvider, &previewThreshold, &session /*out*/, + &previewStream /*out*/, &halStreamConfig /*out*/, + &supportsPartialResults /*out*/, + &partialResultCount /*out*/); - RequestTemplate reqTemplate = RequestTemplate::PREVIEW; - Return ret; - ret = session->constructDefaultRequestSettings(reqTemplate, - [&](auto status, const auto& req) { - ASSERT_EQ(Status::OK, status); - settings = req; - }); - ASSERT_TRUE(ret.isOk()); + std::shared_ptr resultQueue; + auto resultQueueRet = + session->getCaptureResultMetadataQueue( + [&resultQueue](const auto& descriptor) { + resultQueue = std::make_shared( + descriptor); + if (!resultQueue->isValid() || + resultQueue->availableToWrite() <= 0) { + ALOGE("%s: HAL returns empty result metadata fmq," + " not use it", __func__); + resultQueue = nullptr; + // Don't use the queue onwards. 
+ } + }); + ASSERT_TRUE(resultQueueRet.isOk()); + + InFlightRequest inflightReq = {1, false, supportsPartialResults, + partialResultCount, resultQueue}; + + RequestTemplate reqTemplate = RequestTemplate::PREVIEW; + Return ret; + ret = session->constructDefaultRequestSettings(reqTemplate, + [&](auto status, const auto& req) { + ASSERT_EQ(Status::OK, status); + settings = req; + }); + ASSERT_TRUE(ret.isOk()); + + hidl_handle buffer_handle; + allocateGraphicBuffer(previewStream.width, previewStream.height, + android_convertGralloc1To0Usage(halStreamConfig.streams[0].producerUsage, + halStreamConfig.streams[0].consumerUsage), + halStreamConfig.streams[0].overrideFormat, &buffer_handle); + + StreamBuffer outputBuffer = {halStreamConfig.streams[0].id, + bufferId, + buffer_handle, + BufferStatus::OK, + nullptr, + nullptr}; + ::android::hardware::hidl_vec outputBuffers = {outputBuffer}; + StreamBuffer emptyInputBuffer = {-1, 0, nullptr, BufferStatus::ERROR, nullptr, + nullptr}; + CaptureRequest request = {frameNumber, 0 /* fmqSettingsSize */, settings, + emptyInputBuffer, outputBuffers}; + + { + std::unique_lock l(mLock); + mInflightMap.clear(); + mInflightMap.add(frameNumber, &inflightReq); + } - sp gb = new GraphicBuffer( - previewStream.width, previewStream.height, - static_cast(halStreamConfig.streams[0].overrideFormat), 1, - android_convertGralloc1To0Usage(halStreamConfig.streams[0].producerUsage, - halStreamConfig.streams[0].consumerUsage)); - ASSERT_NE(nullptr, gb.get()); - StreamBuffer outputBuffer = {halStreamConfig.streams[0].id, - bufferId, - hidl_handle(gb->getNativeBuffer()->handle), - BufferStatus::OK, - nullptr, - nullptr}; - ::android::hardware::hidl_vec outputBuffers = {outputBuffer}; - StreamBuffer emptyInputBuffer = {-1, 0, nullptr, BufferStatus::ERROR, nullptr, - nullptr}; - CaptureRequest request = {frameNumber, 0 /* fmqSettingsSize */, settings, - emptyInputBuffer, outputBuffers}; + Status status = Status::INTERNAL_ERROR; + uint32_t numRequestProcessed = 0; + hidl_vec cachesToRemove; + Return returnStatus = session->processCaptureRequest( + {request}, cachesToRemove, [&status, &numRequestProcessed](auto s, + uint32_t n) { + status = s; + numRequestProcessed = n; + }); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, status); + ASSERT_EQ(numRequestProcessed, 1u); + + { + std::unique_lock l(mLock); + while (!inflightReq.errorCodeValid && + ((0 < inflightReq.numBuffersLeft) || + (!inflightReq.haveResultMetadata))) { + auto timeout = std::chrono::system_clock::now() + + std::chrono::seconds(kStreamBufferTimeoutSec); + ASSERT_NE(std::cv_status::timeout, + mResultCondition.wait_until(l, timeout)); + } - { - std::unique_lock l(mLock); - mInflightMap.clear(); - mInflightMap.add(frameNumber, &inflightReq); - } + ASSERT_FALSE(inflightReq.errorCodeValid); + ASSERT_NE(inflightReq.resultOutputBuffers.size(), 0u); + ASSERT_EQ(previewStream.id, inflightReq.resultOutputBuffers[0].streamId); + + request.frameNumber++; + // Empty settings should be supported after the first call + // for repeating requests. 
+ request.settings.setToExternal(nullptr, 0, true); + // The buffer has been registered to HAL by bufferId, so per + // API contract we should send a null handle for this buffer + request.outputBuffers[0].buffer = nullptr; + mInflightMap.clear(); + inflightReq = {1, false, supportsPartialResults, partialResultCount, + resultQueue}; + mInflightMap.add(request.frameNumber, &inflightReq); + } - Status status = Status::INTERNAL_ERROR; - uint32_t numRequestProcessed = 0; - hidl_vec cachesToRemove; - Return returnStatus = session->processCaptureRequest( - {request}, cachesToRemove, [&status, &numRequestProcessed](auto s, - uint32_t n) { - status = s; - numRequestProcessed = n; - }); - ASSERT_TRUE(returnStatus.isOk()); - ASSERT_EQ(Status::OK, status); - ASSERT_EQ(numRequestProcessed, 1u); + returnStatus = session->processCaptureRequest( + {request}, cachesToRemove, [&status, &numRequestProcessed](auto s, + uint32_t n) { + status = s; + numRequestProcessed = n; + }); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, status); + ASSERT_EQ(numRequestProcessed, 1u); + + { + std::unique_lock l(mLock); + while (!inflightReq.errorCodeValid && + ((0 < inflightReq.numBuffersLeft) || + (!inflightReq.haveResultMetadata))) { + auto timeout = std::chrono::system_clock::now() + + std::chrono::seconds(kStreamBufferTimeoutSec); + ASSERT_NE(std::cv_status::timeout, + mResultCondition.wait_until(l, timeout)); + } - { - std::unique_lock l(mLock); - while (!inflightReq.errorCodeValid && - ((0 < inflightReq.numBuffersLeft) || - (!inflightReq.haveResultMetadata))) { - auto timeout = std::chrono::system_clock::now() + - std::chrono::seconds(kStreamBufferTimeoutSec); - ASSERT_NE(std::cv_status::timeout, - mResultCondition.wait_until(l, timeout)); - } + ASSERT_FALSE(inflightReq.errorCodeValid); + ASSERT_NE(inflightReq.resultOutputBuffers.size(), 0u); + ASSERT_EQ(previewStream.id, inflightReq.resultOutputBuffers[0].streamId); + } - ASSERT_FALSE(inflightReq.errorCodeValid); - ASSERT_NE(inflightReq.resultOutputBuffers.size(), 0u); - ASSERT_EQ(previewStream.id, inflightReq.resultOutputBuffers[0].streamId); - - request.frameNumber++; - // Empty settings should be supported after the first call - // for repeating requests. 
- request.settings.setToExternal(nullptr, 0, true); - // The buffer has been registered to HAL by bufferId, so per - // API contract we should send a null handle for this buffer - request.outputBuffers[0].buffer = nullptr; - mInflightMap.clear(); - inflightReq = {1, false, supportsPartialResults, partialResultCount, - resultQueue}; - mInflightMap.add(request.frameNumber, &inflightReq); - } + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + } +} - returnStatus = session->processCaptureRequest( - {request}, cachesToRemove, [&status, &numRequestProcessed](auto s, - uint32_t n) { - status = s; - numRequestProcessed = n; - }); - ASSERT_TRUE(returnStatus.isOk()); - ASSERT_EQ(Status::OK, status); - ASSERT_EQ(numRequestProcessed, 1u); +// Generate and verify a multi-camera capture request +TEST_F(CameraHidlTest, processMultiCaptureRequestPreview) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight, + static_cast(PixelFormat::YCBCR_420_888)}; + uint64_t bufferId = 1; + uint32_t frameNumber = 1; + ::android::hardware::hidl_vec settings; + ::android::hardware::hidl_vec emptySettings; + hidl_string invalidPhysicalId = "-1"; - { - std::unique_lock l(mLock); - while (!inflightReq.errorCodeValid && - ((0 < inflightReq.numBuffersLeft) || - (!inflightReq.haveResultMetadata))) { - auto timeout = std::chrono::system_clock::now() + - std::chrono::seconds(kStreamBufferTimeoutSec); - ASSERT_NE(std::cv_status::timeout, - mResultCondition.wait_until(l, timeout)); - } + for (const auto& name : cameraDeviceNames) { + int deviceVersion = getCameraDeviceVersion(name, mProviderType); + if (deviceVersion < CAMERA_DEVICE_API_VERSION_3_4) { + continue; + } + std::string version, deviceId; + ASSERT_TRUE(::matchDeviceName(name, mProviderType, &version, &deviceId)); + camera_metadata_t* staticMeta; + Return ret; + sp session; + openEmptyDeviceSession(name, mProvider, &session /*out*/, &staticMeta /*out*/); + + Status rc = isLogicalMultiCamera(staticMeta); + if (Status::METHOD_NOT_SUPPORTED == rc) { + free_camera_metadata(staticMeta); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + continue; + } + std::unordered_set physicalIds; + rc = getPhysicalCameraIds(staticMeta, &physicalIds); + ASSERT_TRUE(Status::OK == rc); + ASSERT_TRUE(physicalIds.size() > 1); + + std::unordered_set physicalRequestKeyIDs; + rc = getSupportedKeys(staticMeta, + ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS, &physicalRequestKeyIDs); + ASSERT_TRUE(Status::OK == rc); + if (physicalRequestKeyIDs.empty()) { + free_camera_metadata(staticMeta); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + // The logical camera doesn't support any individual physical requests. + continue; + } - ASSERT_FALSE(inflightReq.errorCodeValid); - ASSERT_NE(inflightReq.resultOutputBuffers.size(), 0u); - ASSERT_EQ(previewStream.id, inflightReq.resultOutputBuffers[0].streamId); - } + android::hardware::camera::common::V1_0::helper::CameraMetadata defaultPreviewSettings; + android::hardware::camera::common::V1_0::helper::CameraMetadata filteredSettings; + constructFilteredSettings(session, physicalRequestKeyIDs, RequestTemplate::PREVIEW, + &defaultPreviewSettings, &filteredSettings); + if (filteredSettings.isEmpty()) { + // No physical device settings in default request. 
+ free_camera_metadata(staticMeta); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + continue; + } - ret = session->close(); - ASSERT_TRUE(ret.isOk()); - } - break; - case CAMERA_DEVICE_API_VERSION_1_0: { - //Not applicable + const camera_metadata_t *settingsBuffer = defaultPreviewSettings.getAndLock(); + settings.setToExternal( + reinterpret_cast (const_cast (settingsBuffer)), + get_camera_metadata_size(settingsBuffer)); + + free_camera_metadata(staticMeta); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + + // Leave only 2 physical devices in the id set. + auto it = physicalIds.begin(); + string physicalDeviceId = *it; it++; + physicalIds.erase(++it, physicalIds.end()); + ASSERT_EQ(physicalIds.size(), 2u); + + V3_4::HalStreamConfiguration halStreamConfig; + bool supportsPartialResults = false; + uint32_t partialResultCount = 0; + V3_2::Stream previewStream; + sp session3_4; + configurePreviewStreams3_4(name, deviceVersion, mProvider, &previewThreshold, physicalIds, + &session3_4, &previewStream, &halStreamConfig /*out*/, + &supportsPartialResults /*out*/, &partialResultCount /*out*/); + ASSERT_NE(session3_4, nullptr); + + std::shared_ptr resultQueue; + auto resultQueueRet = + session3_4->getCaptureResultMetadataQueue( + [&resultQueue](const auto& descriptor) { + resultQueue = std::make_shared( + descriptor); + if (!resultQueue->isValid() || + resultQueue->availableToWrite() <= 0) { + ALOGE("%s: HAL returns empty result metadata fmq," + " not use it", __func__); + resultQueue = nullptr; + // Don't use the queue onwards. + } + }); + ASSERT_TRUE(resultQueueRet.isOk()); + + InFlightRequest inflightReq = {static_cast (halStreamConfig.streams.size()), false, + supportsPartialResults, partialResultCount, resultQueue}; + + std::vector graphicBuffers; + graphicBuffers.reserve(halStreamConfig.streams.size()); + ::android::hardware::hidl_vec outputBuffers; + outputBuffers.resize(halStreamConfig.streams.size()); + size_t k = 0; + for (const auto& halStream : halStreamConfig.streams) { + hidl_handle buffer_handle; + allocateGraphicBuffer(previewStream.width, previewStream.height, + android_convertGralloc1To0Usage(halStream.v3_3.v3_2.producerUsage, + halStream.v3_3.v3_2.consumerUsage), + halStream.v3_3.v3_2.overrideFormat, &buffer_handle); + graphicBuffers.push_back(buffer_handle); + outputBuffers[k] = {halStream.v3_3.v3_2.id, bufferId, buffer_handle, + BufferStatus::OK, nullptr, nullptr}; + bufferId++; + k++; + } + hidl_vec camSettings(1); + const camera_metadata_t *filteredSettingsBuffer = filteredSettings.getAndLock(); + camSettings[0].settings.setToExternal( + reinterpret_cast (const_cast ( + filteredSettingsBuffer)), + get_camera_metadata_size(filteredSettingsBuffer)); + camSettings[0].fmqSettingsSize = 0; + camSettings[0].physicalCameraId = physicalDeviceId; + + StreamBuffer emptyInputBuffer = {-1, 0, nullptr, BufferStatus::ERROR, nullptr, nullptr}; + V3_4::CaptureRequest request = {{frameNumber, 0 /* fmqSettingsSize */, settings, + emptyInputBuffer, outputBuffers}, camSettings}; + + { + std::unique_lock l(mLock); + mInflightMap.clear(); + mInflightMap.add(frameNumber, &inflightReq); + } + + Status stat = Status::INTERNAL_ERROR; + uint32_t numRequestProcessed = 0; + hidl_vec cachesToRemove; + Return returnStatus = session3_4->processCaptureRequest_3_4( + {request}, cachesToRemove, [&stat, &numRequestProcessed](auto s, uint32_t n) { + stat = s; + numRequestProcessed = n; + }); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, stat); + ASSERT_EQ(numRequestProcessed, 1u); + + 
{ + std::unique_lock l(mLock); + while (!inflightReq.errorCodeValid && + ((0 < inflightReq.numBuffersLeft) || + (!inflightReq.haveResultMetadata))) { + auto timeout = std::chrono::system_clock::now() + + std::chrono::seconds(kStreamBufferTimeoutSec); + ASSERT_NE(std::cv_status::timeout, + mResultCondition.wait_until(l, timeout)); } - break; - default: { - ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); - ADD_FAILURE(); + + ASSERT_FALSE(inflightReq.errorCodeValid); + ASSERT_NE(inflightReq.resultOutputBuffers.size(), 0u); + + request.v3_2.frameNumber++; + // Empty settings should be supported after the first call + // for repeating requests. + request.v3_2.settings.setToExternal(nullptr, 0, true); + request.physicalCameraSettings[0].settings.setToExternal(nullptr, 0, true); + // The buffer has been registered to HAL by bufferId, so per + // API contract we should send a null handle for this buffer + request.v3_2.outputBuffers[0].buffer = nullptr; + mInflightMap.clear(); + inflightReq = {static_cast (physicalIds.size()), false, + supportsPartialResults, partialResultCount, resultQueue}; + mInflightMap.add(request.v3_2.frameNumber, &inflightReq); + } + + returnStatus = session3_4->processCaptureRequest_3_4( + {request}, cachesToRemove, [&stat, &numRequestProcessed](auto s, uint32_t n) { + stat = s; + numRequestProcessed = n; + }); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, stat); + ASSERT_EQ(numRequestProcessed, 1u); + + { + std::unique_lock l(mLock); + while (!inflightReq.errorCodeValid && + ((0 < inflightReq.numBuffersLeft) || + (!inflightReq.haveResultMetadata))) { + auto timeout = std::chrono::system_clock::now() + + std::chrono::seconds(kStreamBufferTimeoutSec); + ASSERT_NE(std::cv_status::timeout, + mResultCondition.wait_until(l, timeout)); } - break; + + ASSERT_FALSE(inflightReq.errorCodeValid); + ASSERT_NE(inflightReq.resultOutputBuffers.size(), 0u); } + + // Invalid physical camera id should fail process requests + frameNumber++; + camSettings[0].physicalCameraId = invalidPhysicalId; + camSettings[0].settings = settings; + request = {{frameNumber, 0 /* fmqSettingsSize */, settings, + emptyInputBuffer, outputBuffers}, camSettings}; + returnStatus = session3_4->processCaptureRequest_3_4( + {request}, cachesToRemove, [&stat, &numRequestProcessed](auto s, uint32_t n) { + stat = s; + numRequestProcessed = n; + }); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, stat); + + defaultPreviewSettings.unlock(settingsBuffer); + filteredSettings.unlock(filteredSettingsBuffer); + ret = session3_4->close(); + ASSERT_TRUE(ret.isOk()); } } -// Test whether an incorrect capture request with missing settings will -// be reported correctly. 
-TEST_F(CameraHidlTest, processCaptureRequestInvalidSinglePreview) { +// Generate and verify a burst containing alternating sensor sensitivity values +TEST_F(CameraHidlTest, processCaptureRequestBurstISO) { hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); - std::vector outputPreviewStreams; AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight, static_cast(PixelFormat::IMPLEMENTATION_DEFINED)}; uint64_t bufferId = 1; @@ -3118,136 +3647,268 @@ TEST_F(CameraHidlTest, processCaptureRequestInvalidSinglePreview) { for (const auto& name : cameraDeviceNames) { int deviceVersion = getCameraDeviceVersion(name, mProviderType); - switch (deviceVersion) { - case CAMERA_DEVICE_API_VERSION_3_3: - case CAMERA_DEVICE_API_VERSION_3_2: { - Stream previewStream; - HalStreamConfiguration halStreamConfig; - sp session; - bool supportsPartialResults = false; - uint32_t partialResultCount = 0; - configurePreviewStream(name, mProvider, &previewThreshold, &session /*out*/, - &previewStream /*out*/, &halStreamConfig /*out*/, - &supportsPartialResults /*out*/, - &partialResultCount /*out*/); - - sp gb = new GraphicBuffer( - previewStream.width, previewStream.height, - static_cast(halStreamConfig.streams[0].overrideFormat), 1, + if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) { + continue; + } else if (deviceVersion <= 0) { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + return; + } + camera_metadata_t* staticMetaBuffer; + Return ret; + sp session; + openEmptyDeviceSession(name, mProvider, &session /*out*/, &staticMetaBuffer /*out*/); + ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta( + staticMetaBuffer); + + camera_metadata_entry_t hwLevel = staticMeta.find(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL); + ASSERT_TRUE(0 < hwLevel.count); + if (ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED == hwLevel.data.u8[0] || + ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL == hwLevel.data.u8[0]) { + //Limited/External devices can skip this test + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + continue; + } + + camera_metadata_entry_t isoRange = staticMeta.find(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE); + ASSERT_EQ(isoRange.count, 2u); + + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + + bool supportsPartialResults = false; + uint32_t partialResultCount = 0; + V3_2::Stream previewStream; + HalStreamConfiguration halStreamConfig; + configurePreviewStream(name, deviceVersion, mProvider, &previewThreshold, + &session /*out*/, &previewStream /*out*/, &halStreamConfig /*out*/, + &supportsPartialResults /*out*/, &partialResultCount /*out*/); + std::shared_ptr resultQueue; + + auto resultQueueRet = session->getCaptureResultMetadataQueue( + [&resultQueue](const auto& descriptor) { + resultQueue = std::make_shared(descriptor); + if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) { + ALOGE("%s: HAL returns empty result metadata fmq," + " not use it", __func__); + resultQueue = nullptr; + // Don't use the queue onwards. 
+ } + }); + ASSERT_TRUE(resultQueueRet.isOk()); + ASSERT_NE(nullptr, resultQueue); + + ret = session->constructDefaultRequestSettings(RequestTemplate::PREVIEW, + [&](auto status, const auto& req) { + ASSERT_EQ(Status::OK, status); + settings = req; }); + ASSERT_TRUE(ret.isOk()); + + ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta; + StreamBuffer emptyInputBuffer = {-1, 0, nullptr, BufferStatus::ERROR, nullptr, nullptr}; + hidl_handle buffers[kBurstFrameCount]; + StreamBuffer outputBuffers[kBurstFrameCount]; + CaptureRequest requests[kBurstFrameCount]; + InFlightRequest inflightReqs[kBurstFrameCount]; + int32_t isoValues[kBurstFrameCount]; + hidl_vec requestSettings[kBurstFrameCount]; + for (uint32_t i = 0; i < kBurstFrameCount; i++) { + std::unique_lock l(mLock); + + isoValues[i] = ((i % 2) == 0) ? isoRange.data.i32[0] : isoRange.data.i32[1]; + allocateGraphicBuffer(previewStream.width, previewStream.height, android_convertGralloc1To0Usage(halStreamConfig.streams[0].producerUsage, - halStreamConfig.streams[0].consumerUsage)); - - StreamBuffer outputBuffer = {halStreamConfig.streams[0].id, - bufferId, - hidl_handle(gb->getNativeBuffer()->handle), - BufferStatus::OK, - nullptr, - nullptr}; - ::android::hardware::hidl_vec outputBuffers = {outputBuffer}; - StreamBuffer emptyInputBuffer = {-1, 0, nullptr, BufferStatus::ERROR, nullptr, - nullptr}; - CaptureRequest request = {frameNumber, 0 /* fmqSettingsSize */, settings, - emptyInputBuffer, outputBuffers}; - - // Settings were not correctly initialized, we should fail here - Status status = Status::OK; - uint32_t numRequestProcessed = 0; - hidl_vec cachesToRemove; - Return ret = session->processCaptureRequest( - {request}, cachesToRemove, [&status, &numRequestProcessed](auto s, - uint32_t n) { - status = s; - numRequestProcessed = n; - }); - ASSERT_TRUE(ret.isOk()); - ASSERT_EQ(Status::ILLEGAL_ARGUMENT, status); - ASSERT_EQ(numRequestProcessed, 0u); + halStreamConfig.streams[0].consumerUsage), + halStreamConfig.streams[0].overrideFormat, &buffers[i]); + + outputBuffers[i] = {halStreamConfig.streams[0].id, bufferId + i, + buffers[i], BufferStatus::OK, nullptr, nullptr}; + requestMeta.append(reinterpret_cast (settings.data())); + + // Disable all 3A routines + uint8_t mode = static_cast(ANDROID_CONTROL_MODE_OFF); + ASSERT_EQ(::android::OK, requestMeta.update(ANDROID_CONTROL_MODE, &mode, 1)); + ASSERT_EQ(::android::OK, requestMeta.update(ANDROID_SENSOR_SENSITIVITY, &isoValues[i], + 1)); + camera_metadata_t *metaBuffer = requestMeta.release(); + requestSettings[i].setToExternal(reinterpret_cast (metaBuffer), + get_camera_metadata_size(metaBuffer), true); + + requests[i] = {frameNumber + i, 0 /* fmqSettingsSize */, requestSettings[i], + emptyInputBuffer, {outputBuffers[i]}}; + + inflightReqs[i] = {1, false, supportsPartialResults, partialResultCount, resultQueue}; + mInflightMap.add(frameNumber + i, &inflightReqs[i]); + } - ret = session->close(); - ASSERT_TRUE(ret.isOk()); - } - break; - case CAMERA_DEVICE_API_VERSION_1_0: { - //Not applicable - } - break; - default: { - ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); - ADD_FAILURE(); + Status status = Status::INTERNAL_ERROR; + uint32_t numRequestProcessed = 0; + hidl_vec cachesToRemove; + hidl_vec burstRequest; + burstRequest.setToExternal(requests, kBurstFrameCount); + Return returnStatus = session->processCaptureRequest(burstRequest, cachesToRemove, + [&status, &numRequestProcessed] (auto s, uint32_t n) { + status = s; + numRequestProcessed = n; 
+ }); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, status); + ASSERT_EQ(numRequestProcessed, kBurstFrameCount); + + for (size_t i = 0; i < kBurstFrameCount; i++) { + std::unique_lock l(mLock); + while (!inflightReqs[i].errorCodeValid && ((0 < inflightReqs[i].numBuffersLeft) || + (!inflightReqs[i].haveResultMetadata))) { + auto timeout = std::chrono::system_clock::now() + + std::chrono::seconds(kStreamBufferTimeoutSec); + ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout)); } - break; + + ASSERT_FALSE(inflightReqs[i].errorCodeValid); + ASSERT_NE(inflightReqs[i].resultOutputBuffers.size(), 0u); + ASSERT_EQ(previewStream.id, inflightReqs[i].resultOutputBuffers[0].streamId); + ASSERT_FALSE(inflightReqs[i].collectedResult.isEmpty()); + ASSERT_TRUE(inflightReqs[i].collectedResult.exists(ANDROID_SENSOR_SENSITIVITY)); + camera_metadata_entry_t isoResult = inflightReqs[i].collectedResult.find( + ANDROID_SENSOR_SENSITIVITY); + ASSERT_TRUE(isoResult.data.i32[0] == isoValues[i]); } + + ret = session->close(); + ASSERT_TRUE(ret.isOk()); } } -// Check whether an invalid capture request with missing output buffers -// will be reported correctly. -TEST_F(CameraHidlTest, processCaptureRequestInvalidBuffer) { +// Test whether an incorrect capture request with missing settings will +// be reported correctly. +TEST_F(CameraHidlTest, processCaptureRequestInvalidSinglePreview) { hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); - std::vector outputBlobStreams; + std::vector outputPreviewStreams; AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight, static_cast(PixelFormat::IMPLEMENTATION_DEFINED)}; + uint64_t bufferId = 1; uint32_t frameNumber = 1; ::android::hardware::hidl_vec settings; for (const auto& name : cameraDeviceNames) { int deviceVersion = getCameraDeviceVersion(name, mProviderType); - switch (deviceVersion) { - case CAMERA_DEVICE_API_VERSION_3_3: - case CAMERA_DEVICE_API_VERSION_3_2: { - Stream previewStream; - HalStreamConfiguration halStreamConfig; - sp session; - bool supportsPartialResults = false; - uint32_t partialResultCount = 0; - configurePreviewStream(name, mProvider, &previewThreshold, &session /*out*/, - &previewStream /*out*/, &halStreamConfig /*out*/, - &supportsPartialResults /*out*/, - &partialResultCount /*out*/); - - RequestTemplate reqTemplate = RequestTemplate::PREVIEW; - Return ret; - ret = session->constructDefaultRequestSettings(reqTemplate, - [&](auto status, const auto& req) { - ASSERT_EQ(Status::OK, status); - settings = req; - }); - ASSERT_TRUE(ret.isOk()); + if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) { + continue; + } else if (deviceVersion <= 0) { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + return; + } - ::android::hardware::hidl_vec emptyOutputBuffers; - StreamBuffer emptyInputBuffer = {-1, 0, nullptr, BufferStatus::ERROR, nullptr, - nullptr}; - CaptureRequest request = {frameNumber, 0 /* fmqSettingsSize */, settings, - emptyInputBuffer, emptyOutputBuffers}; - - // Output buffers are missing, we should fail here - Status status = Status::OK; - uint32_t numRequestProcessed = 0; - hidl_vec cachesToRemove; - ret = session->processCaptureRequest( - {request}, cachesToRemove, [&status, &numRequestProcessed](auto s, - uint32_t n) { - status = s; - numRequestProcessed = n; - }); - ASSERT_TRUE(ret.isOk()); - ASSERT_EQ(Status::ILLEGAL_ARGUMENT, status); - ASSERT_EQ(numRequestProcessed, 0u); + V3_2::Stream previewStream; + HalStreamConfiguration 
halStreamConfig; + sp session; + bool supportsPartialResults = false; + uint32_t partialResultCount = 0; + configurePreviewStream(name, deviceVersion, mProvider, &previewThreshold, &session /*out*/, + &previewStream /*out*/, &halStreamConfig /*out*/, + &supportsPartialResults /*out*/, + &partialResultCount /*out*/); + + hidl_handle buffer_handle; + allocateGraphicBuffer(previewStream.width, previewStream.height, + android_convertGralloc1To0Usage(halStreamConfig.streams[0].producerUsage, + halStreamConfig.streams[0].consumerUsage), + halStreamConfig.streams[0].overrideFormat, &buffer_handle); + + StreamBuffer outputBuffer = {halStreamConfig.streams[0].id, + bufferId, + buffer_handle, + BufferStatus::OK, + nullptr, + nullptr}; + ::android::hardware::hidl_vec outputBuffers = {outputBuffer}; + StreamBuffer emptyInputBuffer = {-1, 0, nullptr, BufferStatus::ERROR, nullptr, + nullptr}; + CaptureRequest request = {frameNumber, 0 /* fmqSettingsSize */, settings, + emptyInputBuffer, outputBuffers}; + + // Settings were not correctly initialized, we should fail here + Status status = Status::OK; + uint32_t numRequestProcessed = 0; + hidl_vec cachesToRemove; + Return ret = session->processCaptureRequest( + {request}, cachesToRemove, [&status, &numRequestProcessed](auto s, + uint32_t n) { + status = s; + numRequestProcessed = n; + }); + ASSERT_TRUE(ret.isOk()); + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, status); + ASSERT_EQ(numRequestProcessed, 0u); - ret = session->close(); - ASSERT_TRUE(ret.isOk()); - } - break; - case CAMERA_DEVICE_API_VERSION_1_0: { - //Not applicable - } - break; - default: { - ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); - ADD_FAILURE(); - } - break; + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + } +} + +// Check whether an invalid capture request with missing output buffers +// will be reported correctly. 
+TEST_F(CameraHidlTest, processCaptureRequestInvalidBuffer) { + hidl_vec cameraDeviceNames = getCameraDeviceNames(mProvider); + std::vector outputBlobStreams; + AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight, + static_cast(PixelFormat::IMPLEMENTATION_DEFINED)}; + uint32_t frameNumber = 1; + ::android::hardware::hidl_vec settings; + + for (const auto& name : cameraDeviceNames) { + int deviceVersion = getCameraDeviceVersion(name, mProviderType); + if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) { + continue; + } else if (deviceVersion <= 0) { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + return; } + + V3_2::Stream previewStream; + HalStreamConfiguration halStreamConfig; + sp session; + bool supportsPartialResults = false; + uint32_t partialResultCount = 0; + configurePreviewStream(name, deviceVersion, mProvider, &previewThreshold, &session /*out*/, + &previewStream /*out*/, &halStreamConfig /*out*/, + &supportsPartialResults /*out*/, + &partialResultCount /*out*/); + + RequestTemplate reqTemplate = RequestTemplate::PREVIEW; + Return ret; + ret = session->constructDefaultRequestSettings(reqTemplate, + [&](auto status, const auto& req) { + ASSERT_EQ(Status::OK, status); + settings = req; + }); + ASSERT_TRUE(ret.isOk()); + + ::android::hardware::hidl_vec emptyOutputBuffers; + StreamBuffer emptyInputBuffer = {-1, 0, nullptr, BufferStatus::ERROR, nullptr, + nullptr}; + CaptureRequest request = {frameNumber, 0 /* fmqSettingsSize */, settings, + emptyInputBuffer, emptyOutputBuffers}; + + // Output buffers are missing, we should fail here + Status status = Status::OK; + uint32_t numRequestProcessed = 0; + hidl_vec cachesToRemove; + ret = session->processCaptureRequest( + {request}, cachesToRemove, [&status, &numRequestProcessed](auto s, + uint32_t n) { + status = s; + numRequestProcessed = n; + }); + ASSERT_TRUE(ret.isOk()); + ASSERT_EQ(Status::ILLEGAL_ARGUMENT, status); + ASSERT_EQ(numRequestProcessed, 0u); + + ret = session->close(); + ASSERT_TRUE(ret.isOk()); } } @@ -3263,130 +3924,123 @@ TEST_F(CameraHidlTest, flushPreviewRequest) { for (const auto& name : cameraDeviceNames) { int deviceVersion = getCameraDeviceVersion(name, mProviderType); - switch (deviceVersion) { - case CAMERA_DEVICE_API_VERSION_3_3: - case CAMERA_DEVICE_API_VERSION_3_2: { - Stream previewStream; - HalStreamConfiguration halStreamConfig; - sp session; - bool supportsPartialResults = false; - uint32_t partialResultCount = 0; - configurePreviewStream(name, mProvider, &previewThreshold, &session /*out*/, - &previewStream /*out*/, &halStreamConfig /*out*/, - &supportsPartialResults /*out*/, - &partialResultCount /*out*/); - - std::shared_ptr resultQueue; - auto resultQueueRet = - session->getCaptureResultMetadataQueue( - [&resultQueue](const auto& descriptor) { - resultQueue = std::make_shared( - descriptor); - if (!resultQueue->isValid() || - resultQueue->availableToWrite() <= 0) { - ALOGE("%s: HAL returns empty result metadata fmq," - " not use it", __func__); - resultQueue = nullptr; - // Don't use the queue onwards. 
- } - }); - ASSERT_TRUE(resultQueueRet.isOk()); - - InFlightRequest inflightReq = {1, false, supportsPartialResults, - partialResultCount, resultQueue}; - RequestTemplate reqTemplate = RequestTemplate::PREVIEW; - Return ret; - ret = session->constructDefaultRequestSettings(reqTemplate, - [&](auto status, const auto& req) { - ASSERT_EQ(Status::OK, status); - settings = req; - }); - ASSERT_TRUE(ret.isOk()); - - sp gb = new GraphicBuffer( - previewStream.width, previewStream.height, - static_cast(halStreamConfig.streams[0].overrideFormat), 1, - android_convertGralloc1To0Usage(halStreamConfig.streams[0].producerUsage, - halStreamConfig.streams[0].consumerUsage)); - ASSERT_NE(nullptr, gb.get()); - StreamBuffer outputBuffer = {halStreamConfig.streams[0].id, - bufferId, - hidl_handle(gb->getNativeBuffer()->handle), - BufferStatus::OK, - nullptr, - nullptr}; - ::android::hardware::hidl_vec outputBuffers = {outputBuffer}; - const StreamBuffer emptyInputBuffer = {-1, 0, nullptr, - BufferStatus::ERROR, nullptr, nullptr}; - CaptureRequest request = {frameNumber, 0 /* fmqSettingsSize */, settings, - emptyInputBuffer, outputBuffers}; - - { - std::unique_lock l(mLock); - mInflightMap.clear(); - mInflightMap.add(frameNumber, &inflightReq); - } - - Status status = Status::INTERNAL_ERROR; - uint32_t numRequestProcessed = 0; - hidl_vec cachesToRemove; - ret = session->processCaptureRequest( - {request}, cachesToRemove, [&status, &numRequestProcessed](auto s, - uint32_t n) { - status = s; - numRequestProcessed = n; - }); + if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) { + continue; + } else if (deviceVersion <= 0) { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + return; + } - ASSERT_TRUE(ret.isOk()); - ASSERT_EQ(Status::OK, status); - ASSERT_EQ(numRequestProcessed, 1u); - // Flush before waiting for request to complete. - Return returnStatus = session->flush(); - ASSERT_TRUE(returnStatus.isOk()); - ASSERT_EQ(Status::OK, returnStatus); + V3_2::Stream previewStream; + HalStreamConfiguration halStreamConfig; + sp session; + bool supportsPartialResults = false; + uint32_t partialResultCount = 0; + configurePreviewStream(name, deviceVersion, mProvider, &previewThreshold, &session /*out*/, + &previewStream /*out*/, &halStreamConfig /*out*/, + &supportsPartialResults /*out*/, + &partialResultCount /*out*/); - { - std::unique_lock l(mLock); - while (!inflightReq.errorCodeValid && - ((0 < inflightReq.numBuffersLeft) || - (!inflightReq.haveResultMetadata))) { - auto timeout = std::chrono::system_clock::now() + - std::chrono::seconds(kStreamBufferTimeoutSec); - ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, - timeout)); + std::shared_ptr resultQueue; + auto resultQueueRet = + session->getCaptureResultMetadataQueue( + [&resultQueue](const auto& descriptor) { + resultQueue = std::make_shared( + descriptor); + if (!resultQueue->isValid() || + resultQueue->availableToWrite() <= 0) { + ALOGE("%s: HAL returns empty result metadata fmq," + " not use it", __func__); + resultQueue = nullptr; + // Don't use the queue onwards. 
} + }); + ASSERT_TRUE(resultQueueRet.isOk()); + + InFlightRequest inflightReq = {1, false, supportsPartialResults, + partialResultCount, resultQueue}; + RequestTemplate reqTemplate = RequestTemplate::PREVIEW; + Return ret; + ret = session->constructDefaultRequestSettings(reqTemplate, + [&](auto status, const auto& req) { + ASSERT_EQ(Status::OK, status); + settings = req; + }); + ASSERT_TRUE(ret.isOk()); + + hidl_handle buffer_handle; + allocateGraphicBuffer(previewStream.width, previewStream.height, + android_convertGralloc1To0Usage(halStreamConfig.streams[0].producerUsage, + halStreamConfig.streams[0].consumerUsage), + halStreamConfig.streams[0].overrideFormat, &buffer_handle); + + StreamBuffer outputBuffer = {halStreamConfig.streams[0].id, + bufferId, + buffer_handle, + BufferStatus::OK, + nullptr, + nullptr}; + ::android::hardware::hidl_vec outputBuffers = {outputBuffer}; + const StreamBuffer emptyInputBuffer = {-1, 0, nullptr, + BufferStatus::ERROR, nullptr, nullptr}; + CaptureRequest request = {frameNumber, 0 /* fmqSettingsSize */, settings, + emptyInputBuffer, outputBuffers}; + + { + std::unique_lock l(mLock); + mInflightMap.clear(); + mInflightMap.add(frameNumber, &inflightReq); + } - if (!inflightReq.errorCodeValid) { - ASSERT_NE(inflightReq.resultOutputBuffers.size(), 0u); - ASSERT_EQ(previewStream.id, inflightReq.resultOutputBuffers[0].streamId); - } else { - switch (inflightReq.errorCode) { - case ErrorCode::ERROR_REQUEST: - case ErrorCode::ERROR_RESULT: - case ErrorCode::ERROR_BUFFER: - // Expected - break; - case ErrorCode::ERROR_DEVICE: - default: - FAIL() << "Unexpected error:" - << static_cast(inflightReq.errorCode); - } - } + Status status = Status::INTERNAL_ERROR; + uint32_t numRequestProcessed = 0; + hidl_vec cachesToRemove; + ret = session->processCaptureRequest( + {request}, cachesToRemove, [&status, &numRequestProcessed](auto s, + uint32_t n) { + status = s; + numRequestProcessed = n; + }); - ret = session->close(); - ASSERT_TRUE(ret.isOk()); - } - } - break; - case CAMERA_DEVICE_API_VERSION_1_0: { - //Not applicable + ASSERT_TRUE(ret.isOk()); + ASSERT_EQ(Status::OK, status); + ASSERT_EQ(numRequestProcessed, 1u); + // Flush before waiting for request to complete. 
+ Return returnStatus = session->flush(); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, returnStatus); + + { + std::unique_lock l(mLock); + while (!inflightReq.errorCodeValid && + ((0 < inflightReq.numBuffersLeft) || + (!inflightReq.haveResultMetadata))) { + auto timeout = std::chrono::system_clock::now() + + std::chrono::seconds(kStreamBufferTimeoutSec); + ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, + timeout)); } - break; - default: { - ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); - ADD_FAILURE(); + + if (!inflightReq.errorCodeValid) { + ASSERT_NE(inflightReq.resultOutputBuffers.size(), 0u); + ASSERT_EQ(previewStream.id, inflightReq.resultOutputBuffers[0].streamId); + } else { + switch (inflightReq.errorCode) { + case ErrorCode::ERROR_REQUEST: + case ErrorCode::ERROR_RESULT: + case ErrorCode::ERROR_BUFFER: + // Expected + break; + case ErrorCode::ERROR_DEVICE: + default: + FAIL() << "Unexpected error:" + << static_cast(inflightReq.errorCode); + } } - break; + + ret = session->close(); + ASSERT_TRUE(ret.isOk()); } } } @@ -3400,44 +4054,37 @@ TEST_F(CameraHidlTest, flushEmpty) { for (const auto& name : cameraDeviceNames) { int deviceVersion = getCameraDeviceVersion(name, mProviderType); - switch (deviceVersion) { - case CAMERA_DEVICE_API_VERSION_3_3: - case CAMERA_DEVICE_API_VERSION_3_2: { - Stream previewStream; - HalStreamConfiguration halStreamConfig; - sp session; - bool supportsPartialResults = false; - uint32_t partialResultCount = 0; - configurePreviewStream(name, mProvider, &previewThreshold, &session /*out*/, - &previewStream /*out*/, &halStreamConfig /*out*/, - &supportsPartialResults /*out*/, - &partialResultCount /*out*/); - - Return returnStatus = session->flush(); - ASSERT_TRUE(returnStatus.isOk()); - ASSERT_EQ(Status::OK, returnStatus); - - { - std::unique_lock l(mLock); - auto timeout = std::chrono::system_clock::now() + - std::chrono::milliseconds(kEmptyFlushTimeoutMSec); - ASSERT_EQ(std::cv_status::timeout, mResultCondition.wait_until(l, timeout)); - } + if (deviceVersion == CAMERA_DEVICE_API_VERSION_1_0) { + continue; + } else if (deviceVersion <= 0) { + ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); + ADD_FAILURE(); + return; + } - Return ret = session->close(); - ASSERT_TRUE(ret.isOk()); - } - break; - case CAMERA_DEVICE_API_VERSION_1_0: { - //Not applicable - } - break; - default: { - ALOGE("%s: Unsupported device version %d", __func__, deviceVersion); - ADD_FAILURE(); - } - break; + V3_2::Stream previewStream; + HalStreamConfiguration halStreamConfig; + sp session; + bool supportsPartialResults = false; + uint32_t partialResultCount = 0; + configurePreviewStream(name, deviceVersion, mProvider, &previewThreshold, &session /*out*/, + &previewStream /*out*/, &halStreamConfig /*out*/, + &supportsPartialResults /*out*/, + &partialResultCount /*out*/); + + Return returnStatus = session->flush(); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(Status::OK, returnStatus); + + { + std::unique_lock l(mLock); + auto timeout = std::chrono::system_clock::now() + + std::chrono::milliseconds(kEmptyFlushTimeoutMSec); + ASSERT_EQ(std::cv_status::timeout, mResultCondition.wait_until(l, timeout)); } + + Return ret = session->close(); + ASSERT_TRUE(ret.isOk()); } } @@ -3480,6 +4127,109 @@ Status CameraHidlTest::getAvailableOutputStreams(camera_metadata_t *staticMeta, return Status::OK; } +// Check if the camera device has logical multi-camera capability. 
+Status CameraHidlTest::isLogicalMultiCamera(camera_metadata_t *staticMeta) {
+    Status ret = Status::METHOD_NOT_SUPPORTED;
+    if (nullptr == staticMeta) {
+        return Status::ILLEGAL_ARGUMENT;
+    }
+
+    camera_metadata_ro_entry entry;
+    int rc = find_camera_metadata_ro_entry(staticMeta,
+            ANDROID_REQUEST_AVAILABLE_CAPABILITIES, &entry);
+    if (0 != rc) {
+        return Status::ILLEGAL_ARGUMENT;
+    }
+
+    for (size_t i = 0; i < entry.count; i++) {
+        if (ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA == entry.data.u8[i]) {
+            ret = Status::OK;
+            break;
+        }
+    }
+
+    return ret;
+}
+
+// Generate a list of physical camera ids backing a logical multi-camera.
+Status CameraHidlTest::getPhysicalCameraIds(camera_metadata_t *staticMeta,
+        std::unordered_set<std::string> *physicalIds) {
+    if ((nullptr == staticMeta) || (nullptr == physicalIds)) {
+        return Status::ILLEGAL_ARGUMENT;
+    }
+
+    camera_metadata_ro_entry entry;
+    int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS,
+            &entry);
+    if (0 != rc) {
+        return Status::ILLEGAL_ARGUMENT;
+    }
+
+    const uint8_t* ids = entry.data.u8;
+    size_t start = 0;
+    for (size_t i = 0; i < entry.count; i++) {
+        if (ids[i] == '\0') {
+            if (start != i) {
+                std::string currentId(reinterpret_cast<const char *>(ids + start));
+                physicalIds->emplace(currentId);
+            }
+            start = i + 1;
+        }
+    }
+
+    return Status::OK;
+}
+
+// Generate a set of supported camera key ids.
+Status CameraHidlTest::getSupportedKeys(camera_metadata_t *staticMeta,
+        uint32_t tagId, std::unordered_set<int32_t> *requestIDs) {
+    if ((nullptr == staticMeta) || (nullptr == requestIDs)) {
+        return Status::ILLEGAL_ARGUMENT;
+    }
+
+    camera_metadata_ro_entry entry;
+    int rc = find_camera_metadata_ro_entry(staticMeta, tagId, &entry);
+    if ((0 != rc) || (entry.count == 0)) {
+        return Status::OK;
+    }
+
+    requestIDs->insert(entry.data.i32, entry.data.i32 + entry.count);
+
+    return Status::OK;
+}
+
+void CameraHidlTest::constructFilteredSettings(const sp<ICameraDeviceSession>& session,
+        const std::unordered_set<int32_t>& availableKeys, RequestTemplate reqTemplate,
+        android::hardware::camera::common::V1_0::helper::CameraMetadata* defaultSettings,
+        android::hardware::camera::common::V1_0::helper::CameraMetadata* filteredSettings) {
+    ASSERT_NE(defaultSettings, nullptr);
+    ASSERT_NE(filteredSettings, nullptr);
+
+    auto ret = session->constructDefaultRequestSettings(reqTemplate,
+            [&defaultSettings] (auto status, const auto& req) mutable {
+                ASSERT_EQ(Status::OK, status);
+
+                const camera_metadata_t *metadata = reinterpret_cast<const camera_metadata_t*> (
+                        req.data());
+                size_t expectedSize = req.size();
+                int result = validate_camera_metadata_structure(metadata, &expectedSize);
+                ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED));
+
+                size_t entryCount = get_camera_metadata_entry_count(metadata);
+                ASSERT_GT(entryCount, 0u);
+                *defaultSettings = metadata;
+            });
+    ASSERT_TRUE(ret.isOk());
+    const android::hardware::camera::common::V1_0::helper::CameraMetadata &constSettings =
+        *defaultSettings;
+    for (const auto& keyIt : availableKeys) {
+        camera_metadata_ro_entry entry = constSettings.find(keyIt);
+        if (entry.count > 0) {
+            filteredSettings->update(entry);
+        }
+    }
+}
+
 // Check if constrained mode is supported by using the static
 // camera characteristics.
Status CameraHidlTest::isConstrainedModeAvailable(camera_metadata_t *staticMeta) { @@ -3624,12 +4374,136 @@ Status CameraHidlTest::isAutoFocusModeAvailable( return Status::METHOD_NOT_SUPPORTED; } +void CameraHidlTest::createStreamConfiguration( + const ::android::hardware::hidl_vec& streams3_2, + StreamConfigurationMode configMode, + ::android::hardware::camera::device::V3_2::StreamConfiguration *config3_2 /*out*/, + ::android::hardware::camera::device::V3_4::StreamConfiguration *config3_4 /*out*/) { + ASSERT_NE(nullptr, config3_2); + ASSERT_NE(nullptr, config3_4); + + ::android::hardware::hidl_vec streams3_4(streams3_2.size()); + size_t idx = 0; + for (auto& stream3_2 : streams3_2) { + V3_4::Stream stream; + stream.v3_2 = stream3_2; + streams3_4[idx++] = stream; + } + *config3_4 = {streams3_4, configMode, {}}; + *config3_2 = {streams3_2, configMode}; +} + +// Configure multiple preview streams using different physical ids. +void CameraHidlTest::configurePreviewStreams3_4(const std::string &name, int32_t deviceVersion, + sp provider, + const AvailableStream *previewThreshold, + const std::unordered_set& physicalIds, + sp *session3_4 /*out*/, + V3_2::Stream *previewStream /*out*/, + device::V3_4::HalStreamConfiguration *halStreamConfig /*out*/, + bool *supportsPartialResults /*out*/, + uint32_t *partialResultCount /*out*/) { + ASSERT_NE(nullptr, session3_4); + ASSERT_NE(nullptr, halStreamConfig); + ASSERT_NE(nullptr, previewStream); + ASSERT_NE(nullptr, supportsPartialResults); + ASSERT_NE(nullptr, partialResultCount); + ASSERT_FALSE(physicalIds.empty()); + + std::vector outputPreviewStreams; + ::android::sp device3_x; + ALOGI("configureStreams: Testing camera device %s", name.c_str()); + Return ret; + ret = provider->getCameraDeviceInterface_V3_x( + name, + [&](auto status, const auto& device) { + ALOGI("getCameraDeviceInterface_V3_x returns status:%d", + (int)status); + ASSERT_EQ(Status::OK, status); + ASSERT_NE(device, nullptr); + device3_x = device; + }); + ASSERT_TRUE(ret.isOk()); + + sp cb = new DeviceCb(this); + sp session; + ret = device3_x->open( + cb, + [&session](auto status, const auto& newSession) { + ALOGI("device::open returns status:%d", (int)status); + ASSERT_EQ(Status::OK, status); + ASSERT_NE(newSession, nullptr); + session = newSession; + }); + ASSERT_TRUE(ret.isOk()); + + sp session3_3; + castSession(session, deviceVersion, &session3_3, session3_4); + ASSERT_NE(nullptr, session3_4); + + camera_metadata_t *staticMeta; + ret = device3_x->getCameraCharacteristics([&] (Status s, + CameraMetadata metadata) { + ASSERT_EQ(Status::OK, s); + staticMeta = clone_camera_metadata( + reinterpret_cast(metadata.data())); + ASSERT_NE(nullptr, staticMeta); + }); + ASSERT_TRUE(ret.isOk()); + + camera_metadata_ro_entry entry; + auto status = find_camera_metadata_ro_entry(staticMeta, + ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &entry); + if ((0 == status) && (entry.count > 0)) { + *partialResultCount = entry.data.i32[0]; + *supportsPartialResults = (*partialResultCount > 1); + } + + outputPreviewStreams.clear(); + auto rc = getAvailableOutputStreams(staticMeta, + outputPreviewStreams, previewThreshold); + free_camera_metadata(staticMeta); + ASSERT_EQ(Status::OK, rc); + ASSERT_FALSE(outputPreviewStreams.empty()); + + ::android::hardware::hidl_vec streams3_4(physicalIds.size()); + int32_t streamId = 0; + for (auto const& physicalId : physicalIds) { + V3_4::Stream stream3_4 = {{streamId, StreamType::OUTPUT, + static_cast (outputPreviewStreams[0].width), + static_cast 
(outputPreviewStreams[0].height), + static_cast (outputPreviewStreams[0].format), + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, 0, StreamRotation::ROTATION_0}, + physicalId.c_str(), /*bufferSize*/ 0}; + streams3_4[streamId++] = stream3_4; + } + + ::android::hardware::camera::device::V3_4::StreamConfiguration config3_4; + config3_4 = {streams3_4, StreamConfigurationMode::NORMAL_MODE, {}}; + RequestTemplate reqTemplate = RequestTemplate::PREVIEW; + ret = (*session3_4)->constructDefaultRequestSettings(reqTemplate, + [&config3_4](auto status, const auto& req) { + ASSERT_EQ(Status::OK, status); + config3_4.sessionParams = req; + }); + ASSERT_TRUE(ret.isOk()); + + ret = (*session3_4)->configureStreams_3_4(config3_4, + [&] (Status s, device::V3_4::HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(physicalIds.size(), halConfig.streams.size()); + *halStreamConfig = halConfig; + }); + *previewStream = streams3_4[0].v3_2; + ASSERT_TRUE(ret.isOk()); +} + // Open a device session and configure a preview stream. -void CameraHidlTest::configurePreviewStream(const std::string &name, +void CameraHidlTest::configurePreviewStream(const std::string &name, int32_t deviceVersion, sp provider, const AvailableStream *previewThreshold, sp *session /*out*/, - Stream *previewStream /*out*/, + V3_2::Stream *previewStream /*out*/, HalStreamConfiguration *halStreamConfig /*out*/, bool *supportsPartialResults /*out*/, uint32_t *partialResultCount /*out*/) { @@ -3665,9 +4539,9 @@ void CameraHidlTest::configurePreviewStream(const std::string &name, }); ASSERT_TRUE(ret.isOk()); - auto castResult = device::V3_3::ICameraDeviceSession::castFrom(*session); - ASSERT_TRUE(castResult.isOk()); - sp session3_3 = castResult; + sp session3_3; + sp session3_4; + castSession(*session, deviceVersion, &session3_3, &session3_4); camera_metadata_t *staticMeta; ret = device3_x->getCameraCharacteristics([&] (Status s, @@ -3694,23 +4568,35 @@ void CameraHidlTest::configurePreviewStream(const std::string &name, ASSERT_EQ(Status::OK, rc); ASSERT_FALSE(outputPreviewStreams.empty()); - *previewStream = {0, StreamType::OUTPUT, + V3_2::Stream stream3_2 = {0, StreamType::OUTPUT, static_cast (outputPreviewStreams[0].width), static_cast (outputPreviewStreams[0].height), static_cast (outputPreviewStreams[0].format), GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, 0, StreamRotation::ROTATION_0}; - ::android::hardware::hidl_vec streams = {*previewStream}; - StreamConfiguration config = {streams, - StreamConfigurationMode::NORMAL_MODE}; - if (session3_3 == nullptr) { - ret = (*session)->configureStreams(config, - [&] (Status s, HalStreamConfiguration halConfig) { + ::android::hardware::hidl_vec streams3_2 = {stream3_2}; + ::android::hardware::camera::device::V3_2::StreamConfiguration config3_2; + ::android::hardware::camera::device::V3_4::StreamConfiguration config3_4; + createStreamConfiguration(streams3_2, StreamConfigurationMode::NORMAL_MODE, + &config3_2, &config3_4); + if (session3_4 != nullptr) { + RequestTemplate reqTemplate = RequestTemplate::PREVIEW; + ret = session3_4->constructDefaultRequestSettings(reqTemplate, + [&config3_4](auto status, const auto& req) { + ASSERT_EQ(Status::OK, status); + config3_4.sessionParams = req; + }); + ASSERT_TRUE(ret.isOk()); + ret = session3_4->configureStreams_3_4(config3_4, + [&] (Status s, device::V3_4::HalStreamConfiguration halConfig) { ASSERT_EQ(Status::OK, s); ASSERT_EQ(1u, halConfig.streams.size()); - *halStreamConfig = halConfig; + halStreamConfig->streams.resize(halConfig.streams.size()); + for 
(size_t i = 0; i < halConfig.streams.size(); i++) { + halStreamConfig->streams[i] = halConfig.streams[i].v3_3.v3_2; + } }); - } else { - ret = session3_3->configureStreams_3_3(config, + } else if (session3_3 != nullptr) { + ret = session3_3->configureStreams_3_3(config3_2, [&] (Status s, device::V3_3::HalStreamConfiguration halConfig) { ASSERT_EQ(Status::OK, s); ASSERT_EQ(1u, halConfig.streams.size()); @@ -3719,15 +4605,48 @@ void CameraHidlTest::configurePreviewStream(const std::string &name, halStreamConfig->streams[i] = halConfig.streams[i].v3_2; } }); + } else { + ret = (*session)->configureStreams(config3_2, + [&] (Status s, HalStreamConfiguration halConfig) { + ASSERT_EQ(Status::OK, s); + ASSERT_EQ(1u, halConfig.streams.size()); + *halStreamConfig = halConfig; + }); } + *previewStream = stream3_2; ASSERT_TRUE(ret.isOk()); } +//Cast camera device session to corresponding version +void CameraHidlTest::castSession(const sp &session, int32_t deviceVersion, + sp *session3_3 /*out*/, + sp *session3_4 /*out*/) { + ASSERT_NE(nullptr, session3_3); + ASSERT_NE(nullptr, session3_4); + + switch (deviceVersion) { + case CAMERA_DEVICE_API_VERSION_3_4: { + auto castResult = device::V3_4::ICameraDeviceSession::castFrom(session); + ASSERT_TRUE(castResult.isOk()); + *session3_4 = castResult; + break; + } + case CAMERA_DEVICE_API_VERSION_3_3: { + auto castResult = device::V3_3::ICameraDeviceSession::castFrom(session); + ASSERT_TRUE(castResult.isOk()); + *session3_3 = castResult; + break; + } + default: + //no-op + return; + } +} + // Open a device session with empty callbacks and return static metadata. void CameraHidlTest::openEmptyDeviceSession(const std::string &name, sp provider, sp *session /*out*/, - sp *session3_3 /*out*/, camera_metadata_t **staticMeta /*out*/) { ASSERT_NE(nullptr, session); ASSERT_NE(nullptr, staticMeta); @@ -3763,12 +4682,6 @@ void CameraHidlTest::openEmptyDeviceSession(const std::string &name, ASSERT_NE(nullptr, *staticMeta); }); ASSERT_TRUE(ret.isOk()); - - if(session3_3 != nullptr) { - auto castResult = device::V3_3::ICameraDeviceSession::castFrom(*session); - ASSERT_TRUE(castResult.isOk()); - *session3_3 = castResult; - } } // Open a particular camera device. 
@@ -3897,6 +4810,44 @@ void CameraHidlTest::setParameters( ASSERT_EQ(Status::OK, returnStatus); } +void CameraHidlTest::allocateGraphicBuffer(uint32_t width, uint32_t height, uint64_t usage, + PixelFormat format, hidl_handle *buffer_handle /*out*/) { + ASSERT_NE(buffer_handle, nullptr); + + sp allocator = + android::hardware::graphics::allocator::V2_0::IAllocator::getService(); + ASSERT_NE(nullptr, allocator.get()); + + sp mapper = + android::hardware::graphics::mapper::V2_0::IMapper::getService(); + ASSERT_NE(mapper.get(), nullptr); + + android::hardware::graphics::mapper::V2_0::IMapper::BufferDescriptorInfo descriptorInfo {}; + descriptorInfo.width = width; + descriptorInfo.height = height; + descriptorInfo.layerCount = 1; + descriptorInfo.format = format; + descriptorInfo.usage = usage; + + ::android::hardware::hidl_vec descriptor; + auto ret = mapper->createDescriptor( + descriptorInfo, [&descriptor](android::hardware::graphics::mapper::V2_0::Error err, + ::android::hardware::hidl_vec desc) { + ASSERT_EQ(err, android::hardware::graphics::mapper::V2_0::Error::NONE); + descriptor = desc; + }); + ASSERT_TRUE(ret.isOk()); + + ret = allocator->allocate(descriptor, 1u, + [&](android::hardware::graphics::mapper::V2_0::Error err, uint32_t /*stride*/, + const ::android::hardware::hidl_vec<::android::hardware::hidl_handle>& buffers) { + ASSERT_EQ(android::hardware::graphics::mapper::V2_0::Error::NONE, err); + ASSERT_EQ(buffers.size(), 1u); + *buffer_handle = buffers[0]; + }); + ASSERT_TRUE(ret.isOk()); +} + int main(int argc, char **argv) { ::testing::AddGlobalTestEnvironment(CameraHidlEnvironment::Instance()); ::testing::InitGoogleTest(&argc, argv); diff --git a/cas/1.0/CasHal.mk b/cas/1.0/CasHal.mk deleted file mode 100644 index 3cae6bf4167651e1e9d2ff913e9eeb18ecbe41cc..0000000000000000000000000000000000000000 --- a/cas/1.0/CasHal.mk +++ /dev/null @@ -1,192 +0,0 @@ -# -# Copyright (C) 2017 The Android Open Source Project -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -######################################################################## -# Included by frameworks/base for MediaCas. Hidl HAL can't be linked as -# Java lib from frameworks because it has dependency on frameworks itself. 
-# - -intermediates := $(TARGET_OUT_COMMON_GEN)/JAVA_LIBRARIES/android.hardware.cas-V1.0-java_intermediates - -HIDL := $(HOST_OUT_EXECUTABLES)/hidl-gen$(HOST_EXECUTABLE_SUFFIX) -HIDL_PATH := system/libhidl/transport/base/1.0 - -# -# Build types.hal (DebugInfo) -# -GEN := $(intermediates)/android/hidl/base/V1_0/DebugInfo.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(HIDL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hidl:system/libhidl/transport \ - android.hidl.base@1.0::types.DebugInfo - -$(GEN): $(HIDL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build IBase.hal -# -GEN := $(intermediates)/android/hidl/base/V1_0/IBase.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(HIDL_PATH)/IBase.hal -$(GEN): PRIVATE_DEPS += $(HIDL_PATH)/types.hal -$(GEN): $(HIDL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hidl:system/libhidl/transport \ - android.hidl.base@1.0::IBase - -$(GEN): $(HIDL_PATH)/IBase.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -HIDL_PATH := hardware/interfaces/cas/1.0 - -# -# Build types.hal (HidlCasPluginDescriptor) -# -GEN := $(intermediates)/android/hardware/cas/V1_0/HidlCasPluginDescriptor.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(HIDL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.cas@1.0::types.HidlCasPluginDescriptor - -$(GEN): $(HIDL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build types.hal (Status) -# -GEN := $(intermediates)/android/hardware/cas/V1_0/Status.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(HIDL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.cas@1.0::types.Status - -$(GEN): $(HIDL_PATH)/types.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build ICas.hal -# -GEN := $(intermediates)/android/hardware/cas/V1_0/ICas.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(HIDL_PATH)/ICas.hal -$(GEN): PRIVATE_DEPS += $(HIDL_PATH)/types.hal -$(GEN): $(HIDL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.cas@1.0::ICas - -$(GEN): $(HIDL_PATH)/ICas.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build ICasListener.hal -# -GEN := $(intermediates)/android/hardware/cas/V1_0/ICasListener.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(HIDL_PATH)/ICasListener.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ 
- -randroid.hidl:system/libhidl/transport \ - android.hardware.cas@1.0::ICasListener - -$(GEN): $(HIDL_PATH)/ICasListener.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build IDescramblerBase.hal -# -GEN := $(intermediates)/android/hardware/cas/V1_0/IDescramblerBase.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(HIDL_PATH)/IDescramblerBase.hal -$(GEN): PRIVATE_DEPS += $(HIDL_PATH)/types.hal -$(GEN): $(HIDL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.cas@1.0::IDescramblerBase - -$(GEN): $(HIDL_PATH)/IDescramblerBase.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - -# -# Build IMediaCasService.hal -# -GEN := $(intermediates)/android/hardware/cas/V1_0/IMediaCasService.java -$(GEN): $(HIDL) -$(GEN): PRIVATE_HIDL := $(HIDL) -$(GEN): PRIVATE_DEPS := $(HIDL_PATH)/IMediaCasService.hal -$(GEN): PRIVATE_DEPS += $(HIDL_PATH)/ICas.hal -$(GEN): $(HIDL_PATH)/ICas.hal -$(GEN): PRIVATE_DEPS += $(HIDL_PATH)/ICasListener.hal -$(GEN): $(HIDL_PATH)/ICasListener.hal -$(GEN): PRIVATE_DEPS += $(HIDL_PATH)/IDescramblerBase.hal -$(GEN): $(HIDL_PATH)/IDescramblerBase.hal -$(GEN): PRIVATE_DEPS += $(HIDL_PATH)/types.hal -$(GEN): $(HIDL_PATH)/types.hal -$(GEN): PRIVATE_OUTPUT_DIR := $(intermediates) -$(GEN): PRIVATE_CUSTOM_TOOL = \ - $(PRIVATE_HIDL) -o $(PRIVATE_OUTPUT_DIR) \ - -Ljava \ - -randroid.hardware:hardware/interfaces \ - -randroid.hidl:system/libhidl/transport \ - android.hardware.cas@1.0::IMediaCasService - -$(GEN): $(HIDL_PATH)/IMediaCasService.hal - $(transform-generated-source) -LOCAL_GENERATED_SOURCES += $(GEN) - diff --git a/cas/1.0/default/CasImpl.cpp b/cas/1.0/default/CasImpl.cpp index 9d1f4a3268800320700645733aa4e27a19617e36..178020e4774fb813e70a806b6031daf54785497f 100644 --- a/cas/1.0/default/CasImpl.cpp +++ b/cas/1.0/default/CasImpl.cpp @@ -31,19 +31,8 @@ namespace cas { namespace V1_0 { namespace implementation { -struct CasImpl::PluginHolder : public RefBase { -public: - explicit PluginHolder(CasPlugin *plugin) : mPlugin(plugin) {} - ~PluginHolder() { if (mPlugin != NULL) delete mPlugin; } - CasPlugin* get() { return mPlugin; } - -private: - CasPlugin *mPlugin; - DISALLOW_EVIL_CONSTRUCTORS(PluginHolder); -}; - CasImpl::CasImpl(const sp &listener) - : mPluginHolder(NULL), mListener(listener) { + : mListener(listener) { ALOGV("CTOR"); } @@ -69,7 +58,8 @@ void CasImpl::OnEvent( void CasImpl::init(const sp& library, CasPlugin *plugin) { mLibrary = library; - mPluginHolder = new PluginHolder(plugin); + std::shared_ptr holder(plugin); + std::atomic_store(&mPluginHolder, holder); } void CasImpl::onEvent( @@ -88,21 +78,22 @@ void CasImpl::onEvent( Return CasImpl::setPrivateData(const HidlCasData& pvtData) { ALOGV("%s", __FUNCTION__); - sp holder = mPluginHolder; - if (holder == NULL) { + std::shared_ptr holder = std::atomic_load(&mPluginHolder); + if (holder.get() == nullptr) { return toStatus(INVALID_OPERATION); } - return toStatus(holder->get()->setPrivateData(pvtData)); + return toStatus(holder->setPrivateData(pvtData)); } Return CasImpl::openSession(openSession_cb _hidl_cb) { ALOGV("%s", __FUNCTION__); CasSessionId sessionId; - sp holder = mPluginHolder; + std::shared_ptr holder = std::atomic_load(&mPluginHolder); status_t err = INVALID_OPERATION; - if (holder != NULL) { - err = 
holder->get()->openSession(&sessionId); + if (holder.get() != nullptr) { + err = holder->openSession(&sessionId); + holder.reset(); } _hidl_cb(toStatus(err), sessionId); @@ -114,87 +105,87 @@ Return CasImpl::setSessionPrivateData( const HidlCasSessionId &sessionId, const HidlCasData& pvtData) { ALOGV("%s: sessionId=%s", __FUNCTION__, sessionIdToString(sessionId).string()); - sp holder = mPluginHolder; - if (holder == NULL) { + std::shared_ptr holder = std::atomic_load(&mPluginHolder); + if (holder.get() == nullptr) { return toStatus(INVALID_OPERATION); } - return toStatus( - holder->get()->setSessionPrivateData( - sessionId, pvtData)); + return toStatus(holder->setSessionPrivateData(sessionId, pvtData)); } Return CasImpl::closeSession(const HidlCasSessionId &sessionId) { ALOGV("%s: sessionId=%s", __FUNCTION__, sessionIdToString(sessionId).string()); - sp holder = mPluginHolder; - if (holder == NULL) { + std::shared_ptr holder = std::atomic_load(&mPluginHolder); + if (holder.get() == nullptr) { return toStatus(INVALID_OPERATION); } - return toStatus(holder->get()->closeSession(sessionId)); + return toStatus(holder->closeSession(sessionId)); } Return CasImpl::processEcm( const HidlCasSessionId &sessionId, const HidlCasData& ecm) { ALOGV("%s: sessionId=%s", __FUNCTION__, sessionIdToString(sessionId).string()); - sp holder = mPluginHolder; - if (holder == NULL) { + std::shared_ptr holder = std::atomic_load(&mPluginHolder); + if (holder.get() == nullptr) { return toStatus(INVALID_OPERATION); } - return toStatus(holder->get()->processEcm(sessionId, ecm)); + return toStatus(holder->processEcm(sessionId, ecm)); } Return CasImpl::processEmm(const HidlCasData& emm) { ALOGV("%s", __FUNCTION__); - sp holder = mPluginHolder; - if (holder == NULL) { + std::shared_ptr holder = std::atomic_load(&mPluginHolder); + if (holder.get() == nullptr) { return toStatus(INVALID_OPERATION); } - return toStatus(holder->get()->processEmm(emm)); + return toStatus(holder->processEmm(emm)); } Return CasImpl::sendEvent( int32_t event, int32_t arg, const HidlCasData& eventData) { ALOGV("%s", __FUNCTION__); - sp holder = mPluginHolder; - if (holder == NULL) { + std::shared_ptr holder = std::atomic_load(&mPluginHolder); + if (holder.get() == nullptr) { return toStatus(INVALID_OPERATION); } - status_t err = holder->get()->sendEvent(event, arg, eventData); + status_t err = holder->sendEvent(event, arg, eventData); return toStatus(err); } Return CasImpl::provision(const hidl_string& provisionString) { ALOGV("%s: provisionString=%s", __FUNCTION__, provisionString.c_str()); - sp holder = mPluginHolder; - if (holder == NULL) { + std::shared_ptr holder = std::atomic_load(&mPluginHolder); + if (holder.get() == nullptr) { return toStatus(INVALID_OPERATION); } - return toStatus(holder->get()->provision(String8(provisionString.c_str()))); + return toStatus(holder->provision(String8(provisionString.c_str()))); } Return CasImpl::refreshEntitlements( int32_t refreshType, const HidlCasData& refreshData) { ALOGV("%s", __FUNCTION__); - sp holder = mPluginHolder; - if (holder == NULL) { + std::shared_ptr holder = std::atomic_load(&mPluginHolder); + if (holder.get() == nullptr) { return toStatus(INVALID_OPERATION); } - status_t err = holder->get()->refreshEntitlements(refreshType, refreshData); + status_t err = holder->refreshEntitlements(refreshType, refreshData); return toStatus(err); } Return CasImpl::release() { - ALOGV("%s: plugin=%p", __FUNCTION__, - mPluginHolder != NULL ? 
mPluginHolder->get() : NULL); - mPluginHolder.clear(); + ALOGV("%s: plugin=%p", __FUNCTION__, mPluginHolder.get()); + + std::shared_ptr holder(nullptr); + std::atomic_store(&mPluginHolder, holder); + return Status::OK; } diff --git a/cas/1.0/default/CasImpl.h b/cas/1.0/default/CasImpl.h index 841d64e038328e7fbae9343cac77d203ba5e8ceb..d7928381ccb6bccfa3b12e49330aa0b1d754d1a3 100644 --- a/cas/1.0/default/CasImpl.h +++ b/cas/1.0/default/CasImpl.h @@ -88,7 +88,7 @@ public: private: struct PluginHolder; sp mLibrary; - sp mPluginHolder; + std::shared_ptr mPluginHolder; sp mListener; DISALLOW_EVIL_CONSTRUCTORS(CasImpl); diff --git a/cas/1.0/default/DescramblerImpl.cpp b/cas/1.0/default/DescramblerImpl.cpp index 3d90809cd05ad89fbe41e1460d1833a2bac24ef3..6d5e2d5e3262d703f196acc16eb42751fdce2dee 100644 --- a/cas/1.0/default/DescramblerImpl.cpp +++ b/cas/1.0/default/DescramblerImpl.cpp @@ -18,8 +18,9 @@ #define LOG_TAG "android.hardware.cas@1.0-DescramblerImpl" #include -#include #include +#include +#include #include #include "DescramblerImpl.h" @@ -49,12 +50,12 @@ CHECK_SUBSAMPLE_DEF(CryptoPlugin); DescramblerImpl::DescramblerImpl( const sp& library, DescramblerPlugin *plugin) : - mLibrary(library), mPlugin(plugin) { - ALOGV("CTOR: mPlugin=%p", mPlugin); + mLibrary(library), mPluginHolder(plugin) { + ALOGV("CTOR: plugin=%p", mPluginHolder.get()); } DescramblerImpl::~DescramblerImpl() { - ALOGV("DTOR: mPlugin=%p", mPlugin); + ALOGV("DTOR: plugin=%p", mPluginHolder.get()); release(); } @@ -62,12 +63,27 @@ Return DescramblerImpl::setMediaCasSession(const HidlCasSessionId& sessi ALOGV("%s: sessionId=%s", __FUNCTION__, sessionIdToString(sessionId).string()); - return toStatus(mPlugin->setMediaCasSession(sessionId)); + std::shared_ptr holder = std::atomic_load(&mPluginHolder); + if (holder.get() == nullptr) { + return toStatus(INVALID_OPERATION); + } + + return toStatus(holder->setMediaCasSession(sessionId)); } Return DescramblerImpl::requiresSecureDecoderComponent( const hidl_string& mime) { - return mPlugin->requiresSecureDecoderComponent(String8(mime.c_str())); + std::shared_ptr holder = std::atomic_load(&mPluginHolder); + if (holder.get() == nullptr) { + return false; + } + + return holder->requiresSecureDecoderComponent(String8(mime.c_str())); +} + +static inline bool validateRangeForSize( + uint64_t offset, uint64_t length, uint64_t size) { + return isInRange(0, size, offset, length); } Return DescramblerImpl::descramble( @@ -80,22 +96,88 @@ Return DescramblerImpl::descramble( descramble_cb _hidl_cb) { ALOGV("%s", __FUNCTION__); + // hidl_memory's size is stored in uint64_t, but mapMemory's mmap will map + // size in size_t. If size is over SIZE_MAX, mapMemory mapMemory could succeed + // but the mapped memory's actual size will be smaller than the reported size. + if (srcBuffer.heapBase.size() > SIZE_MAX) { + ALOGE("Invalid hidl_memory size: %llu", srcBuffer.heapBase.size()); + android_errorWriteLog(0x534e4554, "79376389"); + _hidl_cb(toStatus(BAD_VALUE), 0, NULL); + return Void(); + } + sp srcMem = mapMemory(srcBuffer.heapBase); + + // Validate if the offset and size in the SharedBuffer is consistent with the + // mapped ashmem, since the offset and size is controlled by client. 
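+ // (Added note, describing the checks that follow:) the client-supplied window
+ // [offset, offset + size) must lie entirely within the region that was actually
+ // mapped (srcMem->getSize()) rather than being taken at face value; the later
+ // checks then bound srcOffset and the total subsample bytes by that same size.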
+ if (srcMem == NULL) { + ALOGE("Failed to map src buffer."); + _hidl_cb(toStatus(BAD_VALUE), 0, NULL); + return Void(); + } + if (!validateRangeForSize( + srcBuffer.offset, srcBuffer.size, (uint64_t)srcMem->getSize())) { + ALOGE("Invalid src buffer range: offset %llu, size %llu, srcMem size %llu", + srcBuffer.offset, srcBuffer.size, (uint64_t)srcMem->getSize()); + android_errorWriteLog(0x534e4554, "67962232"); + _hidl_cb(toStatus(BAD_VALUE), 0, NULL); + return Void(); + } + + // use 64-bit here to catch bad subsample size that might be overflowing. + uint64_t totalBytesInSubSamples = 0; + for (size_t i = 0; i < subSamples.size(); i++) { + totalBytesInSubSamples += (uint64_t)subSamples[i].numBytesOfClearData + + subSamples[i].numBytesOfEncryptedData; + } + // Further validate if the specified srcOffset and requested total subsample size + // is consistent with the source shared buffer size. + if (!validateRangeForSize(srcOffset, totalBytesInSubSamples, srcBuffer.size)) { + ALOGE("Invalid srcOffset and subsample size: " + "srcOffset %llu, totalBytesInSubSamples %llu, srcBuffer size %llu", + srcOffset, totalBytesInSubSamples, srcBuffer.size); + android_errorWriteLog(0x534e4554, "67962232"); + _hidl_cb(toStatus(BAD_VALUE), 0, NULL); + return Void(); + } + void *srcPtr = (uint8_t *)(void *)srcMem->getPointer() + srcBuffer.offset; void *dstPtr = NULL; if (dstBuffer.type == BufferType::SHARED_MEMORY) { // When using shared memory, src buffer is also used as dst, // we don't map it again here. dstPtr = srcPtr; + + // In this case the dst and src would be the same buffer, need to validate + // dstOffset against the buffer size too. + if (!validateRangeForSize(dstOffset, totalBytesInSubSamples, srcBuffer.size)) { + ALOGE("Invalid dstOffset and subsample size: " + "dstOffset %llu, totalBytesInSubSamples %llu, srcBuffer size %llu", + dstOffset, totalBytesInSubSamples, srcBuffer.size); + android_errorWriteLog(0x534e4554, "67962232"); + _hidl_cb(toStatus(BAD_VALUE), 0, NULL); + return Void(); + } } else { native_handle_t *handle = const_cast( dstBuffer.secureMemory.getNativeHandle()); dstPtr = static_cast(handle); } + + // Get a local copy of the shared_ptr for the plugin. Note that before + // calling the HIDL callback, this shared_ptr must be manually reset, + // since the client side could proceed as soon as the callback is called + // without waiting for this method to go out of scope. + std::shared_ptr holder = std::atomic_load(&mPluginHolder); + if (holder.get() == nullptr) { + _hidl_cb(toStatus(INVALID_OPERATION), 0, NULL); + return Void(); + } + // Casting hidl SubSample to DescramblerPlugin::SubSample, but need // to ensure structs are actually idential - int32_t result = mPlugin->descramble( + int32_t result = holder->descramble( dstBuffer.type != BufferType::SHARED_MEMORY, (DescramblerPlugin::ScramblingControl)scramblingControl, subSamples.size(), @@ -106,17 +188,17 @@ Return DescramblerImpl::descramble( dstOffset, NULL); + holder.reset(); _hidl_cb(toStatus(result >= 0 ? 
OK : result), result, NULL); return Void(); } Return DescramblerImpl::release() { - ALOGV("%s: mPlugin=%p", __FUNCTION__, mPlugin); + ALOGV("%s: plugin=%p", __FUNCTION__, mPluginHolder.get()); + + std::shared_ptr holder(nullptr); + std::atomic_store(&mPluginHolder, holder); - if (mPlugin != NULL) { - delete mPlugin; - mPlugin = NULL; - } return Status::OK; } diff --git a/cas/1.0/default/DescramblerImpl.h b/cas/1.0/default/DescramblerImpl.h index d3b146ecc73381e08e29bc835856348ea4286888..305f1154735cdbf3f35f420735e39f7818c77fb2 100644 --- a/cas/1.0/default/DescramblerImpl.h +++ b/cas/1.0/default/DescramblerImpl.h @@ -55,7 +55,7 @@ public: private: sp mLibrary; - DescramblerPlugin *mPlugin; + std::shared_ptr mPluginHolder; DISALLOW_EVIL_CONSTRUCTORS(DescramblerImpl); }; diff --git a/cas/1.0/default/MediaCasService.cpp b/cas/1.0/default/MediaCasService.cpp index ca4322461e68674810e35a30021c285485a69d6d..dbdd0087db17d2545eb4e6bc92a453f23ffe5e90 100644 --- a/cas/1.0/default/MediaCasService.cpp +++ b/cas/1.0/default/MediaCasService.cpp @@ -69,7 +69,7 @@ Return> MediaCasService::createPlugin( if (mCasLoader.findFactoryForScheme(CA_system_id, &library, &factory)) { CasPlugin *plugin = NULL; sp casImpl = new CasImpl(listener); - if (factory->createPlugin(CA_system_id, (uint64_t)casImpl.get(), + if (factory->createPlugin(CA_system_id, casImpl.get(), &CasImpl::OnEvent, &plugin) == OK && plugin != NULL) { casImpl->init(library, plugin); result = casImpl; diff --git a/cas/1.0/default/android.hardware.cas@1.0-service.rc b/cas/1.0/default/android.hardware.cas@1.0-service.rc index 93de79444e955c6fa27ef48a90958c5db8946288..74f2f9676ebfeea0a71103b76778a68b1112e430 100644 --- a/cas/1.0/default/android.hardware.cas@1.0-service.rc +++ b/cas/1.0/default/android.hardware.cas@1.0-service.rc @@ -1,4 +1,4 @@ -service cas-hal-1-0 /vendor/bin/hw/android.hardware.cas@1.0-service +service vendor.cas-hal-1-0 /vendor/bin/hw/android.hardware.cas@1.0-service class hal user media group mediadrm drmrpc diff --git a/cas/1.0/vts/functional/Android.bp b/cas/1.0/vts/functional/Android.bp index e1e09e9ce850257e7c9ebf8f7effb752bc5e04ef..0db9bb00cd3eff24c294e25c2dc5c2dc733d84ac 100644 --- a/cas/1.0/vts/functional/Android.bp +++ b/cas/1.0/vts/functional/Android.bp @@ -23,6 +23,7 @@ cc_test { "android.hardware.cas.native@1.0", "android.hidl.allocator@1.0", "android.hidl.memory@1.0", + "libhidlallocatorutils", "libhidlmemory", ], shared_libs: [ diff --git a/cas/1.0/vts/functional/VtsHalCasV1_0TargetTest.cpp b/cas/1.0/vts/functional/VtsHalCasV1_0TargetTest.cpp index 1218307918372bbf1e44f11bd65173a2675dc869..14b8bbdd5a1dc034ce4b54a43771cb85eaa75f15 100644 --- a/cas/1.0/vts/functional/VtsHalCasV1_0TargetTest.cpp +++ b/cas/1.0/vts/functional/VtsHalCasV1_0TargetTest.cpp @@ -30,6 +30,7 @@ #include #include #include +#include #include #include @@ -53,6 +54,7 @@ using android::Condition; using android::hardware::cas::V1_0::ICas; using android::hardware::cas::V1_0::ICasListener; using android::hardware::cas::V1_0::IDescramblerBase; +using android::hardware::cas::V1_0::Status; using android::hardware::cas::native::V1_0::IDescrambler; using android::hardware::cas::native::V1_0::SubSample; using android::hardware::cas::native::V1_0::SharedBuffer; @@ -61,13 +63,12 @@ using android::hardware::cas::native::V1_0::BufferType; using android::hardware::cas::native::V1_0::ScramblingControl; using android::hardware::cas::V1_0::IMediaCasService; using android::hardware::cas::V1_0::HidlCasPluginDescriptor; -using android::hardware::Void; +using 
android::hardware::fromHeap; using android::hardware::hidl_vec; using android::hardware::hidl_string; -using android::hardware::hidl_handle; -using android::hardware::hidl_memory; +using android::hardware::HidlMemory; using android::hardware::Return; -using android::hardware::cas::V1_0::Status; +using android::hardware::Void; using android::IMemory; using android::IMemoryHeap; using android::MemoryDealer; @@ -237,12 +238,26 @@ class MediaCasHidlTest : public ::testing::VtsHalHidlTargetTestBase { sp mMediaCas; sp mDescramblerBase; sp mCasListener; + typedef struct _OobInputTestParams { + const SubSample* subSamples; + uint32_t numSubSamples; + size_t imemSizeActual; + uint64_t imemOffset; + uint64_t imemSize; + uint64_t srcOffset; + uint64_t dstOffset; + } OobInputTestParams; ::testing::AssertionResult createCasPlugin(int32_t caSystemId); ::testing::AssertionResult openCasSession(std::vector* sessionId); - ::testing::AssertionResult descrambleTestInputBuffer(const sp& descrambler, - Status* descrambleStatus, - sp* hidlInMemory); + ::testing::AssertionResult descrambleTestInputBuffer( + const sp& descrambler, + Status* descrambleStatus, + sp* hidlInMemory); + ::testing::AssertionResult descrambleTestOobInput( + const sp& descrambler, + Status* descrambleStatus, + const OobInputTestParams& params); }; ::testing::AssertionResult MediaCasHidlTest::createCasPlugin(int32_t caSystemId) { @@ -301,7 +316,7 @@ class MediaCasHidlTest : public ::testing::VtsHalHidlTargetTestBase { } *inMemory = mem; - // build hidl_memory from memory heap + // build HidlMemory from memory heap ssize_t offset; size_t size; sp heap = mem->getMemory(&offset, &size); @@ -310,18 +325,14 @@ class MediaCasHidlTest : public ::testing::VtsHalHidlTargetTestBase { return ::testing::AssertionFailure(); } - native_handle_t* nativeHandle = native_handle_create(1, 0); - if (!nativeHandle) { - ALOGE("failed to create native handle!"); - return ::testing::AssertionFailure(); - } - nativeHandle->data[0] = heap->getHeapID(); - uint8_t* ipBuffer = static_cast(static_cast(mem->pointer())); memcpy(ipBuffer, kInBinaryBuffer, sizeof(kInBinaryBuffer)); + // hidlMemory is not to be passed out of scope! + sp hidlMemory = fromHeap(heap); + SharedBuffer srcBuffer = { - .heapBase = hidl_memory("ashmem", hidl_handle(nativeHandle), heap->getSize()), + .heapBase = *hidlMemory, .offset = (uint64_t) offset, .size = (uint64_t) size }; @@ -346,6 +357,68 @@ class MediaCasHidlTest : public ::testing::VtsHalHidlTargetTestBase { return ::testing::AssertionResult(returnVoid.isOk()); } +::testing::AssertionResult MediaCasHidlTest::descrambleTestOobInput( + const sp& descrambler, + Status* descrambleStatus, + const OobInputTestParams& params) { + hidl_vec hidlSubSamples; + hidlSubSamples.setToExternal( + const_cast(params.subSamples), params.numSubSamples, false /*own*/); + + sp dealer = new MemoryDealer(params.imemSizeActual, "vts-cas"); + if (nullptr == dealer.get()) { + ALOGE("couldn't get MemoryDealer!"); + return ::testing::AssertionFailure(); + } + + sp mem = dealer->allocate(params.imemSizeActual); + if (nullptr == mem.get()) { + ALOGE("couldn't allocate IMemory!"); + return ::testing::AssertionFailure(); + } + + // build HidlMemory from memory heap + ssize_t offset; + size_t size; + sp heap = mem->getMemory(&offset, &size); + if (nullptr == heap.get()) { + ALOGE("couldn't get memory heap!"); + return ::testing::AssertionFailure(); + } + + // hidlMemory is not to be passed out of scope! 
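+    // (The SharedBuffer constructed below only copies the hidl_memory descriptor;
+    // the sp<HidlMemory> and the heap behind it must stay alive until the
+    // descramble() transaction completes, which is why it is kept on the stack.)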
+ sp hidlMemory = fromHeap(heap); + + SharedBuffer srcBuffer = { + .heapBase = *hidlMemory, + .offset = (uint64_t) offset + params.imemOffset, + .size = (uint64_t) params.imemSize, + }; + + DestinationBuffer dstBuffer; + dstBuffer.type = BufferType::SHARED_MEMORY; + dstBuffer.nonsecureMemory = srcBuffer; + + uint32_t outBytes; + hidl_string detailedError; + auto returnVoid = descrambler->descramble( + ScramblingControl::EVENKEY /*2*/, hidlSubSamples, + srcBuffer, + params.srcOffset, + dstBuffer, + params.dstOffset, + [&](Status status, uint32_t bytesWritten, const hidl_string& detailedErr) { + *descrambleStatus = status; + outBytes = bytesWritten; + detailedError = detailedErr; + }); + if (!returnVoid.isOk() || *descrambleStatus != Status::OK) { + ALOGI("descramble failed, trans=%s, status=%d, outBytes=%u, error=%s", + returnVoid.description().c_str(), *descrambleStatus, outBytes, detailedError.c_str()); + } + return ::testing::AssertionResult(returnVoid.isOk()); +} + TEST_F(MediaCasHidlTest, EnumeratePlugins) { description("Test enumerate plugins"); hidl_vec descriptors; @@ -627,6 +700,153 @@ TEST_F(MediaCasHidlTest, TestClearKeyErrors) { EXPECT_FALSE(mDescramblerBase->requiresSecureDecoderComponent("bad")); } +TEST_F(MediaCasHidlTest, TestClearKeyOobFails) { + description("Test that oob descramble request fails with expected error"); + + ASSERT_TRUE(createCasPlugin(CLEAR_KEY_SYSTEM_ID)); + + auto returnStatus = mMediaCas->provision(hidl_string(PROVISION_STR)); + EXPECT_TRUE(returnStatus.isOk()); + EXPECT_EQ(Status::OK, returnStatus); + + std::vector sessionId; + ASSERT_TRUE(openCasSession(&sessionId)); + + returnStatus = mDescramblerBase->setMediaCasSession(sessionId); + EXPECT_TRUE(returnStatus.isOk()); + EXPECT_EQ(Status::OK, returnStatus); + + hidl_vec hidlEcm; + hidlEcm.setToExternal(const_cast(kEcmBinaryBuffer), sizeof(kEcmBinaryBuffer)); + returnStatus = mMediaCas->processEcm(sessionId, hidlEcm); + EXPECT_TRUE(returnStatus.isOk()); + EXPECT_EQ(Status::OK, returnStatus); + + sp descrambler = IDescrambler::castFrom(mDescramblerBase); + ASSERT_NE(nullptr, descrambler.get()); + + Status descrambleStatus = Status::OK; + + // test invalid src buffer offset + ASSERT_TRUE(descrambleTestOobInput( + descrambler, + &descrambleStatus, + { + .subSamples = kSubSamples, + .numSubSamples = sizeof(kSubSamples)/sizeof(SubSample), + .imemSizeActual = sizeof(kInBinaryBuffer), + .imemOffset = 0xcccccc, + .imemSize = sizeof(kInBinaryBuffer), + .srcOffset = 0, + .dstOffset = 0 + })); + EXPECT_EQ(Status::BAD_VALUE, descrambleStatus); + + // test invalid src buffer size + ASSERT_TRUE(descrambleTestOobInput( + descrambler, + &descrambleStatus, + { + .subSamples = kSubSamples, + .numSubSamples = sizeof(kSubSamples)/sizeof(SubSample), + .imemSizeActual = sizeof(kInBinaryBuffer), + .imemOffset = 0, + .imemSize = 0xcccccc, + .srcOffset = 0, + .dstOffset = 0 + })); + EXPECT_EQ(Status::BAD_VALUE, descrambleStatus); + + // test invalid src buffer size + ASSERT_TRUE(descrambleTestOobInput( + descrambler, + &descrambleStatus, + { + .subSamples = kSubSamples, + .numSubSamples = sizeof(kSubSamples)/sizeof(SubSample), + .imemSizeActual = sizeof(kInBinaryBuffer), + .imemOffset = 1, + .imemSize = (uint64_t)-1, + .srcOffset = 0, + .dstOffset = 0 + })); + EXPECT_EQ(Status::BAD_VALUE, descrambleStatus); + + // test invalid srcOffset + ASSERT_TRUE(descrambleTestOobInput( + descrambler, + &descrambleStatus, + { + .subSamples = kSubSamples, + .numSubSamples = sizeof(kSubSamples)/sizeof(SubSample), + .imemSizeActual = 
sizeof(kInBinaryBuffer), + .imemOffset = 0, + .imemSize = sizeof(kInBinaryBuffer), + .srcOffset = 0xcccccc, + .dstOffset = 0 + })); + EXPECT_EQ(Status::BAD_VALUE, descrambleStatus); + + // test invalid dstOffset + ASSERT_TRUE(descrambleTestOobInput( + descrambler, + &descrambleStatus, + { + .subSamples = kSubSamples, + .numSubSamples = sizeof(kSubSamples)/sizeof(SubSample), + .imemSizeActual = sizeof(kInBinaryBuffer), + .imemOffset = 0, + .imemSize = sizeof(kInBinaryBuffer), + .srcOffset = 0, + .dstOffset = 0xcccccc + })); + EXPECT_EQ(Status::BAD_VALUE, descrambleStatus); + + // test detection of oob subsample sizes + const SubSample invalidSubSamples1[] = + {{162, 0}, {0, 184}, {0, 0xdddddd}}; + + ASSERT_TRUE(descrambleTestOobInput( + descrambler, + &descrambleStatus, + { + .subSamples = invalidSubSamples1, + .numSubSamples = sizeof(invalidSubSamples1)/sizeof(SubSample), + .imemSizeActual = sizeof(kInBinaryBuffer), + .imemOffset = 0, + .imemSize = sizeof(kInBinaryBuffer), + .srcOffset = 0, + .dstOffset = 0 + })); + EXPECT_EQ(Status::BAD_VALUE, descrambleStatus); + + // test detection of overflowing subsample sizes + const SubSample invalidSubSamples2[] = + {{162, 0}, {0, 184}, {2, (uint32_t)-1}}; + + ASSERT_TRUE(descrambleTestOobInput( + descrambler, + &descrambleStatus, + { + .subSamples = invalidSubSamples2, + .numSubSamples = sizeof(invalidSubSamples2)/sizeof(SubSample), + .imemSizeActual = sizeof(kInBinaryBuffer), + .imemOffset = 0, + .imemSize = sizeof(kInBinaryBuffer), + .srcOffset = 0, + .dstOffset = 0 + })); + EXPECT_EQ(Status::BAD_VALUE, descrambleStatus); + + returnStatus = mDescramblerBase->release(); + EXPECT_TRUE(returnStatus.isOk()); + EXPECT_EQ(Status::OK, returnStatus); + + returnStatus = mMediaCas->release(); + EXPECT_TRUE(returnStatus.isOk()); + EXPECT_EQ(Status::OK, returnStatus); +} + } // anonymous namespace int main(int argc, char** argv) { diff --git a/compatibility_matrices/Android.mk b/compatibility_matrices/Android.mk index 6446beb3a4cca5019211184b512019216f12d7f5..6074119679e3ff4ffe47f13d1a963bb30a0b72d3 100644 --- a/compatibility_matrices/Android.mk +++ b/compatibility_matrices/Android.mk @@ -60,13 +60,13 @@ include $(BUILD_FRAMEWORK_COMPATIBILITY_MATRIX) include $(CLEAR_VARS) include $(LOCAL_PATH)/clear_vars.mk -LOCAL_MODULE := framework_compatibility_matrix.current.xml -LOCAL_MODULE_STEM := compatibility_matrix.current.xml +LOCAL_MODULE := framework_compatibility_matrix.3.xml +LOCAL_MODULE_STEM := compatibility_matrix.3.xml LOCAL_SRC_FILES := $(LOCAL_MODULE_STEM) LOCAL_KERNEL_CONFIG_DATA_PATHS := \ - 4.4.0:$(my_kernel_config_data)/android-4.4 \ - 4.9.0:$(my_kernel_config_data)/android-4.9 \ - 4.14.0:$(my_kernel_config_data)/android-4.14 \ + 4.4.107:$(my_kernel_config_data)/p/android-4.4 \ + 4.9.84:$(my_kernel_config_data)/p/android-4.9 \ + 4.14.42:$(my_kernel_config_data)/p/android-4.14 \ include $(BUILD_FRAMEWORK_COMPATIBILITY_MATRIX) @@ -126,7 +126,7 @@ LOCAL_REQUIRED_MODULES := \ framework_compatibility_matrix.legacy.xml \ framework_compatibility_matrix.1.xml \ framework_compatibility_matrix.2.xml \ - framework_compatibility_matrix.current.xml \ + framework_compatibility_matrix.3.xml \ framework_compatibility_matrix.device.xml LOCAL_GENERATED_SOURCES := $(call module-installed-files,$(LOCAL_REQUIRED_MODULES)) diff --git a/compatibility_matrices/compatibility_matrix.current.xml b/compatibility_matrices/compatibility_matrix.3.xml similarity index 77% rename from compatibility_matrices/compatibility_matrix.current.xml rename to 
compatibility_matrices/compatibility_matrix.3.xml index 5aecea02985e5f1753762ff7b2b5849e3d36707c..f271642fd1b68a00a976f000feb8b58341edc416 100644 --- a/compatibility_matrices/compatibility_matrix.current.xml +++ b/compatibility_matrices/compatibility_matrix.3.xml @@ -1,7 +1,7 @@ android.hardware.audio - 2.0 + 4.0 IDevicesFactory default @@ -9,12 +9,28 @@ android.hardware.audio.effect - 2.0 + 4.0 IEffectsFactory default + + android.hardware.authsecret + 1.0 + + IAuthSecret + default + + + + android.hardware.automotive.audiocontrol + 1.0 + + IAudioControl + default + + android.hardware.automotive.evs 1.0 @@ -47,6 +63,14 @@ default + + android.hardware.bluetooth.a2dp + 1.0 + + IBluetoothAudioOffload + default + + android.hardware.boot 1.0 @@ -63,12 +87,20 @@ default + + android.hardware.broadcastradio + 2.0 + + IBroadcastRadio + .* + + android.hardware.camera.provider 2.4 ICameraProvider - legacy/0 + [^/]+/[0-9]+ @@ -81,12 +113,20 @@ android.hardware.configstore - 1.0 + 1.0-1 ISurfaceFlingerConfigs default + + android.hardware.confirmationui + 1.0 + + IConfirmationUI + default + + android.hardware.contexthub 1.0 @@ -95,16 +135,28 @@ default - + android.hardware.drm 1.0 ICryptoFactory - default + .* IDrmFactory - default + .* + + + + android.hardware.drm + 1.1 + + ICryptoFactory + .* + + + IDrmFactory + .* @@ -125,7 +177,7 @@ android.hardware.gnss - 1.0 + 1.0-1 IGnss default @@ -141,7 +193,7 @@ android.hardware.graphics.composer - 2.1 + 2.1-2 IComposer default @@ -149,15 +201,15 @@ android.hardware.graphics.mapper - 2.0 + 2.0-1 IMapper default - + android.hardware.health - 1.0 + 2.0 IHealth default @@ -170,11 +222,20 @@ android.hardware.keymaster 3.0 + 4.0 IKeymasterDevice default + + android.hardware.keymaster + 4.0 + + IKeymasterDevice + strongbox + + android.hardware.light 2.0 @@ -203,9 +264,17 @@ default + + android.hardware.neuralnetworks + 1.0-1 + + IDevice + .* + + android.hardware.nfc - 1.0 + 1.1 INfc default @@ -221,7 +290,7 @@ android.hardware.power - 1.0-1 + 1.0-2 IPower default @@ -229,16 +298,26 @@ android.hardware.radio - 1.0-1 + 1.0-2 IRadio slot1 + slot2 + slot3 ISap slot1 + + android.hardware.radio.config + 1.0 + + IRadioConfig + default + + android.hardware.renderscript 1.0 @@ -252,7 +331,8 @@ 1.0 ISecureElement - eSE1 + eSE[1-9][0-9]* + SIM[1-9][0-9]* @@ -265,7 +345,7 @@ android.hardware.soundtrigger - 2.0 + 2.0-1 ISoundTriggerHw default @@ -319,9 +399,17 @@ default + + android.hardware.usb.gadget + 1.0 + + IUsbGadget + default + + android.hardware.vibrator - 1.0-1 + 1.0-2 IVibrator default @@ -345,12 +433,20 @@ android.hardware.wifi - 1.0-1 + 1.0-2 IWifi default + + android.hardware.wifi.hostapd + 1.0 + + IHostapd + default + + android.hardware.wifi.offload 1.0 @@ -361,7 +457,7 @@ android.hardware.wifi.supplicant - 1.0 + 1.0-1 ISupplicant default diff --git a/configstore/1.0/default/android.hardware.configstore@1.0-service.rc b/configstore/1.0/default/android.hardware.configstore@1.0-service.rc deleted file mode 100644 index 563d8541c3fe9221f785442cf65df99a797db4ad..0000000000000000000000000000000000000000 --- a/configstore/1.0/default/android.hardware.configstore@1.0-service.rc +++ /dev/null @@ -1,4 +0,0 @@ -service configstore-hal-1-0 /vendor/bin/hw/android.hardware.configstore@1.0-service - class hal animation - user system - group system diff --git a/configstore/1.1/Android.bp b/configstore/1.1/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..2b6e6fa3782931239211bb76981ce7a73b0d6f31 --- /dev/null +++ b/configstore/1.1/Android.bp @@ -0,0 
+1,23 @@ +// This file is autogenerated by hidl-gen -Landroidbp. + +hidl_interface { + name: "android.hardware.configstore@1.1", + root: "android.hardware", + vndk: { + enabled: true, + }, + srcs: [ + "types.hal", + "ISurfaceFlingerConfigs.hal", + ], + interfaces: [ + "android.hardware.configstore@1.0", + "android.hidl.base@1.0", + ], + types: [ + "DisplayOrientation", + "OptionalDisplayOrientation", + ], + gen_java: true, +} + diff --git a/configstore/1.1/ISurfaceFlingerConfigs.hal b/configstore/1.1/ISurfaceFlingerConfigs.hal new file mode 100644 index 0000000000000000000000000000000000000000..3a695941f9ecadae8e71be02776636bcb166129c --- /dev/null +++ b/configstore/1.1/ISurfaceFlingerConfigs.hal @@ -0,0 +1,28 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.1 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.1 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package android.hardware.configstore@1.1; + +import @1.0::ISurfaceFlingerConfigs; + +/** + * New revision of ISurfaceFlingerConfigs + */ +interface ISurfaceFlingerConfigs extends @1.0::ISurfaceFlingerConfigs { + /** + * Returns the orientation of the primary display device. + */ + primaryDisplayOrientation() generates (OptionalDisplayOrientation value); +}; diff --git a/configstore/1.0/default/Android.mk b/configstore/1.1/default/Android.mk similarity index 72% rename from configstore/1.0/default/Android.mk rename to configstore/1.1/default/Android.mk index 539741ee5908a2c587cf07af6276d91101772fe3..40f621b3addc53acf57404e78ad677acb2631cba 100644 --- a/configstore/1.0/default/Android.mk +++ b/configstore/1.1/default/Android.mk @@ -2,36 +2,37 @@ LOCAL_PATH := $(call my-dir) ################################################################################ include $(CLEAR_VARS) -LOCAL_MODULE := android.hardware.configstore@1.0-service +LOCAL_MODULE := android.hardware.configstore@1.1-service # seccomp is not required for coverage build. 
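# (The configstore seccomp policy module is only defined and required for arm64
# builds; see the TARGET_ARCH guard at the end of this makefile.)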
ifneq ($(NATIVE_COVERAGE),true) -LOCAL_REQUIRED_MODULES_arm64 := configstore@1.0.policy +LOCAL_REQUIRED_MODULES_arm64 := configstore@1.1.policy endif LOCAL_PROPRIETARY_MODULE := true LOCAL_MODULE_CLASS := EXECUTABLES LOCAL_MODULE_RELATIVE_PATH := hw -LOCAL_INIT_RC := android.hardware.configstore@1.0-service.rc +LOCAL_INIT_RC := android.hardware.configstore@1.1-service.rc LOCAL_SRC_FILES:= service.cpp include $(LOCAL_PATH)/surfaceflinger.mk LOCAL_SHARED_LIBRARIES := \ - android.hardware.configstore@1.0 \ libhidlbase \ libhidltransport \ libbase \ libhwminijail \ liblog \ libutils \ + android.hardware.configstore@1.0 \ + android.hardware.configstore@1.1 include $(BUILD_EXECUTABLE) # seccomp filter for configstore ifeq ($(TARGET_ARCH), $(filter $(TARGET_ARCH), arm64)) include $(CLEAR_VARS) -LOCAL_MODULE := configstore@1.0.policy +LOCAL_MODULE := configstore@1.1.policy LOCAL_MODULE_CLASS := ETC LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/etc/seccomp_policy -LOCAL_SRC_FILES := seccomp_policy/configstore@1.0-$(TARGET_ARCH).policy +LOCAL_SRC_FILES := seccomp_policy/configstore@1.1-$(TARGET_ARCH).policy include $(BUILD_PREBUILT) endif diff --git a/configstore/1.0/default/SurfaceFlingerConfigs.cpp b/configstore/1.1/default/SurfaceFlingerConfigs.cpp similarity index 70% rename from configstore/1.0/default/SurfaceFlingerConfigs.cpp rename to configstore/1.1/default/SurfaceFlingerConfigs.cpp index 3239274f9fa7dc1706b0d16baff006cd4d35e43b..da3081c12f98ba7121326b9f10d28ae827a867f1 100644 --- a/configstore/1.0/default/SurfaceFlingerConfigs.cpp +++ b/configstore/1.1/default/SurfaceFlingerConfigs.cpp @@ -1,5 +1,5 @@ /* - * Copyright (C) 2017 The Android Open Source Project + * Copyright (C) 2018 The Android Open Source Project * * Licensed under the Apache License, Version 2.1 (the "License"); * you may not use this file except in compliance with the License. @@ -16,10 +16,13 @@ #include "SurfaceFlingerConfigs.h" +#include +#include + namespace android { namespace hardware { namespace configstore { -namespace V1_0 { +namespace V1_1 { namespace implementation { // Methods from ::android::hardware::configstore::V1_0::ISurfaceFlingerConfigs @@ -139,10 +142,59 @@ Return SurfaceFlingerConfigs::startGraphicsAllocatorService( return Void(); } +// Methods from ::android::hardware::configstore::V1_1::ISurfaceFlingerConfigs +// follow. 
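+// PRIMARY_DISPLAY_ORIENTATION is injected at build time by surfaceflinger.mk
+// (-DPRIMARY_DISPLAY_ORIENTATION=$(SF_PRIMARY_DISPLAY_ORIENTATION)); when the
+// macro is not defined, the value is reported as unspecified and defaults to
+// ORIENTATION_0 below.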
+ +#ifdef PRIMARY_DISPLAY_ORIENTATION +static_assert(PRIMARY_DISPLAY_ORIENTATION == 0 || PRIMARY_DISPLAY_ORIENTATION == 90 || + PRIMARY_DISPLAY_ORIENTATION == 180 || PRIMARY_DISPLAY_ORIENTATION == 270, + "Primary display orientation must be 0/90/180/270"); +#endif + +Return SurfaceFlingerConfigs::primaryDisplayOrientation( + primaryDisplayOrientation_cb _hidl_cb) { + using ::android::hardware::configstore::V1_1::DisplayOrientation; + + bool specified = false; + DisplayOrientation value = DisplayOrientation::ORIENTATION_0; + + int orientation = 0; +#ifdef PRIMARY_DISPLAY_ORIENTATION + specified = true; + orientation = PRIMARY_DISPLAY_ORIENTATION; +#endif + + switch (orientation) { + case 0: { + value = DisplayOrientation::ORIENTATION_0; + break; + } + case 90: { + value = DisplayOrientation::ORIENTATION_90; + break; + } + case 180: { + value = DisplayOrientation::ORIENTATION_180; + break; + } + case 270: { + value = DisplayOrientation::ORIENTATION_270; + break; + } + default: { + // statically checked above -> memory corruption + LOG_ALWAYS_FATAL("Invalid orientation %d", orientation); + } + } + + _hidl_cb({specified, value}); + return Void(); +} + // Methods from ::android::hidl::base::V1_0::IBase follow. } // namespace implementation -} // namespace V1_0 +} // namespace V1_1 } // namespace configstore } // namespace hardware } // namespace android diff --git a/configstore/1.0/default/SurfaceFlingerConfigs.h b/configstore/1.1/default/SurfaceFlingerConfigs.h similarity index 74% rename from configstore/1.0/default/SurfaceFlingerConfigs.h rename to configstore/1.1/default/SurfaceFlingerConfigs.h index 32e5fc39280f267929441d003aaeb733d6775471..3714e81697f67b64d263bdfcc27bacc1b280d48b 100644 --- a/configstore/1.0/default/SurfaceFlingerConfigs.h +++ b/configstore/1.1/default/SurfaceFlingerConfigs.h @@ -1,17 +1,17 @@ -#ifndef ANDROID_HARDWARE_CONFIGSTORE_V1_0_SURFACEFLINGERCONFIGS_H -#define ANDROID_HARDWARE_CONFIGSTORE_V1_0_SURFACEFLINGERCONFIGS_H +#ifndef ANDROID_HARDWARE_CONFIGSTORE_V1_1_SURFACEFLINGERCONFIGS_H +#define ANDROID_HARDWARE_CONFIGSTORE_V1_1_SURFACEFLINGERCONFIGS_H -#include +#include #include #include namespace android { namespace hardware { namespace configstore { -namespace V1_0 { +namespace V1_1 { namespace implementation { -using ::android::hardware::configstore::V1_0::ISurfaceFlingerConfigs; +using ::android::hardware::configstore::V1_1::ISurfaceFlingerConfigs; using ::android::hardware::Return; using ::android::hardware::Void; using ::android::sp; @@ -32,13 +32,17 @@ struct SurfaceFlingerConfigs : public ISurfaceFlingerConfigs { Return maxFrameBufferAcquiredBuffers(maxFrameBufferAcquiredBuffers_cb _hidl_cb) override; Return startGraphicsAllocatorService(startGraphicsAllocatorService_cb _hidl_cb) override; + // Methods from + // ::android::hardware::configstore::V1_1::ISurfaceFlingerConfigs follow. + Return primaryDisplayOrientation(primaryDisplayOrientation_cb _hidl_cb) override; + // Methods from ::android::hidl::base::V1_0::IBase follow. 
}; } // namespace implementation -} // namespace V1_0 +} // namespace V1_1 } // namespace configstore } // namespace hardware } // namespace android -#endif // ANDROID_HARDWARE_CONFIGSTORE_V1_0_SURFACEFLINGERCONFIGS_H +#endif // ANDROID_HARDWARE_CONFIGSTORE_V1_1_SURFACEFLINGERCONFIGS_H diff --git a/configstore/1.1/default/android.hardware.configstore@1.1-service.rc b/configstore/1.1/default/android.hardware.configstore@1.1-service.rc new file mode 100644 index 0000000000000000000000000000000000000000..105678acb2f6b3622c760e88e169a42cfdcb771f --- /dev/null +++ b/configstore/1.1/default/android.hardware.configstore@1.1-service.rc @@ -0,0 +1,4 @@ +service vendor.configstore-hal /vendor/bin/hw/android.hardware.configstore@1.1-service + class hal animation + user system + group system diff --git a/configstore/1.0/default/seccomp_policy/configstore@1.0-arm64.policy b/configstore/1.1/default/seccomp_policy/configstore@1.1-arm64.policy similarity index 87% rename from configstore/1.0/default/seccomp_policy/configstore@1.0-arm64.policy rename to configstore/1.1/default/seccomp_policy/configstore@1.1-arm64.policy index 62d7e1daf7f5c5bd12a2783a36bd4722172870e8..937fddd67b160e74d4eb0869fc6fc97f7dced0be 100644 --- a/configstore/1.0/default/seccomp_policy/configstore@1.0-arm64.policy +++ b/configstore/1.1/default/seccomp_policy/configstore@1.1-arm64.policy @@ -17,7 +17,9 @@ futex: 1 ioctl: arg1 == 0xc0306201 # prctl: arg0 == PR_SET_NAME || arg0 == PR_SET_VMA || arg0 == PR_SET_TIMERSLACK # || arg0 == PR_GET_NO_NEW_PRIVS # used by crash_dump -prctl: arg0 == 15 || arg0 == 0x53564d41 || arg0 == 29 || arg0 == 39 +# prctl: arg0 == 15 || arg0 == 0x53564d41 || arg0 == 29 || arg0 == 39 +# TODO(b/68162846) reduce scope of prctl() based on arguments +prctl: 1 openat: 1 mmap: 1 mprotect: 1 @@ -28,6 +30,7 @@ faccessat: 1 write: 1 fstat: 1 clone: 1 +sched_setscheduler: 1 munmap: 1 lseek: 1 sigaltstack: 1 @@ -39,6 +42,7 @@ exit_group: 1 rt_sigreturn: 1 getrlimit: 1 madvise: 1 +getdents64: 1 clock_gettime: 1 getpid: 1 diff --git a/configstore/1.0/default/service.cpp b/configstore/1.1/default/service.cpp similarity index 77% rename from configstore/1.0/default/service.cpp rename to configstore/1.1/default/service.cpp index c9c81a07f556363b3e6ca7ab94b7d112b763b76a..3b4e7745ee21fef663b068397a8a1f35914edfd2 100644 --- a/configstore/1.0/default/service.cpp +++ b/configstore/1.1/default/service.cpp @@ -1,5 +1,5 @@ /* - * Copyright (C) 2017 The Android Open Source Project + * Copyright (C) 2018 The Android Open Source Project * * Licensed under the Apache License, Version 2.1 (the "License"); * you may not use this file except in compliance with the License. @@ -14,9 +14,9 @@ * limitations under the License. 
*/ -#define LOG_TAG "android.hardware.configstore@1.0-service" +#define LOG_TAG "android.hardware.configstore@1.1-service" -#include +#include #include #include @@ -24,8 +24,8 @@ using android::hardware::configureRpcThreadpool; using android::hardware::joinRpcThreadpool; -using android::hardware::configstore::V1_0::ISurfaceFlingerConfigs; -using android::hardware::configstore::V1_0::implementation::SurfaceFlingerConfigs; +using android::hardware::configstore::V1_1::ISurfaceFlingerConfigs; +using android::hardware::configstore::V1_1::implementation::SurfaceFlingerConfigs; using android::hardware::SetupMinijail; using android::sp; using android::status_t; @@ -34,7 +34,7 @@ using android::OK; int main() { configureRpcThreadpool(10, true); - SetupMinijail("/vendor/etc/seccomp_policy/configstore@1.0.policy"); + SetupMinijail("/vendor/etc/seccomp_policy/configstore@1.1.policy"); sp surfaceFlingerConfigs = new SurfaceFlingerConfigs; status_t status = surfaceFlingerConfigs->registerAsService(); diff --git a/configstore/1.0/default/surfaceflinger.mk b/configstore/1.1/default/surfaceflinger.mk similarity index 91% rename from configstore/1.0/default/surfaceflinger.mk rename to configstore/1.1/default/surfaceflinger.mk index 19876076245f0c7e2a5125db6b13cbfb54e961c7..35922ebb096ac81878031c1f96b68cda1b61e090 100644 --- a/configstore/1.0/default/surfaceflinger.mk +++ b/configstore/1.1/default/surfaceflinger.mk @@ -50,3 +50,7 @@ endif ifneq ($(SF_START_GRAPHICS_ALLOCATOR_SERVICE),) LOCAL_CFLAGS += -DSTART_GRAPHICS_ALLOCATOR_SERVICE endif + +ifneq ($(SF_PRIMARY_DISPLAY_ORIENTATION),) + LOCAL_CFLAGS += -DPRIMARY_DISPLAY_ORIENTATION=$(SF_PRIMARY_DISPLAY_ORIENTATION) +endif diff --git a/configstore/1.1/types.hal b/configstore/1.1/types.hal new file mode 100644 index 0000000000000000000000000000000000000000..adc57476acd19f05375a5a5db0ed73355a577f4a --- /dev/null +++ b/configstore/1.1/types.hal @@ -0,0 +1,31 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package android.hardware.configstore@1.1; + +/** + * Orientation in degrees. 
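+ * Only the four cardinal rotations of the primary panel (0, 90, 180 and 270
+ * degrees) can be expressed.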
+ */ +enum DisplayOrientation : uint8_t { + ORIENTATION_0, + ORIENTATION_90, + ORIENTATION_180, + ORIENTATION_270, +}; + +struct OptionalDisplayOrientation { + bool specified; + DisplayOrientation value; +}; diff --git a/configstore/utils/Android.bp b/configstore/utils/Android.bp index 0d626a50b21b25acd8b4e1140e99d4e5b93f9d0a..178f245fd673d1b3a673bfb7feffbd214feb1f43 100644 --- a/configstore/utils/Android.bp +++ b/configstore/utils/Android.bp @@ -29,11 +29,13 @@ cc_library_shared { shared_libs: [ "android.hardware.configstore@1.0", + "android.hardware.configstore@1.1", "libbase", "libhidlbase" ], export_shared_lib_headers: [ "android.hardware.configstore@1.0", + "android.hardware.configstore@1.1", "libbase", "libhidlbase" ], diff --git a/configstore/utils/include/configstore/Utils.h b/configstore/utils/include/configstore/Utils.h index b107a207b3c789acaf6dff56525ab5c7442868b0..e04f57df123c0ac2a116cf7cf0d1fa6d43791be8 100644 --- a/configstore/utils/include/configstore/Utils.h +++ b/configstore/utils/include/configstore/Utils.h @@ -18,6 +18,7 @@ #define ANDROID_HARDWARE_CONFIGSTORE_UTILS_H #include +#include #include #include @@ -34,13 +35,20 @@ void logAlwaysError(const std::string& message); } // namespace details namespace configstore { -// import types from V1_0 +// import types from configstore +using ::android::hardware::configstore::V1_1::DisplayOrientation; using ::android::hardware::configstore::V1_0::OptionalBool; using ::android::hardware::configstore::V1_0::OptionalInt32; using ::android::hardware::configstore::V1_0::OptionalUInt32; using ::android::hardware::configstore::V1_0::OptionalInt64; using ::android::hardware::configstore::V1_0::OptionalUInt64; using ::android::hardware::configstore::V1_0::OptionalString; +using ::android::hardware::configstore::V1_1::OptionalDisplayOrientation; + +static inline std::ostream& operator<<(std::ostream& os, DisplayOrientation orientation) { + os << ::android::hardware::configstore::V1_1::toString(orientation); + return os; +} // a function to retrieve and cache the service handle // for a particular interface @@ -141,6 +149,12 @@ std::string getString(const std::string &defValue) { return get(defValue); } +template (I::*func)( + std::function)> +DisplayOrientation getDisplayOrientation(DisplayOrientation defValue) { + return get(defValue); +} + } // namespace configstore } // namespace hardware } // namespace android diff --git a/confirmationui/1.0/Android.bp b/confirmationui/1.0/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..21acecba7ecb4a24985e9d0319548c8c42055de7 --- /dev/null +++ b/confirmationui/1.0/Android.bp @@ -0,0 +1,27 @@ +// This file is autogenerated by hidl-gen -Landroidbp. 
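+// The confirmationui interface reuses HardwareAuthToken from keymaster@4.0,
+// which is why that package appears in the interfaces list below.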
+ +hidl_interface { + name: "android.hardware.confirmationui@1.0", + root: "android.hardware", + vndk: { + enabled: true, + }, + srcs: [ + "types.hal", + "IConfirmationResultCallback.hal", + "IConfirmationUI.hal", + ], + interfaces: [ + "android.hardware.keymaster@4.0", + "android.hidl.base@1.0", + ], + types: [ + "MessageSize", + "ResponseCode", + "TestKeyBits", + "TestModeCommands", + "UIOption", + ], + gen_java: false, +} + diff --git a/confirmationui/1.0/IConfirmationResultCallback.hal b/confirmationui/1.0/IConfirmationResultCallback.hal new file mode 100644 index 0000000000000000000000000000000000000000..03a10cfe6cdaa157bf03ffb20165b72b480e0470 --- /dev/null +++ b/confirmationui/1.0/IConfirmationResultCallback.hal @@ -0,0 +1,61 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.confirmationui@1.0; + +/** + * Callback interface passed to IConfirmationUI::promptUserConfirmation(). + * Informs the caller about the result of the prompt operation. + */ +interface IConfirmationResultCallback { + /** + * This callback is called by the confirmation provider when it stops prompting the user. + * Iff the user has confirmed the prompted text, error is ErrorCode::OK and the + * parameters formattedMessage and confirmationToken hold the values needed to request + * a signature from keymaster. + * In all other cases formattedMessage and confirmationToken must be of length 0. + * + * @param error - OK: IFF the user has confirmed the prompt. + * - Canceled: If the user has pressed the cancel button. + * - Aborted: If IConfirmationUI::abort() was called. + * - SystemError: If an unexpected System error occurred that prevented the TUI + * from being shut down gracefully. + * @param formattedMessage holds the prompt text and extra data. + * The message is CBOR (RFC 7049) encoded and has the following format: + * CBOR_MAP{ "prompt", , "extra", } + * The message is a CBOR encoded map (type 5) with the keys + * "prompt" and "extra". The keys are encoded as CBOR text string + * (type 3). The value is encoded as CBOR text string + * (type 3), and the value is encoded as CBOR byte string + * (type 2). The map must have exactly one key value pair for each of + * the keys "prompt" and "extra". Other keys are not allowed. + * The value of "prompt" is given by the proptText argument to + * IConfirmationUI::promptUserConfirmation and must not be modified + * by the implementation. + * The value of "extra" is given by the extraData argument to + * IConfirmationUI::promptUserConfirmation and must not be modified + * or interpreted by the implementation. + * + * @param confirmationToken a 32-byte HMAC-SHA256 value, computed over + * "confirmation token" || + * i.e. the literal UTF-8 encoded string "confirmation token", without + * the "", concatenated with the formatted message as returned in the + * formattedMessage argument. The HMAC is keyed with a 256-bit secret + * which is shared with Keymaster. 
In test mode the test key MUST be + * used (see types.hal TestModeCommands and TestKeyBits). + */ + result(ResponseCode error, vec formattedMessage, vec confirmationToken); +}; diff --git a/confirmationui/1.0/IConfirmationUI.hal b/confirmationui/1.0/IConfirmationUI.hal new file mode 100644 index 0000000000000000000000000000000000000000..db8055d16c5ce4578b586bc4577436d7f24839d3 --- /dev/null +++ b/confirmationui/1.0/IConfirmationUI.hal @@ -0,0 +1,81 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.confirmationui@1.0; + +import android.hardware.keymaster@4.0::HardwareAuthToken; +import IConfirmationResultCallback; + +interface IConfirmationUI { + /** + * Asynchronously initiates a confirmation UI dialog prompting the user to confirm a given text. + * The TUI prompt must be implemented in such a way that a positive response indicates with + * high confidence that a user has seen the given prompt text even if the Android framework + * including the kernel was compromised. + * + * @param resultCB Implementation of IResultCallback. Used by the implementation to report + * the result of the current pending user prompt. + * + * @param promptText UTF-8 encoded string which is to be presented to the user. + * + * @param extraData A binary blob that must be included in the formatted output message as is. + * It is opaque to the implementation. Implementations must neither interpret + * nor modify the content. + * + * @param locale String specifying the locale that must be used by the TUI dialog. The string + * is an IETF BCP 47 tag. + * + * @param uiOptions A set of uiOptions manipulating how the confirmation prompt is displayed. + * Refer to UIOption in types.hal for possible options. + * + * @return error - OK: IFF the dialog was successfully started. In this case, and only in this + * case, the implementation must, eventually, call the callback to + * indicate completion. + * - OperationPending: Is returned when the confirmation provider is currently + * in use. + * - SystemError: An error occurred trying to communicate with the confirmation + * provider (e.g. trusted app). + * - UIError: The confirmation provider encountered an issue with displaying + * the prompt text to the user. + */ + promptUserConfirmation(IConfirmationResultCallback resultCB, string promptText, + vec extraData, string locale, vec uiOptions) + generates(ResponseCode error); + + /** + * DeliverSecureInput is used by the framework to deliver a secure input event to the + * confirmation provider. + * + * VTS test mode: + * This function can be used to test certain code paths non-interactively. See TestModeCommands + * in types.hal for details. + * + * @param secureInputToken An authentication token as generated by Android authentication + * providers. + * + * @return error - Ignored: Unless used for testing (See TestModeCommands). 
+ */ + deliverSecureInputEvent(HardwareAuthToken secureInputToken) + generates(ResponseCode error); + + /** + * Aborts a pending user prompt. This allows the framework to gracefully end a TUI dialog. + * If a TUI operation was pending the corresponding call back is informed with + * ErrorCode::Aborted. + */ + abort(); +}; + diff --git a/confirmationui/1.0/default/Android.bp b/confirmationui/1.0/default/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..10018e8720fe95fcf774371e82a05dd11f1e62cc --- /dev/null +++ b/confirmationui/1.0/default/Android.bp @@ -0,0 +1,43 @@ +// +// Copyright (C) 2017 The Android Open Source Project +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +cc_binary { + name: "android.hardware.confirmationui@1.0-service", + init_rc: ["android.hardware.confirmationui@1.0-service.rc"], + vendor: true, + relative_install_path: "hw", + cflags: [ + "-Wall", + "-Wextra", + "-Werror", + ], + srcs: [ + "service.cpp", + "ConfirmationUI.cpp", + "PlatformSpecifics.cpp", + ], + shared_libs: [ + "android.hardware.confirmationui@1.0", + "android.hardware.confirmationui-support-lib", + "android.hardware.keymaster@4.0", + "libcrypto", + "libbase", + "libhidlbase", + "libhidltransport", + "liblog", + "libutils", + ], +} \ No newline at end of file diff --git a/confirmationui/1.0/default/ConfirmationUI.cpp b/confirmationui/1.0/default/ConfirmationUI.cpp new file mode 100644 index 0000000000000000000000000000000000000000..41e03ce80af25213b3a51999790a2751a0a05e4f --- /dev/null +++ b/confirmationui/1.0/default/ConfirmationUI.cpp @@ -0,0 +1,71 @@ +/* +** +** Copyright 2017, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#include "ConfirmationUI.h" + +#include "PlatformSpecifics.h" + +#include +#include + +#include + +#include + +namespace android { +namespace hardware { +namespace confirmationui { +namespace V1_0 { +namespace implementation { + +using ::android::hardware::confirmationui::V1_0::generic::Operation; +using ::android::hardware::keymaster::V4_0::HardwareAuthToken; + +uint8_t hmacKey[32]; + +// Methods from ::android::hardware::confirmationui::V1_0::IConfirmationUI follow. 
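+// Note: this default implementation only drives the generic Operation state
+// machine (MyOperation::get()); a real device would launch its trusted UI at
+// the point marked below before calling setPending().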
+Return ConfirmationUI::promptUserConfirmation( + const sp& resultCB, const hidl_string& promptText, + const hidl_vec& extraData, const hidl_string& locale, + const hidl_vec& uiOptions) { + auto& operation = MyOperation::get(); + auto result = operation.init(resultCB, promptText, extraData, locale, uiOptions); + if (result == ResponseCode::OK) { + // This is where the implementation starts the UI and then calls setPending() on success. + operation.setPending(); + } + return result; +} + +Return ConfirmationUI::deliverSecureInputEvent( + const HardwareAuthToken& secureInputToken) { + auto& operation = MyOperation::get(); + return operation.deliverSecureInputEvent(secureInputToken); +} + +Return ConfirmationUI::abort() { + auto& operation = MyOperation::get(); + operation.abort(); + operation.finalize(hmacKey); + return Void(); +} + +} // namespace implementation +} // namespace V1_0 +} // namespace confirmationui +} // namespace hardware +} // namespace android diff --git a/confirmationui/1.0/default/ConfirmationUI.h b/confirmationui/1.0/default/ConfirmationUI.h new file mode 100644 index 0000000000000000000000000000000000000000..e9e7f993b88eafec496d76fe93b870c21ceeaba6 --- /dev/null +++ b/confirmationui/1.0/default/ConfirmationUI.h @@ -0,0 +1,57 @@ +/* +** +** Copyright 2017, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef ANDROID_HARDWARE_CONFIRMATIONUI_V1_0_CONFIRMATIONUI_H +#define ANDROID_HARDWARE_CONFIRMATIONUI_V1_0_CONFIRMATIONUI_H + +#include +#include +#include + +namespace android { +namespace hardware { +namespace confirmationui { +namespace V1_0 { +namespace implementation { + +using ::android::hardware::hidl_array; +using ::android::hardware::hidl_memory; +using ::android::hardware::hidl_string; +using ::android::hardware::hidl_vec; +using ::android::hardware::Return; +using ::android::hardware::Void; +using ::android::sp; + +struct ConfirmationUI : public IConfirmationUI { + // Methods from ::android::hardware::confirmationui::V1_0::IConfirmationUI follow.
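+ // Each method below forwards to the process-wide MyOperation singleton; see ConfirmationUI.cpp.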
+ Return promptUserConfirmation(const sp& resultCB, + const hidl_string& promptText, + const hidl_vec& extraData, + const hidl_string& locale, + const hidl_vec& uiOptions) override; + Return deliverSecureInputEvent( + const ::android::hardware::keymaster::V4_0::HardwareAuthToken& secureInputToken) override; + Return abort() override; +}; + +} // namespace implementation +} // namespace V1_0 +} // namespace confirmationui +} // namespace hardware +} // namespace android + +#endif // ANDROID_HARDWARE_CONFIRMATIONUI_V1_0_CONFIRMATIONUI_H diff --git a/confirmationui/1.0/default/OWNERS b/confirmationui/1.0/default/OWNERS new file mode 100644 index 0000000000000000000000000000000000000000..335660da3be9ef092d3999ffdf1a5a73920a5d3d --- /dev/null +++ b/confirmationui/1.0/default/OWNERS @@ -0,0 +1,2 @@ +jdanis@google.com +swillden@google.com diff --git a/confirmationui/1.0/default/PlatformSpecifics.cpp b/confirmationui/1.0/default/PlatformSpecifics.cpp new file mode 100644 index 0000000000000000000000000000000000000000..03d61654712a1da3b503733a4c5042fe379b51c0 --- /dev/null +++ b/confirmationui/1.0/default/PlatformSpecifics.cpp @@ -0,0 +1,62 @@ +/* +** +** Copyright 2017, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#include "PlatformSpecifics.h" + +#include +#include +#include + +namespace android { +namespace hardware { +namespace confirmationui { +namespace V1_0 { +namespace implementation { + +MonotonicClockTimeStamper::TimeStamp MonotonicClockTimeStamper::now() { + timespec ts; + if (!clock_gettime(CLOCK_BOOTTIME, &ts)) { + return TimeStamp(ts.tv_sec * UINT64_C(1000) + ts.tv_nsec / UINT64_C(1000000)); + } else { + return {}; + } +} + +support::NullOr HMacImplementation::hmac256( + const support::auth_token_key_t& key, std::initializer_list buffers) { + HMAC_CTX hmacCtx; + HMAC_CTX_init(&hmacCtx); + if (!HMAC_Init_ex(&hmacCtx, key.data(), key.size(), EVP_sha256(), nullptr)) { + return {}; + } + for (auto& buffer : buffers) { + if (!HMAC_Update(&hmacCtx, buffer.data(), buffer.size())) { + return {}; + } + } + support::hmac_t result; + if (!HMAC_Final(&hmacCtx, result.data(), nullptr)) { + return {}; + } + return result; +} + +} // namespace implementation +} // namespace V1_0 +} // namespace confirmationui +} // namespace hardware +} // namespace android diff --git a/confirmationui/1.0/default/PlatformSpecifics.h b/confirmationui/1.0/default/PlatformSpecifics.h new file mode 100644 index 0000000000000000000000000000000000000000..29f299c7d6aedba2b9e04f0e4e39d246e1368f98 --- /dev/null +++ b/confirmationui/1.0/default/PlatformSpecifics.h @@ -0,0 +1,71 @@ +/* +** +** Copyright 2017, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. 
+** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef CONFIRMATIONUI_1_0_DEFAULT_PLATFORMSPECIFICS_H_ +#define CONFIRMATIONUI_1_0_DEFAULT_PLATFORMSPECIFICS_H_ + +#include +#include + +#include +#include +#include + +namespace android { +namespace hardware { +namespace confirmationui { +namespace V1_0 { +namespace implementation { + +struct MonotonicClockTimeStamper { + class TimeStamp { + public: + explicit TimeStamp(uint64_t ts) : timestamp_(ts), ok_(true) {} + TimeStamp() : timestamp_(0), ok_(false) {} + bool isOk() const { return ok_; } + operator const uint64_t() const { return timestamp_; } + + private: + uint64_t timestamp_; + bool ok_; + }; + static TimeStamp now(); +}; + +class HMacImplementation { + public: + static support::NullOr hmac256( + const support::auth_token_key_t& key, + std::initializer_list buffers); +}; + +class MyOperation : public generic::Operation, + MonotonicClockTimeStamper, HMacImplementation> { + public: + static MyOperation& get() { + static MyOperation op; + return op; + } +}; + +} // namespace implementation +} // namespace V1_0 +} // namespace confirmationui +} // namespace hardware +} // namespace android + +#endif // CONFIRMATIONUI_1_0_DEFAULT_PLATFORMSPECIFICS_H_ diff --git a/confirmationui/1.0/default/android.hardware.confirmationui@1.0-service.rc b/confirmationui/1.0/default/android.hardware.confirmationui@1.0-service.rc new file mode 100644 index 0000000000000000000000000000000000000000..c04e55eb67b731cdeef4303b81f1cadea74c6aeb --- /dev/null +++ b/confirmationui/1.0/default/android.hardware.confirmationui@1.0-service.rc @@ -0,0 +1,4 @@ +service vendor.confirmationui-1-0 /vendor/bin/hw/android.hardware.confirmationui@1.0-service + class hal + user nobody + group drmrpc diff --git a/confirmationui/1.0/default/service.cpp b/confirmationui/1.0/default/service.cpp new file mode 100644 index 0000000000000000000000000000000000000000..39f3f62b003c5d1f3ed8ee3e1f1fb4032f677d32 --- /dev/null +++ b/confirmationui/1.0/default/service.cpp @@ -0,0 +1,39 @@ +/* +** +** Copyright 2017, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#define LOG_TAG "android.hardware.confirmationui@1.0-service" + +#include +#include + +#include "ConfirmationUI.h" + +using android::hardware::joinRpcThreadpool; + +using android::hardware::confirmationui::V1_0::implementation::ConfirmationUI; + +int main() { + ::android::hardware::configureRpcThreadpool(1, true /*willJoinThreadpool*/); + auto confirmationui = new ConfirmationUI(); + auto status = confirmationui->registerAsService(); + if (status != android::OK) { + LOG(FATAL) << "Could not register service for ConfirmationUI 1.0 (" << status << ")"; + } + + joinRpcThreadpool(); + return -1; // Should never get here. +} diff --git a/confirmationui/1.0/types.hal b/confirmationui/1.0/types.hal new file mode 100644 index 0000000000000000000000000000000000000000..fd7ae6afdd10e2555e8166e4912b1e23ffcca860 --- /dev/null +++ b/confirmationui/1.0/types.hal @@ -0,0 +1,104 @@ +/* + * Copyright (C) 2017 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package android.hardware.confirmationui@1.0; + +/** + * UI modification options. + */ +enum UIOption : uint32_t { + /** Accessibility: Requests color inverted style. */ + AccessibilityInverted = 0, + /** Accessibility: Requests magnified style. */ + AccessibilityMagnified = 1, +}; + +/** + * Codes returned by ConfirmationUI API calls. + */ +enum ResponseCode : uint32_t { + /** API call succeeded or the user gave approval (result callback). */ + OK = 0, + /** The user canceled the TUI (result callback). */ + Canceled = 1, + /** IConfirmationUI::abort() was called (result callback). */ + Aborted = 2, + /** Cannot start another prompt. */ + OperationPending = 3, + /** IConfirmationUI::deliverSecureInputEvent call was ignored. */ + Ignored = 4, + /** An unexpected system error occurred. */ + SystemError = 5, + /** Returned by an unimplemented API call. */ + Unimplemented = 6, + /** + * This is returned when an error is diagnosed that should have been + * caught by earlier input sanitization. Should never be seen in production. + */ + Unexpected = 7, + /** General UI error. */ + UIError = 0x10000, + UIErrorMissingGlyph, + /** + * The implementation must return this error code on promptUserConfirmation if the + * resulting formatted message does not fit into MessageSize::MAX bytes. It is + * advised that the implementation format the message upon receiving this API call so + * that it can diagnose this condition. + */ + UIErrorMessageTooLong, + UIErrorMalformedUTF8Encoding, +}; + +/** + * This defines the maximum message size. This indirectly limits the size of the prompt text + * and the extra data that can be passed to the confirmation UI. The prompt text and extra data + * must fit into this size including CBOR header information. + */ +enum MessageSize : uint32_t { MAX = 0x1800 }; + +/** + * The test key is a 32-byte word with all bytes set to TestKeyBits::BYTE. + */ +enum TestKeyBits: uint8_t { BYTE = 0xA5 }; + +/** + * Test mode commands.
+ * + * IConfirmationUI::deliverSecureInputEvent can be used to test certain code paths. + * To that end, the caller passes an auth token that has an HMAC keyed with the test key + * (see TestKeyBits in types.hal). Implementations first check the HMAC against the test key. + * If the test key produces a matching HMAC, the implementation evaluates the challenge field + * of the auth token against the values defined in TestModeCommands. + * If the command indicates that a confirmation token is to be generated, the test key MUST be used + * to generate this confirmation token. + * + * See the command codes below for individual test command descriptions. + */ +enum TestModeCommands: uint64_t { + /** + * Simulates the user pressing the OK button on the UI. If no operation is pending, + * ResponseCode::Ignored must be returned. A pending operation is finalized successfully + * (see IConfirmationResultCallback::result); however, the test key (see TestKeyBits) MUST be + * used to generate the confirmation token. + */ + OK_EVENT = 0, + /** + * Simulates the user pressing the CANCEL button on the UI. If no operation is pending, + * ResponseCode::Ignored must be returned. A pending operation is finalized as specified in + * IConfirmationResultCallback.hal. + */ + CANCEL_EVENT = 1, +}; diff --git a/confirmationui/1.0/vts/OWNERS b/confirmationui/1.0/vts/OWNERS new file mode 100644 index 0000000000000000000000000000000000000000..e7aa8b4ba2dd384e2ae008943ffb3386908fa3b9 --- /dev/null +++ b/confirmationui/1.0/vts/OWNERS @@ -0,0 +1,3 @@ +jdanis@google.com +swillden@google.com +yim@google.com diff --git a/confirmationui/1.0/vts/functional/Android.bp b/confirmationui/1.0/vts/functional/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..823e035afdadbd59f7a39ee000fddacee41496a9 --- /dev/null +++ b/confirmationui/1.0/vts/functional/Android.bp @@ -0,0 +1,30 @@ +// +// Copyright (C) 2018 The Android Open Source Project +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +cc_test { + name: "VtsHalConfirmationUIV1_0TargetTest", + defaults: ["VtsHalTargetTestDefaults"], + srcs: [ + "VtsHalConfirmationUIV1_0TargetTest.cpp", + ], + static_libs: [ + "android.hardware.confirmationui@1.0", + "android.hardware.keymaster@4.0", + "libcrypto", + "libcn-cbor", + "android.hardware.confirmationui-support-lib", + ], +} diff --git a/confirmationui/1.0/vts/functional/VtsHalConfirmationUIV1_0TargetTest.cpp b/confirmationui/1.0/vts/functional/VtsHalConfirmationUIV1_0TargetTest.cpp new file mode 100644 index 0000000000000000000000000000000000000000..278d1f444016662234cc5dcb4135ffbe3dc8b6d8 --- /dev/null +++ b/confirmationui/1.0/vts/functional/VtsHalConfirmationUIV1_0TargetTest.cpp @@ -0,0 +1,423 @@ +/* + * Copyright (C) 2018 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "ConfirmationIOHidlHalTest" +#include + +#include +#include +#include + +#include +#include +#include +#include + +#include +#include + +#include +#include + +#include + +using ::android::sp; + +using ::std::string; + +namespace android { +namespace hardware { + +namespace confirmationui { +namespace V1_0 { + +namespace test { +namespace { +const support::auth_token_key_t testKey(static_cast(TestKeyBits::BYTE)); + +class HMacImplementation { + public: + static support::NullOr hmac256( + const support::auth_token_key_t& key, + std::initializer_list buffers) { + HMAC_CTX hmacCtx; + HMAC_CTX_init(&hmacCtx); + if (!HMAC_Init_ex(&hmacCtx, key.data(), key.size(), EVP_sha256(), nullptr)) { + return {}; + } + for (auto& buffer : buffers) { + if (!HMAC_Update(&hmacCtx, buffer.data(), buffer.size())) { + return {}; + } + } + support::hmac_t result; + if (!HMAC_Final(&hmacCtx, result.data(), nullptr)) { + return {}; + } + return result; + } +}; + +using HMacer = support::HMac; + +template +hidl_vec testHMAC(const Data&... data) { + auto hmac = HMacer::hmac256(testKey, data...); + if (!hmac.isOk()) { + EXPECT_TRUE(false) << "Failed to compute test hmac. This is a self-test error."; + return {}; + } + hidl_vec result(hmac.value().size()); + copy(hmac.value().data(), hmac.value().data() + hmac.value().size(), result.data()); + return result; +} + +using ::android::hardware::keymaster::V4_0::HardwareAuthToken; +using ::android::hardware::keymaster::V4_0::HardwareAuthenticatorType; + +template +auto toBytes(const T& v) -> const uint8_t (&)[sizeof(T)] { + return *reinterpret_cast(&v); +} + +HardwareAuthToken makeTestToken(const TestModeCommands command, uint64_t timestamp = 0) { + HardwareAuthToken auth_token; + auth_token.challenge = static_cast(command); + auth_token.userId = 0; + auth_token.authenticatorId = 0; + auth_token.authenticatorType = HardwareAuthenticatorType::NONE; + auth_token.timestamp = timestamp; + + // Canonical form of auth-token v0 + // version (1 byte) + // challenge (8 bytes) + // user_id (8 bytes) + // authenticator_id (8 bytes) + // authenticator_type (4 bytes) + // timestamp (8 bytes) + // total 37 bytes + auth_token.mac = testHMAC("\0", + toBytes(auth_token.challenge), // + toBytes(auth_token.userId), // + toBytes(auth_token.authenticatorId), // + toBytes(support::hton(auth_token.authenticatorType)), // + toBytes(support::hton(auth_token.timestamp))); // + + return auth_token; +} + +#define DEBUG_CONFRIMATIONUI_UTILS_TEST + +#ifdef DEBUG_CONFRIMATIONUI_UTILS_TEST +std::ostream& hexdump(std::ostream& out, const uint8_t* data, size_t size) { + for (size_t i = 0; i < size; ++i) { + uint8_t byte = data[i]; + out << std::hex << std::setw(2) << std::setfill('0') << (unsigned)byte; + switch (i & 0xf) { + case 0xf: + out << "\n"; + break; + case 7: + out << " "; + break; + default: + out << " "; + break; + } + } + return out; +} +#endif + +constexpr char hex_value[256] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // + 0, 1, 2, 3, 4, 5, 
6, 7, 8, 9, 0, 0, 0, 0, 0, 0, // '0'..'9' + 0, 10, 11, 12, 13, 14, 15, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 'A'..'F' + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // + 0, 10, 11, 12, 13, 14, 15, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 'a'..'f' + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}; + +std::string hex2str(std::string a) { + std::string b; + size_t num = a.size() / 2; + b.resize(num); + for (size_t i = 0; i < num; i++) { + b[i] = (hex_value[a[i * 2] & 0xFF] << 4) + (hex_value[a[i * 2 + 1] & 0xFF]); + } + return b; +} + +} // namespace + +class ConfirmationArgs { + public: + ResponseCode error_; + hidl_vec formattedMessage_; + hidl_vec confirmationToken_; + bool verifyConfirmationToken() { + static constexpr char confirmationPrefix[] = "confirmation token"; + EXPECT_EQ(32U, confirmationToken_.size()); + return 32U == confirmationToken_.size() && + !memcmp(confirmationToken_.data(), + testHMAC(confirmationPrefix, formattedMessage_).data(), 32); + } +}; + +class ConfirmationTestCallback : public ::testing::VtsHalHidlTargetCallbackBase, + public IConfirmationResultCallback { + public: + Return result(ResponseCode error, const hidl_vec& formattedMessage, + const hidl_vec& confirmationToken) override { + ConfirmationArgs args; + args.error_ = error; + args.formattedMessage_ = formattedMessage; + args.confirmationToken_ = confirmationToken; + NotifyFromCallback(args); + return Void(); + } +}; + +class ConfirmationUIHidlEnvironment : public ::testing::VtsHalHidlTargetTestEnvBase { + public: + // get the test environment singleton + static ConfirmationUIHidlEnvironment* Instance() { + static ConfirmationUIHidlEnvironment* instance = new ConfirmationUIHidlEnvironment; + return instance; + } + + void registerTestServices() override { registerTestService(); } + + private: + ConfirmationUIHidlEnvironment(){}; + + GTEST_DISALLOW_COPY_AND_ASSIGN_(ConfirmationUIHidlEnvironment); +}; + +class ConfirmationUIHidlTest : public ::testing::VtsHalHidlTargetTestBase { + public: + void TearDown() override { confirmator().abort(); } + + static void SetUpTestCase() { + string service_name = + ConfirmationUIHidlEnvironment::Instance()->getServiceName(); + confirmator_ = IConfirmationUI::getService(service_name); + ASSERT_NE(nullptr, confirmator_.get()); + } + + static void TearDownTestCase() { confirmator_.clear(); } + + static IConfirmationUI& confirmator() { return *confirmator_; } + + private: + static sp confirmator_; +}; + +sp ConfirmationUIHidlTest::confirmator_; + +#define ASSERT_HAL_CALL(expected, call) \ + { \ + auto result = call; \ + ASSERT_TRUE(result.isOk()); \ + ASSERT_EQ(expected, static_cast(result)); \ + } + +struct CnCborDeleter { + void operator()(cn_cbor* ptr) { cn_cbor_free(ptr); } +}; + +typedef std::unique_ptr CnCborPtr; + +// Simulates the User taping Ok +TEST_F(ConfirmationUIHidlTest, UserOkTest) { + static constexpr char test_prompt[] = "Me first, gimme gimme!"; + static constexpr uint8_t test_extra[] = {0x1, 0x2, 0x3}; + sp conf_cb = new ConfirmationTestCallback; + hidl_string prompt_text(test_prompt); + hidl_vec extra(test_extra, test_extra + 3); + 
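// Expected flow, per the assertions below: start the prompt, then inject a test-keyed OK event via + // deliverSecureInputEvent; the callback must report OK with a confirmation token computed as + // HMAC-SHA256 with the test key over the "confirmation token" prefix and the formatted message, and + // with a formatted message that parses as a CBOR map holding the "prompt" text and the "extra" bytes. +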
ASSERT_HAL_CALL(ResponseCode::OK, + confirmator().promptUserConfirmation(conf_cb, prompt_text, extra, "en", {})); + + ASSERT_HAL_CALL(ResponseCode::OK, confirmator().deliverSecureInputEvent( + makeTestToken(TestModeCommands::OK_EVENT))); + + auto result = conf_cb->WaitForCallback(); + ASSERT_EQ(ResponseCode::OK, result.args->error_); + + ASSERT_TRUE(result.args->verifyConfirmationToken()); + + cn_cbor_errback cn_cbor_error; + auto parsed_message = + CnCborPtr(cn_cbor_decode(result.args->formattedMessage_.data(), + result.args->formattedMessage_.size(), &cn_cbor_error)); + // is parsable CBOR + ASSERT_TRUE(parsed_message.get()); + // is a map + ASSERT_EQ(CN_CBOR_MAP, parsed_message->type); + + // the message must have exactly 2 key value pairs. + // cn_cbor holds 2* in the length field + ASSERT_EQ(4, parsed_message->length); + // map has key "prompt" + auto prompt = cn_cbor_mapget_string(parsed_message.get(), "prompt"); + ASSERT_TRUE(prompt); + ASSERT_EQ(CN_CBOR_TEXT, prompt->type); + ASSERT_EQ(22, prompt->length); + ASSERT_EQ(0, memcmp(test_prompt, prompt->v.str, 22)); + // map has key "extra" + auto extra_out = cn_cbor_mapget_string(parsed_message.get(), "extra"); + ASSERT_TRUE(extra_out); + ASSERT_EQ(CN_CBOR_BYTES, extra_out->type); + ASSERT_EQ(3, extra_out->length); + ASSERT_EQ(0, memcmp(test_extra, extra_out->v.bytes, 3)); +} + +// Initiates a confirmation prompt with a message that is too long +TEST_F(ConfirmationUIHidlTest, MessageTooLongTest) { + static constexpr uint8_t test_extra[static_cast(MessageSize::MAX)] = {}; + static constexpr char test_prompt[] = "D\'oh!"; + sp conf_cb = new ConfirmationTestCallback; + hidl_string prompt_text(test_prompt); + hidl_vec extra(test_extra, test_extra + sizeof(test_extra)); + ASSERT_HAL_CALL(ResponseCode::UIErrorMessageTooLong, + confirmator().promptUserConfirmation(conf_cb, prompt_text, extra, "en", {})); +} + +// If the message gets very long some HAL implementations might fail even before the message +// reaches the trusted app implementation. But the HAL must still diagnose the correct error. 
+TEST_F(ConfirmationUIHidlTest, MessageWayTooLongTest) { + static constexpr uint8_t test_extra[static_cast(MessageSize::MAX) * 10] = {}; + static constexpr char test_prompt[] = "D\'oh!"; + sp conf_cb = new ConfirmationTestCallback; + hidl_string prompt_text(test_prompt); + hidl_vec extra(test_extra, test_extra + sizeof(test_extra)); + ASSERT_HAL_CALL(ResponseCode::UIErrorMessageTooLong, + confirmator().promptUserConfirmation(conf_cb, prompt_text, extra, "en", {})); +} + +// Simulates the User tapping Cancel +TEST_F(ConfirmationUIHidlTest, UserCancelTest) { + static constexpr char test_prompt[] = "Me first, gimme gimme!"; + static constexpr uint8_t test_extra[] = {0x1, 0x2, 0x3}; + sp conf_cb = new ConfirmationTestCallback; + hidl_string prompt_text(test_prompt); + hidl_vec extra(test_extra, test_extra + 3); + ASSERT_HAL_CALL(ResponseCode::OK, + confirmator().promptUserConfirmation(conf_cb, prompt_text, extra, "en", {})); + + ASSERT_HAL_CALL(ResponseCode::OK, confirmator().deliverSecureInputEvent( + makeTestToken(TestModeCommands::CANCEL_EVENT))); + + auto result = conf_cb->WaitForCallback(); + ASSERT_EQ(ResponseCode::Canceled, result.args->error_); + + ASSERT_EQ(0U, result.args->confirmationToken_.size()); + ASSERT_EQ(0U, result.args->formattedMessage_.size()); +} + +// Simulates the framework cancelling an ongoing prompt +TEST_F(ConfirmationUIHidlTest, AbortTest) { + static constexpr char test_prompt[] = "Me first, gimme gimme!"; + static constexpr uint8_t test_extra[] = {0x1, 0x2, 0x3}; + sp conf_cb = new ConfirmationTestCallback; + hidl_string prompt_text(test_prompt); + hidl_vec extra(test_extra, test_extra + 3); + ASSERT_HAL_CALL(ResponseCode::OK, + confirmator().promptUserConfirmation(conf_cb, prompt_text, extra, "en", {})); + + confirmator().abort(); + + auto result = conf_cb->WaitForCallback(); + ASSERT_EQ(ResponseCode::Aborted, result.args->error_); + ASSERT_EQ(0U, result.args->confirmationToken_.size()); + ASSERT_EQ(0U, result.args->formattedMessage_.size()); +} + +// Passing malformed UTF-8 to the confirmation UI +// This test passes a string that ends in the middle of a multibyte character +TEST_F(ConfirmationUIHidlTest, MalformedUTF8Test1) { + static constexpr char test_prompt[] = {char(0xc0), 0}; + static constexpr uint8_t test_extra[] = {0x1, 0x2, 0x3}; + sp conf_cb = new ConfirmationTestCallback; + hidl_string prompt_text(test_prompt); + hidl_vec extra(test_extra, test_extra + 3); + ASSERT_HAL_CALL(ResponseCode::UIErrorMalformedUTF8Encoding, + confirmator().promptUserConfirmation(conf_cb, prompt_text, extra, "en", {})); +} + +// Passing malformed UTF-8 to the confirmation UI +// This test passes a string with a 5-byte character. +TEST_F(ConfirmationUIHidlTest, MalformedUTF8Test2) { + static constexpr char test_prompt[] = {char(0xf8), char(0x82), char(0x82), + char(0x82), char(0x82), 0}; + static constexpr uint8_t test_extra[] = {0x1, 0x2, 0x3}; + sp conf_cb = new ConfirmationTestCallback; + hidl_string prompt_text(test_prompt); + hidl_vec extra(test_extra, test_extra + 3); + ASSERT_HAL_CALL(ResponseCode::UIErrorMalformedUTF8Encoding, + confirmator().promptUserConfirmation(conf_cb, prompt_text, extra, "en", {})); +} + +// Passing malformed UTF-8 to the confirmation UI +// This test passes a string with a 2-byte character followed by a stray non UTF-8 character.
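+// (Here, 0xc0 0x82 forms the two-byte sequence and the trailing 0x83 is a stray continuation byte.)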
+TEST_F(ConfirmationUIHidlTest, MalformedUTF8Test3) { + static constexpr char test_prompt[] = {char(0xc0), char(0x82), char(0x83), 0}; + static constexpr uint8_t test_extra[] = {0x1, 0x2, 0x3}; + sp conf_cb = new ConfirmationTestCallback; + hidl_string prompt_text(test_prompt); + hidl_vec extra(test_extra, test_extra + 3); + ASSERT_HAL_CALL(ResponseCode::UIErrorMalformedUTF8Encoding, + confirmator().promptUserConfirmation(conf_cb, prompt_text, extra, "en", {})); +} + +// Test the implementation of HMAC SHA 256 against a golden blob. +TEST(ConfirmationUITestSelfTest, HMAC256SelfTest) { + const char key_str[32] = "keykeykeykeykeykeykeykeykeykeyk"; + const uint8_t(&key)[32] = *reinterpret_cast(key_str); + auto expected = hex2str("2377fbcaa7fb3f6c20cfa1d9ebc60e9922cf58c909e25e300f3cb57f7805c886"); + auto result = HMacer::hmac256(key, "value1", "value2", "value3"); + +#ifdef DEBUG_CONFRIMATIONUI_UTILS_TEST + hexdump(std::cout, reinterpret_cast(expected.data()), 32) << std::endl; + hexdump(std::cout, result.value().data(), 32) << std::endl; +#endif + + support::ByteBufferProxy expected_bytes(expected); + ASSERT_TRUE(result.isOk()); + ASSERT_EQ(expected, result.value()); +} + +} // namespace test +} // namespace V1_0 +} // namespace confirmationui +} // namespace hardware +} // namespace android + +int main(int argc, char** argv) { + ::testing::InitGoogleTest(&argc, argv); + std::vector positional_args; + int status = RUN_ALL_TESTS(); + ALOGI("Test result = %d", status); + return status; +} diff --git a/confirmationui/support/Android.bp b/confirmationui/support/Android.bp new file mode 100644 index 0000000000000000000000000000000000000000..62156b34198dab15ffd4cbc7f1e11e9fa7aa46ec --- /dev/null +++ b/confirmationui/support/Android.bp @@ -0,0 +1,51 @@ +// +// Copyright (C) 2017 The Android Open Source Project +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +cc_library { + name: "android.hardware.confirmationui-support-lib", + vendor_available: true, + host_supported: true, + vndk: { + enabled: true, + }, + srcs: [ + "src/cbor.cpp", + "src/confirmationui_utils.cpp", + ], + export_include_dirs: [ + "include", + ] +} + +cc_test { + name: "android.hardware.confirmationui-support-lib-tests", + srcs: [ + "test/gtest_main.cpp", + "test/android_cbor_test.cpp", + "test/msg_formatting_test.cpp", + ], + static_libs: [ + "libgtest", + "android.hardware.confirmationui-support-lib", + ], + shared_libs: [ + "android.hardware.confirmationui@1.0", + "android.hardware.keymaster@4.0", + "libhidlbase", + ], + clang: true, + cflags: [ "-O0" ], +} diff --git a/confirmationui/support/OWNERS b/confirmationui/support/OWNERS new file mode 100644 index 0000000000000000000000000000000000000000..335660da3be9ef092d3999ffdf1a5a73920a5d3d --- /dev/null +++ b/confirmationui/support/OWNERS @@ -0,0 +1,2 @@ +jdanis@google.com +swillden@google.com diff --git a/confirmationui/support/include/android/hardware/confirmationui/1.0/generic/GenericOperation.h b/confirmationui/support/include/android/hardware/confirmationui/1.0/generic/GenericOperation.h new file mode 100644 index 0000000000000000000000000000000000000000..b1c322ce533b87f03f8dc18471b6386984b57bd0 --- /dev/null +++ b/confirmationui/support/include/android/hardware/confirmationui/1.0/generic/GenericOperation.h @@ -0,0 +1,201 @@ +/* +** +** Copyright 2017, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef CONFIRMATIONUI_1_0_DEFAULT_GENERICOPERATION_H_ +#define CONFIRMATIONUI_1_0_DEFAULT_GENERICOPERATION_H_ + +#include +#include +#include +#include + +namespace android { +namespace hardware { +namespace confirmationui { +namespace V1_0 { +namespace generic { + +namespace { +using namespace ::android::hardware::confirmationui::support; +using ::android::hardware::keymaster::V4_0::HardwareAuthToken; +using ::android::hardware::keymaster::V4_0::HardwareAuthenticatorType; + +inline bool hasOption(UIOption option, const hidl_vec& uiOptions) { + for (auto& o : uiOptions) { + if (o == option) return true; + } + return false; +} + +template +class Operation { + using HMacer = support::HMac; + + public: + Operation() : error_(ResponseCode::Ignored), formattedMessageLength_(0) {} + + ResponseCode init(const Callback& resultCB, const hidl_string& promptText, + const hidl_vec& extraData, const hidl_string& locale, + const hidl_vec& uiOptions) { + (void)locale; + (void)uiOptions; + resultCB_ = resultCB; + if (error_ != ResponseCode::Ignored) return ResponseCode::OperationPending; + + // We need to access the prompt text multiple times. Once for formatting the CBOR message + // and again for rendering the dialog. It is vital that the prompt does not change + // in the meantime. As of this point the prompt text is in a shared buffer and therefore + // susceptible to TOCTOU attacks. Note that promptText.size() resides on the stack and + // is safe to access multiple times. 
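+ // (The bytes behind promptText.c_str(), however, live in that shared buffer: a malicious client + // could change them between the CBOR formatting pass and the rendering pass, so the user could + // end up confirming a different text than the one that is signed.)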
So now we copy the prompt string into the + // scratchpad promptStringBuffer_ from where we can format the CBOR message and then + // pass it to the renderer. + if (promptText.size() >= uint32_t(MessageSize::MAX)) + return ResponseCode::UIErrorMessageTooLong; + auto pos = std::copy(promptText.c_str(), promptText.c_str() + promptText.size(), + promptStringBuffer_); + *pos = 0; // null-terminate the prompt for the renderer. + + // Note the extra data is accessed only once for formating the CBOR message. So it is safe + // to read it from the shared buffer directly. Anyway we don't trust or interpret the + // extra data in any way so all we do is take a snapshot and we don't care if it is + // modified concurrently. + auto state = write(WriteState(formattedMessageBuffer_), + map(pair(text("prompt"), text(promptStringBuffer_, promptText.size())), + pair(text("extra"), bytes(extraData)))); + switch (state.error_) { + case Error::OK: + break; + case Error::OUT_OF_DATA: + return ResponseCode::UIErrorMessageTooLong; + case Error::MALFORMED_UTF8: + return ResponseCode::UIErrorMalformedUTF8Encoding; + case Error::MALFORMED: + default: + return ResponseCode::Unexpected; + } + formattedMessageLength_ = state.data_ - formattedMessageBuffer_; + + // on success record the start time + startTime_ = TimeStamper::now(); + if (!startTime_.isOk()) { + return ResponseCode::SystemError; + } + return ResponseCode::OK; + } + + void setPending() { error_ = ResponseCode::OK; } + + void setHmacKey(const auth_token_key_t& key) { hmacKey_ = key; } + NullOr hmacKey() const { return hmacKey_; } + + void abort() { + if (isPending()) { + resultCB_->result(ResponseCode::Aborted, {}, {}); + error_ = ResponseCode::Ignored; + } + } + + void userCancel() { + if (isPending()) error_ = ResponseCode::Canceled; + } + + void finalize(const auth_token_key_t& key) { + if (error_ == ResponseCode::Ignored) return; + resultCB_->result(error_, getMessage(), userConfirm(key)); + error_ = ResponseCode::Ignored; + resultCB_ = {}; + } + + bool isPending() const { return error_ != ResponseCode::Ignored; } + const hidl_string getPrompt() const { + hidl_string s; + s.setToExternal(promptStringBuffer_, strlen(promptStringBuffer_)); + return s; + } + + ResponseCode deliverSecureInputEvent(const HardwareAuthToken& secureInputToken) { + const auth_token_key_t testKey(static_cast(TestKeyBits::BYTE)); + + auto hmac = HMacer::hmac256(testKey, "\0", bytes_cast(secureInputToken.challenge), + bytes_cast(secureInputToken.userId), + bytes_cast(secureInputToken.authenticatorId), + bytes_cast(hton(secureInputToken.authenticatorType)), + bytes_cast(hton(secureInputToken.timestamp))); + if (!hmac.isOk()) return ResponseCode::Unexpected; + if (hmac.value() == secureInputToken.mac) { + // okay so this is a test token + switch (static_cast(secureInputToken.challenge)) { + case TestModeCommands::OK_EVENT: { + if (isPending()) { + finalize(testKey); + return ResponseCode::OK; + } else { + return ResponseCode::Ignored; + } + } + case TestModeCommands::CANCEL_EVENT: { + bool ignored = !isPending(); + userCancel(); + finalize(testKey); + return ignored ? 
ResponseCode::Ignored : ResponseCode::OK; + } + default: + return ResponseCode::Ignored; + } + } + return ResponseCode::Ignored; + } + + private: + bool acceptAuthToken(const HardwareAuthToken&) { return false; } + hidl_vec getMessage() { + hidl_vec result; + if (error_ != ResponseCode::OK) return {}; + result.setToExternal(formattedMessageBuffer_, formattedMessageLength_); + return result; + } + hidl_vec userConfirm(const auth_token_key_t& key) { + if (error_ != ResponseCode::OK) return {}; + confirmationTokenScratchpad_ = HMacer::hmac256(key, "confirmation token", getMessage()); + if (!confirmationTokenScratchpad_.isOk()) { + error_ = ResponseCode::Unexpected; + return {}; + } + hidl_vec result; + result.setToExternal(confirmationTokenScratchpad_->data(), + confirmationTokenScratchpad_->size()); + return result; + } + + ResponseCode error_ = ResponseCode::Ignored; + uint8_t formattedMessageBuffer_[uint32_t(MessageSize::MAX)]; + char promptStringBuffer_[uint32_t(MessageSize::MAX)]; + size_t formattedMessageLength_ = 0; + NullOr confirmationTokenScratchpad_; + Callback resultCB_; + typename TimeStamper::TimeStamp startTime_; + NullOr hmacKey_; +}; + +} // namespace +} // namespace generic +} // namespace V1_0 +} // namespace confirmationui +} // namespace hardware +} // namespace android + +#endif // CONFIRMATIONUI_1_0_DEFAULT_GENERICOPERATION_H_ diff --git a/confirmationui/support/include/android/hardware/confirmationui/support/cbor.h b/confirmationui/support/include/android/hardware/confirmationui/support/cbor.h new file mode 100644 index 0000000000000000000000000000000000000000..f5814d4f0547590ea6e0dff1e609305bd28d08c2 --- /dev/null +++ b/confirmationui/support/include/android/hardware/confirmationui/support/cbor.h @@ -0,0 +1,335 @@ +/* +** +** Copyright 2017, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef CONFIRMATIONUI_1_0_DEFAULT_CBOR_H_ +#define CONFIRMATIONUI_1_0_DEFAULT_CBOR_H_ + +#include +#include +#include + +namespace android { +namespace hardware { +namespace confirmationui { +namespace support { + +template +Out copy(In begin, In end, Out out) { + while (begin != end) { + *out++ = *begin++; + } + return out; +} + +enum class Type : uint8_t { + NUMBER = 0, + NEGATIVE = 1, + BYTE_STRING = 2, + TEXT_STRING = 3, + ARRAY = 4, + MAP = 5, + TAG = 6, + FLOAT = 7, +}; + +enum class Error : uint32_t { + OK = 0, + OUT_OF_DATA = 1, + MALFORMED = 2, + MALFORMED_UTF8 = 3, +}; + +template +struct MapElement { + const Key& key_; + const Value& value_; + MapElement(const Key& key, const Value& value) : key_(key), value_(value) {} +}; + +template +struct Array; + +template +struct Array { + const Head& head_; + Array tail_; + Array(const Head& head, const Tail&... tail) : head_(head), tail_(tail...) 
{} + constexpr size_t size() const { return sizeof...(Tail) + 1; }; +}; + +template <> +struct Array<> {}; + +struct TextStr {}; +struct ByteStr {}; + +template +struct StringBuffer { + const T* data_; + size_t size_; + StringBuffer(const T* data, size_t size) : data_(data), size_(size) { + static_assert(sizeof(T) == 1, "elements too large"); + } + const T* data() const { return data_; } + size_t size() const { return size_; } +}; + +/** + * Takes a char array and turns it into a StringBuffer of TextStr type. The length of the resulting + * StringBuffer is size - 1, effectively stripping the 0 character from the region being considered. + * If the terminating 0 shall not be stripped, use text_keep_last. + */ +template +StringBuffer text(const char (&str)[size]) { + if (size > 0) return StringBuffer(str, size - 1); + return StringBuffer(str, size); +} + +/** + * As opposed to text(const char (&str)[size]), this function does not strip the last character. + */ +template +StringBuffer text_keep_last(const char (&str)[size]) { + return StringBuffer(str, size); +} + +template +auto getData(const T& v) -> decltype(v.data()) { + return v.data(); +} + +template +auto getData(const T& v) -> decltype(v.c_str()) { + return v.c_str(); +} + +template +auto text(const T& str) -> StringBuffer, TextStr> { + return StringBuffer, TextStr>(getData(str), str.size()); +} + +inline StringBuffer text(const char* str, size_t size) { + return StringBuffer(str, size); +} + +template +StringBuffer bytes(const T (&str)[size]) { + return StringBuffer(str, size); +} + +template +StringBuffer bytes(const T* str, size_t size) { + return StringBuffer(str, size); +} + +template +auto bytes(const T& str) -> StringBuffer, ByteStr> { + return StringBuffer, ByteStr>(getData(str), str.size()); +} + +template +struct Map; + +template +struct Map, Tail...> { + const MapElement& head_; + Map tail_; + Map(const MapElement& head, const Tail&... tail) + : head_(head), tail_(tail...) {} + constexpr size_t size() const { return sizeof...(Tail) + 1; }; +}; + +template <> +struct Map<> {}; + +template +Map...> map(const MapElement&... elements) { + return Map...>(elements...); +} + +template +Array arr(const Elements&...
elements) { + return Array(elements...); +} + +template +MapElement pair(const Key& k, const Value& v) { + return MapElement(k, v); +} + +template +struct getUnsignedType; + +template <> +struct getUnsignedType { + typedef uint8_t type; +}; +template <> +struct getUnsignedType { + typedef uint16_t type; +}; +template <> +struct getUnsignedType { + typedef uint32_t type; +}; +template <> +struct getUnsignedType { + typedef uint64_t type; +}; + +template +using Unsigned = typename getUnsignedType::type; + +class WriteState { + public: + WriteState() : data_(nullptr), size_(0), error_(Error::OK) {} + WriteState(uint8_t* buffer, size_t size) : data_(buffer), size_(size), error_(Error::OK) {} + WriteState(uint8_t* buffer, size_t size, Error error) + : data_(buffer), size_(size), error_(error) {} + template + WriteState(uint8_t (&buffer)[size]) : data_(buffer), size_(size), error_(Error::OK) {} + + WriteState& operator++() { + if (size_) { + ++data_; + --size_; + } else { + error_ = Error::OUT_OF_DATA; + } + return *this; + } + WriteState& operator+=(size_t offset) { + if (offset > size_) { + error_ = Error::OUT_OF_DATA; + } else { + data_ += offset; + size_ -= offset; + } + return *this; + } + operator bool() const { return error_ == Error::OK; } + + uint8_t* data_; + size_t size_; + Error error_; +}; + +WriteState writeHeader(WriteState wState, Type type, const uint64_t value); +bool checkUTF8Copy(const char* begin, const char* const end, uint8_t* out); + +template +WriteState writeNumber(WriteState wState, const T& v) { + if (!wState) return wState; + if (v >= 0) { + return writeHeader(wState, Type::NUMBER, v); + } else { + return writeHeader(wState, Type::NEGATIVE, UINT64_C(-1) - v); + } +} + +inline WriteState write(const WriteState& wState, const uint8_t& v) { + return writeNumber(wState, v); +} +inline WriteState write(const WriteState& wState, const int8_t& v) { + return writeNumber(wState, v); +} +inline WriteState write(const WriteState& wState, const uint16_t& v) { + return writeNumber(wState, v); +} +inline WriteState write(const WriteState& wState, const int16_t& v) { + return writeNumber(wState, v); +} +inline WriteState write(const WriteState& wState, const uint32_t& v) { + return writeNumber(wState, v); +} +inline WriteState write(const WriteState& wState, const int32_t& v) { + return writeNumber(wState, v); +} +inline WriteState write(const WriteState& wState, const uint64_t& v) { + return writeNumber(wState, v); +} +inline WriteState write(const WriteState& wState, const int64_t& v) { + return writeNumber(wState, v); +} + +template +WriteState write(WriteState wState, const StringBuffer& v) { + wState = writeHeader(wState, Type::TEXT_STRING, v.size()); + uint8_t* buffer = wState.data_; + wState += v.size(); + if (!wState) return wState; + if (!checkUTF8Copy(v.data(), v.data() + v.size(), buffer)) { + wState.error_ = Error::MALFORMED_UTF8; + } + return wState; +} + +template +WriteState write(WriteState wState, const StringBuffer& v) { + wState = writeHeader(wState, Type::BYTE_STRING, v.size()); + uint8_t* buffer = wState.data_; + wState += v.size(); + if (!wState) return wState; + static_assert(sizeof(*v.data()) == 1, "elements too large"); + copy(v.data(), v.data() + v.size(), buffer); + return wState; +} + +template