diff --git a/apex/Android.bp b/apex/Android.bp
index b9b9bde2e2784b45e6009dfa9f218a599b374acf..570ca0199242ea36a45f586f834146b808f6d601 100644
--- a/apex/Android.bp
+++ b/apex/Android.bp
@@ -56,6 +56,7 @@ apex_defaults {
prebuilts: [
"code_coverage.policy",
"com.android.media-mediatranscoding.rc",
+ "com.android.media-mediatranscoding.32rc",
"crash_dump.policy",
"mediaextractor.policy",
"media-linker-config",
@@ -177,6 +178,7 @@ apex_defaults {
],
prebuilts: [
"com.android.media.swcodec-mediaswcodec.rc",
+ "com.android.media.swcodec-mediaswcodec.32rc",
"com.android.media.swcodec-ld.config.txt",
"mediaswcodec.policy",
"code_coverage.policy",
@@ -201,17 +203,34 @@ apex_defaults {
compressible: true,
}
+// install as mediatranscoding.* and mediaswcodec.* instead of init.*
+// so we are ready for the day we have more than one *rc file within the apex.
+
prebuilt_etc {
name: "com.android.media-mediatranscoding.rc",
src: "mediatranscoding.rc",
- filename: "init.rc",
+ filename: "mediatranscoding.rc",
+ installable: false,
+}
+
+prebuilt_etc {
+ name: "com.android.media-mediatranscoding.32rc",
+ src: "mediatranscoding.32rc",
+ filename: "mediatranscoding.32rc",
installable: false,
}
prebuilt_etc {
name: "com.android.media.swcodec-mediaswcodec.rc",
src: "mediaswcodec.rc",
- filename: "init.rc",
+ filename: "mediaswcodec.rc",
+ installable: false,
+}
+
+prebuilt_etc {
+ name: "com.android.media.swcodec-mediaswcodec.32rc",
+ src: "mediaswcodec.32rc",
+ filename: "mediaswcodec.32rc",
installable: false,
}
diff --git a/apex/mediaswcodec.32rc b/apex/mediaswcodec.32rc
index 79aef36d046291be8b935e19009be7fec036d372..f40d1728daf49e3a1963503826c1385a49654c0a 100644
--- a/apex/mediaswcodec.32rc
+++ b/apex/mediaswcodec.32rc
@@ -1,3 +1,5 @@
+## for SDK releases >= 32
+##
service media.swcodec /apex/com.android.media.swcodec/bin/mediaswcodec
class main
user mediacodec
diff --git a/apex/mediaswcodec.rc b/apex/mediaswcodec.rc
index 0c9b8c8cfe219f75f0b47a61d9c5e79e3e6e5a34..46799c771fe179796c0a9028bfa0676d2bc98645 100644
--- a/apex/mediaswcodec.rc
+++ b/apex/mediaswcodec.rc
@@ -1,3 +1,6 @@
+## for SDK releases 29..31
+## where writepid has not yet been replaced by task_profiles
+##
service media.swcodec /apex/com.android.media.swcodec/bin/mediaswcodec
class main
user mediacodec
diff --git a/apex/mediatranscoding.32rc b/apex/mediatranscoding.32rc
index 5169462414abc817acd1e559a04cd040a76c9118..edba9b9f810131acc7c35960dbee897f1daf24fb 100644
--- a/apex/mediatranscoding.32rc
+++ b/apex/mediatranscoding.32rc
@@ -1,3 +1,6 @@
+## for SDK releases >= 32
+##
+#
# media.transcoding service is defined on com.android.media apex which goes back
# to API29, but we only want it started on API31+ devices. So we declare it as
# "disabled" and start it explicitly on boot.
diff --git a/apex/mediatranscoding.rc b/apex/mediatranscoding.rc
index ae9f8baac605fe6d9b15b6d1867fe43bfdbf1a60..6e453be364aa0f0aa685038a18cda027889e0426 100644
--- a/apex/mediatranscoding.rc
+++ b/apex/mediatranscoding.rc
@@ -1,3 +1,7 @@
+## for SDK releases 29..31
+## where writepid has not yet been replaced by task_profiles
+##
+#
# media.transcoding service is defined on com.android.media apex which goes back
# to API29, but we only want it started on API31+ devices. So we declare it as
# "disabled" and start it explicitly on boot.
diff --git a/camera/Android.bp b/camera/Android.bp
index 4ed3269665cd52e2f77e983ceb8602a6c2467f89..e44202bfbf192e39f0de6cf840742e2355d64790 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -113,6 +113,30 @@ cc_library_shared {
}
+cc_library_host_static {
+ name: "libcamera_client_host",
+
+ srcs: [
+ "CameraMetadata.cpp",
+ "VendorTagDescriptor.cpp",
+ ],
+
+ shared_libs: [
+ "libbase",
+ "libcamera_metadata",
+ ],
+
+ include_dirs: [
+ "system/media/private/camera/include",
+ "frameworks/native/include/media/openmax",
+ ],
+
+ export_include_dirs: [
+ "include",
+ "include/camera"
+ ],
+}
+
// AIDL interface between camera clients and the camera service.
filegroup {
name: "libcamera_client_aidl",
diff --git a/camera/CameraBase.cpp b/camera/CameraBase.cpp
index 03439fdf72543f99529d3a6c3a0055214a861c53..24c9108224aed8a42eac685d79da7d0ad3aa0bc8 100644
--- a/camera/CameraBase.cpp
+++ b/camera/CameraBase.cpp
@@ -68,6 +68,9 @@ status_t CameraStatus::writeToParcel(android::Parcel* parcel) const {
unavailablePhysicalIds16.push_back(String16(id8));
}
res = parcel->writeString16Vector(unavailablePhysicalIds16);
+ if (res != OK) return res;
+
+ res = parcel->writeString16(String16(clientPackage));
return res;
}
@@ -86,6 +89,12 @@ status_t CameraStatus::readFromParcel(const android::Parcel* parcel) {
for (auto& id16 : unavailablePhysicalIds16) {
unavailablePhysicalIds.push_back(String8(id16));
}
+
+ String16 tempClientPackage;
+ res = parcel->readString16(&tempClientPackage);
+ if (res != OK) return res;
+ clientPackage = String8(tempClientPackage);
+
return res;
}
diff --git a/camera/CameraSessionStats.cpp b/camera/CameraSessionStats.cpp
index 28e037ff946a4f84290582969d7e5f7142289e50..d1aa36a5246b153a71e5486d5ea59f3c66e1bf09 100644
--- a/camera/CameraSessionStats.cpp
+++ b/camera/CameraSessionStats.cpp
@@ -52,6 +52,12 @@ status_t CameraStreamStats::readFromParcel(const android::Parcel* parcel) {
return err;
}
+ float maxPreviewFps = 0;
+ if ((err = parcel->readFloat(&maxPreviewFps)) != OK) {
+ ALOGE("%s: Failed to read maxPreviewFps from parcel", __FUNCTION__);
+ return err;
+ }
+
int dataSpace = 0;
if ((err = parcel->readInt32(&dataSpace)) != OK) {
ALOGE("%s: Failed to read dataSpace from parcel", __FUNCTION__);
@@ -112,9 +118,22 @@ status_t CameraStreamStats::readFromParcel(const android::Parcel* parcel) {
return err;
}
+ int64_t dynamicRangeProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
+ if ((err = parcel->readInt64(&dynamicRangeProfile)) != OK) {
+ ALOGE("%s: Failed to read dynamic range profile type from parcel", __FUNCTION__);
+ return err;
+ }
+
+ int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
+ if ((err = parcel->readInt64(&streamUseCase)) != OK) {
+ ALOGE("%s: Failed to read stream use case from parcel", __FUNCTION__);
+ return err;
+ }
+
mWidth = width;
mHeight = height;
mFormat = format;
+ mMaxPreviewFps = maxPreviewFps;
mDataSpace = dataSpace;
mUsage = usage;
mRequestCount = requestCount;
@@ -125,6 +144,8 @@ status_t CameraStreamStats::readFromParcel(const android::Parcel* parcel) {
mHistogramType = histogramType;
mHistogramBins = std::move(histogramBins);
mHistogramCounts = std::move(histogramCounts);
+ mDynamicRangeProfile = dynamicRangeProfile;
+ mStreamUseCase = streamUseCase;
return OK;
}
@@ -152,6 +173,11 @@ status_t CameraStreamStats::writeToParcel(android::Parcel* parcel) const {
return err;
}
+ if ((err = parcel->writeFloat(mMaxPreviewFps)) != OK) {
+ ALOGE("%s: Failed to write stream maxPreviewFps!", __FUNCTION__);
+ return err;
+ }
+
if ((err = parcel->writeInt32(mDataSpace)) != OK) {
ALOGE("%s: Failed to write stream dataSpace!", __FUNCTION__);
return err;
@@ -202,6 +228,16 @@ status_t CameraStreamStats::writeToParcel(android::Parcel* parcel) const {
return err;
}
+ if ((err = parcel->writeInt64(mDynamicRangeProfile)) != OK) {
+ ALOGE("%s: Failed to write dynamic range profile type", __FUNCTION__);
+ return err;
+ }
+
+ if ((err = parcel->writeInt64(mStreamUseCase)) != OK) {
+ ALOGE("%s: Failed to write stream use case!", __FUNCTION__);
+ return err;
+ }
+
return OK;
}
@@ -223,11 +259,13 @@ CameraSessionStats::CameraSessionStats() :
mApiLevel(0),
mIsNdk(false),
mLatencyMs(-1),
+ mMaxPreviewFps(0),
mSessionType(0),
mInternalReconfigure(0),
mRequestCount(0),
mResultErrorCount(0),
- mDeviceError(false) {}
+ mDeviceError(false),
+ mVideoStabilizationMode(-1) {}
CameraSessionStats::CameraSessionStats(const String16& cameraId,
int facing, int newCameraState, const String16& clientName,
@@ -239,11 +277,13 @@ CameraSessionStats::CameraSessionStats(const String16& cameraId,
mApiLevel(apiLevel),
mIsNdk(isNdk),
mLatencyMs(latencyMs),
+ mMaxPreviewFps(0),
mSessionType(0),
mInternalReconfigure(0),
mRequestCount(0),
mResultErrorCount(0),
- mDeviceError(0) {}
+ mDeviceError(0),
+ mVideoStabilizationMode(-1) {}
status_t CameraSessionStats::readFromParcel(const android::Parcel* parcel) {
if (parcel == NULL) {
@@ -295,6 +335,12 @@ status_t CameraSessionStats::readFromParcel(const android::Parcel* parcel) {
return err;
}
+ float maxPreviewFps;
+ if ((err = parcel->readFloat(&maxPreviewFps)) != OK) {
+ ALOGE("%s: Failed to read maxPreviewFps from parcel", __FUNCTION__);
+ return err;
+ }
+
int32_t sessionType;
if ((err = parcel->readInt32(&sessionType)) != OK) {
ALOGE("%s: Failed to read session type from parcel", __FUNCTION__);
@@ -331,6 +377,18 @@ status_t CameraSessionStats::readFromParcel(const android::Parcel* parcel) {
return err;
}
+ String16 userTag;
+ if ((err = parcel->readString16(&userTag)) != OK) {
+ ALOGE("%s: Failed to read user tag!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ int32_t videoStabilizationMode;
+ if ((err = parcel->readInt32(&videoStabilizationMode)) != OK) {
+ ALOGE("%s: Failed to read video stabilization mode from parcel", __FUNCTION__);
+ return err;
+ }
+
mCameraId = id;
mFacing = facing;
mNewCameraState = newCameraState;
@@ -338,12 +396,15 @@ status_t CameraSessionStats::readFromParcel(const android::Parcel* parcel) {
mApiLevel = apiLevel;
mIsNdk = isNdk;
mLatencyMs = latencyMs;
+ mMaxPreviewFps = maxPreviewFps;
mSessionType = sessionType;
mInternalReconfigure = internalReconfigure;
mRequestCount = requestCount;
mResultErrorCount = resultErrorCount;
mDeviceError = deviceError;
mStreamStats = std::move(streamStats);
+ mUserTag = userTag;
+ mVideoStabilizationMode = videoStabilizationMode;
return OK;
}
@@ -391,6 +452,11 @@ status_t CameraSessionStats::writeToParcel(android::Parcel* parcel) const {
return err;
}
+ if ((err = parcel->writeFloat(mMaxPreviewFps)) != OK) {
+ ALOGE("%s: Failed to write maxPreviewFps!", __FUNCTION__);
+ return err;
+ }
+
if ((err = parcel->writeInt32(mSessionType)) != OK) {
ALOGE("%s: Failed to write session type!", __FUNCTION__);
return err;
@@ -421,6 +487,15 @@ status_t CameraSessionStats::writeToParcel(android::Parcel* parcel) const {
return err;
}
+ if ((err = parcel->writeString16(mUserTag)) != OK) {
+ ALOGE("%s: Failed to write user tag!", __FUNCTION__);
+ return err;
+ }
+
+ if ((err = parcel->writeInt32(mVideoStabilizationMode)) != OK) {
+ ALOGE("%s: Failed to write video stabilization mode!", __FUNCTION__);
+ return err;
+ }
return OK;
}
diff --git a/camera/CameraUtils.cpp b/camera/CameraUtils.cpp
index af3c49276265d92036a13032dd34700d70417b2d..34737806eb34eaa8cf39f72ba3bbd4a014119141 100644
--- a/camera/CameraUtils.cpp
+++ b/camera/CameraUtils.cpp
@@ -18,6 +18,7 @@
//#define LOG_NDEBUG 0
#include
+#include <camera/camera2/OutputConfiguration.h>
#include
#include
@@ -31,7 +32,7 @@ namespace android {
const char *kCameraServiceDisabledProperty = "config.disable_cameraservice";
status_t CameraUtils::getRotationTransform(const CameraMetadata& staticInfo,
- /*out*/int32_t* transform) {
+ int mirrorMode, /*out*/int32_t* transform) {
ALOGV("%s", __FUNCTION__);
if (transform == NULL) {
@@ -55,9 +56,18 @@ status_t CameraUtils::getRotationTransform(const CameraMetadata& staticInfo,
int32_t& flags = *transform;
- bool mirror = (entryFacing.data.u8[0] == ANDROID_LENS_FACING_FRONT);
+ int32_t mirror = 0;
+ if (mirrorMode == OutputConfiguration::MIRROR_MODE_AUTO &&
+ entryFacing.data.u8[0] == ANDROID_LENS_FACING_FRONT) {
+ mirror = NATIVE_WINDOW_TRANSFORM_FLIP_H;
+ } else if (mirrorMode == OutputConfiguration::MIRROR_MODE_H) {
+ mirror = NATIVE_WINDOW_TRANSFORM_FLIP_H;
+ } else if (mirrorMode == OutputConfiguration::MIRROR_MODE_V) {
+ mirror = NATIVE_WINDOW_TRANSFORM_FLIP_V;
+ }
+
int orientation = entry.data.i32[0];
- if (!mirror) {
+ if (mirror == 0) {
switch (orientation) {
case 0:
flags = 0;
@@ -77,25 +87,25 @@ status_t CameraUtils::getRotationTransform(const CameraMetadata& staticInfo,
return INVALID_OPERATION;
}
} else {
- // Front camera needs to be horizontally flipped for mirror-like behavior.
+ // - Front camera needs to be horizontally flipped for mirror-like behavior.
+ // - Application-specified mirroring needs to be applied.
// Note: Flips are applied before rotates; using XOR here as some of these flags are
// composed in terms of other flip/rotation flags, and are not bitwise-ORable.
switch (orientation) {
case 0:
- flags = NATIVE_WINDOW_TRANSFORM_FLIP_H;
+ flags = mirror;
break;
case 90:
- flags = NATIVE_WINDOW_TRANSFORM_FLIP_H ^
+ flags = mirror ^
NATIVE_WINDOW_TRANSFORM_ROT_270;
break;
case 180:
- flags = NATIVE_WINDOW_TRANSFORM_FLIP_H ^
+ flags = mirror ^
NATIVE_WINDOW_TRANSFORM_ROT_180;
break;
case 270:
- flags = NATIVE_WINDOW_TRANSFORM_FLIP_H ^
+ flags = mirror ^
NATIVE_WINDOW_TRANSFORM_ROT_90;
-
break;
default:
ALOGE("%s: Invalid HAL android.sensor.orientation value: %d",
diff --git a/camera/OWNERS b/camera/OWNERS
index 2a1d5237e7cfeeb1db6eff897832f854e562e67b..385c163e5af0056518c96c03ba1921076cb3ccbf 100644
--- a/camera/OWNERS
+++ b/camera/OWNERS
@@ -1,4 +1,3 @@
-
# Bug component: 41727
etalvala@google.com
arakesh@google.com
diff --git a/camera/aidl/android/hardware/ICameraService.aidl b/camera/aidl/android/hardware/ICameraService.aidl
index 78a77d4ea1e0bc38109223949306beec23f6ea78..1e748c7540006d804e73fee7e5966238341e535e 100644
--- a/camera/aidl/android/hardware/ICameraService.aidl
+++ b/camera/aidl/android/hardware/ICameraService.aidl
@@ -173,6 +173,13 @@ interface ICameraService
void setTorchMode(String cameraId, boolean enabled, IBinder clientBinder);
+ // Change the brightness level of the flash unit associated with cameraId to strengthLevel.
+ // If the torch is in OFF state and strengthLevel > 0 then the torch will also be turned ON.
+ void turnOnTorchWithStrengthLevel(String cameraId, int strengthLevel, IBinder clientBinder);
+
+ // Get the brightness level of the flash unit associated with cameraId.
+ int getTorchStrengthLevel(String cameraId);
+
/**
* Notify the camera service of a system event. Should only be called from system_server.
*
@@ -180,6 +187,8 @@ interface ICameraService
*/
const int EVENT_NONE = 0;
const int EVENT_USER_SWITCHED = 1; // The argument is the set of new foreground user IDs.
+ const int EVENT_USB_DEVICE_ATTACHED = 2; // The argument is the deviceId and vendorId
+ const int EVENT_USB_DEVICE_DETACHED = 3; // The argument is the deviceId and vendorId
oneway void notifySystemEvent(int eventId, in int[] args);
/**
diff --git a/camera/aidl/android/hardware/ICameraServiceListener.aidl b/camera/aidl/android/hardware/ICameraServiceListener.aidl
index c54813c8b28164ce5a458282eda0630148c592f5..5f17f5be5919a64b3b22261173096f86b38c64a2 100644
--- a/camera/aidl/android/hardware/ICameraServiceListener.aidl
+++ b/camera/aidl/android/hardware/ICameraServiceListener.aidl
@@ -83,6 +83,8 @@ interface ICameraServiceListener
oneway void onTorchStatusChanged(int status, String cameraId);
+ oneway void onTorchStrengthLevelChanged(String cameraId, int newTorchStrength);
+
/**
* Notify registered clients about camera access priority changes.
* Clients which were previously unable to open a certain camera device
diff --git a/camera/aidl/android/hardware/ICameraServiceProxy.aidl b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
index f5d0120042c7edb892619b7a28bc6a45bb9f106e..88783fbdebe646aa97e3b7214acdc72b44a38da0 100644
--- a/camera/aidl/android/hardware/ICameraServiceProxy.aidl
+++ b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
@@ -44,4 +44,9 @@ interface ICameraServiceProxy
* {@link android.hardware.camera2.CameraMetadata#SCALER_ROTATE_AND_CROP_270}).
*/
int getRotateAndCropOverride(String packageName, int lensFacing, int userId);
+
+ /**
+ * Checks if the camera has been disabled via device policy.
+ */
+ boolean isCameraDisabled();
}
diff --git a/camera/camera2/CaptureRequest.cpp b/camera/camera2/CaptureRequest.cpp
index ebc09d74299049ac90086bc7e9b899e922845c80..7a8a4bae411287d19bca7880bb3b94fe21b59f89 100644
--- a/camera/camera2/CaptureRequest.cpp
+++ b/camera/camera2/CaptureRequest.cpp
@@ -146,6 +146,20 @@ status_t CaptureRequest::readFromParcel(const android::Parcel* parcel) {
mSurfaceIdxList.push_back(surfaceIdx);
}
+ int32_t hasUserTag;
+ if ((err = parcel->readInt32(&hasUserTag)) != OK) {
+ ALOGE("%s: Failed to read user tag availability flag", __FUNCTION__);
+ return BAD_VALUE;
+ }
+ if (hasUserTag) {
+ String16 userTag;
+ if ((err = parcel->readString16(&userTag)) != OK) {
+ ALOGE("%s: Failed to read user tag!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+ mUserTag = String8(userTag).c_str();
+ }
+
return OK;
}
@@ -213,6 +227,14 @@ status_t CaptureRequest::writeToParcel(android::Parcel* parcel) const {
return err;
}
}
+
+ if (mUserTag.empty()) {
+ parcel->writeInt32(0);
+ } else {
+ parcel->writeInt32(1);
+ parcel->writeString16(String16(mUserTag.c_str()));
+ }
+
return OK;
}
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
index 2bccd870b8a7cf9c28595748387f89cfab35e558..11d4960dbc3987c62a2a212cd214bb2bf41b9f27 100644
--- a/camera/camera2/OutputConfiguration.cpp
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -23,6 +23,7 @@
#include
#include
#include
+#include
#include
namespace android {
const std::vector<int32_t> &OutputConfiguration::getSensorPixelModesUsed() const
return mSensorPixelModesUsed;
}
+int64_t OutputConfiguration::getDynamicRangeProfile() const {
+ return mDynamicRangeProfile;
+}
+
+int64_t OutputConfiguration::getStreamUseCase() const {
+ return mStreamUseCase;
+}
+
+int OutputConfiguration::getTimestampBase() const {
+ return mTimestampBase;
+}
+
+int OutputConfiguration::getMirrorMode() const {
+ return mMirrorMode;
+}
+
OutputConfiguration::OutputConfiguration() :
mRotation(INVALID_ROTATION),
mSurfaceSetID(INVALID_SET_ID),
@@ -84,7 +101,11 @@ OutputConfiguration::OutputConfiguration() :
mHeight(0),
mIsDeferred(false),
mIsShared(false),
- mIsMultiResolution(false) {
+ mIsMultiResolution(false),
+ mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+ mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
+ mTimestampBase(TIMESTAMP_BASE_DEFAULT),
+ mMirrorMode(MIRROR_MODE_AUTO) {
}
OutputConfiguration::OutputConfiguration(const android::Parcel& parcel) :
@@ -165,6 +186,30 @@ status_t OutputConfiguration::readFromParcel(const android::Parcel* parcel) {
ALOGE("%s: Failed to read sensor pixel mode(s) from parcel", __FUNCTION__);
return err;
}
+ int64_t dynamicProfile;
+ if ((err = parcel->readInt64(&dynamicProfile)) != OK) {
+ ALOGE("%s: Failed to read surface dynamic range profile flag from parcel", __FUNCTION__);
+ return err;
+ }
+
+ int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
+ if ((err = parcel->readInt64(&streamUseCase)) != OK) {
+ ALOGE("%s: Failed to read stream use case from parcel", __FUNCTION__);
+ return err;
+ }
+
+ int timestampBase = TIMESTAMP_BASE_DEFAULT;
+ if ((err = parcel->readInt32(&timestampBase)) != OK) {
+ ALOGE("%s: Failed to read timestamp base from parcel", __FUNCTION__);
+ return err;
+ }
+
+ int mirrorMode = MIRROR_MODE_AUTO;
+ if ((err = parcel->readInt32(&mirrorMode)) != OK) {
+ ALOGE("%s: Failed to read mirroring mode from parcel", __FUNCTION__);
+ return err;
+ }
+
mRotation = rotation;
mSurfaceSetID = setID;
mSurfaceType = surfaceType;
@@ -173,6 +218,9 @@ status_t OutputConfiguration::readFromParcel(const android::Parcel* parcel) {
mIsDeferred = isDeferred != 0;
mIsShared = isShared != 0;
mIsMultiResolution = isMultiResolution != 0;
+ mStreamUseCase = streamUseCase;
+ mTimestampBase = timestampBase;
+ mMirrorMode = mirrorMode;
for (auto& surface : surfaceShims) {
ALOGV("%s: OutputConfiguration: %p, name %s", __FUNCTION__,
surface.graphicBufferProducer.get(),
@@ -181,10 +229,14 @@ status_t OutputConfiguration::readFromParcel(const android::Parcel* parcel) {
}
mSensorPixelModesUsed = std::move(sensorPixelModesUsed);
+ mDynamicRangeProfile = dynamicProfile;
ALOGV("%s: OutputConfiguration: rotation = %d, setId = %d, surfaceType = %d,"
- " physicalCameraId = %s, isMultiResolution = %d", __FUNCTION__, mRotation,
- mSurfaceSetID, mSurfaceType, String8(mPhysicalCameraId).string(), mIsMultiResolution);
+ " physicalCameraId = %s, isMultiResolution = %d, streamUseCase = %" PRId64
+ ", timestampBase = %d, mirrorMode = %d",
+ __FUNCTION__, mRotation, mSurfaceSetID, mSurfaceType,
+ String8(mPhysicalCameraId).string(), mIsMultiResolution, mStreamUseCase, timestampBase,
+ mMirrorMode);
return err;
}
@@ -199,6 +251,10 @@ OutputConfiguration::OutputConfiguration(sp<IGraphicBufferProducer>& gbp, int ro
mIsShared = isShared;
mPhysicalCameraId = physicalId;
mIsMultiResolution = false;
+ mDynamicRangeProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
+ mStreamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
+ mTimestampBase = TIMESTAMP_BASE_DEFAULT;
+ mMirrorMode = MIRROR_MODE_AUTO;
}
OutputConfiguration::OutputConfiguration(
@@ -207,7 +263,11 @@ OutputConfiguration::OutputConfiguration(
int width, int height, bool isShared)
: mGbps(gbps), mRotation(rotation), mSurfaceSetID(surfaceSetID), mSurfaceType(surfaceType),
mWidth(width), mHeight(height), mIsDeferred(false), mIsShared(isShared),
- mPhysicalCameraId(physicalCameraId), mIsMultiResolution(false) { }
+ mPhysicalCameraId(physicalCameraId), mIsMultiResolution(false),
+ mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+ mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
+ mTimestampBase(TIMESTAMP_BASE_DEFAULT),
+ mMirrorMode(MIRROR_MODE_AUTO) { }
status_t OutputConfiguration::writeToParcel(android::Parcel* parcel) const {
@@ -254,6 +314,18 @@ status_t OutputConfiguration::writeToParcel(android::Parcel* parcel) const {
err = parcel->writeParcelableVector(mSensorPixelModesUsed);
if (err != OK) return err;
+ err = parcel->writeInt64(mDynamicRangeProfile);
+ if (err != OK) return err;
+
+ err = parcel->writeInt64(mStreamUseCase);
+ if (err != OK) return err;
+
+ err = parcel->writeInt32(mTimestampBase);
+ if (err != OK) return err;
+
+ err = parcel->writeInt32(mMirrorMode);
+ if (err != OK) return err;
+
return OK;
}
diff --git a/camera/cameraserver/Android.bp b/camera/cameraserver/Android.bp
index 8ca892001251fcd5ac52ea36df3f0fd3549e16a5..094a3c19854f94330ab703d806387440edad5563 100644
--- a/camera/cameraserver/Android.bp
+++ b/camera/cameraserver/Android.bp
@@ -43,6 +43,7 @@ cc_binary {
"android.hardware.camera.provider@2.5",
"android.hardware.camera.provider@2.6",
"android.hardware.camera.provider@2.7",
+ "android.hardware.camera.provider-V1-ndk",
"android.hardware.camera.device@1.0",
"android.hardware.camera.device@3.2",
"android.hardware.camera.device@3.4",
diff --git a/camera/include/camera/CameraBase.h b/camera/include/camera/CameraBase.h
index e156994798e10df296c37f5a5594f4f0030192f1..8e5396863355a3d95c553b2a95dc7987d51e27c2 100644
--- a/camera/include/camera/CameraBase.h
+++ b/camera/include/camera/CameraBase.h
@@ -85,11 +85,17 @@ struct CameraStatus : public android::Parcelable {
*/
std::vector<String8> unavailablePhysicalIds;
+ /**
+ * Client package name if camera is open, otherwise not applicable
+ */
+ String8 clientPackage;
+
virtual status_t writeToParcel(android::Parcel* parcel) const;
virtual status_t readFromParcel(const android::Parcel* parcel);
- CameraStatus(String8 id, int32_t s, const std::vector<String8>& unavailSubIds) :
- cameraId(id), status(s), unavailablePhysicalIds(unavailSubIds) {}
+ CameraStatus(String8 id, int32_t s, const std::vector<String8>& unavailSubIds,
+ const String8& clientPkg) : cameraId(id), status(s),
+ unavailablePhysicalIds(unavailSubIds), clientPackage(clientPkg) {}
CameraStatus() : status(ICameraServiceListener::STATUS_PRESENT) {}
};
diff --git a/camera/include/camera/CameraSessionStats.h b/camera/include/camera/CameraSessionStats.h
index c398acaa02e4b65122b4537f1b7dba385293c58a..aaa88b2b08dfaca7f0a0cb70f17a75c57ea948f1 100644
--- a/camera/include/camera/CameraSessionStats.h
+++ b/camera/include/camera/CameraSessionStats.h
@@ -19,6 +19,8 @@
#include
+#include
+
namespace android {
namespace hardware {
@@ -35,6 +37,7 @@ public:
int mWidth;
int mHeight;
int mFormat;
+ float mMaxPreviewFps;
int mDataSpace;
int64_t mUsage;
@@ -60,16 +63,26 @@ public:
// size(mHistogramBins) + 1 = size(mHistogramCounts)
std::vector<int64_t> mHistogramCounts;
+ // Dynamic range profile
+ int64_t mDynamicRangeProfile;
+ // Stream use case
+ int64_t mStreamUseCase;
+
CameraStreamStats() :
- mWidth(0), mHeight(0), mFormat(0), mDataSpace(0), mUsage(0),
+ mWidth(0), mHeight(0), mFormat(0), mMaxPreviewFps(0), mDataSpace(0), mUsage(0),
mRequestCount(0), mErrorCount(0), mStartLatencyMs(0),
- mMaxHalBuffers(0), mMaxAppBuffers(0), mHistogramType(HISTOGRAM_TYPE_UNKNOWN) {}
- CameraStreamStats(int width, int height, int format, int dataSpace, int64_t usage,
- int maxHalBuffers, int maxAppBuffers)
- : mWidth(width), mHeight(height), mFormat(format), mDataSpace(dataSpace),
- mUsage(usage), mRequestCount(0), mErrorCount(0), mStartLatencyMs(0),
- mMaxHalBuffers(maxHalBuffers), mMaxAppBuffers(maxAppBuffers),
- mHistogramType(HISTOGRAM_TYPE_UNKNOWN) {}
+ mMaxHalBuffers(0), mMaxAppBuffers(0), mHistogramType(HISTOGRAM_TYPE_UNKNOWN),
+ mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+ mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {}
+ CameraStreamStats(int width, int height, int format, float maxPreviewFps, int dataSpace,
+ int64_t usage, int maxHalBuffers, int maxAppBuffers, int dynamicRangeProfile,
+ int streamUseCase)
+ : mWidth(width), mHeight(height), mFormat(format), mMaxPreviewFps(maxPreviewFps),
+ mDataSpace(dataSpace), mUsage(usage), mRequestCount(0), mErrorCount(0),
+ mStartLatencyMs(0), mMaxHalBuffers(maxHalBuffers), mMaxAppBuffers(maxAppBuffers),
+ mHistogramType(HISTOGRAM_TYPE_UNKNOWN),
+ mDynamicRangeProfile(dynamicRangeProfile),
+ mStreamUseCase(streamUseCase) {}
virtual status_t readFromParcel(const android::Parcel* parcel) override;
virtual status_t writeToParcel(android::Parcel* parcel) const override;
@@ -111,6 +124,7 @@ public:
bool mIsNdk;
// latency in ms for camera open, close, or session creation.
int mLatencyMs;
+ float mMaxPreviewFps;
// Session info and statistics
int mSessionType;
@@ -122,6 +136,8 @@ public:
// Whether the device runs into an error state
bool mDeviceError;
std::vector<CameraStreamStats> mStreamStats;
+ String16 mUserTag;
+ int mVideoStabilizationMode;
// Constructors
CameraSessionStats();
diff --git a/camera/include/camera/CameraUtils.h b/camera/include/camera/CameraUtils.h
index a397ccdd5e2f83635872f6eaac590b7ad65e92a9..31d25e79ea8d538cce233b525e3e843881574c58 100644
--- a/camera/include/camera/CameraUtils.h
+++ b/camera/include/camera/CameraUtils.h
@@ -37,10 +37,13 @@ class CameraUtils {
* metadata. This is based on the sensor orientation and lens facing
* attributes of the camera device.
*
+ * If mirrorMode is not AUTO, it will be used to override the lens
+ * facing based mirror.
+ *
* Returns OK on success, or a negative error code.
*/
static status_t getRotationTransform(const CameraMetadata& staticInfo,
- /*out*/int32_t* transform);
+ int mirrorMode, /*out*/int32_t* transform);
/**
* Check if the image data is VideoNativeHandleMetadata, that contains a native handle.
diff --git a/camera/include/camera/camera2/CaptureRequest.h b/camera/include/camera/camera2/CaptureRequest.h
index 506abab70474aec771059af0cb99117814b28475..28dbc7cb289be683208f0817fa6363ac01f4816c 100644
--- a/camera/include/camera/camera2/CaptureRequest.h
+++ b/camera/include/camera/camera2/CaptureRequest.h
@@ -63,6 +63,8 @@ struct CaptureRequest : public Parcelable {
void* mContext; // arbitrary user context from NDK apps, null for java apps
+ std::string mUserTag; // The string representation of object passed into setTag.
+
/**
* Keep impl up-to-date with CaptureRequest.java in frameworks/base
*/
diff --git a/camera/include/camera/camera2/OutputConfiguration.h b/camera/include/camera/camera2/OutputConfiguration.h
index f80ed3a8e6787fc1b571db003bd49e025439681b..b842885a6b77a427807cadf44b5ff67d918f3837 100644
--- a/camera/include/camera/camera2/OutputConfiguration.h
+++ b/camera/include/camera/camera2/OutputConfiguration.h
@@ -38,16 +38,34 @@ public:
SURFACE_TYPE_SURFACE_VIEW = 0,
SURFACE_TYPE_SURFACE_TEXTURE = 1
};
+ enum TimestampBaseType {
+ TIMESTAMP_BASE_DEFAULT = 0,
+ TIMESTAMP_BASE_SENSOR = 1,
+ TIMESTAMP_BASE_MONOTONIC = 2,
+ TIMESTAMP_BASE_REALTIME = 3,
+ TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED = 4
+ };
+ enum MirrorModeType {
+ MIRROR_MODE_AUTO = 0,
+ MIRROR_MODE_NONE = 1,
+ MIRROR_MODE_H = 2,
+ MIRROR_MODE_V = 3,
+ };
+
const std::vector<sp<IGraphicBufferProducer>>& getGraphicBufferProducers() const;
int getRotation() const;
int getSurfaceSetID() const;
int getSurfaceType() const;
int getWidth() const;
int getHeight() const;
+ int64_t getDynamicRangeProfile() const;
bool isDeferred() const;
bool isShared() const;
String16 getPhysicalCameraId() const;
bool isMultiResolution() const;
+ int64_t getStreamUseCase() const;
+ int getTimestampBase() const;
+ int getMirrorMode() const;
// set of sensor pixel mode resolutions allowed {MAX_RESOLUTION, DEFAULT_MODE};
const std::vector<int32_t>& getSensorPixelModesUsed() const;
@@ -89,7 +107,11 @@ public:
gbpsEqual(other) &&
mPhysicalCameraId == other.mPhysicalCameraId &&
mIsMultiResolution == other.mIsMultiResolution &&
- sensorPixelModesUsedEqual(other));
+ sensorPixelModesUsedEqual(other) &&
+ mDynamicRangeProfile == other.mDynamicRangeProfile &&
+ mStreamUseCase == other.mStreamUseCase &&
+ mTimestampBase == other.mTimestampBase &&
+ mMirrorMode == other.mMirrorMode);
}
bool operator != (const OutputConfiguration& other) const {
return !(*this == other);
@@ -126,6 +148,18 @@ public:
if (!sensorPixelModesUsedEqual(other)) {
return sensorPixelModesUsedLessThan(other);
}
+ if (mDynamicRangeProfile != other.mDynamicRangeProfile) {
+ return mDynamicRangeProfile < other.mDynamicRangeProfile;
+ }
+ if (mStreamUseCase != other.mStreamUseCase) {
+ return mStreamUseCase < other.mStreamUseCase;
+ }
+ if (mTimestampBase != other.mTimestampBase) {
+ return mTimestampBase < other.mTimestampBase;
+ }
+ if (mMirrorMode != other.mMirrorMode) {
+ return mMirrorMode < other.mMirrorMode;
+ }
return gbpsLessThan(other);
}
@@ -150,6 +184,10 @@ private:
String16 mPhysicalCameraId;
bool mIsMultiResolution;
std::vector<int32_t> mSensorPixelModesUsed;
+ int64_t mDynamicRangeProfile;
+ int64_t mStreamUseCase;
+ int mTimestampBase;
+ int mMirrorMode;
};
} // namespace params
} // namespace camera2
diff --git a/camera/ndk/NdkCameraCaptureSession.cpp b/camera/ndk/NdkCameraCaptureSession.cpp
index 1ac8482ac41c3599f4afeea36717b7f07611517e..9c98778fe45f8055e829f20771f3bf72a05754b6 100644
--- a/camera/ndk/NdkCameraCaptureSession.cpp
+++ b/camera/ndk/NdkCameraCaptureSession.cpp
@@ -29,6 +29,7 @@
#include "impl/ACameraCaptureSession.h"
#include "impl/ACameraCaptureSession.inc"
+#include "NdkCameraCaptureSession.inc"
using namespace android;
@@ -72,22 +73,16 @@ camera_status_t ACameraCaptureSession_capture(
int numRequests, ACaptureRequest** requests,
/*optional*/int* captureSequenceId) {
ATRACE_CALL();
- if (session == nullptr || requests == nullptr || numRequests < 1) {
- ALOGE("%s: Error: invalid input: session %p, numRequest %d, requests %p",
- __FUNCTION__, session, numRequests, requests);
- return ACAMERA_ERROR_INVALID_PARAMETER;
- }
-
- if (session->isClosed()) {
- ALOGE("%s: session %p is already closed", __FUNCTION__, session);
- if (captureSequenceId != nullptr) {
- *captureSequenceId = CAPTURE_SEQUENCE_ID_NONE;
- }
- return ACAMERA_ERROR_SESSION_CLOSED;
- }
+ return captureTemplate(session, cbs, numRequests, requests, captureSequenceId);
+}
- return session->capture(
- cbs, numRequests, requests, captureSequenceId);
+EXPORT
+camera_status_t ACameraCaptureSession_captureV2(
+ ACameraCaptureSession* session, /*optional*/ACameraCaptureSession_captureCallbacksV2* cbs,
+ int numRequests, ACaptureRequest** requests,
+ /*optional*/int* captureSequenceId) {
+ ATRACE_CALL();
+ return captureTemplate(session, cbs, numRequests, requests, captureSequenceId);
}
EXPORT
@@ -97,22 +92,26 @@ camera_status_t ACameraCaptureSession_logicalCamera_capture(
int numRequests, ACaptureRequest** requests,
/*optional*/int* captureSequenceId) {
ATRACE_CALL();
- if (session == nullptr || requests == nullptr || numRequests < 1) {
- ALOGE("%s: Error: invalid input: session %p, numRequest %d, requests %p",
- __FUNCTION__, session, numRequests, requests);
- return ACAMERA_ERROR_INVALID_PARAMETER;
- }
+ return captureTemplate(session, lcbs, numRequests, requests, captureSequenceId);
+}
- if (session->isClosed()) {
- ALOGE("%s: session %p is already closed", __FUNCTION__, session);
- if (captureSequenceId) {
- *captureSequenceId = CAPTURE_SEQUENCE_ID_NONE;
- }
- return ACAMERA_ERROR_SESSION_CLOSED;
- }
+EXPORT
+camera_status_t ACameraCaptureSession_logicalCamera_captureV2(
+ ACameraCaptureSession* session,
+ /*optional*/ACameraCaptureSession_logicalCamera_captureCallbacksV2* lcbs,
+ int numRequests, ACaptureRequest** requests,
+ /*optional*/int* captureSequenceId) {
+ ATRACE_CALL();
+ return captureTemplate(session, lcbs, numRequests, requests, captureSequenceId);
+}
- return session->capture(
- lcbs, numRequests, requests, captureSequenceId);
+EXPORT
+camera_status_t ACameraCaptureSession_setRepeatingRequestV2(
+ ACameraCaptureSession* session, /*optional*/ACameraCaptureSession_captureCallbacksV2* cbs,
+ int numRequests, ACaptureRequest** requests,
+ /*optional*/int* captureSequenceId) {
+ ATRACE_CALL();
+ return setRepeatingRequestTemplate(session, cbs, numRequests, requests, captureSequenceId);
}
EXPORT
@@ -121,23 +120,10 @@ camera_status_t ACameraCaptureSession_setRepeatingRequest(
int numRequests, ACaptureRequest** requests,
/*optional*/int* captureSequenceId) {
ATRACE_CALL();
- if (session == nullptr || requests == nullptr || numRequests < 1) {
- ALOGE("%s: Error: invalid input: session %p, numRequest %d, requests %p",
- __FUNCTION__, session, numRequests, requests);
- return ACAMERA_ERROR_INVALID_PARAMETER;
- }
-
- if (session->isClosed()) {
- ALOGE("%s: session %p is already closed", __FUNCTION__, session);
- if (captureSequenceId) {
- *captureSequenceId = CAPTURE_SEQUENCE_ID_NONE;
- }
- return ACAMERA_ERROR_SESSION_CLOSED;
- }
-
- return session->setRepeatingRequest(cbs, numRequests, requests, captureSequenceId);
+ return setRepeatingRequestTemplate(session, cbs, numRequests, requests, captureSequenceId);
}
+
EXPORT
camera_status_t ACameraCaptureSession_logicalCamera_setRepeatingRequest(
ACameraCaptureSession* session,
@@ -145,21 +131,18 @@ camera_status_t ACameraCaptureSession_logicalCamera_setRepeatingRequest(
int numRequests, ACaptureRequest** requests,
/*optional*/int* captureSequenceId) {
ATRACE_CALL();
- if (session == nullptr || requests == nullptr || numRequests < 1) {
- ALOGE("%s: Error: invalid input: session %p, numRequest %d, requests %p",
- __FUNCTION__, session, numRequests, requests);
- return ACAMERA_ERROR_INVALID_PARAMETER;
- }
+ return setRepeatingRequestTemplate(session, lcbs, numRequests, requests, captureSequenceId);
+}
- if (session->isClosed()) {
- ALOGE("%s: session %p is already closed", __FUNCTION__, session);
- if (captureSequenceId) {
- *captureSequenceId = CAPTURE_SEQUENCE_ID_NONE;
- }
- return ACAMERA_ERROR_SESSION_CLOSED;
- }
- return session->setRepeatingRequest(lcbs, numRequests, requests, captureSequenceId);
+EXPORT
+camera_status_t ACameraCaptureSession_logicalCamera_setRepeatingRequestV2(
+ ACameraCaptureSession* session,
+ /*optional*/ACameraCaptureSession_logicalCamera_captureCallbacksV2* lcbs,
+ int numRequests, ACaptureRequest** requests,
+ /*optional*/int* captureSequenceId) {
+ ATRACE_CALL();
+ return setRepeatingRequestTemplate(session, lcbs, numRequests, requests, captureSequenceId);
}
EXPORT
diff --git a/camera/ndk/NdkCameraCaptureSession.inc b/camera/ndk/NdkCameraCaptureSession.inc
new file mode 100644
index 0000000000000000000000000000000000000000..258e20d34eb67eed2f765214f28699e36ac600e2
--- /dev/null
+++ b/camera/ndk/NdkCameraCaptureSession.inc
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "impl/ACameraCaptureSession.h"
+
+#include <camera/NdkCameraCaptureSession.h>
+
+using namespace android;
+
+template <class CallbackType>
+camera_status_t captureTemplate(
+ ACameraCaptureSession* session,
+ /*optional*/CallbackType* cbs,
+ int numRequests, ACaptureRequest** requests,
+ /*optional*/int* captureSequenceId) {
+ ATRACE_CALL();
+ if (session == nullptr || requests == nullptr || numRequests < 1) {
+ ALOGE("%s: Error: invalid input: session %p, numRequest %d, requests %p",
+ __FUNCTION__, session, numRequests, requests);
+ return ACAMERA_ERROR_INVALID_PARAMETER;
+ }
+
+ if (session->isClosed()) {
+ ALOGE("%s: session %p is already closed", __FUNCTION__, session);
+ if (captureSequenceId) {
+ *captureSequenceId = CAPTURE_SEQUENCE_ID_NONE;
+ }
+ return ACAMERA_ERROR_SESSION_CLOSED;
+ }
+
+ return session->capture(
+ cbs, numRequests, requests, captureSequenceId);
+}
+
+template <class CallbackType>
+camera_status_t setRepeatingRequestTemplate(
+ ACameraCaptureSession* session,
+ /*optional*/CallbackType* cbs,
+ int numRequests, ACaptureRequest** requests,
+ /*optional*/int* captureSequenceId) {
+ ATRACE_CALL();
+ if (session == nullptr || requests == nullptr || numRequests < 1) {
+ ALOGE("%s: Error: invalid input: session %p, numRequest %d, requests %p",
+ __FUNCTION__, session, numRequests, requests);
+ return ACAMERA_ERROR_INVALID_PARAMETER;
+ }
+
+ if (session->isClosed()) {
+ ALOGE("%s: session %p is already closed", __FUNCTION__, session);
+ if (captureSequenceId) {
+ *captureSequenceId = CAPTURE_SEQUENCE_ID_NONE;
+ }
+ return ACAMERA_ERROR_SESSION_CLOSED;
+ }
+
+ return session->setRepeatingRequest(cbs, numRequests, requests, captureSequenceId);
+}
diff --git a/camera/ndk/impl/ACameraDevice.cpp b/camera/ndk/impl/ACameraDevice.cpp
index dd652c70c52ea4661690abafdc8af6337bd8834d..7997768e6e5809d7c06dbbf1295fdbb86705b5e2 100644
--- a/camera/ndk/impl/ACameraDevice.cpp
+++ b/camera/ndk/impl/ACameraDevice.cpp
@@ -26,8 +26,6 @@
#include "ACaptureRequest.h"
#include "ACameraCaptureSession.h"
-#include "ACameraCaptureSession.inc"
-
ACameraDevice::~ACameraDevice() {
mDevice->stopLooperAndDisconnect();
}
@@ -913,6 +911,7 @@ void CameraDevice::CallbackHandler::onMessageReceived(
case kWhatOnError:
case kWhatSessionStateCb:
case kWhatCaptureStart:
+ case kWhatCaptureStart2:
case kWhatCaptureResult:
case kWhatLogicalCaptureResult:
case kWhatCaptureFail:
@@ -985,6 +984,7 @@ void CameraDevice::CallbackHandler::onMessageReceived(
}
case kWhatSessionStateCb:
case kWhatCaptureStart:
+ case kWhatCaptureStart2:
case kWhatCaptureResult:
case kWhatLogicalCaptureResult:
case kWhatCaptureFail:
@@ -1004,6 +1004,7 @@ void CameraDevice::CallbackHandler::onMessageReceived(
sp<CaptureRequest> requestSp = nullptr;
switch (msg->what()) {
case kWhatCaptureStart:
+ case kWhatCaptureStart2:
case kWhatCaptureResult:
case kWhatLogicalCaptureResult:
case kWhatCaptureFail:
@@ -1055,6 +1056,35 @@ void CameraDevice::CallbackHandler::onMessageReceived(
freeACaptureRequest(request);
break;
}
+ case kWhatCaptureStart2:
+ {
+ ACameraCaptureSession_captureCallback_startV2 onStart2;
+ found = msg->findPointer(kCallbackFpKey, (void**) &onStart2);
+ if (!found) {
+ ALOGE("%s: Cannot find capture startV2 callback!", __FUNCTION__);
+ return;
+ }
+ if (onStart2 == nullptr) {
+ return;
+ }
+ int64_t timestamp;
+ found = msg->findInt64(kTimeStampKey, &timestamp);
+ if (!found) {
+ ALOGE("%s: Cannot find timestamp!", __FUNCTION__);
+ return;
+ }
+ int64_t frameNumber;
+ found = msg->findInt64(kFrameNumberKey, &frameNumber);
+ if (!found) {
+ ALOGE("%s: Cannot find frame number!", __FUNCTION__);
+ return;
+ }
+
+ ACaptureRequest* request = allocateACaptureRequest(requestSp, mId);
+ (*onStart2)(context, session.get(), request, timestamp, frameNumber);
+ freeACaptureRequest(request);
+ break;
+ }
case kWhatCaptureResult:
{
ACameraCaptureSession_captureCallback_result onResult;
@@ -1285,7 +1315,8 @@ CameraDevice::CallbackHolder::CallbackHolder(
ACameraCaptureSession_captureCallbacks* cbs) :
mSession(session), mRequests(requests),
mIsRepeating(isRepeating),
- mIsLogicalCameraCallback(false) {
+ mIsLogicalCameraCallback(false),
+ mIs2Callback(false) {
initCaptureCallbacks(cbs);
if (cbs != nullptr) {
@@ -1301,7 +1332,8 @@ CameraDevice::CallbackHolder::CallbackHolder(
ACameraCaptureSession_logicalCamera_captureCallbacks* lcbs) :
mSession(session), mRequests(requests),
mIsRepeating(isRepeating),
- mIsLogicalCameraCallback(true) {
+ mIsLogicalCameraCallback(true),
+ mIs2Callback(false) {
initCaptureCallbacks(lcbs);
if (lcbs != nullptr) {
@@ -1310,6 +1342,40 @@ CameraDevice::CallbackHolder::CallbackHolder(
}
}
+CameraDevice::CallbackHolder::CallbackHolder(
+ sp<ACameraCaptureSession> session,
+ const Vector<sp<CaptureRequest> >& requests,
+ bool isRepeating,
+ ACameraCaptureSession_captureCallbacksV2* cbs) :
+ mSession(session), mRequests(requests),
+ mIsRepeating(isRepeating),
+ mIsLogicalCameraCallback(false),
+ mIs2Callback(true) {
+ initCaptureCallbacksV2(cbs);
+
+ if (cbs != nullptr) {
+ mOnCaptureCompleted = cbs->onCaptureCompleted;
+ mOnCaptureFailed = cbs->onCaptureFailed;
+ }
+}
+
+CameraDevice::CallbackHolder::CallbackHolder(
+ sp<ACameraCaptureSession> session,
+ const Vector<sp<CaptureRequest> >& requests,
+ bool isRepeating,
+ ACameraCaptureSession_logicalCamera_captureCallbacksV2* lcbs) :
+ mSession(session), mRequests(requests),
+ mIsRepeating(isRepeating),
+ mIsLogicalCameraCallback(true),
+ mIs2Callback(true) {
+ initCaptureCallbacksV2(lcbs);
+
+ if (lcbs != nullptr) {
+ mOnLogicalCameraCaptureCompleted = lcbs->onLogicalCameraCaptureCompleted;
+ mOnLogicalCameraCaptureFailed = lcbs->onLogicalCameraCaptureFailed;
+ }
+}
+
void
CameraDevice::checkRepeatingSequenceCompleteLocked(
const int sequenceId, const int64_t lastFrameNumber) {
@@ -1536,7 +1602,6 @@ CameraDevice::ServiceCallback::onCaptureStarted(
const CaptureResultExtras& resultExtras,
int64_t timestamp) {
binder::Status ret = binder::Status::ok();
-
sp dev = mDevice.promote();
if (dev == nullptr) {
return ret; // device has been closed
@@ -1551,11 +1616,14 @@ CameraDevice::ServiceCallback::onCaptureStarted(
int sequenceId = resultExtras.requestId;
int32_t burstId = resultExtras.burstId;
+ int64_t frameNumber = resultExtras.frameNumber;
auto it = dev->mSequenceCallbackMap.find(sequenceId);
if (it != dev->mSequenceCallbackMap.end()) {
CallbackHolder cbh = (*it).second;
+ bool v2Callback = cbh.mIs2Callback;
ACameraCaptureSession_captureCallback_start onStart = cbh.mOnCaptureStarted;
+ ACameraCaptureSession_captureCallback_startV2 onStart2 = cbh.mOnCaptureStarted2;
sp<ACameraCaptureSession> session = cbh.mSession;
if ((size_t) burstId >= cbh.mRequests.size()) {
ALOGE("%s: Error: request index %d out of bound (size %zu)",
@@ -1563,12 +1631,19 @@ CameraDevice::ServiceCallback::onCaptureStarted(
dev->setCameraDeviceErrorLocked(ACAMERA_ERROR_CAMERA_SERVICE);
}
sp<CaptureRequest> request = cbh.mRequests[burstId];
- sp<AMessage> msg = new AMessage(kWhatCaptureStart, dev->mHandler);
+ sp<AMessage> msg = nullptr;
+ if (v2Callback) {
+ msg = new AMessage(kWhatCaptureStart2, dev->mHandler);
+ msg->setPointer(kCallbackFpKey, (void*) onStart2);
+ } else {
+ msg = new AMessage(kWhatCaptureStart, dev->mHandler);
+ msg->setPointer(kCallbackFpKey, (void *)onStart);
+ }
msg->setPointer(kContextKey, cbh.mContext);
msg->setObject(kSessionSpKey, session);
- msg->setPointer(kCallbackFpKey, (void*) onStart);
msg->setObject(kCaptureRequestKey, request);
msg->setInt64(kTimeStampKey, timestamp);
+ msg->setInt64(kFrameNumberKey, frameNumber);
dev->postSessionMsgAndCleanup(msg);
}
return ret;
diff --git a/camera/ndk/impl/ACameraDevice.h b/camera/ndk/impl/ACameraDevice.h
index 344d9644183349dbeb2b5357f26e8d86aae0d47f..17988fe8f1cde30df02a890452dd8837611337b5 100644
--- a/camera/ndk/impl/ACameraDevice.h
+++ b/camera/ndk/impl/ACameraDevice.h
@@ -215,6 +215,7 @@ class CameraDevice final : public RefBase {
kWhatSessionStateCb, // onReady, onActive
// Capture callbacks
kWhatCaptureStart, // onCaptureStarted
+ kWhatCaptureStart2, // onCaptureStarted2
kWhatCaptureResult, // onCaptureProgressed, onCaptureCompleted
kWhatLogicalCaptureResult, // onLogicalCameraCaptureCompleted
kWhatCaptureFail, // onCaptureFailed
@@ -294,11 +295,18 @@ class CameraDevice final : public RefBase {
const Vector<sp<CaptureRequest> >& requests,
bool isRepeating,
ACameraCaptureSession_logicalCamera_captureCallbacks* lcbs);
-
- template <class T>
- void initCaptureCallbacks(T* cbs) {
+ CallbackHolder(sp<ACameraCaptureSession> session,
+ const Vector<sp<CaptureRequest> >& requests,
+ bool isRepeating,
+ ACameraCaptureSession_captureCallbacksV2* cbs);
+ CallbackHolder(sp<ACameraCaptureSession> session,
+ const Vector<sp<CaptureRequest> >& requests,
+ bool isRepeating,
+ ACameraCaptureSession_logicalCamera_captureCallbacksV2* lcbs);
+ void clearCallbacks() {
mContext = nullptr;
mOnCaptureStarted = nullptr;
+ mOnCaptureStarted2 = nullptr;
mOnCaptureProgressed = nullptr;
mOnCaptureCompleted = nullptr;
mOnLogicalCameraCaptureCompleted = nullptr;
@@ -307,6 +315,24 @@ class CameraDevice final : public RefBase {
mOnCaptureSequenceCompleted = nullptr;
mOnCaptureSequenceAborted = nullptr;
mOnCaptureBufferLost = nullptr;
+ }
+
+ template <class T>
+ void initCaptureCallbacksV2(T* cbs) {
+ clearCallbacks();
+ if (cbs != nullptr) {
+ mContext = cbs->context;
+ mOnCaptureStarted2 = cbs->onCaptureStarted;
+ mOnCaptureProgressed = cbs->onCaptureProgressed;
+ mOnCaptureSequenceCompleted = cbs->onCaptureSequenceCompleted;
+ mOnCaptureSequenceAborted = cbs->onCaptureSequenceAborted;
+ mOnCaptureBufferLost = cbs->onCaptureBufferLost;
+ }
+ }
+
+ template <class T>
+ void initCaptureCallbacks(T* cbs) {
+ clearCallbacks();
if (cbs != nullptr) {
mContext = cbs->context;
mOnCaptureStarted = cbs->onCaptureStarted;
@@ -320,9 +346,11 @@ class CameraDevice final : public RefBase {
Vector > mRequests;
const bool mIsRepeating;
const bool mIsLogicalCameraCallback;
+ const bool mIs2Callback;
void* mContext;
ACameraCaptureSession_captureCallback_start mOnCaptureStarted;
+ ACameraCaptureSession_captureCallback_startV2 mOnCaptureStarted2;
ACameraCaptureSession_captureCallback_result mOnCaptureProgressed;
ACameraCaptureSession_captureCallback_result mOnCaptureCompleted;
ACameraCaptureSession_logicalCamera_captureCallback_result mOnLogicalCameraCaptureCompleted;
diff --git a/camera/ndk/impl/ACameraManager.cpp b/camera/ndk/impl/ACameraManager.cpp
index 95ef2b2af1f828c9901bb42f9ff780adb87c4fee..5892f1ad0f19c9d2498983fea7291379b8ef2834 100644
--- a/camera/ndk/impl/ACameraManager.cpp
+++ b/camera/ndk/impl/ACameraManager.cpp
@@ -189,8 +189,12 @@ void CameraManagerGlobal::DeathNotifier::binderDied(const wp&)
sp<CameraManagerGlobal> cm = mCameraManager.promote();
if (cm != nullptr) {
AutoMutex lock(cm->mLock);
+ std::vector<String8> cameraIdList;
for (auto& pair : cm->mDeviceStatusMap) {
- const String8 &cameraId = pair.first;
+ cameraIdList.push_back(pair.first);
+ }
+
+ for (String8 cameraId : cameraIdList) {
cm->onStatusChangedLocked(
CameraServiceListener::STATUS_NOT_PRESENT, cameraId);
}
diff --git a/camera/ndk/impl/ACameraManager.h b/camera/ndk/impl/ACameraManager.h
index da887a26e09e692d26a2d79237bcabea57d16558..d53d8099ded18e701fbcc64caacccafb84b1ef5d 100644
--- a/camera/ndk/impl/ACameraManager.h
+++ b/camera/ndk/impl/ACameraManager.h
@@ -95,6 +95,9 @@ class CameraManagerGlobal final : public RefBase {
virtual binder::Status onTorchStatusChanged(int32_t, const String16&) {
return binder::Status::ok();
}
+ virtual binder::Status onTorchStrengthLevelChanged(const String16&, int32_t) {
+ return binder::Status::ok();
+ }
virtual binder::Status onCameraAccessPrioritiesChanged();
virtual binder::Status onCameraOpened(const String16&, const String16&) {
diff --git a/camera/ndk/include/camera/NdkCameraCaptureSession.h b/camera/ndk/include/camera/NdkCameraCaptureSession.h
index 2b7f040be449c3884795ed41b1aa6dc6d027c0ee..b0fd00ce20cc8b42bdcc24dac8f7f1ceb03810ba 100644
--- a/camera/ndk/include/camera/NdkCameraCaptureSession.h
+++ b/camera/ndk/include/camera/NdkCameraCaptureSession.h
@@ -811,6 +811,184 @@ camera_status_t ACameraCaptureSession_logicalCamera_setRepeatingRequest(
int numRequests, ACaptureRequest** requests,
/*optional*/int* captureSequenceId) __INTRODUCED_IN(29);
+/**
+ * The definition of camera capture start callback. The same as
+ * {@link ACameraCaptureSession_captureCallbacks#onCaptureStarted}, except that
+ * it has the frame number of the capture as well.
+ *
+ * @param context The optional application context provided by user in
+ * {@link ACameraCaptureSession_captureCallbacks}.
+ * @param session The camera capture session of interest.
+ * @param request The capture request that is starting. Note that this pointer points to a copy of
+ * capture request sent by application, so the address is different to what
+ * application sent but the content will match. This request will be freed by
+ * framework immediately after this callback returns.
+ * @param timestamp The timestamp when the capture is started. This timestamp will match
+ * {@link ACAMERA_SENSOR_TIMESTAMP} of the {@link ACameraMetadata} in
+ * {@link ACameraCaptureSession_captureCallbacks#onCaptureCompleted} callback.
+ * @param frameNumber the frame number of the capture started
+ */
+typedef void (*ACameraCaptureSession_captureCallback_startV2)(
+ void* context, ACameraCaptureSession* session,
+ const ACaptureRequest* request, int64_t timestamp, int64_t frameNumber);
+/**
+ * This has the same functionality as ACameraCaptureSession_captureCallbacks,
+ * with the exception that captureCallback_startV2 callback is
+ * used, instead of captureCallback_start, to support retrieving the frame number.
+ */
+typedef struct ACameraCaptureSession_captureCallbacksV2 {
+ /**
+ * Same as ACameraCaptureSession_captureCallbacks
+ */
+ void* context;
+
+ /**
+ * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureStarted},
+ * except that it has the frame number of the capture added in the parameter
+ * list.
+ */
+ ACameraCaptureSession_captureCallback_startV2 onCaptureStarted;
+
+ /**
+ * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureProgressed}.
+ */
+ ACameraCaptureSession_captureCallback_result onCaptureProgressed;
+
+ /**
+ * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureCompleted}.
+ */
+ ACameraCaptureSession_captureCallback_result onCaptureCompleted;
+
+ /**
+ * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureFailed}.
+ */
+ ACameraCaptureSession_captureCallback_failed onCaptureFailed;
+
+ /**
+ * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureSequenceCompleted}.
+ */
+ ACameraCaptureSession_captureCallback_sequenceEnd onCaptureSequenceCompleted;
+
+ /**
+ * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureSequenceAborted}.
+ */
+ ACameraCaptureSession_captureCallback_sequenceAbort onCaptureSequenceAborted;
+
+ /**
+ * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureBufferLost}.
+ */
+ ACameraCaptureSession_captureCallback_bufferLost onCaptureBufferLost;
+
+
+} ACameraCaptureSession_captureCallbacksV2;
+
+/**
+ * This has the same functionality as ACameraCaptureSession_logicalCamera_captureCallbacks,
+ * with the exception that an captureCallback_startV2 callback is
+ * used, instead of captureCallback_start, to support retrieving frame number.
+ */
+typedef struct ACameraCaptureSession_logicalCamera_captureCallbacksV2 {
+ /**
+ * Same as ACameraCaptureSession_captureCallbacks
+ */
+ void* context;
+
+ /**
+ * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureStarted},
+ * except that it has the frame number of the capture added in the parameter
+ * list.
+ */
+ ACameraCaptureSession_captureCallback_startV2 onCaptureStarted;
+
+
+ /**
+ * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureProgressed}.
+ */
+ ACameraCaptureSession_captureCallback_result onCaptureProgressed;
+
+ /**
+ * Same as
+ * {@link ACameraCaptureSession_logicalCamera_captureCallbacks#onLogicalCameraCaptureCompleted}.
+ */
+ ACameraCaptureSession_logicalCamera_captureCallback_result onLogicalCameraCaptureCompleted;
+
+ /**
+ * This callback is called instead of {@link onLogicalCameraCaptureCompleted} when the
+ * camera device failed to produce a capture result for the
+ * request.
+ *
+ * Other requests are unaffected, and some or all image buffers from
+ * the capture may have been pushed to their respective output
+ * streams.
+ *
+ * Note that the ACaptureRequest pointer in the callback will not match what application has
+ * submitted, but the contents the ACaptureRequest will match what application submitted.
+ *
+ * @see ALogicalCameraCaptureFailure
+ */
+ ACameraCaptureSession_logicalCamera_captureCallback_failed onLogicalCameraCaptureFailed;
+
+ /**
+ * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureSequenceCompleted}.
+ */
+ ACameraCaptureSession_captureCallback_sequenceEnd onCaptureSequenceCompleted;
+
+ /**
+ * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureSequenceAborted}.
+ */
+ ACameraCaptureSession_captureCallback_sequenceAbort onCaptureSequenceAborted;
+
+ /**
+ * Same as {@link ACameraCaptureSession_captureCallbacks#onCaptureBufferLost}.
+ */
+ ACameraCaptureSession_captureCallback_bufferLost onCaptureBufferLost;
+
+} ACameraCaptureSession_logicalCamera_captureCallbacksV2;
+
+/**
+ * This has the same functionality as ACameraCaptureSession_capture, with added
+ * support for v2 of camera callbacks, where the onCaptureStarted callback
+ * adds frame number in its parameter list.
+ */
+camera_status_t ACameraCaptureSession_captureV2(
+ ACameraCaptureSession* session,
+ /*optional*/ACameraCaptureSession_captureCallbacksV2* callbacks,
+ int numRequests, ACaptureRequest** requests,
+ /*optional*/int* captureSequenceId) __INTRODUCED_IN(33);
+
+/**
+ * This has the same functionality as ACameraCaptureSession_setRepeatingRequest, with added
+ * support for v2 of camera callbacks where the onCaptureStarted
+ * callback adds frame number in its parameter list.
+ */
+camera_status_t ACameraCaptureSession_setRepeatingRequestV2(
+ ACameraCaptureSession* session,
+ /*optional*/ACameraCaptureSession_captureCallbacksV2* callbacks,
+ int numRequests, ACaptureRequest** requests,
+ /*optional*/int* captureSequenceId) __INTRODUCED_IN(33);
+
+/**
+ * This has the same functionality as ACameraCaptureSession_logicalCamera_capture, with added
+ * support for v2 of logical multi-camera callbacks where the onCaptureStarted callback
+ * adds frame number in its parameter list.
+ */
+camera_status_t ACameraCaptureSession_logicalCamera_captureV2(
+ ACameraCaptureSession* session,
+ /*optional*/ACameraCaptureSession_logicalCamera_captureCallbacksV2* callbacks,
+ int numRequests, ACaptureRequest** requests,
+ /*optional*/int* captureSequenceId) __INTRODUCED_IN(33);
+
+/**
+ * This has the same functionality as ACameraCaptureSession_logicalCamera_setRepeatingRequest, with added
+ * support for v2 of logical multi-camera callbacks where the onCaptureStarted
+ * callback adds frame number in its parameter list.
+ */
+camera_status_t ACameraCaptureSession_logicalCamera_setRepeatingRequestV2(
+ ACameraCaptureSession* session,
+ /*optional*/ACameraCaptureSession_logicalCamera_captureCallbacksV2* callbacks,
+ int numRequests, ACaptureRequest** requests,
+ /*optional*/int* captureSequenceId) __INTRODUCED_IN(33);
+
__END_DECLS
#endif /* _NDK_CAMERA_CAPTURE_SESSION_H */
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 816303c4088c41ad65be8cf5128ae20610de861d..b6f8552ae1b5a5760e2ef4436a23f16a44a71727 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -72,6 +72,8 @@ typedef enum acamera_metadata_section {
ACAMERA_DISTORTION_CORRECTION,
ACAMERA_HEIC,
ACAMERA_HEIC_INFO,
+ ACAMERA_AUTOMOTIVE,
+ ACAMERA_AUTOMOTIVE_LENS,
ACAMERA_SECTION_COUNT,
ACAMERA_VENDOR = 0x8000
@@ -115,6 +117,8 @@ typedef enum acamera_metadata_section_start {
<< 16,
ACAMERA_HEIC_START = ACAMERA_HEIC << 16,
ACAMERA_HEIC_INFO_START = ACAMERA_HEIC_INFO << 16,
+ ACAMERA_AUTOMOTIVE_START = ACAMERA_AUTOMOTIVE << 16,
+ ACAMERA_AUTOMOTIVE_LENS_START = ACAMERA_AUTOMOTIVE_LENS << 16,
ACAMERA_VENDOR_START = ACAMERA_VENDOR << 16
} acamera_metadata_section_start_t;
@@ -517,6 +521,14 @@ typedef enum acamera_metadata_tag {
* region and output only the intersection rectangle as the metering region in the result
* metadata. If the region is entirely outside the crop region, it will be ignored and
* not reported in the result metadata.
+ * When setting the AE metering regions, the application must consider the additional
+ * crop resulted from the aspect ratio differences between the preview stream and
+ * ACAMERA_SCALER_CROP_REGION. For example, if the ACAMERA_SCALER_CROP_REGION is the full
+ * active array size with 4:3 aspect ratio, and the preview stream is 16:9,
+ * the boundary of AE regions will be [0, y_crop] and
+ * [active_width, active_height - 2 * y_crop] rather than [0, 0] and
+ * [active_width, active_height], where y_crop is the additional crop due to aspect ratio
+ * mismatch.
* Starting from API level 30, the coordinate system of activeArraySize or
* preCorrectionActiveArraySize is used to represent post-zoomRatio field of view, not
* pre-zoom field of view. This means that the same aeRegions values at different
@@ -718,6 +730,14 @@ typedef enum acamera_metadata_tag {
* region and output only the intersection rectangle as the metering region in the result
* metadata. If the region is entirely outside the crop region, it will be ignored and
* not reported in the result metadata.
+ * When setting the AF metering regions, the application must consider the additional
+ * crop resulted from the aspect ratio differences between the preview stream and
+ * ACAMERA_SCALER_CROP_REGION. For example, if the ACAMERA_SCALER_CROP_REGION is the full
+ * active array size with 4:3 aspect ratio, and the preview stream is 16:9,
+ * the boundary of AF regions will be [0, y_crop] and
+ * [active_width, active_height - 2 * y_crop] rather than [0, 0] and
+ * [active_width, active_height], where y_crop is the additional crop due to aspect ratio
+ * mismatch.
* Starting from API level 30, the coordinate system of activeArraySize or
* preCorrectionActiveArraySize is used to represent post-zoomRatio field of view, not
* pre-zoom field of view. This means that the same afRegions values at different
@@ -838,7 +858,7 @@ typedef enum acamera_metadata_tag {
* routine is enabled, overriding the application's selected
* ACAMERA_COLOR_CORRECTION_TRANSFORM, ACAMERA_COLOR_CORRECTION_GAINS and
* ACAMERA_COLOR_CORRECTION_MODE. Note that when ACAMERA_CONTROL_AE_MODE
- * is OFF, the behavior of AWB is device dependent. It is recommened to
+ * is OFF, the behavior of AWB is device dependent. It is recommended to
* also set AWB mode to OFF or lock AWB by using ACAMERA_CONTROL_AWB_LOCK before
* setting AE mode to OFF.
* When set to the OFF mode, the camera device's auto-white balance
@@ -913,6 +933,14 @@ typedef enum acamera_metadata_tag {
* region and output only the intersection rectangle as the metering region in the result
* metadata. If the region is entirely outside the crop region, it will be ignored and
* not reported in the result metadata.
+ * When setting the AWB metering regions, the application must consider the additional
+ * crop resulted from the aspect ratio differences between the preview stream and
+ * ACAMERA_SCALER_CROP_REGION. For example, if the ACAMERA_SCALER_CROP_REGION is the full
+ * active array size with 4:3 aspect ratio, and the preview stream is 16:9,
+ * the boundary of AWB regions will be [0, y_crop] and
+ * [active_width, active_height - 2 * y_crop] rather than [0, 0] and
+ * [active_width, active_height], where y_crop is the additional crop due to aspect ratio
+ * mismatch.
* Starting from API level 30, the coordinate system of activeArraySize or
* preCorrectionActiveArraySize is used to represent post-zoomRatio field of view, not
* pre-zoom field of view. This means that the same awbRegions values at different
@@ -961,13 +989,15 @@ typedef enum acamera_metadata_tag {
*
*
This control (except for MANUAL) is only effective if
* This control (except for MANUAL) is only effective if
* ACAMERA_CONTROL_MODE != OFF and any 3A routine is active.
- * All intents are supported by all devices, except that:
- * * ZERO_SHUTTER_LAG will be supported if ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
- * PRIVATE_REPROCESSING or YUV_REPROCESSING.
- * * MANUAL will be supported if ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
- * MANUAL_SENSOR.
- * * MOTION_TRACKING will be supported if ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
- * MOTION_TRACKING.
+ * All intents are supported by all devices, except that:
+ *
+ * - ZERO_SHUTTER_LAG will be supported if ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
+ * PRIVATE_REPROCESSING or YUV_REPROCESSING.
+ * - MANUAL will be supported if ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
+ * MANUAL_SENSOR.
+ * - MOTION_TRACKING will be supported if ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
+ * MOTION_TRACKING.
+ *
*
* @see ACAMERA_CONTROL_MODE
* @see ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
@@ -1090,6 +1120,15 @@ typedef enum acamera_metadata_tag {
* (ACAMERA_LENS_OPTICAL_STABILIZATION_MODE), turning both modes on may
* produce undesirable interaction, so it is recommended not to enable
* both at the same time.
+ * If video stabilization is set to "PREVIEW_STABILIZATION",
+ * ACAMERA_LENS_OPTICAL_STABILIZATION_MODE is overridden. The camera sub-system may choose
+ * to turn on hardware based image stabilization in addition to software based stabilization
+ * if it deems that appropriate.
+ * This key may be a part of the available session keys, which camera clients may
+ * query via
+ * {@link ACameraManager_getCameraCharacteristics }.
+ * If this is the case, changing this key over the life-time of a capture session may
+ * cause delays / glitches.
*
* @see ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE
* @see ACAMERA_LENS_OPTICAL_STABILIZATION_MODE
@@ -1449,7 +1488,7 @@ typedef enum acamera_metadata_tag {
* Any state (excluding LOCKED) | ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is START, sequence done | FLASH_REQUIRED | Converged but too dark w/o flash after a precapture sequence, transient states are skipped by camera device.
* Any state (excluding LOCKED) | ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is START, sequence done | CONVERGED | Converged after a precapture sequence, transient states are skipped by camera device.
* Any state (excluding LOCKED) | ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is CANCEL, converged | FLASH_REQUIRED | Converged but too dark w/o flash after a precapture sequence is canceled, transient states are skipped by camera device.
- * Any state (excluding LOCKED) | ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is CANCEL, converged | CONVERGED | Converged after a precapture sequenceis canceled, transient states are skipped by camera device.
+ * Any state (excluding LOCKED) | ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is CANCEL, converged | CONVERGED | Converged after a precapture sequence is canceled, transient states are skipped by camera device.
* CONVERGED | Camera device finished AE scan | FLASH_REQUIRED | Converged but too dark w/o flash after a new scan, transient states are skipped by camera device.
* FLASH_REQUIRED | Camera device finished AE scan | CONVERGED | Converged after a new scan, transient states are skipped by camera device.
*
@@ -1685,7 +1724,7 @@ typedef enum acamera_metadata_tag {
*
*
* Devices support post RAW sensitivity boost will advertise
- * ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST key for controling
+ * ACAMERA_CONTROL_POST_RAW_SENSITIVITY_BOOST key for controlling
* post RAW sensitivity boost.
* This key will be null
for devices that do not support any RAW format
* outputs. For devices that do support RAW format outputs, this key will always
@@ -2144,6 +2183,51 @@ typedef enum acamera_metadata_tag {
*/
ACAMERA_FLASH_INFO_AVAILABLE = // byte (acamera_metadata_enum_android_flash_info_available_t)
ACAMERA_FLASH_INFO_START,
+ /**
+ *
Maximum flashlight brightness level.
+ *
+ * Type: int32
+ *
+ * This tag may appear in:
+ *
+ * - ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ * If this value is greater than 1, then the device supports controlling the
+ * flashlight brightness level via
+ * CameraManager#turnOnTorchWithStrengthLevel.
+ * If this value is equal to 1, flashlight brightness control is not supported.
+ * The value for this key will be null for devices with no flash unit.
+ */
+ ACAMERA_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL = // int32
+ ACAMERA_FLASH_INFO_START + 2,
+ /**
+ * Default flashlight brightness level to be set via
+ * CameraManager#turnOnTorchWithStrengthLevel.
+ *
+ * Type: int32
+ *
+ * This tag may appear in:
+ *
+ * - ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ * If flash unit is available this will be greater than or equal to 1 and less than
+ * or equal to ACAMERA_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL
.
+ * Setting flashlight brightness above the default level
+ * (i.e. ACAMERA_FLASH_INFO_STRENGTH_DEFAULT_LEVEL
) may make the device more
+ * likely to reach thermal throttling conditions and slow down, or drain the
+ * battery quicker than normal. To minimize such issues, it is recommended to
+ * start the flashlight at this default brightness until a user explicitly requests
+ * a brighter level.
+ * Note that the value for this key will be null for devices with no flash unit.
+ * The default level should always be > 0.
+ *
+ * @see ACAMERA_FLASH_INFO_STRENGTH_DEFAULT_LEVEL
+ * @see ACAMERA_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL
+ */
+ ACAMERA_FLASH_INFO_STRENGTH_DEFAULT_LEVEL = // int32
+ ACAMERA_FLASH_INFO_START + 3,
ACAMERA_FLASH_INFO_END,
/**
@@ -2341,7 +2425,7 @@ typedef enum acamera_metadata_tag {
* and keep jpeg and thumbnail image data unrotated.
* Rotate the jpeg and thumbnail image data and not set
* EXIF orientation flag. In this
- * case, LIMITED or FULL hardware level devices will report rotated thumnail size in
+ * case, LIMITED or FULL hardware level devices will report rotated thumbnail size in
* capture result, so the width and height will be interchanged if 90 or 270 degree
* orientation is requested. LEGACY device will always report unrotated thumbnail
* size.
@@ -2370,7 +2454,7 @@ typedef enum acamera_metadata_tag {
*
* This list will include at least one non-zero resolution, plus (0,0)
for indicating no
* thumbnail should be generated.
- * Below condiditions will be satisfied for this size list:
+ * Below conditions will be satisfied for this size list:
*
* - The sizes will be sorted by increasing pixel area (width x height).
* If several resolutions have the same area, they will be sorted by increasing width.
@@ -2526,12 +2610,18 @@ typedef enum acamera_metadata_tag {
* If a camera device supports both OIS and digital image stabilization
* (ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE), turning both modes on may produce undesirable
* interaction, so it is recommended not to enable both at the same time.
+ * If ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE is set to "PREVIEW_STABILIZATION",
+ * ACAMERA_LENS_OPTICAL_STABILIZATION_MODE is overridden. The camera sub-system may choose
+ * to turn on hardware based image stabilization in addition to software based stabilization
+ * if it deems that appropriate. This key's value in the capture result will reflect which
+ * OIS mode was chosen.
* Not all devices will support OIS; see
* ACAMERA_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION for
* available controls.
*
* @see ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE
* @see ACAMERA_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION
+ * @see ACAMERA_LENS_OPTICAL_STABILIZATION_MODE
*/
ACAMERA_LENS_OPTICAL_STABILIZATION_MODE = // byte (acamera_metadata_enum_android_lens_optical_stabilization_mode_t)
ACAMERA_LENS_START + 4,
@@ -2634,6 +2724,9 @@ typedef enum acamera_metadata_tag {
* with PRIMARY_CAMERA.
* When ACAMERA_LENS_POSE_REFERENCE is UNDEFINED, this position cannot be accurately
* represented by the camera device, and will be represented as (0, 0, 0)
.
+ * When ACAMERA_LENS_POSE_REFERENCE is AUTOMOTIVE, then this position is relative to the
+ * origin of the automotive sensor coordinate system, which is at the center of the rear
+ * axle.
*
* @see ACAMERA_LENS_DISTORTION
* @see ACAMERA_LENS_INTRINSIC_CALIBRATION
@@ -2675,7 +2768,7 @@ typedef enum acamera_metadata_tag {
* When the state is STATIONARY, the lens parameters are not changing. This could be
* either because the parameters are all fixed, or because the lens has had enough
* time to reach the most recently-requested values.
- * If all these lens parameters are not changable for a camera device, as listed below:
+ * If all these lens parameters are not changeable for a camera device, as listed below:
*
* - Fixed focus (
ACAMERA_LENS_INFO_MINIMUM_FOCUS_DISTANCE == 0
), which means
* ACAMERA_LENS_FOCUS_DISTANCE parameter will always be 0.
@@ -3127,7 +3220,7 @@ typedef enum acamera_metadata_tag {
* the camera device. Using more streams simultaneously may require more hardware and
* CPU resources that will consume more power. The image format for an output stream can
* be any supported format provided by ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS.
- * The formats defined in ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS can be catergorized
+ * The formats defined in ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS can be categorized
* into the 3 stream types as below:
*
* - Processed (but stalling): any non-RAW format with a stallDurations > 0.
@@ -3354,7 +3447,7 @@ typedef enum acamera_metadata_tag {
* but clients should be aware and expect delays during their application.
* An example usage scenario could look like this:
*
- * - The camera client starts by quering the session parameter key list via
+ *
- The camera client starts by querying the session parameter key list via
* {@link ACameraManager_getCameraCharacteristics }.
* - Before triggering the capture session create sequence, a capture request
* must be built via
@@ -3393,16 +3486,36 @@ typedef enum acamera_metadata_tag {
*
*
* This is a subset of ACAMERA_REQUEST_AVAILABLE_REQUEST_KEYS which contains a list
- * of keys that can be overridden using Builder#setPhysicalCameraKey.
+ * of keys that can be overridden using
+ * Builder#setPhysicalCameraKey.
* The respective value of such request key can be obtained by calling
- * Builder#getPhysicalCameraKey. Capture requests that contain
- * individual physical device requests must be built via
+ * Builder#getPhysicalCameraKey.
+ * Capture requests that contain individual physical device requests must be built via
* Set).
*
* @see ACAMERA_REQUEST_AVAILABLE_REQUEST_KEYS
*/
ACAMERA_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS = // int32[n]
ACAMERA_REQUEST_START + 17,
+ /**
+ * A map of all available 10-bit dynamic range profiles along with their
+ * capture request constraints.
+ *
+ * Type: int64[n*3] (acamera_metadata_enum_android_request_available_dynamic_range_profiles_map_t)
+ *
+ * This tag may appear in:
+ *
+ * - ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ * Devices supporting the 10-bit output capability
+ * CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT
+ * must list their supported dynamic range profiles. In case the camera is not able to
+ * support every possible profile combination within a single capture request, then the
+ * constraints must be listed here as well.
+ */
+ ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP = // int64[n*3] (acamera_metadata_enum_android_request_available_dynamic_range_profiles_map_t)
+ ACAMERA_REQUEST_START + 19,
ACAMERA_REQUEST_END,
/**
@@ -3601,7 +3714,7 @@ typedef enum acamera_metadata_tag {
* IMPLEMENTATION_DEFINED | same as YUV_420_888 | Any |
* For applications targeting SDK version 31 or newer, if the mobile device declares to be
* media performance class 12 or higher by setting
- * MEDIA_PERFORMANCE_CLASS to be 31 or larger,
+ * VERSION#MEDIA_PERFORMANCE_CLASS to be 31 or larger,
* the primary camera devices (first rear/front camera in the camera ID list) will not
* support JPEG sizes smaller than 1080p. If the application configures a JPEG stream
* smaller than 1080p, the camera device will round up the JPEG image size to at least
@@ -3620,7 +3733,7 @@ typedef enum acamera_metadata_tag {
* IMPLEMENTATION_DEFINED | same as YUV_420_888 | Any |
* For applications targeting SDK version 31 or newer, if the mobile device doesn't declare
* to be media performance class 12 or better by setting
- * MEDIA_PERFORMANCE_CLASS to be 31 or larger,
+ * VERSION#MEDIA_PERFORMANCE_CLASS to be 31 or larger,
* or if the camera device isn't a primary rear/front camera, the minimum required output
* stream configurations are the same as for applications targeting SDK version older than
* 31.
@@ -4114,19 +4227,70 @@ typedef enum acamera_metadata_tag {
* to output different resolution images depending on the current active physical camera or
* pixel mode. With multi-resolution input streams, the camera device can reprocess images
* of different resolutions from different physical cameras or sensor pixel modes.
- * When set to TRUE:
- * * For a logical multi-camera, the camera framework derives
+ *
When set to TRUE:
+ *
+ * - For a logical multi-camera, the camera framework derives
* android.scaler.multiResolutionStreamConfigurationMap by combining the
* ACAMERA_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS from its physical
- * cameras.
- * * For an ultra-high resolution sensor camera, the camera framework directly copies
+ * cameras.
+ * - For an ultra-high resolution sensor camera, the camera framework directly copies
* the value of ACAMERA_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS to
- * android.scaler.multiResolutionStreamConfigurationMap.
+ * android.scaler.multiResolutionStreamConfigurationMap.
+ *
*
* @see ACAMERA_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS
*/
ACAMERA_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED = // byte (acamera_metadata_enum_android_scaler_multi_resolution_stream_supported_t)
ACAMERA_SCALER_START + 24,
+ /**
+ * The stream use cases supported by this camera device.
+ *
+ * Type: int64[n] (acamera_metadata_enum_android_scaler_available_stream_use_cases_t)
+ *
+ * This tag may appear in:
+ *
+ * - ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ * The stream use case indicates the purpose of a particular camera stream from
+ * the end-user perspective. Some examples of camera use cases are: preview stream for
+ * live viewfinder shown to the user, still capture for generating high quality photo
+ * capture, video record for encoding the camera output for the purpose of future playback,
+ * and video call for live realtime video conferencing.
+ * With this flag, the camera device can optimize the image processing pipeline
+ * parameters, such as tuning, sensor mode, and ISP settings, independent of
+ * the properties of the immediate camera output surface. For example, if the output
+ * surface is a SurfaceTexture, the stream use case flag can be used to indicate whether
+ * the camera frames eventually go to display, video encoder,
+ * still image capture, or all of them combined.
+ * The application sets the use case of a camera stream by calling
+ * OutputConfiguration#setStreamUseCase.
+ * A camera device with
+ * CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE
+ * capability must support the following stream use cases:
+ *
+ * - DEFAULT
+ * - PREVIEW
+ * - STILL_CAPTURE
+ * - VIDEO_RECORD
+ * - PREVIEW_VIDEO_STILL
+ * - VIDEO_CALL
+ *
+ * The guaranteed stream combinations related to stream use case for a camera device with
+ * CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE
+ * capability is documented in the camera device
+ * guideline. The
+ * application is strongly recommended to use one of the guaranteed stream combinations.
+ * If the application creates a session with a stream combination not in the guaranteed
+ * list, or with mixed DEFAULT and non-DEFAULT use cases within the same session,
+ * the camera device may ignore some stream use cases due to hardware constraints
+ * and implementation details.
+ * For stream combinations not covered by the stream use case mandatory lists, such as
+ * reprocessable session, constrained high speed session, or RAW stream combinations, the
+ * application should leave stream use cases within the session as DEFAULT.
+ */
+ ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES = // int64[n] (acamera_metadata_enum_android_scaler_available_stream_use_cases_t)
+ ACAMERA_SCALER_START + 25,
ACAMERA_SCALER_END,
/**
@@ -4680,7 +4844,7 @@ typedef enum acamera_metadata_tag {
* noise model used here is:
* N(x) = sqrt(Sx + O)
* Where x represents the recorded signal of a CFA channel normalized to
- * the range [0, 1], and S and O are the noise model coeffiecients for
+ * the range [0, 1], and S and O are the noise model coefficients for
* that channel.
* A more detailed description of the noise model can be found in the
* Adobe DNG specification for the NoiseProfile tag.
@@ -4729,7 +4893,7 @@ typedef enum acamera_metadata_tag {
* - 1.20 <= R >= 1.03 will require some software
* correction to avoid demosaic errors (3-20% divergence).
* - R > 1.20 will require strong software correction to produce
- * a usuable image (>20% divergence).
+ * a usable image (>20% divergence).
*
* Starting from Android Q, this key will not be present for a MONOCHROME camera, even if
* the camera device has RAW capability.
@@ -4986,7 +5150,7 @@ typedef enum acamera_metadata_tag {
* - ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
*
*
- * This key will only be present in devices advertisting the
+ *
This key will only be present in devices advertising the
* CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR
* capability which also advertise REMOSAIC_REPROCESSING
capability. On all other devices
* RAW targets will have a regular bayer pattern.
@@ -6171,9 +6335,11 @@ typedef enum acamera_metadata_tag {
* - ACaptureRequest
*
*
- * The tonemap curve will be defined the following formula:
- * * OUT = pow(IN, 1.0 / gamma)
- * where IN and OUT is the input pixel value scaled to range [0.0, 1.0],
+ *
The tonemap curve will be defined the following formula:
+ *
+ * - OUT = pow(IN, 1.0 / gamma)
+ *
+ * where IN and OUT is the input pixel value scaled to range [0.0, 1.0],
* pow is the power function and gamma is the gamma value specified by this
* key.
* The same curve will be applied to all color channels. The camera device
@@ -7071,6 +7237,87 @@ typedef enum acamera_metadata_tag {
ACAMERA_HEIC_START + 5,
ACAMERA_HEIC_END,
+ /**
+ *
Location of the cameras on the automotive devices.
+ *
+ * Type: byte (acamera_metadata_enum_android_automotive_location_t)
+ *
+ * This tag may appear in:
+ *
+ * - ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ * This enum defines the locations of the cameras relative to the vehicle body frame on
+ * the automotive sensor coordinate system.
+ * If the system has FEATURE_AUTOMOTIVE, the camera will have this entry in its static
+ * metadata.
+ *
+ * - INTERIOR is the inside of the vehicle body frame (or the passenger cabin).
+ * - EXTERIOR is the outside of the vehicle body frame.
+ * - EXTRA is the extra vehicle such as a trailer.
+ *
+ * Each side of the vehicle body frame on this coordinate system is defined as below:
+ *
+ * - FRONT is where the Y-axis increases toward.
+ * - REAR is where the Y-axis decreases toward.
+ * - LEFT is where the X-axis decreases toward.
+ * - RIGHT is where the X-axis increases toward.
+ *
+ * If the camera has either EXTERIOR_OTHER or EXTRA_OTHER, its static metadata will list
+ * the following entries, so that applications can determine the camera's exact location:
+ *
+ * - ACAMERA_LENS_POSE_REFERENCE
+ * - ACAMERA_LENS_POSE_ROTATION
+ * - ACAMERA_LENS_POSE_TRANSLATION
+ *
+ *
+ * @see ACAMERA_LENS_POSE_REFERENCE
+ * @see ACAMERA_LENS_POSE_ROTATION
+ * @see ACAMERA_LENS_POSE_TRANSLATION
+ */
+ ACAMERA_AUTOMOTIVE_LOCATION = // byte (acamera_metadata_enum_android_automotive_location_t)
+ ACAMERA_AUTOMOTIVE_START,
+ ACAMERA_AUTOMOTIVE_END,
+
+ /**
+ * The direction of the camera faces relative to the vehicle body frame and the
+ * passenger seats.
+ *
+ * Type: byte[n] (acamera_metadata_enum_android_automotive_lens_facing_t)
+ *
+ * This tag may appear in:
+ *
+ * - ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ * This enum defines the lens facing characteristic of the cameras on the automotive
+ * devices with locations ACAMERA_AUTOMOTIVE_LOCATION defines. If the system has
+ * FEATURE_AUTOMOTIVE, the camera will have this entry in its static metadata.
+ * When ACAMERA_AUTOMOTIVE_LOCATION is INTERIOR, this has one or more INTERIOR_*
+ * values or a single EXTERIOR_* value. When this has more than one INTERIOR_*,
+ * the first value must be the one for the seat closest to the optical axis. If this
+ * contains INTERIOR_OTHER, all other values will be ineffective.
+ * When ACAMERA_AUTOMOTIVE_LOCATION is EXTERIOR_* or EXTRA, this has a single
+ * EXTERIOR_* value.
+ * If a camera has INTERIOR_OTHER or EXTERIOR_OTHER, or more than one camera is at the
+ * same location and facing the same direction, their static metadata will list the
+ * following entries, so that applications can determine their lenses' exact facing
+ * directions:
+ *
+ * - ACAMERA_LENS_POSE_REFERENCE
+ * - ACAMERA_LENS_POSE_ROTATION
+ * - ACAMERA_LENS_POSE_TRANSLATION
+ *
+ *
+ * @see ACAMERA_AUTOMOTIVE_LOCATION
+ * @see ACAMERA_LENS_POSE_REFERENCE
+ * @see ACAMERA_LENS_POSE_ROTATION
+ * @see ACAMERA_LENS_POSE_TRANSLATION
+ */
+ ACAMERA_AUTOMOTIVE_LENS_FACING = // byte[n] (acamera_metadata_enum_android_automotive_lens_facing_t)
+ ACAMERA_AUTOMOTIVE_LENS_START,
+ ACAMERA_AUTOMOTIVE_LENS_END,
+
} acamera_metadata_tag_t;
/**
@@ -7972,6 +8219,17 @@ typedef enum acamera_metadata_enum_acamera_control_video_stabilization_mode {
*/
ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE_ON = 1,
+ /**
+ * Preview stabilization, where the preview in addition to all other non-RAW streams are
+ * stabilized with the same quality of stabilization, is enabled. This mode aims to give
+ * clients a 'what you see is what you get' effect. In this mode, the FoV reduction will
+ * be a maximum of 20 % both horizontally and vertically
+ * (10% from left, right, top, bottom) for the given zoom ratio / crop region.
+ * The resultant FoV will also be the same across all processed streams
+ * (that have the same aspect ratio).
+ */
+ ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION = 2,
+
} acamera_metadata_enum_android_control_video_stabilization_mode_t;
// ACAMERA_CONTROL_AE_STATE
@@ -8452,6 +8710,14 @@ typedef enum acamera_metadata_enum_acamera_lens_pose_reference {
*/
ACAMERA_LENS_POSE_REFERENCE_UNDEFINED = 2,
+ /**
+ * The value of ACAMERA_LENS_POSE_TRANSLATION is relative to the origin of the
+ * automotive sensor coordinate system, which is at the center of the rear axle.
+ *
+ * @see ACAMERA_LENS_POSE_TRANSLATION
+ */
+ ACAMERA_LENS_POSE_REFERENCE_AUTOMOTIVE = 3,
+
} acamera_metadata_enum_android_lens_pose_reference_t;
@@ -8765,7 +9031,7 @@ typedef enum acamera_metadata_enum_acamera_request_available_capabilities {
* for the largest YUV_420_888 size.
* If the device supports the {@link AIMAGE_FORMAT_RAW10 }, {@link AIMAGE_FORMAT_RAW12 }, {@link AIMAGE_FORMAT_Y8 }, then those can also be
* captured at the same rate as the maximum-size YUV_420_888 resolution is.
- * In addition, the ACAMERA_SYNC_MAX_LATENCY field is guaranted to have a value between 0
+ *
In addition, the ACAMERA_SYNC_MAX_LATENCY field is guaranteed to have a value between 0
* and 4, inclusive. ACAMERA_CONTROL_AE_LOCK_AVAILABLE and ACAMERA_CONTROL_AWB_LOCK_AVAILABLE
* are also guaranteed to be true
so burst capture with these two locks ON yields
* consistent image output.
@@ -8931,7 +9197,7 @@ typedef enum acamera_metadata_enum_acamera_request_available_capabilities {
* non-active physical cameras. For example, if the logical camera has a wide-ultrawide
* configuration where the wide lens is the default, when the crop region is set to the
* logical camera's active array size, (and the zoom ratio set to 1.0 starting from
- * Android 11), a physical stream for the ultrawide camera may prefer outputing images
+ * Android 11), a physical stream for the ultrawide camera may prefer outputting images
* with larger field-of-view than that of the wide camera for better stereo matching
* margin or more robust motion tracking. At the same time, the physical non-RAW streams'
* field of view must not be smaller than the requested crop region and zoom ratio, as
@@ -9052,8 +9318,132 @@ typedef enum acamera_metadata_enum_acamera_request_available_capabilities {
ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR
= 16,
+ /**
+ * The camera device supports selecting a per-stream use case via
+ * OutputConfiguration#setStreamUseCase
+ * so that the device can optimize camera pipeline parameters such as tuning, sensor
+ * mode, or ISP settings for a specific user scenario.
+ * Some sample usages of this capability are:
+ *
+ * - Distinguish high quality YUV captures from a regular YUV stream where
+ * the image quality may not be as good as the JPEG stream, or
+ * - Use one stream to serve multiple purposes: viewfinder, video recording and
+ * still capture. This is common with applications that wish to apply edits equally
+ * to preview, saved images, and saved videos.
+ *
+ * This capability requires the camera device to support the following
+ * stream use cases:
+ *
+ * - DEFAULT for backward compatibility where the application doesn't set
+ * a stream use case
+ * - PREVIEW for live viewfinder and in-app image analysis
+ * - STILL_CAPTURE for still photo capture
+ * - VIDEO_RECORD for recording video clips
+ * - PREVIEW_VIDEO_STILL for one single stream used for viewfinder, video
+ * recording, and still capture.
+ * - VIDEO_CALL for long running video calls
+ *
+ * CameraCharacteristics#SCALER_AVAILABLE_STREAM_USE_CASES
+ * lists all of the supported stream use cases.
+ * Refer to CameraDevice#createCaptureSession for the
+ * mandatory stream combinations involving stream use cases, which can also be queried
+ * via MandatoryStreamCombination.
+ */
+ ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE = 19,
+
} acamera_metadata_enum_android_request_available_capabilities_t;
+// ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP
+typedef enum acamera_metadata_enum_acamera_request_available_dynamic_range_profiles_map {
+ /**
+ * 8-bit SDR profile which is the default for all non 10-bit output capable devices.
+ */
+ ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD = 0x1,
+
+ /**
+ * 10-bit pixel samples encoded using the Hybrid log-gamma transfer function.
+ */
+ ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10 = 0x2,
+
+ /**
+ * 10-bit pixel samples encoded using the SMPTE ST 2084 transfer function.
+ * This profile utilizes internal static metadata to increase the quality
+ * of the capture.
+ */
+ ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10 = 0x4,
+
+ /**
+ * 10-bit pixel samples encoded using the SMPTE ST 2084 transfer function.
+ * In contrast to HDR10, this profile uses internal per-frame metadata
+ * to further enhance the quality of the capture.
+ */
+ ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS = 0x8,
+
+ /**
+ * This is a camera mode for Dolby Vision capture optimized for a more scene
+ * accurate capture. This would typically differ from what a specific device
+ * might want to tune for a consumer optimized Dolby Vision general capture.
+ */
+ ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF
+ = 0x10,
+
+ /**
+ * This is the power optimized mode for 10-bit Dolby Vision HDR Reference Mode.
+ */
+ ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO
+ = 0x20,
+
+ /**
+ * This is the camera mode for the default Dolby Vision capture mode for the
+ * specific device. This would be tuned by each specific device for consumer
+ * pleasing results that resonate with their particular audience. We expect
+ * that each specific device would have a different look for their default
+ * Dolby Vision capture.
+ */
+ ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM
+ = 0x40,
+
+ /**
+ * This is the power optimized mode for 10-bit Dolby Vision HDR device specific
+ * capture mode.
+ */
+ ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO
+ = 0x80,
+
+ /**
+ * This is the 8-bit version of the Dolby Vision reference capture mode optimized
+ * for scene accuracy.
+ */
+ ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF
+ = 0x100,
+
+ /**
+ * This is the power optimized mode for 8-bit Dolby Vision HDR Reference Mode.
+ */
+ ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF_PO
+ = 0x200,
+
+ /**
+ * This is the 8-bit version of device specific tuned and optimized Dolby Vision
+ * capture mode.
+ */
+ ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM
+ = 0x400,
+
+ /**
+ * This is the power optimized mode for 8-bit Dolby Vision HDR device specific
+ * capture mode.
+ */
+ ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM_PO
+ = 0x800,
+
+ /**
+ *
+ */
+ ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_MAX = 0x1000,
+
+} acamera_metadata_enum_android_request_available_dynamic_range_profiles_map_t;
+
// ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS
typedef enum acamera_metadata_enum_acamera_scaler_available_stream_configurations {
@@ -9144,6 +9534,20 @@ typedef enum acamera_metadata_enum_acamera_scaler_available_recommended_stream_c
ACAMERA_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_PUBLIC_END
= 0x7,
+ /**
+ * If supported, the recommended 10-bit output stream configurations must include
+ * a subset of the advertised ImageFormat#YCBCR_P010 and
+ * ImageFormat#PRIVATE outputs that are optimized for power
+ * and performance when registered along with a supported 10-bit dynamic range profile.
+ * See android.hardware.camera2.params.OutputConfiguration#setDynamicRangeProfile for
+ * details.
+ */
+ ACAMERA_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_10BIT_OUTPUT
+ = 0x8,
+
+ ACAMERA_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_PUBLIC_END_3_8
+ = 0x9,
+
/**
* Vendor defined use cases. These depend on the vendor implementation.
*/
@@ -9225,6 +9629,76 @@ typedef enum acamera_metadata_enum_acamera_scaler_multi_resolution_stream_suppor
} acamera_metadata_enum_android_scaler_multi_resolution_stream_supported_t;
+// ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES
+typedef enum acamera_metadata_enum_acamera_scaler_available_stream_use_cases {
+ /**
+ * Default stream use case.
+ * This use case is the same as when the application doesn't set any use case for
+ * the stream. The camera device uses the properties of the output target, such as
+ * format, dataSpace, or surface class type, to optimize the image processing pipeline.
+ */
+ ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT = 0x0,
+
+ /**
+ * Live stream shown to the user.
+ * Optimized for performance and usability as a viewfinder, but not necessarily for
+ * image quality. The output is not meant to be persisted as saved images or video.
+ * No stall if ACAMERA_CONTROL_* are set to FAST. There may be stall if
+ * they are set to HIGH_QUALITY. This use case has the same behavior as the
+ * default SurfaceView and SurfaceTexture targets. Additionally, this use case can be
+ * used for in-app image analysis.
+ */
+ ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW = 0x1,
+
+ /**
+ * Still photo capture.
+ * Optimized for high-quality high-resolution capture, and not expected to maintain
+ * preview-like frame rates.
+ * The stream may have stalls regardless of whether ACAMERA_CONTROL_* is HIGH_QUALITY.
+ * This use case has the same behavior as the default JPEG and RAW related formats.
+ */
+ ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE = 0x2,
+
+ /**
+ * Recording video clips.
+ * Optimized for high-quality video capture, including high-quality image stabilization
+ * if supported by the device and enabled by the application. As a result, may produce
+ * output frames with a substantial lag from real time, to allow for highest-quality
+ * stabilization or other processing. As such, such an output is not suitable for drawing
+ * to screen directly, and is expected to be persisted to disk or similar for later
+ * playback or processing. Only streams that set the VIDEO_RECORD use case are guaranteed
+ * to have video stabilization applied when the video stabilization control is set
+ * to ON, as opposed to PREVIEW_STABILIZATION.
+ * This use case has the same behavior as the default MediaRecorder and MediaCodec
+ * targets.
+ */
+ ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD = 0x3,
+
+ /**
+ * One single stream used for combined purposes of preview, video, and still capture.
+ * For such multi-purpose streams, the camera device aims to make the best tradeoff
+ * between the individual use cases. For example, the STILL_CAPTURE use case by itself
+ * may have stalls for achieving best image quality. But if combined with PREVIEW and
+ * VIDEO_RECORD, the camera device needs to trade off the additional image processing
+ * for speed so that preview and video recording aren't slowed down.
+ * Similarly, VIDEO_RECORD may produce frames with a substantial lag, but
+ * PREVIEW_VIDEO_STILL must have minimal output delay. This means that to enable video
+ * stabilization with this use case, the device must support and the app must select the
+ * PREVIEW_STABILIZATION mode for video stabilization.
+ */
+ ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL = 0x4,
+
+ /**
+ * Long-running video call optimized for both power efficiency and video quality.
+ * The camera sensor may run in a lower-resolution mode to reduce power consumption
+ * at the cost of some image and digital zoom quality. Unlike VIDEO_RECORD, VIDEO_CALL
+ * outputs are expected to work in dark conditions, so are usually accompanied with
+ * variable frame rate settings to allow sufficient exposure time in low light.
+ */
+ ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL = 0x5,
+
+} acamera_metadata_enum_android_scaler_available_stream_use_cases_t;
+
// ACAMERA_SENSOR_REFERENCE_ILLUMINANT1
typedef enum acamera_metadata_enum_acamera_sensor_reference_illuminant1 {
@@ -9642,7 +10116,7 @@ typedef enum acamera_metadata_enum_acamera_tonemap_mode {
ACAMERA_TONEMAP_MODE_HIGH_QUALITY = 2,
/**
- * Use the gamma value specified in ACAMERA_TONEMAP_GAMMA to peform
+ *
Use the gamma value specified in ACAMERA_TONEMAP_GAMMA to perform
* tonemapping.
* All color enhancement and tonemapping must be disabled, except
* for applying the tonemapping curve specified by ACAMERA_TONEMAP_GAMMA.
@@ -9654,7 +10128,7 @@ typedef enum acamera_metadata_enum_acamera_tonemap_mode {
/**
* Use the preset tonemapping curve specified in
- * ACAMERA_TONEMAP_PRESET_CURVE to peform tonemapping.
+ * ACAMERA_TONEMAP_PRESET_CURVE to perform tonemapping.
* All color enhancement and tonemapping must be disabled, except
* for applying the tonemapping curve specified by
* ACAMERA_TONEMAP_PRESET_CURVE.
@@ -9752,7 +10226,7 @@ typedef enum acamera_metadata_enum_acamera_info_supported_hardware_level {
* fire the flash for flash power metering during precapture, and then fire the flash
* for the final capture, if a flash is available on the device and the AE mode is set to
* enable the flash.
- * Devices that initially shipped with Android version Q or newer will not include any LEGACY-level devices.
+ * Devices that initially shipped with Android version Q or newer will not include any LEGACY-level devices.
*
* @see ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER
* @see ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
@@ -9986,6 +10460,167 @@ typedef enum acamera_metadata_enum_acamera_heic_available_heic_stream_configurat
+// ACAMERA_AUTOMOTIVE_LOCATION
+typedef enum acamera_metadata_enum_acamera_automotive_location {
+ /**
+ * The camera device exists inside of the vehicle cabin.
+ */
+ ACAMERA_AUTOMOTIVE_LOCATION_INTERIOR = 0,
+
+ /**
+ * The camera exists outside of the vehicle body frame but not exactly on one of the
+ * exterior locations this enum defines. The applications should determine the exact
+ * location from ACAMERA_LENS_POSE_TRANSLATION.
+ *
+ * @see ACAMERA_LENS_POSE_TRANSLATION
+ */
+ ACAMERA_AUTOMOTIVE_LOCATION_EXTERIOR_OTHER = 1,
+
+ /**
+ * The camera device exists outside of the vehicle body frame and on its front side.
+ */
+ ACAMERA_AUTOMOTIVE_LOCATION_EXTERIOR_FRONT = 2,
+
+ /**
+ * The camera device exists outside of the vehicle body frame and on its rear side.
+ */
+ ACAMERA_AUTOMOTIVE_LOCATION_EXTERIOR_REAR = 3,
+
+ /**
+ * The camera device exists outside and on left side of the vehicle body frame.
+ */
+ ACAMERA_AUTOMOTIVE_LOCATION_EXTERIOR_LEFT = 4,
+
+ /**
+ * The camera device exists outside and on right side of the vehicle body frame.
+ */
+ ACAMERA_AUTOMOTIVE_LOCATION_EXTERIOR_RIGHT = 5,
+
+ /**
+ * The camera device exists on an extra vehicle, such as the trailer, but not exactly
+ * on one of front, rear, left, or right side. Applications should determine the exact
+ * location from ACAMERA_LENS_POSE_TRANSLATION.
+ *
+ * @see ACAMERA_LENS_POSE_TRANSLATION
+ */
+ ACAMERA_AUTOMOTIVE_LOCATION_EXTRA_OTHER = 6,
+
+ /**
+ * The camera device exists outside of the extra vehicle's body frame and on its front
+ * side.
+ */
+ ACAMERA_AUTOMOTIVE_LOCATION_EXTRA_FRONT = 7,
+
+ /**
+ * The camera device exists outside of the extra vehicle's body frame and on its rear
+ * side.
+ */
+ ACAMERA_AUTOMOTIVE_LOCATION_EXTRA_REAR = 8,
+
+ /**
+ * The camera device exists outside and on left side of the extra vehicle body.
+ */
+ ACAMERA_AUTOMOTIVE_LOCATION_EXTRA_LEFT = 9,
+
+ /**
+ * The camera device exists outside and on right side of the extra vehicle body.
+ */
+ ACAMERA_AUTOMOTIVE_LOCATION_EXTRA_RIGHT = 10,
+
+} acamera_metadata_enum_android_automotive_location_t;
+
+
+// ACAMERA_AUTOMOTIVE_LENS_FACING
+typedef enum acamera_metadata_enum_acamera_automotive_lens_facing {
+ /**
+ * The camera device faces the outside of the vehicle body frame but not exactly
+ * one of the exterior sides defined by this enum. Applications should determine
+ * the exact facing direction from ACAMERA_LENS_POSE_ROTATION and
+ * ACAMERA_LENS_POSE_TRANSLATION.
+ *
+ * @see ACAMERA_LENS_POSE_ROTATION
+ * @see ACAMERA_LENS_POSE_TRANSLATION
+ */
+ ACAMERA_AUTOMOTIVE_LENS_FACING_EXTERIOR_OTHER = 0,
+
+ /**
+ * The camera device faces the front of the vehicle body frame.
+ */
+ ACAMERA_AUTOMOTIVE_LENS_FACING_EXTERIOR_FRONT = 1,
+
+ /**
+ * The camera device faces the rear of the vehicle body frame.
+ */
+ ACAMERA_AUTOMOTIVE_LENS_FACING_EXTERIOR_REAR = 2,
+
+ /**
+ * The camera device faces the left side of the vehicle body frame.
+ */
+ ACAMERA_AUTOMOTIVE_LENS_FACING_EXTERIOR_LEFT = 3,
+
+ /**
+ * The camera device faces the right side of the vehicle body frame.
+ */
+ ACAMERA_AUTOMOTIVE_LENS_FACING_EXTERIOR_RIGHT = 4,
+
+ /**
+ * The camera device faces the inside of the vehicle body frame but not exactly
+ * one of seats described by this enum. Applications should determine the exact
+ * facing direction from ACAMERA_LENS_POSE_ROTATION and ACAMERA_LENS_POSE_TRANSLATION.
+ *
+ * @see ACAMERA_LENS_POSE_ROTATION
+ * @see ACAMERA_LENS_POSE_TRANSLATION
+ */
+ ACAMERA_AUTOMOTIVE_LENS_FACING_INTERIOR_OTHER = 5,
+
+ /**
+ * The camera device faces the left side seat of the first row.
+ */
+ ACAMERA_AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_1_LEFT = 6,
+
+ /**
+ * The camera device faces the center seat of the first row.
+ */
+ ACAMERA_AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_1_CENTER = 7,
+
+ /**
+ * The camera device faces the right seat of the first row.
+ */
+ ACAMERA_AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_1_RIGHT = 8,
+
+ /**
+ * The camera device faces the left side seat of the second row.
+ */
+ ACAMERA_AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_2_LEFT = 9,
+
+ /**
+ * The camera device faces the center seat of the second row.
+ */
+ ACAMERA_AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_2_CENTER = 10,
+
+ /**
+ * The camera device faces the right side seat of the second row.
+ */
+ ACAMERA_AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_2_RIGHT = 11,
+
+ /**
+ * The camera device faces the left side seat of the third row.
+ */
+ ACAMERA_AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_3_LEFT = 12,
+
+ /**
+ * The camera device faces the center seat of the third row.
+ */
+ ACAMERA_AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_3_CENTER = 13,
+
+ /**
+ * The camera device faces the right seat of the third row.
+ */
+ ACAMERA_AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_3_RIGHT = 14,
+
+} acamera_metadata_enum_android_automotive_lens_facing_t;
+
+
__END_DECLS
diff --git a/camera/ndk/libcamera2ndk.map.txt b/camera/ndk/libcamera2ndk.map.txt
index 2b630db3edfc782a09279d92d856215d8f4822e3..b3977ff4183a3682a35be5bbe029adcf586d67d0 100644
--- a/camera/ndk/libcamera2ndk.map.txt
+++ b/camera/ndk/libcamera2ndk.map.txt
@@ -2,11 +2,15 @@ LIBCAMERA2NDK {
global:
ACameraCaptureSession_abortCaptures;
ACameraCaptureSession_capture;
+ ACameraCaptureSession_captureV2; # introduced=33
ACameraCaptureSession_logicalCamera_capture; # introduced=29
+ ACameraCaptureSession_logicalCamera_captureV2; # introduced=33
ACameraCaptureSession_close;
ACameraCaptureSession_getDevice;
ACameraCaptureSession_setRepeatingRequest;
+ ACameraCaptureSession_setRepeatingRequestV2; # introduced=33
ACameraCaptureSession_logicalCamera_setRepeatingRequest; # introduced=29
+ ACameraCaptureSession_logicalCamera_setRepeatingRequestV2; # introduced=33
ACameraCaptureSession_stopRepeating;
ACameraCaptureSession_updateSharedOutput; # introduced=28
ACameraDevice_close;
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
index 9f63099152d97e3ab6028e1b816e80a478fb8280..0a57590f8d219330a55476df2cb77e35bb452032 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.cpp
@@ -29,8 +29,6 @@
#include "ACaptureRequest.h"
#include "utils.h"
-#include "ACameraCaptureSession.inc"
-
#define CHECK_TRANSACTION_AND_RET(remoteRet, status, callName) \
if (!remoteRet.isOk()) { \
ALOGE("%s: Transaction error during %s call %s", __FUNCTION__, callName, \
@@ -332,7 +330,8 @@ camera_status_t CameraDevice::updateOutputConfigurationLocked(ACaptureSessionOut
return ACAMERA_ERROR_UNKNOWN;
}
- mConfiguredOutputs[streamId] = std::make_pair(output->mWindow, outConfigW);
+ mConfiguredOutputs[streamId] =
+ std::make_pair(std::move(output->mWindow), std::move(outConfigW));
return ACAMERA_OK;
}
@@ -492,6 +491,7 @@ CameraDevice::disconnectLocked(sp& session) {
}
if (mRemote != nullptr) {
+ ALOGD("%s: binder disconnect reached", __FUNCTION__);
auto ret = mRemote->disconnect();
if (!ret.isOk()) {
ALOGE("%s: Transaction error while disconnecting device %s", __FUNCTION__,
@@ -625,7 +625,8 @@ CameraDevice::configureStreamsLocked(const ACaptureSessionOutputContainer* outpu
outConfigInsert.windowHandles[0] = anw;
outConfigInsert.physicalCameraId = outConfig.mPhysicalCameraId;
native_handle_ptr_wrapper wrap(anw);
- outputSet.insert(std::make_pair(anw, outConfigInsertW));
+
+ outputSet.emplace(std::move(anw), std::move(outConfigInsertW));
}
std::set> addSet = outputSet;
std::vector deleteList;
@@ -682,7 +683,7 @@ CameraDevice::configureStreamsLocked(const ACaptureSessionOutputContainer* outpu
}
// add new streams
- for (auto outputPair : addSet) {
+ for (const auto &outputPair : addSet) {
int streamId;
Status status = Status::UNKNOWN_ERROR;
auto ret = mRemote->createStream(outputPair.second,
@@ -847,12 +848,32 @@ CameraDevice::onCaptureErrorLocked(
return;
}
- const auto& windowHandles = outputPairIt->second.second.mOutputConfiguration.windowHandles;
- for (const auto& outHandle : windowHandles) {
- for (auto streamAndWindowId : request->mCaptureRequest.streamAndWindowIds) {
- int32_t windowId = streamAndWindowId.windowId;
- if (utils::isWindowNativeHandleEqual(windowHandles[windowId],outHandle)) {
- const native_handle_t* anw = windowHandles[windowId].getNativeHandle();
+ // Get the surfaces corresponding to the error stream id, go through
+ // them and try to match the surfaces in the corresponding
+ // CaptureRequest.
+ const auto& errorWindowHandles =
+ outputPairIt->second.second.mOutputConfiguration.windowHandles;
+ for (const auto& errorWindowHandle : errorWindowHandles) {
+ for (const auto &requestStreamAndWindowId :
+ request->mCaptureRequest.streamAndWindowIds) {
+ // Go through the surfaces in the capture request and see which
+ // ones match the surfaces in the error stream.
+ int32_t requestWindowId = requestStreamAndWindowId.windowId;
+ auto requestSurfacePairIt =
+ mConfiguredOutputs.find(requestStreamAndWindowId.streamId);
+ if (requestSurfacePairIt == mConfiguredOutputs.end()) {
+ ALOGE("%s: Error: request stream id %d does not exist", __FUNCTION__,
+ requestStreamAndWindowId.streamId);
+ setCameraDeviceErrorLocked(ACAMERA_ERROR_CAMERA_SERVICE);
+ return;
+ }
+
+ const auto &requestWindowHandles =
+ requestSurfacePairIt->second.second.mOutputConfiguration.windowHandles;
+ if (utils::isWindowNativeHandleEqual(
+ requestWindowHandles[requestWindowId], errorWindowHandle)) {
+ const native_handle_t* anw =
+ requestWindowHandles[requestWindowId].getNativeHandle();
ALOGV("Camera %s Lost output buffer for ANW %p frame %" PRId64,
getId(), anw, frameNumber);
@@ -910,6 +931,7 @@ void CameraDevice::CallbackHandler::onMessageReceived(
case kWhatOnError:
case kWhatSessionStateCb:
case kWhatCaptureStart:
+ case kWhatCaptureStart2:
case kWhatCaptureResult:
case kWhatLogicalCaptureResult:
case kWhatCaptureFail:
@@ -982,6 +1004,7 @@ void CameraDevice::CallbackHandler::onMessageReceived(
}
case kWhatSessionStateCb:
case kWhatCaptureStart:
+ case kWhatCaptureStart2:
case kWhatCaptureResult:
case kWhatLogicalCaptureResult:
case kWhatCaptureFail:
@@ -1002,6 +1025,7 @@ void CameraDevice::CallbackHandler::onMessageReceived(
const char *id_cstr = mId.c_str();
switch (msg->what()) {
case kWhatCaptureStart:
+ case kWhatCaptureStart2:
case kWhatCaptureResult:
case kWhatLogicalCaptureResult:
case kWhatCaptureFail:
@@ -1053,6 +1077,35 @@ void CameraDevice::CallbackHandler::onMessageReceived(
freeACaptureRequest(request);
break;
}
+ case kWhatCaptureStart2:
+ {
+ ACameraCaptureSession_captureCallback_startV2 onStart2;
+ found = msg->findPointer(kCallbackFpKey, (void**) &onStart2);
+ if (!found) {
+ ALOGE("%s: Cannot find capture startV2 callback!", __FUNCTION__);
+ return;
+ }
+ if (onStart2 == nullptr) {
+ return;
+ }
+ int64_t timestamp;
+ found = msg->findInt64(kTimeStampKey, &timestamp);
+ if (!found) {
+ ALOGE("%s: Cannot find timestamp!", __FUNCTION__);
+ return;
+ }
+ int64_t frameNumber;
+ found = msg->findInt64(kFrameNumberKey, &frameNumber);
+ if (!found) {
+ ALOGE("%s: Cannot find frame number!", __FUNCTION__);
+ return;
+ }
+
+ ACaptureRequest* request = allocateACaptureRequest(requestSp, id_cstr);
+ (*onStart2)(context, session.get(), request, timestamp, frameNumber);
+ freeACaptureRequest(request);
+ break;
+ }
case kWhatCaptureResult:
{
ACameraCaptureSession_captureCallback_result onResult;
@@ -1281,6 +1334,7 @@ CameraDevice::CallbackHolder::CallbackHolder(
ACameraCaptureSession_captureCallbacks* cbs) :
mSession(session), mRequests(requests),
mIsRepeating(isRepeating),
+ mIs2Callback(false),
mIsLogicalCameraCallback(false) {
initCaptureCallbacks(cbs);
@@ -1297,6 +1351,7 @@ CameraDevice::CallbackHolder::CallbackHolder(
ACameraCaptureSession_logicalCamera_captureCallbacks* lcbs) :
mSession(session), mRequests(requests),
mIsRepeating(isRepeating),
+ mIs2Callback(false),
mIsLogicalCameraCallback(true) {
initCaptureCallbacks(lcbs);
@@ -1306,6 +1361,40 @@ CameraDevice::CallbackHolder::CallbackHolder(
}
}
+CameraDevice::CallbackHolder::CallbackHolder(
+ sp<ACameraCaptureSession> session,
+ const Vector<sp<CaptureRequest>>& requests,
+ bool isRepeating,
+ ACameraCaptureSession_captureCallbacksV2* cbs) :
+ mSession(session), mRequests(requests),
+ mIsRepeating(isRepeating),
+ mIs2Callback(true),
+ mIsLogicalCameraCallback(false) {
+ initCaptureCallbacksV2(cbs);
+
+ if (cbs != nullptr) {
+ mOnCaptureCompleted = cbs->onCaptureCompleted;
+ mOnCaptureFailed = cbs->onCaptureFailed;
+ }
+}
+
+CameraDevice::CallbackHolder::CallbackHolder(
+ sp<ACameraCaptureSession> session,
+ const Vector<sp<CaptureRequest>>& requests,
+ bool isRepeating,
+ ACameraCaptureSession_logicalCamera_captureCallbacksV2* lcbs) :
+ mSession(session), mRequests(requests),
+ mIsRepeating(isRepeating),
+ mIs2Callback(true),
+ mIsLogicalCameraCallback(true) {
+ initCaptureCallbacksV2(lcbs);
+
+ if (lcbs != nullptr) {
+ mOnLogicalCameraCaptureCompleted = lcbs->onLogicalCameraCaptureCompleted;
+ mOnLogicalCameraCaptureFailed = lcbs->onLogicalCameraCaptureFailed;
+ }
+}
+
void
CameraDevice::checkRepeatingSequenceCompleteLocked(
const int sequenceId, const int64_t lastFrameNumber) {
@@ -1542,11 +1631,14 @@ CameraDevice::ServiceCallback::onCaptureStarted(
int32_t sequenceId = resultExtras.requestId;
int32_t burstId = resultExtras.burstId;
+ int64_t frameNumber = resultExtras.frameNumber;
auto it = dev->mSequenceCallbackMap.find(sequenceId);
if (it != dev->mSequenceCallbackMap.end()) {
CallbackHolder cbh = (*it).second;
ACameraCaptureSession_captureCallback_start onStart = cbh.mOnCaptureStarted;
+ ACameraCaptureSession_captureCallback_startV2 onStart2 = cbh.mOnCaptureStarted2;
+ bool v2Callback = cbh.mIs2Callback;
sp session = cbh.mSession;
if ((size_t) burstId >= cbh.mRequests.size()) {
ALOGE("%s: Error: request index %d out of bound (size %zu)",
@@ -1554,12 +1646,19 @@ CameraDevice::ServiceCallback::onCaptureStarted(
dev->setCameraDeviceErrorLocked(ACAMERA_ERROR_CAMERA_SERVICE);
}
sp request = cbh.mRequests[burstId];
- sp<AMessage> msg = new AMessage(kWhatCaptureStart, dev->mHandler);
+ sp<AMessage> msg = nullptr;
+ if (v2Callback) {
+ msg = new AMessage(kWhatCaptureStart2, dev->mHandler);
+ msg->setPointer(kCallbackFpKey, (void*) onStart2);
+ } else {
+ msg = new AMessage(kWhatCaptureStart, dev->mHandler);
+ msg->setPointer(kCallbackFpKey, (void*) onStart);
+ }
msg->setPointer(kContextKey, cbh.mContext);
msg->setObject(kSessionSpKey, session);
- msg->setPointer(kCallbackFpKey, (void*) onStart);
msg->setObject(kCaptureRequestKey, request);
msg->setInt64(kTimeStampKey, timestamp);
+ msg->setInt64(kFrameNumberKey, frameNumber);
dev->postSessionMsgAndCleanup(msg);
}
return ret;
diff --git a/camera/ndk/ndk_vendor/impl/ACameraDevice.h b/camera/ndk/ndk_vendor/impl/ACameraDevice.h
index 0b6c7c83af00fa7d000a2059d2fffa032d4d5a6b..c306206bc3685f03f01309cc45d16990c9038b0f 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraDevice.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraDevice.h
@@ -245,6 +245,7 @@ class CameraDevice final : public RefBase {
kWhatSessionStateCb, // onReady, onActive
// Capture callbacks
kWhatCaptureStart, // onCaptureStarted
+ kWhatCaptureStart2, // onCaptureStarted2
kWhatCaptureResult, // onCaptureProgressed, onCaptureCompleted
kWhatLogicalCaptureResult, // onLogicalCameraCaptureCompleted
kWhatCaptureFail, // onCaptureFailed
@@ -309,11 +310,18 @@ class CameraDevice final : public RefBase {
const Vector>& requests,
bool isRepeating,
ACameraCaptureSession_logicalCamera_captureCallbacks* lcbs);
-
- template
- void initCaptureCallbacks(T* cbs) {
+ CallbackHolder(sp<ACameraCaptureSession> session,
+ const Vector<sp<CaptureRequest>>& requests,
+ bool isRepeating,
+ ACameraCaptureSession_captureCallbacksV2* cbs);
+ CallbackHolder(sp<ACameraCaptureSession> session,
+ const Vector<sp<CaptureRequest>>& requests,
+ bool isRepeating,
+ ACameraCaptureSession_logicalCamera_captureCallbacksV2* lcbs);
+ void clearCallbacks() {
mContext = nullptr;
mOnCaptureStarted = nullptr;
+ mOnCaptureStarted2 = nullptr;
mOnCaptureProgressed = nullptr;
mOnCaptureCompleted = nullptr;
mOnLogicalCameraCaptureCompleted = nullptr;
@@ -322,6 +330,24 @@ class CameraDevice final : public RefBase {
mOnCaptureSequenceCompleted = nullptr;
mOnCaptureSequenceAborted = nullptr;
mOnCaptureBufferLost = nullptr;
+ }
+
+ template
+ void initCaptureCallbacksV2(T* cbs) {
+ clearCallbacks();
+ if (cbs != nullptr) {
+ mContext = cbs->context;
+ mOnCaptureStarted2 = cbs->onCaptureStarted;
+ mOnCaptureProgressed = cbs->onCaptureProgressed;
+ mOnCaptureSequenceCompleted = cbs->onCaptureSequenceCompleted;
+ mOnCaptureSequenceAborted = cbs->onCaptureSequenceAborted;
+ mOnCaptureBufferLost = cbs->onCaptureBufferLost;
+ }
+ }
+
+ template
+ void initCaptureCallbacks(T* cbs) {
+ clearCallbacks();
if (cbs != nullptr) {
mContext = cbs->context;
mOnCaptureStarted = cbs->onCaptureStarted;
@@ -335,10 +361,12 @@ class CameraDevice final : public RefBase {
sp mSession;
Vector> mRequests;
const bool mIsRepeating;
+ const bool mIs2Callback;
const bool mIsLogicalCameraCallback;
void* mContext;
ACameraCaptureSession_captureCallback_start mOnCaptureStarted;
+ ACameraCaptureSession_captureCallback_startV2 mOnCaptureStarted2;
ACameraCaptureSession_captureCallback_result mOnCaptureProgressed;
ACameraCaptureSession_captureCallback_result mOnCaptureCompleted;
ACameraCaptureSession_logicalCamera_captureCallback_result mOnLogicalCameraCaptureCompleted;
diff --git a/camera/ndk/ndk_vendor/impl/utils.h b/camera/ndk/ndk_vendor/impl/utils.h
index 6f5820ecee9cc67a8c364862adadf02db04f079c..62779a4b02992a067f1a92cb2061076dfac36579 100644
--- a/camera/ndk/ndk_vendor/impl/utils.h
+++ b/camera/ndk/ndk_vendor/impl/utils.h
@@ -109,8 +109,30 @@ struct OutputConfigurationWrapper {
mOutputConfiguration.windowGroupId = -1;
};
- OutputConfigurationWrapper(OutputConfiguration &outputConfiguration)
- : mOutputConfiguration((outputConfiguration)) { }
+ OutputConfigurationWrapper(const OutputConfigurationWrapper &other) {
+ *this = other;
+ }
+
+ // Needed to make sure that OutputConfiguration in
+ // OutputConfigurationWrapper, when copied doesn't call hidl_handle's
+ // assignment operator / copy constructor, which will lead to native handle
+ // cloning, which is not what we want for app callbacks which have the native
+ // handle as parameter.
+ OutputConfigurationWrapper &operator=(const OutputConfigurationWrapper &other) {
+ const OutputConfiguration &outputConfiguration = other.mOutputConfiguration;
+ mOutputConfiguration.rotation = outputConfiguration.rotation;
+ mOutputConfiguration.isDeferred = outputConfiguration.isDeferred;
+ mOutputConfiguration.width = outputConfiguration.width;
+ mOutputConfiguration.height = outputConfiguration.height;
+ mOutputConfiguration.windowGroupId = outputConfiguration.windowGroupId;
+ mOutputConfiguration.windowHandles.resize(outputConfiguration.windowHandles.size());
+ mOutputConfiguration.physicalCameraId = outputConfiguration.physicalCameraId;
+ size_t i = 0;
+ for (const auto &handle : outputConfiguration.windowHandles) {
+ mOutputConfiguration.windowHandles[i++] = handle.getNativeHandle();
+ }
+ return *this;
+ }
bool operator ==(const OutputConfiguration &other) const {
const OutputConfiguration &self = mOutputConfiguration;
diff --git a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
index ba14c5c4fd69fd029016e6870b7d55a1d35022e2..63cdb76ae5b0de4204d3c555d69e3e2159e7ff9d 100644
--- a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
+++ b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
@@ -236,6 +236,11 @@ class CameraHelper {
return ACameraCaptureSession_capture(mSession, &mCaptureCallbacks, 1, &mStillRequest,
&seqId);
}
+ int takePicture2() {
+ int seqId;
+ return ACameraCaptureSession_captureV2(mSession, &mCaptureCallbacksV2, 1,
+ &mStillRequest, &seqId);
+ }
int takeLogicalCameraPicture() {
int seqId;
@@ -243,15 +248,31 @@ class CameraHelper {
1, &mStillRequest, &seqId);
}
+ int takeLogicalCameraPicture2() {
+ int seqId;
+ return ACameraCaptureSession_logicalCamera_captureV2(mSession,
+ &mLogicalCaptureCallbacksV2, 1, &mStillRequest, &seqId);
+ }
+
bool checkCallbacks(int pictureCount) {
 std::lock_guard<std::mutex> lock(mMutex);
if (mCompletedCaptureCallbackCount != pictureCount) {
- ALOGE("Completed capture callaback count not as expected. expected %d actual %d",
+ ALOGE("Completed capture callback count not as expected. expected %d actual %d",
pictureCount, mCompletedCaptureCallbackCount);
return false;
}
return true;
}
+ bool checkCallbacksV2(int pictureCount) {
+ std::lock_guard<std::mutex> lock(mMutex);
+ if (mCaptureStartedCallbackCount != pictureCount) {
+ ALOGE("Capture started callback count not as expected. expected %d actual %d",
+ pictureCount, mCaptureStartedCallbackCount);
+ return false;
+ }
+ return true;
+ }
+
private:
ACameraDevice_StateCallbacks mDeviceCb{this, nullptr, nullptr};
@@ -276,6 +297,7 @@ class CameraHelper {
const char* mCameraId;
ACameraManager* mCameraManager;
int mCompletedCaptureCallbackCount = 0;
+ int mCaptureStartedCallbackCount = 0;
std::mutex mMutex;
ACameraCaptureSession_captureCallbacks mCaptureCallbacks = {
// TODO: Add tests for other callbacks
@@ -293,8 +315,25 @@ class CameraHelper {
nullptr, // onCaptureSequenceAborted
nullptr, // onCaptureBufferLost
};
+ ACameraCaptureSession_captureCallbacksV2 mCaptureCallbacksV2 = {
+ this, // context
+ [](void* ctx , ACameraCaptureSession *,const ACaptureRequest *, int64_t,
+ int64_t frameNumber ) {
+ CameraHelper *ch = static_cast<CameraHelper *>(ctx);
+ ASSERT_TRUE(frameNumber >= 0);
+ std::lock_guard<std::mutex> lock(ch->mMutex);
+ ch->mCaptureStartedCallbackCount++;
+ },
+ nullptr, // onCaptureProgressed
+ nullptr, // onCaptureCompleted
+ nullptr, // onCaptureFailed
+ nullptr, // onCaptureSequenceCompleted
+ nullptr, // onCaptureSequenceAborted
+ nullptr, // onCaptureBufferLost
+ };
std::vector mPhysicalCameraIds;
+
ACameraCaptureSession_logicalCamera_captureCallbacks mLogicalCaptureCallbacks = {
// TODO: Add tests for other callbacks
this, // context
@@ -336,6 +375,23 @@ class CameraHelper {
nullptr, // onCaptureSequenceAborted
nullptr, // onCaptureBufferLost
};
+ ACameraCaptureSession_logicalCamera_captureCallbacksV2 mLogicalCaptureCallbacksV2 = {
+ this, // context
+ [](void* ctx , ACameraCaptureSession *,const ACaptureRequest *, int64_t,
+ int64_t frameNumber) {
+ CameraHelper *ch = static_cast<CameraHelper *>(ctx);
+ ASSERT_TRUE(frameNumber >= 0);
+ std::lock_guard<std::mutex> lock(ch->mMutex);
+ ch->mCaptureStartedCallbackCount++;
+ },
+ nullptr, // onCaptureProgressed
+ nullptr, //onLogicalCaptureCompleted
+ nullptr, //onLogicalCaptureFailed
+ nullptr, // onCaptureSequenceCompleted
+ nullptr, // onCaptureSequenceAborted
+ nullptr, // onCaptureBufferLost
+ };
+
};
class ImageReaderTestCase {
@@ -570,7 +626,7 @@ class AImageReaderVendorTest : public ::testing::Test {
}
bool takePictures(const char* id, uint64_t readerUsage, int readerMaxImages,
- bool readerAsync, int pictureCount) {
+ bool readerAsync, int pictureCount, bool v2 = false) {
int ret = 0;
ImageReaderTestCase testCase(
@@ -600,7 +656,11 @@ class AImageReaderVendorTest : public ::testing::Test {
}
for (int i = 0; i < pictureCount; i++) {
- ret = cameraHelper.takePicture();
+ if (v2) {
+ ret = cameraHelper.takePicture2();
+ } else {
+ ret = cameraHelper.takePicture();
+ }
if (ret < 0) {
ALOGE("Unable to take picture");
return false;
@@ -617,7 +677,8 @@ class AImageReaderVendorTest : public ::testing::Test {
}
}
return testCase.getAcquiredImageCount() == pictureCount &&
- cameraHelper.checkCallbacks(pictureCount);
+ (v2 ? cameraHelper.checkCallbacksV2(pictureCount) :
+ cameraHelper.checkCallbacks(pictureCount));
}
bool testTakePicturesNative(const char* id) {
@@ -626,12 +687,14 @@ class AImageReaderVendorTest : public ::testing::Test {
for (auto& readerMaxImages : {1, 4, 8}) {
for (auto& readerAsync : {true, false}) {
for (auto& pictureCount : {1, 4, 8}) {
- if (!takePictures(id, readerUsage, readerMaxImages,
- readerAsync, pictureCount)) {
- ALOGE("Test takePictures failed for test case usage=%" PRIu64
- ", maxImages=%d, async=%d, pictureCount=%d",
- readerUsage, readerMaxImages, readerAsync, pictureCount);
- return false;
+ for ( auto & v2 : {true, false}) {
+ if (!takePictures(id, readerUsage, readerMaxImages,
+ readerAsync, pictureCount, v2)) {
+ ALOGE("Test takePictures failed for test case usage=%" PRIu64
+ ", maxImages=%d, async=%d, pictureCount=%d",
+ readerUsage, readerMaxImages, readerAsync, pictureCount);
+ return false;
+ }
}
}
}
@@ -725,7 +788,7 @@ class AImageReaderVendorTest : public ::testing::Test {
return;
}
- void testLogicalCameraPhysicalStream(bool usePhysicalSettings) {
+ void testLogicalCameraPhysicalStream(bool usePhysicalSettings, bool v2) {
const char* cameraId = nullptr;
ACameraMetadata* staticMetadata = nullptr;
std::vector physicalCameraIds;
@@ -772,7 +835,12 @@ class AImageReaderVendorTest : public ::testing::Test {
}
for (int i = 0; i < pictureCount; i++) {
- ret = cameraHelper.takeLogicalCameraPicture();
+ if (v2) {
+ ret = cameraHelper.takeLogicalCameraPicture2();
+ }
+ else {
+ ret = cameraHelper.takeLogicalCameraPicture();
+ }
ASSERT_EQ(ret, 0);
}
@@ -793,8 +861,11 @@ class AImageReaderVendorTest : public ::testing::Test {
ALOGI("Testing window %p", testCase->getNativeWindow());
ASSERT_EQ(testCase->getAcquiredImageCount(), pictureCount);
}
-
- ASSERT_TRUE(cameraHelper.checkCallbacks(pictureCount));
+ if (v2) {
+ ASSERT_TRUE(cameraHelper.checkCallbacksV2(pictureCount));
+ } else {
+ ASSERT_TRUE(cameraHelper.checkCallbacks(pictureCount));
+ }
ACameraMetadata_free(staticMetadata);
}
@@ -834,8 +905,10 @@ TEST_F(AImageReaderVendorTest, CreateWindowNativeHandle) {
}
TEST_F(AImageReaderVendorTest, LogicalCameraPhysicalStream) {
- testLogicalCameraPhysicalStream(false/*usePhysicalSettings*/);
- testLogicalCameraPhysicalStream(true/*usePhysicalSettings*/);
+ for (auto & v2 : {true, false}) {
+ testLogicalCameraPhysicalStream(false/*usePhysicalSettings*/, v2);
+ testLogicalCameraPhysicalStream(true/*usePhysicalSettings*/, v2);
+ }
}
} // namespace
diff --git a/camera/tests/CameraBinderTests.cpp b/camera/tests/CameraBinderTests.cpp
index 9f2f430f6246c3a3f8390117e4e4017796b6effd..17ea51270c590e2859fc92b8adc296fd17148060 100644
--- a/camera/tests/CameraBinderTests.cpp
+++ b/camera/tests/CameraBinderTests.cpp
@@ -96,6 +96,12 @@ public:
return binder::Status::ok();
};
+ virtual binder::Status onTorchStrengthLevelChanged(const String16& /*cameraId*/,
+ int32_t /*torchStrength*/) {
+ // No op
+ return binder::Status::ok();
+ }
+
virtual binder::Status onCameraAccessPrioritiesChanged() {
// No op
return binder::Status::ok();
diff --git a/cmds/screenrecord/Android.bp b/cmds/screenrecord/Android.bp
index 359a8350da1dc673f90d3f2c24de0b68882c5e32..d0b3ce074f08df4a6d52be10602f9876f90b0f69 100644
--- a/cmds/screenrecord/Android.bp
+++ b/cmds/screenrecord/Android.bp
@@ -55,12 +55,6 @@ cc_binary {
"libGLESv2",
],
- include_dirs: [
- "frameworks/av/media/libstagefright",
- "frameworks/av/media/libstagefright/include",
- "frameworks/native/include/media/openmax",
- ],
-
cflags: [
"-Werror",
"-Wall",
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index e6e347321d6769b70b9f0c577ffd8862f41b6e74..2e0b678e7cdd15310c74a641cbe06a6af3fcb854 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -701,7 +701,7 @@ static status_t recordScreen(const char* fileName) {
printf("Display is %dx%d @%.2ffps (orientation=%s), layerStack=%u\n",
layerStackSpaceRect.getWidth(), layerStackSpaceRect.getHeight(),
displayMode.refreshRate, toCString(displayState.orientation),
- displayState.layerStack);
+ displayState.layerStack.id);
fflush(stdout);
}
@@ -1067,7 +1067,7 @@ int main(int argc, char* const argv[]) {
std::optional displayId = SurfaceComposerClient::getInternalDisplayId();
if (!displayId) {
- fprintf(stderr, "Failed to get token for internal display\n");
+ fprintf(stderr, "Failed to get ID for internal display\n");
return 1;
}
@@ -1168,17 +1168,14 @@ int main(int argc, char* const argv[]) {
}
break;
case 'd':
- gPhysicalDisplayId = PhysicalDisplayId(atoll(optarg));
- if (gPhysicalDisplayId.value == 0) {
- fprintf(stderr, "Please specify a valid physical display id\n");
- return 2;
- } else if (SurfaceComposerClient::
- getPhysicalDisplayToken(gPhysicalDisplayId) == nullptr) {
- fprintf(stderr, "Invalid physical display id: %s\n",
- to_string(gPhysicalDisplayId).c_str());
- return 2;
+ if (const auto id = android::DisplayId::fromValue(atoll(optarg));
+ id && SurfaceComposerClient::getPhysicalDisplayToken(*id)) {
+ gPhysicalDisplayId = *id;
+ break;
}
- break;
+
+ fprintf(stderr, "Invalid physical display ID\n");
+ return 2;
default:
if (ic != '?') {
fprintf(stderr, "getopt_long returned unexpected value 0x%x\n", ic);
diff --git a/cmds/stagefright/Android.bp b/cmds/stagefright/Android.bp
index c4783d36dd452dca80b3e4f0dd961c203f50c645..e1fe07edcbe88624277d2caec8ce830ca2e1ac96 100644
--- a/cmds/stagefright/Android.bp
+++ b/cmds/stagefright/Android.bp
@@ -227,8 +227,6 @@ cc_binary {
"rs-headers",
],
- include_dirs: ["frameworks/av/media/libstagefright"],
-
shared_libs: [
"libstagefright",
"liblog",
diff --git a/cmds/stagefright/AudioPlayer.cpp b/cmds/stagefright/AudioPlayer.cpp
index 55427cafac9ce59169b5bba133c59fe34182bc2c..6cddf475040471b7647fd4f1043e77b1f996f875 100644
--- a/cmds/stagefright/AudioPlayer.cpp
+++ b/cmds/stagefright/AudioPlayer.cpp
@@ -249,7 +249,8 @@ status_t AudioPlayer::start(bool sourceAlreadyStarted) {
mAudioTrack = new AudioTrack(
AUDIO_STREAM_MUSIC, mSampleRate, AUDIO_FORMAT_PCM_16_BIT, audioMask,
- 0 /*frameCount*/, AUDIO_OUTPUT_FLAG_NONE, &AudioCallback, this,
+ 0 /*frameCount*/, AUDIO_OUTPUT_FLAG_NONE,
+ wp::fromExisting(this),
0 /*notificationFrames*/);
if ((err = mAudioTrack->initCheck()) != OK) {
@@ -397,10 +398,6 @@ void AudioPlayer::reset() {
mStartPosUs = 0;
}
-// static
-void AudioPlayer::AudioCallback(int event, void *user, void *info) {
- static_cast(user)->AudioCallback(event, info);
-}
bool AudioPlayer::reachedEOS(status_t *finalStatus) {
*finalStatus = OK;
@@ -455,20 +452,12 @@ size_t AudioPlayer::AudioSinkCallback(
return 0;
}
-void AudioPlayer::AudioCallback(int event, void *info) {
- switch (event) {
- case AudioTrack::EVENT_MORE_DATA:
- {
- AudioTrack::Buffer *buffer = (AudioTrack::Buffer *)info;
- size_t numBytesWritten = fillBuffer(buffer->raw, buffer->size);
- buffer->size = numBytesWritten;
- }
- break;
+size_t AudioPlayer::onMoreData(const AudioTrack::Buffer& buffer) {
+ return fillBuffer(buffer.data(), buffer.size());
+}
- case AudioTrack::EVENT_STREAM_END:
- mReachedEOS = true;
- break;
- }
+void AudioPlayer::onStreamEnd() {
+ mReachedEOS = true;
}
size_t AudioPlayer::fillBuffer(void *data, size_t size) {
diff --git a/cmds/stagefright/AudioPlayer.h b/cmds/stagefright/AudioPlayer.h
index 43550ea23dafe3bfc8324a247108107c4d25b2cf..608f54bb36dc3fbd2dce058171095b5e0d5fb733 100644
--- a/cmds/stagefright/AudioPlayer.h
+++ b/cmds/stagefright/AudioPlayer.h
@@ -19,6 +19,7 @@
#define AUDIO_PLAYER_H_
#include
+#include
#include
#include
#include
@@ -26,10 +27,9 @@
namespace android {
-class AudioTrack;
struct AwesomePlayer;
-class AudioPlayer {
+class AudioPlayer : AudioTrack::IAudioTrackCallback {
public:
enum {
REACHED_EOS,
@@ -66,6 +66,9 @@ public:
status_t getPlaybackRate(AudioPlaybackRate *rate /* nonnull */);
private:
+ friend sp;
+ size_t onMoreData(const AudioTrack::Buffer& buffer) override;
+ void onStreamEnd() override;
sp mSource;
sp mAudioTrack;
@@ -99,9 +102,6 @@ private:
int64_t mStartPosUs;
const uint32_t mCreateFlags;
- static void AudioCallback(int event, void *user, void *info);
- void AudioCallback(int event, void *info);
-
static size_t AudioSinkCallback(
MediaPlayerBase::AudioSink *audioSink,
void *data, size_t size, void *me,
diff --git a/cmds/stagefright/audioloop.cpp b/cmds/stagefright/audioloop.cpp
index 4b41ff8095574c06e99340b22e4c9e0c4297015e..83f8fe95cbd7d169e6be90931ae8e4f0f383db00 100644
--- a/cmds/stagefright/audioloop.cpp
+++ b/cmds/stagefright/audioloop.cpp
@@ -166,9 +166,9 @@ int main(int argc, char* argv[])
sp decoder = SimpleDecodingSource::Create(encoder);
if (playToSpeaker) {
- AudioPlayer player(NULL);
- player.setSource(decoder);
- player.start();
+ sp player = sp::make(nullptr);
+ player->setSource(decoder);
+ player->start();
sleep(duration);
ALOGI("Line: %d", __LINE__);
diff --git a/cmds/stagefright/record.cpp b/cmds/stagefright/record.cpp
index 098c27865a930c4ac02a6ec04d01ecb3327fbaf6..5743ad6fed5c3bd9c0e997ec677b83af83d0c6d0 100644
--- a/cmds/stagefright/record.cpp
+++ b/cmds/stagefright/record.cpp
@@ -32,7 +32,6 @@
#include
#include
-#include "AudioPlayer.h"
using namespace android;
@@ -274,17 +273,6 @@ int main(int /* argc */, char ** /* argv */) {
const int32_t kNumChannels = 2;
sp audioSource = new SineSource(kSampleRate, kNumChannels);
-#if 0
- sp audioSink;
- AudioPlayer *player = new AudioPlayer(audioSink);
- player->setSource(audioSource);
- player->start();
-
- sleep(10);
-
- player->stop();
-#endif
-
sp encMeta = new AMessage;
encMeta->setString("mime",
0 ? MEDIA_MIMETYPE_AUDIO_AMR_WB : MEDIA_MIMETYPE_AUDIO_AAC);
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index 6d1263efbb983eb4733fe8ef1772dd523a453db0..9783855b2a7e0b2ca92235b24b87e4cfad8b15df 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -224,7 +224,7 @@ static void playSource(sp &source) {
}
if (gPlaybackAudio) {
- AudioPlayer *player = new AudioPlayer(NULL);
+ sp player = sp::make(nullptr);
player->setSource(rawSource);
rawSource.clear();
@@ -239,9 +239,6 @@ static void playSource(sp &source) {
fprintf(stderr, "unable to start playback err=%d (0x%08x)\n", err, err);
}
- delete player;
- player = NULL;
-
return;
} else if (gReproduceBug >= 3 && gReproduceBug <= 5) {
int64_t durationUs;
diff --git a/drm/drmserver/DrmManager.cpp b/drm/drmserver/DrmManager.cpp
index 74e3223bce017ec1a2a64270a58c7523057a4970..f7989bd3661460c5b203e90dc07e493df860338e 100644
--- a/drm/drmserver/DrmManager.cpp
+++ b/drm/drmserver/DrmManager.cpp
@@ -208,7 +208,11 @@ void DrmManager::removeUniqueId(int uniqueId) {
}
status_t DrmManager::loadPlugIns() {
+#if __LP64__
+ String8 pluginDirPath("/system/lib64/drm");
+#else
String8 pluginDirPath("/system/lib/drm");
+#endif
loadPlugIns(pluginDirPath);
return DRM_NO_ERROR;
}
diff --git a/drm/libmediadrm/Android.bp b/drm/libmediadrm/Android.bp
index 71df58cae8c8b8220eda58dc70db52f70e781bb6..408d21664ad1fee8941ae9d7cc770cad987180d2 100644
--- a/drm/libmediadrm/Android.bp
+++ b/drm/libmediadrm/Android.bp
@@ -28,8 +28,13 @@ cc_library {
"DrmSessionManager.cpp",
"SharedLibrary.cpp",
"DrmHal.cpp",
+ "DrmHalHidl.cpp",
+ "DrmHalAidl.cpp",
"CryptoHal.cpp",
+ "CryptoHalHidl.cpp",
+ "CryptoHalAidl.cpp",
"DrmUtils.cpp",
+ "DrmHalListener.cpp",
],
local_include_dirs: [
@@ -63,10 +68,12 @@ cc_library {
"android.hardware.drm@1.4",
"libhidlallocatorutils",
"libhidlbase",
+ "android.hardware.drm-V1-ndk",
],
static_libs: [
"resourcemanager_aidl_interface-ndk",
+ "libaidlcommonsupport",
],
export_shared_lib_headers: [
@@ -162,10 +169,6 @@ cc_library_shared {
"DrmMetricsConsumer.cpp",
],
- include_dirs: [
- "frameworks/av/media/libmedia/include"
- ],
-
shared_libs: [
"android.hardware.drm@1.0",
"android.hardware.drm@1.1",
@@ -181,5 +184,6 @@ cc_library_shared {
header_libs: [
"libmediametrics_headers",
"libstagefright_foundation_headers",
+ "libmedia_headers",
],
}
diff --git a/drm/libmediadrm/CryptoHal.cpp b/drm/libmediadrm/CryptoHal.cpp
index e0db1c4378cf84831d56684cab5077103b80d47e..f95d5272ae9a5816cfd4c3f2f88329605e39194d 100644
--- a/drm/libmediadrm/CryptoHal.cpp
+++ b/drm/libmediadrm/CryptoHal.cpp
@@ -16,389 +16,100 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "CryptoHal"
-#include
-
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
#include
+#include
+#include
#include
-using drm::V1_0::BufferType;
-using drm::V1_0::DestinationBuffer;
-using drm::V1_0::ICryptoFactory;
-using drm::V1_0::ICryptoPlugin;
-using drm::V1_0::Mode;
-using drm::V1_0::Pattern;
-using drm::V1_0::SharedBuffer;
-using drm::V1_0::Status;
-using drm::V1_0::SubSample;
-
-using ::android::DrmUtils::toStatusT;
-using ::android::hardware::hidl_array;
-using ::android::hardware::hidl_handle;
-using ::android::hardware::hidl_memory;
-using ::android::hardware::hidl_string;
-using ::android::hardware::hidl_vec;
-using ::android::hardware::HidlMemory;
-using ::android::hardware::Return;
-using ::android::hardware::Void;
-using ::android::sp;
-
-typedef drm::V1_2::Status Status_V1_2;
-
namespace android {
-static hidl_vec toHidlVec(const Vector &vector) {
- hidl_vec vec;
- vec.setToExternal(const_cast(vector.array()), vector.size());
- return vec;
-}
-
-static hidl_vec toHidlVec(const void *ptr, size_t size) {
- hidl_vec vec;
- vec.resize(size);
- memcpy(vec.data(), ptr, size);
- return vec;
-}
-
-static hidl_array toHidlArray16(const uint8_t *ptr) {
- if (!ptr) {
- return hidl_array();
- }
- return hidl_array(ptr);
-}
-
-
-static String8 toString8(hidl_string hString) {
- return String8(hString.c_str());
-}
-
-
-CryptoHal::CryptoHal()
- : mFactories(makeCryptoFactories()),
- mInitCheck((mFactories.size() == 0) ? ERROR_UNSUPPORTED : NO_INIT),
- mHeapSeqNum(0) {
-}
-
-CryptoHal::~CryptoHal() {
-}
-
-Vector> CryptoHal::makeCryptoFactories() {
- Vector> factories;
-
- auto manager = hardware::defaultServiceManager1_2();
- if (manager != NULL) {
- manager->listManifestByInterface(drm::V1_0::ICryptoFactory::descriptor,
- [&factories](const hidl_vec ®istered) {
- for (const auto &instance : registered) {
- auto factory = drm::V1_0::ICryptoFactory::getService(instance);
- if (factory != NULL) {
- ALOGD("found drm@1.0 ICryptoFactory %s", instance.c_str());
- factories.push_back(factory);
- }
- }
- }
- );
- manager->listManifestByInterface(drm::V1_1::ICryptoFactory::descriptor,
- [&factories](const hidl_vec ®istered) {
- for (const auto &instance : registered) {
- auto factory = drm::V1_1::ICryptoFactory::getService(instance);
- if (factory != NULL) {
- ALOGD("found drm@1.1 ICryptoFactory %s", instance.c_str());
- factories.push_back(factory);
- }
- }
- }
- );
- }
-
- if (factories.size() == 0) {
- // must be in passthrough mode, load the default passthrough service
- auto passthrough = ICryptoFactory::getService();
- if (passthrough != NULL) {
- ALOGI("makeCryptoFactories: using default passthrough crypto instance");
- factories.push_back(passthrough);
- } else {
- ALOGE("Failed to find any crypto factories");
- }
- }
- return factories;
-}
-
-sp CryptoHal::makeCryptoPlugin(const sp& factory,
- const uint8_t uuid[16], const void *initData, size_t initDataSize) {
-
- sp plugin;
- Return hResult = factory->createPlugin(toHidlArray16(uuid),
- toHidlVec(initData, initDataSize),
- [&](Status status, const sp& hPlugin) {
- if (status != Status::OK) {
- ALOGE("Failed to make crypto plugin");
- return;
- }
- plugin = hPlugin;
- }
- );
- if (!hResult.isOk()) {
- mInitCheck = DEAD_OBJECT;
- }
- return plugin;
+CryptoHal::CryptoHal() {
+ mCryptoHalAidl = sp::make();
+ mCryptoHalHidl = sp::make();
}
+CryptoHal::~CryptoHal() {}
status_t CryptoHal::initCheck() const {
- return mInitCheck;
+ if (mCryptoHalAidl->initCheck() == OK || mCryptoHalHidl->initCheck() == OK) return OK;
+ if (mCryptoHalAidl->initCheck() == NO_INIT || mCryptoHalHidl->initCheck() == NO_INIT)
+ return NO_INIT;
+ return mCryptoHalHidl->initCheck();
}
-
bool CryptoHal::isCryptoSchemeSupported(const uint8_t uuid[16]) {
- Mutex::Autolock autoLock(mLock);
-
- for (size_t i = 0; i < mFactories.size(); i++) {
- if (mFactories[i]->isCryptoSchemeSupported(uuid)) {
- return true;
- }
- }
- return false;
+ return mCryptoHalAidl->isCryptoSchemeSupported(uuid) ||
+ mCryptoHalHidl->isCryptoSchemeSupported(uuid);
}
-status_t CryptoHal::createPlugin(const uint8_t uuid[16], const void *data,
- size_t size) {
- Mutex::Autolock autoLock(mLock);
-
- for (size_t i = 0; i < mFactories.size(); i++) {
- if (mFactories[i]->isCryptoSchemeSupported(uuid)) {
- mPlugin = makeCryptoPlugin(mFactories[i], uuid, data, size);
- if (mPlugin != NULL) {
- mPluginV1_2 = drm::V1_2::ICryptoPlugin::castFrom(mPlugin);
- }
- }
- }
-
- if (mInitCheck == NO_INIT) {
- mInitCheck = mPlugin == NULL ? ERROR_UNSUPPORTED : OK;
- }
-
- return mInitCheck;
+status_t CryptoHal::createPlugin(const uint8_t uuid[16], const void* data, size_t size) {
+ if (mCryptoHalAidl->createPlugin(uuid, data, size) != OK)
+ return mCryptoHalHidl->createPlugin(uuid, data, size);
+ return OK;
}
status_t CryptoHal::destroyPlugin() {
- Mutex::Autolock autoLock(mLock);
-
- if (mInitCheck != OK) {
- return mInitCheck;
- }
-
- mPlugin.clear();
- mPluginV1_2.clear();
- return OK;
+ // This requires plugin to be created.
+ if (mCryptoHalAidl->initCheck() == OK) return mCryptoHalAidl->destroyPlugin();
+ return mCryptoHalHidl->destroyPlugin();
}
-bool CryptoHal::requiresSecureDecoderComponent(const char *mime) const {
- Mutex::Autolock autoLock(mLock);
-
- if (mInitCheck != OK) {
- return false;
- }
-
- Return hResult = mPlugin->requiresSecureDecoderComponent(hidl_string(mime));
- if (!hResult.isOk()) {
- return false;
- }
- return hResult;
+bool CryptoHal::requiresSecureDecoderComponent(const char* mime) const {
+ // This requires plugin to be created.
+ if (mCryptoHalAidl->initCheck() == OK)
+ return mCryptoHalAidl->requiresSecureDecoderComponent(mime);
+ return mCryptoHalHidl->requiresSecureDecoderComponent(mime);
}
-
-/**
- * If the heap base isn't set, get the heap base from the HidlMemory
- * and send it to the HAL so it can map a remote heap of the same
- * size. Once the heap base is established, shared memory buffers
- * are sent by providing an offset into the heap and a buffer size.
- */
-int32_t CryptoHal::setHeapBase(const sp& heap) {
- if (heap == NULL || mHeapSeqNum < 0) {
- ALOGE("setHeapBase(): heap %p mHeapSeqNum %d", heap.get(), mHeapSeqNum);
- return -1;
+void CryptoHal::notifyResolution(uint32_t width, uint32_t height) {
+ // This requires plugin to be created.
+ if (mCryptoHalAidl->initCheck() == OK) {
+ mCryptoHalAidl->notifyResolution(width, height);
+ return;
}
- Mutex::Autolock autoLock(mLock);
-
- int32_t seqNum = mHeapSeqNum++;
- uint32_t bufferId = static_cast(seqNum);
- mHeapSizes.add(seqNum, heap->size());
- Return hResult = mPlugin->setSharedBufferBase(*heap, bufferId);
- ALOGE_IF(!hResult.isOk(), "setSharedBufferBase(): remote call failed");
- return seqNum;
+ mCryptoHalHidl->notifyResolution(width, height);
}
-void CryptoHal::clearHeapBase(int32_t seqNum) {
- Mutex::Autolock autoLock(mLock);
-
- /*
- * Clear the remote shared memory mapping by setting the shared
- * buffer base to a null hidl_memory.
- *
- * TODO: Add a releaseSharedBuffer method in a future DRM HAL
- * API version to make this explicit.
- */
- ssize_t index = mHeapSizes.indexOfKey(seqNum);
- if (index >= 0) {
- if (mPlugin != NULL) {
- uint32_t bufferId = static_cast(seqNum);
- Return hResult = mPlugin->setSharedBufferBase(hidl_memory(), bufferId);
- ALOGE_IF(!hResult.isOk(), "setSharedBufferBase(): remote call failed");
- }
- mHeapSizes.removeItem(seqNum);
- }
+status_t CryptoHal::setMediaDrmSession(const Vector& sessionId) {
+ // This requires plugin to be created.
+ if (mCryptoHalAidl->initCheck() == OK) return mCryptoHalAidl->setMediaDrmSession(sessionId);
+ return mCryptoHalHidl->setMediaDrmSession(sessionId);
}
-status_t CryptoHal::checkSharedBuffer(const ::SharedBuffer &buffer) {
- int32_t seqNum = static_cast(buffer.bufferId);
- // memory must be in one of the heaps that have been set
- if (mHeapSizes.indexOfKey(seqNum) < 0) {
- return UNKNOWN_ERROR;
- }
-
- // memory must be within the address space of the heap
- size_t heapSize = mHeapSizes.valueFor(seqNum);
- if (heapSize < buffer.offset + buffer.size ||
- SIZE_MAX - buffer.offset < buffer.size) {
- android_errorWriteLog(0x534e4554, "76221123");
- return UNKNOWN_ERROR;
- }
-
- return OK;
+ssize_t CryptoHal::decrypt(const uint8_t key[16], const uint8_t iv[16], CryptoPlugin::Mode mode,
+ const CryptoPlugin::Pattern& pattern, const ::SharedBuffer& source,
+ size_t offset, const CryptoPlugin::SubSample* subSamples,
+ size_t numSubSamples, const ::DestinationBuffer& destination,
+ AString* errorDetailMsg) {
+ // This requires plugin to be created.
+ if (mCryptoHalAidl->initCheck() == OK)
+ return mCryptoHalAidl->decrypt(key, iv, mode, pattern, source, offset, subSamples,
+ numSubSamples, destination, errorDetailMsg);
+ return mCryptoHalHidl->decrypt(key, iv, mode, pattern, source, offset, subSamples,
+ numSubSamples, destination, errorDetailMsg);
}
-ssize_t CryptoHal::decrypt(const uint8_t keyId[16], const uint8_t iv[16],
- CryptoPlugin::Mode mode, const CryptoPlugin::Pattern &pattern,
- const ::SharedBuffer &hSource, size_t offset,
- const CryptoPlugin::SubSample *subSamples, size_t numSubSamples,
- const ::DestinationBuffer &hDestination, AString *errorDetailMsg) {
- Mutex::Autolock autoLock(mLock);
-
- if (mInitCheck != OK) {
- return mInitCheck;
- }
-
- Mode hMode;
- switch(mode) {
- case CryptoPlugin::kMode_Unencrypted:
- hMode = Mode::UNENCRYPTED ;
- break;
- case CryptoPlugin::kMode_AES_CTR:
- hMode = Mode::AES_CTR;
- break;
- case CryptoPlugin::kMode_AES_WV:
- hMode = Mode::AES_CBC_CTS;
- break;
- case CryptoPlugin::kMode_AES_CBC:
- hMode = Mode::AES_CBC;
- break;
- default:
- return UNKNOWN_ERROR;
- }
-
- Pattern hPattern;
- hPattern.encryptBlocks = pattern.mEncryptBlocks;
- hPattern.skipBlocks = pattern.mSkipBlocks;
-
- std::vector stdSubSamples;
- for (size_t i = 0; i < numSubSamples; i++) {
- SubSample subSample;
- subSample.numBytesOfClearData = subSamples[i].mNumBytesOfClearData;
- subSample.numBytesOfEncryptedData = subSamples[i].mNumBytesOfEncryptedData;
- stdSubSamples.push_back(subSample);
- }
- auto hSubSamples = hidl_vec(stdSubSamples);
-
- bool secure;
- if (hDestination.type == BufferType::SHARED_MEMORY) {
- status_t status = checkSharedBuffer(hDestination.nonsecureMemory);
- if (status != OK) {
- return status;
- }
- secure = false;
- } else if (hDestination.type == BufferType::NATIVE_HANDLE) {
- secure = true;
- } else {
- android_errorWriteLog(0x534e4554, "70526702");
- return UNKNOWN_ERROR;
- }
-
- status_t status = checkSharedBuffer(hSource);
- if (status != OK) {
- return status;
- }
-
- status_t err = UNKNOWN_ERROR;
- uint32_t bytesWritten = 0;
-
- Return hResult;
-
- mLock.unlock();
- if (mPluginV1_2 != NULL) {
- hResult = mPluginV1_2->decrypt_1_2(secure, toHidlArray16(keyId), toHidlArray16(iv),
- hMode, hPattern, hSubSamples, hSource, offset, hDestination,
- [&](Status_V1_2 status, uint32_t hBytesWritten, hidl_string hDetailedError) {
- if (status == Status_V1_2::OK) {
- bytesWritten = hBytesWritten;
- *errorDetailMsg = toString8(hDetailedError);
- }
- err = toStatusT(status);
- }
- );
- } else {
- hResult = mPlugin->decrypt(secure, toHidlArray16(keyId), toHidlArray16(iv),
- hMode, hPattern, hSubSamples, hSource, offset, hDestination,
- [&](Status status, uint32_t hBytesWritten, hidl_string hDetailedError) {
- if (status == Status::OK) {
- bytesWritten = hBytesWritten;
- *errorDetailMsg = toString8(hDetailedError);
- }
- err = toStatusT(status);
- }
- );
- }
-
- err = hResult.isOk() ? err : DEAD_OBJECT;
- if (err == OK) {
- return bytesWritten;
- }
- return err;
+int32_t CryptoHal::setHeap(const sp& heap) {
+ // This requires plugin to be created.
+ if (mCryptoHalAidl->initCheck() == OK) return mCryptoHalAidl->setHeap(heap);
+ return mCryptoHalHidl->setHeap(heap);
}
-void CryptoHal::notifyResolution(uint32_t width, uint32_t height) {
- Mutex::Autolock autoLock(mLock);
-
- if (mInitCheck != OK) {
+void CryptoHal::unsetHeap(int32_t seqNum) {
+ // This requires plugin to be created.
+ if (mCryptoHalAidl->initCheck() == OK) {
+ mCryptoHalAidl->unsetHeap(seqNum);
return;
}
- auto hResult = mPlugin->notifyResolution(width, height);
- ALOGE_IF(!hResult.isOk(), "notifyResolution txn failed %s", hResult.description().c_str());
+ mCryptoHalHidl->unsetHeap(seqNum);
}
-status_t CryptoHal::setMediaDrmSession(const Vector &sessionId) {
- Mutex::Autolock autoLock(mLock);
-
- if (mInitCheck != OK) {
- return mInitCheck;
- }
-
- auto err = mPlugin->setMediaDrmSession(toHidlVec(sessionId));
- return err.isOk() ? toStatusT(err) : DEAD_OBJECT;
+status_t CryptoHal::getLogMessages(Vector& logs) const {
+ // This requires plugin to be created.
+ if (mCryptoHalAidl->initCheck() == OK) return mCryptoHalAidl->getLogMessages(logs);
+ return mCryptoHalHidl->getLogMessages(logs);
}
-status_t CryptoHal::getLogMessages(Vector &logs) const {
- Mutex::Autolock autoLock(mLock);
- return DrmUtils::GetLogMessages(mPlugin, logs);
-}
-} // namespace android
+} // namespace android
\ No newline at end of file
diff --git a/drm/libmediadrm/CryptoHalAidl.cpp b/drm/libmediadrm/CryptoHalAidl.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..8b9d1de93adc82228e6cfa6b526aaf1f878ff287
--- /dev/null
+++ b/drm/libmediadrm/CryptoHalAidl.cpp
@@ -0,0 +1,425 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CryptoHalAidl"
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+using ::aidl::android::hardware::drm::CryptoSchemes;
+using DestinationBufferAidl = ::aidl::android::hardware::drm::DestinationBuffer;
+using ::aidl::android::hardware::drm::Mode;
+using ::aidl::android::hardware::drm::Pattern;
+using SharedBufferAidl = ::aidl::android::hardware::drm::SharedBuffer;
+using ::aidl::android::hardware::drm::Status;
+using ::aidl::android::hardware::drm::SubSample;
+using ::aidl::android::hardware::drm::Uuid;
+using ::aidl::android::hardware::drm::SecurityLevel;
+using NativeHandleAidlCommon = ::aidl::android::hardware::common::NativeHandle;
+using ::aidl::android::hardware::drm::DecryptArgs;
+
+using ::android::sp;
+using ::android::DrmUtils::statusAidlToStatusT;
+using ::android::hardware::hidl_array;
+using ::android::hardware::hidl_handle;
+using ::android::hardware::hidl_memory;
+using ::android::hardware::hidl_string;
+using ::android::hardware::hidl_vec;
+using ::android::hardware::HidlMemory;
+using ::android::hardware::Return;
+using ::android::hardware::Void;
+
+using ::aidl::android::hardware::drm::Uuid;
+// -------Hidl interface related-----------------
+// TODO: replace before removing hidl interface
+
+using BufferTypeHidl = ::android::hardware::drm::V1_0::BufferType;
+using SharedBufferHidl = ::android::hardware::drm::V1_0::SharedBuffer;
+using DestinationBufferHidl = ::android::hardware::drm::V1_0::DestinationBuffer;
+
+// -------Hidl interface related end-------------
+
+namespace android {
+
+template
+static std::vector toStdVec(const Vector& vector) {
+ auto v = reinterpret_cast(vector.array());
+ std::vector vec(v, v + vector.size());
+ return vec;
+}
+
+// -------Hidl interface related-----------------
+// TODO: replace before removing hidl interface
+status_t CryptoHalAidl::checkSharedBuffer(const SharedBufferHidl& buffer) {
+ int32_t seqNum = static_cast(buffer.bufferId);
+ // memory must be in one of the heaps that have been set
+ if (mHeapSizes.indexOfKey(seqNum) < 0) {
+ return UNKNOWN_ERROR;
+ }
+
+ // memory must be within the address space of the heap
+ size_t heapSize = mHeapSizes.valueFor(seqNum);
+ if (heapSize < buffer.offset + buffer.size || SIZE_MAX - buffer.offset < buffer.size) {
+ android_errorWriteLog(0x534e4554, "76221123");
+ return UNKNOWN_ERROR;
+ }
+
+ return OK;
+}
+
+static SharedBufferAidl hidlSharedBufferToAidlSharedBuffer(const SharedBufferHidl& buffer) {
+ SharedBufferAidl aidlsb;
+ aidlsb.bufferId = buffer.bufferId;
+ aidlsb.offset = buffer.offset;
+ aidlsb.size = buffer.size;
+ return aidlsb;
+}
+
+static DestinationBufferAidl hidlDestinationBufferToAidlDestinationBuffer(
+ const DestinationBufferHidl& buffer) {
+ DestinationBufferAidl aidldb;
+ // skip negative convert check as count of enum elements are 2
+ switch(buffer.type) {
+ case BufferTypeHidl::SHARED_MEMORY:
+ aidldb.set(
+ hidlSharedBufferToAidlSharedBuffer(buffer.nonsecureMemory));
+ break;
+ default:
+ auto handle = buffer.secureMemory.getNativeHandle();
+ if (handle) {
+ aidldb.set(
+ ::android::dupToAidl(handle));
+ } else {
+ NativeHandleAidlCommon emptyhandle;
+ aidldb.set(
+ std::move(emptyhandle));
+ }
+ break;
+ }
+
+ return aidldb;
+}
+
+static hidl_vec toHidlVec(const void* ptr, size_t size) {
+ hidl_vec vec;
+ vec.resize(size);
+ memcpy(vec.data(), ptr, size);
+ return vec;
+}
+
+static const Vector toVector(const std::vector& vec) {
+ Vector vector;
+ vector.appendArray(vec.data(), vec.size());
+ return *const_cast*>(&vector);
+}
+
+static String8 toString8(const std::string& string) {
+ return String8(string.c_str());
+}
+
+static std::vector toStdVec(const uint8_t* ptr, size_t n) {
+ if (!ptr) {
+ return std::vector();
+ }
+ return std::vector(ptr, ptr + n);
+}
+
+// -------Hidl interface related end--------------
+
+bool CryptoHalAidl::isCryptoSchemeSupportedInternal(const uint8_t uuid[16], int* factoryIdx) {
+ Uuid uuidAidl = DrmUtils::toAidlUuid(uuid);
+ for (size_t i = 0; i < mFactories.size(); i++) {
+ CryptoSchemes schemes{};
+ if (mFactories[i]->getSupportedCryptoSchemes(&schemes).isOk()) {
+ if (std::count(schemes.uuids.begin(), schemes.uuids.end(), uuidAidl)) {
+ if (factoryIdx != NULL) *factoryIdx = i;
+ return true;
+ }
+ }
+ }
+
+ return false;
+}
+
+CryptoHalAidl::CryptoHalAidl()
+ : mFactories(DrmUtils::makeDrmFactoriesAidl()),
+ mInitCheck((mFactories.size() == 0) ? ERROR_UNSUPPORTED : NO_INIT),
+ mHeapSeqNum(0) {}
+
+CryptoHalAidl::~CryptoHalAidl() {}
+
+status_t CryptoHalAidl::initCheck() const {
+ return mInitCheck;
+}
+
+bool CryptoHalAidl::isCryptoSchemeSupported(const uint8_t uuid[16]) {
+ Mutex::Autolock autoLock(mLock);
+
+ return isCryptoSchemeSupportedInternal(uuid, NULL);
+}
+
+status_t CryptoHalAidl::createPlugin(const uint8_t uuid[16], const void* data, size_t size) {
+ Mutex::Autolock autoLock(mLock);
+
+ Uuid uuidAidl = DrmUtils::toAidlUuid(uuid);
+ std::vector dataAidl = toStdVec(toVector(toHidlVec(data, size)));
+ int i = 0;
+ if (isCryptoSchemeSupportedInternal(uuid, &i)) {
+ mPlugin = makeCryptoPlugin(mFactories[i], uuidAidl, dataAidl);
+ }
+
+ if (mInitCheck == NO_INIT) {
+ mInitCheck = mPlugin == NULL ? ERROR_UNSUPPORTED : OK;
+ }
+
+ return mInitCheck;
+}
+
+std::shared_ptr CryptoHalAidl::makeCryptoPlugin(
+ const std::shared_ptr& factory, const Uuid& uuidAidl,
+ const std::vector initData) {
+ std::shared_ptr pluginAidl;
+ if (factory->createCryptoPlugin(uuidAidl, initData, &pluginAidl).isOk()) {
+ ALOGI("Create ICryptoPluginAidl. UUID:[%s]", uuidAidl.toString().c_str());
+ } else {
+ mInitCheck = DEAD_OBJECT;
+ ALOGE("Failed to create ICryptoPluginAidl. UUID:[%s]", uuidAidl.toString().c_str());
+ }
+
+ return pluginAidl;
+}
+
+status_t CryptoHalAidl::destroyPlugin() {
+ Mutex::Autolock autoLock(mLock);
+
+ if (mInitCheck != OK) {
+ return mInitCheck;
+ }
+
+ mPlugin.reset();
+ mInitCheck = NO_INIT;
+ return OK;
+}
+
+bool CryptoHalAidl::requiresSecureDecoderComponent(const char* mime) const {
+ Mutex::Autolock autoLock(mLock);
+
+ if (mInitCheck != OK) {
+ return false;
+ }
+
+ std::string mimeStr = std::string(mime);
+ bool result;
+ if (!mPlugin->requiresSecureDecoderComponent(mimeStr, &result).isOk()) {
+ ALOGE("Failed to requiresSecureDecoderComponent. mime:[%s]", mime);
+ return false;
+ }
+
+ return result;
+}
+
+void CryptoHalAidl::notifyResolution(uint32_t width, uint32_t height) {
+ Mutex::Autolock autoLock(mLock);
+
+ if (mInitCheck != OK) {
+ return;
+ }
+
+ // Check negative width and height after type conversion
+ // Log error and return if any is negative
+ if ((int32_t)width < 0 || (int32_t)height < 0) {
+ ALOGE("Negative width: %d or height %d in notifyResolution", width, height);
+ return;
+ }
+
+ ::ndk::ScopedAStatus status = mPlugin->notifyResolution(width, height);
+ if (!status.isOk()) {
+ ALOGE("notifyResolution txn failed status code: %d", status.getServiceSpecificError());
+ }
+}
+
+status_t CryptoHalAidl::setMediaDrmSession(const Vector& sessionId) {
+ Mutex::Autolock autoLock(mLock);
+
+ if (mInitCheck != OK) {
+ return mInitCheck;
+ }
+
+ auto err = mPlugin->setMediaDrmSession(toStdVec(sessionId));
+ return statusAidlToStatusT(err);
+}
+
+ssize_t CryptoHalAidl::decrypt(const uint8_t keyId[16], const uint8_t iv[16],
+ CryptoPlugin::Mode mode, const CryptoPlugin::Pattern& pattern,
+ const SharedBufferHidl& hSource, size_t offset,
+ const CryptoPlugin::SubSample* subSamples, size_t numSubSamples,
+ const DestinationBufferHidl& hDestination, AString* errorDetailMsg) {
+ Mutex::Autolock autoLock(mLock);
+
+ if (mInitCheck != OK) {
+ return mInitCheck;
+ }
+
+ Mode aMode;
+ switch (mode) {
+ case CryptoPlugin::kMode_Unencrypted:
+ aMode = Mode::UNENCRYPTED;
+ break;
+ case CryptoPlugin::kMode_AES_CTR:
+ aMode = Mode::AES_CTR;
+ break;
+ case CryptoPlugin::kMode_AES_WV:
+ aMode = Mode::AES_CBC_CTS;
+ break;
+ case CryptoPlugin::kMode_AES_CBC:
+ aMode = Mode::AES_CBC;
+ break;
+ default:
+ return UNKNOWN_ERROR;
+ }
+
+ Pattern aPattern;
+ aPattern.encryptBlocks = pattern.mEncryptBlocks;
+ aPattern.skipBlocks = pattern.mSkipBlocks;
+
+ std::vector stdSubSamples;
+ for (size_t i = 0; i < numSubSamples; i++) {
+ SubSample subSample;
+ subSample.numBytesOfClearData = subSamples[i].mNumBytesOfClearData;
+ subSample.numBytesOfEncryptedData = subSamples[i].mNumBytesOfEncryptedData;
+ stdSubSamples.push_back(subSample);
+ }
+
+ bool secure;
+ if (hDestination.type == BufferTypeHidl::SHARED_MEMORY) {
+ status_t status = checkSharedBuffer(hDestination.nonsecureMemory);
+ if (status != OK) {
+ return status;
+ }
+ secure = false;
+ } else if (hDestination.type == BufferTypeHidl::NATIVE_HANDLE) {
+ secure = true;
+ } else {
+ android_errorWriteLog(0x534e4554, "70526702");
+ return UNKNOWN_ERROR;
+ }
+
+ status_t status = checkSharedBuffer(hSource);
+ if (status != OK) {
+ return status;
+ }
+
+ status_t err = UNKNOWN_ERROR;
+ mLock.unlock();
+
+ std::vector keyIdAidl(toStdVec(keyId, 16));
+ std::vector ivAidl(toStdVec(iv, 16));
+
+ DecryptArgs args;
+ args.secure = secure;
+ args.keyId = keyIdAidl;
+ args.iv = ivAidl;
+ args.mode = aMode;
+ args.pattern = aPattern;
+ args.subSamples = std::move(stdSubSamples);
+ args.source = hidlSharedBufferToAidlSharedBuffer(hSource);
+ args.offset = offset;
+ args.destination = hidlDestinationBufferToAidlDestinationBuffer(hDestination);
+
+
+ int32_t result = 0;
+ ::ndk::ScopedAStatus statusAidl = mPlugin->decrypt(args, &result);
+
+ err = statusAidlToStatusT(statusAidl);
+ std::string msgStr(statusAidl.getMessage());
+ if (errorDetailMsg != nullptr) {
+ *errorDetailMsg = toString8(msgStr);
+ }
+ if (err != OK) {
+ ALOGE("Failed on decrypt, error description:%s", statusAidl.getDescription().c_str());
+ return err;
+ }
+
+ return result;
+}
+
+int32_t CryptoHalAidl::setHeap(const sp& heap) {
+ if (heap == NULL || mHeapSeqNum < 0) {
+ ALOGE("setHeap(): heap %p mHeapSeqNum %d", heap.get(), mHeapSeqNum);
+ return -1;
+ }
+
+ Mutex::Autolock autoLock(mLock);
+
+ if (mInitCheck != OK) {
+ return -1;
+ }
+
+ int32_t seqNum = mHeapSeqNum++;
+ uint32_t bufferId = static_cast(seqNum);
+ mHeapSizes.add(seqNum, heap->size());
+
+ SharedBufferAidl memAidl;
+ memAidl.handle = ::android::dupToAidl(heap->handle());
+ memAidl.size = heap->size();
+ memAidl.bufferId = bufferId;
+
+ auto status = mPlugin->setSharedBufferBase(memAidl);
+ ALOGE_IF(!status.isOk(),
+ "setSharedBufferBase(): remote call failed");
+ return seqNum;
+}
+
+void CryptoHalAidl::unsetHeap(int32_t seqNum) {
+ Mutex::Autolock autoLock(mLock);
+
+ /*
+ * Clear the remote shared memory mapping by setting the shared
+ * buffer base to a null hidl_memory.
+ *
+ * TODO: Add a releaseSharedBuffer method in a future DRM HAL
+ * API version to make this explicit.
+ */
+ ssize_t index = mHeapSizes.indexOfKey(seqNum);
+ if (index >= 0) {
+ if (mPlugin != NULL) {
+ uint32_t bufferId = static_cast(seqNum);
+ SharedBufferAidl memAidl{};
+ memAidl.bufferId = bufferId;
+ auto status = mPlugin->setSharedBufferBase(memAidl);
+ ALOGE_IF(!status.isOk(),
+ "setSharedBufferBase(): remote call failed");
+ }
+ mHeapSizes.removeItem(seqNum);
+ }
+}
+
+status_t CryptoHalAidl::getLogMessages(Vector& logs) const {
+ Mutex::Autolock autoLock(mLock);
+ // Need to convert logmessage
+
+ return DrmUtils::GetLogMessagesAidl(mPlugin, logs);
+}
+} // namespace android
diff --git a/drm/libmediadrm/CryptoHalHidl.cpp b/drm/libmediadrm/CryptoHalHidl.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..55364b5168f4578378ca45933d32a231b627dff0
--- /dev/null
+++ b/drm/libmediadrm/CryptoHalHidl.cpp
@@ -0,0 +1,404 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CryptoHalHidl"
+#include
+
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+using drm::V1_0::BufferType;
+using drm::V1_0::DestinationBuffer;
+using drm::V1_0::ICryptoFactory;
+using drm::V1_0::ICryptoPlugin;
+using drm::V1_0::Mode;
+using drm::V1_0::Pattern;
+using drm::V1_0::SharedBuffer;
+using drm::V1_0::Status;
+using drm::V1_0::SubSample;
+
+using ::android::sp;
+using ::android::DrmUtils::toStatusT;
+using ::android::hardware::hidl_array;
+using ::android::hardware::hidl_handle;
+using ::android::hardware::hidl_memory;
+using ::android::hardware::hidl_string;
+using ::android::hardware::hidl_vec;
+using ::android::hardware::HidlMemory;
+using ::android::hardware::Return;
+using ::android::hardware::Void;
+
+typedef drm::V1_2::Status Status_V1_2;
+
+namespace android {
+
+static hidl_vec toHidlVec(const Vector& vector) {
+ hidl_vec vec;
+ vec.setToExternal(const_cast(vector.array()), vector.size());
+ return vec;
+}
+
+static hidl_vec toHidlVec(const void* ptr, size_t size) {
+ hidl_vec vec;
+ vec.resize(size);
+ memcpy(vec.data(), ptr, size);
+ return vec;
+}
+
+static hidl_array toHidlArray16(const uint8_t* ptr) {
+ if (!ptr) {
+ return hidl_array();
+ }
+ return hidl_array(ptr);
+}
+
+static String8 toString8(hidl_string hString) {
+ return String8(hString.c_str());
+}
+
+CryptoHalHidl::CryptoHalHidl()
+ : mFactories(makeCryptoFactories()),
+ mInitCheck((mFactories.size() == 0) ? ERROR_UNSUPPORTED : NO_INIT),
+ mHeapSeqNum(0) {}
+
+CryptoHalHidl::~CryptoHalHidl() {}
+
+Vector> CryptoHalHidl::makeCryptoFactories() {
+ Vector> factories;
+
+ auto manager = hardware::defaultServiceManager1_2();
+ if (manager != NULL) {
+ manager->listManifestByInterface(
+ drm::V1_0::ICryptoFactory::descriptor,
+ [&factories](const hidl_vec& registered) {
+ for (const auto& instance : registered) {
+ auto factory = drm::V1_0::ICryptoFactory::getService(instance);
+ if (factory != NULL) {
+ ALOGD("found drm@1.0 ICryptoFactory %s", instance.c_str());
+ factories.push_back(factory);
+ }
+ }
+ });
+ manager->listManifestByInterface(
+ drm::V1_1::ICryptoFactory::descriptor,
+ [&factories](const hidl_vec& registered) {
+ for (const auto& instance : registered) {
+ auto factory = drm::V1_1::ICryptoFactory::getService(instance);
+ if (factory != NULL) {
+ ALOGD("found drm@1.1 ICryptoFactory %s", instance.c_str());
+ factories.push_back(factory);
+ }
+ }
+ });
+ }
+
+ if (factories.size() == 0) {
+ // must be in passthrough mode, load the default passthrough service
+ auto passthrough = ICryptoFactory::getService();
+ if (passthrough != NULL) {
+ ALOGI("makeCryptoFactories: using default passthrough crypto instance");
+ factories.push_back(passthrough);
+ } else {
+ ALOGE("Failed to find any crypto factories");
+ }
+ }
+ return factories;
+}
+
+sp CryptoHalHidl::makeCryptoPlugin(const sp& factory,
+ const uint8_t uuid[16], const void* initData,
+ size_t initDataSize) {
+ sp plugin;
+ Return hResult =
+ factory->createPlugin(toHidlArray16(uuid), toHidlVec(initData, initDataSize),
+ [&](Status status, const sp& hPlugin) {
+ if (status != Status::OK) {
+ ALOGE("Failed to make crypto plugin");
+ return;
+ }
+ plugin = hPlugin;
+ });
+ if (!hResult.isOk()) {
+ mInitCheck = DEAD_OBJECT;
+ }
+ return plugin;
+}
+
+status_t CryptoHalHidl::initCheck() const {
+ return mInitCheck;
+}
+
+bool CryptoHalHidl::isCryptoSchemeSupported(const uint8_t uuid[16]) {
+ Mutex::Autolock autoLock(mLock);
+
+ for (size_t i = 0; i < mFactories.size(); i++) {
+ if (mFactories[i]->isCryptoSchemeSupported(uuid)) {
+ return true;
+ }
+ }
+ return false;
+}
+
+status_t CryptoHalHidl::createPlugin(const uint8_t uuid[16], const void* data, size_t size) {
+ Mutex::Autolock autoLock(mLock);
+
+ for (size_t i = 0; i < mFactories.size(); i++) {
+ if (mFactories[i]->isCryptoSchemeSupported(uuid)) {
+ mPlugin = makeCryptoPlugin(mFactories[i], uuid, data, size);
+ if (mPlugin != NULL) {
+ mPluginV1_2 = drm::V1_2::ICryptoPlugin::castFrom(mPlugin);
+ }
+ }
+ }
+
+ if (mInitCheck == NO_INIT) {
+ mInitCheck = mPlugin == NULL ? ERROR_UNSUPPORTED : OK;
+ }
+
+ return mInitCheck;
+}
+
+status_t CryptoHalHidl::destroyPlugin() {
+ Mutex::Autolock autoLock(mLock);
+
+ if (mInitCheck != OK) {
+ return mInitCheck;
+ }
+
+ mPlugin.clear();
+ mPluginV1_2.clear();
+ mInitCheck = NO_INIT;
+ return OK;
+}
+
+bool CryptoHalHidl::requiresSecureDecoderComponent(const char* mime) const {
+ Mutex::Autolock autoLock(mLock);
+
+ if (mInitCheck != OK) {
+ return false;
+ }
+
+ Return hResult = mPlugin->requiresSecureDecoderComponent(hidl_string(mime));
+ if (!hResult.isOk()) {
+ return false;
+ }
+ return hResult;
+}
+
+/**
+ * If the heap base isn't set, get the heap base from the HidlMemory
+ * and send it to the HAL so it can map a remote heap of the same
+ * size. Once the heap base is established, shared memory buffers
+ * are sent by providing an offset into the heap and a buffer size.
+ */
+int32_t CryptoHalHidl::setHeapBase(const sp& heap) {
+ if (heap == NULL || mHeapSeqNum < 0) {
+ ALOGE("setHeapBase(): heap %p mHeapSeqNum %d", heap.get(), mHeapSeqNum);
+ return -1;
+ }
+
+ Mutex::Autolock autoLock(mLock);
+
+ if (mInitCheck != OK) {
+ return -1;
+ }
+
+ int32_t seqNum = mHeapSeqNum++;
+ uint32_t bufferId = static_cast(seqNum);
+ mHeapSizes.add(seqNum, heap->size());
+ Return hResult = mPlugin->setSharedBufferBase(*heap, bufferId);
+ ALOGE_IF(!hResult.isOk(), "setSharedBufferBase(): remote call failed");
+ return seqNum;
+}
+
+void CryptoHalHidl::clearHeapBase(int32_t seqNum) {
+ Mutex::Autolock autoLock(mLock);
+
+ /*
+ * Clear the remote shared memory mapping by setting the shared
+ * buffer base to a null hidl_memory.
+ *
+ * TODO: Add a releaseSharedBuffer method in a future DRM HAL
+ * API version to make this explicit.
+ */
+ ssize_t index = mHeapSizes.indexOfKey(seqNum);
+ if (index >= 0) {
+ if (mPlugin != NULL) {
+ uint32_t bufferId = static_cast(seqNum);
+ Return hResult = mPlugin->setSharedBufferBase(hidl_memory(), bufferId);
+ ALOGE_IF(!hResult.isOk(), "setSharedBufferBase(): remote call failed");
+ }
+ mHeapSizes.removeItem(seqNum);
+ }
+}
+
+status_t CryptoHalHidl::checkSharedBuffer(const ::SharedBuffer& buffer) {
+ int32_t seqNum = static_cast(buffer.bufferId);
+ // memory must be in one of the heaps that have been set
+ if (mHeapSizes.indexOfKey(seqNum) < 0) {
+ return UNKNOWN_ERROR;
+ }
+
+ // memory must be within the address space of the heap
+ size_t heapSize = mHeapSizes.valueFor(seqNum);
+ if (heapSize < buffer.offset + buffer.size || SIZE_MAX - buffer.offset < buffer.size) {
+ android_errorWriteLog(0x534e4554, "76221123");
+ return UNKNOWN_ERROR;
+ }
+
+ return OK;
+}
+
+ssize_t CryptoHalHidl::decrypt(const uint8_t keyId[16], const uint8_t iv[16],
+ CryptoPlugin::Mode mode, const CryptoPlugin::Pattern& pattern,
+ const drm::V1_0::SharedBuffer& hSource, size_t offset,
+ const CryptoPlugin::SubSample* subSamples, size_t numSubSamples,
+ const drm::V1_0::DestinationBuffer& hDestination,
+ AString* errorDetailMsg) {
+ Mutex::Autolock autoLock(mLock);
+
+ if (mInitCheck != OK) {
+ return mInitCheck;
+ }
+
+ Mode hMode;
+ switch (mode) {
+ case CryptoPlugin::kMode_Unencrypted:
+ hMode = Mode::UNENCRYPTED;
+ break;
+ case CryptoPlugin::kMode_AES_CTR:
+ hMode = Mode::AES_CTR;
+ break;
+ case CryptoPlugin::kMode_AES_WV:
+ hMode = Mode::AES_CBC_CTS;
+ break;
+ case CryptoPlugin::kMode_AES_CBC:
+ hMode = Mode::AES_CBC;
+ break;
+ default:
+ return UNKNOWN_ERROR;
+ }
+
+ Pattern hPattern;
+ hPattern.encryptBlocks = pattern.mEncryptBlocks;
+ hPattern.skipBlocks = pattern.mSkipBlocks;
+
+ std::vector stdSubSamples;
+ for (size_t i = 0; i < numSubSamples; i++) {
+ SubSample subSample;
+ subSample.numBytesOfClearData = subSamples[i].mNumBytesOfClearData;
+ subSample.numBytesOfEncryptedData = subSamples[i].mNumBytesOfEncryptedData;
+ stdSubSamples.push_back(subSample);
+ }
+ auto hSubSamples = hidl_vec(stdSubSamples);
+
+ bool secure;
+ if (hDestination.type == BufferType::SHARED_MEMORY) {
+ status_t status = checkSharedBuffer(hDestination.nonsecureMemory);
+ if (status != OK) {
+ return status;
+ }
+ secure = false;
+ } else if (hDestination.type == BufferType::NATIVE_HANDLE) {
+ secure = true;
+ } else {
+ android_errorWriteLog(0x534e4554, "70526702");
+ return UNKNOWN_ERROR;
+ }
+
+ status_t status = checkSharedBuffer(hSource);
+ if (status != OK) {
+ return status;
+ }
+
+ status_t err = UNKNOWN_ERROR;
+ uint32_t bytesWritten = 0;
+
+ Return hResult;
+
+ mLock.unlock();
+ if (mPluginV1_2 != NULL) {
+ hResult = mPluginV1_2->decrypt_1_2(
+ secure, toHidlArray16(keyId), toHidlArray16(iv), hMode, hPattern, hSubSamples,
+ hSource, offset, hDestination,
+ [&](Status_V1_2 status, uint32_t hBytesWritten, hidl_string hDetailedError) {
+ if (status == Status_V1_2::OK) {
+ bytesWritten = hBytesWritten;
+ if (errorDetailMsg != nullptr) {
+ *errorDetailMsg = toString8(hDetailedError);
+ }
+ }
+ err = toStatusT(status);
+ });
+ } else {
+ hResult = mPlugin->decrypt(
+ secure, toHidlArray16(keyId), toHidlArray16(iv), hMode, hPattern, hSubSamples,
+ hSource, offset, hDestination,
+ [&](Status status, uint32_t hBytesWritten, hidl_string hDetailedError) {
+ if (status == Status::OK) {
+ bytesWritten = hBytesWritten;
+ if (errorDetailMsg != nullptr) {
+ *errorDetailMsg = toString8(hDetailedError);
+ }
+ }
+ err = toStatusT(status);
+ });
+ }
+
+ err = hResult.isOk() ? err : DEAD_OBJECT;
+ if (err == OK) {
+ return bytesWritten;
+ }
+ return err;
+}
+
+void CryptoHalHidl::notifyResolution(uint32_t width, uint32_t height) {
+ Mutex::Autolock autoLock(mLock);
+
+ if (mInitCheck != OK) {
+ return;
+ }
+
+ auto hResult = mPlugin->notifyResolution(width, height);
+ ALOGE_IF(!hResult.isOk(), "notifyResolution txn failed %s", hResult.description().c_str());
+}
+
+status_t CryptoHalHidl::setMediaDrmSession(const Vector& sessionId) {
+ Mutex::Autolock autoLock(mLock);
+
+ if (mInitCheck != OK) {
+ return mInitCheck;
+ }
+
+ auto err = mPlugin->setMediaDrmSession(toHidlVec(sessionId));
+ return err.isOk() ? toStatusT(err) : DEAD_OBJECT;
+}
+
+status_t CryptoHalHidl::getLogMessages(Vector& logs) const {
+ Mutex::Autolock autoLock(mLock);
+ return DrmUtils::GetLogMessages(mPlugin, logs);
+}
+} // namespace android
diff --git a/drm/libmediadrm/DrmHal.cpp b/drm/libmediadrm/DrmHal.cpp
index 40d1e0c4af4ae38722928e5b076a0c1c4e10177a..c394d5aaf03a3459188b993147f23124a62d7978 100644
--- a/drm/libmediadrm/DrmHal.cpp
+++ b/drm/libmediadrm/DrmHal.cpp
@@ -17,1557 +17,280 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "DrmHal"
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
#include
-#include
-#include
-#include
+#include
+#include
#include
-#include
-
-#include
-#include
-
-using drm::V1_0::KeyedVector;
-using drm::V1_0::KeyRequestType;
-using drm::V1_0::KeyType;
-using drm::V1_0::KeyValue;
-using drm::V1_0::SecureStop;
-using drm::V1_0::SecureStopId;
-using drm::V1_0::Status;
-using drm::V1_1::HdcpLevel;
-using drm::V1_1::SecureStopRelease;
-using drm::V1_1::SecurityLevel;
-using drm::V1_2::KeySetId;
-using drm::V1_2::KeyStatusType;
-using ::android::DrmUtils::toStatusT;
-using ::android::hardware::drm::V1_1::DrmMetricGroup;
-using ::android::hardware::hidl_array;
-using ::android::hardware::hidl_string;
-using ::android::hardware::hidl_vec;
-using ::android::hardware::Return;
-using ::android::hardware::Void;
-using ::android::os::PersistableBundle;
-using ::android::sp;
-
-typedef drm::V1_1::KeyRequestType KeyRequestType_V1_1;
-typedef drm::V1_2::Status Status_V1_2;
-typedef drm::V1_2::HdcpLevel HdcpLevel_V1_2;
-
-namespace {
-
-// This constant corresponds to the PROPERTY_DEVICE_UNIQUE_ID constant
-// in the MediaDrm API.
-constexpr char kPropertyDeviceUniqueId[] = "deviceUniqueId";
-constexpr char kEqualsSign[] = "=";
-
-template
-std::string toBase64StringNoPad(const T* data, size_t size) {
- // Note that the base 64 conversion only works with arrays of single-byte
- // values. If the source is empty or is not an array of single-byte values,
- // return empty string.
- if (size == 0 || sizeof(data[0]) != 1) {
- return "";
- }
-
- android::AString outputString;
- encodeBase64(data, size, &outputString);
- // Remove trailing equals padding if it exists.
- while (outputString.size() > 0 && outputString.endsWith(kEqualsSign)) {
- outputString.erase(outputString.size() - 1, 1);
- }
-
- return std::string(outputString.c_str(), outputString.size());
-}
-
-} // anonymous namespace
namespace android {
-#define INIT_CHECK() {if (mInitCheck != OK) return mInitCheck;}
-
-static const Vector toVector(const hidl_vec &vec) {
- Vector vector;
- vector.appendArray(vec.data(), vec.size());
- return *const_cast *>(&vector);
-}
-
-static hidl_vec toHidlVec(const Vector &vector) {
- hidl_vec vec;
- vec.setToExternal(const_cast(vector.array()), vector.size());
- return vec;
-}
-
-static String8 toString8(const hidl_string &string) {
- return String8(string.c_str());
-}
-
-static hidl_string toHidlString(const String8& string) {
- return hidl_string(string.string());
-}
-
-static DrmPlugin::SecurityLevel toSecurityLevel(SecurityLevel level) {
- switch(level) {
- case SecurityLevel::SW_SECURE_CRYPTO:
- return DrmPlugin::kSecurityLevelSwSecureCrypto;
- case SecurityLevel::SW_SECURE_DECODE:
- return DrmPlugin::kSecurityLevelSwSecureDecode;
- case SecurityLevel::HW_SECURE_CRYPTO:
- return DrmPlugin::kSecurityLevelHwSecureCrypto;
- case SecurityLevel::HW_SECURE_DECODE:
- return DrmPlugin::kSecurityLevelHwSecureDecode;
- case SecurityLevel::HW_SECURE_ALL:
- return DrmPlugin::kSecurityLevelHwSecureAll;
- default:
- return DrmPlugin::kSecurityLevelUnknown;
- }
-}
-
-static SecurityLevel toHidlSecurityLevel(DrmPlugin::SecurityLevel level) {
- switch(level) {
- case DrmPlugin::kSecurityLevelSwSecureCrypto:
- return SecurityLevel::SW_SECURE_CRYPTO;
- case DrmPlugin::kSecurityLevelSwSecureDecode:
- return SecurityLevel::SW_SECURE_DECODE;
- case DrmPlugin::kSecurityLevelHwSecureCrypto:
- return SecurityLevel::HW_SECURE_CRYPTO;
- case DrmPlugin::kSecurityLevelHwSecureDecode:
- return SecurityLevel::HW_SECURE_DECODE;
- case DrmPlugin::kSecurityLevelHwSecureAll:
- return SecurityLevel::HW_SECURE_ALL;
- default:
- return SecurityLevel::UNKNOWN;
- }
-}
-
-static DrmPlugin::OfflineLicenseState toOfflineLicenseState(
- OfflineLicenseState licenseState) {
- switch(licenseState) {
- case OfflineLicenseState::USABLE:
- return DrmPlugin::kOfflineLicenseStateUsable;
- case OfflineLicenseState::INACTIVE:
- return DrmPlugin::kOfflineLicenseStateReleased;
- default:
- return DrmPlugin::kOfflineLicenseStateUnknown;
- }
-}
-
-static DrmPlugin::HdcpLevel toHdcpLevel(HdcpLevel_V1_2 level) {
- switch(level) {
- case HdcpLevel_V1_2::HDCP_NONE:
- return DrmPlugin::kHdcpNone;
- case HdcpLevel_V1_2::HDCP_V1:
- return DrmPlugin::kHdcpV1;
- case HdcpLevel_V1_2::HDCP_V2:
- return DrmPlugin::kHdcpV2;
- case HdcpLevel_V1_2::HDCP_V2_1:
- return DrmPlugin::kHdcpV2_1;
- case HdcpLevel_V1_2::HDCP_V2_2:
- return DrmPlugin::kHdcpV2_2;
- case HdcpLevel_V1_2::HDCP_V2_3:
- return DrmPlugin::kHdcpV2_3;
- case HdcpLevel_V1_2::HDCP_NO_OUTPUT:
- return DrmPlugin::kHdcpNoOutput;
- default:
- return DrmPlugin::kHdcpLevelUnknown;
- }
-}
-static ::KeyedVector toHidlKeyedVector(const KeyedVector&
- keyedVector) {
- std::vector stdKeyedVector;
- for (size_t i = 0; i < keyedVector.size(); i++) {
- KeyValue keyValue;
- keyValue.key = toHidlString(keyedVector.keyAt(i));
- keyValue.value = toHidlString(keyedVector.valueAt(i));
- stdKeyedVector.push_back(keyValue);
- }
- return ::KeyedVector(stdKeyedVector);
-}
-
-static KeyedVector toKeyedVector(const ::KeyedVector&
- hKeyedVector) {
- KeyedVector keyedVector;
- for (size_t i = 0; i < hKeyedVector.size(); i++) {
- keyedVector.add(toString8(hKeyedVector[i].key),
- toString8(hKeyedVector[i].value));
- }
- return keyedVector;
-}
-
-static List> toSecureStops(const hidl_vec&
- hSecureStops) {
- List> secureStops;
- for (size_t i = 0; i < hSecureStops.size(); i++) {
- secureStops.push_back(toVector(hSecureStops[i].opaqueData));
- }
- return secureStops;
-}
-
-static List> toSecureStopIds(const hidl_vec&
- hSecureStopIds) {
- List> secureStopIds;
- for (size_t i = 0; i < hSecureStopIds.size(); i++) {
- secureStopIds.push_back(toVector(hSecureStopIds[i]));
- }
- return secureStopIds;
-}
-
-static List> toKeySetIds(const hidl_vec&
- hKeySetIds) {
- List> keySetIds;
- for (size_t i = 0; i < hKeySetIds.size(); i++) {
- keySetIds.push_back(toVector(hKeySetIds[i]));
- }
- return keySetIds;
-}
-
-Mutex DrmHal::mLock;
-
-struct DrmHal::DrmSessionClient : public aidl::android::media::BnResourceManagerClient {
- explicit DrmSessionClient(DrmHal* drm, const Vector& sessionId)
- : mSessionId(sessionId),
- mDrm(drm) {}
-
- ::ndk::ScopedAStatus reclaimResource(bool* _aidl_return) override;
- ::ndk::ScopedAStatus getName(::std::string* _aidl_return) override;
-
- const Vector mSessionId;
-
- virtual ~DrmSessionClient();
-
-private:
- wp mDrm;
-
- DISALLOW_EVIL_CONSTRUCTORS(DrmSessionClient);
-};
-
-::ndk::ScopedAStatus DrmHal::DrmSessionClient::reclaimResource(bool* _aidl_return) {
- auto sessionId = mSessionId;
- sp drm = mDrm.promote();
- if (drm == NULL) {
- *_aidl_return = true;
- return ::ndk::ScopedAStatus::ok();
- }
- status_t err = drm->closeSession(sessionId);
- if (err != OK) {
- *_aidl_return = false;
- return ::ndk::ScopedAStatus::ok();
- }
- drm->sendEvent(EventType::SESSION_RECLAIMED,
- toHidlVec(sessionId), hidl_vec());
- *_aidl_return = true;
- return ::ndk::ScopedAStatus::ok();
-}
-
-::ndk::ScopedAStatus DrmHal::DrmSessionClient::getName(::std::string* _aidl_return) {
- String8 name;
- sp drm = mDrm.promote();
- if (drm == NULL) {
- name.append("");
- } else if (drm->getPropertyStringInternal(String8("vendor"), name) != OK
- || name.isEmpty()) {
- name.append("");
- }
- name.append("[");
- for (size_t i = 0; i < mSessionId.size(); ++i) {
- name.appendFormat("%02x", mSessionId[i]);
- }
- name.append("]");
- *_aidl_return = name;
- return ::ndk::ScopedAStatus::ok();
-}
-
-DrmHal::DrmSessionClient::~DrmSessionClient() {
- DrmSessionManager::Instance()->removeSession(mSessionId);
-}
-
-DrmHal::DrmHal()
- : mFactories(makeDrmFactories()),
- mInitCheck((mFactories.size() == 0) ? ERROR_UNSUPPORTED : NO_INIT) {
-}
-
-void DrmHal::closeOpenSessions() {
- Mutex::Autolock autoLock(mLock);
- auto openSessions = mOpenSessions;
- for (size_t i = 0; i < openSessions.size(); i++) {
- mLock.unlock();
- closeSession(openSessions[i]->mSessionId);
- mLock.lock();
- }
- mOpenSessions.clear();
-}
-
-DrmHal::~DrmHal() {
-}
-
-void DrmHal::cleanup() {
- closeOpenSessions();
-
- Mutex::Autolock autoLock(mLock);
- reportFrameworkMetrics(reportPluginMetrics());
-
- setListener(NULL);
- mInitCheck = NO_INIT;
- if (mPluginV1_2 != NULL) {
- if (!mPluginV1_2->setListener(NULL).isOk()) {
- mInitCheck = DEAD_OBJECT;
- }
- } else if (mPlugin != NULL) {
- if (!mPlugin->setListener(NULL).isOk()) {
- mInitCheck = DEAD_OBJECT;
- }
- }
- mPlugin.clear();
- mPluginV1_1.clear();
- mPluginV1_2.clear();
- mPluginV1_4.clear();
+DrmHal::DrmHal() {
+ mDrmHalHidl = sp::make();
+ mDrmHalAidl = sp::make();
}
-std::vector> DrmHal::makeDrmFactories() {
- static std::vector> factories(DrmUtils::MakeDrmFactories());
- if (factories.size() == 0) {
- // must be in passthrough mode, load the default passthrough service
- auto passthrough = IDrmFactory::getService();
- if (passthrough != NULL) {
- DrmUtils::LOG2BI("makeDrmFactories: using default passthrough drm instance");
- factories.push_back(passthrough);
- } else {
- DrmUtils::LOG2BE("Failed to find any drm factories");
- }
- }
- return factories;
-}
-
-sp DrmHal::makeDrmPlugin(const sp& factory,
- const uint8_t uuid[16], const String8& appPackageName) {
- mAppPackageName = appPackageName;
- mMetrics.SetAppPackageName(appPackageName);
- mMetrics.SetAppUid(AIBinder_getCallingUid());
-
- sp plugin;
- Return hResult = factory->createPlugin(uuid, appPackageName.string(),
- [&](Status status, const sp& hPlugin) {
- if (status != Status::OK) {
- DrmUtils::LOG2BE(uuid, "Failed to make drm plugin: %d", status);
- return;
- }
- plugin = hPlugin;
- }
- );
-
- if (!hResult.isOk()) {
- DrmUtils::LOG2BE(uuid, "createPlugin remote call failed: %s",
- hResult.description().c_str());
- }
-
- return plugin;
-}
+DrmHal::~DrmHal() {}
status_t DrmHal::initCheck() const {
- return mInitCheck;
-}
-
-status_t DrmHal::setListener(const sp& listener)
-{
- Mutex::Autolock lock(mEventLock);
- mListener = listener;
- return NO_ERROR;
-}
-
-Return DrmHal::sendEvent(EventType hEventType,
- const hidl_vec