diff --git a/camera/CaptureResult.cpp b/camera/CaptureResult.cpp
index e6c0d00ece9bd0868b7c00c8e5f4d12ace5527ff..928a6bca0ac72b2795990adab30690406c902c30 100644
--- a/camera/CaptureResult.cpp
+++ b/camera/CaptureResult.cpp
@@ -60,6 +60,39 @@ status_t CaptureResultExtras::writeToParcel(android::Parcel *parcel) const {
return OK;
}
+status_t PhysicalCaptureResultInfo::readFromParcel(const android::Parcel* parcel) {
+ status_t res;
+
+ mPhysicalCameraId.remove(mPhysicalCameraId.size());
+ mPhysicalCameraMetadata.clear();
+
+ if ((res = parcel->readString16(&mPhysicalCameraId)) != OK) {
+ ALOGE("%s: Failed to read camera id: %d", __FUNCTION__, res);
+ return res;
+ }
+
+ if ((res = mPhysicalCameraMetadata.readFromParcel(parcel)) != OK) {
+ ALOGE("%s: Failed to read metadata from parcel: %d", __FUNCTION__, res);
+ return res;
+ }
+ return OK;
+}
+
+status_t PhysicalCaptureResultInfo::writeToParcel(android::Parcel* parcel) const {
+ status_t res;
+ if ((res = parcel->writeString16(mPhysicalCameraId)) != OK) {
+ ALOGE("%s: Failed to write physical camera ID to parcel: %d",
+ __FUNCTION__, res);
+ return res;
+ }
+ if ((res = mPhysicalCameraMetadata.writeToParcel(parcel)) != OK) {
+ ALOGE("%s: Failed to write physical camera metadata to parcel: %d",
+ __FUNCTION__, res);
+ return res;
+ }
+ return OK;
+}
+
CaptureResult::CaptureResult() :
mMetadata(), mResultExtras() {
}
@@ -67,6 +100,7 @@ CaptureResult::CaptureResult() :
CaptureResult::CaptureResult(const CaptureResult &otherResult) {
mResultExtras = otherResult.mResultExtras;
mMetadata = otherResult.mMetadata;
+ mPhysicalMetadatas = otherResult.mPhysicalMetadatas;
}
status_t CaptureResult::readFromParcel(android::Parcel *parcel) {
@@ -79,6 +113,7 @@ status_t CaptureResult::readFromParcel(android::Parcel *parcel) {
}
mMetadata.clear();
+ mPhysicalMetadatas.clear();
status_t res = OK;
res = mMetadata.readFromParcel(parcel);
@@ -89,6 +124,34 @@ status_t CaptureResult::readFromParcel(android::Parcel *parcel) {
}
ALOGV("%s: Read metadata from parcel", __FUNCTION__);
+ int32_t physicalMetadataCount;
+ if ((res = parcel->readInt32(&physicalMetadataCount)) != OK) {
+ ALOGE("%s: Failed to read the physical metadata count from parcel: %d", __FUNCTION__, res);
+ return res;
+ }
+ if (physicalMetadataCount < 0) {
+ ALOGE("%s: Invalid physical metadata count from parcel: %d",
+ __FUNCTION__, physicalMetadataCount);
+ return BAD_VALUE;
+ }
+
+ for (int32_t i = 0; i < physicalMetadataCount; i++) {
+ String16 cameraId;
+ if ((res = parcel->readString16(&cameraId)) != OK) {
+ ALOGE("%s: Failed to read camera id: %d", __FUNCTION__, res);
+ return res;
+ }
+
+ CameraMetadata physicalMetadata;
+ if ((res = physicalMetadata.readFromParcel(parcel)) != OK) {
+ ALOGE("%s: Failed to read metadata from parcel: %d", __FUNCTION__, res);
+ return res;
+ }
+
+ mPhysicalMetadatas.emplace(mPhysicalMetadatas.end(), cameraId, physicalMetadata);
+ }
+ ALOGV("%s: Read physical metadata from parcel", __FUNCTION__);
+
res = mResultExtras.readFromParcel(parcel);
if (res != OK) {
ALOGE("%s: Failed to read result extras from parcel.",
@@ -118,6 +181,27 @@ status_t CaptureResult::writeToParcel(android::Parcel *parcel) const {
}
ALOGV("%s: Wrote metadata to parcel", __FUNCTION__);
+ int32_t physicalMetadataCount = static_cast<int32_t>(mPhysicalMetadatas.size());
+ res = parcel->writeInt32(physicalMetadataCount);
+ if (res != OK) {
+ ALOGE("%s: Failed to write physical metadata count to parcel: %d",
+ __FUNCTION__, res);
+ return BAD_VALUE;
+ }
+ for (const auto& physicalMetadata : mPhysicalMetadatas) {
+ if ((res = parcel->writeString16(physicalMetadata.mPhysicalCameraId)) != OK) {
+ ALOGE("%s: Failed to write physical camera ID to parcel: %d",
+ __FUNCTION__, res);
+ return res;
+ }
+ if ((res = physicalMetadata.mPhysicalCameraMetadata.writeToParcel(parcel)) != OK) {
+ ALOGE("%s: Failed to write physical camera metadata to parcel: %d",
+ __FUNCTION__, res);
+ return res;
+ }
+ }
+ ALOGV("%s: Wrote physical camera metadata to parcel", __FUNCTION__);
+
res = mResultExtras.writeToParcel(parcel);
if (res != OK) {
ALOGE("%s: Failed to write result extras to parcel", __FUNCTION__);
diff --git a/camera/aidl/android/hardware/ICameraServiceProxy.aidl b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
index 5dc23eb4775030b80b9201edf0d0d8f366b3375b..75759489e774a04ce4607fc54b2d07167643d0dc 100644
--- a/camera/aidl/android/hardware/ICameraServiceProxy.aidl
+++ b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
@@ -45,9 +45,15 @@ interface ICameraServiceProxy
const int CAMERA_FACING_FRONT = 1;
const int CAMERA_FACING_EXTERNAL = 2;
+ /**
+ * Values for notifyCameraState api level
+ */
+ const int CAMERA_API_LEVEL_1 = 1;
+ const int CAMERA_API_LEVEL_2 = 2;
+
/**
* Update the status of a camera device.
*/
oneway void notifyCameraState(String cameraId, int facing, int newCameraState,
- String clientName);
+ String clientName, int apiLevel);
}
diff --git a/camera/aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl b/camera/aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl
index 28252c0d3edf6155b088945e71c9076fbb0767c3..58b19a311388b10d8377d11a58596c25fac27e49 100644
--- a/camera/aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl
+++ b/camera/aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl
@@ -18,6 +18,7 @@ package android.hardware.camera2;
import android.hardware.camera2.impl.CameraMetadataNative;
import android.hardware.camera2.impl.CaptureResultExtras;
+import android.hardware.camera2.impl.PhysicalCaptureResultInfo;
/** @hide */
interface ICameraDeviceCallbacks
@@ -30,12 +31,14 @@ interface ICameraDeviceCallbacks
const int ERROR_CAMERA_REQUEST = 3;
const int ERROR_CAMERA_RESULT = 4;
const int ERROR_CAMERA_BUFFER = 5;
+ const int ERROR_CAMERA_DISABLED = 6;
oneway void onDeviceError(int errorCode, in CaptureResultExtras resultExtras);
oneway void onDeviceIdle();
oneway void onCaptureStarted(in CaptureResultExtras resultExtras, long timestamp);
oneway void onResultReceived(in CameraMetadataNative result,
- in CaptureResultExtras resultExtras);
+ in CaptureResultExtras resultExtras,
+ in PhysicalCaptureResultInfo[] physicalCaptureResultInfos);
oneway void onPrepared(int streamId);
/**
diff --git a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
index 0771fc845c5f3c030a8153aa74ed020951730daa..4ced08c229800c9a7a0bb3a29658d150df01d5c8 100644
--- a/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
+++ b/camera/aidl/android/hardware/camera2/ICameraDeviceUser.aidl
@@ -79,8 +79,9 @@ interface ICameraDeviceUser
*
* @param operatingMode The kind of session to create; either NORMAL_MODE or
* CONSTRAINED_HIGH_SPEED_MODE. Must be a non-negative value.
+ * @param sessionParams Session wide camera parameters
*/
- void endConfigure(int operatingMode);
+ void endConfigure(int operatingMode, in CameraMetadataNative sessionParams);
void deleteStream(int streamId);
@@ -140,5 +141,7 @@ interface ICameraDeviceUser
void prepare2(int maxCount, int streamId);
+ void updateOutputConfiguration(int streamId, in OutputConfiguration outputConfiguration);
+
void finalizeOutputConfigurations(int streamId, in OutputConfiguration outputConfiguration);
}
diff --git a/camera/aidl/android/hardware/camera2/impl/PhysicalCaptureResultInfo.aidl b/camera/aidl/android/hardware/camera2/impl/PhysicalCaptureResultInfo.aidl
new file mode 100644
index 0000000000000000000000000000000000000000..78d9b7b8acfad0ddbd9a3a53e89a101a1def582d
--- /dev/null
+++ b/camera/aidl/android/hardware/camera2/impl/PhysicalCaptureResultInfo.aidl
@@ -0,0 +1,20 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.impl;
+
+/** @hide */
+parcelable PhysicalCaptureResultInfo cpp_header "camera/CaptureResult.h";
diff --git a/camera/camera2/CaptureRequest.cpp b/camera/camera2/CaptureRequest.cpp
index 059795033205d51b4731ec4ef0b6c97774cdebcf..1843ec4869bf25cbb39d731c9d3b44d3c1866b0b 100644
--- a/camera/camera2/CaptureRequest.cpp
+++ b/camera/camera2/CaptureRequest.cpp
@@ -18,6 +18,7 @@
// #define LOG_NDEBUG 0
#define LOG_TAG "CameraRequest"
#include <utils/Log.h>
+#include <utils/String16.h>
#include <camera/camera2/CaptureRequest.h>
@@ -42,16 +43,46 @@ status_t CaptureRequest::readFromParcel(const android::Parcel* parcel) {
return BAD_VALUE;
}
- mMetadata.clear();
mSurfaceList.clear();
+ mStreamIdxList.clear();
+ mSurfaceIdxList.clear();
+ mPhysicalCameraSettings.clear();
status_t err = OK;
- if ((err = mMetadata.readFromParcel(parcel)) != OK) {
- ALOGE("%s: Failed to read metadata from parcel", __FUNCTION__);
+ int32_t settingsCount;
+ if ((err = parcel->readInt32(&settingsCount)) != OK) {
+ ALOGE("%s: Failed to read the settings count from parcel: %d", __FUNCTION__, err);
return err;
}
- ALOGV("%s: Read metadata from parcel", __FUNCTION__);
+
+ if (settingsCount <= 0) {
+ ALOGE("%s: Settings count %d should always be positive!", __FUNCTION__, settingsCount);
+ return BAD_VALUE;
+ }
+
+ for (int32_t i = 0; i < settingsCount; i++) {
+ String16 id;
+ if ((err = parcel->readString16(&id)) != OK) {
+ ALOGE("%s: Failed to read camera id!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ CameraMetadata settings;
+ if ((err = settings.readFromParcel(parcel)) != OK) {
+ ALOGE("%s: Failed to read metadata from parcel", __FUNCTION__);
+ return err;
+ }
+ ALOGV("%s: Read metadata from parcel", __FUNCTION__);
+ mPhysicalCameraSettings.push_back({std::string(String8(id).string()), settings});
+ }
+
+ int isReprocess = 0;
+ if ((err = parcel->readInt32(&isReprocess)) != OK) {
+ ALOGE("%s: Failed to read reprocessing from parcel", __FUNCTION__);
+ return err;
+ }
+ mIsReprocess = (isReprocess != 0);
int32_t size;
if ((err = parcel->readInt32(&size)) != OK) {
@@ -61,7 +92,7 @@ status_t CaptureRequest::readFromParcel(const android::Parcel* parcel) {
ALOGV("%s: Read surface list size = %d", __FUNCTION__, size);
// Do not distinguish null arrays from 0-sized arrays.
- for (int i = 0; i < size; ++i) {
+ for (int32_t i = 0; i < size; ++i) {
// Parcel.writeParcelableArray
size_t len;
const char16_t* className = parcel->readString16Inplace(&len);
@@ -88,12 +119,32 @@ status_t CaptureRequest::readFromParcel(const android::Parcel* parcel) {
mSurfaceList.push_back(surface);
}
- int isReprocess = 0;
- if ((err = parcel->readInt32(&isReprocess)) != OK) {
- ALOGE("%s: Failed to read reprocessing from parcel", __FUNCTION__);
+ int32_t streamSurfaceSize;
+ if ((err = parcel->readInt32(&streamSurfaceSize)) != OK) {
+ ALOGE("%s: Failed to read streamSurfaceSize from parcel", __FUNCTION__);
return err;
}
- mIsReprocess = (isReprocess != 0);
+
+ if (streamSurfaceSize < 0) {
+ ALOGE("%s: Bad streamSurfaceSize %d from parcel", __FUNCTION__, streamSurfaceSize);
+ return BAD_VALUE;
+ }
+
+ for (int32_t i = 0; i < streamSurfaceSize; ++i) {
+ int streamIdx;
+ if ((err = parcel->readInt32(&streamIdx)) != OK) {
+ ALOGE("%s: Failed to read stream index from parcel", __FUNCTION__);
+ return err;
+ }
+ mStreamIdxList.push_back(streamIdx);
+
+ int surfaceIdx;
+ if ((err = parcel->readInt32(&surfaceIdx)) != OK) {
+ ALOGE("%s: Failed to read surface index from parcel", __FUNCTION__);
+ return err;
+ }
+ mSurfaceIdxList.push_back(surfaceIdx);
+ }
return OK;
}
@@ -106,32 +157,62 @@ status_t CaptureRequest::writeToParcel(android::Parcel* parcel) const {
status_t err = OK;
- if ((err = mMetadata.writeToParcel(parcel)) != OK) {
+ int32_t settingsCount = static_cast<int32_t>(mPhysicalCameraSettings.size());
+
+ if ((err = parcel->writeInt32(settingsCount)) != OK) {
+ ALOGE("%s: Failed to write settings count!", __FUNCTION__);
return err;
}
- int32_t size = static_cast<int32_t>(mSurfaceList.size());
-
- // Send 0-sized arrays when it's empty. Do not send null arrays.
- parcel->writeInt32(size);
-
- for (int32_t i = 0; i < size; ++i) {
- // not sure if readParcelableArray does this, hard to tell from source
- parcel->writeString16(String16("android.view.Surface"));
+ for (const auto &it : mPhysicalCameraSettings) {
+ if ((err = parcel->writeString16(String16(it.id.c_str()))) != OK) {
+ ALOGE("%s: Failed to camera id!", __FUNCTION__);
+ return err;
+ }
- // Surface.writeToParcel
- view::Surface surfaceShim;
- surfaceShim.name = String16("unknown_name");
- surfaceShim.graphicBufferProducer = mSurfaceList[i]->getIGraphicBufferProducer();
- if ((err = surfaceShim.writeToParcel(parcel)) != OK) {
- ALOGE("%s: Failed to write output target Surface %d to parcel: %s (%d)",
- __FUNCTION__, i, strerror(-err), err);
+ if ((err = it.settings.writeToParcel(parcel)) != OK) {
+ ALOGE("%s: Failed to write settings!", __FUNCTION__);
return err;
}
}
parcel->writeInt32(mIsReprocess ? 1 : 0);
+ if (mSurfaceConverted) {
+ parcel->writeInt32(0); // 0-sized array
+ } else {
+ int32_t size = static_cast<int32_t>(mSurfaceList.size());
+
+ // Send 0-sized arrays when it's empty. Do not send null arrays.
+ parcel->writeInt32(size);
+
+ for (int32_t i = 0; i < size; ++i) {
+ // not sure if readParcelableArray does this, hard to tell from source
+ parcel->writeString16(String16("android.view.Surface"));
+
+ // Surface.writeToParcel
+ view::Surface surfaceShim;
+ surfaceShim.name = String16("unknown_name");
+ surfaceShim.graphicBufferProducer = mSurfaceList[i]->getIGraphicBufferProducer();
+ if ((err = surfaceShim.writeToParcel(parcel)) != OK) {
+ ALOGE("%s: Failed to write output target Surface %d to parcel: %s (%d)",
+ __FUNCTION__, i, strerror(-err), err);
+ return err;
+ }
+ }
+ }
+
+ parcel->writeInt32(mStreamIdxList.size());
+ for (size_t i = 0; i < mStreamIdxList.size(); ++i) {
+ if ((err = parcel->writeInt32(mStreamIdxList[i])) != OK) {
+ ALOGE("%s: Failed to write stream index to parcel", __FUNCTION__);
+ return err;
+ }
+ if ((err = parcel->writeInt32(mSurfaceIdxList[i])) != OK) {
+ ALOGE("%s: Failed to write surface index to parcel", __FUNCTION__);
+ return err;
+ }
+ }
return OK;
}
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
index 468a1ebd1cd59f55b4464c038b327e594a7fed96..feb04c24ed85d6f3563589abd2e71d11891e9985 100644
--- a/camera/camera2/OutputConfiguration.cpp
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -1,6 +1,6 @@
/*
**
-** Copyright 2015, The Android Open Source Project
+** Copyright 2015-2018, The Android Open Source Project
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
@@ -64,6 +64,10 @@ bool OutputConfiguration::isShared() const {
return mIsShared;
}
+String16 OutputConfiguration::getPhysicalCameraId() const {
+ return mPhysicalCameraId;
+}
+
OutputConfiguration::OutputConfiguration() :
mRotation(INVALID_ROTATION),
mSurfaceSetID(INVALID_SET_ID),
@@ -139,6 +143,8 @@ status_t OutputConfiguration::readFromParcel(const android::Parcel* parcel) {
return err;
}
+ parcel->readString16(&mPhysicalCameraId);
+
mRotation = rotation;
mSurfaceSetID = setID;
mSurfaceType = surfaceType;
@@ -153,19 +159,20 @@ status_t OutputConfiguration::readFromParcel(const android::Parcel* parcel) {
mGbps.push_back(surface.graphicBufferProducer);
}
- ALOGV("%s: OutputConfiguration: rotation = %d, setId = %d, surfaceType = %d",
- __FUNCTION__, mRotation, mSurfaceSetID, mSurfaceType);
+ ALOGV("%s: OutputConfiguration: rotation = %d, setId = %d, surfaceType = %d,"
+ " physicalCameraId = %s", __FUNCTION__, mRotation, mSurfaceSetID,
+ mSurfaceType, String8(mPhysicalCameraId).string());
return err;
}
OutputConfiguration::OutputConfiguration(sp<IGraphicBufferProducer>& gbp, int rotation,
- int surfaceSetID) {
+ int surfaceSetID, bool isShared) {
mGbps.push_back(gbp);
mRotation = rotation;
mSurfaceSetID = surfaceSetID;
mIsDeferred = false;
- mIsShared = false;
+ mIsShared = isShared;
}
status_t OutputConfiguration::writeToParcel(android::Parcel* parcel) const {
@@ -204,6 +211,9 @@ status_t OutputConfiguration::writeToParcel(android::Parcel* parcel) const {
err = parcel->writeParcelableVector(surfaceShims);
if (err != OK) return err;
+ err = parcel->writeString16(mPhysicalCameraId);
+ if (err != OK) return err;
+
return OK;
}
diff --git a/camera/include/camera/CaptureResult.h b/camera/include/camera/CaptureResult.h
index 917d953f70e53fa53b096da6f1b63447a4f78275..56fa17886f4f4bd276a6edae7a158287d3fae6ef 100644
--- a/camera/include/camera/CaptureResult.h
+++ b/camera/include/camera/CaptureResult.h
@@ -91,14 +91,36 @@ struct CaptureResultExtras : public android::Parcelable {
virtual status_t readFromParcel(const android::Parcel* parcel) override;
virtual status_t writeToParcel(android::Parcel* parcel) const override;
};
+
+struct PhysicalCaptureResultInfo : public android::Parcelable {
+
+ PhysicalCaptureResultInfo()
+ : mPhysicalCameraId(),
+ mPhysicalCameraMetadata() {
+ }
+ PhysicalCaptureResultInfo(const String16& cameraId,
+ const CameraMetadata& cameraMetadata)
+ : mPhysicalCameraId(cameraId),
+ mPhysicalCameraMetadata(cameraMetadata) {
+ }
+
+ String16 mPhysicalCameraId;
+ CameraMetadata mPhysicalCameraMetadata;
+
+ virtual status_t readFromParcel(const android::Parcel* parcel) override;
+ virtual status_t writeToParcel(android::Parcel* parcel) const override;
+};
+
} // namespace impl
} // namespace camera2
} // namespace hardware
using hardware::camera2::impl::CaptureResultExtras;
+using hardware::camera2::impl::PhysicalCaptureResultInfo;
struct CaptureResult : public virtual LightRefBase<CaptureResult> {
CameraMetadata mMetadata;
+ std::vector<PhysicalCaptureResultInfo> mPhysicalMetadatas;
CaptureResultExtras mResultExtras;
CaptureResult();
diff --git a/camera/include/camera/camera2/CaptureRequest.h b/camera/include/camera/camera2/CaptureRequest.h
index 01801831ea9f4b120a6fdf537fe433d8d50143ad..506abab70474aec771059af0cb99117814b28475 100644
--- a/camera/include/camera/camera2/CaptureRequest.h
+++ b/camera/include/camera/camera2/CaptureRequest.h
@@ -40,14 +40,35 @@ struct CaptureRequest : public Parcelable {
CaptureRequest(CaptureRequest&& rhs) noexcept;
virtual ~CaptureRequest();
- CameraMetadata mMetadata;
+ struct PhysicalCameraSettings {
+ std::string id;
+ CameraMetadata settings;
+ };
+ std::vector<PhysicalCameraSettings> mPhysicalCameraSettings;
+
+ // Used by NDK client to pass surfaces by stream/surface index.
+ bool mSurfaceConverted = false;
+
+ // Starting in Android O, create a Surface from Parcel will take one extra
+ // IPC call.
Vector<sp<Surface> > mSurfaceList;
+ // Optional way of passing surface list since passing Surface over binder
+ // is expensive. Use the stream/surface index from current output configuration
+ // to represent an configured output Surface. When stream/surface index is used,
+ // set mSurfaceList to zero length to save unparcel time.
+ Vector<int> mStreamIdxList;
+ Vector<int> mSurfaceIdxList; // per stream surface list index
+
bool mIsReprocess;
+ void* mContext; // arbitrary user context from NDK apps, null for java apps
+
/**
* Keep impl up-to-date with CaptureRequest.java in frameworks/base
*/
+ // used by cameraserver to receive CaptureRequest from java/NDK client
status_t readFromParcel(const android::Parcel* parcel) override;
+ // used by NDK client to send CaptureRequest to cameraserver
status_t writeToParcel(android::Parcel* parcel) const override;
};
diff --git a/camera/include/camera/camera2/OutputConfiguration.h b/camera/include/camera/camera2/OutputConfiguration.h
index 8e641c7d92fc5fa143e9a562abb03293bc91241e..a80f44b1f4c03793ab56eebcb76494534b37b5a3 100644
--- a/camera/include/camera/camera2/OutputConfiguration.h
+++ b/camera/include/camera/camera2/OutputConfiguration.h
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2015 The Android Open Source Project
+ * Copyright (C) 2015-2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -46,6 +46,7 @@ public:
int getHeight() const;
bool isDeferred() const;
bool isShared() const;
+ String16 getPhysicalCameraId() const;
/**
* Keep impl up-to-date with OutputConfiguration.java in frameworks/base
*/
@@ -64,7 +65,7 @@ public:
OutputConfiguration(const android::Parcel& parcel);
OutputConfiguration(sp<IGraphicBufferProducer>& gbp, int rotation,
- int surfaceSetID = INVALID_SET_ID);
+ int surfaceSetID = INVALID_SET_ID, bool isShared = false);
bool operator == (const OutputConfiguration& other) const {
return ( mRotation == other.mRotation &&
@@ -74,7 +75,8 @@ public:
mHeight == other.mHeight &&
mIsDeferred == other.mIsDeferred &&
mIsShared == other.mIsShared &&
- gbpsEqual(other));
+ gbpsEqual(other) &&
+ mPhysicalCameraId == other.mPhysicalCameraId );
}
bool operator != (const OutputConfiguration& other) const {
return !(*this == other);
@@ -102,6 +104,9 @@ public:
if (mIsShared != other.mIsShared) {
return mIsShared < other.mIsShared;
}
+ if (mPhysicalCameraId != other.mPhysicalCameraId) {
+ return mPhysicalCameraId < other.mPhysicalCameraId;
+ }
return gbpsLessThan(other);
}
bool operator > (const OutputConfiguration& other) const {
@@ -110,6 +115,7 @@ public:
bool gbpsEqual(const OutputConfiguration& other) const;
bool gbpsLessThan(const OutputConfiguration& other) const;
+ void addGraphicProducer(sp<IGraphicBufferProducer> gbp) {mGbps.push_back(gbp);}
private:
std::vector<sp<IGraphicBufferProducer>> mGbps;
int mRotation;
@@ -119,8 +125,7 @@ private:
int mHeight;
bool mIsDeferred;
bool mIsShared;
- // helper function
- static String16 readMaybeEmptyString16(const android::Parcel* parcel);
+ String16 mPhysicalCameraId;
};
} // namespace params
} // namespace camera2
diff --git a/camera/ndk/NdkCameraCaptureSession.cpp b/camera/ndk/NdkCameraCaptureSession.cpp
index 2a6b18200f1decd5dfc29968ebc64472bc431f64..fd95296161bb149a58e43d9faa353886f578c0aa 100644
--- a/camera/ndk/NdkCameraCaptureSession.cpp
+++ b/camera/ndk/NdkCameraCaptureSession.cpp
@@ -135,3 +135,19 @@ camera_status_t ACameraCaptureSession_abortCaptures(ACameraCaptureSession* sessi
}
return session->abortCaptures();
}
+
+EXPORT
+camera_status_t ACameraCaptureSession_updateSharedOutput(ACameraCaptureSession* session,
+ ACaptureSessionOutput* output) {
+ ATRACE_CALL();
+ if (session == nullptr) {
+ ALOGE("%s: Error: session is null", __FUNCTION__);
+ return ACAMERA_ERROR_INVALID_PARAMETER;
+ }
+
+ if (session->isClosed()) {
+ ALOGE("%s: session %p is already closed", __FUNCTION__, session);
+ return ACAMERA_ERROR_SESSION_CLOSED;
+ }
+ return session->updateOutputConfiguration(output);
+}
diff --git a/camera/ndk/NdkCameraDevice.cpp b/camera/ndk/NdkCameraDevice.cpp
index 9f6d1f7c6d056c33b0b92ea65f936207050b0abd..812a312c54483745429f846540127d0cf424ba73 100644
--- a/camera/ndk/NdkCameraDevice.cpp
+++ b/camera/ndk/NdkCameraDevice.cpp
@@ -103,10 +103,73 @@ camera_status_t ACaptureSessionOutput_create(
__FUNCTION__, window, out);
return ACAMERA_ERROR_INVALID_PARAMETER;
}
- *out = new ACaptureSessionOutput(window);
+ *out = new ACaptureSessionOutput(window, false);
return ACAMERA_OK;
}
+EXPORT
+camera_status_t ACaptureSessionSharedOutput_create(
+ ANativeWindow* window, /*out*/ACaptureSessionOutput** out) {
+ ATRACE_CALL();
+ if (window == nullptr || out == nullptr) {
+ ALOGE("%s: Error: bad argument. window %p, out %p",
+ __FUNCTION__, window, out);
+ return ACAMERA_ERROR_INVALID_PARAMETER;
+ }
+ *out = new ACaptureSessionOutput(window, true);
+ return ACAMERA_OK;
+}
+
+EXPORT
+camera_status_t ACaptureSessionSharedOutput_add(ACaptureSessionOutput *out,
+ ANativeWindow* window) {
+ ATRACE_CALL();
+ if ((window == nullptr) || (out == nullptr)) {
+ ALOGE("%s: Error: bad argument. window %p, out %p",
+ __FUNCTION__, window, out);
+ return ACAMERA_ERROR_INVALID_PARAMETER;
+ }
+ if (!out->mIsShared) {
+ ALOGE("%s: Error trying to insert a new window in non-shared output configuration",
+ __FUNCTION__);
+ return ACAMERA_ERROR_INVALID_OPERATION;
+ }
+ if (out->mWindow == window) {
+ ALOGE("%s: Error trying to add the same window associated with the output configuration",
+ __FUNCTION__);
+ return ACAMERA_ERROR_INVALID_PARAMETER;
+ }
+
+ auto insert = out->mSharedWindows.insert(window);
+ camera_status_t ret = (insert.second) ? ACAMERA_OK : ACAMERA_ERROR_INVALID_PARAMETER;
+ return ret;
+}
+
+EXPORT
+camera_status_t ACaptureSessionSharedOutput_remove(ACaptureSessionOutput *out,
+ ANativeWindow* window) {
+ ATRACE_CALL();
+ if ((window == nullptr) || (out == nullptr)) {
+ ALOGE("%s: Error: bad argument. window %p, out %p",
+ __FUNCTION__, window, out);
+ return ACAMERA_ERROR_INVALID_PARAMETER;
+ }
+ if (!out->mIsShared) {
+ ALOGE("%s: Error trying to remove a window in non-shared output configuration",
+ __FUNCTION__);
+ return ACAMERA_ERROR_INVALID_OPERATION;
+ }
+ if (out->mWindow == window) {
+ ALOGE("%s: Error trying to remove the same window associated with the output configuration",
+ __FUNCTION__);
+ return ACAMERA_ERROR_INVALID_PARAMETER;
+ }
+
+ auto remove = out->mSharedWindows.erase(window);
+ camera_status_t ret = (remove) ? ACAMERA_OK : ACAMERA_ERROR_INVALID_PARAMETER;
+ return ret;
+}
+
EXPORT
void ACaptureSessionOutput_free(ACaptureSessionOutput* output) {
ATRACE_CALL();
@@ -157,5 +220,21 @@ camera_status_t ACameraDevice_createCaptureSession(
__FUNCTION__, device, outputs, callbacks, session);
return ACAMERA_ERROR_INVALID_PARAMETER;
}
- return device->createCaptureSession(outputs, callbacks, session);
+ return device->createCaptureSession(outputs, nullptr, callbacks, session);
+}
+
+EXPORT
+camera_status_t ACameraDevice_createCaptureSessionWithSessionParameters(
+ ACameraDevice* device,
+ const ACaptureSessionOutputContainer* outputs,
+ const ACaptureRequest* sessionParameters,
+ const ACameraCaptureSession_stateCallbacks* callbacks,
+ /*out*/ACameraCaptureSession** session) {
+ ATRACE_CALL();
+ if (device == nullptr || outputs == nullptr || callbacks == nullptr || session == nullptr) {
+ ALOGE("%s: Error: invalid input: device %p, outputs %p, callbacks %p, session %p",
+ __FUNCTION__, device, outputs, callbacks, session);
+ return ACAMERA_ERROR_INVALID_PARAMETER;
+ }
+ return device->createCaptureSession(outputs, sessionParameters, callbacks, session);
}
diff --git a/camera/ndk/NdkCaptureRequest.cpp b/camera/ndk/NdkCaptureRequest.cpp
index 5b4c1809f71621f7dcdd8f0355fea126910c20f6..ac1856b6f17faa58c811c8c0c760daba68859403 100644
--- a/camera/ndk/NdkCaptureRequest.cpp
+++ b/camera/ndk/NdkCaptureRequest.cpp
@@ -142,3 +142,40 @@ void ACaptureRequest_free(ACaptureRequest* request) {
delete request;
return;
}
+
+EXPORT
+camera_status_t ACaptureRequest_setUserContext(
+ ACaptureRequest* request, void* context) {
+ if (request == nullptr) {
+ ALOGE("%s: invalid argument! request is NULL", __FUNCTION__);
+ return ACAMERA_ERROR_INVALID_PARAMETER;
+ }
+ return request->setContext(context);
+}
+
+EXPORT
+camera_status_t ACaptureRequest_getUserContext(
+ const ACaptureRequest* request, /*out*/void** context) {
+ if (request == nullptr || context == nullptr) {
+ ALOGE("%s: invalid argument! request %p, context %p",
+ __FUNCTION__, request, context);
+ return ACAMERA_ERROR_INVALID_PARAMETER;
+ }
+ return request->getContext(context);
+}
+
+EXPORT
+ACaptureRequest* ACaptureRequest_copy(const ACaptureRequest* src) {
+ ATRACE_CALL();
+ if (src == nullptr) {
+ ALOGE("%s: src is null!", __FUNCTION__);
+ return nullptr;
+ }
+
+ ACaptureRequest* pRequest = new ACaptureRequest();
+ pRequest->settings = new ACameraMetadata(*(src->settings));
+ pRequest->targets = new ACameraOutputTargets();
+ *(pRequest->targets) = *(src->targets);
+ pRequest->context = src->context;
+ return pRequest;
+}
diff --git a/camera/ndk/impl/ACameraCaptureSession.cpp b/camera/ndk/impl/ACameraCaptureSession.cpp
index b9c159de9a2143b35fb3a4cd1c12cefae15dc19b..f60e5fd9cdb9150b1d78f1d10129f9bdf42cbd58 100644
--- a/camera/ndk/impl/ACameraCaptureSession.cpp
+++ b/camera/ndk/impl/ACameraCaptureSession.cpp
@@ -148,6 +148,23 @@ camera_status_t ACameraCaptureSession::capture(
return ret;
}
+camera_status_t ACameraCaptureSession::updateOutputConfiguration(ACaptureSessionOutput *output) {
+ sp dev = getDeviceSp();
+ if (dev == nullptr) {
+ ALOGE("Error: Device associated with session %p has been closed!", this);
+ return ACAMERA_ERROR_SESSION_CLOSED;
+ }
+
+ camera_status_t ret;
+ dev->lockDeviceForSessionOps();
+ {
+ Mutex::Autolock _l(mSessionLock);
+ ret = dev->updateOutputConfigurationLocked(output);
+ }
+ dev->unlockDevice();
+ return ret;
+}
+
ACameraDevice*
ACameraCaptureSession::getDevice() {
Mutex::Autolock _l(mSessionLock);
diff --git a/camera/ndk/impl/ACameraCaptureSession.h b/camera/ndk/impl/ACameraCaptureSession.h
index 339c66577cefa27bc9d0faaa0a77ad5347d1ba3d..a2068e71b102652ed4e87286416289a26aa1f47a 100644
--- a/camera/ndk/impl/ACameraCaptureSession.h
+++ b/camera/ndk/impl/ACameraCaptureSession.h
@@ -24,7 +24,8 @@
using namespace android;
struct ACaptureSessionOutput {
- explicit ACaptureSessionOutput(ANativeWindow* window) : mWindow(window) {};
+ explicit ACaptureSessionOutput(ANativeWindow* window, bool isShared = false) :
+ mWindow(window), mIsShared(isShared) {};
bool operator == (const ACaptureSessionOutput& other) const {
return mWindow == other.mWindow;
@@ -40,6 +41,8 @@ struct ACaptureSessionOutput {
}
ANativeWindow* mWindow;
+ std::set<ANativeWindow*> mSharedWindows;
+ bool mIsShared;
int mRotation = CAMERA3_STREAM_ROTATION_0;
};
@@ -89,6 +92,8 @@ struct ACameraCaptureSession : public RefBase {
int numRequests, ACaptureRequest** requests,
/*optional*/int* captureSequenceId);
+ camera_status_t updateOutputConfiguration(ACaptureSessionOutput *output);
+
ACameraDevice* getDevice();
private:
diff --git a/camera/ndk/impl/ACameraDevice.cpp b/camera/ndk/impl/ACameraDevice.cpp
index 907802c8878f53e8fea89e5cc5324f214b4effea..907debc96ee798f66075371b3e3a9ff214612848 100644
--- a/camera/ndk/impl/ACameraDevice.cpp
+++ b/camera/ndk/impl/ACameraDevice.cpp
@@ -157,6 +157,7 @@ CameraDevice::createCaptureRequest(
camera_status_t
CameraDevice::createCaptureSession(
const ACaptureSessionOutputContainer* outputs,
+ const ACaptureRequest* sessionParameters,
const ACameraCaptureSession_stateCallbacks* callbacks,
/*out*/ACameraCaptureSession** session) {
sp<ACameraCaptureSession> currentSession = mCurrentSession.promote();
@@ -172,7 +173,7 @@ CameraDevice::createCaptureSession(
}
// Create new session
- ret = configureStreamsLocked(outputs);
+ ret = configureStreamsLocked(outputs, sessionParameters);
if (ret != ACAMERA_OK) {
ALOGE("Fail to create new session. cannot configure streams");
return ret;
@@ -289,13 +290,93 @@ CameraDevice::submitRequestsLocked(
return ACAMERA_OK;
}
+camera_status_t CameraDevice::updateOutputConfigurationLocked(ACaptureSessionOutput *output) {
+ camera_status_t ret = checkCameraClosedOrErrorLocked();
+ if (ret != ACAMERA_OK) {
+ return ret;
+ }
+
+ if (output == nullptr) {
+ return ACAMERA_ERROR_INVALID_PARAMETER;
+ }
+
+ if (!output->mIsShared) {
+ ALOGE("Error output configuration is not shared");
+ return ACAMERA_ERROR_INVALID_OPERATION;
+ }
+
+ int32_t streamId = -1;
+ for (auto& kvPair : mConfiguredOutputs) {
+ if (kvPair.second.first == output->mWindow) {
+ streamId = kvPair.first;
+ break;
+ }
+ }
+ if (streamId < 0) {
+ ALOGE("Error: Invalid output configuration");
+ return ACAMERA_ERROR_INVALID_PARAMETER;
+ }
+
+ sp<IGraphicBufferProducer> iGBP(nullptr);
+ ret = getIGBPfromAnw(output->mWindow, iGBP);
+ if (ret != ACAMERA_OK) {
+ ALOGE("Camera device %s failed to extract graphic producer from native window",
+ getId());
+ return ret;
+ }
+
+ OutputConfiguration outConfig(iGBP, output->mRotation, OutputConfiguration::INVALID_SET_ID,
+ true);
+
+ for (auto& anw : output->mSharedWindows) {
+ ret = getIGBPfromAnw(anw, iGBP);
+ if (ret != ACAMERA_OK) {
+ ALOGE("Camera device %s failed to extract graphic producer from native window",
+ getId());
+ return ret;
+ }
+ outConfig.addGraphicProducer(iGBP);
+ }
+
+ auto remoteRet = mRemote->updateOutputConfiguration(streamId, outConfig);
+ if (!remoteRet.isOk()) {
+ switch (remoteRet.serviceSpecificErrorCode()) {
+ case hardware::ICameraService::ERROR_INVALID_OPERATION:
+ ALOGE("Camera device %s invalid operation: %s", getId(),
+ remoteRet.toString8().string());
+ return ACAMERA_ERROR_INVALID_OPERATION;
+ break;
+ case hardware::ICameraService::ERROR_ALREADY_EXISTS:
+ ALOGE("Camera device %s output surface already exists: %s", getId(),
+ remoteRet.toString8().string());
+ return ACAMERA_ERROR_INVALID_PARAMETER;
+ break;
+ case hardware::ICameraService::ERROR_ILLEGAL_ARGUMENT:
+ ALOGE("Camera device %s invalid input argument: %s", getId(),
+ remoteRet.toString8().string());
+ return ACAMERA_ERROR_INVALID_PARAMETER;
+ break;
+ default:
+ ALOGE("Camera device %s failed to add shared output: %s", getId(),
+ remoteRet.toString8().string());
+ return ACAMERA_ERROR_UNKNOWN;
+ }
+ }
+ mConfiguredOutputs[streamId] = std::make_pair(output->mWindow, outConfig);
+
+ return ACAMERA_OK;
+}
+
camera_status_t
CameraDevice::allocateCaptureRequest(
const ACaptureRequest* request, /*out*/sp& outReq) {
camera_status_t ret;
sp req(new CaptureRequest());
- req->mMetadata = request->settings->getInternalData();
+ req->mPhysicalCameraSettings.push_back({std::string(mCameraId.string()),
+ request->settings->getInternalData()});
req->mIsReprocess = false; // NDK does not support reprocessing yet
+ req->mContext = request->context;
+ req->mSurfaceConverted = true; // set to true, and fill in stream/surface idx to speed up IPC
for (auto outputTarget : request->targets->mOutputs) {
ANativeWindow* anw = outputTarget.mWindow;
@@ -306,7 +387,31 @@ CameraDevice::allocateCaptureRequest(
return ret;
}
req->mSurfaceList.push_back(surface);
+
+ bool found = false;
+ // lookup stream/surface ID
+ for (const auto& kvPair : mConfiguredOutputs) {
+ int streamId = kvPair.first;
+ const OutputConfiguration& outConfig = kvPair.second.second;
+ const auto& gbps = outConfig.getGraphicBufferProducers();
+ for (int surfaceId = 0; surfaceId < (int) gbps.size(); surfaceId++) {
+ if (gbps[surfaceId] == surface->getIGraphicBufferProducer()) {
+ found = true;
+ req->mStreamIdxList.push_back(streamId);
+ req->mSurfaceIdxList.push_back(surfaceId);
+ break;
+ }
+ }
+ if (found) {
+ break;
+ }
+ }
+ if (!found) {
+ ALOGE("Unconfigured output target %p in capture request!", anw);
+ return ret;
+ }
}
+
outReq = req;
return ACAMERA_OK;
}
@@ -314,7 +419,7 @@ CameraDevice::allocateCaptureRequest(
ACaptureRequest*
CameraDevice::allocateACaptureRequest(sp& req) {
ACaptureRequest* pRequest = new ACaptureRequest();
- CameraMetadata clone = req->mMetadata;
+ CameraMetadata clone = req->mPhysicalCameraSettings.begin()->settings;
pRequest->settings = new ACameraMetadata(clone.release(), ACameraMetadata::ACM_REQUEST);
pRequest->targets = new ACameraOutputTargets();
for (size_t i = 0; i < req->mSurfaceList.size(); i++) {
@@ -322,6 +427,7 @@ CameraDevice::allocateACaptureRequest(sp& req) {
ACameraOutputTarget outputTarget(anw);
pRequest->targets->mOutputs.insert(outputTarget);
}
+ pRequest->context = req->mContext;
return pRequest;
}
@@ -356,7 +462,7 @@ CameraDevice::notifySessionEndOfLifeLocked(ACameraCaptureSession* session) {
}
// No new session, unconfigure now
- camera_status_t ret = configureStreamsLocked(nullptr);
+ camera_status_t ret = configureStreamsLocked(nullptr, nullptr);
if (ret != ACAMERA_OK) {
ALOGE("Unconfigure stream failed. Device might still be configured! ret %d", ret);
}
@@ -486,17 +592,11 @@ camera_status_t
CameraDevice::getIGBPfromAnw(
ANativeWindow* anw,
sp& out) {
- if (anw == nullptr) {
- ALOGE("Error: output ANativeWindow is null");
- return ACAMERA_ERROR_INVALID_PARAMETER;
- }
- int value;
- int err = (*anw->query)(anw, NATIVE_WINDOW_CONCRETE_TYPE, &value);
- if (err != OK || value != NATIVE_WINDOW_SURFACE) {
- ALOGE("Error: ANativeWindow is not backed by Surface!");
- return ACAMERA_ERROR_INVALID_PARAMETER;
+ sp surface;
+ camera_status_t ret = getSurfaceFromANativeWindow(anw, surface);
+ if (ret != ACAMERA_OK) {
+ return ret;
}
- const sp surface(static_cast(anw));
out = surface->getIGraphicBufferProducer();
return ACAMERA_OK;
}
@@ -520,7 +620,8 @@ CameraDevice::getSurfaceFromANativeWindow(
}
camera_status_t
-CameraDevice::configureStreamsLocked(const ACaptureSessionOutputContainer* outputs) {
+CameraDevice::configureStreamsLocked(const ACaptureSessionOutputContainer* outputs,
+ const ACaptureRequest* sessionParameters) {
ACaptureSessionOutputContainer emptyOutput;
if (outputs == nullptr) {
outputs = &emptyOutput;
@@ -540,7 +641,8 @@ CameraDevice::configureStreamsLocked(const ACaptureSessionOutputContainer* outpu
return ret;
}
outputSet.insert(std::make_pair(
- anw, OutputConfiguration(iGBP, outConfig.mRotation)));
+ anw, OutputConfiguration(iGBP, outConfig.mRotation,
+ OutputConfiguration::INVALID_SET_ID, outConfig.mIsShared)));
}
auto addSet = outputSet;
std::vector deleteList;
@@ -615,7 +717,11 @@ CameraDevice::configureStreamsLocked(const ACaptureSessionOutputContainer* outpu
mConfiguredOutputs.insert(std::make_pair(streamId, outputPair));
}
- remoteRet = mRemote->endConfigure(/*isConstrainedHighSpeed*/ false);
+ CameraMetadata params;
+ if ((sessionParameters != nullptr) && (sessionParameters->settings != nullptr)) {
+ params.append(sessionParameters->settings->getInternalData());
+ }
+ remoteRet = mRemote->endConfigure(/*isConstrainedHighSpeed*/ false, params);
if (remoteRet.serviceSpecificErrorCode() == hardware::ICameraService::ERROR_ILLEGAL_ARGUMENT) {
ALOGE("Camera device %s cannnot support app output configuration: %s", getId(),
remoteRet.toString8().string());
@@ -730,19 +836,26 @@ CameraDevice::onCaptureErrorLocked(
setCameraDeviceErrorLocked(ACAMERA_ERROR_CAMERA_SERVICE);
return;
}
- ANativeWindow* anw = outputPairIt->second.first;
- ALOGV("Camera %s Lost output buffer for ANW %p frame %" PRId64,
- getId(), anw, frameNumber);
-
- sp msg = new AMessage(kWhatCaptureBufferLost, mHandler);
- msg->setPointer(kContextKey, cbh.mCallbacks.context);
- msg->setObject(kSessionSpKey, session);
- msg->setPointer(kCallbackFpKey, (void*) onBufferLost);
- msg->setObject(kCaptureRequestKey, request);
- msg->setPointer(kAnwKey, (void*) anw);
- msg->setInt64(kFrameNumberKey, frameNumber);
- postSessionMsgAndCleanup(msg);
+ const auto& gbps = outputPairIt->second.second.getGraphicBufferProducers();
+ for (const auto& outGbp : gbps) {
+ for (auto surface : request->mSurfaceList) {
+ if (surface->getIGraphicBufferProducer() == outGbp) {
+ ANativeWindow* anw = static_cast(surface.get());
+ ALOGV("Camera %s Lost output buffer for ANW %p frame %" PRId64,
+ getId(), anw, frameNumber);
+
+ sp msg = new AMessage(kWhatCaptureBufferLost, mHandler);
+ msg->setPointer(kContextKey, cbh.mCallbacks.context);
+ msg->setObject(kSessionSpKey, session);
+ msg->setPointer(kCallbackFpKey, (void*) onBufferLost);
+ msg->setObject(kCaptureRequestKey, request);
+ msg->setPointer(kAnwKey, (void*) anw);
+ msg->setInt64(kFrameNumberKey, frameNumber);
+ postSessionMsgAndCleanup(msg);
+ }
+ }
+ }
} else { // Handle other capture failures
// Fire capture failure callback if there is one registered
ACameraCaptureSession_captureCallback_failed onError = cbh.mCallbacks.onCaptureFailed;
@@ -1293,7 +1406,9 @@ CameraDevice::ServiceCallback::onCaptureStarted(
binder::Status
CameraDevice::ServiceCallback::onResultReceived(
const CameraMetadata& metadata,
- const CaptureResultExtras& resultExtras) {
+ const CaptureResultExtras& resultExtras,
+ const std::vector& physicalResultInfos) {
+ (void) physicalResultInfos;
binder::Status ret = binder::Status::ok();
sp dev = mDevice.promote();
diff --git a/camera/ndk/impl/ACameraDevice.h b/camera/ndk/impl/ACameraDevice.h
index 6ed3881932bf0e0c2a240898e3512a2bfce20f99..136914823adef6745323a803c2a17bf677df6aca 100644
--- a/camera/ndk/impl/ACameraDevice.h
+++ b/camera/ndk/impl/ACameraDevice.h
@@ -36,7 +36,8 @@
#include
#include
-#include
+#include
+#include
#include "ACameraMetadata.h"
namespace android {
@@ -59,6 +60,7 @@ class CameraDevice final : public RefBase {
camera_status_t createCaptureSession(
const ACaptureSessionOutputContainer* outputs,
+ const ACaptureRequest* sessionParameters,
const ACameraCaptureSession_stateCallbacks* callbacks,
/*out*/ACameraCaptureSession** session);
@@ -72,7 +74,8 @@ class CameraDevice final : public RefBase {
binder::Status onCaptureStarted(const CaptureResultExtras& resultExtras,
int64_t timestamp) override;
binder::Status onResultReceived(const CameraMetadata& metadata,
- const CaptureResultExtras& resultExtras) override;
+ const CaptureResultExtras& resultExtras,
+ const std::vector& physicalResultInfos) override;
binder::Status onPrepared(int streamId) override;
binder::Status onRequestQueueEmpty() override;
binder::Status onRepeatingRequestError(int64_t lastFrameNumber,
@@ -122,7 +125,9 @@ class CameraDevice final : public RefBase {
/*out*/int* captureSequenceId,
bool isRepeating);
- static camera_status_t allocateCaptureRequest(
+ camera_status_t updateOutputConfigurationLocked(ACaptureSessionOutput *output);
+
+ camera_status_t allocateCaptureRequest(
const ACaptureRequest* request, sp& outReq);
static ACaptureRequest* allocateACaptureRequest(sp& req);
@@ -136,7 +141,8 @@ class CameraDevice final : public RefBase {
// For capture session to notify its end of life
void notifySessionEndOfLifeLocked(ACameraCaptureSession* session);
- camera_status_t configureStreamsLocked(const ACaptureSessionOutputContainer* outputs);
+ camera_status_t configureStreamsLocked(const ACaptureSessionOutputContainer* outputs,
+ const ACaptureRequest* sessionParameters);
// Input message will be posted and cleared after this returns
void postSessionMsgAndCleanup(sp& msg);
@@ -306,9 +312,10 @@ struct ACameraDevice {
camera_status_t createCaptureSession(
const ACaptureSessionOutputContainer* outputs,
+ const ACaptureRequest* sessionParameters,
const ACameraCaptureSession_stateCallbacks* callbacks,
/*out*/ACameraCaptureSession** session) {
- return mDevice->createCaptureSession(outputs, callbacks, session);
+ return mDevice->createCaptureSession(outputs, sessionParameters, callbacks, session);
}
/***********************
diff --git a/camera/ndk/impl/ACameraManager.cpp b/camera/ndk/impl/ACameraManager.cpp
index a1a8cd6d1bf79553e15995c61872b718b2f04c60..c59d0e7cf6f353b71a6d806ab3b53f255ebdedef 100644
--- a/camera/ndk/impl/ACameraManager.cpp
+++ b/camera/ndk/impl/ACameraManager.cpp
@@ -221,7 +221,7 @@ void CameraManagerGlobal::unregisterAvailabilityCallback(
mCallbacks.erase(cb);
}
-void CameraManagerGlobal::getCameraIdList(std::vector *cameraIds) {
+void CameraManagerGlobal::getCameraIdList(std::vector* cameraIds) {
// Ensure that we have initialized/refreshed the list of available devices
auto cs = getCameraService();
Mutex::Autolock _l(mLock);
diff --git a/camera/ndk/impl/ACameraManager.h b/camera/ndk/impl/ACameraManager.h
index 4a172f38e51913eda258803b378baea3240e9d4d..cc42f77c2c365c72820dafc59fce7344423ef11e 100644
--- a/camera/ndk/impl/ACameraManager.h
+++ b/camera/ndk/impl/ACameraManager.h
@@ -19,6 +19,7 @@
#include
+#include
#include
#include
#include
@@ -140,8 +141,29 @@ class CameraManagerGlobal final : public RefBase {
static bool validStatus(int32_t status);
static bool isStatusAvailable(int32_t status);
+ // The sort logic must match the logic in
+ // libcameraservice/common/CameraProviderManager.cpp::getAPI1CompatibleCameraDeviceIds
+ struct CameraIdComparator {
+ bool operator()(const String8& a, const String8& b) const {
+ uint32_t aUint = 0, bUint = 0;
+ bool aIsUint = base::ParseUint(a.c_str(), &aUint);
+ bool bIsUint = base::ParseUint(b.c_str(), &bUint);
+
+ // Uint device IDs first
+ if (aIsUint && bIsUint) {
+ return aUint < bUint;
+ } else if (aIsUint) {
+ return true;
+ } else if (bIsUint) {
+ return false;
+ }
+ // Simple string compare if both id are not uint
+ return a < b;
+ }
+ };
+
// Map camera_id -> status
- std::map mDeviceStatusMap;
+ std::map mDeviceStatusMap;
// For the singleton instance
static Mutex sLock;
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index 7b33c32ce6d973013566ddef9ed5d4fae4ddffe9..fc00a2d2baf808be27f04c89ef79187750a2f56b 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -235,7 +235,7 @@ ACameraMetadata::getTags(/*out*/int32_t* numTags,
}
const CameraMetadata&
-ACameraMetadata::getInternalData() {
+ACameraMetadata::getInternalData() const {
return mData;
}
@@ -305,6 +305,7 @@ ACameraMetadata::isCaptureRequestTag(const uint32_t tag) {
case ACAMERA_STATISTICS_FACE_DETECT_MODE:
case ACAMERA_STATISTICS_HOT_PIXEL_MAP_MODE:
case ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE:
+ case ACAMERA_STATISTICS_OIS_DATA_MODE:
case ACAMERA_TONEMAP_CURVE_BLUE:
case ACAMERA_TONEMAP_CURVE_GREEN:
case ACAMERA_TONEMAP_CURVE_RED:
@@ -312,6 +313,7 @@ ACameraMetadata::isCaptureRequestTag(const uint32_t tag) {
case ACAMERA_TONEMAP_GAMMA:
case ACAMERA_TONEMAP_PRESET_CURVE:
case ACAMERA_BLACK_LEVEL_LOCK:
+ case ACAMERA_DISTORTION_CORRECTION_MODE:
return true;
default:
return false;
diff --git a/camera/ndk/impl/ACameraMetadata.h b/camera/ndk/impl/ACameraMetadata.h
index 143efc7e1202842469cfca2eb84fbb2de55ecc38..0fd7efa1637827a8b9f0d5fbe0b03d12cc935136 100644
--- a/camera/ndk/impl/ACameraMetadata.h
+++ b/camera/ndk/impl/ACameraMetadata.h
@@ -64,7 +64,7 @@ struct ACameraMetadata : public RefBase {
void filterUnsupportedFeatures(); // Hide features not yet supported by NDK
void filterStreamConfigurations(); // Hide input streams, translate hal format to NDK formats
- const CameraMetadata& getInternalData();
+ const CameraMetadata& getInternalData() const;
template
camera_status_t updateImpl(uint32_t tag, uint32_t count, const NDK_T* data) {
diff --git a/camera/ndk/impl/ACaptureRequest.h b/camera/ndk/impl/ACaptureRequest.h
index e5b453e32c07df8eb2bc0d10279834442c1ba1b7..06b2cc3c2727e48ca911b6b613390af3842a12ee 100644
--- a/camera/ndk/impl/ACaptureRequest.h
+++ b/camera/ndk/impl/ACaptureRequest.h
@@ -45,8 +45,19 @@ struct ACameraOutputTargets {
};
struct ACaptureRequest {
+ camera_status_t setContext(void* ctx) {
+ context = ctx;
+ return ACAMERA_OK;
+ }
+
+ camera_status_t getContext(void** ctx) const {
+ *ctx = context;
+ return ACAMERA_OK;
+ }
+
ACameraMetadata* settings;
ACameraOutputTargets* targets;
+ void* context;
};
#endif // _ACAPTURE_REQUEST_H
diff --git a/camera/ndk/include/camera/NdkCameraCaptureSession.h b/camera/ndk/include/camera/NdkCameraCaptureSession.h
index b544b50372b28cd7cbc51f0130ef25bf0dd1f2f7..78e062a8895a80ab75d77c0502b47b5caa7ad545 100644
--- a/camera/ndk/include/camera/NdkCameraCaptureSession.h
+++ b/camera/ndk/include/camera/NdkCameraCaptureSession.h
@@ -593,6 +593,54 @@ camera_status_t ACameraCaptureSession_abortCaptures(ACameraCaptureSession* sessi
#endif /* __ANDROID_API__ >= 24 */
+#if __ANDROID_API__ >= 28
+
+typedef struct ACaptureSessionOutput ACaptureSessionOutput;
+
+/**
+ * Update shared ACaptureSessionOutput.
+ *
+ * A shared ACaptureSessionOutput (see {@link ACaptureSessionSharedOutput_create}) that
+ * was modified via calls to {@link ACaptureSessionSharedOutput_add} or
+ * {@link ACaptureSessionSharedOutput_remove} must be updated by calling this method before its
+ * changes take effect. After the update call returns with {@link ACAMERA_OK}, any newly added
+ * native windows can be used as a target in subsequent capture requests.
+ *
+ * Native windows that get removed must not be part of any active repeating or single/burst
+ * request or have any pending results. Consider updating repeating requests via
+ * {@link ACaptureSessionOutput_setRepeatingRequest} and then wait for the last frame number
+ * when the sequence completes
+ * {@link ACameraCaptureSession_captureCallback#onCaptureSequenceCompleted}.
+ *
+ * Native windows that get added must not be part of any other registered ACaptureSessionOutput
+ * and must be compatible. Compatible windows must have matching format, rotation and
+ * consumer usage.
+ *
+ * A shared ACameraCaptureSession can support up to 4 additional native windows.
+ *
+ * @param session the capture session of interest
+ * @param output the modified output configuration
+ *
+ * @return -
+ * {@link ACAMERA_OK} if the method succeeds.
+ * - {@link ACAMERA_ERROR_INVALID_PARAMETER} if session or output is NULL; or output
+ * contains invalid native windows; or if an attempt was made to add
+ * a native window to a different output configuration; or new native window is not
+ * compatible; or any removed native window still has pending requests;
+ * - {@link ACAMERA_ERROR_INVALID_OPERATION} if output configuration is not shared (see
+ * {@link ACaptureSessionSharedOutput_create}; or the number of additional
+ * native windows goes beyond the supported limit.
+ * - {@link ACAMERA_ERROR_SESSION_CLOSED} if the capture session has been closed
+ * - {@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if the camera device is closed
+ * - {@link ACAMERA_ERROR_CAMERA_DEVICE} if the camera device encounters fatal error
+ * - {@link ACAMERA_ERROR_CAMERA_SERVICE} if the camera service encounters fatal
+ * error
+ * - {@link ACAMERA_ERROR_UNKNOWN} if the method fails for some other reasons
+ */
+camera_status_t ACameraCaptureSession_updateSharedOutput(ACameraCaptureSession* session,
+ ACaptureSessionOutput* output);
+#endif /* __ANDROID_API__ >= 28 */
+
__END_DECLS
#endif /* _NDK_CAMERA_CAPTURE_SESSION_H */
diff --git a/camera/ndk/include/camera/NdkCameraDevice.h b/camera/ndk/include/camera/NdkCameraDevice.h
index 2c65529b4b5970fcf8ef54016a613e70b1c878f2..b715b12e15669c153e643c3714fea1acc7b2bfe2 100644
--- a/camera/ndk/include/camera/NdkCameraDevice.h
+++ b/camera/ndk/include/camera/NdkCameraDevice.h
@@ -256,6 +256,7 @@ typedef enum {
* @see ACameraDevice_createCaptureRequest
*/
TEMPLATE_MANUAL = 6,
+
} ACameraDevice_request_template;
/**
@@ -666,9 +667,102 @@ camera_status_t ACameraDevice_createCaptureSession(
#endif /* __ANDROID_API__ >= 24 */
+#if __ANDROID_API__ >= 28
+
+/**
+ * Create a shared ACaptureSessionOutput object.
+ *
+ * The ACaptureSessionOutput is used in {@link ACaptureSessionOutputContainer_add} method to add
+ * an output {@link ANativeWindow} to ACaptureSessionOutputContainer. Use
+ * {@link ACaptureSessionOutput_free} to free the object and its memory after application no longer
+ * needs the {@link ACaptureSessionOutput}. A shared ACaptureSessionOutput can be further modified
+ * via {@link ACaptureSessionSharedOutput_add} or {@link ACaptureSessionSharedOutput_remove} and
+ * must be updated via {@link ACameraCaptureSession_updateSharedOutput}.
+ *
+ * @param anw the {@link ANativeWindow} to be associated with the {@link ACaptureSessionOutput}
+ * @param output the output {@link ACaptureSessionOutput} will be stored here if the
+ * method call succeeds.
+ *
+ * @return
+ * - {@link ACAMERA_OK} if the method call succeeds. The created container will be
+ * filled in the output argument.
+ * - {@link ACAMERA_ERROR_INVALID_PARAMETER} if anw or output is NULL.
+ *
+ * @see ACaptureSessionOutputContainer_add
+ */
+camera_status_t ACaptureSessionSharedOutput_create(
+ ANativeWindow* anw, /*out*/ACaptureSessionOutput** output);
+
+/**
+ * Add a native window to shared ACaptureSessionOutput.
+ *
+ * The ACaptureSessionOutput must be created via {@link ACaptureSessionSharedOutput_create}.
+ *
+ * @param output the shared ACaptureSessionOutput to be extended.
+ * @param anw The new native window.
+ *
+ * @return
+ * - {@link ACAMERA_OK} if the method call succeeds.
+ * - {@link ACAMERA_ERROR_INVALID_PARAMETER} if anw or output is NULL; or output is not
+ * shared see {@link ACaptureSessionSharedOutput_create}; or anw matches with the native
+ * window associated with ACaptureSessionOutput; or anw is already present inside
+ * ACaptureSessionOutput.
+ */
+camera_status_t ACaptureSessionSharedOutput_add(ACaptureSessionOutput *output, ANativeWindow *anw);
+
+/**
+ * Remove a native window from shared ACaptureSessionOutput.
+ *
+ * @param output the {@link ACaptureSessionOutput} to be modified.
+ * @param anw The native window to be removed.
+ *
+ * @return
+ * - {@link ACAMERA_OK} if the method call succeeds.
+ * - {@link ACAMERA_ERROR_INVALID_PARAMETER} if anw or output is NULL; or output is not
+ * shared see {@link ACaptureSessionSharedOutput_create}; or anw matches with the native
+ * window associated with ACaptureSessionOutput; or anw is not present inside
+ * ACaptureSessionOutput.
+ */
+camera_status_t ACaptureSessionSharedOutput_remove(ACaptureSessionOutput *output,
+ ANativeWindow* anw);
+
+/**
+ * Create a new camera capture session similar to {@link ACameraDevice_createCaptureSession}. This
+ * function allows clients to pass additional session parameters during session initialization. For
+ * further information about session parameters see {@link ACAMERA_REQUEST_AVAILABLE_SESSION_KEYS}.
+ *
+ * @param device the camera device of interest.
+ * @param outputs the {@link ACaptureSessionOutputContainer} describes all output streams.
+ * @param sessionParameters An optional capture request that contains the initial values of session
+ * parameters advertised in
+ * {@link ACAMERA_REQUEST_AVAILABLE_SESSION_KEYS}.
+ * @param callbacks the {@link ACameraCaptureSession_stateCallbacks}
+ * capture session state callbacks.
+ * @param session the created {@link ACameraCaptureSession} will be filled here if the method call
+ * succeeds.
+ *
+ * @return
+ * - {@link ACAMERA_OK} if the method call succeeds. The created capture session will be
+ * filled in session argument.
+ * - {@link ACAMERA_ERROR_INVALID_PARAMETER} if any of device, outputs, callbacks or
+ * session is NULL.
+ * - {@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if the camera device is closed.
+ * - {@link ACAMERA_ERROR_CAMERA_DEVICE} if the camera device encounters fatal error.
+ * - {@link ACAMERA_ERROR_CAMERA_SERVICE} if the camera service encounters fatal error.
+ *
+ * - {@link ACAMERA_ERROR_UNKNOWN} if the method fails for some other reasons.
+ */
+camera_status_t ACameraDevice_createCaptureSessionWithSessionParameters(
+ ACameraDevice* device,
+ const ACaptureSessionOutputContainer* outputs,
+ const ACaptureRequest* sessionParameters,
+ const ACameraCaptureSession_stateCallbacks* callbacks,
+ /*out*/ACameraCaptureSession** session);
+
+#endif /* __ANDROID_API__ >= 28 */
+
__END_DECLS
#endif /* _NDK_CAMERA_DEVICE_H */
/** @} */
-
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 629d75afae0b030f6e6c31ddf94bdad626da4895..3010646afce4b893f7eec72d2dcc20107f152aad 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -69,6 +69,8 @@ typedef enum acamera_metadata_section {
ACAMERA_SYNC,
ACAMERA_REPROCESS,
ACAMERA_DEPTH,
+ ACAMERA_LOGICAL_MULTI_CAMERA,
+ ACAMERA_DISTORTION_CORRECTION,
ACAMERA_SECTION_COUNT,
ACAMERA_VENDOR = 0x8000
@@ -104,6 +106,12 @@ typedef enum acamera_metadata_section_start {
ACAMERA_SYNC_START = ACAMERA_SYNC << 16,
ACAMERA_REPROCESS_START = ACAMERA_REPROCESS << 16,
ACAMERA_DEPTH_START = ACAMERA_DEPTH << 16,
+ ACAMERA_LOGICAL_MULTI_CAMERA_START
+ = ACAMERA_LOGICAL_MULTI_CAMERA
+ << 16,
+ ACAMERA_DISTORTION_CORRECTION_START
+ = ACAMERA_DISTORTION_CORRECTION
+ << 16,
ACAMERA_VENDOR_START = ACAMERA_VENDOR << 16
} acamera_metadata_section_start_t;
@@ -471,10 +479,6 @@ typedef enum acamera_metadata_tag {
* Otherwise will always be present.
* The maximum number of regions supported by the device is determined by the value
* of android.control.maxRegionsAe.
- * The data representation is int[5 * area_count].
- * Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight).
- * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
- * ymax.
* The coordinate system is based on the active pixel array,
* with (0,0) being the top-left pixel in the active pixel array, and
* (ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.width - 1,
@@ -495,6 +499,10 @@ typedef enum acamera_metadata_tag {
* region and output only the intersection rectangle as the metering region in the result
* metadata. If the region is entirely outside the crop region, it will be ignored and
* not reported in the result metadata.
+ * The data representation is int[5 * area_count].
+ * Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight).
+ * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
+ * ymax.
*
* @see ACAMERA_SCALER_CROP_REGION
* @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
@@ -633,10 +641,6 @@ typedef enum acamera_metadata_tag {
* Otherwise will always be present.
* The maximum number of focus areas supported by the device is determined by the value
* of android.control.maxRegionsAf.
- * The data representation is int[5 * area_count].
- * Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight).
- * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
- * ymax.
* The coordinate system is based on the active pixel array,
* with (0,0) being the top-left pixel in the active pixel array, and
* (ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.width - 1,
@@ -651,12 +655,17 @@ typedef enum acamera_metadata_tag {
* is used, all non-zero weights will have the same effect. A region with 0 weight is
* ignored.
* If all regions have 0 weight, then no specific metering area needs to be used by the
- * camera device.
+ * camera device. The capture result will either be a zero weight region as well, or
+ * the region selected by the camera device as the focus area of interest.
* If the metering region is outside the used ACAMERA_SCALER_CROP_REGION returned in
* capture result metadata, the camera device will ignore the sections outside the crop
* region and output only the intersection rectangle as the metering region in the result
* metadata. If the region is entirely outside the crop region, it will be ignored and
* not reported in the result metadata.
+ * The data representation is int[5 * area_count].
+ * Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight).
+ * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
+ * ymax.
*
* @see ACAMERA_SCALER_CROP_REGION
* @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
@@ -791,10 +800,6 @@ typedef enum acamera_metadata_tag {
* Otherwise will always be present.
* The maximum number of regions supported by the device is determined by the value
* of android.control.maxRegionsAwb.
- * The data representation is int[5 * area_count].
- * Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight).
- * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
- * ymax.
* The coordinate system is based on the active pixel array,
* with (0,0) being the top-left pixel in the active pixel array, and
* (ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.width - 1,
@@ -815,6 +820,10 @@ typedef enum acamera_metadata_tag {
* region and output only the intersection rectangle as the metering region in the result
* metadata. If the region is entirely outside the crop region, it will be ignored and
* not reported in the result metadata.
+ * The data representation is int[5 * area_count].
+ * Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight).
+ * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
+ * ymax.
*
* @see ACAMERA_SCALER_CROP_REGION
* @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
@@ -837,10 +846,13 @@ typedef enum acamera_metadata_tag {
*
* This control (except for MANUAL) is only effective if
* ACAMERA_CONTROL_MODE != OFF and any 3A routine is active.
- * ZERO_SHUTTER_LAG will be supported if ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
- * contains PRIVATE_REPROCESSING or YUV_REPROCESSING. MANUAL will be supported if
- * ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains MANUAL_SENSOR. Other intent values are
- * always supported.
+ * All intents are supported by all devices, except that:
+ * * ZERO_SHUTTER_LAG will be supported if ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
+ * PRIVATE_REPROCESSING or YUV_REPROCESSING.
+ * * MANUAL will be supported if ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
+ * MANUAL_SENSOR.
+ * * MOTION_TRACKING will be supported if ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
+ * MOTION_TRACKING.
*
* @see ACAMERA_CONTROL_MODE
* @see ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
@@ -885,11 +897,10 @@ typedef enum acamera_metadata_tag {
* When set to AUTO, the individual algorithm controls in
* ACAMERA_CONTROL_* are in effect, such as ACAMERA_CONTROL_AF_MODE.
* When set to USE_SCENE_MODE, the individual controls in
- * ACAMERA_CONTROL_* are mostly disabled, and the camera device implements
- * one of the scene mode settings (such as ACTION, SUNSET, or PARTY)
- * as it wishes. The camera device scene mode 3A settings are provided by
- * capture results {@link ACameraMetadata} from
- * {@link ACameraCaptureSession_captureCallback_result}.
+ * ACAMERA_CONTROL_* are mostly disabled, and the camera device
+ * implements one of the scene mode settings (such as ACTION,
+ * SUNSET, or PARTY) as it wishes. The camera device scene mode
+ * 3A settings are provided by {@link ACameraCaptureSession_captureCallback_result capture results}.
* When set to OFF_KEEP_STATE, it is similar to OFF mode, the only difference
* is that this frame will not be used by camera device background 3A statistics
* update, as if this frame is never captured. This mode can be used in the scenario
@@ -1043,20 +1054,18 @@ typedef enum acamera_metadata_tag {
* For constant-framerate recording, for each normal
* CamcorderProfile, that is, a
* CamcorderProfile that has
- * quality
- * in the range [
- * QUALITY_LOW,
- * QUALITY_2160P],
- * if the profile is supported by the device and has
- * videoFrameRate
- * x, this list will always include (x,x).
+ * quality in
+ * the range [QUALITY_LOW,
+ * QUALITY_2160P], if the profile is
+ * supported by the device and has
+ * videoFrameRate x, this list will
+ * always include (x,x).
*
*
* Also, a camera device must either not support any
* CamcorderProfile,
* or support at least one
- * normal CamcorderProfile
- * that has
+ * normal CamcorderProfile that has
* videoFrameRate x >= 24.
*
*
@@ -1282,7 +1291,7 @@ typedef enum acamera_metadata_tag {
* State | Transition Cause | New State | Notes
* :------------:|:----------------:|:---------:|:-----------------------:
* INACTIVE | | INACTIVE | Camera device auto exposure algorithm is disabled
- * When ACAMERA_CONTROL_AE_MODE is AE_MODE_ON_*:
+ * When ACAMERA_CONTROL_AE_MODE is AE_MODE_ON*:
* State | Transition Cause | New State | Notes
* :-------------:|:--------------------------------------------:|:--------------:|:-----------------:
* INACTIVE | Camera device initiates AE scan | SEARCHING | Values changing
@@ -1303,10 +1312,13 @@ typedef enum acamera_metadata_tag {
* LOCKED | aeLock is ON and aePrecaptureTrigger is CANCEL| LOCKED | Precapture trigger is ignored when AE is already locked
* Any state (excluding LOCKED) | ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is START | PRECAPTURE | Start AE precapture metering sequence
* Any state (excluding LOCKED) | ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER is CANCEL| INACTIVE | Currently active precapture metering sequence is canceled
+ * If the camera device supports AE external flash mode (ON_EXTERNAL_FLASH is included in
+ * ACAMERA_CONTROL_AE_AVAILABLE_MODES), ACAMERA_CONTROL_AE_STATE must be FLASH_REQUIRED after
+ * the camera device finishes AE scan and it's too dark without flash.
* For the above table, the camera device may skip reporting any state changes that happen
* without application intervention (i.e. mode switch, trigger, locking). Any state that
* can be skipped in that manner is called a transient state.
- * For example, for above AE modes (AE_MODE_ON_*), in addition to the state transitions
+ * For example, for above AE modes (AE_MODE_ON*), in addition to the state transitions
* listed in above table, it is also legal for the camera device to skip one or more
* transient states between two results. See below table for examples:
* State | Transition Cause | New State | Notes
@@ -1319,9 +1331,11 @@ typedef enum acamera_metadata_tag {
* CONVERGED | Camera device finished AE scan | FLASH_REQUIRED | Converged but too dark w/o flash after a new scan, transient states are skipped by camera device.
* FLASH_REQUIRED | Camera device finished AE scan | CONVERGED | Converged after a new scan, transient states are skipped by camera device.
*
+ * @see ACAMERA_CONTROL_AE_AVAILABLE_MODES
* @see ACAMERA_CONTROL_AE_LOCK
* @see ACAMERA_CONTROL_AE_MODE
* @see ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER
+ * @see ACAMERA_CONTROL_AE_STATE
* @see ACAMERA_CONTROL_MODE
* @see ACAMERA_CONTROL_SCENE_MODE
*/
@@ -1619,13 +1633,13 @@ typedef enum acamera_metadata_tag {
* compared to previous regular requests. enableZsl does not affect requests with other
* capture intents.
* For example, when requests are submitted in the following order:
- * Request A: enableZsl is true, ACAMERA_CONTROL_CAPTURE_INTENT is PREVIEW
- * Request B: enableZsl is true, ACAMERA_CONTROL_CAPTURE_INTENT is STILL_CAPTURE
+ * Request A: enableZsl is ON, ACAMERA_CONTROL_CAPTURE_INTENT is PREVIEW
+ * Request B: enableZsl is ON, ACAMERA_CONTROL_CAPTURE_INTENT is STILL_CAPTURE
* The output images for request B may have contents captured before the output images for
* request A, and the result metadata for request B may be older than the result metadata for
* request A.
- * Note that when enableZsl is true, it is not guaranteed to get output images captured in the
- * past for requests with STILL_CAPTURE capture intent.
+ * Note that when enableZsl is true, it is not guaranteed to get output images captured in
+ * the past for requests with STILL_CAPTURE capture intent.
* For applications targeting SDK versions O and newer, the value of enableZsl in
* TEMPLATE_STILL_CAPTURE template may be true. The value in other templates is always
* false if present.
@@ -1638,6 +1652,26 @@ typedef enum acamera_metadata_tag {
*/
ACAMERA_CONTROL_ENABLE_ZSL = // byte (acamera_metadata_enum_android_control_enable_zsl_t)
ACAMERA_CONTROL_START + 41,
+ /**
+ * Whether a significant scene change is detected within the currently-set AF
+ * region(s).
+ *
+ * Type: byte (acamera_metadata_enum_android_control_af_scene_change_t)
+ *
+ * This tag may appear in:
+ *
+ * - ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
+ *
+ * When the camera focus routine detects a change in the scene it is looking at,
+ * such as a large shift in camera viewpoint, significant motion in the scene, or a
+ * significant illumination change, this value will be set to DETECTED for a single capture
+ * result. Otherwise the value will be NOT_DETECTED. The threshold for detection is similar
+ * to what would trigger a new passive focus scan to begin in CONTINUOUS autofocus modes.
+ * This key will be available if the camera device advertises this key via {@link ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS }.
+ */
+ ACAMERA_CONTROL_AF_SCENE_CHANGE = // byte (acamera_metadata_enum_android_control_af_scene_change_t)
+ ACAMERA_CONTROL_START + 42,
ACAMERA_CONTROL_END,
/**
@@ -1879,8 +1913,8 @@ typedef enum acamera_metadata_tag {
* the thumbnail data will also be rotated.
* Note that this orientation is relative to the orientation of the camera sensor, given
* by ACAMERA_SENSOR_ORIENTATION.
- * To translate from the device orientation given by the Android sensor APIs, the following
- * sample code may be used:
+ * To translate from the device orientation given by the Android sensor APIs for camera
+ * sensors which are not EXTERNAL, the following sample code may be used:
* private int getJpegOrientation(CameraCharacteristics c, int deviceOrientation) {
* if (deviceOrientation == android.view.OrientationEventListener.ORIENTATION_UNKNOWN) return 0;
* int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);
@@ -1899,6 +1933,8 @@ typedef enum acamera_metadata_tag {
* return jpegOrientation;
* }
*
+ * For EXTERNAL cameras the sensor orientation will always be set to 0 and the facing will
+ * also be set to EXTERNAL. The above code is not relevant in such case.
*
* @see ACAMERA_SENSOR_ORIENTATION
*/
@@ -1959,14 +1995,14 @@ typedef enum acamera_metadata_tag {
* When an ACAMERA_JPEG_ORIENTATION of non-zero degree is requested,
* the camera device will handle thumbnail rotation in one of the following ways:
*
- * - Set the
- * EXIF orientation flag
+ * - Set the EXIF orientation flag
* and keep jpeg and thumbnail image data unrotated.
* - Rotate the jpeg and thumbnail image data and not set
- * EXIF orientation flag.
- * In this case, LIMITED or FULL hardware level devices will report rotated thumnail size
- * in capture result, so the width and height will be interchanged if 90 or 270 degree
- * orientation is requested. LEGACY device will always report unrotated thumbnail size.
+ * EXIF orientation flag. In this
+ * case, LIMITED or FULL hardware level devices will report rotated thumnail size in
+ * capture result, so the width and height will be interchanged if 90 or 270 degree
+ * orientation is requested. LEGACY device will always report unrotated thumbnail
+ * size.
*
*
* @see ACAMERA_JPEG_ORIENTATION
@@ -2216,37 +2252,33 @@ typedef enum acamera_metadata_tag {
*
*
* The position of the camera device's lens optical center,
- * as a three-dimensional vector (x,y,z), relative to the
- * optical center of the largest camera device facing in the
- * same direction as this camera, in the
- * Android sensor coordinate axes.
- * Note that only the axis definitions are shared with
- * the sensor coordinate system, but not the origin.
- * If this device is the largest or only camera device with a
- * given facing, then this position will be (0, 0, 0); a
- * camera device with a lens optical center located 3 cm from
- * the main sensor along the +X axis (to the right from the
- * user's perspective) will report (0.03, 0, 0).
- * To transform a pixel coordinates between two cameras
- * facing the same direction, first the source camera
- * ACAMERA_LENS_RADIAL_DISTORTION must be corrected for. Then
- * the source camera ACAMERA_LENS_INTRINSIC_CALIBRATION needs
- * to be applied, followed by the ACAMERA_LENS_POSE_ROTATION
- * of the source camera, the translation of the source camera
- * relative to the destination camera, the
- * ACAMERA_LENS_POSE_ROTATION of the destination camera, and
- * finally the inverse of ACAMERA_LENS_INTRINSIC_CALIBRATION
- * of the destination camera. This obtains a
- * radial-distortion-free coordinate in the destination
- * camera pixel coordinates.
- * To compare this against a real image from the destination
- * camera, the destination camera image then needs to be
- * corrected for radial distortion before comparison or
- * sampling.
- *
+ * as a three-dimensional vector (x,y,z).
+ * Prior to Android P, or when ACAMERA_LENS_POSE_REFERENCE is PRIMARY_CAMERA, this position
+ * is relative to the optical center of the largest camera device facing in the same
+ * direction as this camera, in the Android sensor
+ * coordinate axes. Note that only the axis definitions are shared with the sensor
+ * coordinate system, but not the origin.
+ * If this device is the largest or only camera device with a given facing, then this
+ * position will be (0, 0, 0); a camera device with a lens optical center located 3 cm
+ * from the main sensor along the +X axis (to the right from the user's perspective) will
+ * report (0.03, 0, 0).
+ * To transform a pixel coordinates between two cameras facing the same direction, first
+ * the source camera ACAMERA_LENS_DISTORTION must be corrected for. Then the source
+ * camera ACAMERA_LENS_INTRINSIC_CALIBRATION needs to be applied, followed by the
+ * ACAMERA_LENS_POSE_ROTATION of the source camera, the translation of the source camera
+ * relative to the destination camera, the ACAMERA_LENS_POSE_ROTATION of the destination
+ * camera, and finally the inverse of ACAMERA_LENS_INTRINSIC_CALIBRATION of the destination
+ * camera. This obtains a radial-distortion-free coordinate in the destination camera pixel
+ * coordinates.
+ * To compare this against a real image from the destination camera, the destination camera
+ * image then needs to be corrected for radial distortion before comparison or sampling.
+ * When ACAMERA_LENS_POSE_REFERENCE is GYROSCOPE, then this position is relative to
+ * the center of the primary gyroscope on the device.
+ *
+ * @see ACAMERA_LENS_DISTORTION
* @see ACAMERA_LENS_INTRINSIC_CALIBRATION
+ * @see ACAMERA_LENS_POSE_REFERENCE
* @see ACAMERA_LENS_POSE_ROTATION
- * @see ACAMERA_LENS_RADIAL_DISTORTION
*/
ACAMERA_LENS_POSE_TRANSLATION = // float[3]
ACAMERA_LENS_START + 7,
@@ -2356,7 +2388,7 @@ typedef enum acamera_metadata_tag {
* where (0,0) is the top-left of the
* preCorrectionActiveArraySize rectangle. Once the pose and
* intrinsic calibration transforms have been applied to a
- * world point, then the ACAMERA_LENS_RADIAL_DISTORTION
+ * world point, then the ACAMERA_LENS_DISTORTION
* transform needs to be applied, and the result adjusted to
* be in the ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE coordinate
* system (where (0, 0) is the top-left of the
@@ -2364,19 +2396,42 @@ typedef enum acamera_metadata_tag {
* coordinate of the world point for processed (non-RAW)
* output buffers.
*
+ * @see ACAMERA_LENS_DISTORTION
* @see ACAMERA_LENS_POSE_ROTATION
* @see ACAMERA_LENS_POSE_TRANSLATION
- * @see ACAMERA_LENS_RADIAL_DISTORTION
* @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
* @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
*/
ACAMERA_LENS_INTRINSIC_CALIBRATION = // float[5]
ACAMERA_LENS_START + 10,
+ ACAMERA_LENS_RADIAL_DISTORTION = // Deprecated! DO NOT USE
+ ACAMERA_LENS_START + 11,
+ /**
+ * The origin for ACAMERA_LENS_POSE_TRANSLATION.
+ *
+ * @see ACAMERA_LENS_POSE_TRANSLATION
+ *
+ * Type: byte (acamera_metadata_enum_android_lens_pose_reference_t)
+ *
+ * This tag may appear in:
+ *
+ * - ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ * Different calibration methods and use cases can produce better or worse results
+ * depending on the selected coordinate origin.
+ */
+ ACAMERA_LENS_POSE_REFERENCE = // byte (acamera_metadata_enum_android_lens_pose_reference_t)
+ ACAMERA_LENS_START + 12,
/**
* The correction coefficients to correct for this camera device's
* radial and tangential lens distortion.
+ * Replaces the deprecated ACAMERA_LENS_RADIAL_DISTORTION field, which was
+ * inconsistently defined.
*
- * Type: float[6]
+ * @see ACAMERA_LENS_RADIAL_DISTORTION
+ *
+ * Type: float[5]
*
* This tag may appear in:
*
@@ -2384,13 +2439,13 @@ typedef enum acamera_metadata_tag {
* - ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
*
*
- * Four radial distortion coefficients [kappa_0, kappa_1, kappa_2,
+ * Three radial distortion coefficients [kappa_1, kappa_2,
* kappa_3] and two tangential distortion coefficients
* [kappa_4, kappa_5] that can be used to correct the
* lens's geometric distortion with the mapping equations:
- * x_c = x_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
+ * x_c = x_i * ( 1 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
* kappa_4 * (2 * x_i * y_i) + kappa_5 * ( r^2 + 2 * x_i^2 )
- * y_c = y_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
+ * y_c = y_i * ( 1 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
* kappa_5 * (2 * x_i * y_i) + kappa_4 * ( r^2 + 2 * y_i^2 )
*
* Here, [x_c, y_c] are the coordinates to sample in the
@@ -2398,23 +2453,21 @@ typedef enum acamera_metadata_tag {
* corrected image at the coordinate [x_i, y_i]:
* correctedImage(x_i, y_i) = sample_at(x_c, y_c, inputImage)
*
- * The pixel coordinates are defined in a normalized
- * coordinate system related to the
- * ACAMERA_LENS_INTRINSIC_CALIBRATION calibration fields.
- * Both [x_i, y_i] and [x_c, y_c] have (0,0) at the
- * lens optical center [c_x, c_y]. The maximum magnitudes
- * of both x and y coordinates are normalized to be 1 at the
- * edge further from the optical center, so the range
- * for both dimensions is -1 <= x <= 1.
+ * The pixel coordinates are defined in a coordinate system
+ * related to the ACAMERA_LENS_INTRINSIC_CALIBRATION
+ * calibration fields; see that entry for details of the mapping stages.
+ * Both [x_i, y_i] and [x_c, y_c]
+ * have (0,0) at the lens optical center [c_x, c_y], and
+ * the range of the coordinates depends on the focal length
+ * terms of the intrinsic calibration.
* Finally, r represents the radial distance from the
- * optical center, r^2 = x_i^2 + y_i^2, and its magnitude
- * is therefore no larger than |r| <= sqrt(2).
+ * optical center, r^2 = x_i^2 + y_i^2.
* The distortion model used is the Brown-Conrady model.
*
* @see ACAMERA_LENS_INTRINSIC_CALIBRATION
*/
- ACAMERA_LENS_RADIAL_DISTORTION = // float[6]
- ACAMERA_LENS_START + 11,
+ ACAMERA_LENS_DISTORTION = // float[5]
+ ACAMERA_LENS_START + 13,
ACAMERA_LENS_END,
/**
@@ -2662,11 +2715,12 @@ typedef enum acamera_metadata_tag {
* into the 3 stream types as below:
*
* - Processed (but stalling): any non-RAW format with a stallDurations > 0.
- * Typically {@link AIMAGE_FORMAT_JPEG} format.
- * - Raw formats: {@link AIMAGE_FORMAT_RAW16}, {@link AIMAGE_FORMAT_RAW10}, or
- * {@link AIMAGE_FORMAT_RAW12}.
- * - Processed (but not-stalling): any non-RAW format without a stall duration.
- * Typically {@link AIMAGE_FORMAT_YUV_420_888}.
+ * Typically {@link AIMAGE_FORMAT_JPEG JPEG format}.
+ * - Raw formats: {@link AIMAGE_FORMAT_RAW16 RAW_SENSOR}, {@link AIMAGE_FORMAT_RAW10 RAW10}, or
+ * {@link AIMAGE_FORMAT_RAW12 RAW12}.
+ * - Processed (but not-stalling): any non-RAW format without a stall duration. Typically
+ * {@link AIMAGE_FORMAT_YUV_420_888 YUV_420_888},
+ * NV21, or YV12.
*
*
* @see ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS
@@ -2787,7 +2841,7 @@ typedef enum acamera_metadata_tag {
ACAMERA_REQUEST_START + 12,
/**
* A list of all keys that the camera device has available
- * to use with {@link ACaptureRequest}.
+ * to use with {@link ACaptureRequest }.
*
* Type: int32[n]
*
@@ -2809,9 +2863,7 @@ typedef enum acamera_metadata_tag {
ACAMERA_REQUEST_AVAILABLE_REQUEST_KEYS = // int32[n]
ACAMERA_REQUEST_START + 13,
/**
- * A list of all keys that the camera device has available
- * to query with {@link ACameraMetadata} from
- * {@link ACameraCaptureSession_captureCallback_result}.
+ * A list of all keys that the camera device has available to use with {@link ACameraCaptureSession_captureCallback_result }.
*
* Type: int32[n]
*
@@ -2842,9 +2894,7 @@ typedef enum acamera_metadata_tag {
ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS = // int32[n]
ACAMERA_REQUEST_START + 14,
/**
- * A list of all keys that the camera device has available
- * to query with {@link ACameraMetadata} from
- * {@link ACameraManager_getCameraCharacteristics}.
+ * A list of all keys that the camera device has available to use with {@link ACameraManager_getCameraCharacteristics }.
*
* Type: int32[n]
*
@@ -2862,6 +2912,59 @@ typedef enum acamera_metadata_tag {
*/
ACAMERA_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS = // int32[n]
ACAMERA_REQUEST_START + 15,
+ /**
+ * A subset of the available request keys that the camera device
+ * can pass as part of the capture session initialization.
+ *
+ * Type: int32[n]
+ *
+ * This tag may appear in:
+ *
+ * - ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ * This is a subset of ACAMERA_REQUEST_AVAILABLE_REQUEST_KEYS which
+ * contains a list of keys that are difficult to apply per-frame and
+ * can result in unexpected delays when modified during the capture session
+ * lifetime. Typical examples include parameters that require a
+ * time-consuming hardware re-configuration or internal camera pipeline
+ * change. For performance reasons we advise clients to pass their initial
+ * values as part of
+ * {@link ACameraDevice_createCaptureSessionWithSessionParameters }.
+ * Once the camera capture session is enabled it is also recommended to avoid
+ * changing them from their initial values set in
+ * {@link ACameraDevice_createCaptureSessionWithSessionParameters }.
+ * Control over session parameters can still be exerted in capture requests
+ * but clients should be aware and expect delays during their application.
+ * An example usage scenario could look like this:
+ *
+ * - The camera client starts by quering the session parameter key list via
+ * {@link ACameraManager_getCameraCharacteristics }.
+ * - Before triggering the capture session create sequence, a capture request
+ * must be built via
+ * {@link ACameraDevice_createCaptureRequest }
+ * using an appropriate template matching the particular use case.
+ * - The client should go over the list of session parameters and check
+ * whether some of the keys listed matches with the parameters that
+ * they intend to modify as part of the first capture request.
+ * - If there is no such match, the capture request can be passed
+ * unmodified to
+ * {@link ACameraDevice_createCaptureSessionWithSessionParameters }.
+ * - If matches do exist, the client should update the respective values
+ * and pass the request to
+ * {@link ACameraDevice_createCaptureSessionWithSessionParameters }.
+ * - After the capture session initialization completes the session parameter
+ * key list can continue to serve as reference when posting or updating
+ * further requests. As mentioned above further changes to session
+ * parameters should ideally be avoided, if updates are necessary
+ * however clients could expect a delay/glitch during the
+ * parameter switch.
+ *
+ *
+ * @see ACAMERA_REQUEST_AVAILABLE_REQUEST_KEYS
+ */
+ ACAMERA_REQUEST_AVAILABLE_SESSION_KEYS = // int32[n]
+ ACAMERA_REQUEST_START + 16,
ACAMERA_REQUEST_END,
/**
@@ -2876,7 +2979,6 @@ typedef enum acamera_metadata_tag {
*
*
* This control can be used to implement digital zoom.
- * The data representation is int[4], which maps to (left, top, width, height).
* The crop region coordinate system is based off
* ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with (0, 0) being the
* top-left corner of the sensor active array.
@@ -2906,6 +3008,7 @@ typedef enum acamera_metadata_tag {
* for rounding and other hardware requirements; the final
* crop region used will be included in the output capture
* result.
+ * The data representation is int[4], which maps to (left, top, width, height).
*
* @see ACAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM
* @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
@@ -3061,13 +3164,14 @@ typedef enum acamera_metadata_tag {
* ignored).
* The following formats may always have a stall duration:
*
- * - {@link AIMAGE_FORMAT_JPEG}
- * - {@link AIMAGE_FORMAT_RAW16}
+ * - {@link AIMAGE_FORMAT_JPEG }
+ * - {@link AIMAGE_FORMAT_RAW16 }
*
* The following formats will never have a stall duration:
*
- * - {@link AIMAGE_FORMAT_YUV_420_888}
- * - {@link AIMAGE_FORMAT_RAW10}
+ * - {@link AIMAGE_FORMAT_YUV_420_888 }
+ * - {@link AIMAGE_FORMAT_RAW10 }
+ * - {@link AIMAGE_FORMAT_RAW12 }
*
* All other formats may or may not have an allowed stall duration on
* a per-capability basis; refer to ACAMERA_REQUEST_AVAILABLE_CAPABILITIES
@@ -3177,39 +3281,29 @@ typedef enum acamera_metadata_tag {
* can run concurrently to the rest of the camera pipeline, but
* cannot process more than 1 capture at a time.
*
- * The necessary information for the application, given the model above,
- * is provided via
- * {@link ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}.
- * These are used to determine the maximum frame rate / minimum frame
- * duration that is possible for a given stream configuration.
+ * The necessary information for the application, given the model above, is provided via
+ * {@link ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS }.
+ * These are used to determine the maximum frame rate / minimum frame duration that is
+ * possible for a given stream configuration.
* Specifically, the application can use the following rules to
* determine the minimum frame duration it can request from the camera
* device:
*
- * - Let the set of currently configured input/output streams
- * be called S.
- * - Find the minimum frame durations for each stream in S, by looking
- * it up in {@link ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}
- * (with its respective size/format). Let this set of frame durations be
- * called F.
- * - For any given request R, the minimum frame duration allowed
- * for R is the maximum out of all values in F. Let the streams
- * used in R be called S_r.
+ * - Let the set of currently configured input/output streams be called S.
+ * - Find the minimum frame durations for each stream in S, by looking it up in {@link ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS }
+ * (with its respective size/format). Let this set of frame durations be called F.
+ * - For any given request R, the minimum frame duration allowed for R is the maximum
+ * out of all values in F. Let the streams used in R be called S_r.
*
- * If none of the streams in S_r have a stall time (listed in {@link
- * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS}
- * using its respective size/format), then the frame duration in F
- * determines the steady state frame rate that the application will get
- * if it uses R as a repeating request. Let this special kind of
- * request be called Rsimple.
- * A repeating request Rsimple can be occasionally interleaved
- * by a single capture of a new request Rstall (which has at least
- * one in-use stream with a non-0 stall time) and if Rstall has the
- * same minimum frame duration this will not cause a frame rate loss
- * if all buffers from the previous Rstall have already been
- * delivered.
- * For more details about stalling, see
- * {@link ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS}.
+ * If none of the streams in S_r have a stall time (listed in {@link ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS }
+ * using its respective size/format), then the frame duration in F determines the steady
+ * state frame rate that the application will get if it uses R as a repeating request. Let
+ * this special kind of request be called Rsimple.
+ * A repeating request Rsimple can be occasionally interleaved by a single capture of a
+ * new request Rstall (which has at least one in-use stream with a non-0 stall time) and if
+ * Rstall has the same minimum frame duration this will not cause a frame rate loss if all
+ * buffers from the previous Rstall have already been delivered.
+ * For more details about stalling, see {@link ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS }.
* This control is only effective if ACAMERA_CONTROL_AE_MODE or ACAMERA_CONTROL_MODE is set to
* OFF; otherwise the auto-exposure algorithm will override this value.
*
@@ -3567,14 +3661,12 @@ typedef enum acamera_metadata_tag {
* timestamps for other captures from the same camera device, but are
* not guaranteed to be comparable to any other time source.
* When ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE == REALTIME, the
- * timestamps measure time in the same timebase as
- * elapsedRealtimeNanos
- * (or CLOCK_BOOTTIME), and they can
+ * timestamps measure time in the same timebase as SystemClock#elapsedRealtimeNanos, and they can
* be compared to other timestamps from other subsystems that
* are using that base.
* For reprocessing, the timestamp will match the start of exposure of
- * the input image, i.e. {@link CaptureResult#SENSOR_TIMESTAMP the
- * timestamp} in the TotalCaptureResult that was used to create the
+ * the input image, i.e. the
+ * timestamp in the TotalCaptureResult that was used to create the
* reprocess capture request.
*
* @see ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE
@@ -3775,7 +3867,6 @@ typedef enum acamera_metadata_tag {
* optically shielded pixel areas. By blocking light, these pixels
* provides a reliable black reference for black level compensation
* in active array region.
- * The data representation is int[4], which maps to (left, top, width, height).
* This key provides a list of disjoint rectangles specifying the
* regions of optically shielded (with metal shield) black pixel
* regions if the camera device is capable of reading out these black
@@ -3785,6 +3876,7 @@ typedef enum acamera_metadata_tag {
* black level of each captured raw images.
* When this key is reported, the ACAMERA_SENSOR_DYNAMIC_BLACK_LEVEL and
* ACAMERA_SENSOR_DYNAMIC_WHITE_LEVEL will also be reported.
+ * The data representation is int[4], which maps to (left, top, width, height).
*
* @see ACAMERA_SENSOR_BLACK_LEVEL_PATTERN
* @see ACAMERA_SENSOR_DYNAMIC_BLACK_LEVEL
@@ -3825,9 +3917,8 @@ typedef enum acamera_metadata_tag {
* layout key (see ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT), i.e. the
* nth value given corresponds to the black level offset for the nth
* color channel listed in the CFA.
- * This key will be available if ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS is
- * available or the camera device advertises this key via
- * {@link ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS}.
+ * This key will be available if ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS is available or the
+ * camera device advertises this key via {@link ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS }.
*
* @see ACAMERA_SENSOR_BLACK_LEVEL_PATTERN
* @see ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
@@ -3853,7 +3944,7 @@ typedef enum acamera_metadata_tag {
* estimated white level for each frame.
* This key will be available if ACAMERA_SENSOR_OPTICAL_BLACK_REGIONS is
* available or the camera device advertises this key via
- * {@link ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS}.
+ * {@link ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS }.
*
* @see ACAMERA_SENSOR_BLACK_LEVEL_PATTERN
* @see ACAMERA_SENSOR_INFO_WHITE_LEVEL
@@ -3882,13 +3973,13 @@ typedef enum acamera_metadata_tag {
* This rectangle is defined relative to the full pixel array; (0,0) is the top-left of
* the full pixel array, and the size of the full pixel array is given by
* ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE.
- * The data representation is int[4], which maps to (left, top, width, height).
* The coordinate system for most other keys that list pixel coordinates, including
* ACAMERA_SCALER_CROP_REGION, is defined relative to the active array rectangle given in
* this field, with (0, 0) being the top-left of this rectangle.
* The active array may be smaller than the full pixel array, since the full array may
* include black calibration pixels or other inactive regions, and geometric correction
* resulting in scaling or cropping may have been applied.
+ * The data representation is int[4], which maps to (left, top, width, height).
*
* @see ACAMERA_SCALER_CROP_REGION
* @see ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE
@@ -3960,8 +4051,7 @@ typedef enum acamera_metadata_tag {
* Attempting to use frame durations beyond the maximum will result in the frame
* duration being clipped to the maximum. See that control for a full definition of frame
* durations.
- * Refer to {@link
- * ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}
+ * Refer to {@link ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS }
* for the minimum frame duration values.
*/
ACAMERA_SENSOR_INFO_MAX_FRAME_DURATION = // int64
@@ -4000,9 +4090,9 @@ typedef enum acamera_metadata_tag {
* the raw buffers produced by this sensor.
* If a camera device supports raw sensor formats, either this or
* ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE is the maximum dimensions for the raw
- * output formats listed in ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS (this depends on
- * whether or not the image sensor returns buffers containing pixels that are not
- * part of the active array region for blacklevel calibration or other purposes).
+ * output formats listed in {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS }
+ * (this depends on whether or not the image sensor returns buffers containing pixels that
+ * are not part of the active array region for blacklevel calibration or other purposes).
* Some parts of the full pixel array may not receive light from the scene,
* or be otherwise inactive. The ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE key
* defines the rectangle of active pixels that will be included in processed image
@@ -4092,7 +4182,6 @@ typedef enum acamera_metadata_tag {
*
ACameraMetadata from ACameraManager_getCameraCharacteristics
*
*
- * The data representation is int[4], which maps to (left, top, width, height).
* This is the rectangle representing the size of the active region of the sensor (i.e.
* the region that actually receives light from the scene) before any geometric correction
* has been applied, and should be treated as the active region rectangle for any of the
@@ -4133,7 +4222,7 @@ typedef enum acamera_metadata_tag {
* ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.
* The currently supported fields that correct for geometric distortion are:
*
- * - ACAMERA_LENS_RADIAL_DISTORTION.
+ * - ACAMERA_LENS_DISTORTION.
*
* If all of the geometric distortion fields are no-ops, this rectangle will be the same
* as the post-distortion-corrected rectangle given in
@@ -4143,8 +4232,9 @@ typedef enum acamera_metadata_tag {
* ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE.
* The pre-correction active array may be smaller than the full pixel array, since the
* full array may include black calibration pixels or other inactive regions.
+ * The data representation is int[4], which maps to (left, top, width, height).
*
- * @see ACAMERA_LENS_RADIAL_DISTORTION
+ * @see ACAMERA_LENS_DISTORTION
* @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
* @see ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE
* @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
@@ -4302,10 +4392,10 @@ typedef enum acamera_metadata_tag {
* ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
*
*
- * The data representation is int[4], which maps to (left, top, width, height).
* The coordinate system is that of ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with
* (0, 0) being the top-left pixel of the active array.
- * Only available if ACAMERA_STATISTICS_FACE_DETECT_MODE != OFF
+ * Only available if ACAMERA_STATISTICS_FACE_DETECT_MODE != OFF
+ * The data representation is int[4], which maps to (left, top, width, height).
*
* @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
* @see ACAMERA_STATISTICS_FACE_DETECT_MODE
@@ -4483,6 +4573,80 @@ typedef enum acamera_metadata_tag {
*/
ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE = // byte (acamera_metadata_enum_android_statistics_lens_shading_map_mode_t)
ACAMERA_STATISTICS_START + 16,
+ /**
+ * A control for selecting whether OIS position information is included in output
+ * result metadata.
+ *
+ * Type: byte (acamera_metadata_enum_android_statistics_ois_data_mode_t)
+ *
+ * This tag may appear in:
+ *
+ * - ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ * - ACaptureRequest
+ *
+ *
+ */
+ ACAMERA_STATISTICS_OIS_DATA_MODE = // byte (acamera_metadata_enum_android_statistics_ois_data_mode_t)
+ ACAMERA_STATISTICS_START + 17,
+ /**
+ * An array of timestamps of OIS samples, in nanoseconds.
+ *
+ * Type: int64[n]
+ *
+ * This tag may appear in:
+ *
+ * - ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
+ *
+ * The array contains the timestamps of OIS samples. The timestamps are in the same
+ * timebase as and comparable to ACAMERA_SENSOR_TIMESTAMP.
+ *
+ * @see ACAMERA_SENSOR_TIMESTAMP
+ */
+ ACAMERA_STATISTICS_OIS_TIMESTAMPS = // int64[n]
+ ACAMERA_STATISTICS_START + 18,
+ /**
+ * An array of shifts of OIS samples, in x direction.
+ *
+ * Type: float[n]
+ *
+ * This tag may appear in:
+ *
+ * - ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
+ *
+ * The array contains the amount of shifts in x direction, in pixels, based on OIS samples.
+ * A positive value is a shift from left to right in active array coordinate system. For
+ * example, if the optical center is (1000, 500) in active array coordinates, a shift of
+ * (3, 0) puts the new optical center at (1003, 500).
+ * The number of shifts must match the number of timestamps in
+ * ACAMERA_STATISTICS_OIS_TIMESTAMPS.
+ *
+ * @see ACAMERA_STATISTICS_OIS_TIMESTAMPS
+ */
+ ACAMERA_STATISTICS_OIS_X_SHIFTS = // float[n]
+ ACAMERA_STATISTICS_START + 19,
+ /**
+ * An array of shifts of OIS samples, in y direction.
+ *
+ * Type: float[n]
+ *
+ * This tag may appear in:
+ *
+ * - ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ *
+ *
+ * The array contains the amount of shifts in y direction, in pixels, based on OIS samples.
+ * A positive value is a shift from top to bottom in active array coordinate system. For
+ * example, if the optical center is (1000, 500) in active array coordinates, a shift of
+ * (0, 5) puts the new optical center at (1000, 505).
+ * The number of shifts must match the number of timestamps in
+ * ACAMERA_STATISTICS_OIS_TIMESTAMPS.
+ *
+ * @see ACAMERA_STATISTICS_OIS_TIMESTAMPS
+ */
+ ACAMERA_STATISTICS_OIS_Y_SHIFTS = // float[n]
+ ACAMERA_STATISTICS_START + 20,
ACAMERA_STATISTICS_END,
/**
@@ -4555,6 +4719,24 @@ typedef enum acamera_metadata_tag {
*/
ACAMERA_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES = // byte[n]
ACAMERA_STATISTICS_INFO_START + 7,
+ /**
+ * List of OIS data output modes for ACAMERA_STATISTICS_OIS_DATA_MODE that
+ * are supported by this camera device.
+ *
+ * @see ACAMERA_STATISTICS_OIS_DATA_MODE
+ *
+ * Type: byte[n]
+ *
+ * This tag may appear in:
+ *
+ * - ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ * If no OIS data output is available for this camera device, this key will
+ * contain only OFF.
+ */
+ ACAMERA_STATISTICS_INFO_AVAILABLE_OIS_DATA_MODES = // byte[n]
+ ACAMERA_STATISTICS_INFO_START + 8,
ACAMERA_STATISTICS_INFO_END,
/**
@@ -4627,6 +4809,8 @@ typedef enum acamera_metadata_tag {
* of points can be less than max (that is, the request doesn't have to
* always provide a curve with number of points equivalent to
* ACAMERA_TONEMAP_MAX_CURVE_POINTS).
+ * For devices with MONOCHROME capability, only red channel is used. Green and blue channels
+ * are ignored.
* A few examples, and their corresponding graphical mappings; these
* only specify the red channel and the precision is limited to 4
* digits, for conciseness.
@@ -4832,7 +5016,7 @@ typedef enum acamera_metadata_tag {
* See the individual level enums for full descriptions of the supported capabilities. The
* ACAMERA_REQUEST_AVAILABLE_CAPABILITIES entry describes the device's capabilities at a
* finer-grain level, if needed. In addition, many controls have their available settings or
- * ranges defined in individual metadata tag entries in this document.
+ * ranges defined in individual entries from {@link ACameraManager_getCameraCharacteristics }.
* Some features are not part of any particular hardware level or capability and must be
* queried separately. These include:
*
@@ -4853,6 +5037,23 @@ typedef enum acamera_metadata_tag {
*/
ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL = // byte (acamera_metadata_enum_android_info_supported_hardware_level_t)
ACAMERA_INFO_START,
+ /**
+ * A short string for manufacturer version information about the camera device, such as
+ * ISP hardware, sensors, etc.
+ *
+ * Type: byte
+ *
+ * This tag may appear in:
+ *
+ * - ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ * This can be used in TAG_IMAGE_DESCRIPTION
+ * in jpeg EXIF. This key may be absent if no version information is available on the
+ * device.
+ */
+ ACAMERA_INFO_VERSION = // byte
+ ACAMERA_INFO_START + 1,
ACAMERA_INFO_END,
/**
@@ -5069,6 +5270,86 @@ typedef enum acamera_metadata_tag {
ACAMERA_DEPTH_START + 4,
ACAMERA_DEPTH_END,
+ /**
+ * The accuracy of frame timestamp synchronization between physical cameras
+ *
+ * Type: byte (acamera_metadata_enum_android_logical_multi_camera_sensor_sync_type_t)
+ *
+ * This tag may appear in:
+ *
+ * - ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ * The accuracy of the frame timestamp synchronization determines the physical cameras'
+ * ability to start exposure at the same time. If the sensorSyncType is CALIBRATED,
+ * the physical camera sensors usually run in master-slave mode so that their shutter
+ * time is synchronized. For APPROXIMATE sensorSyncType, the camera sensors usually run in
+ * master-master mode, and there could be offset between their start of exposure.
+ * In both cases, all images generated for a particular capture request still carry the same
+ * timestamps, so that they can be used to look up the matching frame number and
+ * onCaptureStarted callback.
+ */
+ ACAMERA_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE = // byte (acamera_metadata_enum_android_logical_multi_camera_sensor_sync_type_t)
+ ACAMERA_LOGICAL_MULTI_CAMERA_START + 1,
+ ACAMERA_LOGICAL_MULTI_CAMERA_END,
+
+ /**
+ * Mode of operation for the lens distortion correction block.
+ *
+ * Type: byte (acamera_metadata_enum_android_distortion_correction_mode_t)
+ *
+ * This tag may appear in:
+ *
+ * - ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks
+ * - ACaptureRequest
+ *
+ *
+ * The lens distortion correction block attempts to improve image quality by fixing
+ * radial, tangential, or other geometric aberrations in the camera device's optics. If
+ * available, the ACAMERA_LENS_DISTORTION field documents the lens's distortion parameters.
+ * OFF means no distortion correction is done.
+ * FAST/HIGH_QUALITY both mean camera device determined distortion correction will be
+ * applied. HIGH_QUALITY mode indicates that the camera device will use the highest-quality
+ * correction algorithms, even if it slows down capture rate. FAST means the camera device
+ * will not slow down capture rate when applying correction. FAST may be the same as OFF if
+ * any correction at all would slow down capture rate. Every output stream will have a
+ * similar amount of enhancement applied.
+ * The correction only applies to processed outputs such as YUV, JPEG, or DEPTH16; it is not
+ * applied to any RAW output. Metadata coordinates such as face rectangles or metering
+ * regions are also not affected by correction.
+ * Applications enabling distortion correction need to pay extra attention when converting
+ * image coordinates between corrected output buffers and the sensor array. For example, if
+ * the app supports tap-to-focus and enables correction, it then has to apply the distortion
+ * model described in ACAMERA_LENS_DISTORTION to the image buffer tap coordinates to properly
+ * calculate the tap position on the sensor active array to be used with
+ * ACAMERA_CONTROL_AF_REGIONS. The same applies in reverse to detected face rectangles if
+ * they need to be drawn on top of the corrected output buffers.
+ *
+ * @see ACAMERA_CONTROL_AF_REGIONS
+ * @see ACAMERA_LENS_DISTORTION
+ */
+ ACAMERA_DISTORTION_CORRECTION_MODE = // byte (acamera_metadata_enum_android_distortion_correction_mode_t)
+ ACAMERA_DISTORTION_CORRECTION_START,
+ /**
+ * List of distortion correction modes for ACAMERA_DISTORTION_CORRECTION_MODE that are
+ * supported by this camera device.
+ *
+ * @see ACAMERA_DISTORTION_CORRECTION_MODE
+ *
+ * Type: byte[n]
+ *
+ * This tag may appear in:
+ *
+ * - ACameraMetadata from ACameraManager_getCameraCharacteristics
+ *
+ *
+ * No device is required to support this API; such devices will always list only 'OFF'.
+ * All devices that support this API will list both FAST and HIGH_QUALITY.
+ */
+ ACAMERA_DISTORTION_CORRECTION_AVAILABLE_MODES = // byte[n]
+ ACAMERA_DISTORTION_CORRECTION_START + 1,
+ ACAMERA_DISTORTION_CORRECTION_END,
+
} acamera_metadata_tag_t;
/**
@@ -5282,6 +5563,21 @@ typedef enum acamera_metadata_enum_acamera_control_ae_mode {
*/
ACAMERA_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE = 4,
+ /**
+ * An external flash has been turned on.
+ * It informs the camera device that an external flash has been turned on, and that
+ * metering (and continuous focus if active) should be quickly recaculated to account
+ * for the external flash. Otherwise, this mode acts like ON.
+ * When the external flash is turned off, AE mode should be changed to one of the
+ * other available AE modes.
+ * If the camera device supports AE external flash mode, ACAMERA_CONTROL_AE_STATE must
+ * be FLASH_REQUIRED after the camera device finishes AE scan and it's too dark without
+ * flash.
+ *
+ * @see ACAMERA_CONTROL_AE_STATE
+ */
+ ACAMERA_CONTROL_AE_MODE_ON_EXTERNAL_FLASH = 5,
+
} acamera_metadata_enum_android_control_ae_mode_t;
// ACAMERA_CONTROL_AE_PRECAPTURE_TRIGGER
@@ -5645,6 +5941,15 @@ typedef enum acamera_metadata_enum_acamera_control_capture_intent {
*/
ACAMERA_CONTROL_CAPTURE_INTENT_MANUAL = 6,
+ /**
+ * This request is for a motion tracking use case, where
+ * the application will use camera and inertial sensor data to
+ * locate and track objects in the world.
+ * The camera device auto-exposure routine will limit the exposure time
+ * of the camera to no more than 20 milliseconds, to minimize motion blur.
+ */
+ ACAMERA_CONTROL_CAPTURE_INTENT_MOTION_TRACKING = 7,
+
} acamera_metadata_enum_android_control_capture_intent_t;
// ACAMERA_CONTROL_EFFECT_MODE
@@ -6135,6 +6440,20 @@ typedef enum acamera_metadata_enum_acamera_control_enable_zsl {
} acamera_metadata_enum_android_control_enable_zsl_t;
+// ACAMERA_CONTROL_AF_SCENE_CHANGE
+typedef enum acamera_metadata_enum_acamera_control_af_scene_change {
+ /**
+ * Scene change is not detected within the AF region(s).
+ */
+ ACAMERA_CONTROL_AF_SCENE_CHANGE_NOT_DETECTED = 0,
+
+ /**
+ * Scene change is detected within the AF region(s).
+ */
+ ACAMERA_CONTROL_AF_SCENE_CHANGE_DETECTED = 1,
+
+} acamera_metadata_enum_android_control_af_scene_change_t;
+
// ACAMERA_EDGE_MODE
@@ -6157,13 +6476,13 @@ typedef enum acamera_metadata_enum_acamera_edge_mode {
ACAMERA_EDGE_MODE_HIGH_QUALITY = 2,
/**
- * Edge enhancement is applied at different levels for different output streams,
- * based on resolution. Streams at maximum recording resolution (see {@link
- * ACameraDevice_createCaptureSession}) or below have
- * edge enhancement applied, while higher-resolution streams have no edge enhancement
- * applied. The level of edge enhancement for low-resolution streams is tuned so that
- * frame rate is not impacted, and the quality is equal to or better than FAST (since it
- * is only applied to lower-resolution outputs, quality may improve from FAST).
+ * Edge enhancement is applied at different
+ * levels for different output streams, based on resolution. Streams at maximum recording
+ * resolution (see {@link ACameraDevice_createCaptureSession })
+ * or below have edge enhancement applied, while higher-resolution streams have no edge
+ * enhancement applied. The level of edge enhancement for low-resolution streams is tuned
+ * so that frame rate is not impacted, and the quality is equal to or better than FAST
+ * (since it is only applied to lower-resolution outputs, quality may improve from FAST).
* This mode is intended to be used by applications operating in a zero-shutter-lag mode
* with YUV or PRIVATE reprocessing, where the application continuously captures
* high-resolution intermediate buffers into a circular buffer, from which a final image is
@@ -6342,6 +6661,27 @@ typedef enum acamera_metadata_enum_acamera_lens_state {
} acamera_metadata_enum_android_lens_state_t;
+// ACAMERA_LENS_POSE_REFERENCE
+typedef enum acamera_metadata_enum_acamera_lens_pose_reference {
+ /**
+ *
+ * The value of ACAMERA_LENS_POSE_TRANSLATION is relative to the optical center of
+ * the largest camera device facing the same direction as this camera.
+ * This is the default value for API levels before Android P.
+ *
+ * @see ACAMERA_LENS_POSE_TRANSLATION
+ */
+ ACAMERA_LENS_POSE_REFERENCE_PRIMARY_CAMERA = 0,
+
+ /**
+ * The value of ACAMERA_LENS_POSE_TRANSLATION is relative to the position of the
+ * primary gyroscope of this Android device.
+ *
+ * @see ACAMERA_LENS_POSE_TRANSLATION
+ */
+ ACAMERA_LENS_POSE_REFERENCE_GYROSCOPE = 1,
+
+} acamera_metadata_enum_android_lens_pose_reference_t;
+
// ACAMERA_LENS_INFO_FOCUS_DISTANCE_CALIBRATION
typedef enum acamera_metadata_enum_acamera_lens_info_focus_distance_calibration {
@@ -6412,13 +6752,12 @@ typedef enum acamera_metadata_enum_acamera_noise_reduction_mode {
/**
* Noise reduction is applied at different levels for different output streams,
- * based on resolution. Streams at maximum recording resolution (see {@link
- * ACameraDevice_createCaptureSession}) or below have noise
- * reduction applied, while higher-resolution streams have MINIMAL (if supported) or no
- * noise reduction applied (if MINIMAL is not supported.) The degree of noise reduction
- * for low-resolution streams is tuned so that frame rate is not impacted, and the quality
- * is equal to or better than FAST (since it is only applied to lower-resolution outputs,
- * quality may improve from FAST).
+ * based on resolution. Streams at maximum recording resolution (see {@link ACameraDevice_createCaptureSession })
+ * or below have noise reduction applied, while higher-resolution streams have MINIMAL (if
+ * supported) or no noise reduction applied (if MINIMAL is not supported.) The degree of
+ * noise reduction for low-resolution streams is tuned so that frame rate is not impacted,
+ * and the quality is equal to or better than FAST (since it is only applied to
+ * lower-resolution outputs, quality may improve from FAST).
* This mode is intended to be used by applications operating in a zero-shutter-lag mode
* with YUV or PRIVATE reprocessing, where the application continuously captures
* high-resolution intermediate buffers into a circular buffer, from which a final image is
@@ -6635,18 +6974,16 @@ typedef enum acamera_metadata_enum_acamera_request_available_capabilities {
* to FAST. Additionally, maximum-resolution images can be captured at >= 10 frames
* per second. Here, 'high resolution' means at least 8 megapixels, or the maximum
* resolution of the device, whichever is smaller.
- * More specifically, this means that at least one output {@link
- * AIMAGE_FORMAT_YUV_420_888} size listed in
- * {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS} is larger or equal to the
- * 'high resolution' defined above, and can be captured at at least 20 fps.
- * For the largest {@link AIMAGE_FORMAT_YUV_420_888} size listed in
- * {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS}, camera device can capture this
- * size for at least 10 frames per second.
- * Also the ACAMERA_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES entry lists at least one FPS range
- * where the minimum FPS is >= 1 / minimumFrameDuration for the largest YUV_420_888 size.
- * If the device supports the {@link AIMAGE_FORMAT_RAW10}, {@link
- * AIMAGE_FORMAT_RAW12}, then those can also be captured at the same rate
- * as the maximum-size YUV_420_888 resolution is.
+ * More specifically, this means that at least one output {@link AIMAGE_FORMAT_YUV_420_888 } size listed in
+ * {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS }
+ * is larger or equal to the 'high resolution' defined above, and can be captured at at
+ * least 20 fps. For the largest {@link AIMAGE_FORMAT_YUV_420_888 } size listed in
+ * {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS },
+ * camera device can capture this size for at least 10 frames per second. Also the
+ * ACAMERA_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES entry lists at least one FPS range where
+ * the minimum FPS is >= 1 / minimumFrameDuration for the largest YUV_420_888 size.
+ * If the device supports the {@link AIMAGE_FORMAT_RAW10 }, {@link AIMAGE_FORMAT_RAW12 }, then those can also be
+ * captured at the same rate as the maximum-size YUV_420_888 resolution is.
* In addition, the ACAMERA_SYNC_MAX_LATENCY field is guaranted to have a value between 0
* and 4, inclusive. ACAMERA_CONTROL_AE_LOCK_AVAILABLE and ACAMERA_CONTROL_AWB_LOCK_AVAILABLE
 * are also guaranteed to be true so burst capture with these two locks ON yields
@@ -6663,42 +7000,114 @@ typedef enum acamera_metadata_enum_acamera_request_available_capabilities {
 *
 * The camera device can produce depth measurements from its field of view.
* This capability requires the camera device to support the following:
*
- * - {@link AIMAGE_FORMAT_DEPTH16} is supported as an output format.
- * - {@link AIMAGE_FORMAT_DEPTH_POINT_CLOUD} is optionally supported as an
- * output format.
- * - This camera device, and all camera devices with the same ACAMERA_LENS_FACING,
- * will list the following calibration entries in {@link ACameraMetadata} from both
- * {@link ACameraManager_getCameraCharacteristics} and
- * {@link ACameraCaptureSession_captureCallback_result}:
+ * - {@link AIMAGE_FORMAT_DEPTH16 } is supported as
+ * an output format.
+ * - {@link AIMAGE_FORMAT_DEPTH_POINT_CLOUD } is
+ * optionally supported as an output format.
+ * - This camera device, and all camera devices with the same ACAMERA_LENS_FACING, will
+ * list the following calibration metadata entries in both {@link ACameraManager_getCameraCharacteristics }
+ * and {@link ACameraCaptureSession_captureCallback_result }:
* - ACAMERA_LENS_POSE_TRANSLATION
* - ACAMERA_LENS_POSE_ROTATION
* - ACAMERA_LENS_INTRINSIC_CALIBRATION
- * - ACAMERA_LENS_RADIAL_DISTORTION
+ * - ACAMERA_LENS_DISTORTION
*
*
* - The ACAMERA_DEPTH_DEPTH_IS_EXCLUSIVE entry is listed by this device.
+ * - As of Android P, the ACAMERA_LENS_POSE_REFERENCE entry is listed by this device.
* - A LIMITED camera with only the DEPTH_OUTPUT capability does not have to support
* normal YUV_420_888, JPEG, and PRIV-format outputs. It only has to support the DEPTH16
* format.
*
* Generally, depth output operates at a slower frame rate than standard color capture,
* so the DEPTH16 and DEPTH_POINT_CLOUD formats will commonly have a stall duration that
- * should be accounted for (see
- * {@link ACAMERA_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS}).
+ * should be accounted for (see {@link ACAMERA_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS }).
* On a device that supports both depth and color-based output, to enable smooth preview,
* using a repeating burst is recommended, where a depth-output target is only included
* once every N frames, where N is the ratio between preview output rate and depth output
* rate, including depth stall time.
*
* @see ACAMERA_DEPTH_DEPTH_IS_EXCLUSIVE
+ * @see ACAMERA_LENS_DISTORTION
* @see ACAMERA_LENS_FACING
* @see ACAMERA_LENS_INTRINSIC_CALIBRATION
+ * @see ACAMERA_LENS_POSE_REFERENCE
* @see ACAMERA_LENS_POSE_ROTATION
* @see ACAMERA_LENS_POSE_TRANSLATION
- * @see ACAMERA_LENS_RADIAL_DISTORTION
*/
ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT = 8,
+ /**
+ * The camera device supports the MOTION_TRACKING value for
+ * ACAMERA_CONTROL_CAPTURE_INTENT, which limits maximum exposure time to 20 ms.
+ * This limits the motion blur of capture images, resulting in better image tracking
+ * results for use cases such as image stabilization or augmented reality.
+ *
+ * @see ACAMERA_CONTROL_CAPTURE_INTENT
+ */
+ ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_MOTION_TRACKING = 10,
+
+ /**
+ * The camera device is a logical camera backed by two or more physical cameras that are
+ * also exposed to the application.
+ * Camera application shouldn't assume that there are at most 1 rear camera and 1 front
+ * camera in the system. For an application that switches between front and back cameras,
+ * the recommendation is to switch between the first rear camera and the first front
+ * camera in the list of supported camera devices.
+ * This capability requires the camera device to support the following:
+ *
+ * - This camera device must list the following static metadata entries in CameraCharacteristics:
+ * - android.logicalMultiCamera.physicalIds
+ * - ACAMERA_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE
+ *
+ *
+ * - The underlying physical cameras' static metadata must list the following entries,
+ * so that the application can correlate pixels from the physical streams:
+ * - ACAMERA_LENS_POSE_REFERENCE
+ * - ACAMERA_LENS_POSE_ROTATION
+ * - ACAMERA_LENS_POSE_TRANSLATION
+ * - ACAMERA_LENS_INTRINSIC_CALIBRATION
+ * - ACAMERA_LENS_DISTORTION
+ *
+ *
+ * - The SENSOR_INFO_TIMESTAMP_SOURCE of the logical device and physical devices must be
+ * the same.
+ * - The logical camera device must be LIMITED or higher device.
+ *
+ * Both the logical camera device and its underlying physical devices support the
+ * mandatory stream combinations required for their device levels.
+ * Additionally, for each guaranteed stream combination, the logical camera supports:
+ *
+ * - For each guaranteed stream combination, the logical camera supports replacing one
+ * logical {@link AIMAGE_FORMAT_YUV_420_888 YUV_420_888}
+ * or raw stream with two physical streams of the same size and format, each from a
+ * separate physical camera, given that the size and format are supported by both
+ * physical cameras.
+ * - If the logical camera doesn't advertise RAW capability, but the underlying physical
+ * cameras do, the logical camera will support guaranteed stream combinations for RAW
+ * capability, except that the RAW streams will be physical streams, each from a separate
+ * physical camera. This is usually the case when the physical cameras have different
+ * sensor sizes.
+ *
+ * Using physical streams in place of a logical stream of the same size and format will
+ * not slow down the frame rate of the capture, as long as the minimum frame duration
+ * of the physical and logical streams are the same.
+ *
+ * @see ACAMERA_LENS_DISTORTION
+ * @see ACAMERA_LENS_INTRINSIC_CALIBRATION
+ * @see ACAMERA_LENS_POSE_REFERENCE
+ * @see ACAMERA_LENS_POSE_ROTATION
+ * @see ACAMERA_LENS_POSE_TRANSLATION
+ * @see ACAMERA_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE
+ */
+ ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA = 11,
+
+ /**
+ * The camera device is a monochrome camera that doesn't contain a color filter array,
+ * and the pixel values on U and V planes are all 128.
+ */
+ ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME = 12,
+
} acamera_metadata_enum_android_request_available_capabilities_t;
@@ -6918,8 +7327,8 @@ typedef enum acamera_metadata_enum_acamera_sensor_info_timestamp_source {
/**
* Timestamps from ACAMERA_SENSOR_TIMESTAMP are in the same timebase as
- * elapsedRealtimeNanos
- * (or CLOCK_BOOTTIME), and they can be compared to other timestamps using that base.
+ * SystemClock#elapsedRealtimeNanos,
+ * and they can be compared to other timestamps using that base.
*
* @see ACAMERA_SENSOR_TIMESTAMP
*/
@@ -7030,6 +7439,26 @@ typedef enum acamera_metadata_enum_acamera_statistics_lens_shading_map_mode {
} acamera_metadata_enum_android_statistics_lens_shading_map_mode_t;
+// ACAMERA_STATISTICS_OIS_DATA_MODE
+typedef enum acamera_metadata_enum_acamera_statistics_ois_data_mode {
+ /**
+ * Do not include OIS data in the capture result.
+ */
+ ACAMERA_STATISTICS_OIS_DATA_MODE_OFF = 0,
+
+ /**
+ * Include OIS data in the capture result.
+ * ACAMERA_STATISTICS_OIS_TIMESTAMPS, ACAMERA_STATISTICS_OIS_X_SHIFTS,
+ * and ACAMERA_STATISTICS_OIS_Y_SHIFTS provide OIS data in the output result metadata.
+ *
+ * @see ACAMERA_STATISTICS_OIS_TIMESTAMPS
+ * @see ACAMERA_STATISTICS_OIS_X_SHIFTS
+ * @see ACAMERA_STATISTICS_OIS_Y_SHIFTS
+ */
+ ACAMERA_STATISTICS_OIS_DATA_MODE_ON = 1,
+
+} acamera_metadata_enum_android_statistics_ois_data_mode_t;
+
// ACAMERA_TONEMAP_MODE
@@ -7104,7 +7533,7 @@ typedef enum acamera_metadata_enum_acamera_info_supported_hardware_level {
 * This camera device does not have enough capabilities to qualify as a FULL device or
 * better.
 * Only the stream configurations listed in the LEGACY and LIMITED tables in the
- * {@link ACameraDevice_createCaptureSession} documentation are guaranteed to be supported.
+ * {@link ACameraDevice_createCaptureSession createCaptureSession} documentation are guaranteed to be supported.
 * All LIMITED devices support the BACKWARDS_COMPATIBLE capability, indicating basic
 * support for color image capture. The only exception is that the device may
 * alternatively support only the DEPTH_OUTPUT capability, if it can only output depth
@@ -7130,7 +7559,7 @@ typedef enum acamera_metadata_enum_acamera_info_supported_hardware_level {
/**
 *
 * This camera device is capable of supporting advanced imaging applications.
 * The stream configurations listed in the FULL, LEGACY and LIMITED tables in the
- * {@link ACameraDevice_createCaptureSession} documentation are guaranteed to be supported.
+ * {@link ACameraDevice_createCaptureSession createCaptureSession} documentation are guaranteed to be supported.
 * A FULL device will support below capabilities:
 *
 * BURST_CAPTURE capability (ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
@@ -7157,8 +7586,7 @@ typedef enum acamera_metadata_enum_acamera_info_supported_hardware_level {
/**
* This camera device is running in backward compatibility mode.
- * Only the stream configurations listed in the LEGACY table in the {@link
- * ACameraDevice_createCaptureSession} documentation are supported.
+ * Only the stream configurations listed in the LEGACY table in the {@link ACameraDevice_createCaptureSession createCaptureSession} documentation are supported.
 * A LEGACY device does not support per-frame control, manual sensor control, manual
 * post-processing, arbitrary cropping regions, and has relaxed performance constraints.
 * No additional capabilities beyond BACKWARD_COMPATIBLE will ever be listed by a
@@ -7179,9 +7607,7 @@ typedef enum acamera_metadata_enum_acamera_info_supported_hardware_level {
 *
 * This camera device is capable of YUV reprocessing and RAW data capture, in addition to
 * FULL-level capabilities.
 * The stream configurations listed in the LEVEL_3, RAW, FULL, LEGACY and
- * LIMITED tables in the {@link
- * ACameraDevice_createCaptureSession}
- * documentation are guaranteed to be supported.
+ * LIMITED tables in the {@link ACameraDevice_createCaptureSession createCaptureSession} documentation are guaranteed to be supported.
* The following additional capabilities are guaranteed to be supported:
*
 * YUV_REPROCESSING capability (ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
@@ -7194,6 +7620,37 @@ typedef enum acamera_metadata_enum_acamera_info_supported_hardware_level {
*/
ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_3 = 3,
+ /**
+ * This camera device is backed by an external camera connected to this Android device.
+ * The device has capability identical to a LIMITED level device, with the following
+ * exceptions:
+ *
+ * - The device may not report lens/sensor related information such as
+ * - ACAMERA_LENS_FOCAL_LENGTH
+ * - ACAMERA_LENS_INFO_HYPERFOCAL_DISTANCE
+ * - ACAMERA_SENSOR_INFO_PHYSICAL_SIZE
+ * - ACAMERA_SENSOR_INFO_WHITE_LEVEL
+ * - ACAMERA_SENSOR_BLACK_LEVEL_PATTERN
+ * - ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
+ * - ACAMERA_SENSOR_ROLLING_SHUTTER_SKEW
+ *
+ *
+ * - The device will report 0 for ACAMERA_SENSOR_ORIENTATION
+ * - The device has less guarantee on stable framerate, as the framerate partly depends
+ * on the external camera being used.
+ *
+ *
+ * @see ACAMERA_LENS_FOCAL_LENGTH
+ * @see ACAMERA_LENS_INFO_HYPERFOCAL_DISTANCE
+ * @see ACAMERA_SENSOR_BLACK_LEVEL_PATTERN
+ * @see ACAMERA_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
+ * @see ACAMERA_SENSOR_INFO_PHYSICAL_SIZE
+ * @see ACAMERA_SENSOR_INFO_WHITE_LEVEL
+ * @see ACAMERA_SENSOR_ORIENTATION
+ * @see ACAMERA_SENSOR_ROLLING_SHUTTER_SKEW
+ */
+ ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL = 4,
+
} acamera_metadata_enum_android_info_supported_hardware_level_t;
@@ -7281,6 +7738,48 @@ typedef enum acamera_metadata_enum_acamera_depth_depth_is_exclusive {
} acamera_metadata_enum_android_depth_depth_is_exclusive_t;
+// ACAMERA_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE
+typedef enum acamera_metadata_enum_acamera_logical_multi_camera_sensor_sync_type {
+ /**
+ * A software mechanism is used to synchronize between the physical cameras. As a result,
+ * the timestamp of an image from a physical stream is only an approximation of the
+ * image sensor start-of-exposure time.
+ */
+ ACAMERA_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE_APPROXIMATE = 0,
+
+ /**
+ * The camera device supports frame timestamp synchronization at the hardware level,
+ * and the timestamp of a physical stream image accurately reflects its
+ * start-of-exposure time.
+ */
+ ACAMERA_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE_CALIBRATED = 1,
+
+} acamera_metadata_enum_android_logical_multi_camera_sensor_sync_type_t;
+
+
+// ACAMERA_DISTORTION_CORRECTION_MODE
+typedef enum acamera_metadata_enum_acamera_distortion_correction_mode {
+ /**
+ * No distortion correction is applied.
+ */
+ ACAMERA_DISTORTION_CORRECTION_MODE_OFF = 0,
+
+ /**
+ * Lens distortion correction is applied without reducing frame rate
+ * relative to sensor output. It may be the same as OFF if distortion correction would
+ * reduce frame rate relative to sensor.
+ */
+ ACAMERA_DISTORTION_CORRECTION_MODE_FAST = 1,
+
+ /**
+ * High-quality distortion correction is applied, at the cost of
+ * possibly reduced frame rate relative to sensor output.
+ */
+ ACAMERA_DISTORTION_CORRECTION_MODE_HIGH_QUALITY = 2,
+
+} acamera_metadata_enum_android_distortion_correction_mode_t;
+
+
#endif /* __ANDROID_API__ >= 24 */
__END_DECLS
diff --git a/camera/ndk/include/camera/NdkCaptureRequest.h b/camera/ndk/include/camera/NdkCaptureRequest.h
index c62ba2c45bed0850f4d9514055e846e536544f49..4961ce39f66b76ad18cbd53ce186ee50de7a82f4 100644
--- a/camera/ndk/include/camera/NdkCaptureRequest.h
+++ b/camera/ndk/include/camera/NdkCaptureRequest.h
@@ -305,6 +305,58 @@ void ACaptureRequest_free(ACaptureRequest* request);
#endif /* __ANDROID_API__ >= 24 */
+#if __ANDROID_API__ >= 28
+
+/**
+ * Associate an arbitrary user context pointer to the {@link ACaptureRequest}
+ *
+ * This method is useful for user to identify the capture request in capture session callbacks.
+ * The context is NULL for newly created request.
+ * {@link ACameraOutputTarget_free} will not free the context. Also calling this method twice
+ * will not cause the previous context be freed.
+ * Also note that calling this method after the request has been sent to capture session will not
+ * change the context pointer in the capture callbacks.
+ *
+ * @param request the {@link ACaptureRequest} of interest.
+ * @param context the user context pointer to be associated with this capture request.
+ *
+ * @return
+ * - {@link ACAMERA_OK} if the method call succeeds.
+ * - {@link ACAMERA_ERROR_INVALID_PARAMETER} if request is NULL.
+ */
+camera_status_t ACaptureRequest_setUserContext(
+ ACaptureRequest* request, void* context);
+
+/**
+ * Get the user context pointer of the {@link ACaptureRequest}
+ *
+ * This method is useful for user to identify the capture request in capture session callbacks.
+ * The context is NULL for newly created request.
+ *
+ * @param request the {@link ACaptureRequest} of interest.
+ * @param context the user context pointer of this capture request.
+ *
+ * @return
+ * - {@link ACAMERA_OK} if the method call succeeds.
+ * - {@link ACAMERA_ERROR_INVALID_PARAMETER} if request is NULL.
+ */
+camera_status_t ACaptureRequest_getUserContext(
+ const ACaptureRequest* request, /*out*/void** context);
+
+/**
+ * Create a copy of input {@link ACaptureRequest}.
+ *
+ * The returned ACaptureRequest must be freed by the application by {@link ACaptureRequest_free}
+ * after application is done using it.
+ *
+ * @param src the input {@link ACaptureRequest} to be copied.
+ *
+ * @return a valid ACaptureRequest pointer or NULL if the input request cannot be copied.
+ */
+ACaptureRequest* ACaptureRequest_copy(const ACaptureRequest* src);
+
+#endif /* __ANDROID_API__ >= 28 */
+
__END_DECLS
#endif /* _NDK_CAPTURE_REQUEST_H */
diff --git a/camera/ndk/libcamera2ndk.map.txt b/camera/ndk/libcamera2ndk.map.txt
index 41bb22b93fc7cd50629b3c30cdcccea639af9398..d179aa01f04c6a2b5b6d34d5c08623b844904822 100644
--- a/camera/ndk/libcamera2ndk.map.txt
+++ b/camera/ndk/libcamera2ndk.map.txt
@@ -6,9 +6,11 @@ LIBCAMERA2NDK {
ACameraCaptureSession_getDevice;
ACameraCaptureSession_setRepeatingRequest;
ACameraCaptureSession_stopRepeating;
+ ACameraCaptureSession_updateSharedOutput;
ACameraDevice_close;
ACameraDevice_createCaptureRequest;
ACameraDevice_createCaptureSession;
+ ACameraDevice_createCaptureSessionWithSessionParameters;
ACameraDevice_getId;
ACameraManager_create;
ACameraManager_delete;
@@ -25,9 +27,11 @@ LIBCAMERA2NDK {
ACameraOutputTarget_create;
ACameraOutputTarget_free;
ACaptureRequest_addTarget;
+ ACaptureRequest_copy;
ACaptureRequest_free;
ACaptureRequest_getAllTags;
ACaptureRequest_getConstEntry;
+ ACaptureRequest_getUserContext;
ACaptureRequest_removeTarget;
ACaptureRequest_setEntry_double;
ACaptureRequest_setEntry_float;
@@ -35,11 +39,15 @@ LIBCAMERA2NDK {
ACaptureRequest_setEntry_i64;
ACaptureRequest_setEntry_rational;
ACaptureRequest_setEntry_u8;
+ ACaptureRequest_setUserContext;
ACaptureSessionOutputContainer_add;
ACaptureSessionOutputContainer_create;
ACaptureSessionOutputContainer_free;
ACaptureSessionOutputContainer_remove;
ACaptureSessionOutput_create;
+ ACaptureSessionSharedOutput_create;
+ ACaptureSessionSharedOutput_add;
+ ACaptureSessionSharedOutput_remove;
ACaptureSessionOutput_free;
local:
*;
diff --git a/camera/tests/CameraBinderTests.cpp b/camera/tests/CameraBinderTests.cpp
index 51d92140f36f8289ad256b6e61d145ce3e98c998..1de701381cb863cc2a8b887aef7d99545bacebf0 100644
--- a/camera/tests/CameraBinderTests.cpp
+++ b/camera/tests/CameraBinderTests.cpp
@@ -198,9 +198,11 @@ public:
virtual binder::Status onResultReceived(const CameraMetadata& metadata,
- const CaptureResultExtras& resultExtras) {
+ const CaptureResultExtras& resultExtras,
+ const std::vector<PhysicalCaptureResultInfo>& physicalResultInfos) {
(void) metadata;
(void) resultExtras;
+ (void) physicalResultInfos;
Mutex::Autolock l(mLock);
mLastStatus = SENT_RESULT;
mStatusesHit.push_back(mLastStatus);
@@ -317,6 +319,9 @@ TEST(CameraServiceBinderTest, CheckBinderCameraService) {
EXPECT_TRUE(res.isOk()) << res;
EXPECT_EQ(numCameras, static_cast<int>(statuses.size()));
+ for (const auto &it : statuses) {
+ listener->onStatusChanged(it.status, String16(it.cameraId));
+ }
for (int32_t i = 0; i < numCameras; i++) {
String16 cameraId = String16(String8::format("%d", i));
@@ -421,6 +426,9 @@ protected:
serviceListener = new TestCameraServiceListener();
std::vector<hardware::CameraStatus> statuses;
service->addListener(serviceListener, &statuses);
+ for (const auto &it : statuses) {
+ serviceListener->onStatusChanged(it.status, String16(it.cameraId));
+ }
service->getNumberOfCameras(hardware::ICameraService::CAMERA_TYPE_BACKWARD_COMPATIBLE,
&numCameras);
}
@@ -439,8 +447,9 @@ TEST_F(CameraClientBinderTest, CheckBinderCameraDeviceUser) {
ASSERT_NOT_NULL(service);
EXPECT_TRUE(serviceListener->waitForNumCameras(numCameras));
for (int32_t i = 0; i < numCameras; i++) {
+ String8 cameraId8 = String8::format("%d", i);
// Make sure we're available, or skip device tests otherwise
- String16 cameraId(String8::format("%d",i));
+ String16 cameraId(cameraId8);
int32_t s = serviceListener->getStatus(cameraId);
EXPECT_EQ(hardware::ICameraServiceListener::STATUS_PRESENT, s);
if (s != hardware::ICameraServiceListener::STATUS_PRESENT) {
@@ -476,7 +485,8 @@ TEST_F(CameraClientBinderTest, CheckBinderCameraDeviceUser) {
res = device->createStream(output, &streamId);
EXPECT_TRUE(res.isOk()) << res;
EXPECT_LE(0, streamId);
- res = device->endConfigure(/*isConstrainedHighSpeed*/ false);
+ CameraMetadata sessionParams;
+ res = device->endConfigure(/*isConstrainedHighSpeed*/ false, sessionParams);
EXPECT_TRUE(res.isOk()) << res;
EXPECT_FALSE(callbacks->hadError());
@@ -487,7 +497,7 @@ TEST_F(CameraClientBinderTest, CheckBinderCameraDeviceUser) {
EXPECT_TRUE(res.isOk()) << res;
hardware::camera2::CaptureRequest request;
- request.mMetadata = requestTemplate;
+ request.mPhysicalCameraSettings.push_back({cameraId8.string(), requestTemplate});
request.mSurfaceList.add(surface);
request.mIsReprocess = false;
int64_t lastFrameNumber = 0;
@@ -514,7 +524,7 @@ TEST_F(CameraClientBinderTest, CheckBinderCameraDeviceUser) {
/*out*/&requestTemplate);
EXPECT_TRUE(res.isOk()) << res;
hardware::camera2::CaptureRequest request2;
- request2.mMetadata = requestTemplate;
+ request2.mPhysicalCameraSettings.push_back({cameraId8.string(), requestTemplate});
request2.mSurfaceList.add(surface);
request2.mIsReprocess = false;
callbacks->clearStatus();
@@ -547,10 +557,10 @@ TEST_F(CameraClientBinderTest, CheckBinderCameraDeviceUser) {
EXPECT_TRUE(res.isOk()) << res;
android::hardware::camera2::CaptureRequest request3;
android::hardware::camera2::CaptureRequest request4;
- request3.mMetadata = requestTemplate;
+ request3.mPhysicalCameraSettings.push_back({cameraId8.string(), requestTemplate});
request3.mSurfaceList.add(surface);
request3.mIsReprocess = false;
- request4.mMetadata = requestTemplate2;
+ request4.mPhysicalCameraSettings.push_back({cameraId8.string(), requestTemplate2});
request4.mSurfaceList.add(surface);
request4.mIsReprocess = false;
std::vector<hardware::camera2::CaptureRequest> requestList;
@@ -574,7 +584,7 @@ TEST_F(CameraClientBinderTest, CheckBinderCameraDeviceUser) {
EXPECT_TRUE(res.isOk()) << res;
res = device->deleteStream(streamId);
EXPECT_TRUE(res.isOk()) << res;
- res = device->endConfigure(/*isConstrainedHighSpeed*/ false);
+ res = device->endConfigure(/*isConstrainedHighSpeed*/ false, sessionParams);
EXPECT_TRUE(res.isOk()) << res;
sleep(/*second*/1); // allow some time for errors to show up, if any
@@ -584,3 +594,62 @@ TEST_F(CameraClientBinderTest, CheckBinderCameraDeviceUser) {
}
};
+
+TEST_F(CameraClientBinderTest, CheckBinderCaptureRequest) {
+ sp<CaptureRequest> requestOriginal, requestParceled;
+ sp<IGraphicBufferProducer> gbProducer;
+ sp<IGraphicBufferConsumer> gbConsumer;
+ BufferQueue::createBufferQueue(&gbProducer, &gbConsumer);
+ sp<Surface> surface(new Surface(gbProducer, /*controlledByApp*/false));
+ Vector<sp<Surface>> surfaceList;
+ surfaceList.push_back(surface);
+ std::string physicalDeviceId1 = "0";
+ std::string physicalDeviceId2 = "1";
+ CameraMetadata physicalDeviceSettings1, physicalDeviceSettings2;
+ uint8_t intent1 = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
+ uint8_t intent2 = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
+ EXPECT_EQ(OK, physicalDeviceSettings1.update(ANDROID_CONTROL_CAPTURE_INTENT, &intent1, 1));
+ EXPECT_EQ(OK, physicalDeviceSettings2.update(ANDROID_CONTROL_CAPTURE_INTENT, &intent2, 1));
+
+ requestParceled = new CaptureRequest();
+ Parcel p;
+ EXPECT_TRUE(requestParceled->readFromParcel(&p) != OK);
+ p.writeInt32(0);
+ p.setDataPosition(0);
+ EXPECT_TRUE(requestParceled->readFromParcel(&p) != OK);
+ p.freeData();
+ p.writeInt32(-1);
+ p.setDataPosition(0);
+ EXPECT_TRUE(requestParceled->readFromParcel(&p) != OK);
+ p.freeData();
+ p.writeInt32(1);
+ p.setDataPosition(0);
+ EXPECT_TRUE(requestParceled->readFromParcel(&p) != OK);
+
+ requestOriginal = new CaptureRequest();
+ requestOriginal->mPhysicalCameraSettings.push_back({physicalDeviceId1,
+ physicalDeviceSettings1});
+ requestOriginal->mPhysicalCameraSettings.push_back({physicalDeviceId2,
+ physicalDeviceSettings2});
+ requestOriginal->mSurfaceList.push_back(surface);
+ requestOriginal->mIsReprocess = false;
+ requestOriginal->mSurfaceConverted = false;
+
+ p.freeData();
+ EXPECT_TRUE(requestOriginal->writeToParcel(&p) == OK);
+ p.setDataPosition(0);
+ EXPECT_TRUE(requestParceled->readFromParcel(&p) == OK);
+ EXPECT_EQ(requestParceled->mIsReprocess, false);
+ EXPECT_FALSE(requestParceled->mSurfaceList.empty());
+ EXPECT_EQ(2u, requestParceled->mPhysicalCameraSettings.size());
+ auto it = requestParceled->mPhysicalCameraSettings.begin();
+ EXPECT_EQ(physicalDeviceId1, it->id);
+ EXPECT_TRUE(it->settings.exists(ANDROID_CONTROL_CAPTURE_INTENT));
+ auto entry = it->settings.find(ANDROID_CONTROL_CAPTURE_INTENT);
+ EXPECT_EQ(entry.data.u8[0], intent1);
+ it++;
+ EXPECT_EQ(physicalDeviceId2, it->id);
+ EXPECT_TRUE(it->settings.exists(ANDROID_CONTROL_CAPTURE_INTENT));
+ entry = it->settings.find(ANDROID_CONTROL_CAPTURE_INTENT);
+ EXPECT_EQ(entry.data.u8[0], intent2);
+};
diff --git a/camera/tests/CameraZSLTests.cpp b/camera/tests/CameraZSLTests.cpp
index ecca3546a20d9fc09bb7857afdb7b6407fa066bc..02c6e2aed204570aab8c05a8261c53b03dc04196 100644
--- a/camera/tests/CameraZSLTests.cpp
+++ b/camera/tests/CameraZSLTests.cpp
@@ -256,10 +256,10 @@ TEST_F(CameraZSLTests, TestAllPictureSizes) {
ASSERT_TRUE(nullptr != surfaceControl.get());
ASSERT_TRUE(surfaceControl->isValid());
- SurfaceComposerClient::openGlobalTransaction();
- ASSERT_EQ(NO_ERROR, surfaceControl->setLayer(0x7fffffff));
- ASSERT_EQ(NO_ERROR, surfaceControl->show());
- SurfaceComposerClient::closeGlobalTransaction();
+ SurfaceComposerClient::Transaction{}
+ .setLayer(surfaceControl, 0x7fffffff)
+ .show(surfaceControl)
+ .apply();
previewSurface = surfaceControl->getSurface();
ASSERT_TRUE(previewSurface != NULL);
diff --git a/camera/tests/VendorTagDescriptorTests.cpp b/camera/tests/VendorTagDescriptorTests.cpp
index 75cfb73dd1801edab6f60aae63392a1d56f00a69..0ee358d759df2f971ed15f46e4976376e986bc2f 100644
--- a/camera/tests/VendorTagDescriptorTests.cpp
+++ b/camera/tests/VendorTagDescriptorTests.cpp
@@ -142,6 +142,7 @@ TEST(VendorTagDescriptorTest, ConsistentAcrossParcel) {
EXPECT_EQ(OK, vDescOriginal->writeToParcel(&p));
p.setDataPosition(0);
+ vDescParceled = new VendorTagDescriptor();
ASSERT_EQ(OK, vDescParceled->readFromParcel(&p));
// Ensure consistent tag count
diff --git a/cmds/screenrecord/Android.mk b/cmds/screenrecord/Android.mk
index 7aa684a30a55eae43d581504f239043b45c616a2..5e83ed669113f0affcf611b9ad97d4452f906587 100644
--- a/cmds/screenrecord/Android.mk
+++ b/cmds/screenrecord/Android.mk
@@ -25,8 +25,8 @@ LOCAL_SRC_FILES := \
Program.cpp
LOCAL_SHARED_LIBRARIES := \
- libstagefright libmedia libutils libbinder libstagefright_foundation \
- libjpeg libgui libcutils liblog libEGL libGLESv2
+ libstagefright libmedia libmedia_omx libutils libbinder libstagefright_foundation \
+ libjpeg libui libgui libcutils liblog libEGL libGLESv2
LOCAL_C_INCLUDES := \
frameworks/av/media/libstagefright \
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index bc32bbe788148e94bf04f6e17f12c9a941feaf52..46035159eeeb2e3b5623f0d65af595faf99c5b53 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -50,6 +50,7 @@
#include
#include
#include
+#include
#include
#include
@@ -70,9 +71,11 @@ static const char* kMimeTypeAvc = "video/avc";
static bool gVerbose = false; // chatty on stdout
static bool gRotate = false; // rotate 90 degrees
static bool gMonotonicTime = false; // use system monotonic time for timestamps
+static bool gPersistentSurface = false; // use persistent surface
static enum {
FORMAT_MP4, FORMAT_H264, FORMAT_FRAMES, FORMAT_RAW_FRAMES
} gOutputFormat = FORMAT_MP4; // data format for output
+static AString gCodecName = ""; // codec name override
static bool gSizeSpecified = false; // was size explicitly requested?
static bool gWantInfoScreen = false; // do we want initial info screen?
static bool gWantFrameTime = false; // do we want times on each frame?
@@ -132,6 +135,7 @@ static status_t configureSignals() {
strerror(errno));
return err;
}
+ signal(SIGPIPE, SIG_IGN);
return NO_ERROR;
}
@@ -154,6 +158,7 @@ static status_t prepareEncoder(float displayFps, sp<MediaCodec>* pCodec,
if (gVerbose) {
printf("Configuring recorder for %dx%d %s at %.2fMbps\n",
gVideoWidth, gVideoHeight, kMimeTypeAvc, gBitRate / 1000000.0);
+ fflush(stdout);
}
sp<AMessage> format = new AMessage;
@@ -169,11 +174,21 @@ static status_t prepareEncoder(float displayFps, sp* pCodec,
looper->setName("screenrecord_looper");
looper->start();
ALOGV("Creating codec");
- sp<MediaCodec> codec = MediaCodec::CreateByType(looper, kMimeTypeAvc, true);
- if (codec == NULL) {
- fprintf(stderr, "ERROR: unable to create %s codec instance\n",
- kMimeTypeAvc);
- return UNKNOWN_ERROR;
+ sp<MediaCodec> codec;
+ if (gCodecName.empty()) {
+ codec = MediaCodec::CreateByType(looper, kMimeTypeAvc, true);
+ if (codec == NULL) {
+ fprintf(stderr, "ERROR: unable to create %s codec instance\n",
+ kMimeTypeAvc);
+ return UNKNOWN_ERROR;
+ }
+ } else {
+ codec = MediaCodec::CreateByComponentName(looper, gCodecName);
+ if (codec == NULL) {
+ fprintf(stderr, "ERROR: unable to create %s codec instance\n",
+ gCodecName.c_str());
+ return UNKNOWN_ERROR;
+ }
}
err = codec->configure(format, NULL, NULL,
@@ -187,10 +202,18 @@ static status_t prepareEncoder(float displayFps, sp<MediaCodec>* pCodec,
ALOGV("Creating encoder input surface");
sp<IGraphicBufferProducer> bufferProducer;
- err = codec->createInputSurface(&bufferProducer);
+ if (gPersistentSurface) {
+ sp<PersistentSurface> surface = MediaCodec::CreatePersistentInputSurface();
+ bufferProducer = surface->getBufferProducer();
+ err = codec->setInputSurface(surface);
+ } else {
+ err = codec->createInputSurface(&bufferProducer);
+ }
if (err != NO_ERROR) {
fprintf(stderr,
- "ERROR: unable to create encoder input surface (err=%d)\n", err);
+ "ERROR: unable to %s encoder input surface (err=%d)\n",
+ gPersistentSurface ? "set" : "create",
+ err);
codec->release();
return err;
}
@@ -213,7 +236,9 @@ static status_t prepareEncoder(float displayFps, sp* pCodec,
* Sets the display projection, based on the display dimensions, video size,
* and device orientation.
*/
-static status_t setDisplayProjection(const sp<IBinder>& dpy,
+static status_t setDisplayProjection(
+ SurfaceComposerClient::Transaction& t,
+ const sp<IBinder>& dpy,
const DisplayInfo& mainDpyInfo) {
// Set the region of the layer stack we're interested in, which in our
@@ -273,13 +298,15 @@ static status_t setDisplayProjection(const sp& dpy,
if (gRotate) {
printf("Rotated content area is %ux%u at offset x=%d y=%d\n",
outHeight, outWidth, offY, offX);
+ fflush(stdout);
} else {
printf("Content area is %ux%u at offset x=%d y=%d\n",
outWidth, outHeight, offX, offY);
+ fflush(stdout);
}
}
- SurfaceComposerClient::setDisplayProjection(dpy,
+ t.setDisplayProjection(dpy,
gRotate ? DISPLAY_ORIENTATION_90 : DISPLAY_ORIENTATION_0,
layerStackRect, displayRect);
return NO_ERROR;
@@ -295,11 +322,11 @@ static status_t prepareVirtualDisplay(const DisplayInfo& mainDpyInfo,
sp<IBinder> dpy = SurfaceComposerClient::createDisplay(
String8("ScreenRecorder"), false /*secure*/);
- SurfaceComposerClient::openGlobalTransaction();
- SurfaceComposerClient::setDisplaySurface(dpy, bufferProducer);
- setDisplayProjection(dpy, mainDpyInfo);
- SurfaceComposerClient::setDisplayLayerStack(dpy, 0); // default stack
- SurfaceComposerClient::closeGlobalTransaction();
+ SurfaceComposerClient::Transaction t;
+ t.setDisplaySurface(dpy, bufferProducer);
+ setDisplayProjection(t, dpy, mainDpyInfo);
+ t.setDisplayLayerStack(dpy, 0); // default stack
+ t.apply();
*pDisplayHandle = dpy;
@@ -344,6 +371,7 @@ static status_t runEncoder(const sp<MediaCodec>& encoder,
if (systemTime(CLOCK_MONOTONIC) > endWhenNsec) {
if (gVerbose) {
printf("Time limit reached\n");
+ fflush(stdout);
}
break;
}
@@ -379,9 +407,9 @@ static status_t runEncoder(const sp& encoder,
ALOGW("getDisplayInfo(main) failed: %d", err);
} else if (orientation != mainDpyInfo.orientation) {
ALOGD("orientation changed, now %d", mainDpyInfo.orientation);
- SurfaceComposerClient::openGlobalTransaction();
- setDisplayProjection(virtualDpy, mainDpyInfo);
- SurfaceComposerClient::closeGlobalTransaction();
+ SurfaceComposerClient::Transaction t;
+ setDisplayProjection(t, virtualDpy, mainDpyInfo);
+ t.apply();
orientation = mainDpyInfo.orientation;
}
}
@@ -481,6 +509,7 @@ static status_t runEncoder(const sp<MediaCodec>& encoder,
printf("Encoder stopping; recorded %u frames in %" PRId64 " seconds\n",
debugNumFrames, nanoseconds_to_seconds(
systemTime(CLOCK_MONOTONIC) - startWhenNsec));
+ fflush(stdout);
}
return NO_ERROR;
}
@@ -554,6 +583,7 @@ static status_t recordScreen(const char* fileName) {
printf("Main display is %dx%d @%.2ffps (orientation=%u)\n",
mainDpyInfo.w, mainDpyInfo.h, mainDpyInfo.fps,
mainDpyInfo.orientation);
+ fflush(stdout);
}
bool rotated = isDeviceRotated(mainDpyInfo.orientation);
@@ -621,6 +651,7 @@ static status_t recordScreen(const char* fileName) {
}
if (gVerbose) {
printf("Bugreport overlay created\n");
+ fflush(stdout);
}
} else {
// Use the encoder's input surface as the virtual display surface.
@@ -713,6 +744,7 @@ static status_t recordScreen(const char* fileName) {
if (gVerbose) {
printf("Stopping encoder and muxer\n");
+ fflush(stdout);
}
}
@@ -759,6 +791,7 @@ static status_t notifyMediaScanner(const char* fileName) {
printf(" %s", argv[i]);
}
putchar('\n');
+ fflush(stdout);
}
pid_t pid = fork();
@@ -896,7 +929,9 @@ int main(int argc, char* const argv[]) {
{ "show-frame-time", no_argument, NULL, 'f' },
{ "rotate", no_argument, NULL, 'r' },
{ "output-format", required_argument, NULL, 'o' },
+ { "codec-name", required_argument, NULL, 'N' },
{ "monotonic-time", no_argument, NULL, 'm' },
+ { "persistent-surface", no_argument, NULL, 'p' },
{ NULL, 0, NULL, 0 }
};
@@ -976,9 +1011,15 @@ int main(int argc, char* const argv[]) {
return 2;
}
break;
+ case 'N':
+ gCodecName = optarg;
+ break;
case 'm':
gMonotonicTime = true;
break;
+ case 'p':
+ gPersistentSurface = true;
+ break;
default:
if (ic != '?') {
fprintf(stderr, "getopt_long returned unexpected value 0x%x\n", ic);
diff --git a/cmds/stagefright/Android.mk b/cmds/stagefright/Android.mk
index f647ffd9e0c02b000ed7669089ecc7d91c8e0ae5..c7619afec03de896cd201feaa09fc3c70204051c 100644
--- a/cmds/stagefright/Android.mk
+++ b/cmds/stagefright/Android.mk
@@ -8,9 +8,9 @@ LOCAL_SRC_FILES:= \
SineSource.cpp
LOCAL_SHARED_LIBRARIES := \
- libstagefright libmedia libutils libbinder libstagefright_foundation \
- libjpeg libgui libcutils liblog \
- libhidlmemory \
+ libstagefright libmedia libmedia_omx libmediaextractor libutils libbinder \
+ libstagefright_foundation libjpeg libui libgui libcutils liblog \
+ libhidlbase \
android.hardware.media.omx@1.0 \
LOCAL_C_INCLUDES:= \
@@ -36,7 +36,8 @@ LOCAL_SRC_FILES:= \
record.cpp
LOCAL_SHARED_LIBRARIES := \
- libstagefright libmedia liblog libutils libbinder libstagefright_foundation
+ libstagefright libmedia libmediaextractor liblog libutils libbinder \
+ libstagefright_foundation
LOCAL_C_INCLUDES:= \
frameworks/av/media/libstagefright \
@@ -60,7 +61,8 @@ LOCAL_SRC_FILES:= \
recordvideo.cpp
LOCAL_SHARED_LIBRARIES := \
- libstagefright libmedia liblog libutils libbinder libstagefright_foundation
+ libstagefright libmedia libmediaextractor liblog libutils libbinder \
+ libstagefright_foundation
LOCAL_C_INCLUDES:= \
frameworks/av/media/libstagefright \
@@ -85,7 +87,8 @@ LOCAL_SRC_FILES:= \
audioloop.cpp
LOCAL_SHARED_LIBRARIES := \
- libstagefright libmedia liblog libutils libbinder libstagefright_foundation
+ libstagefright libmedia libmediaextractor liblog libutils libbinder \
+ libstagefright_foundation
LOCAL_C_INCLUDES:= \
frameworks/av/media/libstagefright \
@@ -107,8 +110,8 @@ LOCAL_SRC_FILES:= \
stream.cpp \
LOCAL_SHARED_LIBRARIES := \
- libstagefright liblog libutils libbinder libgui \
- libstagefright_foundation libmedia libcutils
+ libstagefright liblog libutils libbinder libui libgui \
+ libstagefright_foundation libmedia libcutils libmediaextractor
LOCAL_C_INCLUDES:= \
frameworks/av/media/libstagefright \
@@ -132,7 +135,7 @@ LOCAL_SRC_FILES:= \
LOCAL_SHARED_LIBRARIES := \
libstagefright liblog libutils libbinder libstagefright_foundation \
- libmedia libaudioclient libgui libcutils
+ libmedia libmedia_omx libaudioclient libui libgui libcutils
LOCAL_C_INCLUDES:= \
frameworks/av/media/libstagefright \
@@ -163,6 +166,8 @@ LOCAL_SHARED_LIBRARIES := \
libbinder \
libstagefright_foundation \
libmedia \
+ libmedia_omx \
+ libui \
libgui \
libcutils \
libRScpp \
@@ -199,7 +204,7 @@ LOCAL_SRC_FILES:= \
LOCAL_SHARED_LIBRARIES := \
libstagefright liblog libutils libbinder libstagefright_foundation \
- libcutils libc
+ libcutils libc libmediaextractor
LOCAL_C_INCLUDES:= \
frameworks/av/media/libstagefright \
diff --git a/cmds/stagefright/SineSource.cpp b/cmds/stagefright/SineSource.cpp
index cad8caff679de035fd332a648c39bdb9da59f4ca..0ecc16ccb537e8d431a75caf4919da22ff887ebc 100644
--- a/cmds/stagefright/SineSource.cpp
+++ b/cmds/stagefright/SineSource.cpp
@@ -4,6 +4,7 @@
#include
#include
+#include
#include
#include
@@ -59,10 +60,10 @@ sp<MetaData> SineSource::getFormat() {
}
status_t SineSource::read(
- MediaBuffer **out, const ReadOptions * /* options */) {
+ MediaBufferBase **out, const ReadOptions * /* options */) {
*out = NULL;
- MediaBuffer *buffer;
+ MediaBufferBase *buffer;
status_t err = mGroup->acquire_buffer(&buffer);
if (err != OK) {
@@ -88,7 +89,7 @@ status_t SineSource::read(
x += k;
}
- buffer->meta_data()->setInt64(
+ buffer->meta_data().setInt64(
kKeyTime, ((int64_t)mPhase * 1000000) / mSampleRate);
mPhase += numFramesPerBuffer;
diff --git a/cmds/stagefright/SineSource.h b/cmds/stagefright/SineSource.h
index be05661466e7c2e873abbd65df90af7f8112392c..1817291e8a20139acf732a5b6cde5f2f0c52ff79 100644
--- a/cmds/stagefright/SineSource.h
+++ b/cmds/stagefright/SineSource.h
@@ -2,7 +2,7 @@
#define SINE_SOURCE_H_
-#include
+#include
#include
namespace android {
@@ -18,7 +18,7 @@ struct SineSource : public MediaSource {
virtual sp<MetaData> getFormat();
virtual status_t read(
- MediaBuffer **out, const ReadOptions *options = NULL);
+ MediaBufferBase **out, const ReadOptions *options = NULL);
protected:
virtual ~SineSource();
diff --git a/cmds/stagefright/audioloop.cpp b/cmds/stagefright/audioloop.cpp
index ed44b4da2b376d03937755132c4b935de83426bc..d4f2e8d8d40ffe4e313240596958d1d2bcfd641d 100644
--- a/cmds/stagefright/audioloop.cpp
+++ b/cmds/stagefright/audioloop.cpp
@@ -14,6 +14,10 @@
* limitations under the License.
*/
+#define LOG_NDEBUG 0
+#define LOG_TAG "audioloop"
+#include
+
#include
#include
#include
@@ -29,7 +33,6 @@
#include
#include
#include
-#include
#include
#include "SineSource.h"
@@ -37,11 +40,13 @@ using namespace android;
static void usage(const char* name)
{
- fprintf(stderr, "Usage: %s [-d du.ration] [-m] [-w] [<output-file>]\n", name);
+ fprintf(stderr, "Usage: %s [-d du.ration] [-m] [-w] [-N name] [<output-file>]\n", name);
fprintf(stderr, "Encodes either a sine wave or microphone input to AMR format\n");
fprintf(stderr, " -d duration in seconds, default 5 seconds\n");
fprintf(stderr, " -m use microphone for input, default sine source\n");
fprintf(stderr, " -w use AMR wideband (default narrowband)\n");
+ fprintf(stderr, " -N name of the encoder; must be set with -M\n");
+ fprintf(stderr, " -M media type of the encoder; must be set with -N\n");
fprintf(stderr, " <output-file> output file for AMR encoding,"
" if unspecified, decode to speaker.\n");
}
@@ -54,8 +59,10 @@ int main(int argc, char* argv[])
bool outputWBAMR = false;
bool playToSpeaker = true;
const char* fileOut = NULL;
+ AString name;
+ AString mediaType;
int ch;
- while ((ch = getopt(argc, argv, "d:mw")) != -1) {
+ while ((ch = getopt(argc, argv, "d:mwN:M:")) != -1) {
switch (ch) {
case 'd':
duration = atoi(optarg);
@@ -66,6 +73,12 @@ int main(int argc, char* argv[])
case 'w':
outputWBAMR = true;
break;
+ case 'N':
+ name.setTo(optarg);
+ break;
+ case 'M':
+ mediaType.setTo(optarg);
+ break;
default:
usage(argv[0]);
return -1;
@@ -76,8 +89,18 @@ int main(int argc, char* argv[])
if (argc == 1) {
fileOut = argv[0];
}
- const int32_t kSampleRate = outputWBAMR ? 16000 : 8000;
- const int32_t kBitRate = outputWBAMR ? 16000 : 8000;
+ if ((name.empty() && !mediaType.empty()) || (!name.empty() && mediaType.empty())) {
+ fprintf(stderr, "-N and -M must be set together\n");
+ usage(argv[0]);
+ return -1;
+ }
+ if (!name.empty() && fileOut != NULL) {
+ fprintf(stderr, "-N and -M cannot be used with