Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit e4208927 authored by Shuzhen Wang
Browse files

Camera: Add timestamp base in OutputConfiguration

Timestamp base allows the application to select the preferred
timestamp behavior for a particular stream.

Test: Camera CTS
Bug: 186700251
Bug: 200306379
Change-Id: I066eac4a95bddc007facfc9d68bc024a3f0884db
parent 3eeeda39
Loading
Loading
Loading
Loading
+22 −5
Original line number Diff line number Diff line
@@ -85,6 +85,10 @@ int OutputConfiguration::getStreamUseCase() const {
    return mStreamUseCase;
}

// Returns the timestamp base requested for this output stream.
// Defaults to TIMESTAMP_BASE_DEFAULT: every constructor in this commit
// initializes mTimestampBase to that value, and the parcel read path
// overwrites it only when a value was successfully read.
int OutputConfiguration::getTimestampBase() const {
    return mTimestampBase;
}

OutputConfiguration::OutputConfiguration() :
        mRotation(INVALID_ROTATION),
        mSurfaceSetID(INVALID_SET_ID),
@@ -95,7 +99,8 @@ OutputConfiguration::OutputConfiguration() :
        mIsShared(false),
        mIsMultiResolution(false),
        mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
        mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
        mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
        mTimestampBase(TIMESTAMP_BASE_DEFAULT) {
}

OutputConfiguration::OutputConfiguration(const android::Parcel& parcel) :
@@ -188,6 +193,12 @@ status_t OutputConfiguration::readFromParcel(const android::Parcel* parcel) {
        return err;
    }

    int timestampBase = TIMESTAMP_BASE_DEFAULT;
    if ((err = parcel->readInt32(&timestampBase)) != OK) {
        ALOGE("%s: Failed to read timestamp base from parcel", __FUNCTION__);
        return err;
    }

    mRotation = rotation;
    mSurfaceSetID = setID;
    mSurfaceType = surfaceType;
@@ -197,6 +208,7 @@ status_t OutputConfiguration::readFromParcel(const android::Parcel* parcel) {
    mIsShared = isShared != 0;
    mIsMultiResolution = isMultiResolution != 0;
    mStreamUseCase = streamUseCase;
    mTimestampBase = timestampBase;
    for (auto& surface : surfaceShims) {
        ALOGV("%s: OutputConfiguration: %p, name %s", __FUNCTION__,
                surface.graphicBufferProducer.get(),
@@ -208,9 +220,9 @@ status_t OutputConfiguration::readFromParcel(const android::Parcel* parcel) {
    mDynamicRangeProfile = dynamicProfile;

    ALOGV("%s: OutputConfiguration: rotation = %d, setId = %d, surfaceType = %d,"
          " physicalCameraId = %s, isMultiResolution = %d, streamUseCase = %d", __FUNCTION__,
          mRotation, mSurfaceSetID, mSurfaceType, String8(mPhysicalCameraId).string(),
          mIsMultiResolution, mStreamUseCase);
          " physicalCameraId = %s, isMultiResolution = %d, streamUseCase = %d, timestampBase = %d",
          __FUNCTION__, mRotation, mSurfaceSetID, mSurfaceType,
          String8(mPhysicalCameraId).string(), mIsMultiResolution, mStreamUseCase, timestampBase);

    return err;
}
@@ -227,6 +239,7 @@ OutputConfiguration::OutputConfiguration(sp<IGraphicBufferProducer>& gbp, int ro
    mIsMultiResolution = false;
    mDynamicRangeProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
    mStreamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
    mTimestampBase = TIMESTAMP_BASE_DEFAULT;
}

OutputConfiguration::OutputConfiguration(
@@ -237,7 +250,8 @@ OutputConfiguration::OutputConfiguration(
    mWidth(width), mHeight(height), mIsDeferred(false), mIsShared(isShared),
    mPhysicalCameraId(physicalCameraId), mIsMultiResolution(false),
    mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
    mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) { }
    mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT),
    mTimestampBase(TIMESTAMP_BASE_DEFAULT) { }

status_t OutputConfiguration::writeToParcel(android::Parcel* parcel) const {

@@ -290,6 +304,9 @@ status_t OutputConfiguration::writeToParcel(android::Parcel* parcel) const {
    err = parcel->writeInt32(mStreamUseCase);
    if (err != OK) return err;

    err = parcel->writeInt32(mTimestampBase);
    if (err != OK) return err;

    return OK;
}

+15 −1
Original line number Diff line number Diff line
@@ -38,6 +38,14 @@ public:
        SURFACE_TYPE_SURFACE_VIEW = 0,
        SURFACE_TYPE_SURFACE_TEXTURE = 1
    };
    // Timestamp base a client can select for a stream's buffer timestamps.
    // The numeric values are serialized directly via Parcel::writeInt32 /
    // readInt32, so they must stay in sync with the public API constants
    // (presumably OutputConfiguration.TIMESTAMP_BASE_* on the Java side —
    // TODO confirm value parity with the framework definitions).
    enum TimestampBaseByte {
        TIMESTAMP_BASE_DEFAULT = 0,
        TIMESTAMP_BASE_SENSOR = 1,
        TIMESTAMP_BASE_MONOTONIC = 2,
        TIMESTAMP_BASE_REALTIME = 3,
        TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED = 4
    };

    const std::vector<sp<IGraphicBufferProducer>>& getGraphicBufferProducers() const;
    int                        getRotation() const;
    int                        getSurfaceSetID() const;
@@ -50,6 +58,7 @@ public:
    String16                   getPhysicalCameraId() const;
    bool                       isMultiResolution() const;
    int                        getStreamUseCase() const;
    int                        getTimestampBase() const;

    // set of sensor pixel mode resolutions allowed {MAX_RESOLUTION, DEFAULT_MODE};
    const std::vector<int32_t>&            getSensorPixelModesUsed() const;
@@ -93,7 +102,8 @@ public:
                mIsMultiResolution == other.mIsMultiResolution &&
                sensorPixelModesUsedEqual(other) &&
                mDynamicRangeProfile == other.mDynamicRangeProfile &&
                mStreamUseCase == other.mStreamUseCase );
                mStreamUseCase == other.mStreamUseCase &&
                mTimestampBase == other.mTimestampBase);
    }
    bool operator != (const OutputConfiguration& other) const {
        return !(*this == other);
@@ -136,6 +146,9 @@ public:
        if (mStreamUseCase != other.mStreamUseCase) {
            return mStreamUseCase < other.mStreamUseCase;
        }
        if (mTimestampBase != other.mTimestampBase) {
            return mTimestampBase < other.mTimestampBase;
        }
        return gbpsLessThan(other);
    }

@@ -162,6 +175,7 @@ private:
    std::vector<int32_t>       mSensorPixelModesUsed;
    int                        mDynamicRangeProfile;
    int                        mStreamUseCase;
    int                        mTimestampBase;
};
} // namespace params
} // namespace camera2
+10 −6
Original line number Diff line number Diff line
@@ -861,6 +861,7 @@ binder::Status CameraDeviceClient::createStream(
    bool isMultiResolution = outputConfiguration.isMultiResolution();
    int dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
    int streamUseCase = outputConfiguration.getStreamUseCase();
    int timestampBase = outputConfiguration.getTimestampBase();

    res = SessionConfigurationUtils::checkSurfaceType(numBufferProducers, deferredConsumer,
            outputConfiguration.getSurfaceType());
@@ -905,7 +906,7 @@ binder::Status CameraDeviceClient::createStream(
        res = SessionConfigurationUtils::createSurfaceFromGbp(streamInfo,
                isStreamInfoValid, surface, bufferProducer, mCameraIdStr,
                mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
                streamUseCase);
                streamUseCase, timestampBase);

        if (!res.isOk())
            return res;
@@ -951,7 +952,8 @@ binder::Status CameraDeviceClient::createStream(
                static_cast<camera_stream_rotation_t>(outputConfiguration.getRotation()),
                &streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
                outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution,
                /*consumerUsage*/0, streamInfo.dynamicRangeProfile, streamInfo.streamUseCase);
                /*consumerUsage*/0, streamInfo.dynamicRangeProfile, streamInfo.streamUseCase,
                streamInfo.timestampBase);
    }

    if (err != OK) {
@@ -1062,7 +1064,8 @@ binder::Status CameraDeviceClient::createDeferredSurfaceStreamLocked(
                std::forward_as_tuple(width, height, format, dataSpace, consumerUsage,
                        overriddenSensorPixelModesUsed,
                        outputConfiguration.getDynamicRangeProfile(),
                        outputConfiguration.getStreamUseCase()));
                        outputConfiguration.getStreamUseCase(),
                        outputConfiguration.getTimestampBase()));

        ALOGV("%s: Camera %s: Successfully created a new stream ID %d for a deferred surface"
                " (%d x %d) stream with format 0x%x.",
@@ -1251,7 +1254,7 @@ binder::Status CameraDeviceClient::updateOutputConfiguration(int streamId,
    const std::vector<int32_t> &sensorPixelModesUsed =
            outputConfiguration.getSensorPixelModesUsed();
    int streamUseCase = outputConfiguration.getStreamUseCase();

    int timestampBase = outputConfiguration.getTimestampBase();
    int dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();

    for (size_t i = 0; i < newOutputsMap.size(); i++) {
@@ -1260,7 +1263,7 @@ binder::Status CameraDeviceClient::updateOutputConfiguration(int streamId,
        res = SessionConfigurationUtils::createSurfaceFromGbp(outInfo,
                /*isStreamInfoValid*/ false, surface, newOutputsMap.valueAt(i), mCameraIdStr,
                mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
                streamUseCase);
                streamUseCase, timestampBase);
        if (!res.isOk())
            return res;

@@ -1619,6 +1622,7 @@ binder::Status CameraDeviceClient::finalizeOutputConfigurations(int32_t streamId
            outputConfiguration.getSensorPixelModesUsed();
    int dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
    int streamUseCase= outputConfiguration.getStreamUseCase();
    int timestampBase = outputConfiguration.getTimestampBase();
    for (auto& bufferProducer : bufferProducers) {
        // Don't create multiple streams for the same target surface
        ssize_t index = mStreamMap.indexOfKey(IInterface::asBinder(bufferProducer));
@@ -1632,7 +1636,7 @@ binder::Status CameraDeviceClient::finalizeOutputConfigurations(int32_t streamId
        res = SessionConfigurationUtils::createSurfaceFromGbp(mStreamInfoMap[streamId],
                true /*isStreamInfoValid*/, surface, bufferProducer, mCameraIdStr,
                mDevice->infoPhysical(physicalId), sensorPixelModesUsed, dynamicRangeProfile,
                streamUseCase);
                streamUseCase, timestampBase);

        if (!res.isOk())
            return res;
+4 −2
Original line number Diff line number Diff line
@@ -184,7 +184,8 @@ class CameraDeviceBase : public virtual FrameProducer {
            bool isShared = false, bool isMultiResolution = false,
            uint64_t consumerUsage = 0,
            int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
            int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) = 0;
            int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
            int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT) = 0;

    /**
     * Create an output stream of the requested size, format, rotation and
@@ -203,7 +204,8 @@ class CameraDeviceBase : public virtual FrameProducer {
            bool isShared = false, bool isMultiResolution = false,
            uint64_t consumerUsage = 0,
            int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
            int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) = 0;
            int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
            int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT) = 0;

    /**
     * Create an input stream of width, height, and format.
+20 −11
Original line number Diff line number Diff line
@@ -86,6 +86,7 @@ Camera3Device::Camera3Device(const String8 &id, bool overrideForPerfClass, bool
        mStatusWaiters(0),
        mUsePartialResult(false),
        mNumPartialResults(1),
        mDeviceTimeBaseIsRealtime(false),
        mTimestampOffset(0),
        mNextResultFrameNumber(0),
        mNextReprocessResultFrameNumber(0),
@@ -189,11 +190,12 @@ status_t Camera3Device::initializeCommonLocked() {
    mIsInputStreamMultiResolution = false;

    // Measure the clock domain offset between camera and video/hw_composer
    mTimestampOffset = getMonoToBoottimeOffset();
    camera_metadata_entry timestampSource =
            mDeviceInfo.find(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE);
    if (timestampSource.count > 0 && timestampSource.data.u8[0] ==
            ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME) {
        mTimestampOffset = getMonoToBoottimeOffset();
        mDeviceTimeBaseIsRealtime = true;
    }

    // Will the HAL be sending in early partial result metadata?
@@ -978,7 +980,7 @@ status_t Camera3Device::createStream(sp<Surface> consumer,
            const String8& physicalCameraId,
            const std::unordered_set<int32_t> &sensorPixelModesUsed,
            std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
            uint64_t consumerUsage, int dynamicRangeProfile, int streamUseCase) {
            uint64_t consumerUsage, int dynamicRangeProfile, int streamUseCase, int timestampBase) {
    ATRACE_CALL();

    if (consumer == nullptr) {
@@ -992,7 +994,7 @@ status_t Camera3Device::createStream(sp<Surface> consumer,
    return createStream(consumers, /*hasDeferredConsumer*/ false, width, height,
            format, dataSpace, rotation, id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
            streamSetId, isShared, isMultiResolution, consumerUsage, dynamicRangeProfile,
            streamUseCase);
            streamUseCase, timestampBase);
}

static bool isRawFormat(int format) {
@@ -1012,16 +1014,18 @@ status_t Camera3Device::createStream(const std::vector<sp<Surface>>& consumers,
        android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
        const String8& physicalCameraId, const std::unordered_set<int32_t> &sensorPixelModesUsed,
        std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
        uint64_t consumerUsage, int dynamicRangeProfile, int streamUseCase) {
        uint64_t consumerUsage, int dynamicRangeProfile, int streamUseCase, int timestampBase) {
    ATRACE_CALL();

    Mutex::Autolock il(mInterfaceLock);
    nsecs_t maxExpectedDuration = getExpectedInFlightDuration();
    Mutex::Autolock l(mLock);
    ALOGV("Camera %s: Creating new stream %d: %d x %d, format %d, dataspace %d rotation %d"
            " consumer usage %" PRIu64 ", isShared %d, physicalCameraId %s, isMultiResolution %d",
            " consumer usage %" PRIu64 ", isShared %d, physicalCameraId %s, isMultiResolution %d"
            " dynamicRangeProfile %d, streamUseCase %d, timestampBase %d",
            mId.string(), mNextStreamId, width, height, format, dataSpace, rotation,
            consumerUsage, isShared, physicalCameraId.string(), isMultiResolution);
            consumerUsage, isShared, physicalCameraId.string(), isMultiResolution,
            dynamicRangeProfile, streamUseCase, timestampBase);

    status_t res;
    bool wasActive = false;
@@ -1090,7 +1094,8 @@ status_t Camera3Device::createStream(const std::vector<sp<Surface>>& consumers,
        newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
                width, height, blobBufferSize, format, dataSpace, rotation,
                mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
                isMultiResolution, dynamicRangeProfile, streamUseCase);
                isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
                timestampBase);
    } else if (format == HAL_PIXEL_FORMAT_RAW_OPAQUE) {
        bool maxResolution =
                sensorPixelModesUsed.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
@@ -1104,22 +1109,26 @@ status_t Camera3Device::createStream(const std::vector<sp<Surface>>& consumers,
        newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
                width, height, rawOpaqueBufferSize, format, dataSpace, rotation,
                mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
                isMultiResolution, dynamicRangeProfile, streamUseCase);
                isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
                timestampBase);
    } else if (isShared) {
        newStream = new Camera3SharedOutputStream(mNextStreamId, consumers,
                width, height, format, consumerUsage, dataSpace, rotation,
                mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
                mUseHalBufManager, dynamicRangeProfile, streamUseCase);
                mUseHalBufManager, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
                timestampBase);
    } else if (consumers.size() == 0 && hasDeferredConsumer) {
        newStream = new Camera3OutputStream(mNextStreamId,
                width, height, format, consumerUsage, dataSpace, rotation,
                mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
                isMultiResolution, dynamicRangeProfile, streamUseCase);
                isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
                timestampBase);
    } else {
        newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
                width, height, format, dataSpace, rotation,
                mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
                isMultiResolution, dynamicRangeProfile, streamUseCase);
                isMultiResolution, dynamicRangeProfile, streamUseCase, mDeviceTimeBaseIsRealtime,
                timestampBase);
    }

    size_t consumerCount = consumers.size();
Loading