Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 90e813ba authored by Android Build Coastguard Worker
Browse files

Snap for 10171451 from 2a3af65a to udc-release

Change-Id: I0a11005cc75b2eef6e48c2956dd71a60cfb7a6e4
parents b39ac85c 2a3af65a
Loading
Loading
Loading
Loading
+6 −1
Original line number Diff line number Diff line
@@ -232,6 +232,7 @@ void VideoRenderQualityTracker::resetForDiscontinuity() {
    mLastContentTimeUs = -1;
    mLastRenderTimeUs = -1;
    mLastFreezeEndTimeUs = -1;
    mWasPreviousFrameDropped = false;

    // Don't worry about tracking frame rendering times from now up until playback catches up to the
    // discontinuity. While stuttering or freezing could be found in the next few frames, the impact
@@ -298,6 +299,7 @@ void VideoRenderQualityTracker::processMetricsForSkippedFrame(int64_t contentTim
    updateFrameDurations(mDesiredFrameDurationUs, -1);
    updateFrameDurations(mActualFrameDurationUs, -1);
    updateFrameRate(mMetrics.contentFrameRate, mContentFrameDurationUs, mConfiguration);
    mWasPreviousFrameDropped = false;
}

void VideoRenderQualityTracker::processMetricsForDroppedFrame(int64_t contentTimeUs,
@@ -308,6 +310,7 @@ void VideoRenderQualityTracker::processMetricsForDroppedFrame(int64_t contentTim
    updateFrameDurations(mActualFrameDurationUs, -1);
    updateFrameRate(mMetrics.contentFrameRate, mContentFrameDurationUs, mConfiguration);
    updateFrameRate(mMetrics.desiredFrameRate, mDesiredFrameDurationUs, mConfiguration);
    mWasPreviousFrameDropped = true;
}

void VideoRenderQualityTracker::processMetricsForRenderedFrame(int64_t contentTimeUs,
@@ -334,7 +337,7 @@ void VideoRenderQualityTracker::processMetricsForRenderedFrame(int64_t contentTi
    updateFrameRate(mMetrics.actualFrameRate, mActualFrameDurationUs, mConfiguration);

    // If the previous frame was dropped, there was a freeze if we've already rendered a frame
    if (mActualFrameDurationUs[1] == -1 && mLastRenderTimeUs != -1) {
    if (mWasPreviousFrameDropped && mLastRenderTimeUs != -1) {
        processFreeze(actualRenderTimeUs, mLastRenderTimeUs, mLastFreezeEndTimeUs, mMetrics);
        mLastFreezeEndTimeUs = actualRenderTimeUs;
    }
@@ -346,6 +349,8 @@ void VideoRenderQualityTracker::processMetricsForRenderedFrame(int64_t contentTi
    if (judderScore != 0) {
        mMetrics.judderScoreHistogram.insert(judderScore);
    }

    mWasPreviousFrameDropped = false;
}

void VideoRenderQualityTracker::processFreeze(int64_t actualRenderTimeUs, int64_t lastRenderTimeUs,
+3 −0
Original line number Diff line number Diff line
@@ -269,6 +269,9 @@ private:
    // The most recent timestamp of the first frame rendered after the freeze.
    int64_t mLastFreezeEndTimeUs;

    // The previous video frame was dropped.
    bool mWasPreviousFrameDropped;

    // The render duration of the playback.
    int64_t mRenderDurationMs;

+12 −0
Original line number Diff line number Diff line
@@ -232,6 +232,18 @@ TEST_F(VideoRenderQualityTrackerTest, whenFrameRateIsUnstable_doesntDetectFrameR
    EXPECT_EQ(h.getMetrics().actualFrameRate, FRAME_RATE_UNDETERMINED);
}

// Verifies that freezeRate captures the fraction of frames affected by a
// freeze once dropped frames interrupt an otherwise smooth render sequence.
TEST_F(VideoRenderQualityTrackerTest, capturesFreezeRate) {
    Configuration c;
    // Helper drives the tracker at a fixed frame interval of 20 time units
    // (presumably milliseconds per frame -- confirm against the Helper ctor).
    Helper h(20, c);
    // Render three frames with no drops: no freeze should be recorded yet.
    h.render(3);
    EXPECT_EQ(h.getMetrics().freezeRate, 0);
    // Drop three frames, then render three more; the dropped span shows up
    // as a freeze between the last pre-drop render and the first post-drop one.
    h.drop(3);
    h.render(3);
    // Expected numerator is 4 (3 drops + 1): the rendered frame immediately
    // preceding the drops is also counted as frozen, per the comment below.
    // Denominator is 8 (9 frames - 1): the final rendered frame's duration is
    // unknown until a subsequent frame arrives, so it is excluded.
    // +1 because the first frame before drops is considered frozen
    // and then -1 because the last frame has an unknown render duration
    EXPECT_EQ(h.getMetrics().freezeRate, 4.0 / 8.0);
}

TEST_F(VideoRenderQualityTrackerTest, capturesFreezeDurationHistogram) {
    Configuration c;
    // +17 because freeze durations include the render time of the previous frame
+3 −3
Original line number Diff line number Diff line
@@ -120,7 +120,7 @@ status_t HeicCompositeStream::createInternalStreams(const std::vector<sp<Surface
        camera_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
        const std::unordered_set<int32_t> &sensorPixelModesUsed,
        std::vector<int> *surfaceIds,
        int /*streamSetId*/, bool /*isShared*/, int32_t /*colorSpace*/,
        int /*streamSetId*/, bool /*isShared*/, int32_t colorSpace,
        int64_t /*dynamicProfile*/, int64_t /*streamUseCase*/, bool useReadoutTimestamp) {
    sp<CameraDeviceBase> device = mDevice.promote();
    if (!device.get()) {
@@ -153,7 +153,7 @@ status_t HeicCompositeStream::createInternalStreams(const std::vector<sp<Surface
            ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
            OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
            OutputConfiguration::MIRROR_MODE_AUTO,
            ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
            colorSpace,
            useReadoutTimestamp);
    if (res == OK) {
        mAppSegmentSurfaceId = (*surfaceIds)[0];
@@ -196,7 +196,7 @@ status_t HeicCompositeStream::createInternalStreams(const std::vector<sp<Surface
            ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
            OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
            OutputConfiguration::MIRROR_MODE_AUTO,
            ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
            colorSpace,
            useReadoutTimestamp);
    if (res == OK) {
        mMainImageSurfaceId = sourceSurfaceId[0];