Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit fa2deee7 authored by Brian Lindahl's avatar Brian Lindahl Committed by Automerger Merge Worker
Browse files

Support frame rendering metrics for tunnel mode am: f13b578a am: cdb185e0

parents 741c347b cdb185e0
Loading
Loading
Loading
Loading
+20 −8
Original line number Diff line number Diff line
@@ -1129,7 +1129,7 @@ void MediaCodec::updateMediametrics() {
    // Video rendering quality metrics
    {
        const VideoRenderQualityMetrics &m = mVideoRenderQualityTracker.getMetrics();
        if (m.frameRenderedCount > 0) {
        if (m.frameReleasedCount > 0) {
            mediametrics_setInt64(mMetricsHandle, kCodecFirstRenderTimeUs, m.firstRenderTimeUs);
            mediametrics_setInt64(mMetricsHandle, kCodecFramesReleased, m.frameReleasedCount);
            mediametrics_setInt64(mMetricsHandle, kCodecFramesRendered, m.frameRenderedCount);
@@ -1534,10 +1534,14 @@ void MediaCodec::processRenderedFrames(const sp<AMessage> &msg) {
                ALOGE("processRenderedFrames: no media time found");
                continue;
            }
            // Tunneled frames use INT64_MAX to indicate end-of-stream, so don't report it as a
            // rendered frame.
            if (!mTunneled || mediaTimeUs != INT64_MAX) {
                mVideoRenderQualityTracker.onFrameRendered(mediaTimeUs, renderTimeNs);
            }
        }
    }
}

// when we send a buffer to the codec;
void MediaCodec::statsBufferSent(int64_t presentationUs, const sp<MediaCodecBuffer> &buffer) {
@@ -5818,6 +5822,10 @@ status_t MediaCodec::onQueueInputBuffer(const sp<AMessage> &msg) {
    }

    if (err == OK) {
        if (mTunneled && (flags & (BUFFER_FLAG_DECODE_ONLY | BUFFER_FLAG_END_OF_STREAM)) == 0) {
            mVideoRenderQualityTracker.onTunnelFrameQueued(timeUs);
        }

        // synchronization boundary for getBufferAndFormat
        Mutex::Autolock al(mBufferLock);
        info->mOwnedByClient = false;
@@ -5900,7 +5908,7 @@ status_t MediaCodec::onReleaseOutputBuffer(const sp<AMessage> &msg) {
    }

    if (render && buffer->size() != 0) {
        int64_t mediaTimeUs = -1;
        int64_t mediaTimeUs = INT64_MIN;
        buffer->meta()->findInt64("timeUs", &mediaTimeUs);

        bool noRenderTime = false;
@@ -5931,8 +5939,11 @@ status_t MediaCodec::onReleaseOutputBuffer(const sp<AMessage> &msg) {
        // If rendering to the screen, then schedule a time in the future to poll to see if this
        // frame was ever rendered to seed onFrameRendered callbacks.
        if (mIsSurfaceToDisplay) {
            if (mediaTimeUs != INT64_MIN) {
                noRenderTime ? mVideoRenderQualityTracker.onFrameReleased(mediaTimeUs)
                         : mVideoRenderQualityTracker.onFrameReleased(mediaTimeUs, renderTimeNs);
                             : mVideoRenderQualityTracker.onFrameReleased(mediaTimeUs,
                                                                          renderTimeNs);
            }
            // can't initialize this in the constructor because the Looper parent class needs to be
            // initialized first
            if (mMsgPollForRenderedBuffers == nullptr) {
@@ -5963,10 +5974,11 @@ status_t MediaCodec::onReleaseOutputBuffer(const sp<AMessage> &msg) {
        }
    } else {
        if (mIsSurfaceToDisplay) {
            int64_t mediaTimeUs = -1;
            buffer->meta()->findInt64("timeUs", &mediaTimeUs);
            int64_t mediaTimeUs = INT64_MIN;
            if (buffer->meta()->findInt64("timeUs", &mediaTimeUs)) {
                mVideoRenderQualityTracker.onFrameSkipped(mediaTimeUs);
            }
        }
        mBufferChannel->discardBuffer(buffer);
    }

+27 −0
Original line number Diff line number Diff line
@@ -87,6 +87,25 @@ VideoRenderQualityTracker::VideoRenderQualityTracker(const Configuration &config
    clear();
}

void VideoRenderQualityTracker::onTunnelFrameQueued(int64_t contentTimeUs) {
    if (!mConfiguration.enabled) {
        return;
    }

    // Tunneled frames arrive in decode order, but we need to track them in render order. A
    // P-frame is queued before the B-frames that render ahead of it, so hold the most recent
    // out-of-order frame until its render position is known. NOTE: this relies on current
    // encoders allowing B-frames to reference only ONE future P-frame per mini-GOP; with more
    // P-frames per mini-GOP this single-slot approach no longer reconstructs render order.
    if (mTunnelFrameQueuedContentTimeUs == -1) {
        // Nothing is being held yet — stash this frame and wait for the next one.
        mTunnelFrameQueuedContentTimeUs = contentTimeUs;
        return;
    }
    if (contentTimeUs < mTunnelFrameQueuedContentTimeUs) {
        // This frame renders before the held frame (a B-frame) — release it immediately and
        // keep holding the later frame.
        onFrameReleased(contentTimeUs, 0);
        return;
    }
    // This frame renders after the held frame — the held frame's position is now settled, so
    // release it and hold this one instead.
    onFrameReleased(mTunnelFrameQueuedContentTimeUs, 0);
    mTunnelFrameQueuedContentTimeUs = contentTimeUs;
}

void VideoRenderQualityTracker::onFrameSkipped(int64_t contentTimeUs) {
    if (!mConfiguration.enabled) {
        return;
@@ -137,6 +156,13 @@ void VideoRenderQualityTracker::onFrameRendered(int64_t contentTimeUs, int64_t a
    }
    mPendingSkippedFrameContentTimeUsList = {};

    // We can render a pending queued frame if it's the last frame of the video, so release it
    // immediately.
    if (contentTimeUs == mTunnelFrameQueuedContentTimeUs && mTunnelFrameQueuedContentTimeUs != -1) {
        onFrameReleased(mTunnelFrameQueuedContentTimeUs, 0);
        mTunnelFrameQueuedContentTimeUs = -1;
    }

    static const FrameInfo noFrame = {-1, -1};
    FrameInfo nextExpectedFrame = noFrame;
    while (!mNextExpectedRenderedFrameQueue.empty()) {
@@ -211,6 +237,7 @@ void VideoRenderQualityTracker::resetForDiscontinuity() {
    // discontinuity. While stuttering or freezing could be found in the next few frames, the impact
    // to the user is minimal, so better to just keep things simple and don't bother.
    mNextExpectedRenderedFrameQueue = {};
    mTunnelFrameQueuedContentTimeUs = -1;

    // Ignore any frames that were skipped just prior to the discontinuity.
    mPendingSkippedFrameContentTimeUsList = {};
+1 −3
Original line number Diff line number Diff line
@@ -197,13 +197,11 @@ std::string MediaHistogram<T>::emitBuckets() const {
        for (int i = 0; i < mBucketLimits.size(); ++i) {
            ss << ',' << mBucketLimits[i];
        }
        ss << ',' << mCeiling;
    } else {
        ss << mFloor;
        for (int i = 0; i < mBuckets.size(); ++i) {
        for (int i = 1; i <= mBuckets.size(); ++i) {
            ss << ',' << (mFloor + i * mWidth);
        }
        ss << ',' << mCeiling;
    }
    return ss.str();
}
+8 −0
Original line number Diff line number Diff line
@@ -150,6 +150,9 @@ public:
    VideoRenderQualityTracker();
    VideoRenderQualityTracker(const Configuration &configuration);

    // Called when a tunnel mode frame has been queued.
    void onTunnelFrameQueued(int64_t contentTimeUs);

    // Called when the app has intentionally decided not to render this frame.
    void onFrameSkipped(int64_t contentTimeUs);

@@ -277,6 +280,11 @@ private:
    // checking to see if the next expected frame is rendered. If not, it is considered dropped.
    std::queue<FrameInfo> mNextExpectedRenderedFrameQueue;

    // When B-frames are present in the stream, a P-frame will be queued before the B-frame even
    // though it is rendered after. Therefore, the P-frame is held here and not inserted into
    // mNextExpectedRenderedFrameQueue until it should be inserted to maintain render order.
    int64_t mTunnelFrameQueuedContentTimeUs;

    // Frame durations derived from timestamps encoded into the content stream. These are the
    // durations that each frame is supposed to be rendered for.
    FrameDurationUs mContentFrameDurationUs;