Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 8b338c93 authored by Brian Lindahl's avatar Brian Lindahl
Browse files

Capture basic metrics for video frame rendering

Capture and compute frame drops, frame skips and frame rates for video
playback. Report these metrics through the existing media.metrics
pipeline for bug reports.

Bug: 234833109
Test: atest VideoRenderQualityTracker_test
Test: Play YouTube video, skip forward and backward, and check 'adb
shell dumpsys media.metrics'

Change-Id: Ie8b4d2c85cbc4b94d30926868e9aa4aa5cccf729
parent 04491e1d
Loading
Loading
Loading
Loading
+1 −0
Original line number Diff line number Diff line
@@ -270,6 +270,7 @@ cc_library {
        "SurfaceUtils.cpp",
        "ThrottledSource.cpp",
        "Utils.cpp",
        "VideoRenderQualityTracker.cpp",
        "VideoFrameSchedulerBase.cpp",
        "VideoFrameScheduler.cpp",
    ],
+62 −19
Original line number Diff line number Diff line
@@ -30,7 +30,6 @@
#include <C2Buffer.h>

#include "include/SoftwareRenderer.h"
#include "PlaybackDurationAccumulator.h"

#include <android/binder_manager.h>
#include <android/content/pm/IPackageManagerNative.h>
@@ -199,6 +198,14 @@ static const char *kCodecRecentLatencyHist = "android.media.mediacodec.recent.hi
static const char *kCodecPlaybackDurationSec =
        "android.media.mediacodec.playback-duration-sec"; /* in sec */

static const char *kCodecFramesReleased = "android.media.mediacodec.frames.released";
static const char *kCodecFramesRendered = "android.media.mediacodec.frames.rendered";
static const char *kCodecFramesSkipped = "android.media.mediacodec.frames.skipped";
static const char *kCodecFramesDropped = "android.media.mediacodec.frames.dropped";
static const char *kCodecFramerateContent = "android.media.mediacodec.framerate.content";
static const char *kCodecFramerateDesired = "android.media.mediacodec.framerate.desired";
static const char *kCodecFramerateActual = "android.media.mediacodec.framerate.actual";

/* -1: shaper disabled
   >=0: number of fields changed */
static const char *kCodecShapingEnhanced = "android.media.mediacodec.shaped";
@@ -960,8 +967,7 @@ MediaCodec::MediaCodec(
      mHaveInputSurface(false),
      mHavePendingInputBuffers(false),
      mCpuBoostRequested(false),
      mPlaybackDurationAccumulator(new PlaybackDurationAccumulator()),
      mIsSurfaceToScreen(false),
      mIsSurfaceToDisplay(false),
      mLatencyUnknown(0),
      mBytesEncoded(0),
      mEarliestEncodedPtsUs(INT64_MAX),
@@ -1096,6 +1102,20 @@ void MediaCodec::updateMediametrics() {
    mediametrics_setInt32(mMetricsHandle, kCodecResolutionChangeCount,
            mReliabilityContextMetrics.resolutionChangeCount);

    // Video rendering quality metrics
    {
        const VideoRenderQualityMetrics& m = mVideoRenderQualityTracker.getMetrics();
        if (m.frameRenderedCount > 0) {
            mediametrics_setInt64(mMetricsHandle, kCodecFramesReleased, m.frameReleasedCount);
            mediametrics_setInt64(mMetricsHandle, kCodecFramesRendered, m.frameRenderedCount);
            mediametrics_setInt64(mMetricsHandle, kCodecFramesSkipped, m.frameSkippedCount);
            mediametrics_setInt64(mMetricsHandle, kCodecFramesDropped, m.frameDroppedCount);
            mediametrics_setDouble(mMetricsHandle, kCodecFramerateContent, m.contentFrameRate);
            mediametrics_setDouble(mMetricsHandle, kCodecFramerateDesired, m.desiredFrameRate);
            mediametrics_setDouble(mMetricsHandle, kCodecFramerateActual, m.actualFrameRate);
        }
    }

    if (mLatencyHist.getCount() != 0 ) {
        mediametrics_setInt64(mMetricsHandle, kCodecLatencyMax, mLatencyHist.getMax());
        mediametrics_setInt64(mMetricsHandle, kCodecLatencyMin, mLatencyHist.getMin());
@@ -1111,7 +1131,7 @@ void MediaCodec::updateMediametrics() {
    if (mLatencyUnknown > 0) {
        mediametrics_setInt64(mMetricsHandle, kCodecLatencyUnknown, mLatencyUnknown);
    }
    int64_t playbackDurationSec = mPlaybackDurationAccumulator->getDurationInSeconds();
    int64_t playbackDurationSec = mPlaybackDurationAccumulator.getDurationInSeconds();
    if (playbackDurationSec > 0) {
        mediametrics_setInt64(mMetricsHandle, kCodecPlaybackDurationSec, playbackDurationSec);
    }
@@ -1436,25 +1456,33 @@ void MediaCodec::updateTunnelPeek(const sp<AMessage> &msg) {
    ALOGV("TunnelPeekState: %s -> %s", asString(previousState), asString(mTunnelPeekState));
}

void MediaCodec::updatePlaybackDuration(const sp<AMessage> &msg) {
void MediaCodec::processRenderedFrames(const sp<AMessage> &msg) {
    int what = 0;
    msg->findInt32("what", &what);
    if (msg->what() != kWhatCodecNotify && what != kWhatOutputFramesRendered) {
        static bool logged = false;
        if (!logged) {
            logged = true;
            ALOGE("updatePlaybackDuration: expected kWhatOuputFramesRendered (%d)", msg->what());
            ALOGE("processRenderedFrames: expected kWhatOutputFramesRendered (%d)", msg->what());
        }
        return;
    }
    // Playback duration only counts if the buffers are going to the screen.
    if (!mIsSurfaceToScreen) {
        return;
    }
    // Rendered frames only matter if they're being sent to the display
    if (mIsSurfaceToDisplay) {
        int64_t renderTimeNs;
    size_t index = 0;
    while (msg->findInt64(AStringPrintf("%zu-system-nano", index++).c_str(), &renderTimeNs)) {
        mPlaybackDurationAccumulator->processRenderTime(renderTimeNs);
        for (size_t index = 0;
            msg->findInt64(AStringPrintf("%zu-system-nano", index).c_str(), &renderTimeNs);
            index++) {
            // Capture metrics for playback duration
            mPlaybackDurationAccumulator.onFrameRendered(renderTimeNs);
            // Capture metrics for quality
            int64_t mediaTimeUs = 0;
            if (!msg->findInt64(AStringPrintf("%zu-media-time-us", index).c_str(), &mediaTimeUs)) {
                ALOGE("processRenderedFrames: no media time found");
                continue;
            }
            mVideoRenderQualityTracker.onFrameRendered(mediaTimeUs, renderTimeNs);
        }
    }
}

@@ -3964,7 +3992,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
                                asString(previousState),
                                asString(TunnelPeekState::kBufferRendered));
                    }
                    updatePlaybackDuration(msg);
                    processRenderedFrames(msg);
                    // check that we have a notification set
                    if (mOnFrameRenderedNotification != NULL) {
                        sp<AMessage> notify = mOnFrameRenderedNotification->dup();
@@ -4158,6 +4186,10 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
                              mState, stateString(mState).c_str());
                        break;
                    }

                    if (mIsSurfaceToDisplay) {
                        mVideoRenderQualityTracker.resetForDiscontinuity();
                    }
                    // Notify the RM that the codec has been stopped.
                    ClientConfigParcel clientConfig;
                    initClientConfigParcel(clientConfig);
@@ -4213,6 +4245,10 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
                        break;
                    }

                    if (mIsSurfaceToDisplay) {
                        mVideoRenderQualityTracker.resetForDiscontinuity();
                    }

                    if (mFlags & kFlagIsAsync) {
                        setState(FLUSHED);
                    } else {
@@ -5927,7 +5963,9 @@ status_t MediaCodec::onReleaseOutputBuffer(const sp<AMessage> &msg) {

        // If rendering to the screen, then schedule a time in the future to poll to see if this
        // frame was ever rendered to seed onFrameRendered callbacks.
        if (mIsSurfaceToScreen) {
        if (mIsSurfaceToDisplay) {
            noRenderTime ? mVideoRenderQualityTracker.onFrameReleased(mediaTimeUs)
                         : mVideoRenderQualityTracker.onFrameReleased(mediaTimeUs, renderTimeNs);
            // can't initialize this in the constructor because the Looper parent class needs to be
            // initialized first
            if (mMsgPollForRenderedBuffers == nullptr) {
@@ -5957,6 +5995,11 @@ status_t MediaCodec::onReleaseOutputBuffer(const sp<AMessage> &msg) {
            ALOGI("rendring output error %d", err);
        }
    } else {
        if (mIsSurfaceToDisplay) {
            int64_t mediaTimeUs = -1;
            buffer->meta()->findInt64("timeUs", &mediaTimeUs);
            mVideoRenderQualityTracker.onFrameSkipped(mediaTimeUs);
        }
        mBufferChannel->discardBuffer(buffer);
    }

@@ -6023,7 +6066,7 @@ status_t MediaCodec::connectToSurface(const sp<Surface> &surface) {

        // in case we don't connect, ensure that we don't signal the surface is
        // connected to the screen
        mIsSurfaceToScreen = false;
        mIsSurfaceToDisplay = false;

        err = nativeWindowConnect(surface.get(), "connectToSurface");
        if (err == OK) {
@@ -6053,7 +6096,7 @@ status_t MediaCodec::connectToSurface(const sp<Surface> &surface) {
            // keep track whether or not the buffers of the connected surface go to the screen
            int result = 0;
            surface->query(NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER, &result);
            mIsSurfaceToScreen = result != 0;
            mIsSurfaceToDisplay = result != 0;
        }
    }
    // do not return ALREADY_EXISTS unless surfaces are the same
@@ -6071,7 +6114,7 @@ status_t MediaCodec::disconnectFromSurface() {
        }
        // assume disconnected even on error
        mSurface.clear();
        mIsSurfaceToScreen = false;
        mIsSurfaceToDisplay = false;
    }
    return err;
}
+299 −0
Original line number Diff line number Diff line
/*
 * Copyright (C) 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "VideoRenderQualityTracker"
#include <utils/Log.h>

#include <media/stagefright/VideoRenderQualityTracker.h>

#include <assert.h>
#include <sys/time.h>

#include <cmath>
#include <cstdlib>

namespace android {

// Initializes all metrics to their "no data yet" state: zeroed counters and timestamps, and
// frame rates set to FRAME_RATE_UNDETERMINED until enough stable frame durations have been
// observed to compute them.
VideoRenderQualityMetrics::VideoRenderQualityMetrics() {
    firstFrameRenderTimeUs = 0;
    frameReleasedCount = 0;
    frameRenderedCount = 0;
    frameDroppedCount = 0;
    frameSkippedCount = 0;
    contentFrameRate = FRAME_RATE_UNDETERMINED;
    desiredFrameRate = FRAME_RATE_UNDETERMINED;
    actualFrameRate = FRAME_RATE_UNDETERMINED;
}

// Default tuning values for quality tracking. These govern how skipped frames are accounted,
// how strictly frame rates are detected, and how forward jumps in content time are classified
// (seek vs. frame drops on live content).
VideoRenderQualityTracker::Configuration::Configuration() {
    // Assume that the app is skipping frames because it's detected that the frame couldn't be
    // rendered in time.
    areSkippedFramesDropped = true;

    // 400ms is 8 frames at 20 frames per second and 24 frames at 60 frames per second
    maxExpectedContentFrameDurationUs = 400 * 1000;

    // Allow for 2 milliseconds of deviation when detecting frame rates
    frameRateDetectionToleranceUs = 2 * 1000;

    // Allow for a tolerance of 200 milliseconds for determining if we moved forward in content time
    // because of frame drops for live content, or because the user is seeking.
    contentTimeAdvancedForLiveContentToleranceUs = 200 * 1000;
}

// Constructs a tracker using the default Configuration tuning values, starting from a clean
// (post-discontinuity) tracking state.
VideoRenderQualityTracker::VideoRenderQualityTracker() :
        mConfiguration() {
    resetForDiscontinuity();
}

// Constructs a tracker with a caller-supplied configuration, starting from a clean
// (post-discontinuity) tracking state.
VideoRenderQualityTracker::VideoRenderQualityTracker(const Configuration &configuration) :
        mConfiguration(configuration) {
    resetForDiscontinuity();
}

// Records that the app skipped (never queued for render) the frame at contentTimeUs.
void VideoRenderQualityTracker::onFrameSkipped(int64_t contentTimeUs) {
    // Only track skips once something has actually been rendered (mLastRenderTimeUs is set by
    // onFrameRendered). Skips before the first rendered frame are likely the app seeking to a
    // starting point that is not a key frame, so they are not quality events.
    //
    // Skips are buffered rather than folded into the metrics immediately: a run of skips at the
    // end of playback usually means the app is terminating playback, which should not count
    // against quality. The pending list is processed only when a later frame is rendered.
    const bool renderingHasStarted = (mLastRenderTimeUs != -1);
    if (renderingHasStarted) {
        mPendingSkippedFrameContentTimeUsList.push_back(contentTimeUs);
    }
}

void VideoRenderQualityTracker::onFrameReleased(int64_t contentTimeUs) {
    onFrameReleased(contentTimeUs, nowUs() * 1000);
}

// Records a frame released by the codec for rendering, along with the time the app wants it
// displayed. The frame is remembered so onFrameRendered() can later match it up and classify
// any intervening frames as dropped.
void VideoRenderQualityTracker::onFrameReleased(int64_t contentTimeUs,
                                                int64_t desiredRenderTimeNs) {
    const int64_t desiredRenderTimeUs = desiredRenderTimeNs / 1000;
    // A jump in content time (e.g. a seek) invalidates all in-flight tracking state, so check
    // for that before accounting for this frame.
    resetIfDiscontinuity(contentTimeUs, desiredRenderTimeUs);
    ++mMetrics.frameReleasedCount;
    mNextExpectedRenderedFrameQueue.push({contentTimeUs, desiredRenderTimeUs});
    mLastContentTimeUs = contentTimeUs;
}

// Called when a frame was actually presented to the display at actualRenderTimeNs.
//
// Matches the rendered frame against the queue of released frames: any released frames with
// earlier content times that never made it to the display are counted as dropped. Also flushes
// the pending skipped-frame list, since a subsequent render proves the app was not skipping
// frames to terminate playback.
void VideoRenderQualityTracker::onFrameRendered(int64_t contentTimeUs, int64_t actualRenderTimeNs) {
    int64_t actualRenderTimeUs = actualRenderTimeNs / 1000;

    // Now that a frame has been rendered, the previously skipped frames can be processed as skipped
    // frames since the app is not skipping them to terminate playback.
    for (int64_t contentTimeUs : mPendingSkippedFrameContentTimeUsList) {
        processMetricsForSkippedFrame(contentTimeUs);
    }
    mPendingSkippedFrameContentTimeUsList = {};

    // Drain released frames up to the one that was rendered; everything popped before the match
    // was released but never rendered, i.e. dropped.
    static const FrameInfo noFrame = {-1, -1};
    FrameInfo nextExpectedFrame = noFrame;
    while (!mNextExpectedRenderedFrameQueue.empty()) {
        nextExpectedFrame = mNextExpectedRenderedFrameQueue.front();
        mNextExpectedRenderedFrameQueue.pop();
        // Happy path - the rendered frame is what we expected it to be
        if (contentTimeUs == nextExpectedFrame.contentTimeUs) {
            break;
        }
        // This isn't really supposed to happen - the next rendered frame should be the expected
        // frame, or, if there's frame drops, it will be a frame later in the content stream
        if (contentTimeUs < nextExpectedFrame.contentTimeUs) {
            ALOGW("Rendered frame is earlier than the next expected frame (%lld, %lld)",
                  (long long) contentTimeUs, (long long) nextExpectedFrame.contentTimeUs);
            break;
        }
        processMetricsForDroppedFrame(nextExpectedFrame.contentTimeUs,
                                      nextExpectedFrame.desiredRenderTimeUs);
    }
    // NOTE(review): if the queue drains without ever matching contentTimeUs, the last dequeued
    // frame was already counted as dropped above and is then passed here as the rendered frame,
    // so it contributes to both counts - confirm whether that is intended. If the queue was
    // empty to begin with, noFrame (-1) is passed and only frameRenderedCount is incremented.
    processMetricsForRenderedFrame(nextExpectedFrame.contentTimeUs,
                                   nextExpectedFrame.desiredRenderTimeUs, actualRenderTimeUs);
    mLastRenderTimeUs = actualRenderTimeUs;
}

// Returns a read-only view of the metrics accumulated so far. The reference remains owned by
// this tracker and is valid for the tracker's lifetime.
const VideoRenderQualityMetrics &VideoRenderQualityTracker::getMetrics() const {
    return mMetrics;
}

// Clears all per-session tracking state after a playback discontinuity (seek, flush, stop).
// Frame timing across such a boundary is meaningless for quality metrics. The accumulated
// counters in mMetrics are deliberately preserved.
void VideoRenderQualityTracker::resetForDiscontinuity() {
    mLastContentTimeUs = -1;
    mLastRenderTimeUs = -1;

    // Don't worry about tracking frame rendering times from now up until playback catches up to the
    // discontinuity. While stuttering or freezing could be found in the next few frames, the impact
    // to the user is minimal, so better to just keep things simple and don't bother.
    mNextExpectedRenderedFrameQueue = {};

    // Ignore any frames that were skipped just prior to the discontinuity.
    mPendingSkippedFrameContentTimeUsList = {};

    // All frame durations can be now ignored since all bets are off now on what the render
    // durations should be after the discontinuity.
    // NOTE(review): priorTimestampUs inside each FrameDurationUs is not reset here, so the first
    // duration computed after the discontinuity spans the jump itself - confirm whether that
    // timestamp should also be invalidated.
    for (int i = 0; i < FrameDurationUs::SIZE; ++i) {
        mActualFrameDurationUs[i] = -1;
        mDesiredFrameDurationUs[i] = -1;
        mContentFrameDurationUs[i] = -1;
    }
}

// Detects playback discontinuities (first frame, backward jumps from seeking, large forward
// jumps in content time) and resets the tracking state when one is found.
//
// Returns true if a discontinuity was detected and state was reset, false otherwise.
//
// A large forward jump is NOT treated as a discontinuity when the desired render time advanced
// by roughly the same amount, since that pattern indicates frame drops on live content rather
// than a seek (see the inline comment below).
bool VideoRenderQualityTracker::resetIfDiscontinuity(int64_t contentTimeUs,
                                                     int64_t desiredRenderTimeUs) {
    // First frame released since construction or the last reset.
    if (mLastContentTimeUs == -1) {
        resetForDiscontinuity();
        return true;
    }
    // Content time moved backwards - the user seeked backwards.
    if (contentTimeUs < mLastContentTimeUs) {
        ALOGI("Video playback jumped %d ms backwards in content time (%d -> %d)",
              int((mLastContentTimeUs - contentTimeUs) / 1000), int(mLastContentTimeUs / 1000),
              int(contentTimeUs / 1000));
        resetForDiscontinuity();
        return true;
    }
    if (contentTimeUs - mLastContentTimeUs > mConfiguration.maxExpectedContentFrameDurationUs) {
        // The content frame duration could be long due to frame drops for live content. This can be
        // detected by looking at the app's desired rendering duration. If the app's rendered frame
        // duration is roughly the same as the content's frame duration, then it is assumed that
        // the forward discontinuity is due to frame drops for live content. A false positive can
        // occur if the time the user spends seeking is equal to the duration of the seek. This is
        // very unlikely to occur in practice but CAN occur - the user starts seeking forward, gets
        // distracted, and then returns to seeking forward.
        const int64_t contentFrameDurationUs = contentTimeUs - mLastContentTimeUs;
        const int64_t desiredFrameDurationUs = desiredRenderTimeUs - mLastRenderTimeUs;
        // std::abs is used so the int64_t difference is not truncated through the C library's
        // int-based abs(), which would misclassify jumps larger than ~35 minutes.
        const bool skippedForwardDueToLiveContentFrameDrops =
                std::abs(contentFrameDurationUs - desiredFrameDurationUs) <
                mConfiguration.contentTimeAdvancedForLiveContentToleranceUs;
        if (!skippedForwardDueToLiveContentFrameDrops) {
            ALOGI("Video playback jumped %d ms forward in content time (%d -> %d) ",
                int((contentTimeUs - mLastContentTimeUs) / 1000), int(mLastContentTimeUs / 1000),
                int(contentTimeUs / 1000));
            resetForDiscontinuity();
            return true;
        }
    }
    return false;
}

// Folds one skipped frame into the metrics. When areSkippedFramesDropped is set, the skip is
// additionally accounted as a drop (apps typically skip frames they know cannot be rendered
// on time), so such a frame increments both frameSkippedCount and frameDroppedCount.
void VideoRenderQualityTracker::processMetricsForSkippedFrame(int64_t contentTimeUs) {
    mMetrics.frameSkippedCount++;
    if (mConfiguration.areSkippedFramesDropped) {
        processMetricsForDroppedFrame(contentTimeUs, -1);
        return;
    }
    // The frame never reached the display: only the content timeline advances; the desired and
    // actual render histories get "no duration" (-1) entries.
    updateFrameDurations(mContentFrameDurationUs, contentTimeUs);
    updateFrameDurations(mDesiredFrameDurationUs, -1);
    updateFrameDurations(mActualFrameDurationUs, -1);
    updateFrameRate(mMetrics.contentFrameRate, mContentFrameDurationUs, mConfiguration);
}

// Folds one dropped frame into the metrics. The content and desired timelines advance, but the
// actual render history gets a "no duration" (-1) entry since the frame never reached the
// display. desiredRenderTimeUs may be -1 when unknown (e.g. skipped frames counted as drops).
void VideoRenderQualityTracker::processMetricsForDroppedFrame(int64_t contentTimeUs,
                                                              int64_t desiredRenderTimeUs) {
    mMetrics.frameDroppedCount++;
    updateFrameDurations(mContentFrameDurationUs, contentTimeUs);
    updateFrameDurations(mDesiredFrameDurationUs, desiredRenderTimeUs);
    updateFrameDurations(mActualFrameDurationUs, -1);
    updateFrameRate(mMetrics.contentFrameRate, mContentFrameDurationUs, mConfiguration);
    updateFrameRate(mMetrics.desiredFrameRate, mDesiredFrameDurationUs, mConfiguration);
}

// Folds one successfully rendered frame into the metrics, advancing all three timelines
// (content, desired, actual) and re-evaluating the corresponding frame rates.
void VideoRenderQualityTracker::processMetricsForRenderedFrame(int64_t contentTimeUs,
                                                               int64_t desiredRenderTimeUs,
                                                               int64_t actualRenderTimeUs) {
    // Capture the timestamp at which the first frame was rendered
    if (mMetrics.firstFrameRenderTimeUs == 0) {
        mMetrics.firstFrameRenderTimeUs = actualRenderTimeUs;
    }

    mMetrics.frameRenderedCount++;
    // The content time is -1 when it was rendered after a discontinuity (e.g. seek) was detected.
    // So, even though a frame was rendered, its impact on the user is insignificant, so don't do
    // anything other than count it as a rendered frame.
    if (contentTimeUs == -1) {
        return;
    }
    updateFrameDurations(mContentFrameDurationUs, contentTimeUs);
    updateFrameDurations(mDesiredFrameDurationUs, desiredRenderTimeUs);
    updateFrameDurations(mActualFrameDurationUs, actualRenderTimeUs);
    updateFrameRate(mMetrics.contentFrameRate, mContentFrameDurationUs, mConfiguration);
    updateFrameRate(mMetrics.desiredFrameRate, mDesiredFrameDurationUs, mConfiguration);
    updateFrameRate(mMetrics.actualFrameRate, mActualFrameDurationUs, mConfiguration);
}

// Returns the current CLOCK_MONOTONIC time in microseconds. Used as a fallback desired render
// time when the caller does not supply one.
int64_t VideoRenderQualityTracker::nowUs() {
    struct timespec ts = {0, 0};
    clock_gettime(CLOCK_MONOTONIC, &ts);
    // Fold seconds and nanoseconds directly into microseconds (both fields are non-negative for
    // the monotonic clock, so truncation matches the original sum-then-divide form).
    return int64_t(ts.tv_sec) * 1000000LL + ts.tv_nsec / 1000;
}

// Shifts the given frame-duration history by one slot and records the duration implied by
// newTimestampUs. durationUs[0] always holds the most recent duration; -1 in any slot means
// "no valid duration" - either no prior timestamp existed or the frame in that slot never
// produced a timestamp (callers pass newTimestampUs == -1 for skipped/dropped frames).
//
// When newTimestampUs is -1, priorTimestampUs is left untouched, so the next valid timestamp
// yields a duration spanning the skipped/dropped frames - presumably intentional; confirm.
void VideoRenderQualityTracker::updateFrameDurations(FrameDurationUs &durationUs,
                                                     int64_t newTimestampUs) {
    // Age the history by one slot, discarding the oldest entry.
    for (int i = FrameDurationUs::SIZE - 1; i > 0; --i ) {
        durationUs[i] = durationUs[i - 1];
    }
    if (newTimestampUs == -1) {
        durationUs[0] = -1;
    } else {
        durationUs[0] = durationUs.priorTimestampUs == -1 ? -1 :
                newTimestampUs - durationUs.priorTimestampUs;
        durationUs.priorTimestampUs = newTimestampUs;
    }
}

void VideoRenderQualityTracker::updateFrameRate(float &frameRate, const FrameDurationUs &durationUs,
                                                const Configuration &c) {
    float newFrameRate = detectFrameRate(durationUs, c);
    if (newFrameRate != FRAME_RATE_UNDETERMINED) {
        frameRate = newFrameRate;
    }
}

// Computes a frame rate (frames per second) from the tracked duration history, or
// FRAME_RATE_UNDETERMINED if the recent durations are too unstable to infer one. Besides
// constant-rate content, this also recognizes 24fps content presented with 3:2 pulldown,
// whose alternating durations would otherwise look unstable.
float VideoRenderQualityTracker::detectFrameRate(const FrameDurationUs &durationUs,
                                                 const Configuration &c) {
    // At least 3 frames are necessary to detect stable frame rates; SIZE is a compile-time
    // constant, so enforce this at compile time rather than with a runtime assert.
    static_assert(FrameDurationUs::SIZE >= 3);
    if (durationUs[0] == -1 || durationUs[1] == -1 || durationUs[2] == -1) {
        return FRAME_RATE_UNDETERMINED;
    }
    // Only determine frame rate if the render durations are stable across 3 frames. std::abs
    // keeps the int64_t differences from being truncated through the C library's int-based abs().
    if (std::abs(durationUs[0] - durationUs[1]) > c.frameRateDetectionToleranceUs ||
        std::abs(durationUs[0] - durationUs[2]) > c.frameRateDetectionToleranceUs) {
        return is32pulldown(durationUs, c) ? FRAME_RATE_24HZ_3_2_PULLDOWN : FRAME_RATE_UNDETERMINED;
    }
    return 1000.0 * 1000.0 / durationUs[0];
}

// Returns true if the last 5 frame durations match a 3:2 pulldown cadence: 24fps content on a
// 60Hz display alternates between holding a frame for 2 vsyncs (~33.3ms) and 3 vsyncs (50ms).
bool VideoRenderQualityTracker::is32pulldown(const FrameDurationUs &durationUs,
                                             const Configuration &c) {
    // At least 5 frames are necessary to detect stable 3:2 pulldown; SIZE is a compile-time
    // constant, so enforce this at compile time rather than with a runtime assert.
    static_assert(FrameDurationUs::SIZE >= 5);
    if (durationUs[0] == -1 || durationUs[1] == -1 || durationUs[2] == -1 || durationUs[3] == -1 ||
        durationUs[4] == -1) {
        return false;
    }
    // 3:2 pulldown expects that every other frame has identical duration... (std::abs keeps the
    // int64_t differences from being truncated through the C library's int-based abs())
    if (std::abs(durationUs[0] - durationUs[2]) > c.frameRateDetectionToleranceUs ||
        std::abs(durationUs[1] - durationUs[3]) > c.frameRateDetectionToleranceUs ||
        std::abs(durationUs[0] - durationUs[4]) > c.frameRateDetectionToleranceUs) {
        return false;
    }
    // ... for either 2 vsyncs (~33.3ms) or 3 vsyncs (50ms) on a 60Hz display, in either phase
    if ((std::abs(durationUs[0] - 33333) < c.frameRateDetectionToleranceUs &&
         std::abs(durationUs[1] - 50000) < c.frameRateDetectionToleranceUs) ||
        (std::abs(durationUs[0] - 50000) < c.frameRateDetectionToleranceUs &&
         std::abs(durationUs[1] - 33333) < c.frameRateDetectionToleranceUs)) {
        return true;
    }
    return false;
}

} // namespace android
+6 −4
Original line number Diff line number Diff line
@@ -30,6 +30,8 @@
#include <media/stagefright/foundation/AHandler.h>
#include <media/stagefright/CodecErrorLog.h>
#include <media/stagefright/FrameRenderTracker.h>
#include <media/stagefright/PlaybackDurationAccumulator.h>
#include <media/stagefright/VideoRenderQualityTracker.h>
#include <utils/Vector.h>

class C2Buffer;
@@ -63,7 +65,6 @@ class IMemory;
struct PersistentSurface;
class SoftwareRenderer;
class Surface;
class PlaybackDurationAccumulator;
namespace hardware {
namespace cas {
namespace native {
@@ -459,7 +460,7 @@ private:
    void onGetMetrics(const sp<AMessage>& msg);
    constexpr const char *asString(TunnelPeekState state, const char *default_string="?");
    void updateTunnelPeek(const sp<AMessage> &msg);
    void updatePlaybackDuration(const sp<AMessage> &msg);
    void processRenderedFrames(const sp<AMessage> &msg);

    inline void initClientConfigParcel(ClientConfigParcel& clientConfig);

@@ -569,8 +570,9 @@ private:
    sp<CryptoAsync> mCryptoAsync;
    sp<ALooper> mCryptoLooper;

    std::unique_ptr<PlaybackDurationAccumulator> mPlaybackDurationAccumulator;
    bool mIsSurfaceToScreen;
    bool mIsSurfaceToDisplay;
    PlaybackDurationAccumulator mPlaybackDurationAccumulator;
    VideoRenderQualityTracker mVideoRenderQualityTracker;

    MediaCodec(
            const sp<ALooper> &looper, pid_t pid, uid_t uid,
+3 −3
Original line number Diff line number Diff line
@@ -33,7 +33,7 @@ public:
    }

    // Process a render time expressed in nanoseconds.
    void processRenderTime(int64_t newRenderTimeNs) {
    void onFrameRendered(int64_t newRenderTimeNs) {
        // If we detect wrap-around or out of order frames, just ignore the duration for this
        // and the next frame.
        if (newRenderTimeNs < mPreviousRenderTimeNs) {
@@ -59,7 +59,7 @@ private:
    int64_t mPreviousRenderTimeNs;
};

}
} // android

#endif
#endif // PLAYBACK_DURATION_ACCUMULATOR_H_
Loading