Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 22d99c5f authored by Steve Kondik's avatar Steve Kondik
Browse files

Revert "frameworks/base: Add rendering statistics to Stagefright"

This reverts commit 035102c5.

Dear Adobe:

Please play by the rules and don't abuse internal parts of Android that
aren't meant to be used by external applications.

This patch breaks compatibility with the Flash native code, which
calls into Stagefright directly. Will revisit after 6.1 because this is
useful information for developers.
parent 7a1c9db1
Loading
Loading
Loading
Loading
+0 −3
Original line number Diff line number Diff line
@@ -45,9 +45,6 @@ struct MediaSource : public RefBase {
    // Returns the format of the data output by this media source.
    virtual sp<MetaData> getFormat() = 0;

    //Logs statistics profiling
    virtual void logTrackStatistics() {};

    struct ReadOptions;

    // Returns a new buffer of data. Call blocks until a
+1 −129
Original line number Diff line number Diff line
@@ -17,7 +17,6 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "AwesomePlayer"
#include <utils/Log.h>
#include <cutils/properties.h>

#include <dlfcn.h>

@@ -188,14 +187,6 @@ AwesomePlayer::AwesomePlayer()
      mExtractorFlags(0),
      mLastVideoBuffer(NULL),
      mVideoBuffer(NULL),
      mFramesDropped(0),
      mConsecutiveFramesDropped(0),
      mCatchupTimeStart(0),
      mNumTimesSyncLoss(0),
      mMaxEarlyDelta(0),
      mMaxLateDelta(0),
      mMaxTimeSyncLoss(0),
      mTotalFrames(0),
      mSuspensionState(NULL) {
    CHECK_EQ(mClient.connect(), OK);

@@ -213,12 +204,6 @@ AwesomePlayer::AwesomePlayer()

    mAudioStatusEventPending = false;

    //for statistics profiling
    char value[PROPERTY_VALUE_MAX];
    mStatistics = false;
    property_get("persist.debug.sf.statistics", value, "0");
    if(atoi(value)) mStatistics = true;

    reset();
}

@@ -566,11 +551,6 @@ status_t AwesomePlayer::play_l() {
        mTimeSource = new SystemTimeSource;
    }

    if (mStatistics) {
        mFirstFrameLatencyStartUs = getTimeOfDayUs();
        mVeryFirstFrame = true;
    }

    if (mVideoSource != NULL) {
        // Kick off video playback
        postVideoEvent_l();
@@ -651,7 +631,6 @@ status_t AwesomePlayer::pause_l() {

    mFlags &= ~PLAYING;

    if(mStatistics && !(mFlags & AT_EOS)) logPause();
    return OK;
}

@@ -724,7 +703,6 @@ status_t AwesomePlayer::seekTo(int64_t timeUs) {

status_t AwesomePlayer::seekTo_l(int64_t timeUs) {
    mSeeking = true;
    if (mStatistics) mFirstFrameLatencyStartUs = getTimeOfDayUs();
    mSeekNotificationSent = false;
    mSeekTimeUs = timeUs;
    mFlags &= ~AT_EOS;
@@ -941,14 +919,12 @@ void AwesomePlayer::onVideoEvent() {
        mFlags |= FIRST_FRAME;
        mSeeking = false;
        mSeekNotificationSent = false;
        if (mStatistics) logSeek();
    }

    if (mFlags & FIRST_FRAME) {
        mFlags &= ~FIRST_FRAME;

        mTimeSourceDeltaUs = mTimeSource->getRealTimeUs() - timeUs;
        if (mStatistics && mVeryFirstFrame) logFirstFrame();
    }

    int64_t realTimeUs, mediaTimeUs;
@@ -968,24 +944,13 @@ void AwesomePlayer::onVideoEvent() {
        mVideoBuffer->release();
        mVideoBuffer = NULL;

        if (mStatistics) {
            mFramesDropped++;
            mConsecutiveFramesDropped++;
            if (mConsecutiveFramesDropped == 1) {
                mCatchupTimeStart = mTimeSource->getRealTimeUs();
            }
            if (!(mFlags & AT_EOS)) logLate(timeUs,nowUs,latenessUs);
        }
        postVideoEvent_l();
        return;
    }

    if (latenessUs < -10000) {
        // We're more than 10ms early.
        if (mStatistics) {
            logOnTime(timeUs,nowUs,latenessUs);
            mConsecutiveFramesDropped = 0;
        }

        postVideoEvent_l(10000);
        return;
    }
@@ -998,11 +963,6 @@ void AwesomePlayer::onVideoEvent() {

    if (mVideoRenderer != NULL) {
        mVideoRenderer->render(mVideoBuffer);
        if (mStatistics) {
            mTotalFrames++;
            logOnTime(timeUs,nowUs,latenessUs);
            mConsecutiveFramesDropped = 0;
        }
    }

    if (mLastVideoBuffer) {
@@ -1301,10 +1261,6 @@ void AwesomePlayer::onPrepareAsyncEvent() {
status_t AwesomePlayer::suspend() {
    LOGV("suspend");
    Mutex::Autolock autoLock(mLock);
    if (mStatistics) {
        logStatistics();
        logSyncLoss();
    }

    if (mSuspensionState != NULL) {
        if (mLastVideoBuffer == NULL) {
@@ -1434,89 +1390,5 @@ uint32_t AwesomePlayer::flags() const {
    return mExtractorFlags;
}


//Statistics profiling
// Dumps clip-level playback statistics to the log: container mime type,
// clip duration, per-track stats (delegated to the video track), and the
// dropped/rendered frame counters accumulated in onVideoEvent().
// Called from suspend() when persist.debug.sf.statistics is enabled.
void AwesomePlayer::logStatistics() {
    String8 mimeType;
    float confidence;
    // Re-sniff the source to recover the container mime type; confidence is
    // required by the sniff() signature but unused here.
    if (mFileSource!=NULL) mFileSource->sniff(&mimeType, &confidence);
    LOGW("=====================================================");
    LOGW("MimeType: %s",mimeType.string());
    LOGW("Clip duration: %lld ms",mDurationUs/1000);
    // Track-specific sample counts come from MediaSource::logTrackStatistics().
    if (mVideoTrack!=NULL) mVideoTrack->logTrackStatistics();
    LOGW("Number of frames dropped: %lu",mFramesDropped);
    LOGW("Number of frames rendered: %lu",mTotalFrames);
    LOGW("=====================================================");
}

// Logs the time from play_l() (where mFirstFrameLatencyStartUs is stamped)
// to the first rendered frame, then clears mVeryFirstFrame so this only
// fires once per playback session.
inline void AwesomePlayer::logFirstFrame() {
    LOGW("=====================================================");
    LOGW("First frame latency: %lld ms",(getTimeOfDayUs()-mFirstFrameLatencyStartUs)/1000);
    LOGW("=====================================================");
    mVeryFirstFrame = false;
}

// Logs the seek target position and the seek latency — the elapsed time
// since seekTo_l() stamped mFirstFrameLatencyStartUs.  Called from
// onVideoEvent() once the seek completes.
inline void AwesomePlayer::logSeek() {
    LOGW("=====================================================");
    LOGW("Seek position: %lld ms",mSeekTimeUs/1000);
    LOGW("Seek latency: %lld ms",(getTimeOfDayUs()-mFirstFrameLatencyStartUs)/1000);
    LOGW("=====================================================");
}

// Logs the current video position when playback is paused.  Called from
// pause_l() only when statistics are enabled and we are not at EOS.
inline void AwesomePlayer::logPause() {
    LOGW("=====================================================");
    LOGW("Pause position: %lld ms",mVideoTimeUs/1000);
    LOGW("=====================================================");
}

// Called (via logOnTime) for the first on-time frame after a run of dropped
// frames: logs how many consecutive frames were dropped, bumps the sync-loss
// counter, and tracks the longest catch-up interval seen.
// ts/clock/delta are in microseconds; mCatchupTimeStart was stamped in
// onVideoEvent() at the first dropped frame of the run.
inline void AwesomePlayer::logCatchUp(int64_t ts, int64_t clock, int64_t delta)
{
    if (mConsecutiveFramesDropped > 0) {
        LOGW("Frames dropped before catching up: %lu, Timestamp: %lld, Clock: %lld, Delta: %lld", mConsecutiveFramesDropped, ts/1000, clock/1000, delta/1000);
        mNumTimesSyncLoss++;
        // clock > 0 && ts > 0 guards against invalid/unset timestamps.
        // NOTE(review): mMaxTimeSyncLoss and mCatchupTimeStart are uint32_t
        // while 'clock' is int64_t — the assignment truncates to 32 bits;
        // confirm the expected value ranges fit.
        if (mMaxTimeSyncLoss < (clock - mCatchupTimeStart) && clock > 0 && ts > 0) {
            mMaxTimeSyncLoss = clock - mCatchupTimeStart;
        }
    }
}

// Logs a frame that is running behind the clock and remembers the worst
// (largest) lateness observed, in microseconds.
inline void AwesomePlayer::logLate(int64_t mediaTs, int64_t wallClock, int64_t lateness)
{
    LOGW("Video behind. Timestamp: %lld, Clock: %lld, Delta: %lld", mediaTs / 1000, wallClock / 1000, lateness / 1000);

    // Only track the maximum when both timestamps look valid.
    const bool timesValid = (wallClock > 0) && (mediaTs > 0);
    if (timesValid && mMaxLateDelta < lateness) {
        mMaxLateDelta = lateness;
    }
}

// Per-frame A/V sync bookkeeping for rendered (non-dropped) frames.
// delta <= 0 means the frame is on time or early (see the latenessUs
// handling in onVideoEvent()); delta > 0 means it is late.
inline void AwesomePlayer::logOnTime(int64_t ts, int64_t clock, int64_t delta)
{
    // If we were in a dropped-frame run, record that we have caught up.
    logCatchUp(ts, clock, delta);
    if (delta <= 0) {
        LOGW("Video ahead. Timestamp: %lld, Clock: %lld, Delta: %lld",ts/1000,clock/1000,-delta/1000);
        // Track the largest "early" magnitude.  NOTE(review): mMaxEarlyDelta
        // is uint32_t and is assigned the negative delta directly, relying on
        // unsigned wrap-around so that -mMaxEarlyDelta elsewhere recovers the
        // magnitude — fragile on LP64; confirm intended.
        if ((-delta) > (-mMaxEarlyDelta) && clock > 0 && ts > 0) {
            mMaxEarlyDelta = delta;
        }
    }
    else logLate(ts, clock, delta);
}

// Dumps the accumulated A/V sync-loss counters (maintained by logOnTime,
// logLate and logCatchUp).  Called from suspend().
// NOTE(review): mMaxEarlyDelta holds a negated value via unsigned
// wrap-around (see logOnTime), so -mMaxEarlyDelta here prints its
// magnitude — confirm behavior on 64-bit builds.
void AwesomePlayer::logSyncLoss()
{
    LOGW("=====================================================");
    LOGW("Number of times AV Sync Losses = %lu", mNumTimesSyncLoss);
    LOGW("Max Video Ahead time delta = %lu", -mMaxEarlyDelta/1000);
    LOGW("Max Video Behind time delta = %lu", mMaxLateDelta/1000);
    LOGW("Max Time sync loss = %lu",mMaxTimeSyncLoss/1000);
    LOGW("=====================================================");

}

// Returns the current wall-clock time in microseconds, based on
// gettimeofday(2).
inline int64_t AwesomePlayer::getTimeOfDayUs() {
    struct timeval now;
    gettimeofday(&now, NULL);

    // Widen seconds before scaling to avoid 32-bit overflow.
    const int64_t seconds = now.tv_sec;
    return seconds * 1000000 + now.tv_usec;
}

}  // namespace android
+1 −48
Original line number Diff line number Diff line
@@ -38,8 +38,6 @@
#include <media/stagefright/Utils.h>
#include <utils/String8.h>

#include <cutils/properties.h>

namespace android {

class MPEG4Source : public MediaSource {
@@ -58,8 +56,6 @@ public:
    virtual status_t read(
            MediaBuffer **buffer, const ReadOptions *options = NULL);

    virtual void logTrackStatistics();

protected:
    virtual ~MPEG4Source();

@@ -85,11 +81,6 @@ private:

    uint8_t *mSrcBuffer;

    //For statistics profiling
    uint32_t mNumSamplesReadError;
    bool mStatistics;
    void logExpectedFrames();

    size_t parseNALSize(const uint8_t *data) const;

    MPEG4Source(const MPEG4Source &);
@@ -1358,19 +1349,11 @@ MPEG4Source::MPEG4Source(
      mGroup(NULL),
      mBuffer(NULL),
      mWantsNALFragments(false),
      mSrcBuffer(NULL),
      mNumSamplesReadError(0) {
      mSrcBuffer(NULL) {
    const char *mime;
    bool success = mFormat->findCString(kKeyMIMEType, &mime);
    CHECK(success);


    //for statistics profiling
    char value[PROPERTY_VALUE_MAX];
    mStatistics = false;
    property_get("persist.debug.sf.statistics", value, "0");
    if(atoi(value)) mStatistics = true;

    mIsAVC = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC);

    if (mIsAVC) {
@@ -1387,7 +1370,6 @@ MPEG4Source::MPEG4Source(
        // The number of bytes used to encode the length of a NAL unit.
        mNALLengthSize = 1 + (ptr[4] & 3);
    }
    if (mStatistics) logExpectedFrames();
}

MPEG4Source::~MPEG4Source() {
@@ -1518,7 +1500,6 @@ status_t MPEG4Source::read(
                    mCurrentSampleIndex, &offset, &size, &dts);

        if (err != OK) {
            if (mStatistics) mNumSamplesReadError++;
            return err;
        }

@@ -1526,7 +1507,6 @@ status_t MPEG4Source::read(

        if (err != OK) {
            CHECK_EQ(mBuffer, NULL);
            if (mStatistics) mNumSamplesReadError++;
            return err;
        }
    }
@@ -1540,7 +1520,6 @@ status_t MPEG4Source::read(
                mBuffer->release();
                mBuffer = NULL;

                if (mStatistics) mNumSamplesReadError++;
                return ERROR_IO;
            }

@@ -1574,7 +1553,6 @@ status_t MPEG4Source::read(
            mBuffer->release();
            mBuffer = NULL;

            if (mStatistics) mNumSamplesReadError++;
            return ERROR_MALFORMED;
        }

@@ -1606,7 +1584,6 @@ status_t MPEG4Source::read(
            mBuffer->release();
            mBuffer = NULL;

            if (mStatistics) mNumSamplesReadError++;
            return ERROR_IO;
        }

@@ -1623,7 +1600,6 @@ status_t MPEG4Source::read(
                mBuffer->release();
                mBuffer = NULL;

                if (mStatistics) mNumSamplesReadError++;
                return ERROR_MALFORMED;
            }

@@ -1657,29 +1633,6 @@ status_t MPEG4Source::read(
    }
}

// Statistics-profiling hook (overrides MediaSource::logTrackStatistics):
// dumps total, sync (key) and corrupt sample counts for this track.
void MPEG4Source::logTrackStatistics()
{
    LOGW("Total number of samples in track: %lu",mSampleTable->countSamples());
    LOGW("Number of key samples: %lu",mSampleTable->getNumSyncSamples());
    // mNumSamplesReadError also counts the final read() failure at end of
    // stream, so one error is subtracted when any were recorded.
    LOGW("Number of corrupt samples: %lu",mNumSamplesReadError ?
           mNumSamplesReadError-1 : mNumSamplesReadError); //last sample reads error for EOS
}


// Logs the track's mime type, duration and expected frame rate.  Called
// from the MPEG4Source constructor when statistics profiling is enabled.
void MPEG4Source::logExpectedFrames()
{
    // findCString()/findInt64() leave their out-params untouched on a
    // missing key, so initialize them — the original read 'mime' and
    // 'durationUs' uninitialized when the metadata lookup failed.
    const char *mime = "unknown";
    mFormat->findCString(kKeyMIMEType, &mime);
    int64_t durationUs = 0;
    getFormat()->findInt64(kKeyDuration, &durationUs);
    // Hoist the sample count: countSamples() was called twice before.
    const uint32_t numSamples = mSampleTable->countSamples();
    LOGW("=====================================================");
    LOGW("Mime type: %s",mime);
    LOGW("Track duration: %lld",durationUs/1000);
    LOGW("Total number of samples in track: %lu",numSamples);
    // Guard the fps computation against a zero/unknown duration, which
    // previously produced a float division by zero (inf/NaN in the log).
    LOGW("Expected frames per second: %.2f",
         durationUs > 0 ? ((float)numSamples*1000)/((float)durationUs/1000) : 0.0f);
    LOGW("=====================================================");
}

bool SniffMPEG4(
        const sp<DataSource> &source, String8 *mimeType, float *confidence) {
    uint8_t header[8];
+0 −5
Original line number Diff line number Diff line
@@ -471,10 +471,5 @@ status_t SampleTable::getMetaDataForSample(
    return OK;
}

// Accessor for the cached sync-sample count (mNumSyncSamples).
// NOTE(review): presumably populated while parsing the sample table's
// sync-sample data — confirm against SampleTable's parsing code.
uint32_t SampleTable::getNumSyncSamples()
{
    return mNumSyncSamples;
}

}  // namespace android
+0 −24
Original line number Diff line number Diff line
@@ -169,19 +169,6 @@ private:
    sp<Prefetcher> mPrefetcher;
    sp<HTTPDataSource> mConnectingDataSource;

    //Statistics profiling
    uint32_t mFramesDropped;
    uint32_t mConsecutiveFramesDropped;
    uint32_t mCatchupTimeStart;
    uint32_t mNumTimesSyncLoss;
    uint32_t mMaxEarlyDelta;
    uint32_t mMaxLateDelta;
    uint32_t mMaxTimeSyncLoss;
    uint32_t mTotalFrames;
    int64_t mFirstFrameLatencyStartUs; //first frame latency start
    bool mVeryFirstFrame;
    bool mStatistics;

    struct SuspensionState {
        String8 mUri;
        KeyedVector<String8, String8> mUriHeaders;
@@ -242,17 +229,6 @@ private:

    static bool ContinuePreparation(void *cookie);

    //Statistics Profiling
    void logStatistics();
    void logFirstFrame();
    void logSeek();
    void logPause();
    void logCatchUp(int64_t ts, int64_t clock, int64_t delta);
    void logLate(int64_t ts, int64_t clock, int64_t delta);
    void logOnTime(int64_t ts, int64_t clock, int64_t delta);
    void logSyncLoss();
    int64_t getTimeOfDayUs();

    AwesomePlayer(const AwesomePlayer &);
    AwesomePlayer &operator=(const AwesomePlayer &);
};
Loading