Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit fc667dab authored by James Dong's avatar James Dong Committed by Android Git Automerger
Browse files

am 06967683: am e5671bfe: Merge "Audio/video sync during recording (second part)" into gingerbread

Merge commit '06967683'

* commit '06967683':
  Audio/video sync during recording (second part)
parents 6c55b8b1 06967683
Loading
Loading
Loading
Loading
+1 −0
Original line number Original line Diff line number Diff line
@@ -56,6 +56,7 @@ private:
    int64_t mTotalReadTimeUs;
    int64_t mTotalReadTimeUs;
    int64_t mTotalReadBytes;
    int64_t mTotalReadBytes;
    int64_t mTotalReads;
    int64_t mTotalReads;
    int64_t mStartTimeUs;


    MediaBufferGroup *mGroup;
    MediaBufferGroup *mGroup;


+4 −0
Original line number Original line Diff line number Diff line
@@ -60,11 +60,14 @@ private:
    List<sp<IMemory> > mFramesBeingEncoded;
    List<sp<IMemory> > mFramesBeingEncoded;
    List<int64_t> mFrameTimes;
    List<int64_t> mFrameTimes;


    int64_t mStartTimeUs;
    int64_t mFirstFrameTimeUs;
    int64_t mFirstFrameTimeUs;
    int64_t mLastFrameTimestampUs;
    int64_t mLastFrameTimestampUs;
    int32_t mNumFramesReceived;
    int32_t mNumFramesReceived;
    int32_t mNumFramesEncoded;
    int32_t mNumFramesEncoded;
    int32_t mNumFramesDropped;
    int32_t mNumFramesDropped;
    int32_t mNumGlitches;
    int64_t mGlitchDurationThresholdUs;
    bool mCollectStats;
    bool mCollectStats;
    bool mStarted;
    bool mStarted;


@@ -74,6 +77,7 @@ private:
            int64_t timestampUs, int32_t msgType, const sp<IMemory> &data);
            int64_t timestampUs, int32_t msgType, const sp<IMemory> &data);


    void releaseQueuedFrames();
    void releaseQueuedFrames();
    void releaseOneRecordingFrame(const sp<IMemory>& frame);


    CameraSource(const CameraSource &);
    CameraSource(const CameraSource &);
    CameraSource &operator=(const CameraSource &);
    CameraSource &operator=(const CameraSource &);
+2 −2
Original line number Original line Diff line number Diff line
@@ -78,8 +78,8 @@ private:


    List<off_t> mBoxes;
    List<off_t> mBoxes;


    void setStartTimestamp(int64_t timeUs);
    void setStartTimestampUs(int64_t timeUs);
    int64_t getStartTimestamp();  // Not const
    int64_t getStartTimestampUs();  // Not const
    status_t startTracks();
    status_t startTracks();


    void lock();
    void lock();
+23 −10
Original line number Original line Diff line number Diff line
@@ -77,6 +77,12 @@ status_t AudioSource::start(MetaData *params) {
        && (!strcmp(value, "1") || !strcasecmp(value, "true"))) {
        && (!strcmp(value, "1") || !strcasecmp(value, "true"))) {
        mCollectStats = true;
        mCollectStats = true;
    }
    }

    mStartTimeUs = 0;
    int64_t startTimeUs;
    if (params && params->findInt64(kKeyTime, &startTimeUs)) {
        mStartTimeUs = startTimeUs;
    }
    status_t err = mRecord->start();
    status_t err = mRecord->start();


    if (err == OK) {
    if (err == OK) {
@@ -132,21 +138,22 @@ status_t AudioSource::read(
    uint32_t numFramesRecorded;
    uint32_t numFramesRecorded;
    mRecord->getPosition(&numFramesRecorded);
    mRecord->getPosition(&numFramesRecorded);
    int64_t latency = mRecord->latency() * 1000;
    int64_t latency = mRecord->latency() * 1000;
    uint32_t sampleRate = mRecord->getSampleRate();
    int64_t timestampUs = (1000000LL * numFramesRecorded) / sampleRate - latency;
    LOGV("latency: %lld, sample rate: %d, timestamp: %lld",
            latency, sampleRate, timestampUs);


    buffer->meta_data()->setInt64(kKeyTime, timestampUs);
    int64_t readTime = systemTime() / 1000;
    if (numFramesRecorded == 0) {
        // Initial delay
        if (mStartTimeUs > 0) {
            mStartTimeUs = readTime - mStartTimeUs;
        } else {
            mStartTimeUs += latency;
        }
    }


    ssize_t n = 0;
    ssize_t n = 0;
    if (mCollectStats) {
    if (mCollectStats) {
        struct timeval tv_start, tv_end;
        gettimeofday(&tv_start, NULL);
        n = mRecord->read(buffer->data(), buffer->size());
        n = mRecord->read(buffer->data(), buffer->size());
        gettimeofday(&tv_end, NULL);
        int64_t endTime = systemTime() / 1000;
        mTotalReadTimeUs += ((1000000LL * (tv_end.tv_sec - tv_start.tv_sec))
        mTotalReadTimeUs += (endTime - readTime);
                + (tv_end.tv_usec - tv_start.tv_usec));
        if (n >= 0) {
        if (n >= 0) {
            mTotalReadBytes += n;
            mTotalReadBytes += n;
        }
        }
@@ -161,6 +168,12 @@ status_t AudioSource::read(
        return (status_t)n;
        return (status_t)n;
    }
    }


    uint32_t sampleRate = mRecord->getSampleRate();
    int64_t timestampUs = (1000000LL * numFramesRecorded) / sampleRate + mStartTimeUs;
    buffer->meta_data()->setInt64(kKeyTime, timestampUs);
    LOGV("initial delay: %lld, sample rate: %d, timestamp: %lld",
            mStartTimeUs, sampleRate, timestampUs);

    buffer->set_range(0, n);
    buffer->set_range(0, n);


    *out = buffer;
    *out = buffer;
+46 −10
Original line number Original line Diff line number Diff line
@@ -123,6 +123,8 @@ CameraSource::CameraSource(const sp<Camera> &camera)
      mNumFramesReceived(0),
      mNumFramesReceived(0),
      mNumFramesEncoded(0),
      mNumFramesEncoded(0),
      mNumFramesDropped(0),
      mNumFramesDropped(0),
      mNumGlitches(0),
      mGlitchDurationThresholdUs(200000),
      mCollectStats(false),
      mCollectStats(false),
      mStarted(false) {
      mStarted(false) {


@@ -136,6 +138,13 @@ CameraSource::CameraSource(const sp<Camera> &camera)
    CameraParameters params(s);
    CameraParameters params(s);
    params.getPreviewSize(&width, &height);
    params.getPreviewSize(&width, &height);


    // Calculate glitch duration threshold based on frame rate
    int32_t frameRate = params.getPreviewFrameRate();
    int64_t glitchDurationUs = (1000000LL / frameRate);
    if (glitchDurationUs > mGlitchDurationThresholdUs) {
        mGlitchDurationThresholdUs = glitchDurationUs;
    }

    const char *colorFormatStr = params.get(CameraParameters::KEY_VIDEO_FRAME_FORMAT);
    const char *colorFormatStr = params.get(CameraParameters::KEY_VIDEO_FRAME_FORMAT);
    CHECK(colorFormatStr != NULL);
    CHECK(colorFormatStr != NULL);
    int32_t colorFormat = getColorFormat(colorFormatStr);
    int32_t colorFormat = getColorFormat(colorFormatStr);
@@ -161,8 +170,7 @@ CameraSource::~CameraSource() {
    }
    }
}
}


status_t CameraSource::start(MetaData *) {
status_t CameraSource::start(MetaData *meta) {
    LOGV("start");
    CHECK(!mStarted);
    CHECK(!mStarted);


    char value[PROPERTY_VALUE_MAX];
    char value[PROPERTY_VALUE_MAX];
@@ -171,6 +179,12 @@ status_t CameraSource::start(MetaData *) {
        mCollectStats = true;
        mCollectStats = true;
    }
    }


    mStartTimeUs = 0;
    int64_t startTimeUs;
    if (meta && meta->findInt64(kKeyTime, &startTimeUs)) {
        mStartTimeUs = startTimeUs;
    }

    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    mCamera->setListener(new CameraSourceListener(this));
    mCamera->setListener(new CameraSourceListener(this));
    CHECK_EQ(OK, mCamera->startRecording());
    CHECK_EQ(OK, mCamera->startRecording());
@@ -222,16 +236,19 @@ sp<MetaData> CameraSource::getFormat() {
    return mMeta;
    return mMeta;
}
}


void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    mCamera->releaseRecordingFrame(frame);
    IPCThreadState::self()->restoreCallingIdentity(token);
}

void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
    LOGV("signalBufferReturned: %p", buffer->data());
    LOGV("signalBufferReturned: %p", buffer->data());
    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
         it != mFramesBeingEncoded.end(); ++it) {
         it != mFramesBeingEncoded.end(); ++it) {
        if ((*it)->pointer() ==  buffer->data()) {
        if ((*it)->pointer() ==  buffer->data()) {


            int64_t token = IPCThreadState::self()->clearCallingIdentity();
            releaseOneRecordingFrame((*it));
            mCamera->releaseRecordingFrame((*it));
            IPCThreadState::self()->restoreCallingIdentity(token);

            mFramesBeingEncoded.erase(it);
            mFramesBeingEncoded.erase(it);
            ++mNumFramesEncoded;
            ++mNumFramesEncoded;
            buffer->setObserver(0);
            buffer->setObserver(0);
@@ -285,22 +302,41 @@ void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
    LOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs);
    LOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs);
    Mutex::Autolock autoLock(mLock);
    Mutex::Autolock autoLock(mLock);
    if (!mStarted) {
    if (!mStarted) {
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        releaseOneRecordingFrame(data);
        mCamera->releaseRecordingFrame(data);
        IPCThreadState::self()->restoreCallingIdentity(token);
        ++mNumFramesReceived;
        ++mNumFramesReceived;
        ++mNumFramesDropped;
        ++mNumFramesDropped;
        return;
        return;
    }
    }


    if (mNumFramesReceived > 0 &&
        timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
        if (mNumGlitches % 10 == 0) {  // Don't spam the log
            LOGW("Long delay detected in video recording");
        }
        ++mNumGlitches;
    }

    mLastFrameTimestampUs = timestampUs;
    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                releaseOneRecordingFrame(data);
                return;
            }
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }
    }
    ++mNumFramesReceived;
    ++mNumFramesReceived;


    mFramesReceived.push_back(data);
    mFramesReceived.push_back(data);
    mFrameTimes.push_back(timestampUs - mFirstFrameTimeUs);
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    LOGV("initial delay: %lld, current time stamp: %lld",
        mStartTimeUs, timeUs);
    mFrameAvailableCondition.signal();
    mFrameAvailableCondition.signal();
}
}


Loading