Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit e5671bfe authored by James Dong, committed by Android (Google) Code Review
Browse files

Merge "Audio/video sync during recording (second part)" into gingerbread

parents 613dde4a 36e573bf
Loading
Loading
Loading
Loading
+1 −0
Original line number Diff line number Diff line
@@ -56,6 +56,7 @@ private:
    int64_t mTotalReadTimeUs;
    int64_t mTotalReadBytes;
    int64_t mTotalReads;
    int64_t mStartTimeUs;

    MediaBufferGroup *mGroup;

+4 −0
Original line number Diff line number Diff line
@@ -60,11 +60,14 @@ private:
    List<sp<IMemory> > mFramesBeingEncoded;
    List<int64_t> mFrameTimes;

    int64_t mStartTimeUs;
    int64_t mFirstFrameTimeUs;
    int64_t mLastFrameTimestampUs;
    int32_t mNumFramesReceived;
    int32_t mNumFramesEncoded;
    int32_t mNumFramesDropped;
    int32_t mNumGlitches;
    int64_t mGlitchDurationThresholdUs;
    bool mCollectStats;
    bool mStarted;

@@ -74,6 +77,7 @@ private:
            int64_t timestampUs, int32_t msgType, const sp<IMemory> &data);

    void releaseQueuedFrames();
    void releaseOneRecordingFrame(const sp<IMemory>& frame);

    CameraSource(const CameraSource &);
    CameraSource &operator=(const CameraSource &);
+2 −2
Original line number Diff line number Diff line
@@ -78,8 +78,8 @@ private:

    List<off_t> mBoxes;

    void setStartTimestamp(int64_t timeUs);
    int64_t getStartTimestamp();  // Not const
    void setStartTimestampUs(int64_t timeUs);
    int64_t getStartTimestampUs();  // Not const
    status_t startTracks();

    void lock();
+23 −10
Original line number Diff line number Diff line
@@ -77,6 +77,12 @@ status_t AudioSource::start(MetaData *params) {
        && (!strcmp(value, "1") || !strcasecmp(value, "true"))) {
        mCollectStats = true;
    }

    mStartTimeUs = 0;
    int64_t startTimeUs;
    if (params && params->findInt64(kKeyTime, &startTimeUs)) {
        mStartTimeUs = startTimeUs;
    }
    status_t err = mRecord->start();

    if (err == OK) {
@@ -132,21 +138,22 @@ status_t AudioSource::read(
    uint32_t numFramesRecorded;
    mRecord->getPosition(&numFramesRecorded);
    int64_t latency = mRecord->latency() * 1000;
    uint32_t sampleRate = mRecord->getSampleRate();
    int64_t timestampUs = (1000000LL * numFramesRecorded) / sampleRate - latency;
    LOGV("latency: %lld, sample rate: %d, timestamp: %lld",
            latency, sampleRate, timestampUs);

    buffer->meta_data()->setInt64(kKeyTime, timestampUs);
    int64_t readTime = systemTime() / 1000;
    if (numFramesRecorded == 0) {
        // Initial delay
        if (mStartTimeUs > 0) {
            mStartTimeUs = readTime - mStartTimeUs;
        } else {
            mStartTimeUs += latency;
        }
    }

    ssize_t n = 0;
    if (mCollectStats) {
        struct timeval tv_start, tv_end;
        gettimeofday(&tv_start, NULL);
        n = mRecord->read(buffer->data(), buffer->size());
        gettimeofday(&tv_end, NULL);
        mTotalReadTimeUs += ((1000000LL * (tv_end.tv_sec - tv_start.tv_sec))
                + (tv_end.tv_usec - tv_start.tv_usec));
        int64_t endTime = systemTime() / 1000;
        mTotalReadTimeUs += (endTime - readTime);
        if (n >= 0) {
            mTotalReadBytes += n;
        }
@@ -161,6 +168,12 @@ status_t AudioSource::read(
        return (status_t)n;
    }

    uint32_t sampleRate = mRecord->getSampleRate();
    int64_t timestampUs = (1000000LL * numFramesRecorded) / sampleRate + mStartTimeUs;
    buffer->meta_data()->setInt64(kKeyTime, timestampUs);
    LOGV("initial delay: %lld, sample rate: %d, timestamp: %lld",
            mStartTimeUs, sampleRate, timestampUs);

    buffer->set_range(0, n);

    *out = buffer;
+46 −10
Original line number Diff line number Diff line
@@ -123,6 +123,8 @@ CameraSource::CameraSource(const sp<Camera> &camera)
      mNumFramesReceived(0),
      mNumFramesEncoded(0),
      mNumFramesDropped(0),
      mNumGlitches(0),
      mGlitchDurationThresholdUs(200000),
      mCollectStats(false),
      mStarted(false) {

@@ -136,6 +138,13 @@ CameraSource::CameraSource(const sp<Camera> &camera)
    CameraParameters params(s);
    params.getPreviewSize(&width, &height);

    // Calculate glitch duration threshold based on frame rate
    int32_t frameRate = params.getPreviewFrameRate();
    int64_t glitchDurationUs = (1000000LL / frameRate);
    if (glitchDurationUs > mGlitchDurationThresholdUs) {
        mGlitchDurationThresholdUs = glitchDurationUs;
    }

    const char *colorFormatStr = params.get(CameraParameters::KEY_VIDEO_FRAME_FORMAT);
    CHECK(colorFormatStr != NULL);
    int32_t colorFormat = getColorFormat(colorFormatStr);
@@ -161,8 +170,7 @@ CameraSource::~CameraSource() {
    }
}

status_t CameraSource::start(MetaData *) {
    LOGV("start");
status_t CameraSource::start(MetaData *meta) {
    CHECK(!mStarted);

    char value[PROPERTY_VALUE_MAX];
@@ -171,6 +179,12 @@ status_t CameraSource::start(MetaData *) {
        mCollectStats = true;
    }

    mStartTimeUs = 0;
    int64_t startTimeUs;
    if (meta && meta->findInt64(kKeyTime, &startTimeUs)) {
        mStartTimeUs = startTimeUs;
    }

    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    mCamera->setListener(new CameraSourceListener(this));
    CHECK_EQ(OK, mCamera->startRecording());
@@ -222,16 +236,19 @@ sp<MetaData> CameraSource::getFormat() {
    return mMeta;
}

// Returns one recording frame buffer to the camera service.
// Temporarily drops the caller's binder identity (via clearCallingIdentity)
// around the releaseRecordingFrame() IPC, restoring it afterwards —
// presumably so the call is made with this process's own identity rather
// than the remote caller's; TODO(review): confirm against the camera
// service's permission checks.
void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
    int64_t token = IPCThreadState::self()->clearCallingIdentity();
    mCamera->releaseRecordingFrame(frame);
    IPCThreadState::self()->restoreCallingIdentity(token);
}

void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
    LOGV("signalBufferReturned: %p", buffer->data());
    for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
         it != mFramesBeingEncoded.end(); ++it) {
        if ((*it)->pointer() ==  buffer->data()) {

            int64_t token = IPCThreadState::self()->clearCallingIdentity();
            mCamera->releaseRecordingFrame((*it));
            IPCThreadState::self()->restoreCallingIdentity(token);

            releaseOneRecordingFrame((*it));
            mFramesBeingEncoded.erase(it);
            ++mNumFramesEncoded;
            buffer->setObserver(0);
@@ -285,22 +302,41 @@ void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
    LOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs);
    Mutex::Autolock autoLock(mLock);
    if (!mStarted) {
        int64_t token = IPCThreadState::self()->clearCallingIdentity();
        mCamera->releaseRecordingFrame(data);
        IPCThreadState::self()->restoreCallingIdentity(token);
        releaseOneRecordingFrame(data);
        ++mNumFramesReceived;
        ++mNumFramesDropped;
        return;
    }

    if (mNumFramesReceived > 0 &&
        timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
        if (mNumGlitches % 10 == 0) {  // Don't spam the log
            LOGW("Long delay detected in video recording");
        }
        ++mNumGlitches;
    }

    mLastFrameTimestampUs = timestampUs;
    if (mNumFramesReceived == 0) {
        mFirstFrameTimeUs = timestampUs;
        // Initial delay
        if (mStartTimeUs > 0) {
            if (timestampUs < mStartTimeUs) {
                // Frame was captured before recording was started
                // Drop it without updating the statistical data.
                releaseOneRecordingFrame(data);
                return;
            }
            mStartTimeUs = timestampUs - mStartTimeUs;
        }
    }
    ++mNumFramesReceived;

    mFramesReceived.push_back(data);
    mFrameTimes.push_back(timestampUs - mFirstFrameTimeUs);
    int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
    mFrameTimes.push_back(timeUs);
    LOGV("initial delay: %lld, current time stamp: %lld",
        mStartTimeUs, timeUs);
    mFrameAvailableCondition.signal();
}

Loading