Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 6ce2ed7e authored by Wei Jia; committed by Android (Google) Code Review
Browse files

Merge "MediaSync: support MediaSync in native code."

parents ec648447 c8db9712
Loading
Loading
Loading
Loading
+13 −11
Original line number Diff line number Diff line
@@ -45,22 +45,24 @@ struct MediaClock : public RefBase {

    // query media time corresponding to real time |realUs|, and save the
    // result in |outMediaUs|.
    status_t getMediaTime(int64_t realUs,
    status_t getMediaTime(
            int64_t realUs,
            int64_t *outMediaUs,
                          bool allowPastMaxTime = false);
            bool allowPastMaxTime = false) const;
    // query real time corresponding to media time |targetMediaUs|.
    // The result is saved in |outRealUs|.
    status_t getRealTimeFor(int64_t targetMediaUs, int64_t *outRealUs);
    status_t getRealTimeFor(int64_t targetMediaUs, int64_t *outRealUs) const;

protected:
    virtual ~MediaClock();

private:
    status_t getMediaTime_l(int64_t realUs,
    status_t getMediaTime_l(
            int64_t realUs,
            int64_t *outMediaUs,
                            bool allowPastMaxTime);
            bool allowPastMaxTime) const;

    Mutex mLock;
    mutable Mutex mLock;

    int64_t mAnchorTimeMediaUs;
    int64_t mAnchorTimeRealUs;
+239 −0
Original line number Diff line number Diff line
/*
 * Copyright 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef MEDIA_SYNC_H
#define MEDIA_SYNC_H

#include <gui/IConsumerListener.h>
#include <gui/IProducerListener.h>

#include <media/stagefright/foundation/AHandler.h>

#include <utils/Condition.h>
#include <utils/Mutex.h>

namespace android {

class AudioTrack;
class BufferItem;
class Fence;
class GraphicBuffer;
class IGraphicBufferConsumer;
class IGraphicBufferProducer;
struct MediaClock;

// MediaSync manages media playback and its synchronization to a media clock
// source. It can be also used for video-only playback.
//
// For video playback, it requires an output surface and provides an input
// surface. It then controls the rendering of input buffers (buffer queued to
// the input surface) on the output surface to happen at the appropriate time.
//
// For audio playback, it requires an audio track and takes updates of
// information of rendered audio data to maintain media clock when audio track
// serves as media clock source. (TODO: move audio rendering from JAVA to
// native code).
//
// It can use the audio or video track as media clock source, as well as an
// external clock. (TODO: actually support external clock as media clock
// sources; use video track as media clock source for audio-and-video stream).
//
// In video-only mode, MediaSync will playback every video frame even though
// a video frame arrives late based on its timestamp and last frame's.
//
// The client needs to configure surface (for output video rendering) and audio
// track (for querying information of audio rendering) for MediaSync.
//
// Then the client needs to obtain a surface from MediaSync and render video
// frames onto that surface. Internally, the MediaSync will receive those video
// frames and render them onto the output surface at the appropriate time.
//
// The client needs to call updateQueuedAudioData() immediately after it writes
// audio data to the audio track. Such information will be used to update media
// clock.
//
class MediaSync : public AHandler {
public:
    // Create an instance of MediaSync.
    static sp<MediaSync> create();

    // Called when MediaSync is used to render video. It should be called
    // before createInputSurface().
    status_t configureSurface(const sp<IGraphicBufferProducer> &output);

    // Called when audio track is used as media clock source. It should be
    // called before updateQueuedAudioData().
    // |nativeSampleRateInHz| is the sample rate of audio data fed into audio
    // track. It's the same number used to create AudioTrack.
    status_t configureAudioTrack(
            const sp<AudioTrack> &audioTrack, uint32_t nativeSampleRateInHz);

    // Create a surface for client to render video frames. This is the surface
    // on which the client should render video frames. Those video frames will
    // be internally directed to output surface for rendering at appropriate
    // time.
    status_t createInputSurface(sp<IGraphicBufferProducer> *outBufferProducer);

    // Update just-rendered audio data size and the presentation timestamp of
    // the first frame of that audio data. It should be called immediately
    // after the client write audio data into AudioTrack.
    // This function assumes continuous audio stream.
    // TODO: support gap or backwards updates.
    status_t updateQueuedAudioData(
            size_t sizeInBytes, int64_t presentationTimeUs);

    // Set the consumer name of the input queue.
    void setName(const AString &name);

    // Set the playback in a desired speed.
    // This method can be called any time.
    // |rate| is the ratio between desired speed and the normal one, and should
    // be non-negative. The meaning of rate values:
    // 1.0 -- normal playback
    // 0.0 -- stop or pause
    // larger than 1.0 -- faster than normal speed
    // between 0.0 and 1.0 -- slower than normal speed
    status_t setPlaybackRate(float rate);

    // Get the media clock used by the MediaSync so that the client can obtain
    // corresponding media time or real time via
    // MediaClock::getMediaTime() and MediaClock::getRealTimeFor().
    sp<const MediaClock> getMediaClock();

protected:
    // AHandler callback: receives messages (e.g. kWhatDrainVideo) posted to
    // this handler on mLooper.
    virtual void onMessageReceived(const sp<AMessage> &msg);

private:
    // Message ids handled by onMessageReceived().
    enum {
        kWhatDrainVideo = 'dVid',
    };

    // NOTE(review): presumably the cap on buffers detached from the input and
    // not yet released by the output; once reached, onFrameAvailableFromInput()
    // blocks (see its comment below). Confirm against MediaSync.cpp.
    static const int MAX_OUTSTANDING_BUFFERS = 2;

    // This is a thin wrapper class that lets us listen to
    // IConsumerListener::onFrameAvailable from mInput.
    class InputListener : public BnConsumerListener,
                          public IBinder::DeathRecipient {
    public:
        InputListener(const sp<MediaSync> &sync);
        virtual ~InputListener();

        // From IConsumerListener
        virtual void onFrameAvailable(const BufferItem &item);

        // From IConsumerListener
        // We don't care about released buffers because we detach each buffer as
        // soon as we acquire it. See the comment for onBufferReleased below for
        // some clarifying notes about the name.
        virtual void onBuffersReleased() {}

        // From IConsumerListener
        // We don't care about sideband streams, since we won't relay them.
        virtual void onSidebandStreamChanged();

        // From IBinder::DeathRecipient
        virtual void binderDied(const wp<IBinder> &who);

    private:
        sp<MediaSync> mSync;
    };

    // This is a thin wrapper class that lets us listen to
    // IProducerListener::onBufferReleased from mOutput.
    class OutputListener : public BnProducerListener,
                           public IBinder::DeathRecipient {
    public:
        OutputListener(const sp<MediaSync> &sync);
        virtual ~OutputListener();

        // From IProducerListener
        virtual void onBufferReleased();

        // From IBinder::DeathRecipient
        virtual void binderDied(const wp<IBinder> &who);

    private:
        sp<MediaSync> mSync;
    };

    // mIsAbandoned is set to true when the input or output dies.
    // Once the MediaSync has been abandoned by one side, it will disconnect
    // from the other side and not attempt to communicate with it further.
    bool mIsAbandoned;

    // Guards MediaSync state; onAbandoned_l() (and, by the _l naming
    // convention, the other _l methods) must be called with it held.
    mutable Mutex mMutex;
    // Signaled when the output releases a buffer, waking a blocked
    // onFrameAvailableFromInput() (see that method's comment below).
    Condition mReleaseCondition;
    size_t mNumOutstandingBuffers;
    sp<IGraphicBufferConsumer> mInput;
    sp<IGraphicBufferProducer> mOutput;

    // Audio side: set up by configureAudioTrack().
    sp<AudioTrack> mAudioTrack;
    uint32_t mNativeSampleRateInHz;
    // NOTE(review): presumably the running total of audio frames fed in via
    // updateQueuedAudioData() — confirm in MediaSync.cpp.
    int64_t mNumFramesWritten;
    bool mHasAudio;

    // Pending video buffers awaiting their render time.
    int64_t mNextBufferItemMediaUs;
    List<BufferItem> mBufferItems;
    sp<ALooper> mLooper;
    float mPlaybackRate;

    sp<MediaClock> mMediaClock;

    MediaSync();

    // Must be accessed through RefBase
    virtual ~MediaSync();

    // Private helpers; the _l suffix marks methods expected to run with
    // mMutex held (explicitly documented for onAbandoned_l below).
    int64_t getRealTime(int64_t mediaTimeUs, int64_t nowUs);
    int64_t getDurationIfPlayedAtNativeSampleRate_l(int64_t numFrames);
    int64_t getPlayedOutAudioDurationMedia_l(int64_t nowUs);

    void onDrainVideo_l();

    // This implements the onFrameAvailable callback from IConsumerListener.
    // It gets called from an InputListener.
    // During this callback, we detach the buffer from the input, and queue
    // it for rendering on the output. This call can block if there are too
    // many outstanding buffers. If it blocks, it will resume when
    // onBufferReleasedByOutput releases a buffer back to the input.
    void onFrameAvailableFromInput();

    // Send |bufferItem| to the output for rendering.
    void renderOneBufferItem_l(const BufferItem &bufferItem);

    // This implements the onBufferReleased callback from IProducerListener.
    // It gets called from an OutputListener.
    // During this callback, we detach the buffer from the output, and release
    // it to the input. A blocked onFrameAvailable call will be allowed to proceed.
    void onBufferReleasedByOutput();

    // Return |buffer| back to the input.
    void returnBufferToInput_l(const sp<GraphicBuffer> &buffer, const sp<Fence> &fence);

    // When this is called, the MediaSync disconnects from (i.e., abandons) its
    // input or output, and signals any waiting onFrameAvailable calls to wake
    // up. This must be called with mMutex locked.
    void onAbandoned_l(bool isInput);

    // helper. True iff the playback rate is nonzero; setPlaybackRate()
    // defines rate 0.0 as stop/pause, so the exact floating-point comparison
    // against 0.0 is intentional.
    bool isPlaying() { return mPlaybackRate != 0.0; }

    DISALLOW_EVIL_CONSTRUCTORS(MediaSync);
};

} // namespace android

#endif
+1 −0
Original line number Diff line number Diff line
@@ -37,6 +37,7 @@ LOCAL_SRC_FILES:= \
        MediaCodecSource.cpp              \
        MediaDefs.cpp                     \
        MediaExtractor.cpp                \
        MediaSync.cpp                     \
        MidiExtractor.cpp                 \
        http/MediaHTTP.cpp                \
        MediaMuxer.cpp                    \
+12 −3
Original line number Diff line number Diff line
@@ -93,13 +93,17 @@ void MediaClock::setPlaybackRate(float rate) {
}

// Query the media time corresponding to real time |realUs|, storing the
// result in |outMediaUs|.
// Returns BAD_VALUE if |outMediaUs| is null; otherwise forwards the result
// of getMediaTime_l() (which returns NO_INIT when no anchor time is set).
// const is valid because mLock is declared mutable in MediaClock.h.
// NOTE: this resolves the diff shown here to its post-merge (const) form;
// the pre-merge, non-const signature line is removed.
status_t MediaClock::getMediaTime(
        int64_t realUs, int64_t *outMediaUs, bool allowPastMaxTime) const {
    if (outMediaUs == NULL) {
        return BAD_VALUE;
    }

    // Lock around the unlocked helper; callers may race with anchor updates.
    Mutex::Autolock autoLock(mLock);
    return getMediaTime_l(realUs, outMediaUs, allowPastMaxTime);
}

status_t MediaClock::getMediaTime_l(
        int64_t realUs, int64_t *outMediaUs, bool allowPastMaxTime) {
        int64_t realUs, int64_t *outMediaUs, bool allowPastMaxTime) const {
    if (mAnchorTimeRealUs == -1) {
        return NO_INIT;
    }
@@ -119,7 +123,12 @@ status_t MediaClock::getMediaTime_l(
    return OK;
}

status_t MediaClock::getRealTimeFor(int64_t targetMediaUs, int64_t *outRealUs) {
status_t MediaClock::getRealTimeFor(
        int64_t targetMediaUs, int64_t *outRealUs) const {
    if (outRealUs == NULL) {
        return BAD_VALUE;
    }

    Mutex::Autolock autoLock(mLock);
    if (mPlaybackRate == 0.0) {
        return NO_INIT;
+541 −0

File added.

Preview size limit exceeded, changes collapsed.