media/java/android/media/MediaSync.java  +33 −7

@@ -24,6 +24,7 @@ import android.media.PlaybackParams;
 import android.os.Handler;
 import android.os.Looper;
 import android.os.Message;
+import android.util.Log;
 import android.view.Surface;
 
 import java.lang.annotation.Retention;

@@ -82,7 +83,7 @@ import java.util.List;
 *         codec.releaseOutputBuffer(bufferId, 1000 * info.presentationTime);
 *     } else {
 *         ByteBuffer audioByteBuffer = codec.getOutputBuffer(bufferId);
-*         sync.queueByteBuffer(audioByteBuffer, bufferId, info.size, info.presentationTime);
+*         sync.queueAudio(audioByteBuffer, bufferId, info.presentationTime);
 *     }
 *     // ...
 * }

@@ -427,6 +428,11 @@ public final class MediaSync {
     /**
      * Flushes all buffers from the sync object.
      * <p>
+     * All pending unprocessed audio and video buffers are discarded. If an audio track was
+     * configured, it is flushed and stopped. If a video output surface was configured, the
+     * last frame queued to it is left on the surface. Queue a blank video frame to clear the
+     * surface.
+     * <p>
      * No callbacks are received for the flushed buffers.
      *
      * @throws IllegalStateException if the internal player engine has not been

@@ -437,9 +443,18 @@ public final class MediaSync {
             mAudioBuffers.clear();
             mCallbackHandler.removeCallbacksAndMessages(null);
         }
-        // TODO implement this for surface buffers.
+        if (mAudioTrack != null) {
+            mAudioTrack.pause();
+            mAudioTrack.flush();
+            // Call stop() to signal to the AudioSink to completely fill the
+            // internal buffer before resuming playback.
+            mAudioTrack.stop();
+        }
+        native_flush();
     }
 
+    private native final void native_flush();
+
     /**
      * Get current playback position.
      * <p>
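Not part of the change, but for context: a minimal sketch of how a client might drive the new flush() during a seek. The SeekHelper class, its field names, and the exact ordering around the codec are illustrative assumptions; only MediaSync.flush(), MediaCodec.flush()/start(), and MediaExtractor.seekTo() are existing APIs.

    import android.media.MediaCodec;
    import android.media.MediaExtractor;
    import android.media.MediaSync;

    // Hypothetical helper: SeekHelper and its fields are made-up names.
    final class SeekHelper {
        private final MediaSync mSync;          // configured with surface/audio track
        private final MediaCodec mAudioCodec;   // audio decoder in asynchronous mode
        private final MediaExtractor mExtractor;

        SeekHelper(MediaSync sync, MediaCodec audioCodec, MediaExtractor extractor) {
            mSync = sync;
            mAudioCodec = audioCodec;
            mExtractor = extractor;
        }

        void seekTo(long positionUs) {
            // Drop everything still queued on the sync object. Per this change, the
            // AudioTrack is paused, flushed and stopped internally, and no
            // onAudioBufferConsumed callbacks arrive for the discarded buffers.
            mSync.flush();

            // Drop the decoder's in-flight buffers and reposition the source.
            mAudioCodec.flush();
            mAudioCodec.start();   // required after flush() in asynchronous mode
            mExtractor.seekTo(positionUs, MediaExtractor.SEEK_TO_CLOSEST_SYNC);

            // From here the decoder callbacks resume calling sync.queueAudio(...);
            // the first write after the flush restarts the AudioTrack via play().
        }
    }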
@@ -478,6 +493,7 @@ public final class MediaSync {
     /**
      * Queues the audio data asynchronously for playback (AudioTrack must be in streaming mode).
+     * If the audio track was flushed as a result of {@link #flush}, it will be restarted.
      * @param audioData the buffer that holds the data to play. This buffer will be returned
      *     to the client via registered callback.
      * @param bufferId an integer used to identify audioData. It will be returned to

@@ -519,6 +535,14 @@ public final class MediaSync {
             AudioBuffer audioBuffer = mAudioBuffers.get(0);
             int size = audioBuffer.mByteBuffer.remaining();
 
+            // restart audio track after flush
+            if (size > 0 && mAudioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) {
+                try {
+                    mAudioTrack.play();
+                } catch (IllegalStateException e) {
+                    Log.w(TAG, "could not start audio track");
+                }
+            }
+
             int sizeWritten = mAudioTrack.write(
                     audioBuffer.mByteBuffer,
                     size,

@@ -558,17 +582,19 @@ public final class MediaSync {
         final MediaSync sync = this;
         mCallbackHandler.post(new Runnable() {
             public void run() {
+                Callback callback;
                 synchronized(mCallbackLock) {
+                    callback = mCallback;
                     if (mCallbackHandler == null
                             || mCallbackHandler.getLooper().getThread()
                                     != Thread.currentThread()) {
                         // callback handler has been changed.
                         return;
                     }
-                    if (mCallback != null) {
-                        mCallback.onAudioBufferConsumed(sync, audioBuffer.mByteBuffer,
-                                audioBuffer.mBufferIndex);
-                    }
-                }
+                }
+                if (callback != null) {
+                    callback.onAudioBufferConsumed(sync, audioBuffer.mByteBuffer,
+                            audioBuffer.mBufferIndex);
+                }
             }
         });
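The last hunk snapshots mCallback into a local while mCallbackLock is held and invokes onAudioBufferConsumed() after leaving the synchronized block, so client code never runs under the lock. A self-contained sketch of that pattern, with made-up Notifier/Listener names:

    // Illustrative only: Notifier and Listener are hypothetical names.
    final class Notifier {
        interface Listener {
            void onEvent(int value);
        }

        private final Object mLock = new Object();
        private Listener mListener;   // guarded by mLock, may be swapped at any time

        void setListener(Listener listener) {
            synchronized (mLock) {
                mListener = listener;
            }
        }

        void notifyEvent(int value) {
            Listener listener;
            synchronized (mLock) {
                // Take a consistent snapshot while holding the lock...
                listener = mListener;
            }
            // ...but invoke client code outside it, so a listener that calls
            // setListener() or blocks cannot deadlock the notifier.
            if (listener != null) {
                listener.onEvent(value);
            }
        }
    }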
media/jni/android_media_MediaSync.cpp  +16 −0

@@ -102,6 +102,10 @@ float JMediaSync::getVideoFrameRate() {
     return mSync->getVideoFrameRate();
 }
 
+void JMediaSync::flush() {
+    mSync->flush();
+}
+
 status_t JMediaSync::updateQueuedAudioData(
         int sizeInBytes, int64_t presentationTimeUs) {
     return mSync->updateQueuedAudioData(sizeInBytes, presentationTimeUs);

@@ -464,6 +468,16 @@ static jobject android_media_MediaSync_getSyncParams(JNIEnv *env, jobject thiz)
     return scs.asJobject(env, gSyncParamsFields);
 }
 
+static void android_media_MediaSync_native_flush(JNIEnv *env, jobject thiz) {
+    sp<JMediaSync> sync = getMediaSync(env, thiz);
+    if (sync == NULL) {
+        throwExceptionAsNecessary(env, INVALID_OPERATION);
+        return;
+    }
+
+    sync->flush();
+}
+
 static void android_media_MediaSync_native_init(JNIEnv *env) {
     ScopedLocalRef<jclass> clazz(env, env->FindClass("android/media/MediaSync"));
     CHECK(clazz.get() != NULL);

@@ -524,6 +538,8 @@ static JNINativeMethod gMethods[] = {
             "()J",
             (void *)android_media_MediaSync_native_getPlayTimeForPendingAudioFrames },
 
+    { "native_flush", "()V", (void *)android_media_MediaSync_native_flush },
+
     { "native_init", "()V", (void *)android_media_MediaSync_native_init },
     { "native_setup", "()V", (void *)android_media_MediaSync_native_setup },

media/jni/android_media_MediaSync.h  +2 −0

@@ -49,6 +49,8 @@ struct JMediaSync : public RefBase {
     status_t setVideoFrameRateHint(float rate);
     float getVideoFrameRate();
 
+    void flush();
+
     sp<const MediaClock> getMediaClock();
 
 protected: