Loading api/current.txt +8 −0 Original line number Diff line number Diff line Loading @@ -15985,6 +15985,7 @@ package android.media { method public void configureAudioTrack(android.media.AudioTrack, int); method public void configureSurface(android.view.Surface); method public final android.view.Surface createInputSurface(); method public boolean getTimestamp(android.media.MediaTimestamp); method public void queueAudio(java.nio.ByteBuffer, int, int, long); method public final void release(); method public void setCallback(android.media.MediaSync.Callback, android.os.Handler); Loading @@ -16006,6 +16007,13 @@ package android.media { field public static final int SYNC_EVENT_PRESENTATION_COMPLETE = 1; // 0x1 } public final class MediaTimestamp { ctor public MediaTimestamp(); field public float clockRate; field public long mediaTimeUs; field public long nanoTime; } public final class NotProvisionedException extends android.media.MediaDrmException { ctor public NotProvisionedException(java.lang.String); } api/system-current.txt +8 −0 Original line number Diff line number Diff line Loading @@ -17196,6 +17196,7 @@ package android.media { method public void configureAudioTrack(android.media.AudioTrack, int); method public void configureSurface(android.view.Surface); method public final android.view.Surface createInputSurface(); method public boolean getTimestamp(android.media.MediaTimestamp); method public void queueAudio(java.nio.ByteBuffer, int, int, long); method public final void release(); method public void setCallback(android.media.MediaSync.Callback, android.os.Handler); Loading @@ -17217,6 +17218,13 @@ package android.media { field public static final int SYNC_EVENT_PRESENTATION_COMPLETE = 1; // 0x1 } public final class MediaTimestamp { ctor public MediaTimestamp(); field public float clockRate; field public long mediaTimeUs; field public long nanoTime; } public final class NotProvisionedException extends android.media.MediaDrmException { ctor public 
NotProvisionedException(java.lang.String); } media/java/android/media/MediaSync.java +32 −0 Original line number Diff line number Diff line Loading @@ -17,6 +17,7 @@ package android.media; import android.annotation.IntDef; import android.annotation.NonNull; import android.media.AudioTrack; import android.os.Handler; import android.os.Looper; Loading Loading @@ -386,6 +387,37 @@ final public class MediaSync { return (mode == PLAYBACK_RATE_AUDIO_MODE_RESAMPLE); } /** * Get current playback position. * <p> * The MediaTimestamp represents a clock ticking during media playback. It's represented * by an anchor frame ({@link MediaTimestamp#mediaTimeUs} and {@link MediaTimestamp#nanoTime}) * and clock speed ({@link MediaTimestamp#clockRate}). For continuous playback with * constant speed, its anchor frame doesn't change that often. Therefore, it's recommended * to not call this method often. * <p> * To help users to get current playback position, this method always returns the timestamp of * just-rendered frame, i.e., {@link System#nanoTime} and its corresponding media time. They * can be used as current playback position. * * @param timestamp a reference to a non-null MediaTimestamp instance allocated * and owned by caller. * @return true if a timestamp is available, or false if no timestamp is available. * If a timestamp is available, the MediaTimestamp instance is filled in with * playback rate, together with the current media timestamp and the system nanoTime * corresponding to the measured media timestamp. * In the case that no timestamp is available, any supplied instance is left unaltered. */ public boolean getTimestamp(@NonNull MediaTimestamp timestamp) { if (timestamp == null) { throw new IllegalArgumentException(); } return native_getTimestamp(timestamp); } private native final boolean native_getTimestamp(MediaTimestamp timestamp); /** * Queues the audio data asynchronously for playback (AudioTrack must be in streaming mode). 
* @param audioData the buffer that holds the data to play. This buffer will be returned Loading media/java/android/media/MediaTimestamp.java 0 → 100644 +52 −0 Original line number Diff line number Diff line /* * Copyright 2015 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package android.media; /** * Structure that groups clock rate of the stream playback, together with the media timestamp * of an anchor frame and the system time when that frame was presented or is committed * to be presented. * The "present" means that audio/video produced on device is detectable by an external * observer off device. * The time is based on the implementation's best effort, using whatever knowledge * is available to the system, but cannot account for any delay unknown to the implementation. * The anchor frame could be any frame, including just-rendered frame, dependent on how * it's selected. When the anchor frame is the just-rendered one, the media time stands for * current position of the playback. * * @see MediaSync#getTimestamp */ public final class MediaTimestamp { /** * Media timestamp in microseconds. */ public long mediaTimeUs; /** * The {@link java.lang.System#nanoTime} corresponding to the media timestamp. */ public long nanoTime; /** * Media clock rate. * It is 1.0 if media clock is in sync with the system clock; * greater than 1.0 if media clock is faster than the system clock; * less than 1.0 if media clock is slower than the system clock. 
*/ public float clockRate; } media/jni/android_media_MediaSync.cpp +55 −0 Original line number Diff line number Diff line Loading @@ -29,6 +29,7 @@ #include <gui/Surface.h> #include <media/AudioTrack.h> #include <media/stagefright/MediaClock.h> #include <media/stagefright/MediaSync.h> #include <media/stagefright/foundation/ADebug.h> #include <media/stagefright/foundation/AString.h> Loading @@ -39,6 +40,9 @@ namespace android { struct fields_t { jfieldID context; jfieldID mediaTimestampMediaTimeUsID; jfieldID mediaTimestampNanoTimeID; jfieldID mediaTimestampClockRateID; }; static fields_t gFields; Loading Loading @@ -71,6 +75,10 @@ void JMediaSync::setPlaybackRate(float rate) { mSync->setPlaybackRate(rate); } sp<const MediaClock> JMediaSync::getMediaClock() { return mSync->getMediaClock(); } status_t JMediaSync::updateQueuedAudioData( int sizeInBytes, int64_t presentationTimeUs) { return mSync->updateQueuedAudioData(sizeInBytes, presentationTimeUs); Loading Loading @@ -222,12 +230,55 @@ static void android_media_MediaSync_native_updateQueuedAudioData( } } static jboolean android_media_MediaSync_native_getTimestamp( JNIEnv *env, jobject thiz, jobject timestamp) { sp<JMediaSync> sync = getMediaSync(env, thiz); if (sync == NULL) { throwExceptionAsNecessary(env, INVALID_OPERATION); return JNI_FALSE; } sp<const MediaClock> mediaClock = sync->getMediaClock(); if (mediaClock == NULL) { return JNI_FALSE; } int64_t nowUs = ALooper::GetNowUs(); int64_t mediaUs = 0; if (mediaClock->getMediaTime(nowUs, &mediaUs) != OK) { return JNI_FALSE; } env->SetLongField(timestamp, gFields.mediaTimestampMediaTimeUsID, (jlong)mediaUs); env->SetLongField(timestamp, gFields.mediaTimestampNanoTimeID, (jlong)(nowUs * 1000)); env->SetFloatField(timestamp, gFields.mediaTimestampClockRateID, (jfloat)mediaClock->getPlaybackRate()); return JNI_TRUE; } static void android_media_MediaSync_native_init(JNIEnv *env) { ScopedLocalRef<jclass> clazz(env, env->FindClass("android/media/MediaSync")); 
CHECK(clazz.get() != NULL); gFields.context = env->GetFieldID(clazz.get(), "mNativeContext", "J"); CHECK(gFields.context != NULL); clazz.reset(env->FindClass("android/media/MediaTimestamp")); CHECK(clazz.get() != NULL); gFields.mediaTimestampMediaTimeUsID = env->GetFieldID(clazz.get(), "mediaTimeUs", "J"); CHECK(gFields.mediaTimestampMediaTimeUsID != NULL); gFields.mediaTimestampNanoTimeID = env->GetFieldID(clazz.get(), "nanoTime", "J"); CHECK(gFields.mediaTimestampNanoTimeID != NULL); /* GetFieldID is case-sensitive; the Java field in MediaTimestamp is 'clockRate', so "ClockRate" would return NULL and abort on the CHECK below */ gFields.mediaTimestampClockRateID = env->GetFieldID(clazz.get(), "clockRate", "F"); CHECK(gFields.mediaTimestampClockRateID != NULL); } static void android_media_MediaSync_native_setup(JNIEnv *env, jobject thiz) { Loading Loading @@ -267,6 +318,10 @@ static JNINativeMethod gMethods[] = { "(IJ)V", (void *)android_media_MediaSync_native_updateQueuedAudioData }, { "native_getTimestamp", "(Landroid/media/MediaTimestamp;)Z", (void *)android_media_MediaSync_native_getTimestamp }, { "native_init", "()V", (void *)android_media_MediaSync_native_init }, { "native_setup", "()V", (void *)android_media_MediaSync_native_setup }, Loading Loading
api/current.txt +8 −0 Original line number Diff line number Diff line Loading @@ -15985,6 +15985,7 @@ package android.media { method public void configureAudioTrack(android.media.AudioTrack, int); method public void configureSurface(android.view.Surface); method public final android.view.Surface createInputSurface(); method public boolean getTimestamp(android.media.MediaTimestamp); method public void queueAudio(java.nio.ByteBuffer, int, int, long); method public final void release(); method public void setCallback(android.media.MediaSync.Callback, android.os.Handler); Loading @@ -16006,6 +16007,13 @@ package android.media { field public static final int SYNC_EVENT_PRESENTATION_COMPLETE = 1; // 0x1 } public final class MediaTimestamp { ctor public MediaTimestamp(); field public float clockRate; field public long mediaTimeUs; field public long nanoTime; } public final class NotProvisionedException extends android.media.MediaDrmException { ctor public NotProvisionedException(java.lang.String); }
api/system-current.txt +8 −0 Original line number Diff line number Diff line Loading @@ -17196,6 +17196,7 @@ package android.media { method public void configureAudioTrack(android.media.AudioTrack, int); method public void configureSurface(android.view.Surface); method public final android.view.Surface createInputSurface(); method public boolean getTimestamp(android.media.MediaTimestamp); method public void queueAudio(java.nio.ByteBuffer, int, int, long); method public final void release(); method public void setCallback(android.media.MediaSync.Callback, android.os.Handler); Loading @@ -17217,6 +17218,13 @@ package android.media { field public static final int SYNC_EVENT_PRESENTATION_COMPLETE = 1; // 0x1 } public final class MediaTimestamp { ctor public MediaTimestamp(); field public float clockRate; field public long mediaTimeUs; field public long nanoTime; } public final class NotProvisionedException extends android.media.MediaDrmException { ctor public NotProvisionedException(java.lang.String); }
media/java/android/media/MediaSync.java +32 −0 Original line number Diff line number Diff line Loading @@ -17,6 +17,7 @@ package android.media; import android.annotation.IntDef; import android.annotation.NonNull; import android.media.AudioTrack; import android.os.Handler; import android.os.Looper; Loading Loading @@ -386,6 +387,37 @@ final public class MediaSync { return (mode == PLAYBACK_RATE_AUDIO_MODE_RESAMPLE); } /** * Get current playback position. * <p> * The MediaTimestamp represents a clock ticking during media playback. It's represented * by an anchor frame ({@link MediaTimestamp#mediaTimeUs} and {@link MediaTimestamp#nanoTime}) * and clock speed ({@link MediaTimestamp#clockRate}). For continuous playback with * constant speed, its anchor frame doesn't change that often. Therefore, it's recommended * to not call this method often. * <p> * To help users to get current playback position, this method always returns the timestamp of * just-rendered frame, i.e., {@link System#nanoTime} and its corresponding media time. They * can be used as current playback position. * * @param timestamp a reference to a non-null MediaTimestamp instance allocated * and owned by caller. * @return true if a timestamp is available, or false if no timestamp is available. * If a timestamp is available, the MediaTimestamp instance is filled in with * playback rate, together with the current media timestamp and the system nanoTime * corresponding to the measured media timestamp. * In the case that no timestamp is available, any supplied instance is left unaltered. */ public boolean getTimestamp(@NonNull MediaTimestamp timestamp) { if (timestamp == null) { throw new IllegalArgumentException(); } return native_getTimestamp(timestamp); } private native final boolean native_getTimestamp(MediaTimestamp timestamp); /** * Queues the audio data asynchronously for playback (AudioTrack must be in streaming mode). * @param audioData the buffer that holds the data to play. 
This buffer will be returned Loading
media/java/android/media/MediaTimestamp.java 0 → 100644 +52 −0 Original line number Diff line number Diff line /* * Copyright 2015 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package android.media; /** * Structure that groups clock rate of the stream playback, together with the media timestamp * of an anchor frame and the system time when that frame was presented or is committed * to be presented. * The "present" means that audio/video produced on device is detectable by an external * observer off device. * The time is based on the implementation's best effort, using whatever knowledge * is available to the system, but cannot account for any delay unknown to the implementation. * The anchor frame could be any frame, including just-rendered frame, dependent on how * it's selected. When the anchor frame is the just-rendered one, the media time stands for * current position of the playback. * * @see MediaSync#getTimestamp */ public final class MediaTimestamp { /** * Media timestamp in microseconds. */ public long mediaTimeUs; /** * The {@link java.lang.System#nanoTime} corresponding to the media timestamp. */ public long nanoTime; /** * Media clock rate. * It is 1.0 if media clock is in sync with the system clock; * greater than 1.0 if media clock is faster than the system clock; * less than 1.0 if media clock is slower than the system clock. */ public float clockRate; }
media/jni/android_media_MediaSync.cpp +55 −0 Original line number Diff line number Diff line Loading @@ -29,6 +29,7 @@ #include <gui/Surface.h> #include <media/AudioTrack.h> #include <media/stagefright/MediaClock.h> #include <media/stagefright/MediaSync.h> #include <media/stagefright/foundation/ADebug.h> #include <media/stagefright/foundation/AString.h> Loading @@ -39,6 +40,9 @@ namespace android { struct fields_t { jfieldID context; jfieldID mediaTimestampMediaTimeUsID; jfieldID mediaTimestampNanoTimeID; jfieldID mediaTimestampClockRateID; }; static fields_t gFields; Loading Loading @@ -71,6 +75,10 @@ void JMediaSync::setPlaybackRate(float rate) { mSync->setPlaybackRate(rate); } sp<const MediaClock> JMediaSync::getMediaClock() { return mSync->getMediaClock(); } status_t JMediaSync::updateQueuedAudioData( int sizeInBytes, int64_t presentationTimeUs) { return mSync->updateQueuedAudioData(sizeInBytes, presentationTimeUs); Loading Loading @@ -222,12 +230,55 @@ static void android_media_MediaSync_native_updateQueuedAudioData( } } static jboolean android_media_MediaSync_native_getTimestamp( JNIEnv *env, jobject thiz, jobject timestamp) { sp<JMediaSync> sync = getMediaSync(env, thiz); if (sync == NULL) { throwExceptionAsNecessary(env, INVALID_OPERATION); return JNI_FALSE; } sp<const MediaClock> mediaClock = sync->getMediaClock(); if (mediaClock == NULL) { return JNI_FALSE; } int64_t nowUs = ALooper::GetNowUs(); int64_t mediaUs = 0; if (mediaClock->getMediaTime(nowUs, &mediaUs) != OK) { return JNI_FALSE; } env->SetLongField(timestamp, gFields.mediaTimestampMediaTimeUsID, (jlong)mediaUs); env->SetLongField(timestamp, gFields.mediaTimestampNanoTimeID, (jlong)(nowUs * 1000)); env->SetFloatField(timestamp, gFields.mediaTimestampClockRateID, (jfloat)mediaClock->getPlaybackRate()); return JNI_TRUE; } static void android_media_MediaSync_native_init(JNIEnv *env) { ScopedLocalRef<jclass> clazz(env, env->FindClass("android/media/MediaSync")); CHECK(clazz.get() != NULL); 
gFields.context = env->GetFieldID(clazz.get(), "mNativeContext", "J"); CHECK(gFields.context != NULL); clazz.reset(env->FindClass("android/media/MediaTimestamp")); CHECK(clazz.get() != NULL); gFields.mediaTimestampMediaTimeUsID = env->GetFieldID(clazz.get(), "mediaTimeUs", "J"); CHECK(gFields.mediaTimestampMediaTimeUsID != NULL); gFields.mediaTimestampNanoTimeID = env->GetFieldID(clazz.get(), "nanoTime", "J"); CHECK(gFields.mediaTimestampNanoTimeID != NULL); /* GetFieldID is case-sensitive; the Java field in MediaTimestamp is 'clockRate', so "ClockRate" would return NULL and abort on the CHECK below */ gFields.mediaTimestampClockRateID = env->GetFieldID(clazz.get(), "clockRate", "F"); CHECK(gFields.mediaTimestampClockRateID != NULL); } static void android_media_MediaSync_native_setup(JNIEnv *env, jobject thiz) { Loading Loading @@ -267,6 +318,10 @@ static JNINativeMethod gMethods[] = { "(IJ)V", (void *)android_media_MediaSync_native_updateQueuedAudioData }, { "native_getTimestamp", "(Landroid/media/MediaTimestamp;)Z", (void *)android_media_MediaSync_native_getTimestamp }, { "native_init", "()V", (void *)android_media_MediaSync_native_init }, { "native_setup", "()V", (void *)android_media_MediaSync_native_setup }, Loading