Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit acf45205 authored by Andy Hung's avatar Andy Hung Committed by Android (Google) Code Review
Browse files

Merge "Add TV audio device management APIs."

parents 589424a2 97aa07f8
Loading
Loading
Loading
Loading
+4 −0
Original line number Diff line number Diff line
@@ -23830,6 +23830,8 @@ package android.media {
    method @NonNull public int[] getChannelCounts();
    method @NonNull public int[] getChannelIndexMasks();
    method @NonNull public int[] getChannelMasks();
    method @NonNull public int[] getEncapsulationMetadataTypes();
    method @NonNull public int[] getEncapsulationModes();
    method @NonNull public int[] getEncodings();
    method public int getId();
    method public CharSequence getProductName();
@@ -24450,6 +24452,8 @@ package android.media {
    field public static final int DUAL_MONO_MODE_LR = 1; // 0x1
    field public static final int DUAL_MONO_MODE_OFF = 0; // 0x0
    field public static final int DUAL_MONO_MODE_RR = 3; // 0x3
    field public static final int ENCAPSULATION_METADATA_TYPE_DVB_AD_DESCRIPTOR = 2; // 0x2
    field public static final int ENCAPSULATION_METADATA_TYPE_FRAMEWORK_TUNER = 1; // 0x1
    field public static final int ENCAPSULATION_MODE_ELEMENTARY_STREAM = 1; // 0x1
    field public static final int ENCAPSULATION_MODE_HANDLE = 2; // 0x2
    field public static final int ENCAPSULATION_MODE_NONE = 0; // 0x0
+3 −0
Original line number Diff line number Diff line
@@ -4222,9 +4222,11 @@ package android.media {
    method @Deprecated public int abandonAudioFocus(android.media.AudioManager.OnAudioFocusChangeListener, android.media.AudioAttributes);
    method public void clearAudioServerStateCallback();
    method @RequiresPermission(android.Manifest.permission.MODIFY_AUDIO_ROUTING) public int dispatchAudioFocusChange(@NonNull android.media.AudioFocusInfo, int, @NonNull android.media.audiopolicy.AudioPolicy);
    method @IntRange(from=0) public int getAdditionalOutputDeviceDelay(@NonNull android.media.AudioDeviceInfo);
    method @NonNull @RequiresPermission(android.Manifest.permission.MODIFY_AUDIO_ROUTING) public static java.util.List<android.media.audiopolicy.AudioProductStrategy> getAudioProductStrategies();
    method @NonNull @RequiresPermission(android.Manifest.permission.MODIFY_AUDIO_ROUTING) public static java.util.List<android.media.audiopolicy.AudioVolumeGroup> getAudioVolumeGroups();
    method @NonNull @RequiresPermission(android.Manifest.permission.MODIFY_AUDIO_ROUTING) public java.util.List<android.media.AudioDeviceAddress> getDevicesForAttributes(@NonNull android.media.AudioAttributes);
    method @IntRange(from=0) public int getMaxAdditionalOutputDeviceDelay(@NonNull android.media.AudioDeviceInfo);
    method @IntRange(from=0) @RequiresPermission(android.Manifest.permission.MODIFY_AUDIO_ROUTING) public int getMaxVolumeIndexForAttributes(@NonNull android.media.AudioAttributes);
    method @IntRange(from=0) @RequiresPermission(android.Manifest.permission.MODIFY_AUDIO_ROUTING) public int getMinVolumeIndexForAttributes(@NonNull android.media.AudioAttributes);
    method @Nullable @RequiresPermission(android.Manifest.permission.MODIFY_AUDIO_ROUTING) public android.media.AudioDeviceAddress getPreferredDeviceForStrategy(@NonNull android.media.audiopolicy.AudioProductStrategy);
@@ -4238,6 +4240,7 @@ package android.media {
    method @RequiresPermission(android.Manifest.permission.MODIFY_PHONE_STATE) public int requestAudioFocus(android.media.AudioManager.OnAudioFocusChangeListener, @NonNull android.media.AudioAttributes, int, int) throws java.lang.IllegalArgumentException;
    method @Deprecated @RequiresPermission(anyOf={android.Manifest.permission.MODIFY_PHONE_STATE, android.Manifest.permission.MODIFY_AUDIO_ROUTING}) public int requestAudioFocus(android.media.AudioManager.OnAudioFocusChangeListener, @NonNull android.media.AudioAttributes, int, int, android.media.audiopolicy.AudioPolicy) throws java.lang.IllegalArgumentException;
    method @RequiresPermission(android.Manifest.permission.MODIFY_AUDIO_ROUTING) public int requestAudioFocus(@NonNull android.media.AudioFocusRequest, @Nullable android.media.audiopolicy.AudioPolicy);
    method @RequiresPermission(android.Manifest.permission.MODIFY_AUDIO_ROUTING) public boolean setAdditionalOutputDeviceDelay(@NonNull android.media.AudioDeviceInfo, @IntRange(from=0) int);
    method public void setAudioServerStateCallback(@NonNull java.util.concurrent.Executor, @NonNull android.media.AudioManager.AudioServerStateCallback);
    method @RequiresPermission(android.Manifest.permission.MODIFY_AUDIO_ROUTING) public void setFocusRequestResult(@NonNull android.media.AudioFocusInfo, int, @NonNull android.media.audiopolicy.AudioPolicy);
    method @RequiresPermission(android.Manifest.permission.MODIFY_AUDIO_ROUTING) public boolean setPreferredDeviceForStrategy(@NonNull android.media.audiopolicy.AudioProductStrategy, @NonNull android.media.AudioDeviceAddress);
+34 −0
Original line number Diff line number Diff line
@@ -422,6 +422,40 @@ public final class AudioDeviceInfo {
        return AudioFormat.filterPublicFormats(mPort.formats());
    }

    /**
     * Returns the encapsulation modes this device supports.
     *
     * Possible values in the result include
     * {@link AudioTrack#ENCAPSULATION_MODE_ELEMENTARY_STREAM} and
     * {@link AudioTrack#ENCAPSULATION_MODE_HANDLE}.
     *
     * @return An array of supported encapsulation modes for the device.  This
     *     may be an empty array if no encapsulation modes are supported.
     */
    public @NonNull int[] getEncapsulationModes() {
        // Stub: the real getter lands in r-dev / r-tv-dev. Hand back a fresh
        // array so callers can never mutate internal state.
        final int[] modes = new int[0];
        return modes;
    }

    /**
     * Returns the encapsulation metadata types this device supports.
     *
     * Any metadata type reported here should be usable with every
     * encapsulation mode the device supports.  Individual metadata types may
     * apply only to certain compressed stream formats; the returned list is
     * the union of those subsets.
     *
     * Possible values in the result include
     * {@link AudioTrack#ENCAPSULATION_METADATA_TYPE_FRAMEWORK_TUNER} and
     * {@link AudioTrack#ENCAPSULATION_METADATA_TYPE_DVB_AD_DESCRIPTOR}.
     *
     * @return An array of supported encapsulation metadata types for the device.  This
     *     may be an empty array if no metadata types are supported.
     */
    public @NonNull int[] getEncapsulationMetadataTypes() {
        // Stub: the real getter lands in r-dev / r-tv-dev. Hand back a fresh
        // array so callers can never mutate internal state.
        final int[] metadataTypes = new int[0];
        return metadataTypes;
    }

   /**
     * @return The device type identifier of the audio device (i.e. TYPE_BUILTIN_SPEAKER).
     */
+64 −0
Original line number Diff line number Diff line
@@ -4566,6 +4566,70 @@ public class AudioManager {
        }
    }

    /**
     * @hide
     * Sets an additional audio output device delay in milliseconds.
     *
     * The additional output delay is a request to the output device to
     * delay audio presentation (generally with respect to video presentation for better
     * synchronization).
     * It may not be supported by all output devices,
     * and typically increases the audio latency by the amount of additional
     * audio delay requested.
     *
     * If additional audio delay is supported by an audio output device,
     * it is expected to be supported for all output streams (and configurations)
     * opened on that device.
     *
     * @param device an instance of {@link AudioDeviceInfo} returned from
     *     {@link #getDevices(int)}.
     * @param delayMs delay in milliseconds desired.  This should be in range of {@code 0}
     *     to the value returned by {@link #getMaxAdditionalOutputDeviceDelay(AudioDeviceInfo)}.
     * @return true if successful, false if the device does not support output device delay
     *     or the delay is not in range of
     *     {@link #getMaxAdditionalOutputDeviceDelay(AudioDeviceInfo)}.
     */
    @SystemApi
    @RequiresPermission(android.Manifest.permission.MODIFY_AUDIO_ROUTING)
    public boolean setAdditionalOutputDeviceDelay(
            @NonNull AudioDeviceInfo device, @IntRange(from = 0) int delayMs) {
        Objects.requireNonNull(device);
        // Implement the setter in r-dev or r-tv-dev as needed.
        return false;
    }

    /**
     * @hide
     * Returns the current additional audio output device delay in milliseconds.
     *
     * @param device an instance of {@link AudioDeviceInfo} returned from
     *     {@link #getDevices(int)}.
     * @return the additional output device delay. This is a non-negative number.
     *     {@code 0} is returned if unsupported.
     */
    @SystemApi
    @IntRange(from = 0)
    public int getAdditionalOutputDeviceDelay(@NonNull AudioDeviceInfo device) {
        Objects.requireNonNull(device);
        // Implement the getter in r-dev or r-tv-dev as needed.
        return 0;
    }

    /**
     * @hide
     * Returns the maximum additional audio output device delay in milliseconds.
     *
     * @param device an instance of {@link AudioDeviceInfo} returned from
     *     {@link #getDevices(int)}.
     * @return the maximum output device delay in milliseconds that can be set.
     *     This is a non-negative number
     *     representing the additional audio delay supported for the device.
     *     {@code 0} is returned if unsupported.
     */
    @SystemApi
    @IntRange(from = 0)
    public int getMaxAdditionalOutputDeviceDelay(@NonNull AudioDeviceInfo device) {
        Objects.requireNonNull(device);
        // Implement the getter in r-dev or r-tv-dev as needed.
        return 0;
    }

    /**
     * Returns the estimated latency for the given stream type in milliseconds.
     *
+32 −0
Original line number Diff line number Diff line
@@ -256,6 +256,38 @@ public class AudioTrack extends PlayerBase
     */
    public static final int ENCAPSULATION_MODE_HANDLE = 2;

    /*
     * Enumeration of metadata types permitted for use by
     * encapsulation mode audio streams.
     */
    /** @hide */
    @IntDef(prefix = { "ENCAPSULATION_METADATA_TYPE_" }, value = {
        ENCAPSULATION_METADATA_TYPE_NONE, /* reserved */
        ENCAPSULATION_METADATA_TYPE_FRAMEWORK_TUNER,
        ENCAPSULATION_METADATA_TYPE_DVB_AD_DESCRIPTOR,
    })
    @Retention(RetentionPolicy.SOURCE)
    public @interface EncapsulationMetadataType {}

    /**
     * Reserved; do not use.
     * @hide
     */
    public static final int ENCAPSULATION_METADATA_TYPE_NONE = 0; // reserved

    /**
     * Encapsulation metadata type for framework tuner information.
     *
     * TODO(b/147778408) Link: Fill in Tuner API info.
     */
    public static final int ENCAPSULATION_METADATA_TYPE_FRAMEWORK_TUNER = 1;

    /**
     * Encapsulation metadata type for DVB AD descriptor.
     *
     * This metadata is formatted per ETSI TS 101 154 Table E.1: AD_descriptor.
     */
    public static final int ENCAPSULATION_METADATA_TYPE_DVB_AD_DESCRIPTOR = 2;

    /* Dual Mono handling is used when a stereo audio stream
     * contains separate audio content on the left and right channels.
     * Such information about the content of the stream may be found, for example, in