
Commit b14f3483 authored by TreeHugger Robot, committed by Android (Google) Code Review

Merge changes from topic "audio-hal-v4"

* changes:
  Audio V4: More fixes of the audio 2.0 API
  Audio V4: Support query microphones information
  Audio V4: Only expose one notification usage
  Audio V4: Add new api in XSD
  Audio V4: Copy audio_policy_configure.xsd 2.0 to 4.0
  Audio V4: Forward tracks attributes to the hal
  Audio V4: Remove deprecated function and enum
  Audio V4: bitfield enum now use the bitfield class
  Audio V4: Add MSD formats and Device
  Audio V4: Add new Audio HAL API
  Audio V4: Remove all non extensible enum value
  Audio V4: Fix doc comments for hidl-doc
  Audio V4: Fix documentation and useable/untestable bugs
  Audio V4: Copy 2.0 .hal files in 4.0
parents 5dd8a990 eacb9969

audio/4.0/IDevice.hal

0 → 100644
+271 −0
/*
 * Copyright (C) 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.audio@4.0;

import android.hardware.audio.common@4.0;
import IStreamIn;
import IStreamOut;

interface IDevice {
    /**
     * Returns whether the audio hardware interface has been initialized.
     *
     * @return retval OK on success, NOT_INITIALIZED on failure.
     */
    initCheck() generates (Result retval);

    /**
     * Sets the audio volume for all audio activities other than voice call. If
     * NOT_SUPPORTED is returned, the software mixer will emulate this
     * capability.
     *
     * @param volume 1.0f means unity, 0.0f is zero.
     * @return retval operation completion status.
     */
    setMasterVolume(float volume) generates (Result retval);

    /**
     * Get the current master volume value for the HAL, if the HAL supports
     * master volume control. For example, AudioFlinger will query this value
     * from the primary audio HAL when the service starts and use the value for
     * setting the initial master volume across all HALs. HALs which do not
     * support this method must return NOT_SUPPORTED in 'retval'.
     *
     * @return retval operation completion status.
     * @return volume 1.0f means unity, 0.0f is zero.
     */
    getMasterVolume() generates (Result retval, float volume);

    /**
     * Sets microphone muting state.
     *
     * @param mute whether microphone is muted.
     * @return retval operation completion status.
     */
    setMicMute(bool mute) generates (Result retval);

    /**
     * Gets whether microphone is muted.
     *
     * @return retval operation completion status.
     * @return mute whether microphone is muted.
     */
    getMicMute() generates (Result retval, bool mute);

    /**
     * Set the audio mute status for all audio activities. If the return value
     * is NOT_SUPPORTED, the software mixer will emulate this capability.
     *
     * @param mute whether audio is muted.
     * @return retval operation completion status.
     */
    setMasterMute(bool mute) generates (Result retval);

    /**
     * Get the current master mute status for the HAL, if the HAL supports
     * master mute control. AudioFlinger will query this value from the primary
     * audio HAL when the service starts and use the value for setting the
     * initial master mute across all HALs. HAL must indicate that the feature
     * is not supported by returning NOT_SUPPORTED status.
     *
     * @return retval operation completion status.
     * @return mute whether audio is muted.
     */
    getMasterMute() generates (Result retval, bool mute);

    /**
     * Returns audio input buffer size according to parameters passed or
     * INVALID_ARGUMENTS if one of the parameters is not supported.
     *
     * @param config audio configuration.
     * @return retval operation completion status.
     * @return bufferSize input buffer size in bytes.
     */
    getInputBufferSize(AudioConfig config)
            generates (Result retval, uint64_t bufferSize);

    /**
     * This method creates and opens the audio hardware output stream.
     * If the stream can not be opened with the proposed audio config,
     * HAL must provide suggested values for the audio config.
     *
     * @param ioHandle handle assigned by AudioFlinger.
     * @param device device type and (if needed) address.
     * @param config stream configuration.
     * @param flags additional flags.
     * @param sourceMetadata Description of the audio that will be played.
     *                       May be used by implementations to configure hardware effects.
     * @return retval operation completion status.
     * @return outStream created output stream.
     * @return suggestedConfig in case of invalid parameters, suggested config.
     */
    openOutputStream(
            AudioIoHandle ioHandle,
            DeviceAddress device,
            AudioConfig config,
            bitfield<AudioOutputFlag> flags,
            SourceMetadata sourceMetadata) generates (
                    Result retval,
                    IStreamOut outStream,
                    AudioConfig suggestedConfig);

    /**
     * This method creates and opens the audio hardware input stream.
     * If the stream can not be opened with the proposed audio config,
     * HAL must provide suggested values for the audio config.
     *
     * @param ioHandle handle assigned by AudioFlinger.
     * @param device device type and (if needed) address.
     * @param config stream configuration.
     * @param flags additional flags.
     * @param source source specification.
     * @param sinkMetadata Description of the audio that is suggested by the client.
     *                     May be used by implementations to configure hardware effects.
     * @return retval operation completion status.
     * @return inStream in case of success, created input stream.
     * @return suggestedConfig in case of invalid parameters, suggested config.
     */
    openInputStream(
            AudioIoHandle ioHandle,
            DeviceAddress device,
            AudioConfig config,
            bitfield<AudioInputFlag> flags,
            SinkMetadata sinkMetadata) generates (
                    Result retval,
                    IStreamIn inStream,
                    AudioConfig suggestedConfig);

    /**
     * Returns whether HAL supports audio patches.
     *
     * @return supports true if audio patches are supported.
     */
    supportsAudioPatches() generates (bool supports);

    /**
     * Creates an audio patch between several source and sink ports.  The handle
     * is allocated by the HAL and must be unique for this audio HAL module.
     *
     * @param sources patch sources.
     * @param sinks patch sinks.
     * @return retval operation completion status.
     * @return patch created patch handle.
     */
    createAudioPatch(vec<AudioPortConfig> sources, vec<AudioPortConfig> sinks)
            generates (Result retval, AudioPatchHandle patch);

    /**
     * Release an audio patch.
     *
     * @param patch patch handle.
     * @return retval operation completion status.
     */
    releaseAudioPatch(AudioPatchHandle patch) generates (Result retval);

    /**
     * Returns the list of supported attributes for a given audio port.
     *
     * As input, 'port' contains the information (type, role, address etc...)
     * needed by the HAL to identify the port.
     *
     * As output, 'resultPort' contains possible attributes (sampling rates,
     * formats, channel masks, gain controllers...) for this port.
     *
     * @param port port identifier.
     * @return retval operation completion status.
     * @return resultPort port descriptor with all parameters filled up.
     */
    getAudioPort(AudioPort port)
            generates (Result retval, AudioPort resultPort);

    /**
     * Set audio port configuration.
     *
     * @param config audio port configuration.
     * @return retval operation completion status.
     */
    setAudioPortConfig(AudioPortConfig config) generates (Result retval);

    /**
     * Gets the HW synchronization source of the device. Calling this method is
     * equivalent to getting AUDIO_PARAMETER_HW_AV_SYNC on the legacy HAL.
     * Optional method
     *
     * @return retval operation completion status: OK or NOT_SUPPORTED.
     * @return hwAvSync HW synchronization source
     */
    getHwAvSync() generates (Result retval, AudioHwSync hwAvSync);

    /**
     * Sets whether the screen is on. Calling this method is equivalent to
     * setting AUDIO_PARAMETER_KEY_SCREEN_STATE on the legacy HAL.
     * Optional method
     *
     * @param turnedOn whether the screen is turned on.
     * @return retval operation completion status.
     */
    setScreenState(bool turnedOn) generates (Result retval);

    /**
     * Generic method for retrieving vendor-specific parameter values.
     * The framework does not interpret the parameters, they are passed
     * in an opaque manner between a vendor application and HAL.
     *
     * Multiple parameters can be retrieved at the same time.
     * The implementation should return as many requested parameters
     * as possible, even if one or more is not supported.
     *
     * @param context provides more information about the request
     * @param keys keys of the requested parameters
     * @return retval operation completion status.
     *         OK must be returned if keys is empty.
     *         NOT_SUPPORTED must be returned if at least one key is unknown.
     * @return parameters parameter key value pairs.
     *         Must contain the value of all requested keys if retval == OK
     */
    getParameters(vec<ParameterValue> context, vec<string> keys)
            generates (Result retval, vec<ParameterValue> parameters);

    /**
     * Generic method for setting vendor-specific parameter values.
     * The framework does not interpret the parameters, they are passed
     * in an opaque manner between a vendor application and HAL.
     *
     * Multiple parameters can be set at the same time, though this is
     * discouraged as it makes failure analysis harder.
     *
     * If possible, a failed setParameters should not impact the platform state.
     *
     * @param context provides more information about the request
     * @param parameters parameter key value pairs.
     * @return retval operation completion status.
     *         All parameters must be successfully set for OK to be returned
     */
    setParameters(vec<ParameterValue> context, vec<ParameterValue> parameters)
            generates (Result retval);

    /**
     * Returns an array with available microphones in device.
     *
     * @return retval INVALID_STATE if the call is not successful,
     *                OK otherwise.
     *
     * @return microphones array with microphones info
     */
    getMicrophones()
         generates(Result retval, vec<MicrophoneInfo> microphones);
};
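
For orientation, here is a rough client-side sketch of driving the C++ proxy that hidl-gen produces for this interface (the queryDevice helper and the chosen volume are illustrative, not part of this change). Methods that generate a single value return it wrapped in Return<>, while methods that generate several values, such as getMasterVolume, deliver them through a completion callback:

#include <android/hardware/audio/4.0/IDevice.h>

using ::android::sp;
using ::android::hardware::Return;
using namespace ::android::hardware::audio::V4_0;

void queryDevice(const sp<IDevice>& device) {
    // Single-value methods return their Result wrapped in Return<>.
    Return<Result> init = device->initCheck();
    if (!init.isOk() || static_cast<Result>(init) != Result::OK) return;

    // For brevity, transport errors from the remaining calls are not checked here.
    device->setMasterVolume(0.5f);  // 1.0f is unity, 0.0f is silence.

    // Methods that generate several values deliver them via a generated callback.
    device->getMasterVolume([](Result retval, float volume) {
        if (retval == Result::OK) {
            // 'volume' is the HAL's current master volume.
        }
    });
}
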
audio/4.0/IDevicesFactory.hal

+59 −0
/*
 * Copyright (C) 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.audio@4.0;

import android.hardware.audio.common@4.0;
import IDevice;

interface IDevicesFactory {
    /** Allows a HAL implementation to be split into multiple independent
     *  devices (called modules in the pre-treble API).
     *  Note that this division is arbitrary and implementations are free
     *  to provide only a Primary device.
     *  The framework will query the devices according to
     *  audio_policy_configuration.xml.
     *
     *  Each Device value is interchangeable with any other and the framework
     *  does not differentiate between values, with the following exceptions:
     *  - the Primary device must always be present;
     *  - the R_SUBMIX device is used to forward the audio of REMOTE_SUBMIX
     *    devices.
     */
    enum Device : int32_t {
        PRIMARY,
        A2DP,
        USB,
        R_SUBMIX,
        STUB,
        CODEC_OFFLOAD,
        SECONDARY,
        AUXILIARY,
        /** Multi Stream Decoder */
        MSD
    };

    /**
     * Opens an audio device. To close the device, it is necessary to release
     * references to the returned device object.
     *
     * @param device device type.
     * @return retval operation completion status. Returns INVALID_ARGUMENTS
     *         if there is no corresponding hardware module found,
     *         NOT_INITIALIZED if an error occurred while opening the hardware
     *         module.
     * @return result the interface for the created device.
     */
    openDevice(Device device) generates (Result retval, IDevice result);
};
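
A hypothetical caller-side sketch (the openPrimaryDevice helper is illustrative, and getService() is assumed to resolve the platform's default instance): the factory is fetched through the generated getService() helper and asked for the mandatory PRIMARY device; dropping the last strong reference to the returned IDevice closes it.

#include <android/hardware/audio/4.0/IDevicesFactory.h>

using ::android::sp;
using namespace ::android::hardware::audio::V4_0;

sp<IDevice> openPrimaryDevice() {
    // Fetch the registered factory instance (assumed default instance name).
    sp<IDevicesFactory> factory = IDevicesFactory::getService();
    if (factory == nullptr) return nullptr;

    sp<IDevice> device;
    factory->openDevice(IDevicesFactory::Device::PRIMARY,
                        [&](Result retval, const sp<IDevice>& result) {
                            if (retval == Result::OK) device = result;
                        });
    return device;  // nullptr on failure; released by dropping the last reference.
}
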
audio/4.0/IPrimaryDevice.hal

+124 −0
/*
 * Copyright (C) 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.audio@4.0;

import android.hardware.audio.common@4.0;
import IDevice;

interface IPrimaryDevice extends IDevice {
    /**
     * Sets the audio volume of a voice call.
     *
     * @param volume 1.0f means unity, 0.0f is zero.
     * @return retval operation completion status.
     */
    setVoiceVolume(float volume) generates (Result retval);

    /**
     * This method is used to notify the HAL about audio mode changes.
     *
     * @param mode new mode.
     * @return retval operation completion status.
     */
    setMode(AudioMode mode) generates (Result retval);

    /**
     * Gets whether BT SCO Noise Reduction and Echo Cancellation are enabled.
     * Calling this method is equivalent to getting AUDIO_PARAMETER_KEY_BT_NREC
     * on the legacy HAL.
     *
     * @return retval operation completion status.
     * @return enabled whether BT SCO NR + EC are enabled.
     */
    getBtScoNrecEnabled() generates (Result retval, bool enabled);

    /**
     * Sets whether BT SCO Noise Reduction and Echo Cancellation are enabled.
     * Calling this method is equivalent to setting AUDIO_PARAMETER_KEY_BT_NREC
     * on the legacy HAL.
     * Optional method
     *
     * @param enabled whether BT SCO NR + EC are enabled.
     * @return retval operation completion status.
     */
    setBtScoNrecEnabled(bool enabled) generates (Result retval);

    /**
     * Gets whether BT SCO Wideband mode is enabled. Calling this method is
     * equivalent to getting AUDIO_PARAMETER_KEY_BT_SCO_WB on the legacy HAL.
     *
     * @return retval operation completion status.
     * @return enabled whether BT Wideband is enabled.
     */
    getBtScoWidebandEnabled() generates (Result retval, bool enabled);

    /**
     * Sets whether BT SCO Wideband mode is enabled. Calling this method is
     * equivalent to setting AUDIO_PARAMETER_KEY_BT_SCO_WB on the legacy HAL.
     * Optional method
     *
     * @param enabled whether BT Wideband is enabled.
     * @return retval operation completion status.
     */
    setBtScoWidebandEnabled(bool enabled) generates (Result retval);

    enum TtyMode : int32_t {
        OFF,
        VCO,
        HCO,
        FULL
    };

    /**
     * Gets current TTY mode selection. Calling this method is equivalent to
     * getting AUDIO_PARAMETER_KEY_TTY_MODE on the legacy HAL.
     *
     * @return retval operation completion status.
     * @return mode TTY mode.
     */
    getTtyMode() generates (Result retval, TtyMode mode);

    /**
     * Sets current TTY mode. Calling this method is equivalent to setting
     * AUDIO_PARAMETER_KEY_TTY_MODE on the legacy HAL.
     *
     * @param mode TTY mode.
     * @return retval operation completion status.
     */
    setTtyMode(TtyMode mode) generates (Result retval);

    /**
     * Gets whether Hearing Aid Compatibility - Telecoil (HAC-T) mode is
     * enabled. Calling this method is equivalent to getting
     * AUDIO_PARAMETER_KEY_HAC on the legacy HAL.
     *
     * @return retval operation completion status.
     * @return enabled whether HAC mode is enabled.
     */
    getHacEnabled() generates (Result retval, bool enabled);

    /**
     * Sets whether Hearing Aid Compatibility - Telecoil (HAC-T) mode is
     * enabled. Calling this method is equivalent to setting
     * AUDIO_PARAMETER_KEY_HAC on the legacy HAL.
     * Optional method
     *
     * @param enabled whether HAC mode is enabled.
     * @return retval operation completion status.
     */
    setHacEnabled(bool enabled) generates (Result retval);
};
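
Since the primary module implements IPrimaryDevice, a client that obtained it as a plain IDevice can down-cast with the generated castFrom() helper before using the call-related methods. A hypothetical sketch (the enterCall helper and the chosen values are illustrative):

#include <android/hardware/audio/4.0/IPrimaryDevice.h>

using ::android::sp;
using ::android::hardware::audio::common::V4_0::AudioMode;
using namespace ::android::hardware::audio::V4_0;

void enterCall(const sp<IDevice>& device) {
    auto cast = IPrimaryDevice::castFrom(device);
    if (!cast.isOk()) return;                // Transport error.
    sp<IPrimaryDevice> primary = cast;
    if (primary == nullptr) return;          // Not the primary module.

    primary->setMode(AudioMode::IN_CALL);    // Notify the HAL of the mode change.
    primary->setVoiceVolume(1.0f);           // Unity gain for the voice call.
}
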

audio/4.0/IStream.hal

0 → 100644
+322 −0

File added; contents collapsed (preview size limit exceeded).

audio/4.0/IStreamIn.hal

+168 −0
/*
 * Copyright (C) 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.audio@4.0;

import android.hardware.audio.common@4.0;
import IStream;

interface IStreamIn extends IStream {
    /**
     * Returns the source descriptor of the input stream. Calling this method is
     * equivalent to getting AUDIO_PARAMETER_STREAM_INPUT_SOURCE on the legacy
     * HAL.
     * Optional method
     *
     * @return retval operation completion status.
     * @return source audio source.
     */
    getAudioSource() generates (Result retval, AudioSource source);

    /**
     * Set the input gain for the audio driver.
     * Optional method
     *
     * @param gain 1.0f is unity, 0.0f is zero.
     * @return retval operation completion status.
     */
    setGain(float gain) generates (Result retval);

    /**
     * Commands that can be executed on the driver reader thread.
     */
    enum ReadCommand : int32_t {
        READ,
        GET_CAPTURE_POSITION
    };

    /**
     * Data structure passed to the driver for executing commands
     * on the driver reader thread.
     */
    struct ReadParameters {
        ReadCommand command;  // discriminator
        union Params {
            uint64_t read;    // READ command, amount of bytes to read, >= 0.
            // No parameters for GET_CAPTURE_POSITION.
        } params;
    };

    /**
     * Data structure passed back to the client via status message queue
     * of 'read' operation.
     *
     * Possible values of 'retval' field:
     *  - OK, read operation was successful;
     *  - INVALID_ARGUMENTS, stream was not configured properly;
     *  - INVALID_STATE, stream is in a state that doesn't allow reads.
     */
    struct ReadStatus {
        Result retval;
        ReadCommand replyTo;  // discriminator
        union Reply {
            uint64_t read;    // READ command, amount of bytes read, >= 0.
            struct CapturePosition { // same as generated by getCapturePosition.
                uint64_t frames;
                uint64_t time;
            } capturePosition;
        } reply;
    };

    /**
     * Called when the metadata of the stream's sink has been changed.
     * @param sinkMetadata Description of the audio that is suggested by the clients.
     */
    updateSinkMetadata(SinkMetadata sinkMetadata);

    /**
     * Set up required transports for receiving audio buffers from the driver.
     *
     * The transport consists of three message queues:
     *  -- command queue is used to instruct the reader thread what operation
     *     to perform;
     *  -- data queue is used for passing audio data from the driver
     *     to the client;
     *  -- status queue is used for reporting operation status
     *     (e.g. amount of bytes actually read or error code).
     *
     * The driver operates on a dedicated thread. The client must ensure that
     * the thread is given an appropriate priority and assigned to correct
     * scheduler and cgroup. For this purpose, the method returns identifiers
     * of the driver thread.
     *
     * @param frameSize the size of a single frame, in bytes.
     * @param framesCount the number of frames in a buffer.
     * @param threadPriority priority of the driver thread.
     * @return retval OK if both message queues were created successfully.
     *                INVALID_STATE if the method was already called.
     *                INVALID_ARGUMENTS if there was a problem setting up
     *                                  the queues.
     * @return commandMQ a message queue used for passing commands.
     * @return dataMQ a message queue used for passing audio data in the format
     *                specified at the stream opening.
     * @return statusMQ a message queue used for passing status from the driver
     *                  using ReadStatus structures.
     * @return threadInfo identifiers of the driver's dedicated thread.
     */
    prepareForReading(uint32_t frameSize, uint32_t framesCount)
    generates (
            Result retval,
            fmq_sync<ReadParameters> commandMQ,
            fmq_sync<uint8_t> dataMQ,
            fmq_sync<ReadStatus> statusMQ,
            ThreadInfo threadInfo);

    /**
     * Returns the number of input frames lost in the audio driver since the last
     * call of this function.
     *
     * Audio driver is expected to reset the value to 0 and restart counting
     * upon returning the current value by this function call. Such loss
     * typically occurs when the user space process is blocked longer than the
     * capacity of audio driver buffers.
     *
     * @return framesLost the number of input audio frames lost.
     */
    getInputFramesLost() generates (uint32_t framesLost);

    /**
     * Return a recent count of the number of audio frames received and the
     * clock time associated with that frame count.
     *
     * @return retval INVALID_STATE if the device is not ready/available,
     *                NOT_SUPPORTED if the command is not supported,
     *                OK otherwise.
     * @return frames the total frame count received. This must be as early in
     *                the capture pipeline as possible. In general, frames
     *                must be non-negative and must not go "backwards".
     * @return time is the clock monotonic time when frames was measured. In
     *              general, time must be a positive quantity and must not
     *              go "backwards".
     */
    getCapturePosition()
            generates (Result retval, uint64_t frames, uint64_t time);

    /**
     * Returns an array with active microphones in the stream.
     *
     * @return retval INVALID_STATE if the call is not successful,
     *                OK otherwise.
     *
     * @return microphones array with microphones info
     */
    getActiveMicrophones()
               generates(Result retval, vec<MicrophoneInfo> microphones);
};
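
The capture path is driven through the fast message queues set up by prepareForReading() rather than by a HIDL call per buffer. Below is a simplified, hypothetical client-side sketch of mapping those queues and issuing a single READ command; readOnce is illustrative, and a real client would block on the status queue's EventFlag instead of polling.

#include <memory>
#include <vector>

#include <android/hardware/audio/4.0/IStreamIn.h>
#include <fmq/MessageQueue.h>

using ::android::sp;
using ::android::hardware::kSynchronizedReadWrite;
using ::android::hardware::MessageQueue;
using namespace ::android::hardware::audio::V4_0;

using CommandMQ = MessageQueue<IStreamIn::ReadParameters, kSynchronizedReadWrite>;
using DataMQ = MessageQueue<uint8_t, kSynchronizedReadWrite>;
using StatusMQ = MessageQueue<IStreamIn::ReadStatus, kSynchronizedReadWrite>;

bool readOnce(const sp<IStreamIn>& stream, uint32_t frameSize, uint32_t framesCount,
              std::vector<uint8_t>* buffer) {
    std::unique_ptr<CommandMQ> commandMQ;
    std::unique_ptr<DataMQ> dataMQ;
    std::unique_ptr<StatusMQ> statusMQ;

    // Map the queues created by the HAL onto client-side MessageQueue objects.
    stream->prepareForReading(
            frameSize, framesCount,
            [&](Result retval, const auto& commandDesc, const auto& dataDesc,
                const auto& statusDesc, const ThreadInfo& /*threadInfo*/) {
                if (retval != Result::OK) return;
                commandMQ = std::make_unique<CommandMQ>(commandDesc);
                dataMQ = std::make_unique<DataMQ>(dataDesc);
                statusMQ = std::make_unique<StatusMQ>(statusDesc);
            });
    if (!commandMQ || !dataMQ || !statusMQ || !dataMQ->isValid()) return false;

    // Ask the HAL's reader thread for one buffer worth of captured data.
    IStreamIn::ReadParameters cmd = {};
    cmd.command = IStreamIn::ReadCommand::READ;
    cmd.params.read = uint64_t{frameSize} * framesCount;
    if (!commandMQ->write(&cmd)) return false;

    // Poll for the status (a real client waits on the queue's EventFlag).
    IStreamIn::ReadStatus status = {};
    while (!statusMQ->read(&status)) { /* spin; illustration only */ }
    if (status.retval != Result::OK) return false;

    buffer->resize(status.reply.read);
    return dataMQ->read(buffer->data(), buffer->size());
}
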