Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 967eac2f authored by Narayan Kamath's avatar Narayan Kamath Committed by Android (Google) Code Review
Browse files

Merge "Fix some AudioTrack related issues."

parents 7a1ac5c0 673f360b
Loading
Loading
Loading
Loading
+27 −19
Original line number Original line Diff line number Diff line
@@ -356,9 +356,7 @@ class AudioPlaybackHandler {
        mLastSynthesisRequest = param;
        mLastSynthesisRequest = param;


        // Create the audio track.
        // Create the audio track.
        final AudioTrack audioTrack = createStreamingAudioTrack(
        final AudioTrack audioTrack = createStreamingAudioTrack(param);
                param.mStreamType, param.mSampleRateInHz, param.mAudioFormat,
                param.mChannelCount, param.mVolume, param.mPan);


        if (DBG) Log.d(TAG, "Created audio track [" + audioTrack.hashCode() + "]");
        if (DBG) Log.d(TAG, "Created audio track [" + audioTrack.hashCode() + "]");


@@ -405,16 +403,10 @@ class AudioPlaybackHandler {
        param.mLogger.onPlaybackStart();
        param.mLogger.onPlaybackStart();
    }
    }


    // Wait for the audio track to stop playing, and then release its resources.
    private void handleSynthesisDone(MessageParams msg) {
    private void handleSynthesisDone(MessageParams msg) {
        final SynthesisMessageParams params = (SynthesisMessageParams) msg;
        final SynthesisMessageParams params = (SynthesisMessageParams) msg;
        handleSynthesisDone(params);
        // This call is delayed more than it should be, but we are
        // certain at this point that we have all the data we want.
        params.mLogger.onWriteData();
    }


    // Wait for the audio track to stop playing, and then release its resources.
    private void handleSynthesisDone(SynthesisMessageParams params) {
        if (DBG) Log.d(TAG, "handleSynthesisDone()");
        if (DBG) Log.d(TAG, "handleSynthesisDone()");
        final AudioTrack audioTrack = params.getAudioTrack();
        final AudioTrack audioTrack = params.getAudioTrack();


@@ -422,6 +414,10 @@ class AudioPlaybackHandler {
            return;
            return;
        }
        }


        if (params.mBytesWritten < params.mAudioBufferSize) {
            audioTrack.stop();
        }

        if (DBG) Log.d(TAG, "Waiting for audio track to complete : " +
        if (DBG) Log.d(TAG, "Waiting for audio track to complete : " +
                audioTrack.hashCode());
                audioTrack.hashCode());
        blockUntilDone(params);
        blockUntilDone(params);
@@ -442,8 +438,15 @@ class AudioPlaybackHandler {
        }
        }
        params.getDispatcher().dispatchUtteranceCompleted();
        params.getDispatcher().dispatchUtteranceCompleted();
        mLastSynthesisRequest = null;
        mLastSynthesisRequest = null;
        params.mLogger.onWriteData();
    }
    }


    /**
     * The minimum increment of time to wait for an AudioTrack to finish
     * playing.
     */
    private static final long MIN_SLEEP_TIME_MS = 20;

    private static void blockUntilDone(SynthesisMessageParams params) {
    private static void blockUntilDone(SynthesisMessageParams params) {
        if (params.mAudioTrack == null || params.mBytesWritten <= 0) {
        if (params.mAudioTrack == null || params.mBytesWritten <= 0) {
            return;
            return;
@@ -460,36 +463,41 @@ class AudioPlaybackHandler {
                break;
                break;
            }
            }


            long estimatedTimeMs = ((lengthInFrames - currentPosition) * 1000) /
            final long estimatedTimeMs = ((lengthInFrames - currentPosition) * 1000) /
                    audioTrack.getSampleRate();
                    audioTrack.getSampleRate();


            if (DBG) Log.d(TAG, "About to sleep for : " + estimatedTimeMs + " ms," +
            final long sleepTimeMs = Math.max(estimatedTimeMs, MIN_SLEEP_TIME_MS);
                    " Playback position : " + currentPosition);

            if (DBG) Log.d(TAG, "About to sleep for : " + sleepTimeMs + " ms," +
                    " Playback position : " + currentPosition + ", Length in frames : "
                    + lengthInFrames);
            try {
            try {
                Thread.sleep(estimatedTimeMs);
                Thread.sleep(sleepTimeMs);
            } catch (InterruptedException ie) {
            } catch (InterruptedException ie) {
                break;
                break;
            }
            }
        }
        }
    }
    }


    private static AudioTrack createStreamingAudioTrack(int streamType, int sampleRateInHz,
    private static AudioTrack createStreamingAudioTrack(SynthesisMessageParams params) {
            int audioFormat, int channelCount, float volume, float pan) {
        final int channelConfig = getChannelConfig(params.mChannelCount);
        int channelConfig = getChannelConfig(channelCount);
        final int sampleRateInHz = params.mSampleRateInHz;
        final int audioFormat = params.mAudioFormat;


        int minBufferSizeInBytes
        int minBufferSizeInBytes
                = AudioTrack.getMinBufferSize(sampleRateInHz, channelConfig, audioFormat);
                = AudioTrack.getMinBufferSize(sampleRateInHz, channelConfig, audioFormat);
        int bufferSizeInBytes = Math.max(MIN_AUDIO_BUFFER_SIZE, minBufferSizeInBytes);
        int bufferSizeInBytes = Math.max(MIN_AUDIO_BUFFER_SIZE, minBufferSizeInBytes);


        AudioTrack audioTrack = new AudioTrack(streamType, sampleRateInHz, channelConfig,
        AudioTrack audioTrack = new AudioTrack(params.mStreamType, sampleRateInHz, channelConfig,
                audioFormat, bufferSizeInBytes, AudioTrack.MODE_STREAM);
                audioFormat, bufferSizeInBytes, AudioTrack.MODE_STREAM);
        if (audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
        if (audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
            Log.w(TAG, "Unable to create audio track.");
            Log.w(TAG, "Unable to create audio track.");
            audioTrack.release();
            audioTrack.release();
            return null;
            return null;
        }
        }
        params.mAudioBufferSize = bufferSizeInBytes;


        setupVolume(audioTrack, volume, pan);
        setupVolume(audioTrack, params.mVolume, params.mPan);
        return audioTrack;
        return audioTrack;
    }
    }


+2 −0
Original line number Original line Diff line number Diff line
@@ -35,6 +35,7 @@ final class SynthesisMessageParams extends MessageParams {
    volatile AudioTrack mAudioTrack;
    volatile AudioTrack mAudioTrack;
    // Not volatile, accessed only from the synthesis thread.
    // Not volatile, accessed only from the synthesis thread.
    int mBytesWritten;
    int mBytesWritten;
    int mAudioBufferSize;


    private final LinkedList<ListEntry> mDataBufferList = new LinkedList<ListEntry>();
    private final LinkedList<ListEntry> mDataBufferList = new LinkedList<ListEntry>();


@@ -55,6 +56,7 @@ final class SynthesisMessageParams extends MessageParams {
        // initially null.
        // initially null.
        mAudioTrack = null;
        mAudioTrack = null;
        mBytesWritten = 0;
        mBytesWritten = 0;
        mAudioBufferSize = 0;
    }
    }


    @Override
    @Override