Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 83c38843 authored by Arun Johnson
Browse files

Media Benchmark: Simulating audio playback

Adds frame-releasing functionality to the benchmarks to
simulate audio playback. This is used to measure and compare
power consumption across different configurations.

Bug: 361349204
Flag: EXEMPT does not affect framework behaviour

Change-Id: Iea6e31981cc2b7908d8e5547c3f44bbc6d915e6b
parent 84fdab92
Loading
Loading
Loading
Loading
+40 −2
Original line number Diff line number Diff line
@@ -18,6 +18,7 @@ package com.android.media.benchmark.library;

import android.view.Surface;

import android.media.AudioFormat;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaFormat;
@@ -58,6 +59,8 @@ public class Decoder implements IBufferXfer.IReceiveBuffer {
    protected int mNumOutputFrame;
    protected int mIndex;

    protected boolean mUseFrameReleaseQueue = false;

    protected ArrayList<ByteBuffer> mInputBuffer;
    protected FileOutputStream mOutputStream;
    protected FrameReleaseQueue mFrameReleaseQueue = null;
@@ -94,10 +97,24 @@ public class Decoder implements IBufferXfer.IReceiveBuffer {
        mSignalledError = false;
        mOutputStream = outputStream;
    }

    /**
     * Configures the decoder for audio playback simulation (no output surface).
     *
     * @param render whether buffers are rendered when released
     * @param useFrameReleaseQueue whether to pace buffer release through a FrameReleaseQueue
     * @param numInFramesRequired number of input frames the decode run should consume
     */
    public void setupDecoder(
            boolean render, boolean useFrameReleaseQueue, int numInFramesRequired) {
        mSignalledError = false;
        mNumInFramesRequired = numInFramesRequired;
        mUseFrameReleaseQueue = useFrameReleaseQueue;
        mRender = render;
        // Audio path: no Surface is needed.
        setupDecoder(null);
    }

    /**
     * Convenience overload of {@link #setupDecoder(Surface, boolean, boolean, int, int)}
     * that passes -1 for numInFramesRequired (presumably "no explicit input-frame
     * requirement" — confirm against the five-argument overload's handling of -1).
     */
    public void setupDecoder(Surface surface, boolean render,
            boolean useFrameReleaseQueue, int frameRate) {
        setupDecoder(surface, render, useFrameReleaseQueue, frameRate, -1);
    }

    public void setupDecoder(Surface surface, boolean render,
            boolean useFrameReleaseQueue, int frameRate, int numInFramesRequired) {
        mSignalledError = false;
@@ -166,6 +183,18 @@ public class Decoder implements IBufferXfer.IReceiveBuffer {
        /**
         * Lazily creates an audio-pacing FrameReleaseQueue once the codec
         * reports its actual output PCM format.
         */
        public void onOutputFormatChanged(
                @NonNull MediaCodec mediaCodec, @NonNull MediaFormat format) {
            Log.i(TAG, "Output format changed. Format: " + format.toString());
            boolean needAudioQueue = mUseFrameReleaseQueue
                    && mFrameReleaseQueue == null && mMime.startsWith("audio/");
            if (needAudioQueue) {
                // Default to 16-bit PCM when the format omits the encoding key.
                int pcmEncoding = format.getInteger(MediaFormat.KEY_PCM_ENCODING,
                        AudioFormat.ENCODING_PCM_16BIT);
                int bytesPerSample = AudioFormat.getBytesPerSample(pcmEncoding);
                int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
                int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
                mFrameReleaseQueue = new FrameReleaseQueue(
                        mRender, sampleRate, channelCount, bytesPerSample);
                mFrameReleaseQueue.setMediaCodec(mCodec);
            }
        }

        @Override
@@ -395,8 +424,17 @@ public class Decoder implements IBufferXfer.IReceiveBuffer {
            }
        }
        if (mFrameReleaseQueue != null) {
            if (mMime.startsWith("audio/")) {
                try {
                    ByteBuffer outputBuffer = mediaCodec.getOutputBuffer(outputBufferId);
                    mFrameReleaseQueue.pushFrame(outputBufferId, outputBuffer.remaining());
                } catch (Exception e) {
                    Log.d(TAG, "Error in getting MediaCodec buffer" + e.toString());
                }
            } else {
                mFrameReleaseQueue.pushFrame(mNumOutputFrame, outputBufferId,
                                                outputBufferInfo.presentationTimeUs);
            }
        } else if (mIBufferSend != null) {
            IBufferXfer.BufferXferInfo info = new IBufferXfer.BufferXferInfo();
            info.buf = mediaCodec.getOutputBuffer(outputBufferId);
+146 −24
Original line number Diff line number Diff line
@@ -29,45 +29,89 @@ import java.util.concurrent.TimeUnit;

public class FrameReleaseQueue {
    private static final String TAG = "FrameReleaseQueue";
    private static final boolean DEBUG = false;
    private final String MIME_AV1 = "video/av01";
    private final int AV1_SUPERFRAME_DELAY = 6;
    private final int THRESHOLD_TIME = 5;

    private final long HOUR_IN_MS = (60 * 60 * 1000L);
    private final long MINUTE_IN_MS = (60 * 1000L);

    private MediaCodec mCodec;
    private LinkedBlockingQueue<FrameInfo> mFrameInfoQueue;
    private ReleaseThread mReleaseThread;
    private AtomicBoolean doFrameRelease = new AtomicBoolean(false);
    private boolean mReleaseJobStarted = false;
    private AtomicBoolean mReleaseJobStarted = new AtomicBoolean(false);
    private boolean mRender = false;
    private int mWaitTime = 40; // milliseconds per frame
    private long mWaitTime = 40; // milliseconds per frame
    private int mWaitTimeCorrection = 0;
    private int mCorrectionLoopCount;
    private int firstReleaseTime = -1;
    private int mAllowedDelayTime = THRESHOLD_TIME;
    protected long firstReleaseTime = -1;
    private long mAllowedDelayTime = THRESHOLD_TIME;
    private int mFrameDelay = 0;
    private final ScheduledExecutorService mScheduler = Executors.newScheduledThreadPool(1);

    /**
     * Creates a release queue that paces video buffer release at a fixed frame rate.
     * The remainder of 1000/frameRate is spread over mCorrectionLoopCount iterations
     * so the average period stays exact.
     *
     * @param render whether released buffers are rendered
     * @param frameRate nominal playback rate in frames per second; must be &gt; 0
     * @throws IllegalArgumentException if frameRate is not positive
     */
    public FrameReleaseQueue(boolean render, int frameRate) {
        if (frameRate <= 0) {
            // Guard the divisions below; a zero rate previously surfaced only as
            // a bare ArithmeticException.
            throw new IllegalArgumentException("frameRate must be > 0, got " + frameRate);
        }
        this.mFrameInfoQueue = new LinkedBlockingQueue();
        this.mReleaseThread = new ReleaseThread();
        this.doFrameRelease.set(true);
        this.mRender = render;
        this.mWaitTime = 1000 / frameRate; // wait time in milliseconds per frame
        int waitTimeRemainder = 1000 % frameRate;
        int gcd = gcd(frameRate, waitTimeRemainder);
        this.mCorrectionLoopCount = frameRate / gcd;
        this.mWaitTimeCorrection = waitTimeRemainder / gcd;
        Log.i(TAG, "Constructed FrameReleaseQueue with wait time " + this.mWaitTime + " ms");
    }

    /**
     * Creates a release queue that paces audio buffer release from PCM stream
     * parameters; pacing itself is delegated to an AudioRendererThread.
     *
     * @param render whether released buffers are rendered
     * @param sampleRate PCM sample rate in Hz
     * @param nChannels channel count
     * @param bytesPerChannel bytes per sample per channel
     */
    public FrameReleaseQueue(boolean render, int sampleRate, int nChannels, int bytesPerChannel) {
        this.mRender = render;
        this.doFrameRelease.set(true);
        this.mFrameInfoQueue = new LinkedBlockingQueue();
        this.mReleaseThread = new AudioRendererThread(sampleRate, nChannels, bytesPerChannel);
    }

    /** Holder describing one decoded output buffer queued for paced release. */
    private static class FrameInfo {
        private int number;       // sequential frame number (video path)
        private int bufferId;     // MediaCodec output buffer id
        private int displayTime;  // target display time in ms (video path)
        private int bytes;        // payload size in bytes (audio path)

        /** Video-path frame, paced by display timestamp. */
        public FrameInfo(int frameNumber, int frameBufferId, int frameDisplayTime) {
            number = frameNumber;
            bufferId = frameBufferId;
            displayTime = frameDisplayTime;
        }

        /** Audio-path frame, paced by byte count. */
        public FrameInfo(int frameBufferId, int bytes) {
            this.bufferId = frameBufferId;
            this.bytes = bytes;
        }
    }

    private class ReleaseThread extends Thread {
        private int mLoopCount = 0;
        private int mNextReleaseTime = 0;
        private long mNextReleaseTime = 0;

        /**
         * Logs the elapsed playback time (hours/minutes/seconds) since the
         * first buffer release; logs a notice if timing is not initialized.
         */
        protected void printPlaybackTime() {
            if (firstReleaseTime == -1) {
                Log.d(TAG, "Playback Time not initialized");
                return;
            }
            long elapsed = getCurSysTime() - firstReleaseTime;
            final long hours = elapsed / HOUR_IN_MS;
            elapsed -= hours * HOUR_IN_MS;
            final long minutes = elapsed / MINUTE_IN_MS;
            elapsed -= minutes * MINUTE_IN_MS;
            Log.d(TAG, "Playback time: "
                    + hours + "h "
                    + minutes + "m "
                    + (elapsed / 1000.0) + "s");
        }

        @SuppressWarnings("FutureReturnValueIgnored")
        public void run() {
            /* Check if the release thread wakes up too late */
            if (mLoopCount != 0) {
                int delta = getCurSysTime() - mNextReleaseTime;
                long delta = getCurSysTime() - mNextReleaseTime;
                if (delta >= THRESHOLD_TIME) {
                    Log.d(TAG, "Release thread wake up late by " + delta);
                    /* For accidental late wake up, we should relax the timestamp
@@ -93,8 +137,8 @@ public class FrameReleaseQueue {
                        popAndRelease(false);
                    } else {
                        mNextReleaseTime += mWaitTime;
                        int curSysTime = getCurSysTime();
                        int curMediaTime = curSysTime - firstReleaseTime;
                        long curSysTime = getCurSysTime();
                        long curMediaTime = curSysTime - firstReleaseTime;
                        while (curFrameInfo != null && curFrameInfo.displayTime > 0 &&
                                curFrameInfo.displayTime <= curMediaTime) {
                            if (!((curMediaTime - curFrameInfo.displayTime) <= mAllowedDelayTime)) {
@@ -123,21 +167,86 @@ public class FrameReleaseQueue {
                    mNextReleaseTime += mWaitTimeCorrection;
                }
                mLoopCount += 1;
            } else {
                mReleaseJobStarted.set(false);
            }
        }
    }

    public FrameReleaseQueue(boolean render, int frameRate) {
        this.mFrameInfoQueue = new LinkedBlockingQueue();
        this.mReleaseThread = new ReleaseThread();
        this.doFrameRelease.set(true);
        this.mRender = render;
        this.mWaitTime = 1000 / frameRate; // wait time in milliseconds per frame
        int waitTimeRemainder = 1000 % frameRate;
        int gcd = gcd(frameRate, waitTimeRemainder);
        this.mCorrectionLoopCount = frameRate / gcd;
        this.mWaitTimeCorrection = waitTimeRemainder / gcd;
        Log.i(TAG, "Constructed FrameReleaseQueue with wait time " + this.mWaitTime + " ms");
    /**
     * Pacing runnable for simulated audio playback. Each run() releases the
     * buffer scheduled on the previous pass, then polls the queue for the next
     * one and re-schedules itself after the wall-clock duration that buffer's
     * PCM data would take to play out (bytes * ms-per-byte), compensating for
     * scheduler wake-up drift via mExpectedWakeUpTime.
     */
    private class AudioRendererThread extends ReleaseThread {
        private final int WAIT_FOR_BUFFER_IN_SEC = 2;
        private double mTimeAdjustMs = 0;
        private double mMsForByte = 0;
        private double mExpectedWakeUpTime = 0;
        private FrameInfo mCurrentFrameInfo;

        AudioRendererThread(int sampleRate, int nChannels, int bytesPerChannel) {
            if (DEBUG) {
                Log.d(TAG, "sampleRate " + sampleRate
                        + " nChannels " + nChannels
                        + " bytesPerChannel " + bytesPerChannel);
            }
            // Milliseconds of audio represented by a single PCM output byte.
            this.mMsForByte = 1000 / (double)(sampleRate * nChannels * bytesPerChannel);
        }

        @Override
        @SuppressWarnings("FutureReturnValueIgnored")
        public void run() {
            long curTime = getCurSysTime();
            if (DEBUG) {
                if (firstReleaseTime == -1) {
                    firstReleaseTime = curTime;
                }
                printPlaybackTime();
            }
            // A zero byte rate makes the division above yield Infinity (not 0),
            // so check for any non-finite or non-positive pacing value.
            if (!Double.isFinite(mMsForByte) || mMsForByte <= 0) {
                Log.e(TAG, "Audio rendering not possible, no valid params");
                return;
            }
            if (mCurrentFrameInfo != null) {
                try {
                    mCodec.releaseOutputBuffer(mCurrentFrameInfo.bufferId, mRender);
                } catch (IllegalStateException e) {
                    doFrameRelease.set(false);
                    Log.e(TAG, "Threw IllegalStateException on releaseOutputBuffer");
                } finally {
                    mCurrentFrameInfo = null;
                }
            }
            boolean requestedSchedule = false;
            try {
                while (doFrameRelease.get() || mFrameInfoQueue.size() > 0) {
                    mCurrentFrameInfo = mFrameInfoQueue.poll(
                            WAIT_FOR_BUFFER_IN_SEC, TimeUnit.SECONDS);
                    if (mCurrentFrameInfo != null) {
                        // Shrink (or stretch) the next sleep by how early/late
                        // the scheduler actually woke us up.
                        mTimeAdjustMs = 0;
                        if (mExpectedWakeUpTime != 0) {
                            mTimeAdjustMs = mExpectedWakeUpTime - getCurSysTime();
                        }
                        double sleepTimeUs =
                                (mMsForByte * mCurrentFrameInfo.bytes + mTimeAdjustMs) * 1000;
                        mExpectedWakeUpTime = getCurSysTime() + (sleepTimeUs / 1000);
                        if (DEBUG) {
                            Log.d(TAG, " mExpectedWakeUpTime " + mExpectedWakeUpTime
                                + " Waiting for " + (long)(sleepTimeUs) + "us"
                                + " Now " + getCurSysTime()
                                + " bytes " + mCurrentFrameInfo.bytes
                                + " bufferID " + mCurrentFrameInfo.bufferId);
                        }
                        mScheduler.schedule(
                                mReleaseThread, (long)(sleepTimeUs), TimeUnit.MICROSECONDS);
                        requestedSchedule = true;
                        break;
                    }
                }
            } catch (InterruptedException e) {
                Log.d(TAG, "Interrupted during poll wait");
                doFrameRelease.set(false);
                // Preserve the interrupt status for the executor.
                Thread.currentThread().interrupt();
            }
            if (!requestedSchedule) {
                // Nothing left to schedule: mark the job finished so
                // stopFrameRelease() can return.
                mReleaseJobStarted.set(false);
            }
        }
    }

    private static int gcd(int a, int b) {
@@ -154,6 +263,19 @@ public class FrameReleaseQueue {
        }
    }

    /**
     * Queues an audio buffer for paced release and lazily starts the release job.
     *
     * @param frameBufferId MediaCodec output buffer id to release later
     * @param bytes number of PCM bytes in the buffer (determines the pacing delay)
     * @return true if the frame was queued, false if the queue rejected it
     */
    public boolean pushFrame(int frameBufferId, int bytes) {
        FrameInfo info = new FrameInfo(frameBufferId, bytes);
        if (!mFrameInfoQueue.offer(info)) {
            Log.e(TAG, "Failed to push frame with buffer id " + info.bufferId);
            return false;
        }
        // compareAndSet closes the check-then-act race in the original
        // get()/execute()/set(true) sequence: exactly one caller can win the
        // transition and start the release job.
        if (mReleaseJobStarted.compareAndSet(false, true)) {
            mScheduler.execute(mReleaseThread);
        }
        return true;
    }
    public boolean pushFrame(int frameNumber, int frameBufferId, long frameDisplayTime) {
        int frameDisplayTimeMs = (int)(frameDisplayTime/1000);
        FrameInfo curFrameInfo = new FrameInfo(frameNumber, frameBufferId, frameDisplayTimeMs);
@@ -163,16 +285,16 @@ public class FrameReleaseQueue {
            return false;
        }

        if (!mReleaseJobStarted && frameNumber >= mFrameDelay) {
        if (!mReleaseJobStarted.get() && frameNumber >= mFrameDelay) {
            mScheduler.execute(mReleaseThread);
            mReleaseJobStarted = true;
            mReleaseJobStarted.set(true);
            Log.i(TAG, "Started frame release thread");
        }
        return true;
    }

    private int getCurSysTime() {
        return (int)(System.nanoTime()/1000000);
    /** Current time in milliseconds derived from the monotonic nanoTime clock. */
    private long getCurSysTime() {
        return TimeUnit.NANOSECONDS.toMillis(System.nanoTime());
    }

    @SuppressWarnings("FutureReturnValueIgnored")
@@ -196,7 +318,7 @@ public class FrameReleaseQueue {

    public void stopFrameRelease() {
        doFrameRelease.set(false);
        while (mFrameInfoQueue.size() > 0) {
        while (mReleaseJobStarted.get()) {
            try {
                TimeUnit.SECONDS.sleep(1);
            } catch (InterruptedException e) {
+31 −15
Original line number Diff line number Diff line
@@ -18,6 +18,7 @@ package com.android.media.benchmark.library;

import android.view.Surface;

import android.media.AudioFormat;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaFormat;
@@ -93,6 +94,16 @@ public class MultiAccessUnitDecoder extends Decoder {
                final int maxOutputSize = format.getNumber(
                            MediaFormat.KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE, 0).intValue();
                isUsingLargeFrameMode = (maxOutputSize > 0);
                if (mUseFrameReleaseQueue && mFrameReleaseQueue == null) {
                    int bytesPerSample = AudioFormat.getBytesPerSample(
                            format.getInteger(MediaFormat.KEY_PCM_ENCODING,
                                    AudioFormat.ENCODING_PCM_16BIT));
                    int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
                    int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
                    mFrameReleaseQueue = new FrameReleaseQueue(
                            mRender, sampleRate, channelCount, bytesPerSample);
                    mFrameReleaseQueue.setMediaCodec(mCodec);
                }
            }

            @Override
@@ -177,18 +188,30 @@ public class MultiAccessUnitDecoder extends Decoder {
        if (mSawOutputEOS || outputBufferId < 0) {
            return;
        }
        if (mOutputStream != null) {
            try {
                ByteBuffer outputBuffer = mc.getOutputBuffer(outputBufferId);
                byte[] bytesOutput = new byte[outputBuffer.remaining()];
                outputBuffer.get(bytesOutput);
                mOutputStream.write(bytesOutput);
            } catch (IOException e) {
                e.printStackTrace();
                Log.d(TAG, "Error Dumping File: Exception " + e.toString());
            }
        }
        Iterator<BufferInfo> iter = infos.iterator();
        while (iter.hasNext()) {
            BufferInfo bufferInfo = iter.next();
            mNumOutputFrame++;
            if (DEBUG) {
                Log.d(TAG,
                        "In OutputBufferAvailable ,"
                        "In OutputBuffersAvailable ,"
                                + " OutputBuffer ID " + outputBufferId
                                + " output frame number = " + mNumOutputFrame
                                + " timestamp = " + bufferInfo.presentationTimeUs
                                + " size = " + bufferInfo.size);
            }
            if (mIBufferSend != null) {
            if (mIBufferSend != null && mFrameReleaseQueue == null) {
                IBufferXfer.BufferXferInfo info = new IBufferXfer.BufferXferInfo();
                info.buf = mc.getOutputBuffer(outputBufferId);
                info.idx = outputBufferId;
@@ -201,18 +224,11 @@ public class MultiAccessUnitDecoder extends Decoder {
            }
            mSawOutputEOS |= (bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
        }
        if (mOutputStream != null) {
            try {
        if (mFrameReleaseQueue != null) {
            ByteBuffer outputBuffer = mc.getOutputBuffer(outputBufferId);
                byte[] bytesOutput = new byte[outputBuffer.remaining()];
                outputBuffer.get(bytesOutput);
                mOutputStream.write(bytesOutput);
            } catch (IOException e) {
                e.printStackTrace();
                Log.d(TAG, "Error Dumping File: Exception " + e.toString());
            }
        }
        if (mIBufferSend == null) {
            mFrameReleaseQueue.pushFrame(
                    outputBufferId, outputBuffer.remaining());
        } else if (mIBufferSend == null) {
            mc.releaseOutputBuffer(outputBufferId, mRender);
        }
        if (mSawOutputEOS) {