Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit e441dcbb authored by Android (Google) Code Review's avatar Android (Google) Code Review
Browse files

Merge change 22561 into eclair

* changes:
  Performance measurement framework:
parents 9a7efa45 ff1df69d
Loading
Loading
Loading
Loading
+5 −81
Original line number Diff line number Diff line
@@ -24,6 +24,7 @@ import android.content.IntentFilter;
import android.content.pm.ActivityInfo;
import android.content.res.Configuration;
import android.os.Bundle;
import android.os.PerformanceCollector;
import android.os.RemoteException;
import android.os.Debug;
import android.os.IBinder;
@@ -83,10 +84,8 @@ public class Instrumentation {
    private List<ActivityWaiter> mWaitingActivities;
    private List<ActivityMonitor> mActivityMonitors;
    private IInstrumentationWatcher mWatcher;
    private long mPreCpuTime;
    private long mStart;
    private boolean mAutomaticPerformanceSnapshots = false;
    private Bundle mPrePerfMetrics = new Bundle();
    private PerformanceCollector mPerformanceCollector;
    private Bundle mPerfMetrics = new Bundle();

    public Instrumentation() {
@@ -191,94 +190,19 @@ public class Instrumentation {
    
    /**
     * Enables automatic performance snapshots for this instrumentation run and
     * creates the {@link PerformanceCollector} that will take them.
     */
    public void setAutomaticPerformanceSnapshots() {
        mAutomaticPerformanceSnapshots = true;
        mPerformanceCollector = new PerformanceCollector();
    }

    public void startPerformanceSnapshot() {
        mStart = 0;
        if (!isProfiling()) {
            // Add initial binder counts
            Bundle binderCounts = getBinderCounts();
            for (String key: binderCounts.keySet()) {
                addPerfMetricLong("pre_" + key, binderCounts.getLong(key));
            }

            // Force a GC and zero out the performance counters.  Do this
            // before reading initial CPU/wall-clock times so we don't include
            // the cost of this setup in our final metrics.
            startAllocCounting();

            // Record CPU time up to this point, and start timing.  Note:  this
            // must happen at the end of this method, otherwise the timing will
            // include noise.
            mStart = SystemClock.uptimeMillis();
            mPreCpuTime = Process.getElapsedCpuTime();
            mPerformanceCollector.beginSnapshot(null);
        }
    }
    
    public void endPerformanceSnapshot() {
        if (!isProfiling()) {
            // Stop the timing. This must be done first before any other counting is stopped.
            long cpuTime = Process.getElapsedCpuTime();
            long duration = SystemClock.uptimeMillis();
            
            stopAllocCounting();
            
            long nativeMax = Debug.getNativeHeapSize() / 1024;
            long nativeAllocated = Debug.getNativeHeapAllocatedSize() / 1024;
            long nativeFree = Debug.getNativeHeapFreeSize() / 1024;

            Debug.MemoryInfo memInfo = new Debug.MemoryInfo();
            Debug.getMemoryInfo(memInfo);

            Runtime runtime = Runtime.getRuntime();

            long dalvikMax = runtime.totalMemory() / 1024;
            long dalvikFree = runtime.freeMemory() / 1024;
            long dalvikAllocated = dalvikMax - dalvikFree;
            
            // Add final binder counts
            Bundle binderCounts = getBinderCounts();
            for (String key: binderCounts.keySet()) {
                addPerfMetricLong(key, binderCounts.getLong(key));
            }
            
            // Add alloc counts
            Bundle allocCounts = getAllocCounts();
            for (String key: allocCounts.keySet()) {
                addPerfMetricLong(key, allocCounts.getLong(key));
            mPerfMetrics = mPerformanceCollector.endSnapshot();
        }
            
            addPerfMetricLong("execution_time", duration - mStart);
            addPerfMetricLong("pre_cpu_time", mPreCpuTime);
            addPerfMetricLong("cpu_time", cpuTime - mPreCpuTime);

            addPerfMetricLong("native_size", nativeMax);
            addPerfMetricLong("native_allocated", nativeAllocated);
            addPerfMetricLong("native_free", nativeFree);
            addPerfMetricInt("native_pss", memInfo.nativePss);
            addPerfMetricInt("native_private_dirty", memInfo.nativePrivateDirty);
            addPerfMetricInt("native_shared_dirty", memInfo.nativeSharedDirty);
            
            addPerfMetricLong("java_size", dalvikMax);
            addPerfMetricLong("java_allocated", dalvikAllocated);
            addPerfMetricLong("java_free", dalvikFree);
            addPerfMetricInt("java_pss", memInfo.dalvikPss);
            addPerfMetricInt("java_private_dirty", memInfo.dalvikPrivateDirty);
            addPerfMetricInt("java_shared_dirty", memInfo.dalvikSharedDirty);
            
            addPerfMetricInt("other_pss", memInfo.otherPss);
            addPerfMetricInt("other_private_dirty", memInfo.otherPrivateDirty);
            addPerfMetricInt("other_shared_dirty", memInfo.otherSharedDirty);
            
        }
    }
    
    /** Stores a long metric into mPerfMetrics under the "performance." key prefix. */
    private void addPerfMetricLong(String key, long value) {
        mPerfMetrics.putLong("performance." + key, value);
    }
    
    /** Stores an int metric into mPerfMetrics under the "performance." key prefix. */
    private void addPerfMetricInt(String key, int value) {
        mPerfMetrics.putInt("performance." + key, value);
    }
    
    /**
+524 −0

File added.

Preview size limit exceeded, changes collapsed.

+32 −0
Original line number Diff line number Diff line
/*
 * Copyright (C) 2009 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.test;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Marks a {@link junit.framework.TestCase} test method as timed: when the
 * annotation is present, the test method's execution is timed and the results
 * are written through instrumentation output. It can also be placed on the
 * test class itself, which is equivalent to tagging every test method in that
 * class with this annotation.
 *
 * {@hide} Pending approval for public API.
 */
@Retention(RetentionPolicy.RUNTIME)
// Restrict to the documented usage sites (test methods and test classes);
// without @Target the annotation could be applied to any program element.
@Target({ElementType.METHOD, ElementType.TYPE})
public @interface TimedTest { }
 No newline at end of file
+18 −0
Original line number Diff line number Diff line
@@ -18,6 +18,8 @@ package android.test;

import android.app.Instrumentation;
import android.content.Context;
import android.os.PerformanceCollector.PerformanceResultsWriter;

import com.google.android.collect.Lists;
import junit.framework.Test;
import junit.framework.TestCase;
@@ -39,6 +41,7 @@ public class AndroidTestRunner extends BaseTestRunner {

    private List<TestListener> mTestListeners = Lists.newArrayList();
    private Instrumentation mInstrumentation;
    private PerformanceResultsWriter mPerfWriter;

    @SuppressWarnings("unchecked")
    public void setTestClassName(String testClassName, String testMethodName) {
@@ -162,6 +165,7 @@ public class AndroidTestRunner extends BaseTestRunner {
        for (TestCase testCase : mTestCases) {
            setContextIfAndroidTestCase(testCase, mContext, testContext);
            setInstrumentationIfInstrumentationTestCase(testCase, mInstrumentation);
            setPerformanceWriterIfPerformanceTestCase(testCase, mPerfWriter);
            testCase.run(mTestResult);
        }
    }
@@ -184,6 +188,13 @@ public class AndroidTestRunner extends BaseTestRunner {
        }
    }

    /**
     * Hands the performance results writer to the given test if it is a
     * {@link PerformanceTestBase}; otherwise does nothing.
     *
     * @param test the test case about to be run; may be any {@link Test} type
     * @param writer the receiver for performance measurements
     */
    private void setPerformanceWriterIfPerformanceTestCase(
            Test test, PerformanceResultsWriter writer) {
        // instanceof is the idiomatic form of Class.isAssignableFrom here and,
        // unlike test.getClass(), does not throw NPE if test is ever null.
        if (test instanceof PerformanceTestBase) {
            ((PerformanceTestBase) test).setPerformanceResultsWriter(writer);
        }
    }

    /** Sets the {@link Instrumentation} instance used when running the tests. */
    public void setInstrumentation(Instrumentation instrumentation) {
        mInstrumentation = instrumentation;
    }
@@ -197,6 +208,13 @@ public class AndroidTestRunner extends BaseTestRunner {
        setInstrumentation(instrumentation);
    }

    /**
     * Sets the {@link PerformanceResultsWriter} that performance test cases
     * run by this runner will report their measurements through (see
     * setPerformanceWriterIfPerformanceTestCase).
     *
     * {@hide} Pending approval for public API.
     */
    public void setPerformanceResultsWriter(PerformanceResultsWriter writer) {
        mPerfWriter = writer;
    }

    @Override
    protected Class loadSuiteClass(String suiteClassName) throws ClassNotFoundException {
        return mContext.getClassLoader().loadClass(suiteClassName);
+176 −98
Original line number Diff line number Diff line
@@ -17,17 +17,31 @@
package android.test;

import static android.test.suitebuilder.TestPredicates.REJECT_PERFORMANCE;

import com.android.internal.util.Predicate;

import android.app.Activity;
import android.app.Instrumentation;
import android.os.Bundle;
import android.os.Debug;
import android.os.Looper;
import android.os.Parcelable;
import android.os.PerformanceCollector;
import android.os.Process;
import android.os.SystemClock;
import android.os.PerformanceCollector.PerformanceResultsWriter;
import android.test.suitebuilder.TestMethod;
import android.test.suitebuilder.TestPredicates;
import android.test.suitebuilder.TestSuiteBuilder;
import android.util.Log;

import com.android.internal.util.Predicate;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.PrintStream;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;

import junit.framework.AssertionFailedError;
import junit.framework.Test;
@@ -38,22 +52,13 @@ import junit.framework.TestSuite;
import junit.runner.BaseTestRunner;
import junit.textui.ResultPrinter;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.PrintStream;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;


/**
 * An {@link Instrumentation} that runs various types of {@link junit.framework.TestCase}s against
 * an Android package (application). Typical usage:
 * <ol>
 * <li>Write {@link junit.framework.TestCase}s that perform unit, functional, or performance tests
 * against the classes in your package.  Typically these are subclassed from:
 *   <ul><li>{@link android.test.ActivityInstrumentationTestCase}</li>
 *   <ul><li>{@link android.test.ActivityInstrumentationTestCase2}</li>
 *   <li>{@link android.test.ActivityUnitTestCase}</li>
 *   <li>{@link android.test.AndroidTestCase}</li>
 *   <li>{@link android.test.ApplicationTestCase}</li>
@@ -129,11 +134,10 @@ import java.util.List;

/* (not JavaDoc)
 * Although not necessary in most case, another way to use this class is to extend it and have the
 * derived class return
 * the desired test suite from the {@link #getTestSuite()} method. The test suite returned from this
 * method will be used if no target class is defined in the meta-data or command line argument
 * parameters. If a derived class is used it needs to be added as an instrumentation to the
 * AndroidManifest.xml and the command to run it would look like:
 * derived class return the desired test suite from the {@link #getTestSuite()} method. The test
 * suite returned from this method will be used if no target class is defined in the meta-data or
 * command line argument parameters. If a derived class is used it needs to be added as an
 * instrumentation to the AndroidManifest.xml and the command to run it would look like:
 * <p/>
 * adb shell am instrument -w com.android.foo/<i>com.android.FooInstrumentationTestRunner</i>
 * <p/>
@@ -160,16 +164,15 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu

    private static final String ARGUMENT_LOG_ONLY = "log";

   
    /**
     * This constant defines the maximum allowed runtime (in ms) for a test included in the "small" suite. 
     * It is used to make an educated guess at what suite an unlabeled test belongs.
     * This constant defines the maximum allowed runtime (in ms) for a test included in the "small"
     * suite. It is used to make an educated guess at what suite an unlabeled test belongs.
     */
    private static final float SMALL_SUITE_MAX_RUNTIME = 100;

    /**
     * This constant defines the maximum allowed runtime (in ms) for a test included in the "medium" suite. 
     * It is used to make an educated guess at what suite an unlabeled test belongs.
     * This constant defines the maximum allowed runtime (in ms) for a test included in the
     * "medium" suite. It is used to make an educated guess at what suite an unlabeled test belongs.
     */
    private static final float MEDIUM_SUITE_MAX_RUNTIME = 1000;

@@ -223,6 +226,19 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu
     * identifies the path to the generated code coverage file.
     */
    private static final String REPORT_KEY_COVERAGE_PATH = "coverageFilePath";
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * reports the cpu time in milliseconds of the current test.
     */
    private static final String REPORT_KEY_PERF_CPU_TIME =
        "performance." + PerformanceCollector.METRIC_KEY_CPU_TIME;
    /**
     * If included in the status or final bundle sent to an IInstrumentationWatcher, this key
     * reports the run time in milliseconds of the current test.
     */
    private static final String REPORT_KEY_PERF_EXECUTION_TIME =
        "performance." + PerformanceCollector.METRIC_KEY_EXECUTION_TIME;

    /**
     * The test is starting.
     */
@@ -336,8 +352,10 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu
        if (mSuiteAssignmentMode) {
            mTestRunner.addTestListener(new SuiteAssignmentPrinter());
        } else {
            WatcherResultPrinter resultPrinter = new WatcherResultPrinter(mTestCount);
            mTestRunner.addTestListener(new TestPrinter("TestRunner", false));
            mTestRunner.addTestListener(new WatcherResultPrinter(mTestCount));
            mTestRunner.addTestListener(resultPrinter);
            mTestRunner.setPerformanceResultsWriter(resultPrinter);
        }
        start();
    }
@@ -348,6 +366,7 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu

    /**
     * Parses and loads the specified set of test classes
     *
     * @param testClassArg - comma-separated list of test classes and methods
     * @param testSuiteBuilder - builder to add tests to
     */
@@ -360,8 +379,9 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu

    /**
     * Parse and load the given test class and, optionally, method
     * @param testClassName - full package name of test class and optionally method to add. Expected
     *   format: com.android.TestClass#testMethod
     *
     * @param testClassName - full package name of test class and optionally method to add.
     *        Expected format: com.android.TestClass#testMethod
     * @param testSuiteBuilder - builder to add tests to
     */
    private void parseTestClass(String testClassName, TestSuiteBuilder testSuiteBuilder) {
@@ -372,8 +392,7 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu
            testMethodName = testClassName.substring(methodSeparatorIndex + 1);
            testClassName = testClassName.substring(0, methodSeparatorIndex);
        }
        testSuiteBuilder.addTestClassByName(testClassName, testMethodName, 
                getTargetContext());
        testSuiteBuilder.addTestClassByName(testClassName, testMethodName, getTargetContext());
    }

    protected AndroidTestRunner getAndroidTestRunner() {
@@ -496,8 +515,7 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu
        if (mCoverageFilePath == null) {
            return getTargetContext().getFilesDir().getAbsolutePath() + File.separator +
                   DEFAULT_COVERAGE_FILE_NAME;
         }
        else {
        } else {
            return mCoverageFilePath;
        }
    }
@@ -529,8 +547,7 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu
     * This class sends status reports back to the IInstrumentationWatcher about
     * which suite each test belongs to.
     */
    private class SuiteAssignmentPrinter implements TestListener
    {
    private class SuiteAssignmentPrinter implements TestListener {

        private Bundle mTestResult;
        private long mStartTime;
@@ -602,13 +619,15 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu
    /**
     * This class sends status reports back to the IInstrumentationWatcher
     */
    private class WatcherResultPrinter implements TestListener
    {
    private class WatcherResultPrinter implements TestListener, PerformanceResultsWriter {
        private final Bundle mResultTemplate;
        Bundle mTestResult;
        int mTestNum = 0;
        int mTestResultCode = 0;
        String mTestClass = null;
        boolean mIsTimedTest = false;
        long mCpuTime = 0;
        long mExecTime = 0;

        public WatcherResultPrinter(int numTests) {
            mResultTemplate = new Bundle();
@@ -617,13 +636,15 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu
        }

        /**
         * send a status for the start of each test, so long tests can be seen as "running"
         * send a status for the start of each test, so long tests can be seen
         * as "running"
         */
        public void startTest(Test test) {
            String testClass = test.getClass().getName();
            String testName = ((TestCase)test).getName();
            mTestResult = new Bundle(mResultTemplate);
            mTestResult.putString(REPORT_KEY_NAME_CLASS, testClass);
            mTestResult.putString(REPORT_KEY_NAME_TEST, ((TestCase) test).getName());
            mTestResult.putString(REPORT_KEY_NAME_TEST, testName);
            mTestResult.putInt(REPORT_KEY_NUM_CURRENT, ++mTestNum);
            // pretty printing
            if (testClass != null && !testClass.equals(mTestClass)) {
@@ -647,6 +668,23 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu

            sendStatus(REPORT_VALUE_RESULT_START, mTestResult);
            mTestResultCode = 0;

            mIsTimedTest = false;
            try {
                // Look for TimedTest annotation on both test class and test
                // method
                mIsTimedTest = test.getClass().isAnnotationPresent(TimedTest.class) ||
                    test.getClass().getMethod(testName).isAnnotationPresent(TimedTest.class);
            } catch (SecurityException e) {
                throw new IllegalStateException(e);
            } catch (NoSuchMethodException e) {
                throw new IllegalStateException(e);
            }

            if (mIsTimedTest) {
                mExecTime = SystemClock.uptimeMillis();
                mCpuTime = Process.getElapsedCpuTime();
            }
        }

        /**
@@ -677,6 +715,13 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu
         * @see junit.framework.TestListener#endTest(Test)
         */
        public void endTest(Test test) {
            if (mIsTimedTest) {
                mCpuTime = Process.getElapsedCpuTime() - mCpuTime;
                mExecTime = SystemClock.uptimeMillis() - mExecTime;
                mTestResult.putLong(REPORT_KEY_PERF_CPU_TIME, mCpuTime);
                mTestResult.putLong(REPORT_KEY_PERF_EXECUTION_TIME, mExecTime);
            }

            if (mTestResultCode == 0) {
                mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT, ".");
            }
@@ -689,7 +734,40 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu
            }
        }

        /** No-op: nothing is reported when a performance snapshot begins. */
        public void writeBeginSnapshot(String label) {
            // Do nothing
        }

        /**
         * Copies each field of the snapshot results bundle into mResults as a
         * long so it is included in the final instrumentation output.
         *
         * NOTE(review): assumes every snapshot value is a long — a non-long
         * entry would come back as Bundle.getLong's default; confirm against
         * the PerformanceCollector snapshot writer.
         */
        public void writeEndSnapshot(Bundle results) {
            // Copy all snapshot data fields as type long into mResults, which
            // is outputted via Instrumentation.finish
            for (String key : results.keySet()) {
                mResults.putLong(key, results.getLong(key));
            }
        }

        /** No-op: nothing is reported when a timing measurement starts. */
        public void writeStartTiming(String label) {
            // Do nothing
        }

        /**
         * Flattens the per-iteration timing results into mTestResult, keyed as
         * "performance.iteration&lt;i&gt;.&lt;metric&gt;", so they are sent
         * back with the test status in WatcherResultPrinter.endTest.
         *
         * @param results bundle containing a parcelable list of per-iteration
         *        bundles under PerformanceCollector.METRIC_KEY_ITERATIONS
         */
        public void writeStopTiming(Bundle results) {
            // Copy results into mTestResult by flattening list of iterations,
            // which is outputted via WatcherResultPrinter.endTest
            int i = 0;
            for (Parcelable p :
                    results.getParcelableArrayList(PerformanceCollector.METRIC_KEY_ITERATIONS)) {
                Bundle iteration = (Bundle)p;
                String index = "performance.iteration" + i + ".";
                mTestResult.putString(index + PerformanceCollector.METRIC_KEY_LABEL,
                        iteration.getString(PerformanceCollector.METRIC_KEY_LABEL));
                mTestResult.putLong(index + PerformanceCollector.METRIC_KEY_CPU_TIME,
                        iteration.getLong(PerformanceCollector.METRIC_KEY_CPU_TIME));
                mTestResult.putLong(index + PerformanceCollector.METRIC_KEY_EXECUTION_TIME,
                        iteration.getLong(PerformanceCollector.METRIC_KEY_EXECUTION_TIME));
                i++;
            }
        }

        // TODO report the end of the cycle
        // TODO report runtime for each test
    }
}
Loading