Performance measurement framework:
+ For bug 1810508 - Added PerformanceCollector class to collect runtime and memory usage data - Moved performance snapshotting from Instrumentation to PerformanceCollector - Added PerformanceResultsWriter interface which defines functions for reporting performance data + Framework integration - Added TimedTest annotation to automatically time tests and write results to instrumentation output - Modified PerformanceTestBase to add collection hooks and wrapper methods - Modified WatcherResultPrinter in InstrumentationTestRunner to implement PerformanceResultsWriter for instrumentation output of performance data - Modified InstrumentationTestRunner and AndroidTestRunner to pass writer instance to test
This commit is contained in:
@ -24,6 +24,7 @@ import android.content.IntentFilter;
|
||||
import android.content.pm.ActivityInfo;
|
||||
import android.content.res.Configuration;
|
||||
import android.os.Bundle;
|
||||
import android.os.PerformanceCollector;
|
||||
import android.os.RemoteException;
|
||||
import android.os.Debug;
|
||||
import android.os.IBinder;
|
||||
@ -83,10 +84,8 @@ public class Instrumentation {
|
||||
private List<ActivityWaiter> mWaitingActivities;
|
||||
private List<ActivityMonitor> mActivityMonitors;
|
||||
private IInstrumentationWatcher mWatcher;
|
||||
private long mPreCpuTime;
|
||||
private long mStart;
|
||||
private boolean mAutomaticPerformanceSnapshots = false;
|
||||
private Bundle mPrePerfMetrics = new Bundle();
|
||||
private PerformanceCollector mPerformanceCollector;
|
||||
private Bundle mPerfMetrics = new Bundle();
|
||||
|
||||
public Instrumentation() {
|
||||
@ -191,96 +190,21 @@ public class Instrumentation {
|
||||
|
||||
public void setAutomaticPerformanceSnapshots() {
|
||||
mAutomaticPerformanceSnapshots = true;
|
||||
mPerformanceCollector = new PerformanceCollector();
|
||||
}
|
||||
|
||||
public void startPerformanceSnapshot() {
|
||||
mStart = 0;
|
||||
if (!isProfiling()) {
|
||||
// Add initial binder counts
|
||||
Bundle binderCounts = getBinderCounts();
|
||||
for (String key: binderCounts.keySet()) {
|
||||
addPerfMetricLong("pre_" + key, binderCounts.getLong(key));
|
||||
}
|
||||
|
||||
// Force a GC and zero out the performance counters. Do this
|
||||
// before reading initial CPU/wall-clock times so we don't include
|
||||
// the cost of this setup in our final metrics.
|
||||
startAllocCounting();
|
||||
|
||||
// Record CPU time up to this point, and start timing. Note: this
|
||||
// must happen at the end of this method, otherwise the timing will
|
||||
// include noise.
|
||||
mStart = SystemClock.uptimeMillis();
|
||||
mPreCpuTime = Process.getElapsedCpuTime();
|
||||
mPerformanceCollector.beginSnapshot(null);
|
||||
}
|
||||
}
|
||||
|
||||
public void endPerformanceSnapshot() {
|
||||
if (!isProfiling()) {
|
||||
// Stop the timing. This must be done first before any other counting is stopped.
|
||||
long cpuTime = Process.getElapsedCpuTime();
|
||||
long duration = SystemClock.uptimeMillis();
|
||||
|
||||
stopAllocCounting();
|
||||
|
||||
long nativeMax = Debug.getNativeHeapSize() / 1024;
|
||||
long nativeAllocated = Debug.getNativeHeapAllocatedSize() / 1024;
|
||||
long nativeFree = Debug.getNativeHeapFreeSize() / 1024;
|
||||
|
||||
Debug.MemoryInfo memInfo = new Debug.MemoryInfo();
|
||||
Debug.getMemoryInfo(memInfo);
|
||||
|
||||
Runtime runtime = Runtime.getRuntime();
|
||||
|
||||
long dalvikMax = runtime.totalMemory() / 1024;
|
||||
long dalvikFree = runtime.freeMemory() / 1024;
|
||||
long dalvikAllocated = dalvikMax - dalvikFree;
|
||||
|
||||
// Add final binder counts
|
||||
Bundle binderCounts = getBinderCounts();
|
||||
for (String key: binderCounts.keySet()) {
|
||||
addPerfMetricLong(key, binderCounts.getLong(key));
|
||||
}
|
||||
|
||||
// Add alloc counts
|
||||
Bundle allocCounts = getAllocCounts();
|
||||
for (String key: allocCounts.keySet()) {
|
||||
addPerfMetricLong(key, allocCounts.getLong(key));
|
||||
}
|
||||
|
||||
addPerfMetricLong("execution_time", duration - mStart);
|
||||
addPerfMetricLong("pre_cpu_time", mPreCpuTime);
|
||||
addPerfMetricLong("cpu_time", cpuTime - mPreCpuTime);
|
||||
|
||||
addPerfMetricLong("native_size", nativeMax);
|
||||
addPerfMetricLong("native_allocated", nativeAllocated);
|
||||
addPerfMetricLong("native_free", nativeFree);
|
||||
addPerfMetricInt("native_pss", memInfo.nativePss);
|
||||
addPerfMetricInt("native_private_dirty", memInfo.nativePrivateDirty);
|
||||
addPerfMetricInt("native_shared_dirty", memInfo.nativeSharedDirty);
|
||||
|
||||
addPerfMetricLong("java_size", dalvikMax);
|
||||
addPerfMetricLong("java_allocated", dalvikAllocated);
|
||||
addPerfMetricLong("java_free", dalvikFree);
|
||||
addPerfMetricInt("java_pss", memInfo.dalvikPss);
|
||||
addPerfMetricInt("java_private_dirty", memInfo.dalvikPrivateDirty);
|
||||
addPerfMetricInt("java_shared_dirty", memInfo.dalvikSharedDirty);
|
||||
|
||||
addPerfMetricInt("other_pss", memInfo.otherPss);
|
||||
addPerfMetricInt("other_private_dirty", memInfo.otherPrivateDirty);
|
||||
addPerfMetricInt("other_shared_dirty", memInfo.otherSharedDirty);
|
||||
|
||||
mPerfMetrics = mPerformanceCollector.endSnapshot();
|
||||
}
|
||||
}
|
||||
|
||||
private void addPerfMetricLong(String key, long value) {
|
||||
mPerfMetrics.putLong("performance." + key, value);
|
||||
}
|
||||
|
||||
private void addPerfMetricInt(String key, int value) {
|
||||
mPerfMetrics.putInt("performance." + key, value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Called when the instrumented application is stopping, after all of the
|
||||
* normal application cleanup has occurred.
|
||||
|
524
core/java/android/os/PerformanceCollector.java
Normal file
524
core/java/android/os/PerformanceCollector.java
Normal file
@ -0,0 +1,524 @@
|
||||
/*
|
||||
* Copyright (C) 2009 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package android.os;
|
||||
|
||||
|
||||
import java.util.ArrayList;
|
||||
|
||||
/**
|
||||
* Collects performance data between two function calls in Bundle objects and
|
||||
* outputs the results using writer of type {@link PerformanceResultsWriter}.
|
||||
* <p>
|
||||
* {@link #beginSnapshot(String)} and {@link #endSnapshot()} functions collect
|
||||
* memory usage information and measure runtime between calls to begin and end.
|
||||
* These functions logically wrap around an entire test, and should be called
|
||||
* with name of test as the label, e.g. EmailPerformanceTest.
|
||||
* <p>
|
||||
* {@link #startTiming(String)} and {@link #stopTiming(String)} functions
|
||||
* measure runtime between calls to start and stop. These functions logically
|
||||
* wrap around a single test case or a small block of code, and should be called
|
||||
* with the name of test case as the label, e.g. testSimpleSendMailSequence.
|
||||
* <p>
|
||||
* {@link #addIteration(String)} inserts intermediate measurement point which
|
||||
* can be labeled with a String, e.g. Launch email app, compose, send, etc.
|
||||
* <p>
|
||||
* Snapshot and timing functions do not interfere with each other, and thus can
|
||||
* be called in any order. The intended structure is to wrap begin/endSnapshot
|
||||
* around calls to start/stopTiming, for example:
|
||||
* <p>
|
||||
* <code>beginSnapshot("EmailPerformanceTest");
|
||||
* startTiming("testSimpleSendSequence");
|
||||
* addIteration("Launch email app");
|
||||
* addIteration("Compose");
|
||||
* stopTiming("Send");
|
||||
* startTiming("testComplexSendSequence");
|
||||
* stopTiming("");
|
||||
* startTiming("testAddLabel");
|
||||
* stopTiming("");
|
||||
* endSnapshot();</code>
|
||||
* <p>
|
||||
* Structure of results output is up to implementor of
|
||||
* {@link PerformanceResultsWriter }.
|
||||
*
|
||||
* {@hide} Pending approval for public API.
|
||||
*/
|
||||
public class PerformanceCollector {
|
||||
|
||||
/**
|
||||
* Interface for reporting performance data.
|
||||
*/
|
||||
public interface PerformanceResultsWriter {
|
||||
|
||||
/**
|
||||
* Callback invoked as first action in
|
||||
* PerformanceCollector#beginSnapshot(String) for reporting the start of
|
||||
* a performance snapshot.
|
||||
*
|
||||
* @param label description of code block between beginSnapshot and
|
||||
* PerformanceCollector#endSnapshot()
|
||||
* @see PerformanceCollector#beginSnapshot(String)
|
||||
*/
|
||||
public void writeBeginSnapshot(String label);
|
||||
|
||||
/**
|
||||
* Callback invoked as last action in PerformanceCollector#endSnapshot()
|
||||
* for reporting performance data collected in the snapshot.
|
||||
*
|
||||
* @param results memory and runtime metrics stored as key/value pairs,
|
||||
* in the same structure as returned by
|
||||
* PerformanceCollector#endSnapshot()
|
||||
* @see PerformanceCollector#endSnapshot()
|
||||
*/
|
||||
public void writeEndSnapshot(Bundle results);
|
||||
|
||||
/**
|
||||
* Callback invoked as first action in
|
||||
* PerformanceCollector#startTiming(String) for reporting the start of
|
||||
* a timing measurement.
|
||||
*
|
||||
* @param label description of code block between startTiming and
|
||||
* PerformanceCollector#stopTiming(String)
|
||||
* @see PerformanceCollector#startTiming(String)
|
||||
*/
|
||||
public void writeStartTiming(String label);
|
||||
|
||||
/**
|
||||
* Callback invoked as last action in
|
||||
* {@link PerformanceCollector#stopTiming(String)} for reporting the
|
||||
* sequence of timings measured.
|
||||
*
|
||||
* @param results runtime metrics of code block between calls to
|
||||
* startTiming and stopTiming, in the same structure as
|
||||
* returned by PerformanceCollector#stopTiming(String)
|
||||
* @see PerformanceCollector#stopTiming(String)
|
||||
*/
|
||||
public void writeStopTiming(Bundle results);
|
||||
}
|
||||
|
||||
/**
|
||||
* In a results Bundle, this key references a List of iteration Bundles.
|
||||
*/
|
||||
public static final String METRIC_KEY_ITERATIONS = "iterations";
|
||||
/**
|
||||
* In an iteration Bundle, this key describes the iteration.
|
||||
*/
|
||||
public static final String METRIC_KEY_LABEL = "label";
|
||||
/**
|
||||
* In a results Bundle, this key reports the cpu time of the code block
|
||||
* under measurement.
|
||||
*/
|
||||
public static final String METRIC_KEY_CPU_TIME = "cpu_time";
|
||||
/**
|
||||
* In a results Bundle, this key reports the execution time of the code
|
||||
* block under measurement.
|
||||
*/
|
||||
public static final String METRIC_KEY_EXECUTION_TIME = "execution_time";
|
||||
/**
|
||||
* In a snapshot Bundle, this key reports the number of received
|
||||
* transactions from the binder driver before collection started.
|
||||
*/
|
||||
public static final String METRIC_KEY_PRE_RECEIVED_TRANSACTIONS = "pre_received_transactions";
|
||||
/**
|
||||
* In a snapshot Bundle, this key reports the number of transactions sent by
|
||||
* the running program before collection started.
|
||||
*/
|
||||
public static final String METRIC_KEY_PRE_SENT_TRANSACTIONS = "pre_sent_transactions";
|
||||
/**
|
||||
* In a snapshot Bundle, this key reports the number of received
|
||||
* transactions from the binder driver.
|
||||
*/
|
||||
public static final String METRIC_KEY_RECEIVED_TRANSACTIONS = "received_transactions";
|
||||
/**
|
||||
* In a snapshot Bundle, this key reports the number of transactions sent by
|
||||
* the running program.
|
||||
*/
|
||||
public static final String METRIC_KEY_SENT_TRANSACTIONS = "sent_transactions";
|
||||
/**
|
||||
* In a snapshot Bundle, this key reports the number of garbage collection
|
||||
* invocations.
|
||||
*/
|
||||
public static final String METRIC_KEY_GC_INVOCATION_COUNT = "gc_invocation_count";
|
||||
/**
|
||||
* In a snapshot Bundle, this key reports the amount of allocated memory
|
||||
* used by the running program.
|
||||
*/
|
||||
public static final String METRIC_KEY_JAVA_ALLOCATED = "java_allocated";
|
||||
/**
|
||||
* In a snapshot Bundle, this key reports the amount of free memory
|
||||
* available to the running program.
|
||||
*/
|
||||
public static final String METRIC_KEY_JAVA_FREE = "java_free";
|
||||
/**
|
||||
* In a snapshot Bundle, this key reports the number of private dirty pages
|
||||
* used by dalvik.
|
||||
*/
|
||||
public static final String METRIC_KEY_JAVA_PRIVATE_DIRTY = "java_private_dirty";
|
||||
/**
|
||||
* In a snapshot Bundle, this key reports the proportional set size for
|
||||
* dalvik.
|
||||
*/
|
||||
public static final String METRIC_KEY_JAVA_PSS = "java_pss";
|
||||
/**
|
||||
* In a snapshot Bundle, this key reports the number of shared dirty pages
|
||||
* used by dalvik.
|
||||
*/
|
||||
public static final String METRIC_KEY_JAVA_SHARED_DIRTY = "java_shared_dirty";
|
||||
/**
|
||||
* In a snapshot Bundle, this key reports the total amount of memory
|
||||
* available to the running program.
|
||||
*/
|
||||
public static final String METRIC_KEY_JAVA_SIZE = "java_size";
|
||||
/**
|
||||
* In a snapshot Bundle, this key reports the amount of allocated memory in
|
||||
* the native heap.
|
||||
*/
|
||||
public static final String METRIC_KEY_NATIVE_ALLOCATED = "native_allocated";
|
||||
/**
|
||||
* In a snapshot Bundle, this key reports the amount of free memory in the
|
||||
* native heap.
|
||||
*/
|
||||
public static final String METRIC_KEY_NATIVE_FREE = "native_free";
|
||||
/**
|
||||
* In a snapshot Bundle, this key reports the number of private dirty pages
|
||||
* used by the native heap.
|
||||
*/
|
||||
public static final String METRIC_KEY_NATIVE_PRIVATE_DIRTY = "native_private_dirty";
|
||||
/**
|
||||
* In a snapshot Bundle, this key reports the proportional set size for the
|
||||
* native heap.
|
||||
*/
|
||||
public static final String METRIC_KEY_NATIVE_PSS = "native_pss";
|
||||
/**
|
||||
* In a snapshot Bundle, this key reports the number of shared dirty pages
|
||||
* used by the native heap.
|
||||
*/
|
||||
public static final String METRIC_KEY_NATIVE_SHARED_DIRTY = "native_shared_dirty";
|
||||
/**
|
||||
* In a snapshot Bundle, this key reports the size of the native heap.
|
||||
*/
|
||||
public static final String METRIC_KEY_NATIVE_SIZE = "native_size";
|
||||
/**
|
||||
* In a snapshot Bundle, this key reports the number of objects allocated
|
||||
* globally.
|
||||
*/
|
||||
public static final String METRIC_KEY_GLOBAL_ALLOC_COUNT = "global_alloc_count";
|
||||
/**
|
||||
* In a snapshot Bundle, this key reports the size of all objects allocated
|
||||
* globally.
|
||||
*/
|
||||
public static final String METRIC_KEY_GLOBAL_ALLOC_SIZE = "global_alloc_size";
|
||||
/**
|
||||
* In a snapshot Bundle, this key reports the number of objects freed
|
||||
* globally.
|
||||
*/
|
||||
public static final String METRIC_KEY_GLOBAL_FREED_COUNT = "global_freed_count";
|
||||
/**
|
||||
* In a snapshot Bundle, this key reports the size of all objects freed
|
||||
* globally.
|
||||
*/
|
||||
public static final String METRIC_KEY_GLOBAL_FREED_SIZE = "global_freed_size";
|
||||
/**
|
||||
* In a snapshot Bundle, this key reports the number of private dirty pages
|
||||
* used by everything else.
|
||||
*/
|
||||
public static final String METRIC_KEY_OTHER_PRIVATE_DIRTY = "other_private_dirty";
|
||||
/**
|
||||
* In a snapshot Bundle, this key reports the proportional set size for
|
||||
* everything else.
|
||||
*/
|
||||
public static final String METRIC_KEY_OTHER_PSS = "other_pss";
|
||||
/**
|
||||
* In a snapshot Bundle, this key reports the number of shared dirty pages
|
||||
* used by everything else.
|
||||
*/
|
||||
public static final String METRIC_KEY_OTHER_SHARED_DIRTY = "other_shared_dirty";
|
||||
|
||||
private PerformanceResultsWriter mPerfWriter;
|
||||
private Bundle mPerfSnapshot;
|
||||
private Bundle mPerfMeasurement;
|
||||
private long mSnapshotCpuTime;
|
||||
private long mSnapshotExecTime;
|
||||
private long mCpuTime;
|
||||
private long mExecTime;
|
||||
|
||||
public PerformanceCollector() {
|
||||
}
|
||||
|
||||
public PerformanceCollector(PerformanceResultsWriter writer) {
|
||||
setPerformanceResultsWriter(writer);
|
||||
}
|
||||
|
||||
public void setPerformanceResultsWriter(PerformanceResultsWriter writer) {
|
||||
mPerfWriter = writer;
|
||||
}
|
||||
|
||||
/**
|
||||
* Begin collection of memory usage information.
|
||||
*
|
||||
* @param label description of code block between beginSnapshot and
|
||||
* endSnapshot, used to label output
|
||||
*/
|
||||
public void beginSnapshot(String label) {
|
||||
if (mPerfWriter != null)
|
||||
mPerfWriter.writeBeginSnapshot(label);
|
||||
startPerformanceSnapshot();
|
||||
}
|
||||
|
||||
/**
|
||||
* End collection of memory usage information. Returns collected data in a
|
||||
* Bundle object.
|
||||
*
|
||||
* @return Memory and runtime metrics stored as key/value pairs. Values are
|
||||
* of type long, and keys include:
|
||||
* <ul>
|
||||
* <li>{@link #METRIC_KEY_CPU_TIME cpu_time}
|
||||
* <li>{@link #METRIC_KEY_EXECUTION_TIME execution_time}
|
||||
* <li>{@link #METRIC_KEY_PRE_RECEIVED_TRANSACTIONS
|
||||
* pre_received_transactions}
|
||||
* <li>{@link #METRIC_KEY_PRE_SENT_TRANSACTIONS
|
||||
* pre_sent_transactions}
|
||||
* <li>{@link #METRIC_KEY_RECEIVED_TRANSACTIONS
|
||||
* received_transactions}
|
||||
* <li>{@link #METRIC_KEY_SENT_TRANSACTIONS sent_transactions}
|
||||
* <li>{@link #METRIC_KEY_GC_INVOCATION_COUNT gc_invocation_count}
|
||||
* <li>{@link #METRIC_KEY_JAVA_ALLOCATED java_allocated}
|
||||
* <li>{@link #METRIC_KEY_JAVA_FREE java_free}
|
||||
* <li>{@link #METRIC_KEY_JAVA_PRIVATE_DIRTY java_private_dirty}
|
||||
* <li>{@link #METRIC_KEY_JAVA_PSS java_pss}
|
||||
* <li>{@link #METRIC_KEY_JAVA_SHARED_DIRTY java_shared_dirty}
|
||||
* <li>{@link #METRIC_KEY_JAVA_SIZE java_size}
|
||||
* <li>{@link #METRIC_KEY_NATIVE_ALLOCATED native_allocated}
|
||||
* <li>{@link #METRIC_KEY_NATIVE_FREE native_free}
|
||||
* <li>{@link #METRIC_KEY_NATIVE_PRIVATE_DIRTY native_private_dirty}
|
||||
* <li>{@link #METRIC_KEY_NATIVE_PSS native_pss}
|
||||
* <li>{@link #METRIC_KEY_NATIVE_SHARED_DIRTY native_shared_dirty}
|
||||
* <li>{@link #METRIC_KEY_NATIVE_SIZE native_size}
|
||||
* <li>{@link #METRIC_KEY_GLOBAL_ALLOC_COUNT global_alloc_count}
|
||||
* <li>{@link #METRIC_KEY_GLOBAL_ALLOC_SIZE global_alloc_size}
|
||||
* <li>{@link #METRIC_KEY_GLOBAL_FREED_COUNT global_freed_count}
|
||||
* <li>{@link #METRIC_KEY_GLOBAL_FREED_SIZE global_freed_size}
|
||||
* <li>{@link #METRIC_KEY_OTHER_PRIVATE_DIRTY other_private_dirty}
|
||||
* <li>{@link #METRIC_KEY_OTHER_PSS other_pss}
|
||||
* <li>{@link #METRIC_KEY_OTHER_SHARED_DIRTY other_shared_dirty}
|
||||
* </ul>
|
||||
*/
|
||||
public Bundle endSnapshot() {
|
||||
endPerformanceSnapshot();
|
||||
if (mPerfWriter != null)
|
||||
mPerfWriter.writeEndSnapshot(mPerfSnapshot);
|
||||
return mPerfSnapshot;
|
||||
}
|
||||
|
||||
/**
|
||||
* Start measurement of user and cpu time.
|
||||
*
|
||||
* @param label description of code block between startTiming and
|
||||
* stopTiming, used to label output
|
||||
*/
|
||||
public void startTiming(String label) {
|
||||
if (mPerfWriter != null)
|
||||
mPerfWriter.writeStartTiming(label);
|
||||
mPerfMeasurement = new Bundle();
|
||||
mPerfMeasurement.putParcelableArrayList(
|
||||
METRIC_KEY_ITERATIONS, new ArrayList<Parcelable>());
|
||||
mExecTime = SystemClock.uptimeMillis();
|
||||
mCpuTime = Process.getElapsedCpuTime();
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a measured segment, and start measuring the next segment. Returns
|
||||
* collected data in a Bundle object.
|
||||
*
|
||||
* @param label description of code block between startTiming and
|
||||
* addIteration, and between two calls to addIteration, used
|
||||
* to label output
|
||||
* @return Runtime metrics stored as key/value pairs. Values are of type
|
||||
* long, and keys include:
|
||||
* <ul>
|
||||
* <li>{@link #METRIC_KEY_LABEL label}
|
||||
* <li>{@link #METRIC_KEY_CPU_TIME cpu_time}
|
||||
* <li>{@link #METRIC_KEY_EXECUTION_TIME execution_time}
|
||||
* </ul>
|
||||
*/
|
||||
public Bundle addIteration(String label) {
|
||||
mCpuTime = Process.getElapsedCpuTime() - mCpuTime;
|
||||
mExecTime = SystemClock.uptimeMillis() - mExecTime;
|
||||
|
||||
Bundle iteration = new Bundle();
|
||||
iteration.putString(METRIC_KEY_LABEL, label);
|
||||
iteration.putLong(METRIC_KEY_EXECUTION_TIME, mExecTime);
|
||||
iteration.putLong(METRIC_KEY_CPU_TIME, mCpuTime);
|
||||
mPerfMeasurement.getParcelableArrayList(METRIC_KEY_ITERATIONS).add(iteration);
|
||||
|
||||
mExecTime = SystemClock.uptimeMillis();
|
||||
mCpuTime = Process.getElapsedCpuTime();
|
||||
return iteration;
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop measurement of user and cpu time.
|
||||
*
|
||||
* @param label description of code block between addIteration or
|
||||
* startTiming and stopTiming, used to label output
|
||||
* @return Runtime metrics stored in a bundle, including all iterations
|
||||
* between calls to startTiming and stopTiming. List of iterations
|
||||
* is keyed by {@link #METRIC_KEY_ITERATIONS iterations}.
|
||||
*/
|
||||
public Bundle stopTiming(String label) {
|
||||
addIteration(label);
|
||||
if (mPerfWriter != null)
|
||||
mPerfWriter.writeStopTiming(mPerfMeasurement);
|
||||
return mPerfMeasurement;
|
||||
}
|
||||
|
||||
/*
|
||||
* Starts tracking memory usage, binder transactions, and real & cpu timing.
|
||||
*/
|
||||
private void startPerformanceSnapshot() {
|
||||
// Create new snapshot
|
||||
mPerfSnapshot = new Bundle();
|
||||
|
||||
// Add initial binder counts
|
||||
Bundle binderCounts = getBinderCounts();
|
||||
for (String key : binderCounts.keySet()) {
|
||||
mPerfSnapshot.putLong("pre_" + key, binderCounts.getLong(key));
|
||||
}
|
||||
|
||||
// Force a GC and zero out the performance counters. Do this
|
||||
// before reading initial CPU/wall-clock times so we don't include
|
||||
// the cost of this setup in our final metrics.
|
||||
startAllocCounting();
|
||||
|
||||
// Record CPU time up to this point, and start timing. Note: this
|
||||
// must happen at the end of this method, otherwise the timing will
|
||||
// include noise.
|
||||
mSnapshotExecTime = SystemClock.uptimeMillis();
|
||||
mSnapshotCpuTime = Process.getElapsedCpuTime();
|
||||
}
|
||||
|
||||
/*
|
||||
* Stops tracking memory usage, binder transactions, and real & cpu timing.
|
||||
* Stores collected data as type long into Bundle object for reporting.
|
||||
*/
|
||||
private void endPerformanceSnapshot() {
|
||||
// Stop the timing. This must be done first before any other counting is
|
||||
// stopped.
|
||||
mSnapshotCpuTime = Process.getElapsedCpuTime() - mSnapshotCpuTime;
|
||||
mSnapshotExecTime = SystemClock.uptimeMillis() - mSnapshotExecTime;
|
||||
|
||||
stopAllocCounting();
|
||||
|
||||
long nativeMax = Debug.getNativeHeapSize() / 1024;
|
||||
long nativeAllocated = Debug.getNativeHeapAllocatedSize() / 1024;
|
||||
long nativeFree = Debug.getNativeHeapFreeSize() / 1024;
|
||||
|
||||
Debug.MemoryInfo memInfo = new Debug.MemoryInfo();
|
||||
Debug.getMemoryInfo(memInfo);
|
||||
|
||||
Runtime runtime = Runtime.getRuntime();
|
||||
|
||||
long dalvikMax = runtime.totalMemory() / 1024;
|
||||
long dalvikFree = runtime.freeMemory() / 1024;
|
||||
long dalvikAllocated = dalvikMax - dalvikFree;
|
||||
|
||||
// Add final binder counts
|
||||
Bundle binderCounts = getBinderCounts();
|
||||
for (String key : binderCounts.keySet()) {
|
||||
mPerfSnapshot.putLong(key, binderCounts.getLong(key));
|
||||
}
|
||||
|
||||
// Add alloc counts
|
||||
Bundle allocCounts = getAllocCounts();
|
||||
for (String key : allocCounts.keySet()) {
|
||||
mPerfSnapshot.putLong(key, allocCounts.getLong(key));
|
||||
}
|
||||
|
||||
mPerfSnapshot.putLong(METRIC_KEY_EXECUTION_TIME, mSnapshotExecTime);
|
||||
mPerfSnapshot.putLong(METRIC_KEY_CPU_TIME, mSnapshotCpuTime);
|
||||
|
||||
mPerfSnapshot.putLong(METRIC_KEY_NATIVE_SIZE, nativeMax);
|
||||
mPerfSnapshot.putLong(METRIC_KEY_NATIVE_ALLOCATED, nativeAllocated);
|
||||
mPerfSnapshot.putLong(METRIC_KEY_NATIVE_FREE, nativeFree);
|
||||
mPerfSnapshot.putLong(METRIC_KEY_NATIVE_PSS, memInfo.nativePss);
|
||||
mPerfSnapshot.putLong(METRIC_KEY_NATIVE_PRIVATE_DIRTY, memInfo.nativePrivateDirty);
|
||||
mPerfSnapshot.putLong(METRIC_KEY_NATIVE_SHARED_DIRTY, memInfo.nativeSharedDirty);
|
||||
|
||||
mPerfSnapshot.putLong(METRIC_KEY_JAVA_SIZE, dalvikMax);
|
||||
mPerfSnapshot.putLong(METRIC_KEY_JAVA_ALLOCATED, dalvikAllocated);
|
||||
mPerfSnapshot.putLong(METRIC_KEY_JAVA_FREE, dalvikFree);
|
||||
mPerfSnapshot.putLong(METRIC_KEY_JAVA_PSS, memInfo.dalvikPss);
|
||||
mPerfSnapshot.putLong(METRIC_KEY_JAVA_PRIVATE_DIRTY, memInfo.dalvikPrivateDirty);
|
||||
mPerfSnapshot.putLong(METRIC_KEY_JAVA_SHARED_DIRTY, memInfo.dalvikSharedDirty);
|
||||
|
||||
mPerfSnapshot.putLong(METRIC_KEY_OTHER_PSS, memInfo.otherPss);
|
||||
mPerfSnapshot.putLong(METRIC_KEY_OTHER_PRIVATE_DIRTY, memInfo.otherPrivateDirty);
|
||||
mPerfSnapshot.putLong(METRIC_KEY_OTHER_SHARED_DIRTY, memInfo.otherSharedDirty);
|
||||
}
|
||||
|
||||
/*
|
||||
* Starts allocation counting. This triggers a gc and resets the counts.
|
||||
*/
|
||||
private static void startAllocCounting() {
|
||||
// Before we start trigger a GC and reset the debug counts. Run the
|
||||
// finalizers and another GC before starting and stopping the alloc
|
||||
// counts. This will free up any objects that were just sitting around
|
||||
// waiting for their finalizers to be run.
|
||||
Runtime.getRuntime().gc();
|
||||
Runtime.getRuntime().runFinalization();
|
||||
Runtime.getRuntime().gc();
|
||||
|
||||
Debug.resetAllCounts();
|
||||
|
||||
// start the counts
|
||||
Debug.startAllocCounting();
|
||||
}
|
||||
|
||||
/*
|
||||
* Stops allocation counting.
|
||||
*/
|
||||
private static void stopAllocCounting() {
|
||||
Runtime.getRuntime().gc();
|
||||
Runtime.getRuntime().runFinalization();
|
||||
Runtime.getRuntime().gc();
|
||||
Debug.stopAllocCounting();
|
||||
}
|
||||
|
||||
/*
|
||||
* Returns a bundle with the current results from the allocation counting.
|
||||
*/
|
||||
private static Bundle getAllocCounts() {
|
||||
Bundle results = new Bundle();
|
||||
results.putLong(METRIC_KEY_GLOBAL_ALLOC_COUNT, Debug.getGlobalAllocCount());
|
||||
results.putLong(METRIC_KEY_GLOBAL_ALLOC_SIZE, Debug.getGlobalAllocSize());
|
||||
results.putLong(METRIC_KEY_GLOBAL_FREED_COUNT, Debug.getGlobalFreedCount());
|
||||
results.putLong(METRIC_KEY_GLOBAL_FREED_SIZE, Debug.getGlobalFreedSize());
|
||||
results.putLong(METRIC_KEY_GC_INVOCATION_COUNT, Debug.getGlobalGcInvocationCount());
|
||||
return results;
|
||||
}
|
||||
|
||||
/*
|
||||
* Returns a bundle with the counts for various binder counts for this
|
||||
* process. Currently the only two that are reported are the number of send
|
||||
* and the number of received transactions.
|
||||
*/
|
||||
private static Bundle getBinderCounts() {
|
||||
Bundle results = new Bundle();
|
||||
results.putLong(METRIC_KEY_SENT_TRANSACTIONS, Debug.getBinderSentTransactions());
|
||||
results.putLong(METRIC_KEY_RECEIVED_TRANSACTIONS, Debug.getBinderReceivedTransactions());
|
||||
return results;
|
||||
}
|
||||
}
|
32
core/java/android/test/TimedTest.java
Normal file
32
core/java/android/test/TimedTest.java
Normal file
@ -0,0 +1,32 @@
|
||||
/*
|
||||
* Copyright (C) 2009 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package android.test;
|
||||
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
|
||||
/**
|
||||
* This annotation can be used on an {@link junit.framework.TestCase}'s test
|
||||
* methods. When the annotation is present, the test method is timed and the
|
||||
* results written through instrumentation output. It can also be used on the
|
||||
* class itself, which is equivalent to tagging all test methods with this
|
||||
* annotation.
|
||||
*
|
||||
* {@hide} Pending approval for public API.
|
||||
*/
|
||||
/**
 * Marks a {@link junit.framework.TestCase} test method for timing: when
 * present, the method's runtime is measured and the results are written
 * through instrumentation output. Applied to the class itself, it is
 * equivalent to tagging every test method with this annotation.
 *
 * {@hide} Pending approval for public API.
 */
@Retention(RetentionPolicy.RUNTIME)
public @interface TimedTest { }
|
@ -18,6 +18,8 @@ package android.test;
|
||||
|
||||
import android.app.Instrumentation;
|
||||
import android.content.Context;
|
||||
import android.os.PerformanceCollector.PerformanceResultsWriter;
|
||||
|
||||
import com.google.android.collect.Lists;
|
||||
import junit.framework.Test;
|
||||
import junit.framework.TestCase;
|
||||
@ -39,6 +41,7 @@ public class AndroidTestRunner extends BaseTestRunner {
|
||||
|
||||
private List<TestListener> mTestListeners = Lists.newArrayList();
|
||||
private Instrumentation mInstrumentation;
|
||||
private PerformanceResultsWriter mPerfWriter;
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public void setTestClassName(String testClassName, String testMethodName) {
|
||||
@ -162,6 +165,7 @@ public class AndroidTestRunner extends BaseTestRunner {
|
||||
for (TestCase testCase : mTestCases) {
|
||||
setContextIfAndroidTestCase(testCase, mContext, testContext);
|
||||
setInstrumentationIfInstrumentationTestCase(testCase, mInstrumentation);
|
||||
setPerformanceWriterIfPerformanceTestCase(testCase, mPerfWriter);
|
||||
testCase.run(mTestResult);
|
||||
}
|
||||
}
|
||||
@ -184,6 +188,13 @@ public class AndroidTestRunner extends BaseTestRunner {
|
||||
}
|
||||
}
|
||||
|
||||
private void setPerformanceWriterIfPerformanceTestCase(
|
||||
Test test, PerformanceResultsWriter writer) {
|
||||
if (PerformanceTestBase.class.isAssignableFrom(test.getClass())) {
|
||||
((PerformanceTestBase) test).setPerformanceResultsWriter(writer);
|
||||
}
|
||||
}
|
||||
|
||||
public void setInstrumentation(Instrumentation instrumentation) {
|
||||
mInstrumentation = instrumentation;
|
||||
}
|
||||
@ -197,6 +208,13 @@ public class AndroidTestRunner extends BaseTestRunner {
|
||||
setInstrumentation(instrumentation);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@hide} Pending approval for public API.
|
||||
*/
|
||||
public void setPerformanceResultsWriter(PerformanceResultsWriter writer) {
|
||||
mPerfWriter = writer;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Class loadSuiteClass(String suiteClassName) throws ClassNotFoundException {
|
||||
return mContext.getClassLoader().loadClass(suiteClassName);
|
||||
|
@ -17,17 +17,31 @@
|
||||
package android.test;
|
||||
|
||||
import static android.test.suitebuilder.TestPredicates.REJECT_PERFORMANCE;
|
||||
|
||||
import com.android.internal.util.Predicate;
|
||||
|
||||
import android.app.Activity;
|
||||
import android.app.Instrumentation;
|
||||
import android.os.Bundle;
|
||||
import android.os.Debug;
|
||||
import android.os.Looper;
|
||||
import android.os.Parcelable;
|
||||
import android.os.PerformanceCollector;
|
||||
import android.os.Process;
|
||||
import android.os.SystemClock;
|
||||
import android.os.PerformanceCollector.PerformanceResultsWriter;
|
||||
import android.test.suitebuilder.TestMethod;
|
||||
import android.test.suitebuilder.TestPredicates;
|
||||
import android.test.suitebuilder.TestSuiteBuilder;
|
||||
import android.util.Log;
|
||||
|
||||
import com.android.internal.util.Predicate;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.File;
|
||||
import java.io.PrintStream;
|
||||
import java.lang.reflect.InvocationTargetException;
|
||||
import java.lang.reflect.Method;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import junit.framework.AssertionFailedError;
|
||||
import junit.framework.Test;
|
||||
@ -38,22 +52,13 @@ import junit.framework.TestSuite;
|
||||
import junit.runner.BaseTestRunner;
|
||||
import junit.textui.ResultPrinter;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.File;
|
||||
import java.io.PrintStream;
|
||||
import java.lang.reflect.InvocationTargetException;
|
||||
import java.lang.reflect.Method;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
|
||||
/**
|
||||
* An {@link Instrumentation} that runs various types of {@link junit.framework.TestCase}s against
|
||||
* an Android package (application). Typical usage:
|
||||
* <ol>
|
||||
* <li>Write {@link junit.framework.TestCase}s that perform unit, functional, or performance tests
|
||||
* against the classes in your package. Typically these are subclassed from:
|
||||
* <ul><li>{@link android.test.ActivityInstrumentationTestCase}</li>
|
||||
* <ul><li>{@link android.test.ActivityInstrumentationTestCase2}</li>
|
||||
* <li>{@link android.test.ActivityUnitTestCase}</li>
|
||||
* <li>{@link android.test.AndroidTestCase}</li>
|
||||
* <li>{@link android.test.ApplicationTestCase}</li>
|
||||
@ -111,13 +116,13 @@ import java.util.List;
|
||||
* <p/>
|
||||
* <b>To run in 'log only' mode</b>
|
||||
* -e log true
|
||||
* This option will load and iterate through all test classes and methods, but will bypass actual
|
||||
* test execution. Useful for quickly obtaining info on the tests to be executed by an
|
||||
* This option will load and iterate through all test classes and methods, but will bypass actual
|
||||
* test execution. Useful for quickly obtaining info on the tests to be executed by an
|
||||
* instrumentation command.
|
||||
* <p/>
|
||||
* <b>To generate EMMA code coverage:</b>
|
||||
* -e coverage true
|
||||
* Note: this requires an emma instrumented build. By default, the code coverage results file
|
||||
* Note: this requires an emma instrumented build. By default, the code coverage results file
|
||||
* will be saved in a /data/<app>/coverage.ec file, unless overridden by coverageFile flag (see
|
||||
* below)
|
||||
* <p/>
|
||||
@ -129,11 +134,10 @@ import java.util.List;
|
||||
|
||||
/* (not JavaDoc)
|
||||
* Although not necessary in most case, another way to use this class is to extend it and have the
|
||||
* derived class return
|
||||
* the desired test suite from the {@link #getTestSuite()} method. The test suite returned from this
|
||||
* method will be used if no target class is defined in the meta-data or command line argument
|
||||
* parameters. If a derived class is used it needs to be added as an instrumentation to the
|
||||
* AndroidManifest.xml and the command to run it would look like:
|
||||
* derived class return the desired test suite from the {@link #getTestSuite()} method. The test
|
||||
* suite returned from this method will be used if no target class is defined in the meta-data or
|
||||
* command line argument parameters. If a derived class is used it needs to be added as an
|
||||
* instrumentation to the AndroidManifest.xml and the command to run it would look like:
|
||||
* <p/>
|
||||
* adb shell am instrument -w com.android.foo/<i>com.android.FooInstrumentationTestRunner</i>
|
||||
* <p/>
|
||||
@ -155,66 +159,65 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu
|
||||
public static final String ARGUMENT_DELAY_MSEC = "delay_msec";
|
||||
|
||||
private static final String SMALL_SUITE = "small";
|
||||
private static final String MEDIUM_SUITE = "medium";
|
||||
private static final String MEDIUM_SUITE = "medium";
|
||||
private static final String LARGE_SUITE = "large";
|
||||
|
||||
|
||||
private static final String ARGUMENT_LOG_ONLY = "log";
|
||||
|
||||
|
||||
/**
|
||||
* This constant defines the maximum allowed runtime (in ms) for a test included in the "small" suite.
|
||||
* It is used to make an educated guess at what suite an unlabeled test belongs.
|
||||
* This constant defines the maximum allowed runtime (in ms) for a test included in the "small"
|
||||
* suite. It is used to make an educated guess at what suite an unlabeled test belongs.
|
||||
*/
|
||||
private static final float SMALL_SUITE_MAX_RUNTIME = 100;
|
||||
|
||||
|
||||
/**
|
||||
* This constant defines the maximum allowed runtime (in ms) for a test included in the "medium" suite.
|
||||
* It is used to make an educated guess at what suite an unlabeled test belongs.
|
||||
* This constant defines the maximum allowed runtime (in ms) for a test included in the
|
||||
* "medium" suite. It is used to make an educated guess at what suite an unlabeled test belongs.
|
||||
*/
|
||||
private static final float MEDIUM_SUITE_MAX_RUNTIME = 1000;
|
||||
|
||||
|
||||
/**
|
||||
* The following keys are used in the status bundle to provide structured reports to
|
||||
* an IInstrumentationWatcher.
|
||||
* The following keys are used in the status bundle to provide structured reports to
|
||||
* an IInstrumentationWatcher.
|
||||
*/
|
||||
|
||||
/**
|
||||
* This value, if stored with key {@link android.app.Instrumentation#REPORT_KEY_IDENTIFIER},
|
||||
* This value, if stored with key {@link android.app.Instrumentation#REPORT_KEY_IDENTIFIER},
|
||||
* identifies InstrumentationTestRunner as the source of the report. This is sent with all
|
||||
* status messages.
|
||||
*/
|
||||
public static final String REPORT_VALUE_ID = "InstrumentationTestRunner";
|
||||
/**
|
||||
* If included in the status or final bundle sent to an IInstrumentationWatcher, this key
|
||||
* If included in the status or final bundle sent to an IInstrumentationWatcher, this key
|
||||
* identifies the total number of tests that are being run. This is sent with all status
|
||||
* messages.
|
||||
*/
|
||||
public static final String REPORT_KEY_NUM_TOTAL = "numtests";
|
||||
/**
|
||||
* If included in the status or final bundle sent to an IInstrumentationWatcher, this key
|
||||
* If included in the status or final bundle sent to an IInstrumentationWatcher, this key
|
||||
* identifies the sequence number of the current test. This is sent with any status message
|
||||
* describing a specific test being started or completed.
|
||||
*/
|
||||
public static final String REPORT_KEY_NUM_CURRENT = "current";
|
||||
/**
|
||||
* If included in the status or final bundle sent to an IInstrumentationWatcher, this key
|
||||
* If included in the status or final bundle sent to an IInstrumentationWatcher, this key
|
||||
* identifies the name of the current test class. This is sent with any status message
|
||||
* describing a specific test being started or completed.
|
||||
*/
|
||||
public static final String REPORT_KEY_NAME_CLASS = "class";
|
||||
/**
|
||||
* If included in the status or final bundle sent to an IInstrumentationWatcher, this key
|
||||
* If included in the status or final bundle sent to an IInstrumentationWatcher, this key
|
||||
* identifies the name of the current test. This is sent with any status message
|
||||
* describing a specific test being started or completed.
|
||||
*/
|
||||
public static final String REPORT_KEY_NAME_TEST = "test";
|
||||
/**
|
||||
* If included in the status or final bundle sent to an IInstrumentationWatcher, this key
|
||||
* If included in the status or final bundle sent to an IInstrumentationWatcher, this key
|
||||
* reports the run time in seconds of the current test.
|
||||
*/
|
||||
private static final String REPORT_KEY_RUN_TIME = "runtime";
|
||||
/**
|
||||
* If included in the status or final bundle sent to an IInstrumentationWatcher, this key
|
||||
* If included in the status or final bundle sent to an IInstrumentationWatcher, this key
|
||||
* reports the guessed suite assignment for the current test.
|
||||
*/
|
||||
private static final String REPORT_KEY_SUITE_ASSIGNMENT = "suiteassignment";
|
||||
@ -223,6 +226,19 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu
|
||||
* identifies the path to the generated code coverage file.
|
||||
*/
|
||||
private static final String REPORT_KEY_COVERAGE_PATH = "coverageFilePath";
|
||||
/**
|
||||
* If included in the status or final bundle sent to an IInstrumentationWatcher, this key
|
||||
* reports the cpu time in milliseconds of the current test.
|
||||
*/
|
||||
private static final String REPORT_KEY_PERF_CPU_TIME =
|
||||
"performance." + PerformanceCollector.METRIC_KEY_CPU_TIME;
|
||||
/**
|
||||
* If included in the status or final bundle sent to an IInstrumentationWatcher, this key
|
||||
* reports the run time in milliseconds of the current test.
|
||||
*/
|
||||
private static final String REPORT_KEY_PERF_EXECUTION_TIME =
|
||||
"performance." + PerformanceCollector.METRIC_KEY_EXECUTION_TIME;
|
||||
|
||||
/**
|
||||
* The test is starting.
|
||||
*/
|
||||
@ -240,15 +256,15 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu
|
||||
*/
|
||||
public static final int REPORT_VALUE_RESULT_FAILURE = -2;
|
||||
/**
|
||||
* If included in the status bundle sent to an IInstrumentationWatcher, this key
|
||||
* identifies a stack trace describing an error or failure. This is sent with any status
|
||||
* If included in the status bundle sent to an IInstrumentationWatcher, this key
|
||||
* identifies a stack trace describing an error or failure. This is sent with any status
|
||||
* message describing a specific test being completed.
|
||||
*/
|
||||
public static final String REPORT_KEY_STACK = "stack";
|
||||
|
||||
// Default file name for code coverage
|
||||
private static final String DEFAULT_COVERAGE_FILE_NAME = "coverage.ec";
|
||||
|
||||
|
||||
private static final String LOG_TAG = "InstrumentationTestRunner";
|
||||
|
||||
private final Bundle mResults = new Bundle();
|
||||
@ -316,7 +332,7 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu
|
||||
if (testSuite != null) {
|
||||
testSuiteBuilder.addTestSuite(testSuite);
|
||||
} else {
|
||||
// no package or class bundle arguments were supplied, and no test suite
|
||||
// no package or class bundle arguments were supplied, and no test suite
|
||||
// provided so add all tests in application
|
||||
testSuiteBuilder.includePackages("");
|
||||
}
|
||||
@ -324,7 +340,7 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu
|
||||
} else {
|
||||
parseTestClasses(testClassesArg, testSuiteBuilder);
|
||||
}
|
||||
|
||||
|
||||
testSuiteBuilder.addRequirements(getBuilderRequirements());
|
||||
|
||||
mTestRunner = getAndroidTestRunner();
|
||||
@ -336,8 +352,10 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu
|
||||
if (mSuiteAssignmentMode) {
|
||||
mTestRunner.addTestListener(new SuiteAssignmentPrinter());
|
||||
} else {
|
||||
WatcherResultPrinter resultPrinter = new WatcherResultPrinter(mTestCount);
|
||||
mTestRunner.addTestListener(new TestPrinter("TestRunner", false));
|
||||
mTestRunner.addTestListener(new WatcherResultPrinter(mTestCount));
|
||||
mTestRunner.addTestListener(resultPrinter);
|
||||
mTestRunner.setPerformanceResultsWriter(resultPrinter);
|
||||
}
|
||||
start();
|
||||
}
|
||||
@ -347,7 +365,8 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses and loads the specified set of test classes
|
||||
* Parses and loads the specified set of test classes
|
||||
*
|
||||
* @param testClassArg - comma-separated list of test classes and methods
|
||||
* @param testSuiteBuilder - builder to add tests to
|
||||
*/
|
||||
@ -360,8 +379,9 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu
|
||||
|
||||
/**
|
||||
* Parse and load the given test class and, optionally, method
|
||||
* @param testClassName - full package name of test class and optionally method to add. Expected
|
||||
* format: com.android.TestClass#testMethod
|
||||
*
|
||||
* @param testClassName - full package name of test class and optionally method to add.
|
||||
* Expected format: com.android.TestClass#testMethod
|
||||
* @param testSuiteBuilder - builder to add tests to
|
||||
*/
|
||||
private void parseTestClass(String testClassName, TestSuiteBuilder testSuiteBuilder) {
|
||||
@ -372,8 +392,7 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu
|
||||
testMethodName = testClassName.substring(methodSeparatorIndex + 1);
|
||||
testClassName = testClassName.substring(0, methodSeparatorIndex);
|
||||
}
|
||||
testSuiteBuilder.addTestClassByName(testClassName, testMethodName,
|
||||
getTargetContext());
|
||||
testSuiteBuilder.addTestClassByName(testClassName, testMethodName, getTargetContext());
|
||||
}
|
||||
|
||||
protected AndroidTestRunner getAndroidTestRunner() {
|
||||
@ -384,12 +403,12 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu
|
||||
String tagString = arguments.getString(tag);
|
||||
return tagString != null && Boolean.parseBoolean(tagString);
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
* Returns the size predicate object, corresponding to the "size" argument value.
|
||||
*/
|
||||
private Predicate<TestMethod> getSizePredicateFromArg(String sizeArg) {
|
||||
|
||||
|
||||
if (SMALL_SUITE.equals(sizeArg)) {
|
||||
return TestPredicates.SELECT_SMALL;
|
||||
} else if (MEDIUM_SUITE.equals(sizeArg)) {
|
||||
@ -400,11 +419,11 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void onStart() {
|
||||
Looper.prepare();
|
||||
|
||||
|
||||
if (mJustCount) {
|
||||
mResults.putString(Instrumentation.REPORT_KEY_IDENTIFIER, REPORT_VALUE_ID);
|
||||
mResults.putInt(REPORT_KEY_NUM_TOTAL, mTestCount);
|
||||
@ -413,30 +432,30 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu
|
||||
if (mDebug) {
|
||||
Debug.waitForDebugger();
|
||||
}
|
||||
|
||||
|
||||
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
|
||||
PrintStream writer = new PrintStream(byteArrayOutputStream);
|
||||
try {
|
||||
StringResultPrinter resultPrinter = new StringResultPrinter(writer);
|
||||
|
||||
|
||||
mTestRunner.addTestListener(resultPrinter);
|
||||
|
||||
|
||||
long startTime = System.currentTimeMillis();
|
||||
mTestRunner.runTest();
|
||||
long runTime = System.currentTimeMillis() - startTime;
|
||||
|
||||
|
||||
resultPrinter.print(mTestRunner.getTestResult(), runTime);
|
||||
} finally {
|
||||
mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
|
||||
String.format("\nTest results for %s=%s",
|
||||
mTestRunner.getTestClassName(),
|
||||
mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
|
||||
String.format("\nTest results for %s=%s",
|
||||
mTestRunner.getTestClassName(),
|
||||
byteArrayOutputStream.toString()));
|
||||
|
||||
if (mCoverage) {
|
||||
generateCoverageReport();
|
||||
}
|
||||
writer.close();
|
||||
|
||||
|
||||
finish(Activity.RESULT_OK, mResults);
|
||||
}
|
||||
}
|
||||
@ -459,7 +478,7 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu
|
||||
public ClassLoader getLoader() {
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
private void generateCoverageReport() {
|
||||
// use reflection to call emma dump coverage method, to avoid
|
||||
// always statically compiling against emma jar
|
||||
@ -467,9 +486,9 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu
|
||||
java.io.File coverageFile = new java.io.File(coverageFilePath);
|
||||
try {
|
||||
Class emmaRTClass = Class.forName("com.vladium.emma.rt.RT");
|
||||
Method dumpCoverageMethod = emmaRTClass.getMethod("dumpCoverageData",
|
||||
Method dumpCoverageMethod = emmaRTClass.getMethod("dumpCoverageData",
|
||||
coverageFile.getClass(), boolean.class, boolean.class);
|
||||
|
||||
|
||||
dumpCoverageMethod.invoke(null, coverageFile, false, false);
|
||||
// output path to generated coverage file so it can be parsed by a test harness if
|
||||
// needed
|
||||
@ -495,15 +514,14 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu
|
||||
private String getCoverageFilePath() {
|
||||
if (mCoverageFilePath == null) {
|
||||
return getTargetContext().getFilesDir().getAbsolutePath() + File.separator +
|
||||
DEFAULT_COVERAGE_FILE_NAME;
|
||||
}
|
||||
else {
|
||||
DEFAULT_COVERAGE_FILE_NAME;
|
||||
} else {
|
||||
return mCoverageFilePath;
|
||||
}
|
||||
}
|
||||
|
||||
private void reportEmmaError(Exception e) {
|
||||
reportEmmaError("", e);
|
||||
reportEmmaError("", e);
|
||||
}
|
||||
|
||||
private void reportEmmaError(String hint, Exception e) {
|
||||
@ -524,30 +542,29 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu
|
||||
printFooter(result);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* This class sends status reports back to the IInstrumentationWatcher about
|
||||
* This class sends status reports back to the IInstrumentationWatcher about
|
||||
* which suite each test belongs.
|
||||
*/
|
||||
private class SuiteAssignmentPrinter implements TestListener
|
||||
{
|
||||
|
||||
private class SuiteAssignmentPrinter implements TestListener {
|
||||
|
||||
private Bundle mTestResult;
|
||||
private long mStartTime;
|
||||
private long mEndTime;
|
||||
private boolean mTimingValid;
|
||||
|
||||
|
||||
public SuiteAssignmentPrinter() {
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* send a status for the start of a each test, so long tests can be seen as "running"
|
||||
*/
|
||||
public void startTest(Test test) {
|
||||
mTimingValid = true;
|
||||
mStartTime = System.currentTimeMillis();
|
||||
mStartTime = System.currentTimeMillis();
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @see junit.framework.TestListener#addError(Test, Throwable)
|
||||
*/
|
||||
@ -576,7 +593,7 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu
|
||||
runTime = -1;
|
||||
} else {
|
||||
runTime = mEndTime - mStartTime;
|
||||
if (runTime < SMALL_SUITE_MAX_RUNTIME
|
||||
if (runTime < SMALL_SUITE_MAX_RUNTIME
|
||||
&& !InstrumentationTestCase.class.isAssignableFrom(test.getClass())) {
|
||||
assignmentSuite = SMALL_SUITE;
|
||||
} else if (runTime < MEDIUM_SUITE_MAX_RUNTIME) {
|
||||
@ -588,8 +605,8 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu
|
||||
// Clear mStartTime so that we can verify that it gets set next time.
|
||||
mStartTime = -1;
|
||||
|
||||
mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
|
||||
test.getClass().getName() + "#" + ((TestCase) test).getName()
|
||||
mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
|
||||
test.getClass().getName() + "#" + ((TestCase) test).getName()
|
||||
+ "\nin " + assignmentSuite + " suite\nrunTime: "
|
||||
+ String.valueOf(runTime) + "\n");
|
||||
mTestResult.putFloat(REPORT_KEY_RUN_TIME, runTime);
|
||||
@ -598,36 +615,40 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu
|
||||
sendStatus(0, mTestResult);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* This class sends status reports back to the IInstrumentationWatcher
|
||||
*/
|
||||
private class WatcherResultPrinter implements TestListener
|
||||
{
|
||||
private class WatcherResultPrinter implements TestListener, PerformanceResultsWriter {
|
||||
private final Bundle mResultTemplate;
|
||||
Bundle mTestResult;
|
||||
int mTestNum = 0;
|
||||
int mTestResultCode = 0;
|
||||
String mTestClass = null;
|
||||
|
||||
boolean mIsTimedTest = false;
|
||||
long mCpuTime = 0;
|
||||
long mExecTime = 0;
|
||||
|
||||
public WatcherResultPrinter(int numTests) {
|
||||
mResultTemplate = new Bundle();
|
||||
mResultTemplate.putString(Instrumentation.REPORT_KEY_IDENTIFIER, REPORT_VALUE_ID);
|
||||
mResultTemplate.putInt(REPORT_KEY_NUM_TOTAL, numTests);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* send a status for the start of a each test, so long tests can be seen as "running"
|
||||
* send a status for the start of a each test, so long tests can be seen
|
||||
* as "running"
|
||||
*/
|
||||
public void startTest(Test test) {
|
||||
String testClass = test.getClass().getName();
|
||||
String testName = ((TestCase)test).getName();
|
||||
mTestResult = new Bundle(mResultTemplate);
|
||||
mTestResult.putString(REPORT_KEY_NAME_CLASS, testClass);
|
||||
mTestResult.putString(REPORT_KEY_NAME_TEST, ((TestCase) test).getName());
|
||||
mTestResult.putString(REPORT_KEY_NAME_TEST, testName);
|
||||
mTestResult.putInt(REPORT_KEY_NUM_CURRENT, ++mTestNum);
|
||||
// pretty printing
|
||||
if (testClass != null && !testClass.equals(mTestClass)) {
|
||||
mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
|
||||
mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
|
||||
String.format("\n%s:", testClass));
|
||||
mTestClass = testClass;
|
||||
} else {
|
||||
@ -635,9 +656,9 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu
|
||||
}
|
||||
|
||||
// The delay_msec parameter is normally used to provide buffers of idle time
|
||||
// for power measurement purposes. To make sure there is a delay before and after
|
||||
// for power measurement purposes. To make sure there is a delay before and after
|
||||
// every test in a suite, we delay *after* every test (see endTest below) and also
|
||||
// delay *before* the first test. So, delay test1 delay test2 delay.
|
||||
// delay *before* the first test. So, delay test1 delay test2 delay.
|
||||
|
||||
try {
|
||||
if (mTestNum == 1) Thread.sleep(mDelayMsec);
|
||||
@ -647,8 +668,25 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu
|
||||
|
||||
sendStatus(REPORT_VALUE_RESULT_START, mTestResult);
|
||||
mTestResultCode = 0;
|
||||
|
||||
mIsTimedTest = false;
|
||||
try {
|
||||
// Look for TimedTest annotation on both test class and test
|
||||
// method
|
||||
mIsTimedTest = test.getClass().isAnnotationPresent(TimedTest.class) ||
|
||||
test.getClass().getMethod(testName).isAnnotationPresent(TimedTest.class);
|
||||
} catch (SecurityException e) {
|
||||
throw new IllegalStateException(e);
|
||||
} catch (NoSuchMethodException e) {
|
||||
throw new IllegalStateException(e);
|
||||
}
|
||||
|
||||
if (mIsTimedTest) {
|
||||
mExecTime = SystemClock.uptimeMillis();
|
||||
mCpuTime = Process.getElapsedCpuTime();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* @see junit.framework.TestListener#addError(Test, Throwable)
|
||||
*/
|
||||
@ -656,9 +694,9 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu
|
||||
mTestResult.putString(REPORT_KEY_STACK, BaseTestRunner.getFilteredTrace(t));
|
||||
mTestResultCode = REPORT_VALUE_RESULT_ERROR;
|
||||
// pretty printing
|
||||
mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
|
||||
String.format("\nError in %s:\n%s",
|
||||
((TestCase) test).getName(), BaseTestRunner.getFilteredTrace(t)));
|
||||
mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
|
||||
String.format("\nError in %s:\n%s",
|
||||
((TestCase)test).getName(), BaseTestRunner.getFilteredTrace(t)));
|
||||
}
|
||||
|
||||
/**
|
||||
@ -668,28 +706,68 @@ public class InstrumentationTestRunner extends Instrumentation implements TestSu
|
||||
mTestResult.putString(REPORT_KEY_STACK, BaseTestRunner.getFilteredTrace(t));
|
||||
mTestResultCode = REPORT_VALUE_RESULT_FAILURE;
|
||||
// pretty printing
|
||||
mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
|
||||
String.format("\nFailure in %s:\n%s",
|
||||
((TestCase) test).getName(), BaseTestRunner.getFilteredTrace(t)));
|
||||
mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
|
||||
String.format("\nFailure in %s:\n%s",
|
||||
((TestCase)test).getName(), BaseTestRunner.getFilteredTrace(t)));
|
||||
}
|
||||
|
||||
/**
|
||||
* @see junit.framework.TestListener#endTest(Test)
|
||||
*/
|
||||
public void endTest(Test test) {
|
||||
if (mIsTimedTest) {
|
||||
mCpuTime = Process.getElapsedCpuTime() - mCpuTime;
|
||||
mExecTime = SystemClock.uptimeMillis() - mExecTime;
|
||||
mTestResult.putLong(REPORT_KEY_PERF_CPU_TIME, mCpuTime);
|
||||
mTestResult.putLong(REPORT_KEY_PERF_EXECUTION_TIME, mExecTime);
|
||||
}
|
||||
|
||||
if (mTestResultCode == 0) {
|
||||
mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT, ".");
|
||||
}
|
||||
sendStatus(mTestResultCode, mTestResult);
|
||||
|
||||
try { // Sleep after every test, if specified
|
||||
try { // Sleep after every test, if specified
|
||||
Thread.sleep(mDelayMsec);
|
||||
} catch (InterruptedException e) {
|
||||
throw new IllegalStateException(e);
|
||||
}
|
||||
}
|
||||
|
||||
public void writeBeginSnapshot(String label) {
|
||||
// Do nothing
|
||||
}
|
||||
|
||||
public void writeEndSnapshot(Bundle results) {
|
||||
// Copy all snapshot data fields as type long into mResults, which
|
||||
// is outputted via Instrumentation.finish
|
||||
for (String key : results.keySet()) {
|
||||
mResults.putLong(key, results.getLong(key));
|
||||
}
|
||||
}
|
||||
|
||||
public void writeStartTiming(String label) {
|
||||
// Do nothing
|
||||
}
|
||||
|
||||
public void writeStopTiming(Bundle results) {
|
||||
// Copy results into mTestResult by flattening list of iterations,
|
||||
// which is outputted via WatcherResultPrinter.endTest
|
||||
int i = 0;
|
||||
for (Parcelable p :
|
||||
results.getParcelableArrayList(PerformanceCollector.METRIC_KEY_ITERATIONS)) {
|
||||
Bundle iteration = (Bundle)p;
|
||||
String index = "performance.iteration" + i + ".";
|
||||
mTestResult.putString(index + PerformanceCollector.METRIC_KEY_LABEL,
|
||||
iteration.getString(PerformanceCollector.METRIC_KEY_LABEL));
|
||||
mTestResult.putLong(index + PerformanceCollector.METRIC_KEY_CPU_TIME,
|
||||
iteration.getLong(PerformanceCollector.METRIC_KEY_CPU_TIME));
|
||||
mTestResult.putLong(index + PerformanceCollector.METRIC_KEY_EXECUTION_TIME,
|
||||
iteration.getLong(PerformanceCollector.METRIC_KEY_EXECUTION_TIME));
|
||||
i++;
|
||||
}
|
||||
}
|
||||
|
||||
// TODO report the end of the cycle
|
||||
// TODO report runtime for each test
|
||||
}
|
||||
}
|
||||
|
@ -16,13 +16,95 @@
|
||||
|
||||
package android.test;
|
||||
|
||||
import android.test.PerformanceTestCase;
|
||||
import junit.framework.TestCase;
|
||||
import android.os.Bundle;
|
||||
import android.os.PerformanceCollector;
|
||||
import android.os.PerformanceCollector.PerformanceResultsWriter;
|
||||
|
||||
import java.lang.reflect.Method;
|
||||
|
||||
/**
|
||||
* {@hide} Not needed for SDK.
|
||||
* Provides hooks and wrappers to automatically and manually collect and report
|
||||
* performance data in tests.
|
||||
*
|
||||
* {@hide} Pending approval for public API.
|
||||
*/
|
||||
public abstract class PerformanceTestBase extends TestCase implements PerformanceTestCase {
|
||||
public class PerformanceTestBase extends InstrumentationTestCase implements PerformanceTestCase {
|
||||
|
||||
private static PerformanceCollector sPerfCollector = new PerformanceCollector();
|
||||
private static int sNumTestMethods = 0;
|
||||
private static int sNumTestMethodsLeft = 0;
|
||||
|
||||
// Count number of tests, used to emulate beforeClass and afterClass from JUnit4
|
||||
public PerformanceTestBase() {
|
||||
if (sNumTestMethods == 0) {
|
||||
Method methods[] = getClass().getMethods();
|
||||
for (Method m : methods) {
|
||||
if (m.getName().startsWith("test")) {
|
||||
sNumTestMethods ++;
|
||||
sNumTestMethodsLeft ++;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void setUp() throws Exception {
|
||||
super.setUp();
|
||||
// @beforeClass
|
||||
// Will skew timing measured by TestRunner, but not by PerformanceCollector
|
||||
if (sNumTestMethodsLeft == sNumTestMethods) {
|
||||
sPerfCollector.beginSnapshot(this.getClass().getName());
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void tearDown() throws Exception {
|
||||
// @afterClass
|
||||
// Will skew timing measured by TestRunner, but not by PerformanceCollector
|
||||
if (--sNumTestMethodsLeft == 0) {
|
||||
sPerfCollector.endSnapshot();
|
||||
}
|
||||
super.tearDown();
|
||||
}
|
||||
|
||||
public void setPerformanceResultsWriter(PerformanceResultsWriter writer) {
|
||||
sPerfCollector.setPerformanceResultsWriter(writer);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see PerformanceCollector#beginSnapshot(String)
|
||||
*/
|
||||
protected void beginSnapshot(String label) {
|
||||
sPerfCollector.beginSnapshot(label);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see PerformanceCollector#endSnapshot()
|
||||
*/
|
||||
protected Bundle endSnapshot() {
|
||||
return sPerfCollector.endSnapshot();
|
||||
}
|
||||
|
||||
/**
|
||||
* @see PerformanceCollector#startTiming(String)
|
||||
*/
|
||||
protected void startTiming(String label) {
|
||||
sPerfCollector.startTiming(label);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see PerformanceCollector#addIteration(String)
|
||||
*/
|
||||
protected Bundle addIteration(String label) {
|
||||
return sPerfCollector.addIteration(label);
|
||||
}
|
||||
|
||||
/**
|
||||
* @see PerformanceCollector#stopTiming(String)
|
||||
*/
|
||||
protected Bundle stopTiming(String label) {
|
||||
return sPerfCollector.stopTiming(label);
|
||||
}
|
||||
|
||||
public int startPerformance(PerformanceTestCase.Intermediates intermediates) {
|
||||
return 0;
|
||||
@ -31,12 +113,4 @@ public abstract class PerformanceTestBase extends TestCase implements Performanc
|
||||
public boolean isPerformanceOnly() {
|
||||
return true;
|
||||
}
|
||||
|
||||
/*
|
||||
* Temporary hack to get some things working again.
|
||||
*/
|
||||
public void testRun() {
|
||||
throw new RuntimeException("test implementation not provided");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -0,0 +1,441 @@
|
||||
/*
|
||||
* Copyright (C) 2009 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.android.unit_tests.os;
|
||||
|
||||
import android.os.Bundle;
|
||||
import android.os.Parcelable;
|
||||
import android.os.PerformanceCollector;
|
||||
import android.os.PerformanceCollector.PerformanceResultsWriter;
|
||||
import android.test.suitebuilder.annotation.LargeTest;
|
||||
|
||||
import java.lang.reflect.Field;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Random;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
|
||||
public class PerformanceCollectorTest extends TestCase {
|
||||
|
||||
private PerformanceCollector mPerfCollector;
|
||||
|
||||
@Override
|
||||
protected void setUp() throws Exception {
|
||||
super.setUp();
|
||||
mPerfCollector = new PerformanceCollector();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void tearDown() throws Exception {
|
||||
super.tearDown();
|
||||
mPerfCollector = null;
|
||||
}
|
||||
|
||||
public void testBeginSnapshotNoWriter() throws Exception {
|
||||
mPerfCollector.beginSnapshot("testBeginSnapshotNoWriter");
|
||||
|
||||
assertTrue((Long)readPrivateField("mSnapshotCpuTime", mPerfCollector) > 0);
|
||||
assertTrue((Long)readPrivateField("mSnapshotExecTime", mPerfCollector) > 0);
|
||||
Bundle snapshot = (Bundle)readPrivateField("mPerfSnapshot", mPerfCollector);
|
||||
assertNotNull(snapshot);
|
||||
assertEquals(2, snapshot.size());
|
||||
}
|
||||
|
||||
@LargeTest
|
||||
public void testEndSnapshotNoWriter() throws Exception {
|
||||
mPerfCollector.beginSnapshot("testEndSnapshotNoWriter");
|
||||
sleepForRandomLongPeriod();
|
||||
Bundle snapshot = mPerfCollector.endSnapshot();
|
||||
|
||||
verifySnapshotBundle(snapshot);
|
||||
}
|
||||
|
||||
public void testStartTimingNoWriter() throws Exception {
|
||||
mPerfCollector.startTiming("testStartTimingNoWriter");
|
||||
|
||||
assertTrue((Long)readPrivateField("mCpuTime", mPerfCollector) > 0);
|
||||
assertTrue((Long)readPrivateField("mExecTime", mPerfCollector) > 0);
|
||||
Bundle measurement = (Bundle)readPrivateField("mPerfMeasurement", mPerfCollector);
|
||||
assertNotNull(measurement);
|
||||
verifyTimingBundle(measurement, new ArrayList<String>());
|
||||
}
|
||||
|
||||
public void testAddIterationNoWriter() throws Exception {
|
||||
mPerfCollector.startTiming("testAddIterationNoWriter");
|
||||
sleepForRandomTinyPeriod();
|
||||
Bundle iteration = mPerfCollector.addIteration("timing1");
|
||||
|
||||
verifyIterationBundle(iteration, "timing1");
|
||||
}
|
||||
|
||||
public void testStopTimingNoWriter() throws Exception {
|
||||
mPerfCollector.startTiming("testStopTimingNoWriter");
|
||||
sleepForRandomTinyPeriod();
|
||||
mPerfCollector.addIteration("timing2");
|
||||
sleepForRandomTinyPeriod();
|
||||
mPerfCollector.addIteration("timing3");
|
||||
sleepForRandomShortPeriod();
|
||||
Bundle timing = mPerfCollector.stopTiming("timing4");
|
||||
|
||||
ArrayList<String> labels = new ArrayList<String>();
|
||||
labels.add("timing2");
|
||||
labels.add("timing3");
|
||||
labels.add("timing4");
|
||||
verifyTimingBundle(timing, labels);
|
||||
}
|
||||
|
||||
public void testBeginSnapshot() throws Exception {
|
||||
MockPerformanceResultsWriter writer = new MockPerformanceResultsWriter();
|
||||
mPerfCollector.setPerformanceResultsWriter(writer);
|
||||
mPerfCollector.beginSnapshot("testBeginSnapshot");
|
||||
|
||||
assertEquals("testBeginSnapshot", writer.snapshotLabel);
|
||||
assertTrue((Long)readPrivateField("mSnapshotCpuTime", mPerfCollector) > 0);
|
||||
assertTrue((Long)readPrivateField("mSnapshotExecTime", mPerfCollector) > 0);
|
||||
Bundle snapshot = (Bundle)readPrivateField("mPerfSnapshot", mPerfCollector);
|
||||
assertNotNull(snapshot);
|
||||
assertEquals(2, snapshot.size());
|
||||
}
|
||||
|
||||
@LargeTest
|
||||
public void testEndSnapshot() throws Exception {
|
||||
MockPerformanceResultsWriter writer = new MockPerformanceResultsWriter();
|
||||
mPerfCollector.setPerformanceResultsWriter(writer);
|
||||
mPerfCollector.beginSnapshot("testEndSnapshot");
|
||||
sleepForRandomLongPeriod();
|
||||
Bundle snapshot1 = mPerfCollector.endSnapshot();
|
||||
Bundle snapshot2 = writer.snapshotResults;
|
||||
|
||||
assertTrue(snapshot1.equals(snapshot2));
|
||||
verifySnapshotBundle(snapshot1);
|
||||
}
|
||||
|
||||
public void testStartTiming() throws Exception {
|
||||
MockPerformanceResultsWriter writer = new MockPerformanceResultsWriter();
|
||||
mPerfCollector.setPerformanceResultsWriter(writer);
|
||||
mPerfCollector.startTiming("testStartTiming");
|
||||
|
||||
assertEquals("testStartTiming", writer.timingLabel);
|
||||
assertTrue((Long)readPrivateField("mCpuTime", mPerfCollector) > 0);
|
||||
assertTrue((Long)readPrivateField("mExecTime", mPerfCollector) > 0);
|
||||
Bundle measurement = (Bundle)readPrivateField("mPerfMeasurement", mPerfCollector);
|
||||
assertNotNull(measurement);
|
||||
verifyTimingBundle(measurement, new ArrayList<String>());
|
||||
}
|
||||
|
||||
public void testAddIteration() throws Exception {
|
||||
mPerfCollector.startTiming("testAddIteration");
|
||||
sleepForRandomTinyPeriod();
|
||||
Bundle iteration = mPerfCollector.addIteration("timing5");
|
||||
|
||||
verifyIterationBundle(iteration, "timing5");
|
||||
}
|
||||
|
||||
public void testStopTiming() throws Exception {
|
||||
mPerfCollector.startTiming("testStopTiming");
|
||||
sleepForRandomTinyPeriod();
|
||||
mPerfCollector.addIteration("timing6");
|
||||
sleepForRandomTinyPeriod();
|
||||
mPerfCollector.addIteration("timing7");
|
||||
sleepForRandomShortPeriod();
|
||||
Bundle timing = mPerfCollector.stopTiming("timing8");
|
||||
|
||||
ArrayList<String> labels = new ArrayList<String>();
|
||||
labels.add("timing6");
|
||||
labels.add("timing7");
|
||||
labels.add("timing8");
|
||||
verifyTimingBundle(timing, labels);
|
||||
}
|
||||
|
||||
@LargeTest
|
||||
public void testSimpleSequence() throws Exception {
|
||||
MockPerformanceResultsWriter writer = new MockPerformanceResultsWriter();
|
||||
mPerfCollector.setPerformanceResultsWriter(writer);
|
||||
mPerfCollector.beginSnapshot("testSimpleSequence");
|
||||
mPerfCollector.startTiming("testSimpleSequenceTiming");
|
||||
sleepForRandomTinyPeriod();
|
||||
mPerfCollector.addIteration("iteration1");
|
||||
sleepForRandomTinyPeriod();
|
||||
mPerfCollector.addIteration("iteration2");
|
||||
sleepForRandomTinyPeriod();
|
||||
mPerfCollector.addIteration("iteration3");
|
||||
sleepForRandomTinyPeriod();
|
||||
mPerfCollector.addIteration("iteration4");
|
||||
sleepForRandomShortPeriod();
|
||||
Bundle timing = mPerfCollector.stopTiming("iteration5");
|
||||
sleepForRandomLongPeriod();
|
||||
Bundle snapshot1 = mPerfCollector.endSnapshot();
|
||||
Bundle snapshot2 = writer.snapshotResults;
|
||||
|
||||
assertTrue(snapshot1.equals(snapshot2));
|
||||
verifySnapshotBundle(snapshot1);
|
||||
|
||||
ArrayList<String> labels = new ArrayList<String>();
|
||||
labels.add("iteration1");
|
||||
labels.add("iteration2");
|
||||
labels.add("iteration3");
|
||||
labels.add("iteration4");
|
||||
labels.add("iteration5");
|
||||
verifyTimingBundle(timing, labels);
|
||||
}
|
||||
|
||||
@LargeTest
|
||||
public void testLongSequence() throws Exception {
|
||||
MockPerformanceResultsWriter writer = new MockPerformanceResultsWriter();
|
||||
mPerfCollector.setPerformanceResultsWriter(writer);
|
||||
mPerfCollector.beginSnapshot("testLongSequence");
|
||||
mPerfCollector.startTiming("testLongSequenceTiming1");
|
||||
sleepForRandomTinyPeriod();
|
||||
mPerfCollector.addIteration("iteration1");
|
||||
sleepForRandomTinyPeriod();
|
||||
mPerfCollector.addIteration("iteration2");
|
||||
sleepForRandomShortPeriod();
|
||||
Bundle timing1 = mPerfCollector.stopTiming("iteration3");
|
||||
sleepForRandomLongPeriod();
|
||||
|
||||
mPerfCollector.startTiming("testLongSequenceTiming2");
|
||||
sleepForRandomTinyPeriod();
|
||||
mPerfCollector.addIteration("iteration4");
|
||||
sleepForRandomTinyPeriod();
|
||||
mPerfCollector.addIteration("iteration5");
|
||||
sleepForRandomShortPeriod();
|
||||
Bundle timing2 = mPerfCollector.stopTiming("iteration6");
|
||||
sleepForRandomLongPeriod();
|
||||
|
||||
mPerfCollector.startTiming("testLongSequenceTiming3");
|
||||
sleepForRandomTinyPeriod();
|
||||
mPerfCollector.addIteration("iteration7");
|
||||
sleepForRandomTinyPeriod();
|
||||
mPerfCollector.addIteration("iteration8");
|
||||
sleepForRandomShortPeriod();
|
||||
Bundle timing3 = mPerfCollector.stopTiming("iteration9");
|
||||
sleepForRandomLongPeriod();
|
||||
|
||||
mPerfCollector.startTiming("testLongSequenceTiming4");
|
||||
sleepForRandomTinyPeriod();
|
||||
mPerfCollector.addIteration("iteration10");
|
||||
sleepForRandomTinyPeriod();
|
||||
mPerfCollector.addIteration("iteration11");
|
||||
sleepForRandomShortPeriod();
|
||||
Bundle timing4 = mPerfCollector.stopTiming("iteration12");
|
||||
sleepForRandomLongPeriod();
|
||||
|
||||
mPerfCollector.startTiming("testLongSequenceTiming5");
|
||||
sleepForRandomTinyPeriod();
|
||||
mPerfCollector.addIteration("iteration13");
|
||||
sleepForRandomTinyPeriod();
|
||||
mPerfCollector.addIteration("iteration14");
|
||||
sleepForRandomShortPeriod();
|
||||
Bundle timing5 = mPerfCollector.stopTiming("iteration15");
|
||||
sleepForRandomLongPeriod();
|
||||
Bundle snapshot1 = mPerfCollector.endSnapshot();
|
||||
Bundle snapshot2 = writer.snapshotResults;
|
||||
|
||||
assertTrue(snapshot1.equals(snapshot2));
|
||||
verifySnapshotBundle(snapshot1);
|
||||
|
||||
ArrayList<String> labels1 = new ArrayList<String>();
|
||||
labels1.add("iteration1");
|
||||
labels1.add("iteration2");
|
||||
labels1.add("iteration3");
|
||||
verifyTimingBundle(timing1, labels1);
|
||||
ArrayList<String> labels2 = new ArrayList<String>();
|
||||
labels2.add("iteration4");
|
||||
labels2.add("iteration5");
|
||||
labels2.add("iteration6");
|
||||
verifyTimingBundle(timing2, labels2);
|
||||
ArrayList<String> labels3 = new ArrayList<String>();
|
||||
labels3.add("iteration7");
|
||||
labels3.add("iteration8");
|
||||
labels3.add("iteration9");
|
||||
verifyTimingBundle(timing3, labels3);
|
||||
ArrayList<String> labels4 = new ArrayList<String>();
|
||||
labels4.add("iteration10");
|
||||
labels4.add("iteration11");
|
||||
labels4.add("iteration12");
|
||||
verifyTimingBundle(timing4, labels4);
|
||||
ArrayList<String> labels5 = new ArrayList<String>();
|
||||
labels5.add("iteration13");
|
||||
labels5.add("iteration14");
|
||||
labels5.add("iteration15");
|
||||
verifyTimingBundle(timing5, labels5);
|
||||
}
|
||||
|
||||
/*
|
||||
* Verify that snapshotting and timing do not interfere w/ each other,
|
||||
* by staggering calls to snapshot and timing functions.
|
||||
*/
|
||||
@LargeTest
|
||||
public void testOutOfOrderSequence() {
|
||||
MockPerformanceResultsWriter writer = new MockPerformanceResultsWriter();
|
||||
mPerfCollector.setPerformanceResultsWriter(writer);
|
||||
mPerfCollector.startTiming("testOutOfOrderSequenceTiming");
|
||||
sleepForRandomShortPeriod();
|
||||
mPerfCollector.beginSnapshot("testOutOfOrderSequenceSnapshot");
|
||||
sleepForRandomShortPeriod();
|
||||
Bundle timing1 = mPerfCollector.stopTiming("timing1");
|
||||
sleepForRandomShortPeriod();
|
||||
Bundle snapshot1 = mPerfCollector.endSnapshot();
|
||||
|
||||
Bundle timing2 = writer.timingResults;
|
||||
Bundle snapshot2 = writer.snapshotResults;
|
||||
|
||||
assertTrue(snapshot1.equals(snapshot2));
|
||||
verifySnapshotBundle(snapshot1);
|
||||
|
||||
assertTrue(timing1.equals(timing2));
|
||||
ArrayList<String> labels = new ArrayList<String>();
|
||||
labels.add("timing1");
|
||||
verifyTimingBundle(timing1, labels);
|
||||
}
|
||||
|
||||
private void sleepForRandomPeriod(int minDuration, int maxDuration) {
|
||||
Random random = new Random();
|
||||
int period = minDuration + random.nextInt(maxDuration - minDuration);
|
||||
int slept = 0;
|
||||
// Generate random positive amount of work, so cpu time is measurable in
|
||||
// milliseconds
|
||||
while (slept < period) {
|
||||
int step = random.nextInt(minDuration/5);
|
||||
try {
|
||||
Thread.sleep(step);
|
||||
} catch (InterruptedException e ) {
|
||||
// eat the exception
|
||||
}
|
||||
slept += step;
|
||||
}
|
||||
}
|
||||
|
||||
private void sleepForRandomTinyPeriod() {
|
||||
sleepForRandomPeriod(25, 50);
|
||||
}
|
||||
|
||||
private void sleepForRandomShortPeriod() {
|
||||
sleepForRandomPeriod(100, 250);
|
||||
}
|
||||
|
||||
private void sleepForRandomLongPeriod() {
|
||||
sleepForRandomPeriod(500, 1000);
|
||||
}
|
||||
|
||||
private void verifySnapshotBundle(Bundle snapshot) {
|
||||
assertTrue("At least 26 metrics collected", 26 <= snapshot.size());
|
||||
|
||||
assertTrue(snapshot.containsKey(PerformanceCollector.METRIC_KEY_CPU_TIME));
|
||||
assertTrue(snapshot.getLong(PerformanceCollector.METRIC_KEY_CPU_TIME) > 0);
|
||||
assertTrue(snapshot.containsKey(PerformanceCollector.METRIC_KEY_EXECUTION_TIME));
|
||||
assertTrue(snapshot.getLong(PerformanceCollector.METRIC_KEY_EXECUTION_TIME) > 0);
|
||||
|
||||
assertTrue(snapshot.containsKey(
|
||||
PerformanceCollector.METRIC_KEY_PRE_RECEIVED_TRANSACTIONS));
|
||||
assertTrue(snapshot.containsKey(PerformanceCollector.METRIC_KEY_PRE_SENT_TRANSACTIONS));
|
||||
assertTrue(snapshot.containsKey(PerformanceCollector.METRIC_KEY_RECEIVED_TRANSACTIONS));
|
||||
assertTrue(snapshot.containsKey(PerformanceCollector.METRIC_KEY_SENT_TRANSACTIONS));
|
||||
assertTrue(snapshot.containsKey(PerformanceCollector.METRIC_KEY_GC_INVOCATION_COUNT));
|
||||
|
||||
assertTrue(snapshot.containsKey(PerformanceCollector.METRIC_KEY_JAVA_ALLOCATED));
|
||||
assertTrue(snapshot.getLong(PerformanceCollector.METRIC_KEY_JAVA_ALLOCATED) > 0);
|
||||
assertTrue(snapshot.containsKey(PerformanceCollector.METRIC_KEY_JAVA_FREE));
|
||||
assertTrue(snapshot.getLong(PerformanceCollector.METRIC_KEY_JAVA_FREE) > 0);
|
||||
assertTrue(snapshot.containsKey(PerformanceCollector.METRIC_KEY_JAVA_PRIVATE_DIRTY));
|
||||
assertTrue(snapshot.getLong(PerformanceCollector.METRIC_KEY_JAVA_PRIVATE_DIRTY) > 0);
|
||||
assertTrue(snapshot.containsKey(PerformanceCollector.METRIC_KEY_JAVA_PSS));
|
||||
assertTrue(snapshot.getLong(PerformanceCollector.METRIC_KEY_JAVA_PSS) > 0);
|
||||
assertTrue(snapshot.containsKey(PerformanceCollector.METRIC_KEY_JAVA_SHARED_DIRTY));
|
||||
assertTrue(snapshot.getLong(PerformanceCollector.METRIC_KEY_JAVA_SHARED_DIRTY) > 0);
|
||||
assertTrue(snapshot.containsKey(PerformanceCollector.METRIC_KEY_JAVA_SIZE));
|
||||
assertTrue(snapshot.getLong(PerformanceCollector.METRIC_KEY_JAVA_SIZE) > 0);
|
||||
assertTrue(snapshot.containsKey(PerformanceCollector.METRIC_KEY_NATIVE_ALLOCATED));
|
||||
assertTrue(snapshot.getLong(PerformanceCollector.METRIC_KEY_NATIVE_ALLOCATED) > 0);
|
||||
assertTrue(snapshot.containsKey(PerformanceCollector.METRIC_KEY_NATIVE_FREE));
|
||||
assertTrue(snapshot.getLong(PerformanceCollector.METRIC_KEY_NATIVE_FREE) > 0);
|
||||
assertTrue(snapshot.containsKey(PerformanceCollector.METRIC_KEY_NATIVE_PRIVATE_DIRTY));
|
||||
assertTrue(snapshot.getLong(PerformanceCollector.METRIC_KEY_NATIVE_PRIVATE_DIRTY) > 0);
|
||||
assertTrue(snapshot.containsKey(PerformanceCollector.METRIC_KEY_NATIVE_PSS));
|
||||
assertTrue(snapshot.getLong(PerformanceCollector.METRIC_KEY_NATIVE_PSS) > 0);
|
||||
assertTrue(snapshot.containsKey(PerformanceCollector.METRIC_KEY_NATIVE_SHARED_DIRTY));
|
||||
assertTrue(snapshot.getLong(PerformanceCollector.METRIC_KEY_NATIVE_SHARED_DIRTY) > 0);
|
||||
assertTrue(snapshot.containsKey(PerformanceCollector.METRIC_KEY_NATIVE_SIZE));
|
||||
assertTrue(snapshot.getLong(PerformanceCollector.METRIC_KEY_NATIVE_SIZE) > 0);
|
||||
assertTrue(snapshot.containsKey(PerformanceCollector.METRIC_KEY_GLOBAL_ALLOC_COUNT));
|
||||
assertTrue(snapshot.getLong(PerformanceCollector.METRIC_KEY_GLOBAL_ALLOC_COUNT) > 0);
|
||||
assertTrue(snapshot.containsKey(PerformanceCollector.METRIC_KEY_GLOBAL_ALLOC_SIZE));
|
||||
assertTrue(snapshot.getLong(PerformanceCollector.METRIC_KEY_GLOBAL_ALLOC_SIZE) > 0);
|
||||
assertTrue(snapshot.containsKey(PerformanceCollector.METRIC_KEY_GLOBAL_FREED_COUNT));
|
||||
assertTrue(snapshot.getLong(PerformanceCollector.METRIC_KEY_GLOBAL_FREED_COUNT) > 0);
|
||||
assertTrue(snapshot.containsKey(PerformanceCollector.METRIC_KEY_GLOBAL_FREED_SIZE));
|
||||
assertTrue(snapshot.getLong(PerformanceCollector.METRIC_KEY_GLOBAL_FREED_SIZE) > 0);
|
||||
assertTrue(snapshot.containsKey(PerformanceCollector.METRIC_KEY_OTHER_PRIVATE_DIRTY));
|
||||
assertTrue(snapshot.getLong(PerformanceCollector.METRIC_KEY_OTHER_PRIVATE_DIRTY) > 0);
|
||||
assertTrue(snapshot.containsKey(PerformanceCollector.METRIC_KEY_OTHER_PSS));
|
||||
assertTrue(snapshot.getLong(PerformanceCollector.METRIC_KEY_OTHER_PSS) > 0);
|
||||
assertTrue(snapshot.containsKey(PerformanceCollector.METRIC_KEY_OTHER_SHARED_DIRTY));
|
||||
assertTrue(snapshot.getLong(PerformanceCollector.METRIC_KEY_OTHER_SHARED_DIRTY) > 0);
|
||||
}
|
||||
|
||||
private void verifyIterationBundle(Bundle iteration, String label) {
|
||||
assertEquals(3, iteration.size());
|
||||
assertTrue(iteration.containsKey(PerformanceCollector.METRIC_KEY_LABEL));
|
||||
assertEquals(label, iteration.getString(PerformanceCollector.METRIC_KEY_LABEL));
|
||||
assertTrue(iteration.containsKey(PerformanceCollector.METRIC_KEY_CPU_TIME));
|
||||
assertTrue(iteration.getLong(PerformanceCollector.METRIC_KEY_CPU_TIME) > 0);
|
||||
assertTrue(iteration.containsKey(PerformanceCollector.METRIC_KEY_EXECUTION_TIME));
|
||||
assertTrue(iteration.getLong(PerformanceCollector.METRIC_KEY_EXECUTION_TIME) > 0);
|
||||
}
|
||||
|
||||
private void verifyTimingBundle(Bundle timing, ArrayList<String> labels) {
|
||||
assertEquals(1, timing.size());
|
||||
assertTrue(timing.containsKey(PerformanceCollector.METRIC_KEY_ITERATIONS));
|
||||
ArrayList<Parcelable> iterations = timing.getParcelableArrayList(
|
||||
PerformanceCollector.METRIC_KEY_ITERATIONS);
|
||||
assertNotNull(iterations);
|
||||
assertEquals(labels.size(), iterations.size());
|
||||
for (int i = 0; i < labels.size(); i ++) {
|
||||
Bundle iteration = (Bundle)iterations.get(i);
|
||||
verifyIterationBundle(iteration, labels.get(i));
|
||||
}
|
||||
}
|
||||
|
||||
private Object readPrivateField(String fieldName, Object object) throws Exception {
|
||||
Field f = object.getClass().getDeclaredField(fieldName);
|
||||
f.setAccessible(true);
|
||||
return f.get(object);
|
||||
}
|
||||
|
||||
private class MockPerformanceResultsWriter implements PerformanceResultsWriter {
|
||||
|
||||
public String snapshotLabel;
|
||||
public Bundle snapshotResults = new Bundle();
|
||||
public String timingLabel;
|
||||
public Bundle timingResults = new Bundle();
|
||||
|
||||
public void writeBeginSnapshot(String label) {
|
||||
snapshotLabel = label;
|
||||
}
|
||||
|
||||
public void writeEndSnapshot(Bundle results) {
|
||||
snapshotResults = results;
|
||||
}
|
||||
|
||||
public void writeStartTiming(String label) {
|
||||
timingLabel = label;
|
||||
}
|
||||
|
||||
public void writeStopTiming(Bundle results) {
|
||||
timingResults = results;
|
||||
}
|
||||
}
|
||||
}
|
Reference in New Issue
Block a user