[Re-land] Remove scripts related to iorap

This reverts commit 59a56500c0c8171427852a0ff95de9622807745c.

- Iorap is being removed, so the related scripts need to be removed, too.

Bug: 214108410
Test: build okay
Change-Id: I560a66eec4050d60b74ad325328128cd3e299662
Merged-In: I560a66eec4050d60b74ad325328128cd3e299662
(cherry picked from commit 25adac3ba6bf88d8966e592e0cd68c6923c913c4)
Eric Jeong 2022-02-14 11:43:28 -08:00
parent 540cc1fa17
commit c8f7613bbd
66 changed files with 0 additions and 9765 deletions


@@ -1,457 +0,0 @@
#!/usr/bin/env python3
#
# Copyright 2018, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Perform statistical analysis on measurements produced by app_startup_runner.py
Install:
$> sudo apt-get install python3-scipy
Usage:
$> ./analyze_metrics.py <filename.csv> [<filename2.csv> ...]
$> ./analyze_metrics.py --help
"""
import argparse
import csv
import itertools
import os
import subprocess
import sys
import tempfile
from typing import Any, List, Dict, Iterable, TextIO, Tuple
from scipy import stats as sc
import numpy as np
# These CSV columns are considered labels. Everything after them in the same row is a metric.
_LABEL_COLUMNS=['packages', 'readaheads', 'compiler_filters']
# The metric series with the 'cold' readahead is the baseline.
# All others (warm, jit, etc) are the potential improvements.
#fixme: this should probably be an option
_BASELINE=('readaheads', 'cold')
# ignore this for some statistic calculations
_IGNORE_PAIR=('readaheads', 'warm')
_PLOT_SUBKEY='readaheads'
_PLOT_GROUPKEY='packages'
_PLOT_DATA_INDEX = 0
_DELTA=50
_DELTA2=100
_PVALUE_THRESHOLD=0.10
_debug = False # See -d/--debug flag.
def parse_options(argv: List[str] = None):
"""Parse command line arguments and return an argparse Namespace object."""
parser = argparse.ArgumentParser(description="Perform statistical analysis on measurements produced by app_startup_runner.py.")
parser.add_argument('input_files', metavar='file.csv', nargs='+', help='CSV file produced by app_startup_runner.py')
parser.add_argument('-d', '--debug', dest='debug', action='store_true', help='Add extra debugging output')
parser.add_argument('-os', '--output-samples', dest='output_samples', default='/dev/null', action='store', help='Store CSV for per-sample data')
parser.add_argument('-oc', '--output-comparable', dest='output_comparable', default='/dev/null', action='store', help='Output CSV for comparable against baseline')
parser.add_argument('-ocs', '--output-comparable-significant', dest='output_comparable_significant', default='/dev/null', action='store', help='Output CSV for comparable against baseline (significant only)')
parser.add_argument('-pt', '--pvalue-threshold', dest='pvalue_threshold', type=float, default=_PVALUE_THRESHOLD, action='store')
parser.add_argument('-dt', '--delta-threshold', dest='delta_threshold', type=int, default=_DELTA, action='store')
return parser.parse_args(argv)
def _debug_print(*args, **kwargs):
"""Print the args to sys.stderr if the --debug/-d flag was passed in."""
global _debug
if _debug:
print(*args, **kwargs, file=sys.stderr)
def _expand_gen_repr(args):
new_args_list = []
for i in args:
# detect iterable objects that do not have their own override of __str__
if hasattr(i, '__iter__'):
to_str = getattr(i, '__str__')
if to_str.__objclass__ == object:
# the repr for a generator is just type+address, expand it out instead.
new_args_list.append([_expand_gen_repr([j])[0] for j in i])
continue
# normal case: uses the built-in to-string
new_args_list.append(i)
return new_args_list
def _debug_print_gen(*args, **kwargs):
"""Like _debug_print but will turn any iterable args into a list."""
if not _debug:
return
new_args_list = _expand_gen_repr(args)
_debug_print(*new_args_list, **kwargs)
def read_headers(input_file: TextIO) -> Tuple[List[str], List[str]]:
_debug_print("read_headers for file: ", input_file.name)
csv_reader = csv.reader(input_file)
label_num_columns = len(_LABEL_COLUMNS)
try:
header = next(csv_reader)
except StopIteration:
header = None
_debug_print('header', header)
if not header:
return (None, None)
labels = header[0:label_num_columns]
data = header[label_num_columns:]
return (labels, data)
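# For example (hypothetical header row): given
#   packages,readaheads,compiler_filters,TotalTime_ms,Displayed_ms
# this returns (['packages', 'readaheads', 'compiler_filters'],
# ['TotalTime_ms', 'Displayed_ms']), since _LABEL_COLUMNS has three entries.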
def read_labels_and_data(input_file: TextIO) -> Iterable[Tuple[List[str], List[int]]]:
_debug_print("print_analysis for file: ", input_file.name)
csv_reader = csv.reader(input_file)
# Skip the header because it doesn't contain any data.
# To get the header see read_headers function.
try:
header = next(csv_reader)
except StopIteration:
header = None
label_num_columns = len(_LABEL_COLUMNS)
for row in csv_reader:
if len(row) > 0 and row[0][0] == ';':
_debug_print("skip comment line", row)
continue
labels = row[0:label_num_columns]
data = [int(i) for i in row[label_num_columns:]]
# _debug_print("labels:", labels)
# _debug_print("data:", data)
yield (labels, data)
def group_metrics_by_label(it: Iterable[Tuple[List[str], List[int]]]):
prev_labels = None
data_2d = []
for label_list, data_list in it:
if prev_labels != label_list:
if prev_labels:
# _debug_print("grouped labels:", prev_labels, "data_2d:", data_2d)
yield (prev_labels, data_2d)
data_2d = []
data_2d.append(data_list)
prev_labels = label_list
if prev_labels:
# _debug_print("grouped labels:", prev_labels, "data_2d:", data_2d)
yield (prev_labels, data_2d)
def data_to_numpy(it: Iterable[Tuple[List[str], List[List[int]]]]) -> Iterable[Tuple[List[str], Any]]:
for label_list, data_2d in it:
yield (label_list, np.asarray(data_2d, dtype=int))
def iterate_columns(np_data_2d):
for col in range(np_data_2d.shape[1]):
col_as_array = np_data_2d[:, col]
yield col_as_array
def confidence_interval(np_data_2d, percent=0.95):
"""
Given some data [[a,b,c],[d,e,f],...]
we assume each column (e.g. [a,d]) holds samples of the same metric,
and each row (e.g. [a,b,c]) is one sample across the different metrics.
We then calculate the CI for each metric individually, returning it as a list of tuples.
"""
arr = []
for col_2d in iterate_columns(np_data_2d):
mean = col_2d.mean()
sigma = col_2d.std()
ci = sc.norm.interval(percent, loc=mean, scale=sigma / np.sqrt(len(col_2d)))
arr.append(ci)
# TODO: This seems to be returning NaN when all the samples have the same exact value
# (e.g. stddev=0, which can trivially happen when sample count = 1).
return arr
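# Example (hypothetical numbers): for a metric column with samples [100, 110],
# mean=105 and sigma=5, so the 95% normal interval is roughly
# 105 +/- 1.96 * 5 / sqrt(2), i.e. about (98.1, 111.9).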
def print_analysis(it, label_header: List[str], data_header: List[str], output_samples: str):
print(label_header)
with open(output_samples, "w") as output_file:
csv_writer = csv.writer(output_file)
csv_writer.writerow(label_header + ['mean', 'std', 'confidence_interval_a', 'confidence_interval_b'])
for label_list, np_data_2d in it:
print("**********************")
print(label_list)
print()
print(" ", data_header)
# aggregate computation column-wise
print("Mean: ", np_data_2d.mean(axis=0))
print("Std: ", np_data_2d.std(axis=0))
print("CI95%:", confidence_interval(np_data_2d))
print("SEM: ", stats_standard_error_one(np_data_2d, axis=0))
#ci = confidence_interval(np_data_2d)[_PLOT_DATA_INDEX]
sem = stats_standard_error_one(np_data_2d, axis=0)[_PLOT_DATA_INDEX]
mean = np_data_2d.mean(axis=0)[_PLOT_DATA_INDEX]
ci = (mean - sem, mean + sem)
csv_writer.writerow(label_list + [mean, np_data_2d.std(axis=0)[_PLOT_DATA_INDEX], ci[0], ci[1]])
def from_file_group_by_labels(input_file):
(label_header, data_header) = read_headers(input_file)
label_data_iter = read_labels_and_data(input_file)
grouped_iter = group_metrics_by_label(label_data_iter)
grouped_numpy_iter = data_to_numpy(grouped_iter)
return grouped_numpy_iter, label_header, data_header
def list_without_index(list, index):
return list[:index] + list[index+1:]
def group_by_without_baseline_key(grouped_numpy_iter, label_header):
"""
Data is considered comparable if the only difference is the baseline key
(i.e. the readahead is different but the package, compilation filter, etc, are the same).
Returns iterator that's grouped by the non-baseline labels to an iterator of
(label_list, data_2d).
"""
baseline_index = label_header.index(_BASELINE[0])
def get_label_without_baseline(tpl):
label_list, _ = tpl
return list_without_index(label_list, baseline_index)
# [['pkgname', 'compfilter', 'warm'], [data]]
# [['pkgname', 'compfilter', 'cold'], [data2]]
# [['pkgname2', 'compfilter', 'warm'], [data3]]
#
# ->
# ( [['pkgname', 'compfilter', 'warm'], [data]] # ignore baseline label change.
# [['pkgname', 'compfilter', 'cold'], [data2]] ), # split here because the pkgname changed.
# ( [['pkgname2', 'compfilter', 'warm'], [data3]] )
for group_info, it in itertools.groupby(grouped_numpy_iter, key = get_label_without_baseline):
yield it
# TODO: replace this messy manual iteration/grouping with pandas
def iterate_comparable_metrics(without_baseline_iter, label_header):
baseline_index = label_header.index(_BASELINE[0])
baseline_value = _BASELINE[1]
_debug_print("iterate comparables")
def is_baseline_fun(tp):
ll, dat = tp
return ll[baseline_index] == baseline_value
# iterating here when everything but the baseline key is the same.
for it in without_baseline_iter:
it1, it2 = itertools.tee(it)
# find all the baseline data.
baseline_filter_it = filter(is_baseline_fun, it1)
# find non-baseline data.
nonbaseline_filter_it = itertools.filterfalse(is_baseline_fun, it2)
yield itertools.product(baseline_filter_it, nonbaseline_filter_it)
def stats_standard_error_one(a, axis):
a_std = a.std(axis=axis, ddof=0)
a_len = a.shape[axis]
return a_std / np.sqrt(a_len)
def stats_standard_error(a, b, axis):
a_std = a.std(axis=axis, ddof=0)
b_std = b.std(axis=axis, ddof=0)
a_len = a.shape[axis]
b_len = b.shape[axis]
temp1 = a_std*a_std/a_len
temp2 = b_std*b_std/b_len
return np.sqrt(temp1 + temp2)
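# This is the unpooled (Welch-style) standard error of the mean difference:
#   sqrt(std_a**2 / len_a + std_b**2 / len_b)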
def stats_tvalue(a, b, axis, delta = 0):
a_mean = a.mean(axis=axis)
b_mean = b.mean(axis=axis)
return (a_mean - b_mean - delta) / stats_standard_error(a, b, axis)
def stats_pvalue(a, b, axis, delta, left:bool = False):
"""
Single-tailed 2-sample t-test.
Returns p-value for the null hypothesis: mean(a) - mean(b) >= delta.
:param a: numpy 2d array
:param b: numpy 2d array
:param axis: which axis to do the calculations across
:param delta: test value of mean differences
:param left: if true then use <= delta instead of >= delta
:return: p-value
"""
# implement our own pvalue calculation because the built-in t-test (t,p values)
# only offers delta=0, e.g. m1-m2 ? 0
# we are however interested in m1-m2 ? delta
t_value = stats_tvalue(a, b, axis, delta)
# 2-sample degrees of freedom is using the array sizes - 2.
dof = a.shape[axis] + b.shape[axis] - 2
if left:
# left tailed test. e.g. m1-m2 <= delta
return sc.t.cdf(t_value, dof)
else:
# right tailed test. e.g. m1-m2 >= delta
return sc.t.sf(t_value, dof)
# a left+right tailed test is a 2-tail t-test and can be done using ttest_ind for delta=0
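# Example (hypothetical samples): with a = baseline cold-start times and
# b = candidate times,
#   stats_pvalue(a, b, axis=0, delta=50)
# is small only when mean(a) - mean(b) exceeds 50 by more than the sampling
# noise, i.e. when the candidate is at least ~50ms faster than the baseline.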
def print_comparable_analysis(comparable_metrics_iter, label_header, data_header, output_comparable: str, output_comparable_significant: str):
baseline_value = _BASELINE[1]
baseline_index = label_header.index(_BASELINE[0])
old_baseline_label_list = None
delta = _DELTA
filter_value = _IGNORE_PAIR[1]
filter_index = label_header.index(_IGNORE_PAIR[0])
pvalue_threshold = _PVALUE_THRESHOLD
ci_threshold = (1 - _PVALUE_THRESHOLD) * 100.0
with open(output_comparable, "w") as output_file:
csv_writer = csv.writer(output_file)
csv_writer.writerow(label_header + ['mean', 'mean_diff', 'sem', 'pvalue_2tailed', 'pvalue_gt%d' %(_DELTA), 'pvalue_gt%d' %(_DELTA2)])
print("------------------------------------------------------------------")
print("Comparison against the baseline %s = %s" %(_BASELINE, baseline_value))
print("--- Right-tailed t-test checks if the baseline >= current %s by at least %d" %(_BASELINE[0], delta))
print()
global_stats = {'better_than_delta': [], 'better_than_delta_p95': []}
for nested_it in comparable_metrics_iter:
print("************************")
better_than_delta = []
better_than_delta_p95 = []
saw_baseline_once = False
for ((baseline_label_list, baseline_np_data_2d), (rest_label_list, rest_np_data_2d)) in nested_it:
_debug_print("baseline_label_list:", baseline_label_list)
_debug_print("baseline_np_data_2d:", baseline_np_data_2d)
_debug_print("rest_label_list:", rest_label_list)
_debug_print("rest_np_data_2d:", rest_np_data_2d)
mean_diff = baseline_np_data_2d.mean(axis=0) - rest_np_data_2d.mean(axis=0)
# 2-sample 2-tailed t-test with delta=0
# e.g. "Is it true that usually the two sample means are different?"
t_statistic, t_pvalue = sc.ttest_ind(baseline_np_data_2d, rest_np_data_2d, axis=0)
# 2-sample 1-tailed t-test with delta=50
# e.g. "Is it true that usually the sample means better than 50ms?"
t2 = stats_tvalue(baseline_np_data_2d, rest_np_data_2d, axis=0, delta=delta)
p2 = stats_pvalue(baseline_np_data_2d, rest_np_data_2d, axis=0, delta=delta)
t2_b = stats_tvalue(baseline_np_data_2d, rest_np_data_2d, axis=0, delta=_DELTA2)
p2_b = stats_pvalue(baseline_np_data_2d, rest_np_data_2d, axis=0, delta=_DELTA2)
print("%s vs %s" %(rest_label_list, baseline_value))
print(" ", data_header)
print("Mean Difference: ", mean_diff)
print("T-test (2-tailed) != 0: t=%s, p=%s" %(t_statistic, t_pvalue))
print("T-test (right-tailed) >= %d: t=%s, p=%s" %(_DELTA, t2, p2))
print("T-test (right-tailed) >= %d: t=%s, p=%s" %(_DELTA2, t2_b, p2_b))
def write_out_values(label_list, *args):
csv_writer.writerow(label_list + [i[_PLOT_DATA_INDEX] for i in args])
sem = stats_standard_error(baseline_np_data_2d, rest_np_data_2d, axis=0)
if saw_baseline_once == False:
saw_baseline_once = True
base_sem = stats_standard_error_one(baseline_np_data_2d, axis=0)
write_out_values(baseline_label_list, baseline_np_data_2d.mean(axis=0), [0], base_sem, [None], [None], [None])
write_out_values(rest_label_list, rest_np_data_2d.mean(axis=0), mean_diff, sem, t_pvalue, p2, p2_b)
# now do the global statistics aggregation
if rest_label_list[filter_index] == filter_value:
continue
if mean_diff > delta:
better_than_delta.append((mean_diff, p2, rest_label_list))
if p2 <= pvalue_threshold:
better_than_delta_p95.append((mean_diff, rest_label_list))
if better_than_delta:
global_stats['better_than_delta'].append(better_than_delta)
if better_than_delta_p95:
global_stats['better_than_delta_p95'].append(better_than_delta_p95)
print("------------------------")
print("Global statistics:")
print("//// Rows with %s=%s are ignored here." %_IGNORE_PAIR)
print("- # of results with mean diff better than delta(%d) = %d" %(delta, len(global_stats['better_than_delta'])))
print(" > (meandiff, pvalue, labels)")
for i in global_stats['better_than_delta']:
print(" > %s" %i)
print("- # of results with mean diff better than delta(%d) CI%d%% = %d" %(delta, ci_threshold, len(global_stats['better_than_delta_p95'])))
print(" > (meandiff, labels)")
for i in global_stats['better_than_delta_p95']:
print(" > %s" %i)
def main():
global _debug
global _DELTA
global _PVALUE_THRESHOLD
opts = parse_options()
_debug = opts.debug
_debug_print("parsed options: ", opts)
_PVALUE_THRESHOLD = opts.pvalue_threshold or _PVALUE_THRESHOLD
for file_name in opts.input_files:
with open(file_name, 'r') as input_file:
(grouped_numpy_iter, label_header, data_header) = from_file_group_by_labels(input_file)
print_analysis(grouped_numpy_iter, label_header, data_header, opts.output_samples)
with open(file_name, 'r') as input_file:
(grouped_numpy_iter, label_header, data_header) = from_file_group_by_labels(input_file)
without_baseline_iter = group_by_without_baseline_key(grouped_numpy_iter, label_header)
#_debug_print_gen(without_baseline_iter)
comparable_metrics_iter = iterate_comparable_metrics(without_baseline_iter, label_header)
print_comparable_analysis(comparable_metrics_iter, label_header, data_header, opts.output_comparable, opts.output_comparable_significant)
return 0
if __name__ == '__main__':
sys.exit(main())


@@ -1,393 +0,0 @@
#!/usr/bin/env python3
#
# Copyright 2018, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# Measure application start-up time by launching applications under various combinations.
# See --help for more details.
#
#
# Sample usage:
# $> ./app_startup_runner.py -p com.google.android.calculator -r warm -r cold -lc 10 -o out.csv
# $> ./analyze_metrics.py out.csv
#
#
import argparse
import csv
import itertools
import os
import sys
import tempfile
from datetime import timedelta
from typing import Any, Callable, Iterable, List, NamedTuple, TextIO, Tuple, \
TypeVar, Union, Optional
# local import
DIR = os.path.abspath(os.path.dirname(__file__))
sys.path.append(os.path.dirname(DIR))
import lib.cmd_utils as cmd_utils
import lib.print_utils as print_utils
from app_startup.run_app_with_prefetch import PrefetchAppRunner
import app_startup.lib.args_utils as args_utils
from app_startup.lib.data_frame import DataFrame
from app_startup.lib.perfetto_trace_collector import PerfettoTraceCollector
from iorap.compiler import CompilerType
import iorap.compiler as compiler
# The following command line options participate in the combinatorial generation.
# All other arguments have a global effect.
_COMBINATORIAL_OPTIONS = ['package', 'readahead', 'compiler_filter',
'activity', 'trace_duration']
_TRACING_READAHEADS = ['mlock', 'fadvise']
_FORWARD_OPTIONS = {'loop_count': '--count'}
_RUN_SCRIPT = os.path.join(os.path.dirname(os.path.realpath(__file__)),
'run_app_with_prefetch.py')
CollectorPackageInfo = NamedTuple('CollectorPackageInfo',
[('package', str), ('compiler_filter', str)])
# by 2; systrace starts up slowly.
_UNLOCK_SCREEN_SCRIPT = os.path.join(
os.path.dirname(os.path.realpath(__file__)), 'unlock_screen')
RunCommandArgs = NamedTuple('RunCommandArgs',
[('package', str),
('readahead', str),
('activity', Optional[str]),
('compiler_filter', Optional[str]),
('timeout', Optional[int]),
('debug', bool),
('simulate', bool),
('input', Optional[str]),
('trace_duration', Optional[timedelta])])
# This must be the only mutable global variable. All other global variables are constants to avoid magic literals.
_debug = False # See -d/--debug flag.
_DEBUG_FORCE = None # Ignore -d/--debug if this is not none.
_PERFETTO_TRACE_DURATION_MS = 5000 # milliseconds
_PERFETTO_TRACE_DURATION = timedelta(milliseconds=_PERFETTO_TRACE_DURATION_MS)
# Type hinting names.
T = TypeVar('T')
NamedTupleMeta = Callable[
..., T] # approximation of a (S : NamedTuple<T> where S() == T) metatype.
def parse_options(argv: List[str] = None):
"""Parse command line arguments and return an argparse Namespace object."""
parser = argparse.ArgumentParser(description="Run one or more Android "
"applications under various "
"settings in order to measure "
"startup time.")
# argparse considers args starting with - and -- optional in --help, even though required=True.
# by using a named argument group --help will clearly say that it's required instead of optional.
required_named = parser.add_argument_group('required named arguments')
required_named.add_argument('-p', '--package', action='append',
dest='packages',
help='package of the application', required=True)
required_named.add_argument('-r', '--readahead', action='append',
dest='readaheads',
help='which readahead mode to use',
choices=('warm', 'cold', 'mlock', 'fadvise'),
required=True)
# optional arguments
# use a group here to get the required arguments to appear 'above' the optional arguments in help.
optional_named = parser.add_argument_group('optional named arguments')
optional_named.add_argument('-c', '--compiler-filter', action='append',
dest='compiler_filters',
help='which compiler filter to use. if omitted it does not enforce the app\'s compiler filter',
choices=('speed', 'speed-profile', 'quicken'))
optional_named.add_argument('-s', '--simulate', dest='simulate',
action='store_true',
help='Print which commands will run, but don\'t run the apps')
optional_named.add_argument('-d', '--debug', dest='debug',
action='store_true',
help='Add extra debugging output')
optional_named.add_argument('-o', '--output', dest='output', action='store',
help='Write CSV output to file.')
optional_named.add_argument('-t', '--timeout', dest='timeout', action='store',
type=int, default=10,
help='Timeout after this many seconds when executing a single run.')
optional_named.add_argument('-lc', '--loop-count', dest='loop_count',
default=1, type=int, action='store',
help='How many times to loop a single run.')
optional_named.add_argument('-in', '--inodes', dest='inodes', type=str,
action='store',
help='Path to inodes file (system/extras/pagecache/pagecache.py -d inodes)')
optional_named.add_argument('--compiler-trace-duration-ms',
dest='trace_duration',
type=lambda ms_str: timedelta(milliseconds=int(ms_str)),
action='append',
help='The trace duration (milliseconds) in '
'compilation')
optional_named.add_argument('--compiler-type', dest='compiler_type',
type=CompilerType, choices=list(CompilerType),
default=CompilerType.DEVICE,
help='The type of compiler.')
return parser.parse_args(argv)
def key_to_cmdline_flag(key: str) -> str:
"""Convert key into a command line flag, e.g. 'foo-bars' -> '--foo-bar' """
if key.endswith("s"):
key = key[:-1]
return "--" + key.replace("_", "-")
def as_run_command(tpl: NamedTuple) -> List[Union[str, Any]]:
"""
Convert a named tuple into a command-line compatible arguments list.
Example: ABC(1, 2, 3) -> ['--a', 1, '--b', 2, '--c', 3]
"""
args = []
for key, value in tpl._asdict().items():
if value is None:
continue
args.append(key_to_cmdline_flag(key))
args.append(value)
return args
def run_perfetto_collector(collector_info: CollectorPackageInfo,
timeout: int,
simulate: bool) -> Tuple[bool, TextIO]:
"""Run collector to collect prefetching trace.
Returns:
A tuple of whether the collection succeeds and the generated trace file.
"""
tmp_output_file = tempfile.NamedTemporaryFile()
collector = PerfettoTraceCollector(package=collector_info.package,
activity=None,
compiler_filter=collector_info.compiler_filter,
timeout=timeout,
simulate=simulate,
trace_duration=_PERFETTO_TRACE_DURATION,
save_destination_file_path=tmp_output_file.name)
result = collector.run()
return result is not None, tmp_output_file
def parse_run_script_csv_file(csv_file: TextIO) -> DataFrame:
"""Parse a CSV file full of integers into a DataFrame."""
csv_reader = csv.reader(csv_file)
try:
header_list = next(csv_reader)
except StopIteration:
header_list = []
if not header_list:
return None
headers = [i for i in header_list]
d = {}
for row in csv_reader:
header_idx = 0
for i in row:
v = i
if i:
v = int(i)
header_key = headers[header_idx]
l = d.get(header_key, [])
l.append(v)
d[header_key] = l
header_idx = header_idx + 1
return DataFrame(d)
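# For example, a run-script CSV containing
#   TotalTime_ms,Displayed_ms
#   1,2
# parses into DataFrame({'TotalTime_ms': [1], 'Displayed_ms': [2]})
# (see app_startup_runner_test.py).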
def build_ri_compiler_argv(inodes_path: str,
perfetto_trace_file: str,
trace_duration: Optional[timedelta]
) -> str:
argv = ['-i', inodes_path, '--perfetto-trace',
perfetto_trace_file]
if trace_duration is not None:
argv += ['--duration', str(int(trace_duration.total_seconds()
* PerfettoTraceCollector.MS_PER_SEC))]
print_utils.debug_print(argv)
return argv
def execute_run_using_perfetto_trace(collector_info,
run_combos: Iterable[RunCommandArgs],
simulate: bool,
inodes_path: str,
timeout: int,
compiler_type: CompilerType,
requires_trace_collection: bool) -> DataFrame:
""" Executes run based on perfetto trace. """
if requires_trace_collection:
passed, perfetto_trace_file = run_perfetto_collector(collector_info,
timeout,
simulate)
if not passed:
raise RuntimeError('Cannot run perfetto collector!')
else:
perfetto_trace_file = tempfile.NamedTemporaryFile()
with perfetto_trace_file:
for combos in run_combos:
if combos.readahead in _TRACING_READAHEADS:
if simulate:
compiler_trace_file = tempfile.NamedTemporaryFile()
else:
ri_compiler_argv = build_ri_compiler_argv(inodes_path,
perfetto_trace_file.name,
combos.trace_duration)
compiler_trace_file = compiler.compile(compiler_type,
inodes_path,
ri_compiler_argv,
combos.package,
combos.activity)
with compiler_trace_file:
combos = combos._replace(input=compiler_trace_file.name)
print_utils.debug_print(combos)
output = PrefetchAppRunner(**combos._asdict()).run()
else:
print_utils.debug_print(combos)
output = PrefetchAppRunner(**combos._asdict()).run()
yield DataFrame(dict((x, [y]) for x, y in output)) if output else None
def execute_run_combos(
grouped_run_combos: Iterable[Tuple[CollectorPackageInfo, Iterable[RunCommandArgs]]],
simulate: bool,
inodes_path: str,
timeout: int,
compiler_type: CompilerType,
requires_trace_collection: bool):
# nothing will work if the screen isn't unlocked first.
cmd_utils.execute_arbitrary_command([_UNLOCK_SCREEN_SCRIPT],
timeout,
simulate=simulate,
shell=False)
for collector_info, run_combos in grouped_run_combos:
yield from execute_run_using_perfetto_trace(collector_info,
run_combos,
simulate,
inodes_path,
timeout,
compiler_type,
requires_trace_collection)
def gather_results(commands: Iterable[Tuple[DataFrame]],
key_list: List[str], value_list: List[Tuple[str, ...]]):
print_utils.debug_print("gather_results: key_list = ", key_list)
stringify_none = lambda s: s is None and "<none>" or s
# yield key_list + ["time(ms)"]
for (run_result_list, values) in itertools.zip_longest(commands, value_list):
print_utils.debug_print("run_result_list = ", run_result_list)
print_utils.debug_print("values = ", values)
if not run_result_list:
continue
# RunCommandArgs(package='com.whatever', readahead='warm', compiler_filter=None)
# -> {'package':['com.whatever'], 'readahead':['warm'], 'compiler_filter':[None]}
values_dict = {}
for k, v in values._asdict().items():
if not k in key_list:
continue
values_dict[k] = [stringify_none(v)]
values_df = DataFrame(values_dict)
# project 'values_df' to be same number of rows as run_result_list.
values_df = values_df.repeat(run_result_list.data_row_len)
# the results are added as right-hand-side columns onto the existing labels for the table.
values_df.merge_data_columns(run_result_list)
yield values_df
def eval_and_save_to_csv(output, annotated_result_values):
printed_header = False
csv_writer = csv.writer(output)
for row in annotated_result_values:
if not printed_header:
headers = row.headers
csv_writer.writerow(headers)
printed_header = True
# TODO: what about when headers change?
for data_row in row.data_table:
data_row = [d for d in data_row]
csv_writer.writerow(data_row)
output.flush() # see the output live.
def coerce_to_list(opts: dict):
"""Tranform values of the dictionary to list.
For example:
1 -> [1], None -> [None], [1,2,3] -> [1,2,3]
[[1],[2]] -> [[1],[2]], {1:1, 2:2} -> [{1:1, 2:2}]
"""
result = {}
for key in opts:
val = opts[key]
result[key] = val if issubclass(type(val), list) else [val]
return result
def main():
global _debug
opts = parse_options()
_debug = opts.debug
if _DEBUG_FORCE is not None:
_debug = _DEBUG_FORCE
print_utils.DEBUG = _debug
cmd_utils.SIMULATE = opts.simulate
print_utils.debug_print("parsed options: ", opts)
output_file = opts.output and open(opts.output, 'w') or sys.stdout
combos = lambda: args_utils.generate_run_combinations(
RunCommandArgs,
coerce_to_list(vars(opts)),
opts.loop_count)
print_utils.debug_print_gen("run combinations: ", combos())
grouped_combos = lambda: args_utils.generate_group_run_combinations(combos(),
CollectorPackageInfo)
print_utils.debug_print_gen("grouped run combinations: ", grouped_combos())
requires_trace_collection = any(i in _TRACING_READAHEADS for i in opts.readaheads)
exec = execute_run_combos(grouped_combos(),
opts.simulate,
opts.inodes,
opts.timeout,
opts.compiler_type,
requires_trace_collection)
results = gather_results(exec, _COMBINATORIAL_OPTIONS, combos())
eval_and_save_to_csv(output_file, results)
return 1
if __name__ == '__main__':
sys.exit(main())


@@ -1,176 +0,0 @@
#!/usr/bin/env python3
#
# Copyright 2018, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Unit tests for the app_startup_runner.py script.
Install:
$> sudo apt-get install python3-pytest ## OR
$> pip install -U pytest
See also https://docs.pytest.org/en/latest/getting-started.html
Usage:
$> ./app_startup_runner_test.py
$> pytest app_startup_runner_test.py
$> python -m pytest app_startup_runner_test.py
See also https://docs.pytest.org/en/latest/usage.html
"""
import io
import shlex
import sys
import typing
# global imports
from contextlib import contextmanager
# local imports
import app_startup_runner as asr
# pip imports
import pytest
#
# Argument Parsing Helpers
#
@contextmanager
def ignore_stdout_stderr():
"""Ignore stdout/stderr output for duration of this context."""
old_stdout = sys.stdout
old_stderr = sys.stderr
sys.stdout = io.StringIO()
sys.stderr = io.StringIO()
try:
yield
finally:
sys.stdout = old_stdout
sys.stderr = old_stderr
@contextmanager
def argparse_bad_argument(msg):
"""
Assert that a SystemExit is raised when executing this context.
If the assertion fails, print the message 'msg'.
"""
with pytest.raises(SystemExit, message=msg):
with ignore_stdout_stderr():
yield
def assert_bad_argument(args, msg):
"""
Assert that the command line arguments in 'args' are malformed.
Prints 'msg' if the assertion fails.
"""
with argparse_bad_argument(msg):
parse_args(args)
def parse_args(args):
"""
:param args: command-line like arguments as a single string
:return: dictionary of parsed key/values
"""
# "-a b -c d" => ['-a', 'b', '-c', 'd']
return vars(asr.parse_options(shlex.split(args)))
def default_dict_for_parsed_args(**kwargs):
"""
Combine kwargs with all of the "optional" parameters' default values.
"""
d = {'compiler_filters': None, 'simulate': False, 'debug': False,
'output': None, 'timeout': 10, 'loop_count': 1, 'inodes': None,
'trace_duration': None, 'compiler_type': asr.CompilerType.DEVICE}
d.update(kwargs)
return d
def default_mock_dict_for_parsed_args(include_optional=True, **kwargs):
"""
Combine default dict with all optional parameters with some mock required parameters.
"""
d = {'packages': ['com.fake.package'], 'readaheads': ['warm']}
if include_optional:
d.update(default_dict_for_parsed_args())
d.update(kwargs)
return d
def parse_optional_args(str):
"""
Parse an argument string which already includes all the required arguments
in default_mock_dict_for_parsed_args.
"""
req = "--package com.fake.package --readahead warm"
return parse_args("%s %s" % (req, str))
def test_argparse():
# missing arguments
assert_bad_argument("", "-p and -r are required")
assert_bad_argument("-r warm", "-p is required")
assert_bad_argument("--readahead warm", "-p is required")
assert_bad_argument("-p com.fake.package", "-r is required")
assert_bad_argument("--package com.fake.package", "-r is required")
# required arguments are parsed correctly
ad = default_dict_for_parsed_args # assert dict
assert parse_args("--package xyz --readahead warm") == ad(packages=['xyz'],
readaheads=['warm'])
assert parse_args("-p xyz -r warm") == ad(packages=['xyz'],
readaheads=['warm'])
assert parse_args("-p xyz -r warm -s") == ad(packages=['xyz'],
readaheads=['warm'],
simulate=True)
assert parse_args("-p xyz -r warm --simulate") == ad(packages=['xyz'],
readaheads=['warm'],
simulate=True)
# optional arguments are parsed correctly.
mad = default_mock_dict_for_parsed_args # mock assert dict
assert parse_optional_args("--output filename.csv") == mad(
output='filename.csv')
assert parse_optional_args("-o filename.csv") == mad(output='filename.csv')
assert parse_optional_args("--timeout 123") == mad(timeout=123)
assert parse_optional_args("-t 456") == mad(timeout=456)
assert parse_optional_args("--loop-count 123") == mad(loop_count=123)
assert parse_optional_args("-lc 456") == mad(loop_count=456)
assert parse_optional_args("--inodes bar") == mad(inodes="bar")
assert parse_optional_args("-in baz") == mad(inodes="baz")
def test_key_to_cmdline_flag():
assert asr.key_to_cmdline_flag("abc") == "--abc"
assert asr.key_to_cmdline_flag("foos") == "--foo"
assert asr.key_to_cmdline_flag("ba_r") == "--ba-r"
assert asr.key_to_cmdline_flag("ba_zs") == "--ba-z"
def test_parse_run_script_csv_file():
# empty file -> None
f = io.StringIO("")
assert asr.parse_run_script_csv_file(f) == None
# common case
f = io.StringIO("TotalTime_ms,Displayed_ms\n1,2")
df = asr.DataFrame({'TotalTime_ms': [1], 'Displayed_ms': [2]})
pf = asr.parse_run_script_csv_file(f)
assert pf == df
if __name__ == '__main__':
pytest.main()


@@ -1,143 +0,0 @@
#!/bin/bash
#
# Copyright 2018, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Forces an application APK to be compiled (by ART's dex2oat)
# with a specific compiler filter.
#
# Example usage:
# $> ./force_compiler_filter -p com.google.android.apps.maps -c speed-profile
#
# (The application may be started/stopped as a side effect)
#
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
source "$DIR/lib/common"
usage() {
cat <<EOF
Usage: $(basename $0) [OPTION]...
Required:
-p, --package package of the app to recompile
-c, --compiler-filter override the compiler filter if set (default none)
valid options are listed by: adb shell cmd package, under compile -m
Optional:
-a, --activity activity of the app to recompile
-h, --help usage information (this)
-v, --verbose enable extra verbose printing
-w, --wait_time how long to wait for app startup (default 10) in seconds
EOF
}
wait_time="10" # seconds
parse_arguments() {
while [[ $# -gt 0 ]]; do
case "$1" in
-a|--activity)
activity="$2"
shift
;;
-h|--help)
usage
exit 0
;;
-p|--package)
package="$2"
shift
;;
-w|--wait_time)
wait_time="$2"
shift
;;
-c|--compiler-filter)
compiler_filter="$2"
shift
;;
-v|--verbose)
verbose="y"
;;
esac
shift
done
if [[ -z "$compiler_filter" ]]; then
echo "Missing required --compiler-filter" >&2
echo ""
usage
exit 1
fi
if [[ -z "$package" ]]; then
echo "Missing required --package" >&2
echo ""
usage
exit 1
fi
if [[ "$activity" == "" ]]; then
activity="$(get_activity_name "$package")"
if [[ "$activity" == "" ]]; then
echo "Activity name could not be found, invalid package name?" 1>&2
exit 1
else
verbose_print "Activity name inferred: " "$activity"
fi
fi
}
force_package_compilation() {
local arg_compiler_filter="$1"
local arg_package="$2"
if [[ $arg_compiler_filter == speed-profile ]]; then
# Force the running app to dump its profiles to disk.
remote_pkill "$arg_package" -SIGUSR1
sleep 1 # give some time for above to complete.
fi
adb shell cmd package compile -m "$arg_compiler_filter" -f "$arg_package"
}
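# For example, a speed-profile recompile of Maps ends up running something like:
#   adb shell cmd package compile -m speed-profile -f com.google.android.apps.maps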
main() {
parse_arguments "$@"
if [[ $compiler_filter == speed-profile ]]; then
# screen needs to be unlocked in order to run an app
"$DIR"/unlock_screen
local output=$("$DIR"/launch_application "$package" "$activity")
if [[ $? -ne 0 ]]; then
echo "launching application failed" >&2
exit 1
fi
verbose_print "$output"
# give some time for app startup to complete.
# this is supposed to be an upper bound for measuring startup time.
sleep "$wait_time"
fi
force_package_compilation "$compiler_filter" "$package"
# kill the application to ensure next time it's started,
# it picks up the correct compilation filter.
adb shell am force-stop "$package"
remote_pkill "$package"
}
main "$@"


@@ -1,52 +0,0 @@
#!/bin/bash
#
# Copyright 2018, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
source "$DIR/lib/common"
launch_application_usage() {
cat <<EOF
Usage: $(basename $0) <package> <activity>
Positional Arguments:
<package> package of the app to test
<activity> activity to use
Named Arguments:
-h, --help usage information (this)
EOF
}
launch_application() {
local package="$1"
local activity="$2"
# if there's any $s inside of the activity name, it needs to be escaped to \$.
# example '.app.honeycomb.Shell$HomeActivity'
# if the $ is not escaped, adb shell will try to evaluate $HomeActivity to a variable.
activity=${activity//\$/\\$}
adb shell am start -S -W "$package"/"$activity"
# pipe this into 'parse_metrics' to parse the output.
}
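# For example, launching the YouTube home activity would run something like:
#   adb shell am start -S -W com.google.android.youtube/.app.honeycomb.Shell\$HomeActivity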
if [[ $# -lt 2 ]]; then
launch_application_usage
exit 1
fi
launch_application "$@"


@@ -1,126 +0,0 @@
#!/usr/bin/env python3
#
# Copyright 2019, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper util libraries for calling adb command line."""
import datetime
import os
import re
import sys
import time
from typing import Optional
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(
os.path.abspath(__file__)))))
import lib.cmd_utils as cmd_utils
import lib.logcat_utils as logcat_utils
def logcat_save_timestamp() -> str:
"""Gets the current logcat timestamp.
Returns:
A string of timestamp.
"""
_, output = cmd_utils.run_adb_shell_command(
"date -u +\'%Y-%m-%d %H:%M:%S.%N\'")
return output
def vm_drop_cache():
"""Free pagecache and slab object."""
cmd_utils.run_adb_shell_command('echo 3 > /proc/sys/vm/drop_caches')
# Sleep a little bit to provide enough time for cache cleanup.
time.sleep(1)
def root():
"""Roots adb and successive adb commands will run under root."""
cmd_utils.run_shell_command('adb root')
def disable_selinux():
"""Disables selinux setting."""
_, output = cmd_utils.run_adb_shell_command('getenforce')
if output == 'Permissive':
return
print('Disable selinux permissions and restart framework.')
cmd_utils.run_adb_shell_command('setenforce 0')
cmd_utils.run_adb_shell_command('stop')
cmd_utils.run_adb_shell_command('start')
cmd_utils.run_shell_command('adb wait-for-device')
def pkill(procname: str):
"""Kills a process on device specified by the substring pattern in procname"""
_, pids = cmd_utils.run_shell_command('adb shell ps | grep "{}" | '
'awk \'{{print $2;}}\''.
format(procname))
for pid in pids.split('\n'):
pid = pid.strip()
if pid:
passed,_ = cmd_utils.run_adb_shell_command('kill {}'.format(pid))
time.sleep(1)
def parse_time_to_milliseconds(time: str) -> int:
"""Parses the time string to milliseconds."""
# Example: +1s56ms, +56ms
regex = r'\+((?P<second>\d+?)s)?(?P<millisecond>\d+?)ms'
result = re.search(regex, time)
second = 0
if result.group('second'):
second = int(result.group('second'))
ms = int(result.group('millisecond'))
return second * 1000 + ms
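# For example, parse_time_to_milliseconds('+1s56ms') returns 1056 and
# parse_time_to_milliseconds('+56ms') returns 56.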
def blocking_wait_for_logcat_displayed_time(timestamp: datetime.datetime,
package: str,
timeout: int) -> Optional[int]:
"""Parses the displayed time in the logcat.
Returns:
the displayed time.
"""
pattern = re.compile('.*ActivityTaskManager: Displayed {}.*'.format(package))
# 2019-07-02 22:28:34.469453349 -> 2019-07-02 22:28:34.469453
timestamp = datetime.datetime.strptime(timestamp[:-3],
'%Y-%m-%d %H:%M:%S.%f')
timeout_dt = timestamp + datetime.timedelta(0, timeout)
# 2019-07-01 14:54:21.946 27365 27392 I ActivityTaskManager:
# Displayed com.android.settings/.Settings: +927ms
result = logcat_utils.blocking_wait_for_logcat_pattern(timestamp,
pattern,
timeout_dt)
if not result or not '+' in result:
return None
displayed_time = result[result.rfind('+'):]
return parse_time_to_milliseconds(displayed_time)
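# For example, for the logcat line
#   ... I ActivityTaskManager: Displayed com.android.settings/.Settings: +927ms
# this returns 927.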
def delete_file_on_device(file_path: str) -> None:
""" Deletes a file on the device. """
cmd_utils.run_adb_shell_command(
"[[ -f '{file_path}' ]] && rm -f '{file_path}' || "
"exit 0".format(file_path=file_path))
def set_prop(property: str, value: str) -> None:
""" Sets property using adb shell. """
cmd_utils.run_adb_shell_command('setprop "{property}" "{value}"'.format(
property=property, value=value))
def pull_file(device_file_path: str, output_file_path: str) -> None:
""" Pulls file from device to output """
cmd_utils.run_shell_command('adb pull "{device_file_path}" "{output_file_path}"'.
format(device_file_path=device_file_path,
output_file_path=output_file_path))


@@ -1,16 +0,0 @@
import adb_utils
# pip imports
import pytest
def test_parse_time_to_milliseconds():
# Act
result1 = adb_utils.parse_time_to_milliseconds('+1s7ms')
result2 = adb_utils.parse_time_to_milliseconds('+523ms')
# Assert
assert result1 == 1007
assert result2 == 523
if __name__ == '__main__':
pytest.main()


@@ -1,266 +0,0 @@
# Copyright 2019, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Class to run an app."""
import os
import sys
from typing import Optional, List, Tuple
# local import
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(
os.path.abspath(__file__)))))
import app_startup.lib.adb_utils as adb_utils
import lib.cmd_utils as cmd_utils
import lib.print_utils as print_utils
class AppRunnerListener(object):
"""Interface for lisenter of AppRunner. """
def preprocess(self) -> None:
"""Preprocess callback to initialized before the app is running. """
pass
def postprocess(self, pre_launch_timestamp: str) -> None:
"""Postprocess callback to cleanup after the app is running.
param:
'pre_launch_timestamp': indicates the timestamp when the app is
launching.. """
pass
def metrics_selector(self, am_start_output: str,
pre_launch_timestamp: str) -> None:
"""A metrics selection callback that waits for the desired metrics to
show up in logcat.
params:
'am_start_output': indicates the output of app startup.
'pre_launch_timestamp': indicates the timestamp when the app is
launching.
returns:
a string in the format of "<metric>=<value>\n<metric>=<value>\n..."
for further parsing. For example "TotalTime=123\nDisplayedTime=121".
Return an empty string if no metrics need to be parsed further.
"""
pass
class AppRunner(object):
""" Class to run an app. """
# static variables
DIR = os.path.abspath(os.path.dirname(__file__))
APP_STARTUP_DIR = os.path.dirname(DIR)
IORAP_COMMON_BASH_SCRIPT = os.path.realpath(os.path.join(DIR,
'../../iorap/common'))
DEFAULT_TIMEOUT = 30 # seconds
def __init__(self,
package: str,
activity: Optional[str],
compiler_filter: Optional[str],
timeout: Optional[int],
simulate: bool):
self.package = package
self.simulate = simulate
# If the argument activity is None, try to set it.
self.activity = activity
if self.simulate:
self.activity = 'act'
if self.activity is None:
self.activity = AppRunner.get_activity(self.package)
self.compiler_filter = compiler_filter
self.timeout = timeout if timeout else AppRunner.DEFAULT_TIMEOUT
self.listeners = []
def add_callbacks(self, listener: AppRunnerListener):
self.listeners.append(listener)
def remove_callbacks(self, listener: AppRunnerListener):
self.listeners.remove(listener)
@staticmethod
def get_activity(package: str) -> str:
""" Tries to set the activity based on the package. """
passed, activity = cmd_utils.run_shell_func(
AppRunner.IORAP_COMMON_BASH_SCRIPT,
'get_activity_name',
[package])
if not passed or not activity:
raise ValueError(
'Activity name could not be found, invalid package name?!')
return activity
def configure_compiler_filter(self) -> bool:
"""Configures compiler filter (e.g. speed).
Returns:
A bool indicating whether configuring the compiler filter succeeded.
"""
if not self.compiler_filter:
print_utils.debug_print('No --compiler-filter specified, don\'t'
' need to force it.')
return True
passed, current_compiler_filter_info = \
cmd_utils.run_shell_command(
'{} --package {}'.format(os.path.join(AppRunner.APP_STARTUP_DIR,
'query_compiler_filter.py'),
self.package))
if passed != 0:
return passed
# TODO: call query_compiler_filter directly as a python function instead of
# these shell calls.
current_compiler_filter, current_reason, current_isa = \
current_compiler_filter_info.split(' ')
print_utils.debug_print('Compiler Filter={} Reason={} Isa={}'.format(
current_compiler_filter, current_reason, current_isa))
# Don't trust reasons that aren't 'unknown' because that means
# we didn't manually force the compilation filter.
# (e.g. if any automatic system-triggered compilations are not unknown).
if current_reason != 'unknown' or \
current_compiler_filter != self.compiler_filter:
passed, _ = cmd_utils.run_shell_command('{}/force_compiler_filter '
'--compiler-filter "{}" '
'--package "{}"'
' --activity "{}"'.
format(AppRunner.APP_STARTUP_DIR,
self.compiler_filter,
self.package,
self.activity))
else:
print_utils.debug_print('Queried compiler-filter matched requested '
'compiler-filter, skip forcing.')
passed = False
return passed
def run(self) -> Optional[List[Tuple[str]]]:
"""Runs an app.
Returns:
A list of (metric, value) tuples.
"""
print_utils.debug_print('==========================================')
print_utils.debug_print('===== START =====')
print_utils.debug_print('==========================================')
# Run the preprocess.
for listener in self.listeners:
listener.preprocess()
# Ensure the APK is currently compiled with whatever we passed in
# via --compiler-filter.
# No-op if this option was not passed in.
if not self.configure_compiler_filter():
print_utils.error_print('Compiler filter configuration failed!')
return None
pre_launch_timestamp = adb_utils.logcat_save_timestamp()
# Launch the app.
results = self.launch_app(pre_launch_timestamp)
# Run the postprocess.
for listener in self.listeners:
listener.postprocess(pre_launch_timestamp)
return results
def launch_app(self, pre_launch_timestamp: str) -> Optional[List[Tuple[str]]]:
""" Launches the app.
Returns:
A list of (metric, value) tuples.
"""
print_utils.debug_print('Running with timeout {}'.format(self.timeout))
passed, am_start_output = cmd_utils.run_shell_command('timeout {timeout} '
'"{DIR}/launch_application" '
'"{package}" '
'"{activity}"'.
format(timeout=self.timeout,
DIR=AppRunner.APP_STARTUP_DIR,
package=self.package,
activity=self.activity))
if not passed and not self.simulate:
return None
return self.wait_for_app_finish(pre_launch_timestamp, am_start_output)
def wait_for_app_finish(self,
pre_launch_timestamp: str,
am_start_output: str) -> Optional[List[Tuple[str]]]:
""" Wait for app finish and all metrics are shown in logcat.
Returns:
A list of (metric, value) tuples.
"""
if self.simulate:
return [('TotalTime', '123')]
ret = []
for listener in self.listeners:
output = listener.metrics_selector(am_start_output,
pre_launch_timestamp)
ret = ret + AppRunner.parse_metrics_output(output)
return ret
@staticmethod
def parse_metrics_output(input: str) -> List[Tuple[str, str]]:
"""Parses output of app startup to metrics and corresponding values.
It converts 'a=b\nc=d\ne=f\n...' into [('a','b'),('c','d'),('e','f')].
Returns:
A list of (metric name, metric value) tuples.
"""
all_metrics = []
for line in input.split('\n'):
if not line:
continue
splits = line.split('=')
if len(splits) < 2:
print_utils.error_print('Bad line "{}"'.format(line))
continue
metric_name = splits[0]
metric_value = splits[1]
rest = splits[2] if len(splits) > 2 else ''
if rest:
print_utils.error_print('Corrupt line "{}"'.format(line))
print_utils.debug_print('metric: "{metric_name}", '
'value: "{metric_value}" '.
format(metric_name=metric_name,
metric_value=metric_value))
all_metrics.append((metric_name, metric_value))
return all_metrics
@staticmethod
def parse_total_time( am_start_output: str) -> Optional[str]:
"""Parses the total time from 'adb shell am start pkg' output.
Returns:
the total time of app startup.
"""
for line in am_start_output.split('\n'):
if 'TotalTime:' in line:
return line[len('TotalTime:'):].strip()
return None


@@ -1,104 +0,0 @@
# Copyright 2019, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Unit tests for the AppRunner."""
import os
import sys
from pathlib import Path
from app_runner import AppRunner, AppRunnerListener
from mock import Mock, call, patch
# The path is "frameworks/base/startop/scripts/"
sys.path.append(Path(os.path.realpath(__file__)).parents[2])
import lib.cmd_utils as cmd_utils
class AppRunnerTestListener(AppRunnerListener):
def preprocess(self) -> None:
cmd_utils.run_shell_command('pre'),
def postprocess(self, pre_launch_timestamp: str) -> None:
cmd_utils.run_shell_command('post'),
def metrics_selector(self, am_start_output: str,
pre_launch_timestamp: str) -> None:
return 'TotalTime=123\n'
RUNNER = AppRunner(package='music',
activity='MainActivity',
compiler_filter='speed',
timeout=None,
simulate=False)
def test_configure_compiler_filter():
with patch('lib.cmd_utils.run_shell_command',
new_callable=Mock) as mock_run_shell_command:
mock_run_shell_command.return_value = (True, 'speed arm64 kUpToDate')
RUNNER.configure_compiler_filter()
calls = [call(os.path.realpath(
os.path.join(RUNNER.DIR,
'../query_compiler_filter.py')) + ' --package music')]
mock_run_shell_command.assert_has_calls(calls)
def test_parse_metrics_output():
input = 'a1=b1\nc1=d1\ne1=f1'
ret = RUNNER.parse_metrics_output(input)
assert ret == [('a1', 'b1'), ('c1', 'd1'), ('e1', 'f1')]
def _mocked_run_shell_command(*args, **kwargs):
if args[0] == 'adb shell "date -u +\'%Y-%m-%d %H:%M:%S.%N\'"':
return (True, "2019-07-02 23:20:06.972674825")
elif args[0] == 'adb shell ps | grep "music" | awk \'{print $2;}\'':
return (True, '9999')
else:
return (True, 'a1=b1\nc1=d1=d2\ne1=f1')
@patch('app_startup.lib.adb_utils.blocking_wait_for_logcat_displayed_time')
@patch('lib.cmd_utils.run_shell_command')
def test_run(mock_run_shell_command,
mock_blocking_wait_for_logcat_displayed_time):
mock_run_shell_command.side_effect = _mocked_run_shell_command
mock_blocking_wait_for_logcat_displayed_time.return_value = 123
test_listener = AppRunnerTestListener()
RUNNER.add_callbacks(test_listener)
result = RUNNER.run()
RUNNER.remove_callbacks(test_listener)
calls = [call('pre'),
call(os.path.realpath(
os.path.join(RUNNER.DIR,
'../query_compiler_filter.py')) +
' --package music'),
call('adb shell "date -u +\'%Y-%m-%d %H:%M:%S.%N\'"'),
call(
'timeout {timeout} "{DIR}/launch_application" "{package}" "{activity}"'
.format(timeout=30,
DIR=os.path.realpath(os.path.dirname(RUNNER.DIR)),
package='music',
activity='MainActivity',
timestamp='2019-07-02 23:20:06.972674825')),
call('post')
]
mock_run_shell_command.assert_has_calls(calls)
assert result == [('TotalTime', '123')]
assert len(RUNNER.listeners) == 0


@@ -1,77 +0,0 @@
import itertools
import os
import sys
from typing import Any, Callable, Dict, Iterable, List, NamedTuple, Tuple, \
TypeVar, Optional
# local import
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(
os.path.abspath(__file__)))))
import lib.print_utils as print_utils
T = TypeVar('T')
NamedTupleMeta = Callable[
..., T] # approximation of a (S : NamedTuple<T> where S() == T) metatype.
FilterFuncType = Callable[[NamedTuple], bool]
def dict_lookup_any_key(dictionary: dict, *keys: List[Any]):
for k in keys:
if k in dictionary:
return dictionary[k]
print_utils.debug_print("None of the keys {} were in the dictionary".format(
keys))
return [None]
def generate_run_combinations(named_tuple: NamedTupleMeta[T],
opts_dict: Dict[str, List[Optional[object]]],
loop_count: int = 1) -> Iterable[T]:
"""
Create all possible combinations given the values in opts_dict[named_tuple._fields].
:type T: type annotation for the named_tuple type.
:param named_tuple: named tuple type, whose fields are used to make combinations for
:param opts_dict: dictionary of keys to value list. keys correspond to the named_tuple fields.
:param loop_count: number of repetitions.
:return: an iterable over named_tuple instances.
"""
combinations_list = []
for k in named_tuple._fields:
# the key can be either singular or plural, e.g. 'package' or 'packages'
val = dict_lookup_any_key(opts_dict, k, k + "s")
# treat {'x': None} key value pairs as if it was [None]
# otherwise itertools.product throws an exception about not being able to iterate None.
combinations_list.append(val or [None])
print_utils.debug_print("opts_dict: ", opts_dict)
print_utils.debug_print_nd("named_tuple: ", named_tuple)
print_utils.debug_print("combinations_list: ", combinations_list)
for i in range(loop_count):
for combo in itertools.product(*combinations_list):
yield named_tuple(*combo)
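# For example (hypothetical tuple type): with
#   AB = NamedTuple('AB', [('a', str), ('b', str)])
# and opts_dict {'as': ['a1', 'a2'], 'bs': ['b1']}, this yields
# AB('a1', 'b1') and AB('a2', 'b1'); see args_utils_test.py for more cases.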
def filter_run_combinations(named_tuple: NamedTuple,
filters: List[FilterFuncType]) -> bool:
for filter in filters:
if filter(named_tuple):
return False
return True
def generate_group_run_combinations(run_combinations: Iterable[NamedTuple],
dst_nt: NamedTupleMeta[T]) \
-> Iterable[Tuple[T, Iterable[NamedTuple]]]:
def group_by_keys(src_nt):
src_d = src_nt._asdict()
# now remove the keys that aren't legal in dst.
for illegal_key in set(src_d.keys()) - set(dst_nt._fields):
if illegal_key in src_d:
del src_d[illegal_key]
return dst_nt(**src_d)
for args_list_it in itertools.groupby(run_combinations, group_by_keys):
(group_key_value, args_it) = args_list_it
yield (group_key_value, args_it)


@@ -1,58 +0,0 @@
#!/usr/bin/env python3
#
# Copyright 2018, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Unit tests for the args_utils.py script."""
import typing
import args_utils
def generate_run_combinations(*args):
# expand out the generator values so that assert x == y works properly.
return [i for i in args_utils.generate_run_combinations(*args)]
def test_generate_run_combinations():
blank_nd = typing.NamedTuple('Blank')
assert generate_run_combinations(blank_nd, {}, 1) == [()], "empty"
assert generate_run_combinations(blank_nd, {'a': ['a1', 'a2']}) == [
()], "empty filter"
a_nd = typing.NamedTuple('A', [('a', str)])
assert generate_run_combinations(a_nd, {'a': None}) == [(None,)], "None"
assert generate_run_combinations(a_nd, {'a': ['a1', 'a2']}) == [('a1',), (
'a2',)], "one item"
assert generate_run_combinations(a_nd,
{'a': ['a1', 'a2'], 'b': ['b1', 'b2']}) == [
('a1',), ('a2',)], \
"one item filter"
assert generate_run_combinations(a_nd, {'a': ['a1', 'a2']}, 2) == [('a1',), (
'a2',), ('a1',), ('a2',)], "one item"
ab_nd = typing.NamedTuple('AB', [('a', str), ('b', str)])
assert generate_run_combinations(ab_nd,
{'a': ['a1', 'a2'],
'b': ['b1', 'b2']}) == [ab_nd('a1', 'b1'),
ab_nd('a1', 'b2'),
ab_nd('a2', 'b1'),
ab_nd('a2', 'b2')], \
"two items"
assert generate_run_combinations(ab_nd,
{'as': ['a1', 'a2'],
'bs': ['b1', 'b2']}) == [ab_nd('a1', 'b1'),
ab_nd('a1', 'b2'),
ab_nd('a2', 'b1'),
ab_nd('a2', 'b2')], \
"two items plural"

View File

@ -1,198 +0,0 @@
#!/bin/bash
# Copyright 2018, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
if [[ -z $ANDROID_BUILD_TOP ]]; then
echo "Please run source build/envsetup.sh first" >&2
exit 1
fi
source $ANDROID_BUILD_TOP/build/envsetup.sh
verbose_print() {
if [[ "$verbose" == "y" ]]; then
echo "$@" >&2
fi
}
remote_pidof() {
local procname="$1"
adb shell ps | grep "$procname" | awk '{print $2;}'
}
remote_pkill() {
local procname="$1"
shift
local the_pids=$(remote_pidof "$procname")
local pid
for pid in $the_pids; do
verbose_print adb shell kill "$@" "$pid"
adb shell kill "$@" "$pid"
done
}
get_activity_name() {
local package="$1"
local action_key="android.intent.action.MAIN:"
# Example query-activities output being parsed:
#
# Activity #14:
# priority=0 preferredOrder=0 match=0x108000 specificIndex=-1 isDefault=true
# com.google.android.videos/com.google.android.youtube.videos.EntryPoint
# Activity #15:
# priority=0 preferredOrder=0 match=0x108000 specificIndex=-1 isDefault=true
# com.google.android.youtube/.app.honeycomb.Shell$HomeActivity
# Given package 'com.google.android.youtube' return '.app.honeycomb.Shell$HomeActivity'
local activity_line="$(adb shell cmd package query-activities --brief -a android.intent.action.MAIN -c android.intent.category.LAUNCHER | grep "$package/")"
IFS="/" read -a array <<< "$activity_line"
local activity_name="${array[1]}"
# Activities starting with '.' are shorthand for having their package name prefixed.
if [[ $activity_name == .* ]]; then
activity_name="${package}${activity_name}"
fi
echo "$activity_name"
}
# Use with logcat_from_timestamp to skip all past log-lines.
logcat_save_timestamp() {
adb shell 'date -u +"%Y-%m-%d %H:%M:%S.%N"'
}
# Roll forward logcat to only show events
# since the specified timestamp.
#
# i.e. don't look at historical logcat,
# only look at FUTURE logcat.
#
# First use 'logcat_save_timestamp'
# Then do whatever action you want.
# Then use 'logcat_from_timestamp_bg $timestamp'
logcat_from_timestamp_bg() {
local timestamp="$1"
shift # drop timestamp from args.
verbose_print adb logcat -T \"$timestamp\" \"$@\"
adb logcat -v UTC -T "$timestamp" "$@" &
logcat_from_timestamp_pid=$!
}
# Starting at timestamp $2, wait until we see pattern $3
# or until a timeout happens in $1 seconds.
# If successful, also echo the line that matched the pattern.
#
# Set VERBOSE_LOGCAT=1 to debug every line of logcat it tries to parse.
logcat_select_pattern() {
local timeout="$1"
local timestamp="$2"
local pattern="$3"
local logcat_fd
coproc logcat_fd {
kill_children_quietly() {
kill "$logcat_pidd"
wait "$logcat_pidd" 2>/dev/null
}
trap 'kill_children_quietly' EXIT # kill logcat when this coproc is killed.
# run logcat in the background so it can be killed.
logcat_from_timestamp_bg "$timestamp"
logcat_pidd=$logcat_from_timestamp_pid
wait "$logcat_pidd"
}
local logcat_pid="$!"
verbose_print "[LOGCAT] Spawn pid $logcat_pid"
local timeout_ts="$(date -d "now + ${timeout} seconds" '+%s')"
local now_ts="0"
local return_code=1
verbose_print "logcat_wait_for_pattern begin"
while read -t "$timeout" -r -u "${logcat_fd[0]}" logcat_output; do
if (( $VERBOSE_LOGCAT )); then
verbose_print "LOGCAT: $logcat_output"
fi
if [[ "$logcat_output:" == *"$pattern"* ]]; then
verbose_print "LOGCAT: " "$logcat_output"
verbose_print "WE DID SEE PATTERN" '<<' "$pattern" '>>.'
echo "$logcat_output"
return_code=0
break
fi
now_ts="$(date -d "now" '+%s')"
if (( now_ts >= timeout_ts )); then
verbose_print "DID TIMEOUT BEFORE SEEING ANYTHING (timeout=$timeout seconds) " '<<' "$pattern" '>>.'
break
fi
done
# Don't leave logcat lying around since it will keep going.
kill "$logcat_pid"
# Suppress annoying 'Terminated...' message.
wait "$logcat_pid" 2>/dev/null
verbose_print "[LOGCAT] $logcat_pid should be killed"
return $return_code
}
# Starting at timestamp $2, wait until we see pattern $3
# or until a timeout happens in $1 seconds.
#
# Set VERBOSE_LOGCAT=1 to debug every line of logcat it tries to parse.
logcat_wait_for_pattern() {
logcat_select_pattern "$@" > /dev/null
}
# Starting at timestamp $2, wait until we see pattern $3
# or until a timeout happens in $1 seconds.
# If successful, extract with the regular expression pattern in $4
# and return the first capture group.
#
# Set VERBOSE_LOGCAT=1 to debug every line of logcat it tries to parse.
logcat_extract_pattern() {
local timeout="$1"
local timestamp="$2"
local pattern="$3"
local re_pattern="$4"
local result
local exit_code
result="$(logcat_select_pattern "$@")"
exit_code=$?
if [[ $exit_code -ne 0 ]]; then
return $exit_code
fi
echo "$result" | sed 's/'"$re_pattern"'/\1/g'
}
# Join array
# FOO=(a b c)
# join_by , "${FOO[@]}" #a,b,c
join_by() {
local IFS="$1"
shift
echo "$*"
}
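The Python side of this tree exposes the same save-a-timestamp-then-wait flow through lib/logcat_utils.py (used by the perfetto collector below); a minimal sketch, assuming a device is attached and using a hypothetical pattern:

import datetime
import re
import lib.logcat_utils as logcat_utils

# Timestamp taken before triggering the action (assumes host and device clocks
# agree; the shell helpers above read the device clock instead).
start = datetime.datetime.now()
deadline = start + datetime.timedelta(seconds=30)
# Hypothetical pattern; the real callers wait for iorapd/ActivityTaskManager lines.
pattern = re.compile(r'.*Displayed com\.example\.app/.*')

match = logcat_utils.blocking_wait_for_logcat_pattern(start, pattern, deadline)
print(match)  # the matching logcat line, or None on timeout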

View File

@ -1,201 +0,0 @@
import itertools
from typing import Dict, List
class DataFrame:
"""Table-like class for storing a 2D cells table with named columns."""
def __init__(self, data: Dict[str, List[object]] = {}):
"""
Create a new DataFrame from a dictionary (keys = headers,
values = columns).
"""
self._headers = [i for i in data.keys()]
self._rows = []
row_num = 0
def get_data_row(idx):
r = {}
for header, header_data in data.items():
if not len(header_data) > idx:
continue
r[header] = header_data[idx]
return r
while True:
row_dict = get_data_row(row_num)
if len(row_dict) == 0:
break
self._append_row(row_dict.keys(), row_dict.values())
row_num = row_num + 1
def concat_rows(self, other: 'DataFrame') -> None:
"""
In-place concatenate rows of other into the rows of the
current DataFrame.
None is added in pre-existing cells if new headers
are introduced.
"""
other_datas = other._data_only()
other_headers = other.headers
for d in other_datas:
self._append_row(other_headers, d)
def _append_row(self, headers: List[str], data: List[object]):
new_row = {k:v for k,v in zip(headers, data)}
self._rows.append(new_row)
for header in headers:
if not header in self._headers:
self._headers.append(header)
def __repr__(self):
# return repr(self._rows)
repr = ""
header_list = self._headers_only()
row_format = u""
for header in header_list:
row_format = row_format + u"{:>%d}" %(len(header) + 1)
repr = row_format.format(*header_list) + "\n"
for v in self._data_only():
repr = repr + row_format.format(*v) + "\n"
return repr
def __eq__(self, other):
if isinstance(other, self.__class__):
return self.headers == other.headers and self.data_table == other.data_table
else:
print("wrong instance", other.__class__)
return False
@property
def headers(self) -> List[str]:
return [i for i in self._headers_only()]
@property
def data_table(self) -> List[List[object]]:
return list(self._data_only())
@property
def data_table_transposed(self) -> List[List[object]]:
return list(self._transposed_data())
@property
def data_row_len(self) -> int:
return len(self._rows)
def data_row_at(self, idx) -> List[object]:
"""
Return a single data row at the specified index (0th based).
Accepts negative indices, e.g. -1 is last row.
"""
row_dict = self._rows[idx]
l = []
for h in self._headers_only():
l.append(row_dict.get(h)) # Adds None in blank spots.
return l
def copy(self) -> 'DataFrame':
"""
Shallow copy of this DataFrame.
"""
return self.repeat(count=1)
def repeat(self, count: int) -> 'DataFrame':
"""
Returns a new DataFrame where each row of this dataframe is repeated count times.
A repeat of a row is adjacent to other repeats of that same row.
"""
df = DataFrame()
df._headers = self._headers.copy()
rows = []
for row in self._rows:
for i in range(count):
rows.append(row.copy())
df._rows = rows
return df
def merge_data_columns(self, other: 'DataFrame'):
"""
Merge self and another DataFrame by adding the data from other column-wise.
For any headers that are the same, data from 'other' is preferred.
"""
for h in other._headers:
if not h in self._headers:
self._headers.append(h)
append_rows = []
for self_dict, other_dict in itertools.zip_longest(self._rows, other._rows):
if not self_dict:
d = {}
append_rows.append(d)
else:
d = self_dict
d_other = other_dict
if d_other:
for k,v in d_other.items():
d[k] = v
for r in append_rows:
self._rows.append(r)
def data_row_reduce(self, fnc) -> 'DataFrame':
"""
Reduces the data column-wise by applying fnc to the cells of each column, collapsing all rows into one.
Empty cells are skipped.
fnc(Iterable[object]) -> object
fnc is applied over every non-empty cell in that column (descending row-wise).
Example:
DataFrame({'a':[1,2,3]}).data_row_reduce(sum) == DataFrame({'a':[6]})
Returns a new single-row DataFrame.
"""
df = DataFrame()
df._headers = self._headers.copy()
def yield_by_column(header_key):
for row_dict in self._rows:
val = row_dict.get(header_key)
if val:
yield val
new_row_dict = {}
for h in df._headers:
cell_value = fnc(yield_by_column(h))
new_row_dict[h] = cell_value
df._rows = [new_row_dict]
return df
def _headers_only(self):
return self._headers
def _data_only(self):
row_len = len(self._rows)
for i in range(row_len):
yield self.data_row_at(i)
def _transposed_data(self):
return zip(*self._data_only())
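A minimal sketch of the two ways of combining frames described above (values are made up for illustration): concat_rows stacks rows and pads missing columns with None, while merge_data_columns adds columns side by side and prefers the other frame's values where headers collide.

from data_frame import DataFrame

other = DataFrame({'Displayed_ms': [4, 5]})

rows = DataFrame({'TotalTime_ms': [1, 2]})
rows.concat_rows(other)         # stack rows; missing columns become None
print(rows.data_table)          # [[1, None], [2, None], [None, 4], [None, 5]]

cols = DataFrame({'TotalTime_ms': [1, 2]})
cols.merge_data_columns(other)  # add columns side by side
print(cols.data_table)          # [[1, 4], [2, 5]]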

View File

@ -1,128 +0,0 @@
#!/usr/bin/env python3
#
# Copyright 2018, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Unit tests for the data_frame.py script."""
from data_frame import DataFrame
def test_data_frame():
# trivial empty data frame
df = DataFrame()
assert df.headers == []
assert df.data_table == []
assert df.data_table_transposed == []
# common case, same number of values in each place.
df = DataFrame({'TotalTime_ms': [1, 2, 3], 'Displayed_ms': [4, 5, 6]})
assert df.headers == ['TotalTime_ms', 'Displayed_ms']
assert df.data_table == [[1, 4], [2, 5], [3, 6]]
assert df.data_table_transposed == [(1, 2, 3), (4, 5, 6)]
# varying num values.
df = DataFrame({'many': [1, 2], 'none': []})
assert df.headers == ['many', 'none']
assert df.data_table == [[1, None], [2, None]]
assert df.data_table_transposed == [(1, 2), (None, None)]
df = DataFrame({'many': [], 'none': [1, 2]})
assert df.headers == ['many', 'none']
assert df.data_table == [[None, 1], [None, 2]]
assert df.data_table_transposed == [(None, None), (1, 2)]
# merge multiple data frames
df = DataFrame()
df.concat_rows(DataFrame())
assert df.headers == []
assert df.data_table == []
assert df.data_table_transposed == []
df = DataFrame()
df2 = DataFrame({'TotalTime_ms': [1, 2, 3], 'Displayed_ms': [4, 5, 6]})
df.concat_rows(df2)
assert df.headers == ['TotalTime_ms', 'Displayed_ms']
assert df.data_table == [[1, 4], [2, 5], [3, 6]]
assert df.data_table_transposed == [(1, 2, 3), (4, 5, 6)]
df = DataFrame({'TotalTime_ms': [1, 2]})
df2 = DataFrame({'Displayed_ms': [4, 5]})
df.concat_rows(df2)
assert df.headers == ['TotalTime_ms', 'Displayed_ms']
assert df.data_table == [[1, None], [2, None], [None, 4], [None, 5]]
df = DataFrame({'TotalTime_ms': [1, 2]})
df2 = DataFrame({'TotalTime_ms': [3, 4], 'Displayed_ms': [5, 6]})
df.concat_rows(df2)
assert df.headers == ['TotalTime_ms', 'Displayed_ms']
assert df.data_table == [[1, None], [2, None], [3, 5], [4, 6]]
# data_row_at
df = DataFrame({'TotalTime_ms': [1, 2, 3], 'Displayed_ms': [4, 5, 6]})
assert df.data_row_at(-1) == [3, 6]
assert df.data_row_at(2) == [3, 6]
assert df.data_row_at(1) == [2, 5]
# repeat
df = DataFrame({'TotalTime_ms': [1], 'Displayed_ms': [4]})
df2 = DataFrame({'TotalTime_ms': [1, 1, 1], 'Displayed_ms': [4, 4, 4]})
assert df.repeat(3) == df2
# data_row_len
df = DataFrame({'TotalTime_ms': [1, 1, 1], 'Displayed_ms': [4, 4, 4]})
assert df.data_row_len == 3
df = DataFrame({'TotalTime_ms': [1, 1]})
assert df.data_row_len == 2
# data_row_reduce
df = DataFrame({'TotalTime_ms': [1, 1, 1], 'Displayed_ms': [4, 4, 4]})
df_sum = DataFrame({'TotalTime_ms': [3], 'Displayed_ms': [12]})
assert df.data_row_reduce(sum) == df_sum
# merge_data_columns
df = DataFrame({'TotalTime_ms': [1, 2, 3]})
df2 = DataFrame({'Displayed_ms': [3, 4, 5, 6]})
df.merge_data_columns(df2)
assert df == DataFrame(
{'TotalTime_ms': [1, 2, 3], 'Displayed_ms': [3, 4, 5, 6]})
df = DataFrame({'TotalTime_ms': [1, 2, 3]})
df2 = DataFrame({'Displayed_ms': [3, 4]})
df.merge_data_columns(df2)
assert df == DataFrame(
{'TotalTime_ms': [1, 2, 3], 'Displayed_ms': [3, 4]})
df = DataFrame({'TotalTime_ms': [1, 2, 3]})
df2 = DataFrame({'TotalTime_ms': [10, 11]})
df.merge_data_columns(df2)
assert df == DataFrame({'TotalTime_ms': [10, 11, 3]})
df = DataFrame({'TotalTime_ms': []})
df2 = DataFrame({'TotalTime_ms': [10, 11]})
df.merge_data_columns(df2)
assert df == DataFrame({'TotalTime_ms': [10, 11]})

View File

@ -1,166 +0,0 @@
# Copyright 2019, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Class to collector perfetto trace."""
import datetime
import os
import re
import sys
import time
from datetime import timedelta
from typing import Optional, List, Tuple
# global variables
DIR = os.path.abspath(os.path.dirname(__file__))
sys.path.append(os.path.dirname(os.path.dirname(DIR)))
import app_startup.lib.adb_utils as adb_utils
from app_startup.lib.app_runner import AppRunner, AppRunnerListener
import lib.print_utils as print_utils
import lib.logcat_utils as logcat_utils
import iorap.lib.iorapd_utils as iorapd_utils
class PerfettoTraceCollector(AppRunnerListener):
""" Class to collect perfetto trace.
To set the perfetto trace duration, pass 'trace_duration'.
To pull the generated perfetto trace off the device, set 'save_destination_file_path'.
"""
TRACE_FILE_SUFFIX = 'perfetto_trace.pb'
TRACE_DURATION_PROP = 'iorapd.perfetto.trace_duration_ms'
MS_PER_SEC = 1000
DEFAULT_TRACE_DURATION = timedelta(milliseconds=5000) # 5 seconds
_COLLECTOR_TIMEOUT_MULTIPLIER = 10 # take the regular timeout and multiply
def __init__(self,
package: str,
activity: Optional[str],
compiler_filter: Optional[str],
timeout: Optional[int],
simulate: bool,
trace_duration: timedelta = DEFAULT_TRACE_DURATION,
save_destination_file_path: Optional[str] = None):
""" Initialize the perfetto trace collector. """
self.app_runner = AppRunner(package,
activity,
compiler_filter,
timeout,
simulate)
self.app_runner.add_callbacks(self)
self.trace_duration = trace_duration
self.save_destination_file_path = save_destination_file_path
def purge_file(self, suffix: str) -> None:
print_utils.debug_print('iorapd-perfetto: purge file in ' +
self._get_remote_path())
adb_utils.delete_file_on_device(self._get_remote_path())
def run(self) -> Optional[List[Tuple[str]]]:
"""Runs an app.
Returns:
A list of (metric, value) tuples.
"""
return self.app_runner.run()
def preprocess(self):
# Sets up adb environment.
adb_utils.root()
adb_utils.disable_selinux()
time.sleep(1)
# Kill any existing process of this app
adb_utils.pkill(self.app_runner.package)
# Remove existing trace and compiler files
self.purge_file(PerfettoTraceCollector.TRACE_FILE_SUFFIX)
# Set perfetto trace duration prop to milliseconds.
adb_utils.set_prop(PerfettoTraceCollector.TRACE_DURATION_PROP,
int(self.trace_duration.total_seconds()*
PerfettoTraceCollector.MS_PER_SEC))
if not iorapd_utils.stop_iorapd():
raise RuntimeError('Cannot stop iorapd!')
if not iorapd_utils.enable_iorapd_perfetto():
raise RuntimeError('Cannot enable perfetto!')
if not iorapd_utils.disable_iorapd_readahead():
raise RuntimeError('Cannot disable readahead!')
if not iorapd_utils.start_iorapd():
raise RuntimeError('Cannot start iorapd!')
# Drop all caches to get cold starts.
adb_utils.vm_drop_cache()
def postprocess(self, pre_launch_timestamp: str):
# Kill any existing process of this app
adb_utils.pkill(self.app_runner.package)
iorapd_utils.disable_iorapd_perfetto()
if self.save_destination_file_path is not None:
adb_utils.pull_file(self._get_remote_path(),
self.save_destination_file_path)
def metrics_selector(self, am_start_output: str,
pre_launch_timestamp: str) -> str:
"""Parses the metric after app startup by reading from logcat in a blocking
manner until all metrics have been found.
Returns:
An empty string because the metric needs no further parsing.
"""
if not self._wait_for_perfetto_trace(pre_launch_timestamp):
raise RuntimeError('Could not save perfetto app trace file!')
return ''
def _wait_for_perfetto_trace(self, pre_launch_timestamp) -> Optional[str]:
""" Waits for the perfetto trace being saved to file.
The string is in the format of r".*Perfetto TraceBuffer saved to file:
<file path>.*"
Returns:
the string that the program waits for. If the string doesn't show up,
return None.
"""
pattern = re.compile(r'.*Perfetto TraceBuffer saved to file: {}.*'.
format(self._get_remote_path()))
# The pre_launch_timestamp has more fractional digits than datetime's %f
# can parse. Trim the last three digits so they align. For example:
# 2019-07-02 23:20:06.972674825 -> 2019-07-02 23:20:06.972674
assert len(pre_launch_timestamp) == len('2019-07-02 23:20:06.972674825')
timestamp = datetime.datetime.strptime(pre_launch_timestamp[:-3],
'%Y-%m-%d %H:%M:%S.%f')
# The timeout of perfetto trace is longer than the normal app run timeout.
timeout_dt = self.app_runner.timeout * PerfettoTraceCollector._COLLECTOR_TIMEOUT_MULTIPLIER
timeout_end = timestamp + datetime.timedelta(seconds=timeout_dt)
return logcat_utils.blocking_wait_for_logcat_pattern(timestamp,
pattern,
timeout_end)
def _get_remote_path(self):
# For example: android.music%2Fmusic.TopLevelActivity.perfetto_trace.pb
return iorapd_utils._iorapd_path_to_data_file(self.app_runner.package,
self.app_runner.activity,
PerfettoTraceCollector.TRACE_FILE_SUFFIX)

View File

@ -1,101 +0,0 @@
#!/usr/bin/env python3
#
# Copyright 2019, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Unit tests for the data_frame.py script."""
import os
import sys
from pathlib import Path
from datetime import timedelta
from mock import call, patch
from perfetto_trace_collector import PerfettoTraceCollector
sys.path.append(Path(os.path.realpath(__file__)).parents[2])
from app_startup.lib.app_runner import AppRunner
RUNNER = PerfettoTraceCollector(package='music',
activity='MainActivity',
compiler_filter=None,
timeout=10,
simulate=False,
trace_duration = timedelta(milliseconds=1000),
# No actual file will be created. Just to
# check the command.
save_destination_file_path='/tmp/trace.pb')
def _mocked_run_shell_command(*args, **kwargs):
if args[0] == 'adb shell ps | grep "music" | awk \'{print $2;}\'':
return (True, '9999')
else:
return (True, '')
@patch('lib.logcat_utils.blocking_wait_for_logcat_pattern')
@patch('lib.cmd_utils.run_shell_command')
def test_perfetto_trace_collector_preprocess(mock_run_shell_command,
mock_blocking_wait_for_logcat_pattern):
mock_run_shell_command.side_effect = _mocked_run_shell_command
mock_blocking_wait_for_logcat_pattern.return_value = "Succeed!"
RUNNER.preprocess()
calls = [call('adb root'),
call('adb shell "getenforce"'),
call('adb shell "setenforce 0"'),
call('adb shell "stop"'),
call('adb shell "start"'),
call('adb wait-for-device'),
call('adb shell ps | grep "music" | awk \'{print $2;}\''),
call('adb shell "kill 9999"'),
call(
'adb shell "[[ -f \'/data/misc/iorapd/music%2FMainActivity.perfetto_trace.pb\' ]] '
'&& rm -f \'/data/misc/iorapd/music%2FMainActivity.perfetto_trace.pb\' || exit 0"'),
call('adb shell "setprop "iorapd.perfetto.trace_duration_ms" "1000""'),
call(
'bash -c "source {}; iorapd_stop"'.format(
AppRunner.IORAP_COMMON_BASH_SCRIPT)),
call(
'bash -c "source {}; iorapd_perfetto_enable"'.format(
AppRunner.IORAP_COMMON_BASH_SCRIPT)),
call(
'bash -c "source {}; iorapd_readahead_disable"'.format(
AppRunner.IORAP_COMMON_BASH_SCRIPT)),
call(
'bash -c "source {}; iorapd_start"'.format(
AppRunner.IORAP_COMMON_BASH_SCRIPT)),
call('adb shell "echo 3 > /proc/sys/vm/drop_caches"')]
mock_run_shell_command.assert_has_calls(calls)
@patch('lib.logcat_utils.blocking_wait_for_logcat_pattern')
@patch('lib.cmd_utils.run_shell_command')
def test_perfetto_trace_collector_postprocess(mock_run_shell_command,
mock_blocking_wait_for_logcat_pattern):
mock_run_shell_command.side_effect = _mocked_run_shell_command
mock_blocking_wait_for_logcat_pattern.return_value = "Succeed!"
RUNNER.postprocess('2019-07-02 23:20:06.972674825')
calls = [call('adb shell ps | grep "music" | awk \'{print $2;}\''),
call('adb shell "kill 9999"'),
call(
'bash -c "source {}; iorapd_perfetto_disable"'.format(
AppRunner.IORAP_COMMON_BASH_SCRIPT)),
call('adb pull '
'"/data/misc/iorapd/music%2FMainActivity.perfetto_trace.pb" '
'"/tmp/trace.pb"')]
mock_run_shell_command.assert_has_calls(calls)

View File

@ -1,215 +0,0 @@
#!/bin/bash
#
# Copyright 2019, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
usage() {
cat <<EOF
Usage: launch_application package activity | parse_metrics --package <name> --timestamp <timestamp> [OPTIONS]...
Reads from stdin the result of 'am start' metrics. May also parse logcat
for additional metrics.
Output form:
MetricName_unit=numeric_value
MetricName2_unit=numeric_value2
This may block until all desired metrics are parsed from logcat.
To get a list of metrics without doing real parsing, use --simulate.
To add package-specific metrics, add a script called 'metrics/\$full_package_name'
that exposes additional metrics in same way as above.
(required)
-p, --package <name> package of the app that is being used
-ts, --timestamp <name> logcat timestamp [only looks at logcat entries after this timestamp].
(optional)
-s, --simulate prints dummy values instead of real metrics
-a, --activity <name> activity to use (default: inferred)
-h, --help usage information (this)
-v, --verbose enable extra verbose printing
-t, --timeout <sec> how many seconds to timeout when trying to wait for logcat to change
-rfd, --reportfullydrawn wait for report fully drawn (default: off)
EOF
}
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
source "$DIR/lib/common"
report_fully_drawn="n"
package=""
activity=""
timeout=5
simulate="n"
parse_arguments() {
while [[ $# -gt 0 ]]; do
case "$1" in
-h|--help)
usage
exit 0
;;
-p|--package)
package="$2"
shift
;;
-a|--activity)
activity="$2"
shift
;;
-v|--verbose)
export verbose="y"
;;
-t|--timeout)
timeout="$2"
shift
;;
-ts|--timestamp)
timestamp="$2"
shift
;;
-s|--simulate)
simulate="y"
;;
-rfd|--reportfullydrawn)
report_fully_drawn="y"
;;
*)
echo "Invalid argument: $1" >&2
exit 1
esac
shift
done
}
# Main entry point
if [[ $# -eq 0 ]]; then
usage
exit 1
else
parse_arguments "$@"
# if we do not have a package, exit early with an error
[[ "$package" == "" ]] && echo "--package not specified" 1>&2 && exit 64
# ignore timestamp for --simulate. it's optional.
if [[ $simulate == y ]]; then
timestamp=0
fi
# if we do not have timestamp, exit early with an error
[[ "$timestamp" == "" ]] && echo "--timestamp not specified" 1>&2 && exit 64
if [[ "$activity" == "" ]] && [[ "$simulate" != "y" ]]; then
activity="$(get_activity_name "$package")"
if [[ "$activity" == "" ]]; then
echo "Activity name could not be found, invalid package name?" 1>&2
exit 64
else
verbose_print "Activity name inferred: " "$activity"
fi
fi
fi
parse_metric_from_logcat() {
local metric_name="$1"
local pattern="$2"
local re_pattern="$3"
local retcode
local result
local sec
local ms
# parse logcat for 'Displayed...' and that other one...
# 05-06 14:34:08.854 29460 29481 I ActivityTaskManager: Displayed com.google.android.dialer/.extensions.GoogleDialtactsActivity: +361ms
verbose_print "parse_metric_from_logcat: $re_pattern"
echo -ne "$metric_name="
if [[ $simulate == y ]]; then
echo "-1"
return 0
fi
result="$(logcat_extract_pattern "$timeout" "$timestamp" "$pattern" "$re_pattern")"
retcode=$?
if [[ $retcode -ne 0 ]]; then
# Timed out before finding the pattern. Could also mean the pattern is wrong.
echo "Parse $re_pattern from logcat TIMED OUT after $timeout seconds." >&2
echo "-$?"
return $retcode
fi
# "10s123ms" -> "10s123"
result=${result/ms/}
if [[ $result =~ s ]]; then
ms=${result/*s/}
sec=${result/s*/}
else
sec=0
ms=$result
fi
((result=sec*1000+ms))
echo "$result"
return $retcode
}
total_time="-1"
if [[ $simulate != y ]]; then
verbose_print 'logcat timestamp NOW: ' $(logcat_save_timestamp)
# parse stdin for 'am start' result
while read -t "$timeout" -r input_line; do
verbose_print 'stdin:' "$input_line"
if [[ $input_line == *TotalTime:* ]]; then
total_time="$(echo "$input_line" | sed 's/TotalTime: \([[:digit:]]\+\)/\1/g')"
# but keep reading the rest from stdin until <EOF>
fi
done
fi
echo "TotalTime_ms=$total_time"
# parse logcat for 'Displayed...' and that other one...
# 05-06 14:34:08.854 29460 29481 I ActivityTaskManager: Displayed com.google.android.dialer/.extensions.GoogleDialtactsActivity: +361ms
pattern="ActivityTaskManager: Displayed ${package}"
re_pattern='.*Displayed[[:blank:]]\+'"${package}"'[/][^[:blank:]]\+[[:blank:]]+\([[:digit:]]\+ms\|[[:digit:]]\+s[[:digit:]]\+ms\).*'
parse_metric_from_logcat "Displayed_ms" "$pattern" "$re_pattern"
# Only track ReportFullyDrawn with --reportfullydrawn/-rfd flags
if [[ $report_fully_drawn == y ]]; then
# 01-16 17:31:44.550 11172 11204 I ActivityTaskManager: Fully drawn com.google.android.GoogleCamera/com.android.camera.CameraLauncher: +10s897ms
pattern="ActivityTaskManager: Fully drawn ${package}"
#re_pattern='.*Fully drawn[[:blank:]]\+'"${package}"'[/][^[:blank:]]\+[[:blank:]]+\([[:digit:]]\+\).*'
re_pattern='.*Fully drawn[[:blank:]]\+'"${package}"'[/][^[:blank:]]\+[[:blank:]]+\([[:digit:]]\+ms\|[[:digit:]]\+s[[:digit:]]\+ms\).*'
parse_metric_from_logcat "Fully_drawn_ms" "$pattern" "$re_pattern"
fi
# also call into package-specific scripts if there are additional metrics
if [[ -x "$DIR/metrics/$package" ]]; then
source "$DIR/metrics/$package" "$timestamp"
else
verbose_print parse_metrics: no per-package metrics script found at "$DIR/metrics/$package"
fi

View File

@ -1,232 +0,0 @@
#!/usr/bin/env python3
#
# Copyright 2018, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# Query the current compiler filter for an application by its package name.
# (By parsing the results of the 'adb shell dumpsys package $package' command).
# The output is a string "$compilation_filter $compilation_reason $isa".
#
# See --help for more details.
#
# -----------------------------------
#
# Sample usage:
#
# $> ./query_compiler_filter.py --package com.google.android.calculator
# speed-profile unknown arm64
#
import argparse
import os
import re
import sys
# TODO: refactor this with a common library file with analyze_metrics.py
DIR = os.path.abspath(os.path.dirname(__file__))
sys.path.append(os.path.dirname(DIR))
import lib.cmd_utils as cmd_utils
import lib.print_utils as print_utils
from typing import List, NamedTuple, Iterable
_DEBUG_FORCE = None # Ignore -d/--debug if this is not none.
def parse_options(argv: List[str] = None):
"""Parse command line arguments and return an argparse Namespace object."""
parser = argparse.ArgumentParser(description="Query the compiler filter for a package.")
# argparse considers args starting with - and -- optional in --help, even though required=True.
# by using a named argument group --help will clearly say that it's required instead of optional.
required_named = parser.add_argument_group('required named arguments')
required_named.add_argument('-p', '--package', action='store', dest='package', help='package of the application', required=True)
# optional arguments
# use a group here to get the required arguments to appear 'above' the optional arguments in help.
optional_named = parser.add_argument_group('optional named arguments')
optional_named.add_argument('-i', '--isa', '--instruction-set', action='store', dest='instruction_set', help='which instruction set to select. defaults to the first one available if not specified.', choices=('arm64', 'arm', 'x86_64', 'x86'))
optional_named.add_argument('-s', '--simulate', dest='simulate', action='store_true', help='Print which commands will run, but don\'t run the apps')
optional_named.add_argument('-d', '--debug', dest='debug', action='store_true', help='Add extra debugging output')
return parser.parse_args(argv)
def remote_dumpsys_package(package: str, simulate: bool) -> str:
# --simulate is used for interactive debugging/development, but also for the unit test.
if simulate:
return """
Dexopt state:
[%s]
path: /data/app/%s-D7s8PLidqqEq7Jc7UH_a5A==/base.apk
arm64: [status=speed-profile] [reason=unknown]
path: /data/app/%s-D7s8PLidqqEq7Jc7UH_a5A==/base.apk
arm: [status=speed] [reason=first-boot]
path: /data/app/%s-D7s8PLidqqEq7Jc7UH_a5A==/base.apk
x86: [status=quicken] [reason=install]
""" %(package, package, package, package)
code, res = cmd_utils.execute_arbitrary_command(['adb', 'shell', 'dumpsys',
'package', package],
simulate=False,
timeout=5,
shell=False)
if code:
return res
else:
raise AssertionError("Failed to dumpsys package, errors = %s", res)
ParseTree = NamedTuple('ParseTree', [('label', str), ('children', List['ParseTree'])])
DexoptState = ParseTree # With the Dexopt state: label
ParseResult = NamedTuple('ParseResult', [('remainder', List[str]), ('tree', ParseTree)])
def find_parse_subtree(parse_tree: ParseTree, match_regex: str) -> ParseTree:
if re.match(match_regex, parse_tree.label):
return parse_tree
for node in parse_tree.children:
res = find_parse_subtree(node, match_regex)
if res:
return res
return None
def find_parse_children(parse_tree: ParseTree, match_regex: str) -> Iterable[ParseTree]:
for node in parse_tree.children:
if re.match(match_regex, node.label):
yield node
def parse_tab_subtree(label: str, str_lines: List[str], separator=' ', indent=-1) -> ParseResult:
children = []
get_indent_level = lambda line: len(line) - len(line.lstrip())
line_num = 0
keep_going = True
while keep_going:
keep_going = False
for line_num in range(len(str_lines)):
line = str_lines[line_num]
current_indent = get_indent_level(line)
print_utils.debug_print("INDENT=%d, LINE=%s" %(current_indent, line))
current_label = line.lstrip()
# skip empty lines
if line.lstrip() == "":
continue
if current_indent > indent:
parse_result = parse_tab_subtree(current_label, str_lines[line_num+1::], separator, current_indent)
str_lines = parse_result.remainder
children.append(parse_result.tree)
keep_going = True
else:
# current_indent <= indent
keep_going = False
break
new_remainder = str_lines[line_num::]
print_utils.debug_print("NEW REMAINDER: ", new_remainder)
parse_tree = ParseTree(label, children)
return ParseResult(new_remainder, parse_tree)
def parse_tab_tree(str_tree: str, separator=' ', indentation_level=-1) -> ParseTree:
label = None
lst = []
line_num = 0
line_lst = str_tree.split("\n")
return parse_tab_subtree("", line_lst, separator, indentation_level).tree
def parse_dexopt_state(dumpsys_tree: ParseTree) -> DexoptState:
res = find_parse_subtree(dumpsys_tree, "Dexopt(\s+)state[:]?")
if not res:
raise AssertionError("Could not find the Dexopt state")
return res
def find_first_compiler_filter(dexopt_state: DexoptState, package: str, instruction_set: str) -> str:
lst = find_all_compiler_filters(dexopt_state, package)
print_utils.debug_print("all compiler filters: ", lst)
for compiler_filter_info in lst:
if not instruction_set:
return compiler_filter_info
if compiler_filter_info.isa == instruction_set:
return compiler_filter_info
return None
CompilerFilterInfo = NamedTuple('CompilerFilterInfo', [('isa', str), ('status', str), ('reason', str)])
def find_all_compiler_filters(dexopt_state: DexoptState, package: str) -> List[CompilerFilterInfo]:
lst = []
package_tree = find_parse_subtree(dexopt_state, re.escape("[%s]" %package))
if not package_tree:
raise AssertionError("Could not find any package subtree for package %s" %(package))
print_utils.debug_print("package tree: ", package_tree)
for path_tree in find_parse_children(package_tree, "path: "):
print_utils.debug_print("path tree: ", path_tree)
matchre = re.compile("([^:]+):\s+\[status=([^\]]+)\]\s+\[reason=([^\]]+)\]")
for isa_node in find_parse_children(path_tree, matchre):
matches = re.match(matchre, isa_node.label).groups()
info = CompilerFilterInfo(*matches)
lst.append(info)
return lst
def main() -> int:
opts = parse_options()
cmd_utils._debug = opts.debug
if _DEBUG_FORCE is not None:
cmd_utils._debug = _DEBUG_FORCE
print_utils.debug_print("parsed options: ", opts)
# Note: This can often 'fail' if the package isn't actually installed.
package_dumpsys = remote_dumpsys_package(opts.package, opts.simulate)
print_utils.debug_print("package dumpsys: ", package_dumpsys)
dumpsys_parse_tree = parse_tab_tree(package_dumpsys)
print_utils.debug_print("parse tree: ", dumpsys_parse_tree)
dexopt_state = parse_dexopt_state(dumpsys_parse_tree)
filter = find_first_compiler_filter(dexopt_state, opts.package, opts.instruction_set)
if filter:
print(filter.status, end=' ')
print(filter.reason, end=' ')
print(filter.isa)
else:
print("ERROR: Could not find any compiler-filter for package %s, isa %s" %(opts.package, opts.instruction_set), file=sys.stderr)
return 1
return 0
if __name__ == '__main__':
sys.exit(main())
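A minimal sketch of driving the parsing pipeline directly against the built-in --simulate dumpsys output (the package name is hypothetical); this mirrors what main() does:

import query_compiler_filter as qcf

pkg = 'com.example.app'  # hypothetical package
dumpsys_text = qcf.remote_dumpsys_package(pkg, simulate=True)

tree = qcf.parse_tab_tree(dumpsys_text)
dexopt_state = qcf.parse_dexopt_state(tree)
info = qcf.find_first_compiler_filter(dexopt_state, pkg, instruction_set='arm64')

print(info.status, info.reason, info.isa)  # expected: speed-profile unknown arm64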

View File

@ -1,116 +0,0 @@
#!/usr/bin/env python3
#
# Copyright 2018, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Unit tests for the query_compiler_filter.py script.
Install:
$> sudo apt-get install python3-pytest ## OR
$> pip install -U pytest
See also https://docs.pytest.org/en/latest/getting-started.html
Usage:
$> ./query_compiler_filter.py
$> pytest query_compiler_filter.py
$> python -m pytest query_compiler_filter.py
See also https://docs.pytest.org/en/latest/usage.html
"""
# global imports
from contextlib import contextmanager
import io
import shlex
import sys
import typing
# pip imports
import pytest
# local imports
import query_compiler_filter as qcf
@contextmanager
def redirect_stdout_stderr():
"""Redirect stdout/stderr to a new StringIO for duration of context."""
old_stdout = sys.stdout
old_stderr = sys.stderr
new_stdout = io.StringIO()
sys.stdout = new_stdout
new_stderr = io.StringIO()
sys.stderr = new_stderr
try:
yield (new_stdout, new_stderr)
finally:
sys.stdout = old_stdout
sys.stderr = old_stderr
# Seek back to the beginning so we can read whatever was written into it.
new_stdout.seek(0)
new_stderr.seek(0)
@contextmanager
def replace_argv(argv):
""" Temporarily replace argv for duration of this context."""
old_argv = sys.argv
sys.argv = [sys.argv[0]] + argv
try:
yield
finally:
sys.argv = old_argv
def exec_main(argv):
"""Run the query_compiler_filter main function with the provided arguments.
Returns the stdout result when successful, assertion failure otherwise.
"""
try:
with redirect_stdout_stderr() as (the_stdout, the_stderr):
with replace_argv(argv):
code = qcf.main()
assert 0 == code, the_stderr.readlines()
all_lines = the_stdout.readlines()
return "".join(all_lines)
finally:
the_stdout.close()
the_stderr.close()
def test_query_compiler_filter():
# no --instruction-set specified: provide whatever was the 'first' filter.
assert exec_main(['--simulate',
'--package', 'com.google.android.apps.maps']) == \
"speed-profile unknown arm64\n"
# specifying an instruction set finds the exact compiler filter match.
assert exec_main(['--simulate',
'--package', 'com.google.android.apps.maps',
'--instruction-set', 'arm64']) == \
"speed-profile unknown arm64\n"
assert exec_main(['--simulate',
'--package', 'com.google.android.apps.maps',
'--instruction-set', 'arm']) == \
"speed first-boot arm\n"
assert exec_main(['--simulate',
'--debug',
'--package', 'com.google.android.apps.maps',
'--instruction-set', 'x86']) == \
"quicken install x86\n"
if __name__ == '__main__':
pytest.main()

View File

@ -1,487 +0,0 @@
#!/bin/bash
#
# Copyright 2018, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
usage() {
cat <<EOF
Usage: run_app_with_prefetch --package <name> [OPTIONS]...
-p, --package <name> package of the app to test
-a, --activity <name> activity to use
-h, --help usage information (this)
-v, --verbose enable extra verbose printing
-i, --input <file> trace file protobuf (default 'TraceFile.pb')
-r, --readahead <mode> cold, warm, fadvise, mlock (default 'warm')
-w, --when <when> aot or jit (default 'jit')
-c, --count <count> how many times to run (default 1)
-s, --sleep <sec> how long to sleep after readahead
-t, --timeout <sec> how many seconds to timeout in between each app run (default 10)
-o, --output <file.csv> what file to write the performance results into as csv (default stdout)
EOF
}
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
source "$DIR/../iorap/common"
report_fully_drawn="n"
needs_trace_file="n"
input_file=""
package=""
mode='warm'
count=2
sleep_time=2
timeout=10
output="" # stdout by default
when="jit"
parse_arguments() {
while [[ $# -gt 0 ]]; do
case "$1" in
-h|--help)
usage
exit 0
;;
-p|--package)
package="$2"
shift
;;
-a|--activity)
activity="$2"
shift
;;
-i|--input)
input_file="$2"
shift
;;
-v|--verbose)
export verbose="y"
;;
-r|--readahead)
mode="$2"
shift
;;
-rfd|--reportfullydrawn)
report_fully_drawn="y"
;;
-c|--count)
count="$2"
((count+=1))
shift
;;
-s|--sleep)
sleep_time="$2"
shift
;;
-t|--timeout)
timeout="$2"
shift
;;
-o|--output)
output="$2"
shift
;;
-w|--when)
when="$2"
shift
;;
--compiler-filter)
compiler_filter="$2"
shift
;;
*)
echo "Invalid argument: $1" >&2
exit 1
esac
shift
done
if [[ $when == "aot" ]]; then
# TODO: re-implement aot later for experimenting.
echo "Error: --when $when is unsupported" >&2
exit 1
elif [[ $when != "jit" ]]; then
echo "Error: --when must be one of (aot jit)." >&2
exit 1
fi
}
echo_to_output_file() {
if [[ "x$output" != x ]]; then
echo "$@" >> $output
fi
# Always echo to stdout as well.
echo "$@"
}
find_package_path() {
local pkg="$1"
res="$(adb shell find "/data/app/$pkg"-'*' -maxdepth 0 2> /dev/null)"
if [[ -z $res ]]; then
res="$(adb shell find "/system/app/$pkg"-'*' -maxdepth 0 2> /dev/null)"
fi
echo "$res"
}
# Main entry point
if [[ $# -eq 0 ]]; then
usage
exit 1
else
parse_arguments "$@"
# if we do not have a package, exit early with an error
[[ "$package" == "" ]] && echo "--package not specified" 1>&2 && exit 1
if [[ $mode != "cold" && $mode != "warm" ]]; then
needs_trace_file="y"
if [[ -z "$input_file" ]] || ! [[ -f $input_file ]]; then
echo "--input not specified" 1>&2
exit 1
fi
fi
if [[ "$activity" == "" ]]; then
activity="$(get_activity_name "$package")"
if [[ "$activity" == "" ]]; then
echo "Activity name could not be found, invalid package name?" 1>&2
exit 1
else
verbose_print "Activity name inferred: " "$activity"
fi
fi
fi
adb root > /dev/null
if [[ ($when == jit) || ($when == aot) ]] && [[ "$(adb shell getenforce)" != "Permissive" ]]; then
echo "Disable selinux permissions and restart framework."
adb shell setenforce 0
adb shell stop
adb shell start
adb wait-for-device
fi
# TODO: set performance governor etc., preferably only once
# before every single app run.
# Kill everything before running.
remote_pkill "$package"
sleep 1
timings_array=()
package_path="$(find_package_path "$package")"
if [[ $? -ne 0 ]]; then
echo "Failed to detect package path for '$package'" >&2
exit 1
fi
verbose_print "Package was in path '$package_path'"
application_trace_file_path="$package_path/TraceFile.pb"
trace_file_directory="$package_path"
if [[ $needs_trace_file == y ]]; then
# system server always passes down the package path in a hardcoded spot.
if [[ $when == "jit" ]]; then
if ! iorapd_compiler_install_trace_file "$package" "$activity" "$input_file"; then
echo "Error: Failed to install compiled TraceFile.pb for '$package/$activity'" >&2
exit 1
fi
keep_application_trace_file="y"
else
echo "TODO: --readahead=aot is non-functional and needs to be fixed." >&2
exit 1
# otherwise use a temporary directory to get normal non-jit behavior.
trace_file_directory="/data/local/tmp/prefetch/$package"
adb shell mkdir -p "$trace_file_directory"
verbose_print adb push "$input_file" "$trace_file_directory/TraceFile.pb"
adb push "$input_file" "$trace_file_directory/TraceFile.pb"
fi
fi
# Everything other than JIT: remove the trace file,
# otherwise system server activity hints will kick in
# and the new just-in-time app pre-warmup will happen.
if [[ $keep_application_trace_file == "n" ]]; then
iorapd_compiler_purge_trace_file "$package" "$activity"
fi
# Perform AOT readahead/pinning/etc when an application is about to be launched.
# For JIT readahead, we allow the system to handle it itself (this is a no-op).
#
# For warm, cold, etc modes which don't need readahead this is always a no-op.
perform_aot() {
local the_when="$1" # user: aot, jit
local the_mode="$2" # warm, cold, fadvise, mlock, etc.
# iorapd readahead for jit+(mlock/fadvise)
if [[ $the_when == "jit" && $the_mode != 'warm' && $the_mode != 'cold' ]]; then
iorapd_readahead_enable
return 0
fi
if [[ $the_when != "aot" ]]; then
# TODO: just in time implementation.. should probably use system server.
return 0
fi
# any non-warm/non-cold modes should use the iorap-activity-hint wrapper script.
if [[ $the_mode != 'warm' && $the_mode != 'cold' ]]; then
# TODO: add activity_hint_sender.exp
verbose_print "starting with package=$package package_path=$trace_file_directory"
coproc hint_sender_fd { $ANDROID_BUILD_TOP/system/iorap/src/sh/activity_hint_sender.exp "$package" "$trace_file_directory" "$the_mode"; }
hint_sender_pid=$!
verbose_print "Activity hint sender began"
notification_success="n"
while read -r -u "${hint_sender_fd[0]}" hint_sender_output; do
verbose_print "$hint_sender_output"
if [[ "$hint_sender_output" == "Press any key to send completed event..."* ]]; then
verbose_print "WE DID SEE NOTIFICATION SUCCESS."
notification_success='y'
# Give it some time to actually perform the readaheads.
sleep $sleep_time
break
fi
done
if [[ $notification_success == 'n' ]]; then
echo "[FATAL] Activity hint notification failed." 1>&2
exit 1
fi
fi
}
# Perform cleanup at the end of each loop iteration.
perform_post_launch_cleanup() {
local the_when="$1" # user: aot, jit
local the_mode="$2" # warm, cold, fadvise, mlock, etc.
local logcat_timestamp="$3" # timestamp from before am start.
local res
if [[ $the_when != "aot" ]]; then
if [[ $the_mode != 'warm' && $the_mode != 'cold' ]]; then
# Validate that readahead completes.
# If this fails for some reason, then this will also discard the timing of the run.
iorapd_readahead_wait_until_finished "$package" "$activity" "$logcat_timestamp" "$timeout"
res=$?
iorapd_readahead_disable
return $res
fi
# Don't need to do anything for warm or cold.
return 0
fi
# any non-warm/non-cold modes should use the iorap-activity-hint wrapper script.
if [[ $the_mode != 'warm' && $the_mode != 'cold' ]]; then
# Clean up the hint sender by telling it that the launch was completed,
# and to shutdown the watcher.
echo "Done\n" >&"${hint_sender_fd[1]}"
while read -r -u "${hint_sender_fd[0]}" hint_sender_output; do
verbose_print "$hint_sender_output"
done
wait $hint_sender_pid
fi
}
configure_compiler_filter() {
local the_compiler_filter="$1"
local the_package="$2"
local the_activity="$3"
if [[ -z $the_compiler_filter ]]; then
verbose_print "No --compiler-filter specified, don't need to force it."
return 0
fi
local current_compiler_filter_info="$("$DIR"/query_compiler_filter.py --package "$the_package")"
local res=$?
if [[ $res -ne 0 ]]; then
return $res
fi
local current_compiler_filter
local current_reason
local current_isa
read current_compiler_filter current_reason current_isa <<< "$current_compiler_filter_info"
verbose_print "Compiler Filter="$current_compiler_filter "Reason="$current_reason "Isa="$current_isa
# Don't trust reasons that aren't 'unknown' because that means we didn't manually force the compilation filter.
# (e.g. if any automatic system-triggered compilations are not unknown).
if [[ $current_reason != "unknown" ]] || [[ $current_compiler_filter != $the_compiler_filter ]]; then
verbose_print "$DIR"/force_compiler_filter --compiler-filter "$the_compiler_filter" --package "$the_package" --activity "$the_activity"
"$DIR"/force_compiler_filter --compiler-filter "$the_compiler_filter" --package "$the_package" --activity "$the_activity"
res=$?
else
verbose_print "Queried compiler-filter matched requested compiler-filter, skip forcing."
res=0
fi
return $res
}
# Ensure the APK is currently compiled with whatever we passed in via --compiler-filter.
# No-op if this option was not passed in.
configure_compiler_filter "$compiler_filter" "$package" "$activity" || exit 1
# convert 'a=b\nc=d\ne=f\n...' into 'b,d,f,...'
parse_metrics_output_string() {
# single string with newlines in it.
local input="$1"
local metric_name
local metric_value
local rest
local all_metrics=()
# (n1=v1 n2=v2 n3=v3 ...)
readarray -t all_metrics <<< "$input"
local metric_values=()
local i
for i in "${all_metrics[@]}"
do
verbose_print "parse_metrics_output: element '$i'"
# name=value
IFS='=' read -r metric_name metric_value rest <<< "$i"
verbose_print "parse_metrics_output: metric_value '$metric_value'"
# (value1 value2 value3 ...)
metric_values+=(${metric_value})
done
# "value1,value2,value3,..."
join_by ',' "${metric_values[@]}"
}
# convert 'a=b\nc=d\ne=f\n...' into 'b,d,f,...'
parse_metrics_output() {
local metric_name
local metric_value
local rest
local all_metrics=()
while IFS='=' read -r metric_name metric_value rest; do
verbose_print "metric: $metric_name, value: $metric_value; rest: $rest"
all_metrics+=($metric_value)
done
join_by ',' "${all_metrics[@]}"
}
# convert 'a=b\nc=d\ne=f\n...' into 'a,c,e,...'
parse_metrics_header() {
local metric_name
local metric_value
local rest
local all_metrics=()
while IFS='=' read -r metric_name metric_value rest; do
verbose_print "metric: $metric_name, value: $metric_value; rest: $rest"
all_metrics+=($metric_name)
done
join_by ',' "${all_metrics[@]}"
}
if [[ $report_fully_drawn == y ]]; then
metrics_header="$("$DIR/parse_metrics" --package "$package" --activity "$activity" --simulate --reportfullydrawn | parse_metrics_header)"
else
metrics_header="$("$DIR/parse_metrics" --package "$package" --activity "$activity" --simulate | parse_metrics_header)"
fi
# TODO: This loop logic could probably be moved into app_startup_runner.py
for ((i=0;i<count;++i)) do
verbose_print "=========================================="
verbose_print "==== ITERATION $i ===="
verbose_print "=========================================="
if [[ $mode != "warm" ]]; then
# The package must be killed **before** we drop caches, otherwise pages will stay resident.
verbose_print "Kill package for non-warm start."
remote_pkill "$package"
verbose_print "Drop caches for non-warm start."
# Drop all caches to get cold starts.
adb shell "echo 3 > /proc/sys/vm/drop_caches"
fi
perform_aot "$when" "$mode"
verbose_print "Running with timeout $timeout"
pre_launch_timestamp="$(logcat_save_timestamp)"
# TODO: multiple metrics output.
if [[ $report_fully_drawn == y ]]; then
total_time="$(timeout $timeout "$DIR/launch_application" "$package" "$activity" | "$DIR/parse_metrics" --package "$package" --activity "$activity" --timestamp "$pre_launch_timestamp" --reportfullydrawn | parse_metrics_output)"
else
total_time="$(timeout $timeout "$DIR/launch_application" "$package" "$activity" | "$DIR/parse_metrics" --package "$package" --activity "$activity" --timestamp "$pre_launch_timestamp" | parse_metrics_output)"
fi
if [[ $? -ne 0 ]]; then
echo "WARNING: Skip bad result, try iteration again." >&2
((i=i-1))
continue
fi
perform_post_launch_cleanup "$when" "$mode" "$pre_launch_timestamp"
if [[ $? -ne 0 ]]; then
echo "WARNING: Skip bad cleanup, try iteration again." >&2
((i=i-1))
continue
fi
echo "Iteration $i. Total time was: $total_time"
timings_array+=("$total_time")
done
# drop the first result which is usually garbage.
timings_array=("${timings_array[@]:1}")
# Print the CSV header first.
echo_to_output_file "$metrics_header"
# Print out interactive/debugging timings and averages.
# Other scripts should use the --output flag and parse the CSV.
for tim in "${timings_array[@]}"; do
echo_to_output_file "$tim"
done
if [[ x$output != x ]]; then
echo " Saved results to '$output'"
fi
if [[ $needs_trace_file == y ]] ; then
iorapd_compiler_purge_trace_file "$package" "$activity"
fi
# Kill the process to ensure AM isn't keeping it around.
remote_pkill "$package"
exit 0

View File

@ -1,230 +0,0 @@
#!/usr/bin/env python3
#
# Copyright 2019, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Runner of one test given a setting.
Run app and gather the measurement in a certain configuration.
Print the result to stdout.
See --help for more details.
Sample usage:
$> ./run_app_with_prefetch.py -p com.android.settings -a
com.android.settings.Settings -r fadvise -i input
"""
import argparse
import os
import sys
import time
from typing import List, Tuple, Optional
# local imports
import lib.adb_utils as adb_utils
from lib.app_runner import AppRunner, AppRunnerListener
# global variables
DIR = os.path.abspath(os.path.dirname(__file__))
sys.path.append(os.path.dirname(DIR))
import lib.print_utils as print_utils
import lib.cmd_utils as cmd_utils
import iorap.lib.iorapd_utils as iorapd_utils
class PrefetchAppRunner(AppRunnerListener):
def __init__(self,
package: str,
activity: Optional[str],
readahead: str,
compiler_filter: Optional[str],
timeout: Optional[int],
simulate: bool,
debug: bool,
input:Optional[str],
**kwargs):
self.app_runner = AppRunner(package,
activity,
compiler_filter,
timeout,
simulate)
self.app_runner.add_callbacks(self)
self.simulate = simulate
self.readahead = readahead
self.debug = debug
self.input = input
print_utils.DEBUG = self.debug
cmd_utils.SIMULATE = self.simulate
def run(self) -> Optional[List[Tuple[str]]]:
"""Runs an app.
Returns:
A list of (metric, value) tuples.
"""
return self.app_runner.run()
def preprocess(self):
passed = self.validate_options()
if not passed:
return
# Sets up adb environment.
adb_utils.root()
adb_utils.disable_selinux()
time.sleep(1)
# Kill any existing process of this app
adb_utils.pkill(self.app_runner.package)
if self.readahead != 'warm':
print_utils.debug_print('Drop caches for non-warm start.')
# Drop all caches to get cold starts.
adb_utils.vm_drop_cache()
if self.readahead != 'warm' and self.readahead != 'cold':
iorapd_utils.enable_iorapd_readahead()
def postprocess(self, pre_launch_timestamp: str):
passed = self._perform_post_launch_cleanup(pre_launch_timestamp)
if not passed and not self.app_runner.simulate:
print_utils.error_print('Cannot perform post launch cleanup!')
return None
# Kill any existing process of this app
adb_utils.pkill(self.app_runner.package)
def _perform_post_launch_cleanup(self, logcat_timestamp: str) -> bool:
"""Performs cleanup at the end of each loop iteration.
Returns:
A bool indicates whether the cleanup succeeds or not.
"""
if self.readahead != 'warm' and self.readahead != 'cold':
passed = iorapd_utils.wait_for_iorapd_finish(self.app_runner.package,
self.app_runner.activity,
self.app_runner.timeout,
self.debug,
logcat_timestamp)
if not passed:
return passed
return iorapd_utils.disable_iorapd_readahead()
# Don't need to do anything for warm or cold.
return True
def metrics_selector(self, am_start_output: str,
pre_launch_timestamp: str) -> str:
"""Parses the metric after app startup by reading from logcat in a blocking
manner until all metrics have been found".
Returns:
the total time and displayed time of app startup.
For example: "TotalTime=123\nDisplayedTime=121
"""
total_time = AppRunner.parse_total_time(am_start_output)
displayed_time = adb_utils.blocking_wait_for_logcat_displayed_time(
pre_launch_timestamp, self.app_runner.package, self.app_runner.timeout)
return 'TotalTime={}\nDisplayedTime={}'.format(total_time, displayed_time)
def validate_options(self) -> bool:
"""Validates the activity and trace file if needed.
Returns:
A bool indicating whether the activity and trace file are valid.
"""
needs_trace_file = self.readahead != 'cold' and self.readahead != 'warm'
if needs_trace_file and (self.input is None or
not os.path.exists(self.input)):
print_utils.error_print('--input not specified or does not exist!')
return False
# Install necessary trace file. This must be after the activity checking.
if needs_trace_file:
passed = iorapd_utils.iorapd_compiler_install_trace_file(
self.app_runner.package, self.app_runner.activity, self.input)
if not cmd_utils.SIMULATE and not passed:
print_utils.error_print('Failed to install compiled TraceFile.pb for '
'"{}/{}"'.
format(self.app_runner.package,
self.app_runner.activity))
return False
return True
def parse_options(argv: List[str] = None):
"""Parses command line arguments and return an argparse Namespace object."""
parser = argparse.ArgumentParser(
description='Run an Android application once and measure startup time.'
)
required_named = parser.add_argument_group('required named arguments')
required_named.add_argument('-p', '--package', action='store', dest='package',
help='package of the application', required=True)
# optional arguments
# use a group here to get the required arguments to appear 'above' the
# optional arguments in help.
optional_named = parser.add_argument_group('optional named arguments')
optional_named.add_argument('-a', '--activity', action='store',
dest='activity',
help='launch activity of the application')
optional_named.add_argument('-s', '--simulate', dest='simulate',
action='store_true',
help='simulate the process without executing '
'any shell commands')
optional_named.add_argument('-d', '--debug', dest='debug',
action='store_true',
help='Add extra debugging output')
optional_named.add_argument('-i', '--input', action='store', dest='input',
help='compiled trace file protobuf (TraceFile.pb)',
default='TraceFile.pb')
optional_named.add_argument('-r', '--readahead', action='store',
dest='readahead',
help='which readahead mode to use',
default='cold',
choices=('warm', 'cold', 'mlock', 'fadvise'))
optional_named.add_argument('-t', '--timeout', dest='timeout', action='store',
type=int,
help='Timeout after this many seconds when '
'executing a single run.',
default=10)
optional_named.add_argument('--compiler-filter', dest='compiler_filter',
action='store',
help='Which compiler filter to use.',
default=None)
return parser.parse_args(argv)
def main():
opts = parse_options()
runner = PrefetchAppRunner(**vars(opts))
result = runner.run()
if result is None:
return 1
print(result)
return 0
if __name__ == '__main__':
sys.exit(main())

View File

@ -1,286 +0,0 @@
#!/usr/bin/env python3
#
# Copyright 2019, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Unit tests for the run_app_with_prefetch_test.py script.
Install:
$> sudo apt-get install python3-pytest ## OR
$> pip install -U pytest
See also https://docs.pytest.org/en/latest/getting-started.html
Usage:
$> ./run_app_with_prefetch_test.py
$> pytest run_app_with_prefetch_test.py
$> python -m pytest run_app_with_prefetch_test.py
See also https://docs.pytest.org/en/latest/usage.html
"""
import io
import os
import shlex
import sys
import tempfile
# global imports
from contextlib import contextmanager
# pip imports
import pytest
# local imports
import run_app_with_prefetch as runner
from mock import call, patch, Mock
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from app_startup.lib.app_runner import AppRunner
#
# Argument Parsing Helpers
#
@contextmanager
def ignore_stdout_stderr():
"""Ignore stdout/stderr output for duration of this context."""
old_stdout = sys.stdout
old_stderr = sys.stderr
sys.stdout = io.StringIO()
sys.stderr = io.StringIO()
try:
yield
finally:
sys.stdout = old_stdout
sys.stderr = old_stderr
@contextmanager
def argparse_bad_argument(msg):
"""Asserts that a SystemExit is raised when executing this context.
If the assertion fails, print the message 'msg'.
"""
with pytest.raises(SystemExit, message=msg):
with ignore_stdout_stderr():
yield
def assert_bad_argument(args, msg):
"""Asserts that the command line arguments in 'args' are malformed.
Prints 'msg' if the assertion fails.
"""
with argparse_bad_argument(msg):
parse_args(args)
def parse_args(args):
"""
:param args: command-line like arguments as a single string
:return: dictionary of parsed key/values
"""
# "-a b -c d" => ['-a', 'b', '-c', 'd']
return vars(runner.parse_options(shlex.split(args)))
def default_dict_for_parsed_args(**kwargs):
"""Combines it with all of the "optional" parameters' default values."""
d = {
'readahead': 'cold',
'simulate': False,
'debug': False,
'input': 'TraceFile.pb',
'timeout': 10,
'compiler_filter': None,
'activity': None
}
d.update(kwargs)
return d
def default_mock_dict_for_parsed_args(include_optional=True, **kwargs):
"""Combines default dict with all optional parameters with some mock required
parameters.
"""
d = {'package': 'com.fake.package'}
if include_optional:
d.update(default_dict_for_parsed_args())
d.update(kwargs)
return d
def parse_optional_args(arg_str):
"""
Parses an argument string, prepending the required arguments from
default_mock_dict_for_parsed_args.
"""
req = '--package com.fake.package'
return parse_args('%s %s' % (req, arg_str))
def test_argparse():
# missing arguments
assert_bad_argument('', '-p is required')
# required arguments are parsed correctly
ad = default_dict_for_parsed_args # assert dict
assert parse_args('--package xyz') == ad(package='xyz')
assert parse_args('-p xyz') == ad(package='xyz')
assert parse_args('-p xyz -s') == ad(package='xyz', simulate=True)
assert parse_args('-p xyz --simulate') == ad(package='xyz', simulate=True)
# optional arguments are parsed correctly.
mad = default_mock_dict_for_parsed_args # mock assert dict
assert parse_optional_args('--input trace.pb') == mad(input='trace.pb')
assert parse_optional_args('--compiler-filter speed') == \
mad(compiler_filter='speed')
assert parse_optional_args('-d') == mad(debug=True)
assert parse_optional_args('--debug') == mad(debug=True)
assert parse_optional_args('--timeout 123') == mad(timeout=123)
assert parse_optional_args('-t 456') == mad(timeout=456)
assert parse_optional_args('-r warm') == mad(readahead='warm')
assert parse_optional_args('--readahead warm') == mad(readahead='warm')
assert parse_optional_args('-a act') == mad(activity='act')
assert parse_optional_args('--activity act') == mad(activity='act')
def test_main():
args = '--package com.fake.package --activity act -s'
opts = runner.parse_options(shlex.split(args))
result = runner.PrefetchAppRunner(**vars(opts)).run()
assert result == [('TotalTime', '123')]
def _mocked_run_shell_command(*args, **kwargs):
if args[0] == 'adb shell ps | grep "music" | awk \'{print $2;}\'':
return (True, '9999')
else:
return (True, '')
def test_preprocess_no_cache_drop():
with patch('lib.cmd_utils.run_shell_command',
new_callable=Mock) as mock_run_shell_command:
mock_run_shell_command.side_effect = _mocked_run_shell_command
prefetch_app_runner = runner.PrefetchAppRunner(package='music',
activity='MainActivity',
readahead='warm',
compiler_filter=None,
timeout=None,
simulate=False,
debug=False,
input=None)
prefetch_app_runner.preprocess()
calls = [call('adb root'),
call('adb shell "getenforce"'),
call('adb shell "setenforce 0"'),
call('adb shell "stop"'),
call('adb shell "start"'),
call('adb wait-for-device'),
call('adb shell ps | grep "music" | awk \'{print $2;}\''),
call('adb shell "kill 9999"')]
mock_run_shell_command.assert_has_calls(calls)
def test_preprocess_with_cache_drop():
with patch('lib.cmd_utils.run_shell_command',
new_callable=Mock) as mock_run_shell_command:
mock_run_shell_command.side_effect = _mocked_run_shell_command
prefetch_app_runner = runner.PrefetchAppRunner(package='music',
activity='MainActivity',
readahead='cold',
compiler_filter=None,
timeout=None,
simulate=False,
debug=False,
input=None)
prefetch_app_runner.preprocess()
calls = [call('adb root'),
call('adb shell "getenforce"'),
call('adb shell "setenforce 0"'),
call('adb shell "stop"'),
call('adb shell "start"'),
call('adb wait-for-device'),
call('adb shell ps | grep "music" | awk \'{print $2;}\''),
call('adb shell "kill 9999"'),
call('adb shell "echo 3 > /proc/sys/vm/drop_caches"')]
mock_run_shell_command.assert_has_calls(calls)
def test_preprocess_with_cache_drop_and_iorapd_enabled():
with patch('lib.cmd_utils.run_shell_command',
new_callable=Mock) as mock_run_shell_command:
mock_run_shell_command.side_effect = _mocked_run_shell_command
with tempfile.NamedTemporaryFile() as input:
prefetch_app_runner = runner.PrefetchAppRunner(package='music',
activity='MainActivity',
readahead='fadvise',
compiler_filter=None,
timeout=None,
simulate=False,
debug=False,
input=input.name)
prefetch_app_runner.preprocess()
calls = [call('adb root'),
call('adb shell "getenforce"'),
call('adb shell "setenforce 0"'),
call('adb shell "stop"'),
call('adb shell "start"'),
call('adb wait-for-device'),
call(
'adb shell ps | grep "music" | awk \'{print $2;}\''),
call('adb shell "kill 9999"'),
call('adb shell "echo 3 > /proc/sys/vm/drop_caches"'),
call('bash -c "source {}; iorapd_readahead_enable"'.
format(AppRunner.IORAP_COMMON_BASH_SCRIPT))]
mock_run_shell_command.assert_has_calls(calls)
@patch('lib.adb_utils.blocking_wait_for_logcat_displayed_time')
@patch('lib.cmd_utils.run_shell_command')
def test_postprocess_with_launch_cleanup(
mock_run_shell_command,
mock_blocking_wait_for_logcat_displayed_time):
mock_run_shell_command.side_effect = _mocked_run_shell_command
mock_blocking_wait_for_logcat_displayed_time.return_value = 123
with tempfile.NamedTemporaryFile() as input:
prefetch_app_runner = runner.PrefetchAppRunner(package='music',
activity='MainActivity',
readahead='fadvise',
compiler_filter=None,
timeout=10,
simulate=False,
debug=False,
input=input.name)
prefetch_app_runner.postprocess('2019-07-02 23:20:06.972674825')
calls = [
call('bash -c "source {script_path}; '
'iorapd_readahead_wait_until_finished '
'\'{package}\' \'{activity}\' \'{timestamp}\' \'{timeout}\'"'.
format(timeout=10,
package='music',
activity='MainActivity',
timestamp='2019-07-02 23:20:06.972674825',
script_path=AppRunner.IORAP_COMMON_BASH_SCRIPT)),
call('bash -c "source {}; iorapd_readahead_disable"'.
format(AppRunner.IORAP_COMMON_BASH_SCRIPT)),
call('adb shell ps | grep "music" | awk \'{print $2;}\''),
call('adb shell "kill 9999"')]
mock_run_shell_command.assert_has_calls(calls)
if __name__ == '__main__':
pytest.main()

View File

@ -1,22 +0,0 @@
#!/bin/bash
#
# Copyright 2018, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This turns the screen on if it's off.
# If it's already on, it does nothing unless it's on the home screen, in which case it opens up some background
# menu.
#
# However, this menu is ignored because "am start" commands still work as expected.
adb shell input keyevent MENU

View File

@ -1,168 +0,0 @@
#!/usr/bin/env python3
#
# Copyright 2020, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
import sys
from typing import Dict, List, NamedTuple, Tuple
DIR = os.path.abspath(os.path.dirname(__file__))
sys.path.append(os.path.dirname(DIR)) # framework/base/startop/script
import lib.print_utils as print_utils
# Include generated protos.
dir_name = os.path.dirname(os.path.realpath(__file__))
sys.path.append(dir_name + "/generated")
from TraceFile_pb2 import *
def parse_options(argv: List[str] = None):
"""Parses command line arguments and returns an argparse Namespace object."""
parser = argparse.ArgumentParser(description="Analyze compiled_trace iorap protos.")
required_named = parser.add_argument_group('required named arguments')
required_named.add_argument('-i', dest='input', metavar='FILE',
help='Read protobuf file as input')
optional_named = parser.add_argument_group('optional named arguments')
optional_named.add_argument('-up', dest='upper_percent', type=float,
default=95.0,
help='Only show the top-most entries up to this value.')
optional_named.add_argument('-r', dest='raw', action='store_true',
help='Output entire raw file.')
optional_named.add_argument('-o', dest='output',
help='The results are stored into the output file')
optional_named.add_argument('-d', dest='debug', action='store_true',
help='Add extra debugging output')
return parser.parse_args(argv)
def open_iorap_prefetch_file(file_path: str) -> TraceFile:
with open(file_path, "rb") as f:
tf = TraceFile()
tf.ParseFromString(f.read())
return tf
def print_stats_summary(trace_file: TraceFile, upper_percent):
tf_dict = convert_to_dict(trace_file)
print_utils.debug_print(tf_dict)
total_length = 0
summaries = []
for name, entries_list in tf_dict.items():
summary = entries_sum(entries_list)
summaries.append(summary)
total_length += summary.length
# Sort by length
summaries.sort(reverse=True, key=lambda s: s.length)
percent_sum = 0.0
skipped_entries = 0
print("===========================================")
print("Total length: {:,} bytes".format(total_length))
print("Displayed upper percent: {:0.2f}%".format(upper_percent))
print("===========================================")
print("")
print("name,length,percent_of_total,upper_percent")
for entry_summary in summaries:
percent_of_total = (entry_summary.length * 1.0) / (total_length * 1.0) * 100.0
percent_sum += percent_of_total
if percent_sum > upper_percent:
skipped_entries = skipped_entries + 1
continue
#print("%s,%d,%.2f%%" %(entry_summary.name, entry_summary.length, percent_of_total))
print("{:s},{:d},{:0.2f}%,{:0.2f}%".format(entry_summary.name, entry_summary.length, percent_of_total, percent_sum))
if skipped_entries > 0:
print("[WARNING] Skipped {:d} entries, use -up=100 to show everything".format(skipped_entries))
class FileEntry(NamedTuple):
id: int
name: str
offset: int
length: int
class FileEntrySummary(NamedTuple):
name: str
length: int
def entries_sum(entries: List[FileEntry]) -> FileEntrySummary:
if not entries:
return None
summary = FileEntrySummary(name=entries[0].name, length=0)
for entry in entries:
summary = FileEntrySummary(summary.name, summary.length + entry.length)
return summary
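# Illustrative sketch (hypothetical entries, not taken from a real trace), showing how
# entries_sum collapses per-file runs into a single summary:
#   entries_sum([FileEntry(0, 'base.apk', 0, 100), FileEntry(0, 'base.apk', 4096, 50)])
#   == FileEntrySummary(name='base.apk', length=150)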
def convert_to_dict(trace_file: TraceFile) -> Dict[str, List[FileEntry]]:
trace_file_index = trace_file.index
# entries.id -> entry.file_name
entries_map = {}
index_entries = trace_file_index.entries
for entry in index_entries:
entries_map[entry.id] = entry.file_name
final_map = {}
file_entries_map = {}
file_entries = trace_file.list.entries
for entry in file_entries:
print_utils.debug_print(entry)
lst = file_entries_map.get(entry.index_id, [])
file_entries_map[entry.index_id] = lst
file_name = entries_map[entry.index_id]
file_entry = \
FileEntry(id=entry.index_id, name=file_name, offset=entry.file_offset, length=entry.file_length)
lst.append(file_entry)
final_map[file_name] = lst
return final_map
def main(argv: List[str]) -> int:
opts = parse_options(argv[1:])
if opts.debug:
print_utils.DEBUG = opts.debug
print_utils.debug_print(opts)
prefetch_file = open_iorap_prefetch_file(opts.input)
if opts.raw:
print(prefetch_file)
print_stats_summary(prefetch_file, opts.upper_percent)
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))

View File

@ -1,403 +0,0 @@
#!/bin/bash
#
# Copyright 2017, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
APP_STARTUP_DIR="$DIR/../app_startup/"
source "$DIR/common"
usage() {
cat <<EOF
Usage: collector [OPTIONS]...
Runs an application, causes an iorap trace to be collected for it, and then invokes the iorap
compiler to generate a TraceFile.pb.
-p, --package package of the app to test
-a, --activity activity of the app to test
-h, --help usage information (this)
-v, --verbose enable extra verbose printing
-i, --inodes path to inodes file (system/extras/pagecache/pagecache.py -d inodes)
-b, --trace_buffer_size how big to make trace buffer size (default 32768)
-w, --wait_time how long to run systrace for (default 10) in seconds
-c, --compiler-filter override the compilation filter if set (default none)
-o, --output output trace file protobuf (default 'TraceFile.pb')
EOF
}
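# Illustrative invocation (hypothetical package and paths, not part of the original script):
#   ./collector --package com.example.app --inodes inodes.txt --output TraceFile.pb
# where inodes.txt was produced earlier with 'system/extras/pagecache/pagecache.py -d inodes'.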
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
trace_buffer_size=32768
wait_time=10
comp_filter=""
output_dest="TraceFile.pb"
parse_arguments() {
while [[ $# -gt 0 ]]; do
case "$1" in
-a|--activity)
activity="$2"
shift
;;
-h|--help)
usage
exit 0
;;
-p|--package)
package="$2"
shift
;;
-i|--inodes)
inodes="$2"
shift
;;
-b|--trace_buffer_size)
trace_buffer_size="$2"
shift
;;
-w|--wait_time)
wait_time="$2"
shift
;;
-c|--compiler-filter)
comp_filter="$2"
shift
;;
-o|--output)
output_dest="$2"
shift
;;
-v|--verbose)
verbose="y"
;;
esac
shift
done
}
remote_pidof() {
local procname="$1"
adb shell ps | grep "$procname" | awk '{print $2;}'
}
remote_pkill() {
local procname="$1"
shift
local the_pids=$(remote_pidof "$procname")
local pid
for pid in $the_pids; do
verbose_print adb shell kill "$@" "$pid"
adb shell kill "$@" "$pid"
done
}
force_package_compilation() {
local arg_comp_filter="$1"
local arg_package="$2"
if [[ $arg_comp_filter == speed-profile ]]; then
# Force the running app to dump its profiles to disk.
remote_pkill "$arg_package" -SIGUSR1
sleep 1 # give some time for above to complete.
fi
adb shell cmd package compile -m "$arg_comp_filter" -f "$arg_package"
}
parse_package_dumpsys_line() {
local what_left="$1"
local what_right="$2"
local line="$3"
if [[ $line == *${what_left}*${what_right}* ]]; then
found="${line#*$what_left}"
found="${found%$what_right*}"
echo "$found"
return 0
fi
return 1
}
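# Illustrative sketch (hypothetical dumpsys line, not from a real device):
#   parse_package_dumpsys_line 'compilation_filter=' ']' 'status: [compilation_filter=speed-profile]'
# prints 'speed-profile' and returns 0; it returns 1 when the delimiters are not found.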
parse_package_dumpsys_section() {
local what_left="$1"
local what_right="$2"
shift
local lines="$@"
lines="${lines//$'\n'/}"
local new_lines=()
local current_line=""
local newline=n
local line
for line in "${lines[@]}"; do
if [[ $line == *: ]]; then
newline=y
current_line=""
new_lines+=("$current_line")
parse_package_dumpsys_line "$what_left" "$what_right" "$current_line" && return 0
else
# strip all spaces from the start
line="${line//$' '/}"
current_line+="$line"
#prepend to current line
fi
done
[[ "$current_line" != "" ]] && new_lines+=("$current_line")
parse_package_dumpsys_line "$what_left" "$what_right" "$current_line" && return 0
return 1
}
parse_package_compilation() {
local pkg="$1"
# [com.google.android.apps.maps]
local compilation_filter
local is_prebuilt
local isa
local etc
local ret_code
read compilation_filter is_prebuilt isa etc <<< "$("$APP_STARTUP_DIR"/query_compiler_filter.py --package "$pkg")"
ret_code=$?
if [[ $ret_code -eq 0 && x$compilation_filter != x ]]; then
verbose_print "Package compilation info for $pkg was '$compilation_filter'"
echo "$compilation_filter"
return 0
else
verbose_print "query failed ret code $ret_code filter=$compilation_filter"
fi
return $ret_code
}
# Main entry point
if [[ $# -eq 0 ]]; then
usage
exit 1
else
parse_arguments "$@"
# if we do not have a package, exit early with an error
[[ "$package" == "" ]] && echo "--package not specified" 1>&2 && exit 1
if [[ -z "$inodes" ]] || ! [[ -f $inodes ]]; then
echo "--inodes not specified" 1>&2
exit 1
fi
if [[ "$activity" == "" ]]; then
activity="$(get_activity_name "$package")"
if [[ "$activity" == "" ]]; then
echo "Activity name could not be found, invalid package name?" 1>&2
exit 1
else
verbose_print "Activity name inferred: " "$activity"
fi
fi
fi
adb root > /dev/null
if [[ "$(adb shell getenforce)" != "Permissive" ]]; then
adb shell setenforce 0
adb shell stop
adb shell start
adb wait-for-device
fi
compilation_was="$(parse_package_compilation "$package")"
if [[ $? -ne 0 ]]; then
echo "Could not determine package compilation filter; was this package installed?" >&2
exit 1
fi
verbose_print "Package compilation: $compilation_was"
# Cannot downgrade (e.g. from speed-profile to quicken) without forceful recompilation.
# Forceful recompilation will recompile even if compilation filter was unchanged.
# Therefore avoid recompiling unless the filter is actually different than what we asked for.
if [[ "x$comp_filter" != "x" ]] && [[ "$compilation_was" != "$comp_filter" ]]; then
echo "Current compilation filter is '$compilation_was'; force recompile to '$comp_filter'" >&2
#TODO: this matching seems hopelessly broken, it will always recompile.
force_package_compilation "$comp_filter" "$package"
fi
# Drop all caches prior to beginning a systrace, otherwise we won't record anything already in pagecache.
adb shell "echo 3 > /proc/sys/vm/drop_caches"
trace_tmp_file="$(mktemp -t trace.XXXXXXXXX.html)"
function finish {
[[ -f "$trace_tmp_file" ]] && rm "$trace_tmp_file"
}
trap finish EXIT
launch_application_and_wait_for_trace() {
local package="$1"
local activity="$2"
local timeout=30 # seconds
# Ensure application isn't running already.
remote_pkill "$package"
# Killing the app brings up the Home screen, which causes a (roughly 5 second)
# trace of the home screen to be recorded.
# There is no way to abort that trace,
# so just wait for it to complete instead.
sleep 30
local time_now="$(logcat_save_timestamp)"
local retcode=0
verbose_print "Drop caches for non-warm start."
# Drop all caches to get cold starts.
adb shell "echo 3 > /proc/sys/vm/drop_caches"
verbose_print "now launching application"
# Launch an application
"$APP_STARTUP_DIR"/launch_application "$package" "$activity"
retcode=$?
if [[ $retcode -ne 0 ]]; then
echo "FATAL: Application launch failed." >&2
return $retcode
fi
# This blocks until 'am start' returns, at which point the application is
# already considered "started" since the first frame has been drawn.
# TODO: check for cold start w.r.t. activitymanager?
# Wait for application to start from the point of view of ActivityTaskManager.
local pattern="ActivityTaskManager: Displayed $package"
logcat_wait_for_pattern "$timeout" "$time_now" "$pattern"
retcode=$?
if [[ $retcode -ne 0 ]]; then
echo "FATAL: Could not find '$pattern' in logcat." >&2
return $retcode
fi
# Wait for iorapd to finish writing out the perfetto traces for this app.
iorapd_perfetto_wait_for_app_trace "$package" "$activity" "$timeout" "$time_now"
retcode=$?
if [[ $retcode -ne 0 ]]; then
echo "FATAL: Could not save perfetto app trace file." >&2
return $retcode
fi
verbose_print "iorapd has finished collecting app trace file for $package/$activity"
}
collector_main() {
# don't even bother trying to run anything until the screen is unlocked.
"$APP_STARTUP_DIR"/unlock_screen
# Don't mutate state while iorapd is running.
iorapd_stop || return $?
# Remove all existing metadata for a package/activity in iorapd.
iorapd_perfetto_purge_app_trace "$package" "$activity" || return $?
iorapd_compiler_purge_trace_file "$package" "$activity" || return $?
iorapd_perfetto_enable || return $?
iorapd_readahead_disable || return $?
iorapd_start || return $?
# Wait for perfetto trace to finished writing itself out.
launch_application_and_wait_for_trace "$package" "$activity" || return $?
# Pull the perfetto trace for manual inspection.
iorapd_perfetto_pull_trace_file "$package" "$activity" "perfetto_trace.pb"
# Compile the trace so that the next app run can use prefetching.
iorapd_compiler_for_app_trace "$package" "$activity" "$inodes" || return $?
# Save TraceFile.pb to local file.
iorapd_compiler_pull_trace_file "$package" "$activity" "$output_dest" || return $?
# Remove the TraceFile.pb from the device.
iorapd_compiler_purge_trace_file "$package" "$activity" || return $?
# TODO: better transactional support for restoring iorapd global properties
iorapd_perfetto_disable || return $?
}
collector_main "$@"
verbose_print "Collector finished. Children: "
if [[ $verbose == y ]]; then
jobs -p
ps f -g$$
fi
exit $?
verbose_print "About to begin systrace"
coproc systrace_fd {
# Disable stdout buffering since we need to know the output of systrace RIGHT AWAY.
stdbuf -oL "$ANDROID_BUILD_TOP"/external/chromium-trace/systrace.py --target=android -b "$trace_buffer_size" -t "$wait_time" am pagecache dalvik -o "$trace_tmp_file"
}
verbose_print "Systrace began"
systrace_pid="$!"
while read -r -u "${systrace_fd[0]}" systrace_output; do
verbose_print "$systrace_output"
if [[ "$systrace_output" == *"Starting tracing"* ]]; then
verbose_print "WE DID SEE STARTING TRACING."
break
fi
done
# Systrace has begun recording the tracing.
# Run the application and collect the results.
am_output="$(adb shell am start -S -W "$package"/"$activity")"
if [[ $? -ne 0 ]]; then
echo "am start failed" >&2
exit 1
fi
verbose_print "$am_output"
total_time="$(echo "$am_output" | grep 'TotalTime:' | sed 's/TotalTime: //g')"
verbose_print "total time: $total_time"
# Now wait for systrace to finish.
wait "$systrace_pid" || { echo "Systrace finished before am start was finished, try a longer --wait_time"; exit 1; }
verbose_print "Systrace has now finished"
verbose_print "$(ls -la "$trace_tmp_file")"
iorapd_perfetto_disable
# Now that systrace has finished, convert the trace file html file to a protobuf.
"$ANDROID_BUILD_TOP"/system/iorap/src/py/collector/trace_parser.py -i "$inodes" -t "$trace_tmp_file" -o "$output_dest" || exit 1
echo "Trace file collection complete, trace file saved to \"$output_dest\"!" >&2
finish

View File

@ -1,253 +0,0 @@
#!/bin/bash
#
# Copyright 2019, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
DIR_IORAP_COMMON="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
APP_STARTUP_DIR="$DIR_IORAP_COMMON/../app_startup/"
source "$APP_STARTUP_DIR/lib/common"
IORAPD_DATA_PATH="/data/misc/iorapd"
iorapd_start() {
verbose_print 'iorapd_start'
adb shell start iorapd
sleep 1
# TODO: block until logcat prints successfully connecting
}
iorapd_stop() {
verbose_print 'iorapd_stop'
adb shell stop iorapd
}
iorapd_reset() {
iorapd_stop
iorapd_start
}
# Enable perfetto tracing.
# Subsequent launches of an application will record a perfetto trace protobuf.
iorapd_perfetto_enable() {
verbose_print 'enable perfetto'
adb shell setprop iorapd.perfetto.enable true
iorapd_reset # iorapd only reads this flag when initializing
}
# Disable perfetto tracing.
# Subsequent launches of applications will no longer record perfetto trace protobufs.
iorapd_perfetto_disable() {
verbose_print 'disable perfetto'
adb shell setprop iorapd.perfetto.enable false
iorapd_reset # iorapd only reads this flag when initializing
}
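# Illustrative manual check (assumes a device connected over adb; not part of the original script):
#   adb shell getprop iorapd.perfetto.enable   # 'true' after iorapd_perfetto_enable,
#                                              # 'false' after iorapd_perfetto_disable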
# Enable readahead
# Subsequent launches of an application will be sped up by iorapd readahead prefetching
# (Provided an appropriate compiled trace exists for that application)
iorapd_readahead_enable() {
if [[ "$(adb shell getprop iorapd.readahead.enable)" == true ]]; then
verbose_print 'enable readahead [already enabled]'
return 0
fi
verbose_print 'enable readahead [reset iorapd]'
adb shell setprop iorapd.readahead.enable true
iorapd_reset # iorapd only reads this flag when initializing
}
# Disable readahead
# Subsequent launches of an application will not be sped up by iorapd readahead prefetching.
iorapd_readahead_disable() {
if [[ "$(adb shell getprop iorapd.readahead.enable)" == false ]]; then
verbose_print 'disable readahead [already disabled]'
return 0
fi
verbose_print 'disable readahead [reset iorapd]'
adb shell setprop iorapd.readahead.enable false
iorapd_reset # iorapd only reads this flag when initializing
}
_iorapd_path_to_data_file() {
local package="$1"
local activity="$2"
local suffix="$3"
# Match logic of 'AppComponentName' in iorap::compiler C++ code.
echo "${IORAPD_DATA_PATH}/${package}%2F${activity}.${suffix}"
}
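# Illustrative result (hypothetical component, not part of the original script):
#   _iorapd_path_to_data_file com.example.app com.example.app.MainActivity compiled_trace.pb
# -> /data/misc/iorapd/com.example.app%2Fcom.example.app.MainActivity.compiled_trace.pb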
iorapd_perfetto_wait_for_app_trace() {
local package="$1"
local activity="$2"
local timeout="$3"
local timestamp="$4"
local remote_path="$(_iorapd_path_to_data_file "$package" "$activity" "perfetto_trace.pb")"
verbose_print "iorapd_perfetto_wait_for_app_trace on file '$remote_path'"
# see event_manager.cc
local pattern="Perfetto TraceBuffer saved to file: $remote_path"
logcat_wait_for_pattern "$timeout" "$timestamp" "$pattern"
}
# Purge all perfetto traces for a given application.
iorapd_perfetto_purge_app_trace() {
local package="$1"
local activity="$2"
local remote_path="$(_iorapd_path_to_data_file "$package" "$activity" "perfetto_trace.pb")"
verbose_print 'iorapd-perfetto: purge app trace in ' "$remote_path"
adb shell "[[ -f '$remote_path' ]] && rm -f '$remote_path' || exit 0"
}
# Pull the remote perfetto trace file into a local file.
iorapd_perfetto_pull_trace_file() {
local package="$1"
local activity="$2"
local output_file="$3" # local path
local compiled_path="$(_iorapd_path_to_data_file "$package" "$activity" "perfetto_trace.pb")"
if ! adb shell "[[ -f '$compiled_path' ]]"; then
echo "Error: Remote path '$compiled_path' invalid" >&2
return 1
fi
if ! mkdir -p "$(dirname "$output_file")"; then
echo "Error: Fail to make output directory for '$output_file'" >&2
return 1
fi
verbose_print adb pull "$compiled_path" "$output_file"
adb pull "$compiled_path" "$output_file"
}
# Compile a perfetto trace for a given application.
# This requires the app has run at least once with perfetto tracing enabled.
iorapd_compiler_for_app_trace() {
local package="$1"
local activity="$2"
local inodes="$3" # local path
# remote path calculations
local input_path="$(_iorapd_path_to_data_file "$package" "$activity" "perfetto_trace.pb")"
local compiled_path="$(_iorapd_path_to_data_file "$package" "$activity" "compiled_trace.tmp.pb")"
local compiled_path_final="$(_iorapd_path_to_data_file "$package" "$activity" "compiled_trace.pb")"
if ! adb shell "[[ -f '$input_path' ]]"; then
echo "Error: Missing perfetto traces; nothing to compile. Expected: '$input_path'" >&2
return 1
fi
if ! [[ -f $inodes ]]; then
# We could compile using 'diskscan' but it's non-deterministic, so refuse instead.
echo "Error: Missing inodes textcache at '$inodes'; refusing to compile." >&2
return 1
fi
# inodes file needs to be on the device for iorap.cmd.compiler to access it
local remote_inodes=/data/local/tmp/prefetch/inodes.txt
adb shell "mkdir -p \"$(dirname "$remote_inodes")\"" || return 1
verbose_print adb push "$inodes" "$remote_inodes"
adb push "$inodes" "$remote_inodes"
verbose_print 'iorapd-compiler: compile app trace in ' "$input_path"
verbose_print adb shell "iorap.cmd.compiler '$input_path' --inode-textcache '$remote_inodes' --output-proto '$compiled_path'"
adb shell "iorap.cmd.compiler '$input_path' --inode-textcache '$remote_inodes' --output-proto '$compiled_path'"
retcode=$?
# Don't overwrite the true 'compiled_trace.pb' unless the compiler completed without error.
# TODO: The native compiler code should be handling its own transaction-safety.
if [[ $retcode -eq 0 ]]; then
adb shell "mv '$compiled_path' '$compiled_path_final'"
else
adb shell "[[ -f '$compiled_path' ]] && rm -f '$compiled_path'"
fi
# Clean up inodes file we just pushed.
# adb shell "[[ -f '$remote_inodes' ]] && rm -f '$remote_inodes'"
return $retcode
}
# Pull the remote compiled trace file into a local file.
iorapd_compiler_pull_trace_file() {
local package="$1"
local activity="$2"
local output_file="$3" # local path
local compiled_path="$(_iorapd_path_to_data_file "$package" "$activity" "compiled_trace.pb")"
if ! adb shell "[[ -f '$compiled_path' ]]"; then
echo "Error: Remote path '$compiled_path' invalid" >&2
return 1
fi
if ! mkdir -p "$(dirname "$output_file")"; then
echo "Error: Fail to make output directory for '$output_file'" >&2
return 1
fi
verbose_print adb pull "$compiled_path" "$output_file"
adb pull "$compiled_path" "$output_file"
}
# Install a compiled trace file.
iorapd_compiler_install_trace_file() {
local package="$1"
local activity="$2"
local input_file="$3" # local path
# remote path calculations
local compiled_path="$(_iorapd_path_to_data_file "$package" "$activity" "compiled_trace.pb")"
if ! [[ -f $input_file ]]; then
echo "Error: File '$input_file' does not exist." >&2
return 1
fi
adb shell "mkdir -p \"$(dirname "$compiled_path")\"" || return 1
verbose_print adb push "$input_file" "$compiled_path"
adb push "$input_file" "$compiled_path"
}
iorapd_compiler_purge_trace_file() {
local package="$1"
local activity="$2"
local input_file="$3" # local path
local remote_path="$(_iorapd_path_to_data_file "$package" "$activity" "compiled_trace.pb")"
adb shell "[[ -f '$remote_path' ]] && rm -f '$remote_path' || exit 0"
}
# Blocks until the readahead for the requested package/activity has finished.
# This assumes that the trace file was already installed, and also that
# the application launched but not completed yet.
iorapd_readahead_wait_until_finished() {
local package="$1"
local activity="$2"
local timestamp="$3"
local timeout="$4"
if [[ $# -lt 4 ]]; then
echo "FATAL: Expected 4 arguments (actual $# $@)" >&2
exit 1
fi
local remote_path="$(_iorapd_path_to_data_file "$package" "$activity" "compiled_trace.pb")"
# See 'read_ahead.cc' LOG(INFO).
local pattern="Description = $remote_path"
logcat_wait_for_pattern "$timeout" "$timestamp" "$pattern"
}
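# Illustrative usage (hypothetical values, not part of the original script):
#   ts="$(logcat_save_timestamp)"   # ... then launch the app ...
#   iorapd_readahead_wait_until_finished com.example.app com.example.app.MainActivity "$ts" 30
# blocks until logcat reports the readahead description for the installed compiled_trace.pb.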

View File

@ -1,297 +0,0 @@
#!/usr/bin/env python3
#
# Copyright 2019, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import asyncio
import csv
import itertools
import os
import re
import struct
import sys
import tempfile
import time
import zipfile
from typing import Any, Callable, Dict, Generic, Iterable, List, NamedTuple, TextIO, Tuple, TypeVar, Optional, Union
# Include generated protos.
dir_name = os.path.dirname(os.path.realpath(__file__))
sys.path.append(dir_name + "/generated")
from TraceFile_pb2 import *
def parse_options(argv: List[str] = None):
"""Parse command line arguments and return an argparse Namespace object."""
parser = argparse.ArgumentParser(description="Compile a TraceFile.proto from a manual text file.")
# argparse considers args starting with - and -- optional in --help, even though required=True.
# by using a named argument group --help will clearly say that it's required instead of optional.
required_named = parser.add_argument_group('required named arguments')
# optional arguments
# use a group here to get the required arguments to appear 'above' the optional arguments in help.
optional_named = parser.add_argument_group('optional named arguments')
optional_named.add_argument('-opb', '--output-proto-binary', dest='output_proto_binary', action='store', help='Write binary proto output to file.')
optional_named.add_argument('-pm', '--pinlist-meta', dest='pinlist_meta', action='store', help='Path to pinlist.meta (default=none) binary file.')
optional_named.add_argument('-pmp', '--pinlist-meta-parent', dest='pinlist_meta_parent', action='store', help='Device path that the pinlist.meta applies to (e.g. /data/.../somefile.apk)')
optional_named.add_argument('-i', '--input', dest='input', action='store', help='Input text file (default stdin).')
optional_named.add_argument('-zp', '--zip_path', dest='zip_path', action='append', help='Directory containing zip files.')
optional_named.add_argument('-d', '--debug', dest='debug', action='store_true', help='Add extra debugging output')
optional_named.add_argument('-ot', '--output-text', dest='output_text', action='store', help='Output text file (default stdout).')
return parser.parse_args(argv)
# TODO: refactor this with a common library file with analyze_metrics.py
def _debug_print(*args, **kwargs):
"""Print the args to sys.stderr if the --debug/-d flag was passed in."""
if _debug:
print(*args, **kwargs, file=sys.stderr)
class BadInputError(Exception):
pass
InputRecord = NamedTuple('InputRecord', [('filepath', str), ('offset', int), ('length', int), ('remark', str)])
def find_zip_in_paths(original_name, zip_paths):
# /foo/bar/bax.zip -> bax.zip
file_basename = os.path.split(original_name)[1]
# the file must be located in one of the --zip-path arguments
matched = None
for zip_path in zip_paths:
for dir_entry in os.listdir(zip_path):
if dir_entry == file_basename:
matched = os.path.join(zip_path, dir_entry)
break
if matched:
break
if not matched:
raise ValueError("%s could not be found in any of the --zip_path specified." %(file_basename))
_debug_print("found zip file ", file_basename, " in ", matched)
if not zipfile.is_zipfile(matched):
raise ValueError("%s is not a zip file" %(matched))
return matched
def handle_zip_entry(input_record, zip_paths):
res = re.match("([^!]+)[!](.*)", input_record.filepath)
if not res:
return input_record
# 'foo!bar'
in_filepath = res[1] # -> 'foo'
in_zip_entry = res[2] # -> 'bar'
matched = find_zip_in_paths(in_filepath, zip_paths)
zip = zipfile.ZipFile(matched)
try:
zip_info = zip.getinfo(in_zip_entry)
except KeyError:
raise ValueError("%s is not an item in the zip file %s" %(in_zip_entry, matched))
# TODO: do we also need to add header size to this?
in_offset = zip_info.header_offset
# TODO: if a range is specified, use that instead.
in_length = zip_info.compress_size
return InputRecord(in_filepath, in_offset, in_length, 'zip entry (%s)' %(in_zip_entry))
def parse_input_file(input: Iterable[str], zip_paths: List[str]) -> Iterable[InputRecord]:
for line in input:
line = line.strip()
_debug_print("Line = ", line)
if not line:
_debug_print(" skip empty line", line)
continue
elif line[0] == "#":
_debug_print(" skip commented line", line)
continue
res = re.match(r"([^\s]+)\s+(\d+)\s+(\d+)", line)
if not res:
raise BadInputError("Expected input of form: <str:filepath> <int:offset> <int:length>")
in_filepath = res[1]
in_offset = int(res[2])
in_length = int(res[3])
yield handle_zip_entry(InputRecord(in_filepath, in_offset, in_length, 'regular file'), zip_paths)
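# Illustrative input line (hypothetical path, not from a real textcache); it parses into
# an InputRecord before any zip-entry handling:
#   "/system/framework/framework.jar 4096 8192"
#   -> InputRecord(filepath='/system/framework/framework.jar', offset=4096, length=8192,
#                  remark='regular file')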
# format:
# (<big_endian(i32):file_offset> <big_endian(i32):range_length>)+
PIN_META_FORMAT = ">ii"
PIN_META_READ_SIZE = struct.calcsize(PIN_META_FORMAT)
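# Illustrative decode of one 8-byte record (assumed example bytes, not real pinlist.meta data):
#   struct.unpack(PIN_META_FORMAT, b"\x00\x00\x10\x00\x00\x00\x00\x80") == (4096, 128)
# i.e. a big-endian (file_offset, range_length) pair.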
def parse_pin_meta(pin_meta_file, pinlist_meta_parent, zip_paths):
if not pin_meta_file:
return ()
global PIN_META_FORMAT
global PIN_META_READ_SIZE
# '/data/app/com.google.android.GoogleCamera-aNQhzSznf4h_bvJ_MRbweQ==/base.apk'
# -> 'com.google.android.GoogleCamera'
package_name_match = re.match('/.*/(.*)-.*=/base.apk', pinlist_meta_parent)
if not package_name_match:
raise ValueError("%s did not contain the <packagename>.apk" %(pinlist_meta_parent))
package_name = package_name_match[1]
# "com.google.android.GoogleCamera" -> "GoogleCamera.apk"
apk_name = package_name.split(".")[-1] + ".apk"
path_to_zip_on_host = find_zip_in_paths(apk_name, zip_paths)
apk_file_size = os.path.getsize(path_to_zip_on_host)
_debug_print("APK path '%s' file size '%d'" %(path_to_zip_on_host, apk_file_size))
while True:
data = pin_meta_file.read(PIN_META_READ_SIZE)
if not data:
break
(pin_offset, pin_length) = struct.unpack(PIN_META_FORMAT, data) # (offset, length)
remark = 'regular file (pinlist.meta)'
remaining_size = apk_file_size - pin_offset
if remaining_size < 0:
print("WARNING: Clamp entry (%d, %d), offset too large (max file size = %d)" %(pin_offset, pin_length, apk_file_size))
pin_length = pin_length + remaining_size
pin_offset = pin_offset + remaining_size
if pin_offset < 0:
pin_offset = 0
remark += '[clamped.offset]'
pin_last_offset = pin_offset + pin_length
remaining_size = apk_file_size - pin_last_offset
if remaining_size < 0:
print("WARNING: Clamp entry (%d, %d), length too large (max file size = %d)" %(pin_offset, pin_length, apk_file_size))
pin_length = pin_length + remaining_size
remark += '[clamped.length]'
yield InputRecord(pinlist_meta_parent, pin_offset, pin_length, remark)
def write_text_file_output(input_records: Iterable[InputRecord], output_text_file):
for rec in input_records:
output_text_file.write("%s %d %d #%s\n" %(rec.filepath, rec.offset, rec.length, rec.remark))
def build_trace_file(input_records: Iterable[InputRecord]) -> TraceFile:
trace_file = TraceFile()
trace_file_index = trace_file.index
file_id_counter = 0
file_id_map = {} # filename -> id
stats_length_total = 0
filename_stats = {} # filename -> total size
for rec in input_records:
filename = rec.filepath
file_id = file_id_map.get(filename)
if file_id is None:
file_id = file_id_counter
file_id_map[filename] = file_id_counter
file_id_counter = file_id_counter + 1
file_index_entry = trace_file_index.entries.add()
file_index_entry.id = file_id
file_index_entry.file_name = filename
# already in the file index, add the file entry.
file_entry = trace_file.list.entries.add()
file_entry.index_id = file_id
file_entry.file_length = rec.length
stats_length_total += file_entry.file_length
file_entry.file_offset = rec.offset
filename_stats[filename] = filename_stats.get(filename, 0) + file_entry.file_length
return trace_file
def main():
global _debug
options = parse_options()
_debug = options.debug
_debug_print("parsed options: ", options)
if not options.input:
input_file = sys.stdin
_debug_print("input = stdin")
else:
input_file = open(options.input)
_debug_print("input = (file)", options.input)
if not options.output_proto_binary:
output_proto_file = None
else:
output_proto_file = open(options.output_proto_binary, 'wb')
_debug_print("output_proto_binary = ", output_proto_file)
pinlist_meta_parent = options.pinlist_meta_parent
if options.pinlist_meta:
pin_meta_file = open(options.pinlist_meta, 'rb')
else:
pin_meta_file = None
if (pinlist_meta_parent is None) != (pin_meta_file is None):
print("Options must be used together: --pinlist-meta and --pinlist-meta-parent")
return 1
if not options.output_text:
output_text_file = sys.stdout
_debug_print("output = stdout")
else:
output_text_file = open(options.output_text, 'w')
_debug_print("output = (file)", options.output_text)
zip_paths = options.zip_path or []
input_records = list(parse_pin_meta(pin_meta_file, pinlist_meta_parent, zip_paths))
input_records = input_records + list(parse_input_file(input_file, zip_paths))
for p in input_records:
_debug_print(p)
write_text_file_output(input_records, output_text_file)
output_text_file.close()
out_proto = build_trace_file(input_records)
if output_proto_file:
output_proto_file.write(out_proto.SerializeToString())
output_proto_file.close()
return 0
if __name__ == '__main__':
sys.exit(main())

View File

@ -1,73 +0,0 @@
#!/usr/bin/env python3
#
# Copyright 2019, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import importlib
import os
import sys
import tempfile
from enum import Enum
from typing import TextIO, List
# local import
DIR = os.path.abspath(os.path.dirname(__file__))
sys.path.append(os.path.dirname(DIR))
import lib.print_utils as print_utils
# Type of compiler.
class CompilerType(Enum):
HOST = 1 # iorap.cmd.compiler on host
DEVICE = 2 # adb shell iorap.cmd.compiler
RI = 3 # compiler.py
def compile_perfetto_trace_ri(
argv: List[str],
compiler) -> TextIO:
print_utils.debug_print('Compile using RI compiler.')
compiler_trace_file = tempfile.NamedTemporaryFile()
argv.extend(['-o', compiler_trace_file.name])
print_utils.debug_print(argv)
compiler.main([''] + argv)
return compiler_trace_file
def compile_perfetto_trace_device(inodes_path: str,
package: str,
activity: str,
compiler) -> TextIO:
print_utils.debug_print('Compile using on-device compiler.')
compiler_trace_file = tempfile.NamedTemporaryFile()
compiler.main(inodes_path, package, activity, compiler_trace_file.name)
return compiler_trace_file
def compile(compiler_type: CompilerType,
inodes_path: str,
ri_compiler_argv,
package: str,
activity: str) -> TextIO:
if compiler_type == CompilerType.RI:
compiler = importlib.import_module('iorap.compiler_ri')
compiler_trace_file = compile_perfetto_trace_ri(ri_compiler_argv,
compiler)
return compiler_trace_file
if compiler_type == CompilerType.DEVICE:
compiler = importlib.import_module('iorap.compiler_device')
compiler_trace_file = compile_perfetto_trace_device(inodes_path,
package,
activity,
compiler)
return compiler_trace_file
# Should not arrive here.
raise ValueError('Unknown compiler type')
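# Illustrative usage (hypothetical arguments, not part of the original module):
#   compile(CompilerType.DEVICE, 'inodes.txt', None,
#           'com.example.app', 'com.example.app.MainActivity')
# returns a NamedTemporaryFile holding the compiled trace for either compiler path.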

View File

@ -1,68 +0,0 @@
#!/usr/bin/env python3
#
# Copyright 2019, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
import sys
from typing import List
DIR = os.path.abspath(os.path.dirname(__file__))
sys.path.append(os.path.dirname(DIR)) # framework/base/startop/script
import lib.print_utils as print_utils
import iorap.lib.iorapd_utils as iorapd_utils
from app_startup.lib.app_runner import AppRunner
IORAP_COMMON_BASH_SCRIPT = os.path.join(DIR, 'common')
def parse_options(argv: List[str] = None):
"""Parses command line arguments and returns an argparse Namespace object."""
parser = argparse.ArgumentParser(description="Compile perfetto trace file")
required_named = parser.add_argument_group('required named arguments')
required_named.add_argument('-i', dest='inodes', metavar='FILE',
help='Read cached inode data from a file saved '
'earlier with pagecache.py -d')
required_named.add_argument('-p', dest='package',
help='Package of the app to be compiled')
optional_named = parser.add_argument_group('optional named arguments')
optional_named.add_argument('-o', dest='output',
help='The compiled trace is stored into the output file')
optional_named.add_argument('-a', dest='activity',
help='Activity of the app to be compiled')
optional_named.add_argument('-d', dest='debug', action='store_true',
help='Add extra debugging output')
return parser.parse_args(argv)
def main(inodes, package, activity, output, **kwargs) -> int:
"""Entries of the program."""
if not activity:
activity = AppRunner.get_activity(package)
passed = iorapd_utils.compile_perfetto_trace_on_device(package, activity,
inodes)
if passed and output:
iorapd_utils.get_iorapd_compiler_trace(package, activity, output)
return 0
if __name__ == '__main__':
opts = parse_options()
if opts.debug:
print_utils.DEBUG = opts.debug
print_utils.debug_print(opts)
sys.exit(main(**(vars(opts))))

View File

@ -1,325 +0,0 @@
#!/usr/bin/env python3
#
# Copyright (C) 2019 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# Dependencies:
#
# $> sudo apt-get install python3-pip
# $> pip3 install --user protobuf sqlalchemy  # (sqlite3 is part of the Python standard library)
#
import optparse
import os
import re
import sys
import tempfile
from pathlib import Path
from datetime import timedelta
from typing import Iterable, Optional, List
DIR = os.path.abspath(os.path.dirname(__file__))
sys.path.append(os.path.dirname(DIR))
from iorap.generated.TraceFile_pb2 import *
from iorap.lib.inode2filename import Inode2Filename
parent_dir_name = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.append(parent_dir_name)
from trace_analyzer.lib.trace2db import Trace2Db, MmFilemapAddToPageCache, \
RawFtraceEntry
import lib.cmd_utils as cmd_utils
_PAGE_SIZE = 4096 # adb shell getconf PAGESIZE ## size of a memory page in bytes.
ANDROID_BUILD_TOP = Path(parent_dir_name).parents[3]
TRACECONV_BIN = ANDROID_BUILD_TOP.joinpath(
'external/perfetto/tools/traceconv')
class PageRun:
"""
Intermediate representation for a run of one or more pages.
"""
def __init__(self, device_number: int, inode: int, offset: int, length: int):
self.device_number = device_number
self.inode = inode
self.offset = offset
self.length = length
def __str__(self):
return "PageRun(device_number=%d, inode=%d, offset=%d, length=%d)" \
%(self.device_number, self.inode, self.offset, self.length)
def debug_print(msg):
#print(msg)
pass
UNDER_LAUNCH = False
def page_cache_entries_to_runs(page_cache_entries: Iterable[MmFilemapAddToPageCache]):
global _PAGE_SIZE
runs = [
PageRun(device_number=pg_entry.dev, inode=pg_entry.ino, offset=pg_entry.ofs,
length=_PAGE_SIZE)
for pg_entry in page_cache_entries
]
for r in runs:
debug_print(r)
print("Stats: Page runs totaling byte length: %d" %(len(runs) * _PAGE_SIZE))
return runs
def optimize_page_runs(page_runs):
new_entries = []
last_entry = None
for pg_entry in page_runs:
if last_entry:
if pg_entry.device_number == last_entry.device_number and pg_entry.inode == last_entry.inode:
# we are dealing with a run for the same exact file as a previous run.
if pg_entry.offset == last_entry.offset + last_entry.length:
# trivially contiguous entries. merge them together.
last_entry.length += pg_entry.length
continue
# Default: Add the run without merging it to a previous run.
last_entry = pg_entry
new_entries.append(pg_entry)
return new_entries
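# Illustrative sketch (hypothetical runs, not from real trace data): two adjacent 4096-byte
# runs on the same (device_number, inode), e.g. offsets 0 and 4096, are merged into a single
# PageRun of length 8192; runs on other inodes or with gaps are left as separate entries.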
def is_filename_matching_filter(file_name, filters=[]):
"""
Allowlist-style regular expression filters; only file names matching one of the filters are kept by the caller.
:return: True iff file_name has an RE match in one of the filters.
"""
for filt in filters:
res = re.search(filt, file_name)
if res:
return True
return False
def build_protobuf(page_runs, inode2filename, filters=[]):
trace_file = TraceFile()
trace_file_index = trace_file.index
file_id_counter = 0
file_id_map = {} # filename -> id
stats_length_total = 0
filename_stats = {} # filename -> total size
skipped_inode_map = {}
filtered_entry_map = {} # filename -> count
for pg_entry in page_runs:
fn = inode2filename.resolve(pg_entry.device_number, pg_entry.inode)
if not fn:
skipped_inode_map[pg_entry.inode] = skipped_inode_map.get(pg_entry.inode, 0) + 1
continue
filename = fn
if filters and not is_filename_matching_filter(filename, filters):
filtered_entry_map[filename] = filtered_entry_map.get(filename, 0) + 1
continue
file_id = file_id_map.get(filename)
# file_id could be 0, which would wrongly satisfy "if not file_id" and cause a
# duplicate filename entry for file id 0.
if file_id is None:
file_id = file_id_counter
file_id_map[filename] = file_id_counter
file_id_counter = file_id_counter + 1
file_index_entry = trace_file_index.entries.add()
file_index_entry.id = file_id
file_index_entry.file_name = filename
# already in the file index, add the file entry.
file_entry = trace_file.list.entries.add()
file_entry.index_id = file_id
file_entry.file_length = pg_entry.length
stats_length_total += file_entry.file_length
file_entry.file_offset = pg_entry.offset
filename_stats[filename] = filename_stats.get(filename, 0) + file_entry.file_length
for inode, count in skipped_inode_map.items():
print("WARNING: Skip inode %s because it's not in inode map (%d entries)" %(inode, count))
print("Stats: Sum of lengths %d" %(stats_length_total))
if filters:
print("Filter: %d total files removed." %(len(filtered_entry_map)))
for fn, count in filtered_entry_map.items():
print("Filter: File '%s' removed '%d' entries." %(fn, count))
for filename, file_size in filename_stats.items():
print("%s,%s" %(filename, file_size))
return trace_file
def calc_trace_end_time(trace2db: Trace2Db,
trace_duration: Optional[timedelta]) -> float:
"""
Calculates the end time based on the trace duration.
The start time is the timestamp of the first received mm file map event.
The end time is the start time plus the trace duration.
All of them are in milliseconds.
"""
# If the duration is not set, assume all time is acceptable.
if trace_duration is None:
# float('inf')
return RawFtraceEntry.__table__.c.timestamp.type.python_type('inf')
first_event = trace2db.session.query(MmFilemapAddToPageCache).join(
MmFilemapAddToPageCache.raw_ftrace_entry).order_by(
RawFtraceEntry.timestamp).first()
# total_seconds() will return a float number.
return first_event.raw_ftrace_entry.timestamp + trace_duration.total_seconds()
def query_add_to_page_cache(trace2db: Trace2Db, trace_duration: Optional[timedelta]):
end_time = calc_trace_end_time(trace2db, trace_duration)
# SELECT * FROM tbl ORDER BY id;
return trace2db.session.query(MmFilemapAddToPageCache).join(
MmFilemapAddToPageCache.raw_ftrace_entry).filter(
RawFtraceEntry.timestamp <= end_time).order_by(
MmFilemapAddToPageCache.id).all()
def transform_perfetto_trace_to_systrace(path_to_perfetto_trace: str,
path_to_tmp_systrace: str) -> None:
""" Transforms the systrace file from perfetto trace. """
cmd_utils.run_command_nofail([str(TRACECONV_BIN),
'systrace',
path_to_perfetto_trace,
path_to_tmp_systrace])
def run(sql_db_path:str,
trace_file:str,
trace_duration:Optional[timedelta],
output_file:str,
inode_table:str,
filter:List[str]) -> int:
trace2db = Trace2Db(sql_db_path)
# Speed optimization: Skip any entries that aren't mm_filemap_add_to_page_cache.
trace2db.set_raw_ftrace_entry_filter(\
lambda entry: entry['function'] == 'mm_filemap_add_to_page_cache')
# TODO: parse multiple trace files here.
parse_count = trace2db.parse_file_into_db(trace_file)
mm_filemap_add_to_page_cache_rows = query_add_to_page_cache(trace2db,
trace_duration)
print("DONE. Parsed %d entries into sql db." %(len(mm_filemap_add_to_page_cache_rows)))
page_runs = page_cache_entries_to_runs(mm_filemap_add_to_page_cache_rows)
print("DONE. Converted %d entries" %(len(page_runs)))
# TODO: flags to select optimizations.
optimized_page_runs = optimize_page_runs(page_runs)
print("DONE. Optimized down to %d entries" %(len(optimized_page_runs)))
print("Build protobuf...")
trace_file = build_protobuf(optimized_page_runs, inode_table, filter)
print("Write protobuf to file...")
with open(output_file, 'wb') as output:
  output.write(trace_file.SerializeToString())
print("DONE")
# TODO: Silent running mode [no output except on error] for build runs.
return 0
def main(argv):
parser = optparse.OptionParser(usage="Usage: %prog [options]", description="Compile systrace file into TraceFile.pb")
parser.add_option('-i', dest='inode_data_file', metavar='FILE',
help='Read cached inode data from a file saved earlier with pagecache.py -d')
parser.add_option('-t', dest='trace_file', metavar='FILE',
help='Path to systrace file (trace.html) that will be parsed')
parser.add_option('--perfetto-trace', dest='perfetto_trace_file',
metavar='FILE',
help='Path to perfetto trace that will be parsed')
parser.add_option('--db', dest='sql_db', metavar='FILE',
help='Path to intermediate sqlite3 database [default: in-memory].')
parser.add_option('-f', dest='filter', action="append", default=[],
help="Add file filter. All file entries not matching one of the filters are discarded.")
parser.add_option('-l', dest='launch_lock', action="store_true", default=False,
help="Exclude all events not inside launch_lock")
parser.add_option('-o', dest='output_file', metavar='FILE',
help='Output protobuf file')
parser.add_option('--duration', dest='trace_duration', action="store",
type=int, help='The duration of trace in milliseconds.')
options, categories = parser.parse_args(argv[1:])
# TODO: OptionParser should have some flags to make these mandatory.
if not options.inode_data_file:
parser.error("-i is required")
if not options.trace_file and not options.perfetto_trace_file:
parser.error("one of -t or --perfetto-trace is required")
if options.trace_file and options.perfetto_trace_file:
parser.error("please enter either -t or --perfetto-trace, not both")
if not options.output_file:
parser.error("-o is required")
if options.launch_lock:
print("INFO: Launch lock flag (-l) enabled; filtering all events not inside launch_lock.")
inode_table = Inode2Filename.new_from_filename(options.inode_data_file)
sql_db_path = ":memory:"
if options.sql_db:
sql_db_path = options.sql_db
trace_duration = timedelta(milliseconds=options.trace_duration) if \
options.trace_duration is not None else None
# if the input is systrace
if options.trace_file:
return run(sql_db_path,
options.trace_file,
trace_duration,
options.output_file,
inode_table,
options.filter)
# if the input is perfetto trace
# TODO python 3.7 switch to using nullcontext
with tempfile.NamedTemporaryFile() as trace_file:
transform_perfetto_trace_to_systrace(options.perfetto_trace_file,
trace_file.name)
return run(sql_db_path,
trace_file.name,
trace_duration,
options.output_file,
inode_table,
options.filter)
if __name__ == '__main__':
print(sys.argv)
sys.exit(main(sys.argv))

View File

@ -1,78 +0,0 @@
#!/usr/bin/env python3
#
# Copyright 2019, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Unit tests for the compiler.py script.
Install:
$> sudo apt-get install python3-pytest ## OR
$> pip install -U pytest
See also https://docs.pytest.org/en/latest/getting-started.html
Usage:
$> pytest compiler_test.py
See also https://docs.pytest.org/en/latest/usage.html
"""
import os
import compiler_ri as compiler
DIR = os.path.abspath(os.path.dirname(__file__))
TEXTCACHE = os.path.join(DIR, 'test_fixtures/compiler/common_textcache')
SYSTRACE = os.path.join(DIR, 'test_fixtures/compiler/common_systrace')
ARGV = [os.path.join(DIR, 'compiler.py'), '-i', TEXTCACHE, '-t', SYSTRACE]
PERFETTO_TRACE = os.path.join(DIR,
'test_fixtures/compiler/common_perfetto_trace.pb')
def assert_compile_result(output, expected, *extra_argv):
argv = ARGV + ['-o', str(output)] + list(extra_argv)
compiler.main(argv)
with open(output, 'rb') as f1, open(expected, 'rb') as f2:
assert f1.read() == f2.read()
### Unit tests - testing compiler code directly
def test_transform_perfetto_trace_to_systrace(tmpdir):
expected = os.path.join(DIR,
'test_fixtures/compiler/test_result_systrace')
output = tmpdir.mkdir('compiler').join('tmp_systrace')
compiler.transform_perfetto_trace_to_systrace(PERFETTO_TRACE, str(output))
with open(output, 'rb') as f1, open(expected, 'rb') as f2:
assert f1.read() == f2.read()
### Functional tests - calls 'compiler.py --args...'
def test_compiler_main(tmpdir):
output = tmpdir.mkdir('compiler').join('output')
# No duration
expected = os.path.join(DIR,
'test_fixtures/compiler/test_result_without_duration.TraceFile.pb')
assert_compile_result(output, expected)
# 10000 ms (10 s) duration
expected = os.path.join(DIR,
'test_fixtures/compiler/test_result_with_duration.TraceFile.pb')
assert_compile_result(output, expected, '--duration', '10000')
# 30000 ms (30 s) duration
expected = os.path.join(DIR,
'test_fixtures/compiler/test_result_without_duration.TraceFile.pb')
assert_compile_result(output, expected, '--duration', '30000')

View File

@ -1,38 +0,0 @@
#!/bin/bash
#
# Copyright 2019, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
#
# Dumps an iorap compiler protobuf from iorap.cmd.compiler into text
# with gqui.
#
if [[ "$#" -lt 1 ]]; then
echo "Usage: $0 <compiler_trace_file.pb> [...args]" >&2
exit 1
fi
path_to_proto="$DIR/../../../../../system/iorap/src/serialize/TraceFile.proto"
filename="$1"
shift
if ! [[ -f $filename ]]; then
echo "Error: $filename does not exist." >&2
exit 1
fi
gqui "rawproto:$filename" proto "$path_to_proto":iorap.serialize.proto.TraceFile "$@"

View File

@ -1,38 +0,0 @@
#!/bin/bash
#
# Copyright 2019, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
#
# Dumps a perfetto protobuf collected by iorapd (from perfetto) into text
# with gqui.
#
if [[ "$#" -lt 1 ]]; then
echo "Usage: $0 <perfetto_trace.pb> [...args]" >&2
exit 1
fi
path_to_perfetto_proto="$DIR/../../../../../external/perfetto/protos/perfetto/trace/perfetto_trace.proto"
filename="$1"
shift
if ! [[ -f $filename ]]; then
echo "Error: $filename does not exist." >&2
exit 1
fi
gqui "rawproto:$filename" proto "$path_to_perfetto_proto":perfetto.protos.Trace "$@"

View File

@ -1,259 +0,0 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: TraceFile.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='TraceFile.proto',
package='iorap.serialize.proto',
syntax='proto2',
serialized_pb=_b('\n\x0fTraceFile.proto\x12\x15iorap.serialize.proto\"u\n\tTraceFile\x12\x34\n\x05index\x18\x01 \x02(\x0b\x32%.iorap.serialize.proto.TraceFileIndex\x12\x32\n\x04list\x18\x02 \x02(\x0b\x32$.iorap.serialize.proto.TraceFileList\"M\n\x0eTraceFileIndex\x12;\n\x07\x65ntries\x18\x01 \x03(\x0b\x32*.iorap.serialize.proto.TraceFileIndexEntry\"4\n\x13TraceFileIndexEntry\x12\n\n\x02id\x18\x01 \x02(\x03\x12\x11\n\tfile_name\x18\x02 \x02(\t\"G\n\rTraceFileList\x12\x36\n\x07\x65ntries\x18\x01 \x03(\x0b\x32%.iorap.serialize.proto.TraceFileEntry\"L\n\x0eTraceFileEntry\x12\x10\n\x08index_id\x18\x01 \x02(\x03\x12\x13\n\x0b\x66ile_offset\x18\x02 \x02(\x03\x12\x13\n\x0b\x66ile_length\x18\x03 \x02(\x03\x42\x1c\n\x18\x63om.google.android.iorapH\x03')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_TRACEFILE = _descriptor.Descriptor(
name='TraceFile',
full_name='iorap.serialize.proto.TraceFile',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='index', full_name='iorap.serialize.proto.TraceFile.index', index=0,
number=1, type=11, cpp_type=10, label=2,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='list', full_name='iorap.serialize.proto.TraceFile.list', index=1,
number=2, type=11, cpp_type=10, label=2,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=42,
serialized_end=159,
)
_TRACEFILEINDEX = _descriptor.Descriptor(
name='TraceFileIndex',
full_name='iorap.serialize.proto.TraceFileIndex',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='entries', full_name='iorap.serialize.proto.TraceFileIndex.entries', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=161,
serialized_end=238,
)
_TRACEFILEINDEXENTRY = _descriptor.Descriptor(
name='TraceFileIndexEntry',
full_name='iorap.serialize.proto.TraceFileIndexEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='iorap.serialize.proto.TraceFileIndexEntry.id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='file_name', full_name='iorap.serialize.proto.TraceFileIndexEntry.file_name', index=1,
number=2, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=240,
serialized_end=292,
)
_TRACEFILELIST = _descriptor.Descriptor(
name='TraceFileList',
full_name='iorap.serialize.proto.TraceFileList',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='entries', full_name='iorap.serialize.proto.TraceFileList.entries', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=294,
serialized_end=365,
)
_TRACEFILEENTRY = _descriptor.Descriptor(
name='TraceFileEntry',
full_name='iorap.serialize.proto.TraceFileEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='index_id', full_name='iorap.serialize.proto.TraceFileEntry.index_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='file_offset', full_name='iorap.serialize.proto.TraceFileEntry.file_offset', index=1,
number=2, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='file_length', full_name='iorap.serialize.proto.TraceFileEntry.file_length', index=2,
number=3, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=367,
serialized_end=443,
)
_TRACEFILE.fields_by_name['index'].message_type = _TRACEFILEINDEX
_TRACEFILE.fields_by_name['list'].message_type = _TRACEFILELIST
_TRACEFILEINDEX.fields_by_name['entries'].message_type = _TRACEFILEINDEXENTRY
_TRACEFILELIST.fields_by_name['entries'].message_type = _TRACEFILEENTRY
DESCRIPTOR.message_types_by_name['TraceFile'] = _TRACEFILE
DESCRIPTOR.message_types_by_name['TraceFileIndex'] = _TRACEFILEINDEX
DESCRIPTOR.message_types_by_name['TraceFileIndexEntry'] = _TRACEFILEINDEXENTRY
DESCRIPTOR.message_types_by_name['TraceFileList'] = _TRACEFILELIST
DESCRIPTOR.message_types_by_name['TraceFileEntry'] = _TRACEFILEENTRY
TraceFile = _reflection.GeneratedProtocolMessageType('TraceFile', (_message.Message,), dict(
DESCRIPTOR = _TRACEFILE,
__module__ = 'TraceFile_pb2'
# @@protoc_insertion_point(class_scope:iorap.serialize.proto.TraceFile)
))
_sym_db.RegisterMessage(TraceFile)
TraceFileIndex = _reflection.GeneratedProtocolMessageType('TraceFileIndex', (_message.Message,), dict(
DESCRIPTOR = _TRACEFILEINDEX,
__module__ = 'TraceFile_pb2'
# @@protoc_insertion_point(class_scope:iorap.serialize.proto.TraceFileIndex)
))
_sym_db.RegisterMessage(TraceFileIndex)
TraceFileIndexEntry = _reflection.GeneratedProtocolMessageType('TraceFileIndexEntry', (_message.Message,), dict(
DESCRIPTOR = _TRACEFILEINDEXENTRY,
__module__ = 'TraceFile_pb2'
# @@protoc_insertion_point(class_scope:iorap.serialize.proto.TraceFileIndexEntry)
))
_sym_db.RegisterMessage(TraceFileIndexEntry)
TraceFileList = _reflection.GeneratedProtocolMessageType('TraceFileList', (_message.Message,), dict(
DESCRIPTOR = _TRACEFILELIST,
__module__ = 'TraceFile_pb2'
# @@protoc_insertion_point(class_scope:iorap.serialize.proto.TraceFileList)
))
_sym_db.RegisterMessage(TraceFileList)
TraceFileEntry = _reflection.GeneratedProtocolMessageType('TraceFileEntry', (_message.Message,), dict(
DESCRIPTOR = _TRACEFILEENTRY,
__module__ = 'TraceFile_pb2'
# @@protoc_insertion_point(class_scope:iorap.serialize.proto.TraceFileEntry)
))
_sym_db.RegisterMessage(TraceFileEntry)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\030com.google.android.iorapH\003'))
# @@protoc_insertion_point(module_scope)

View File

@ -1,35 +0,0 @@
#!/bin/bash
#
# Copyright 2019, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
APROTOC="$(which aprotoc)"
IORAP_SERIALIZE_DIR="${DIR}/../../../../../../system/iorap/src/serialize"
IORAP_PROTOS=($IORAP_SERIALIZE_DIR/*.proto)
if [[ $? -ne 0 ]]; then
echo "Fatal: Missing aprotoc. Set APROTOC=... or lunch build/envsetup.sh?" >&2
exit 1
fi
if ! [[ -d $IORAP_SERIALIZE_DIR ]]; then
echo "Fatal: Directory '$IORAP_SERIALIZE_DIR' does not exist." >&2
exit 1
fi
# codegen the .py files into the same directory as this script.
echo "$APROTOC" --proto_path="$IORAP_SERIALIZE_DIR" --python_out="$DIR" "${IORAP_PROTOS[@]}"
"$APROTOC" --proto_path="$IORAP_SERIALIZE_DIR" --python_out="$DIR" "${IORAP_PROTOS[@]}"

View File

@ -1,94 +0,0 @@
#!/usr/bin/env python3
#
# Copyright (C) 2019 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import Any, Callable, Dict, Generic, Iterable, List, NamedTuple, TextIO, Tuple, TypeVar, Optional, Union, TextIO
import re
class Inode2Filename:
"""
Parses a text file of the format
"uint(dev_t) uint(ino_t) int(file_size) string(filepath)\\n"*
Lines not matching this format are ignored.
"""
def __init__(self, inode_data_file: TextIO):
"""
Create an Inode2Filename that reads cached inode from a file saved earlier
(e.g. with pagecache.py -d or with inode2filename --format=textcache)
:param inode_data_file: a file object (e.g. created with open or StringIO).
Lifetime: inode_data_file is only used during the construction of the object.
"""
self._inode_table = Inode2Filename.build_inode_lookup_table(inode_data_file)
@classmethod
def new_from_filename(cls, textcache_filename: str) -> 'Inode2Filename':
"""
Create an Inode2Filename that reads cached inode from a file saved earlier
(e.g. with pagecache.py -d or with inode2filename --format=textcache)
:param textcache_filename: path to textcache
"""
with open(textcache_filename) as inode_data_file:
return cls(inode_data_file)
@staticmethod
def build_inode_lookup_table(inode_data_file: TextIO) -> Dict[Tuple[int, int], Tuple[str, str]]:
"""
:return: map { (device_int, inode_int) -> (filename_str, size_str) }
"""
inode2filename = {}
for line in inode_data_file:
# stat -c "%d %i %s %n
# device number, inode number, total size in bytes, file name
result = re.match('([0-9]+)d? ([0-9]+) -?([0-9]+) (.*)', line)
if result:
inode2filename[(int(result.group(1)), int(result.group(2)))] = \
(result.group(4), result.group(3))
return inode2filename
def resolve(self, dev_t: int, ino_t: int) -> Optional[str]:
"""
Return a filename (str) from a (dev_t, ino_t) inode pair.
Returns None if the lookup fails.
"""
maybe_result = self._inode_table.get((dev_t, ino_t))
if not maybe_result:
return None
return maybe_result[0] # filename str
def __len__(self) -> int:
"""
:return: the number of inode entries parsed from the file.
"""
return len(self._inode_table)
def __repr__(self) -> str:
"""
:return: string representation for debugging/test failures.
"""
return "Inode2Filename%s" %(repr(self._inode_table))
# end of class.
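# Illustrative usage sketch (a minimal example assuming the textcache format
# described above; the sample line mirrors the test fixtures).
def _example_usage() -> None:
  import io
  table = Inode2Filename(io.StringIO("64774 1 -1 /system/test1\n"))
  assert len(table) == 1
  assert table.resolve(64774, 1) == "/system/test1"
  assert table.resolve(64774, 3) is None  # unknown inode -> None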

View File

@ -1,83 +0,0 @@
#!/usr/bin/env python3
#
# Copyright 2019, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Unit tests for inode2filename module.
Install:
$> sudo apt-get install python3-pytest ## OR
$> pip install -U pytest
See also https://docs.pytest.org/en/latest/getting-started.html
Usage:
$> ./inode2filename_test.py
$> pytest inode2filename_test.py
$> python -m pytest inode2filename_test.py
See also https://docs.pytest.org/en/latest/usage.html
"""
# global imports
from contextlib import contextmanager
import io
import shlex
import sys
import typing
# pip imports
import pytest
# local imports
from inode2filename import *
def create_inode2filename(*contents):
buf = io.StringIO()
for c in contents:
buf.write(c)
buf.write("\n")
buf.seek(0)
i2f = Inode2Filename(buf)
buf.close()
return i2f
def test_inode2filename():
a = create_inode2filename("")
assert len(a) == 0
assert a.resolve(1, 2) == None
a = create_inode2filename("1 2 3 foo.bar")
assert len(a) == 1
assert a.resolve(1, 2) == "foo.bar"
assert a.resolve(4, 5) == None
a = create_inode2filename("1 2 3 foo.bar", "4 5 6 bar.baz")
assert len(a) == 2
assert a.resolve(1, 2) == "foo.bar"
assert a.resolve(4, 5) == "bar.baz"
a = create_inode2filename("1567d 8910 -1 /a/b/c/", "4 5 6 bar.baz")
assert len(a) == 2
assert a.resolve(1567, 8910) == "/a/b/c/"
assert a.resolve(4, 5) == "bar.baz"
if __name__ == '__main__':
pytest.main()

View File

@ -1,175 +0,0 @@
#!/usr/bin/env python3
#
# Copyright 2019, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper util libraries for iorapd related operations."""
import os
import sys
# Make the scripts root (two levels up) importable so 'lib.cmd_utils' resolves.
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..'))
import lib.cmd_utils as cmd_utils
IORAPID_LIB_DIR = os.path.abspath(os.path.dirname(__file__))
IORAPD_DATA_PATH = '/data/misc/iorapd'
IORAP_COMMON_BASH_SCRIPT = os.path.realpath(os.path.join(IORAPID_LIB_DIR,
'../common'))
def _iorapd_path_to_data_file(package: str, activity: str, suffix: str) -> str:
"""Gets conventional data filename.
Returns:
The path of iorapd data file.
"""
# Match logic of 'AppComponentName' in iorap::compiler C++ code.
return '{}/{}%2F{}.{}'.format(IORAPD_DATA_PATH, package, activity, suffix)
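# Example (hypothetical package/activity): _iorapd_path_to_data_file(
#   'com.example.app', 'MainActivity', 'compiled_trace.pb') yields
# '/data/misc/iorapd/com.example.app%2FMainActivity.compiled_trace.pb',
# i.e. the '/' between package and activity is escaped as '%2F'.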
def compile_perfetto_trace_on_device(package: str, activity: str,
inodes: str) -> bool:
"""Compiles the perfetto trace using on-device compiler."""
passed, _ = cmd_utils.run_shell_func(IORAP_COMMON_BASH_SCRIPT,
'iorapd_compiler_for_app_trace',
[package, activity, inodes])
return passed
def get_iorapd_compiler_trace(package: str, activity: str, dest: str) -> bool:
"""Pulls the compiled trace for (package, activity) into the dest file.
Returns:
Whether the compiled trace was pulled successfully or not.
"""
src = _iorapd_path_to_data_file(package, activity, 'compiled_trace.pb')
passed, _ = cmd_utils.run_shell_command('adb pull "{}" "{}"'.format(src, dest))
return passed
def iorapd_compiler_install_trace_file(package: str, activity: str,
input_file: str) -> bool:
"""Installs a compiled trace file.
Returns:
Whether the trace file was installed successfully or not.
"""
# remote path calculations
compiled_path = _iorapd_path_to_data_file(package, activity,
'compiled_trace.pb')
if not os.path.exists(input_file):
print('Error: File {} does not exist'.format(input_file))
return False
passed, _ = cmd_utils.run_adb_shell_command(
'mkdir -p "$(dirname "{}")"'.format(compiled_path))
if not passed:
return False
passed, _ = cmd_utils.run_shell_command('adb push "{}" "{}"'.format(
input_file, compiled_path))
return passed
def wait_for_iorapd_finish(package: str,
activity: str,
timeout: int,
debug: bool,
logcat_timestamp: str)->bool:
"""Waits for the finish of iorapd.
Returns:
A bool indicating whether iorapd finished successfully.
"""
# Set verbose for bash script based on debug flag.
if debug:
os.putenv('verbose', 'y')
# Validate that readahead completes.
# If this fails for some reason, then this will also discard the timing of
# the run.
passed, _ = cmd_utils.run_shell_func(IORAP_COMMON_BASH_SCRIPT,
'iorapd_readahead_wait_until_finished',
[package, activity, logcat_timestamp,
str(timeout)])
return passed
def enable_iorapd_readahead() -> bool:
"""
Enable readahead. Subsequent launches of an application will be sped up
by iorapd readahead prefetching.
Returns:
A bool indicating whether enabling succeeded.
"""
passed, _ = cmd_utils.run_shell_func(IORAP_COMMON_BASH_SCRIPT,
'iorapd_readahead_enable', [])
return passed
def disable_iorapd_readahead() -> bool:
"""
Disable readahead. Subsequent launches of an application will not be sped
up by iorapd readahead prefetching.
Returns:
A bool indicating whether disabling succeeded.
"""
passed, _ = cmd_utils.run_shell_func(IORAP_COMMON_BASH_SCRIPT,
'iorapd_readahead_disable', [])
return passed
def enable_iorapd_perfetto() -> bool:
"""
Enable Perfetto. Subsequent launches of an application will record a perfetto
trace protobuf.
Returns:
A bool indicating whether enabling succeeded.
"""
passed, _ = cmd_utils.run_shell_func(IORAP_COMMON_BASH_SCRIPT,
'iorapd_perfetto_enable', [])
return passed
def disable_iorapd_perfetto() -> bool:
"""
Disable Perfetto. Subsequent launches of applications will no longer record
perfetto trace protobufs.
Returns:
A bool indicating whether disabling succeeded.
"""
passed, _ = cmd_utils.run_shell_func(IORAP_COMMON_BASH_SCRIPT,
'iorapd_perfetto_disable', [])
return passed
def start_iorapd() -> bool:
"""
Starts iorapd.
Returns:
A bool indicating whether iorapd started successfully.
"""
passed, _ = cmd_utils.run_shell_func(IORAP_COMMON_BASH_SCRIPT,
'iorapd_start', [])
return passed
def stop_iorapd() -> bool:
"""
Stops iorapd.
Returns:
A bool indicating whether iorapd stopped successfully.
"""
passed, _ = cmd_utils.run_shell_func(IORAP_COMMON_BASH_SCRIPT,
'iorapd_stop', [])
return passed
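# Illustrative call sequence (an assumed flow, not prescribed by this module):
# a harness collecting a fresh perfetto trace might disable readahead, enable
# perfetto tracing, restart iorapd, launch the app, then wait for completion:
#
#   disable_iorapd_readahead()
#   enable_iorapd_perfetto()
#   stop_iorapd(); start_iorapd()
#   # ... launch the package/activity under test ...
#   wait_for_iorapd_finish(package, activity, timeout=300, debug=False,
#                          logcat_timestamp=ts)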

View File

@ -1,24 +0,0 @@
#!/bin/bash
#
# Copyright 2019, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
if [[ $# -lt 1 ]]; then
echo "Usage: $0 <output-filename>" >&2
exit 1
fi
# see compiler/main.cc for list of roots
adb shell iorap.inode2filename --output-format=textcache --output=/data/local/tmp/dumpcache --all --root=/system --root=/apex --root=/vendor --root=/data --root=/product --root=/metadata
adb pull /data/local/tmp/dumpcache "$1"
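# Each line of the pulled textcache is expected to have the form
# "<device> <inode> <size> <path>", e.g. "64774 1 -1 /system/test1".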

View File

@ -1,5 +0,0 @@
<...>-2965 (-----) [001] .... 10000.746629: mm_filemap_add_to_page_cache: dev 253:6 ino 1 page=00000000679ee1ec pfn=1299913 ofs=192512
<...>-2965 (-----) [001] .... 10010.746664: mm_filemap_add_to_page_cache: dev 253:6 ino 2 page=0000000006cd2fb7 pfn=1296251 ofs=196608
<...>-2965 (-----) [001] .... 10020.746677: mm_filemap_add_to_page_cache: dev 253:6 ino 3 page=00000000af82f3d6 pfn=1419330 ofs=200704
<...>-2965 (-----) [001] .... 10030.746693: mm_filemap_add_to_page_cache: dev 253:6 ino 4 page=000000002840f054 pfn=1304928 ofs=204800
<...>-2965 (-----) [001] .... 10040.746706: mm_filemap_add_to_page_cache: dev 253:6 ino 5 page=000000004a59da17 pfn=1288069 ofs=208896

View File

@ -1,2 +0,0 @@
64774 1 -1 /system/test1
64774 3 -1 /data/test2

View File

@ -1,748 +0,0 @@
TRACE:
# tracer: nop
#
# entries-in-buffer/entries-written: 30624/30624 #P:4
#
# _-----=> irqs-off
# / _----=> need-resched
# | / _---=> hardirq/softirq
# || / _--=> preempt-depth
# ||| / delay
# TASK-PID TGID CPU# |||| TIMESTAMP FUNCTION
# | | | | |||| | |
<unknown>-27388 (-----) [004] .... 1920260.530929: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1461937 ofs=9535488
<unknown>-27388 (-----) [005] .... 1920260.532161: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1344589 ofs=9474048
<unknown>-27388 (-----) [005] .... 1920260.532183: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1153671 ofs=9478144
<unknown>-27388 (-----) [005] .... 1920260.532184: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1219563 ofs=9482240
<unknown>-27388 (-----) [005] .... 1920260.532185: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1083162 ofs=9486336
<unknown>-27388 (-----) [005] .... 1920260.532185: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1147318 ofs=9490432
<unknown>-27388 (-----) [005] .... 1920260.532186: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1333594 ofs=9494528
<unknown>-27388 (-----) [005] .... 1920260.532186: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1375715 ofs=9498624
<unknown>-27388 (-----) [005] .... 1920260.532186: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1184831 ofs=9502720
<unknown>-27388 (-----) [005] .... 1920260.532187: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1241653 ofs=9506816
<unknown>-27388 (-----) [005] .... 1920260.532187: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1134975 ofs=9510912
<unknown>-27388 (-----) [005] .... 1920260.532190: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1145772 ofs=9515008
<unknown>-27388 (-----) [005] .... 1920260.532190: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1090457 ofs=9519104
<unknown>-27388 (-----) [005] .... 1920260.532190: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1137942 ofs=9523200
<unknown>-27388 (-----) [005] .... 1920260.532191: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1130123 ofs=9527296
<unknown>-27388 (-----) [005] .... 1920260.532191: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1208783 ofs=9531392
<unknown>-27388 (-----) [005] .... 1920260.532192: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1294989 ofs=9539584
<unknown>-27388 (-----) [005] .... 1920260.532206: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1163979 ofs=9543680
<unknown>-27388 (-----) [005] .... 1920260.532206: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1350628 ofs=9547776
<unknown>-27388 (-----) [005] .... 1920260.532206: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1386717 ofs=9551872
<unknown>-27388 (-----) [005] .... 1920260.532207: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1316148 ofs=9555968
<unknown>-27388 (-----) [005] .... 1920260.532208: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1316419 ofs=9560064
<unknown>-27388 (-----) [005] .... 1920260.532208: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1149076 ofs=9564160
<unknown>-27388 (-----) [005] .... 1920260.532209: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1372772 ofs=9568256
<unknown>-27388 (-----) [005] .... 1920260.532209: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1116389 ofs=9572352
<unknown>-27388 (-----) [005] .... 1920260.532211: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1325458 ofs=9576448
<unknown>-27388 (-----) [005] .... 1920260.532211: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1195423 ofs=9580544
<unknown>-27388 (-----) [005] .... 1920260.532211: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1250964 ofs=9584640
<unknown>-27388 (-----) [005] .... 1920260.532212: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1196027 ofs=9588736
<unknown>-27388 (-----) [005] .... 1920260.532212: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1354059 ofs=9592832
<unknown>-27388 (-----) [005] .... 1920260.532213: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1264649 ofs=9596928
<unknown>-27388 (-----) [005] .... 1920260.532213: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1245285 ofs=9601024
<unknown>-27388 (-----) [005] .... 1920260.535119: mm_filemap_add_to_page_cache: dev 0:64768 ino 588 page=0000000000000000 pfn=1411552 ofs=44244992
<unknown>-27388 (-----) [005] .... 1920260.535129: mm_filemap_add_to_page_cache: dev 0:3 ino 0 page=0000000000000000 pfn=1483081 ofs=433524736
<unknown>-27388 (-----) [004] .... 1920260.536144: mm_filemap_add_to_page_cache: dev 0:3 ino 0 page=0000000000000000 pfn=1276173 ofs=438185984
<unknown>-27388 (-----) [004] .... 1920260.536462: mm_filemap_add_to_page_cache: dev 0:64768 ino 588 page=0000000000000000 pfn=1174575 ofs=44249088
<unknown>-27388 (-----) [004] .... 1920260.536464: mm_filemap_add_to_page_cache: dev 0:64768 ino 588 page=0000000000000000 pfn=1126294 ofs=44253184
<unknown>-27388 (-----) [004] .... 1920260.536464: mm_filemap_add_to_page_cache: dev 0:64768 ino 588 page=0000000000000000 pfn=1248232 ofs=44257280
<unknown>-27388 (-----) [004] .... 1920260.537065: mm_filemap_add_to_page_cache: dev 0:64768 ino 588 page=0000000000000000 pfn=1332993 ofs=44240896
<unknown>-27388 (-----) [006] .... 1920260.537646: mm_filemap_add_to_page_cache: dev 0:64768 ino 588 page=0000000000000000 pfn=1153343 ofs=44400640
<unknown>-27388 (-----) [005] .... 1920260.538777: mm_filemap_add_to_page_cache: dev 0:64768 ino 588 page=0000000000000000 pfn=1358397 ofs=44474368
<unknown>-12683 (-----) [006] .... 1920260.560094: mm_filemap_add_to_page_cache: dev 0:64771 ino 11e page=0000000000000000 pfn=1426577 ofs=0
<unknown>-12683 (-----) [006] .... 1920260.560105: mm_filemap_add_to_page_cache: dev 0:64771 ino 1 page=0000000000000000 pfn=1117587 ofs=1171456
<unknown>-12683 (-----) [006] .... 1920260.561199: mm_filemap_add_to_page_cache: dev 0:64771 ino 11e page=0000000000000000 pfn=1099987 ofs=4096
<unknown>-12683 (-----) [006] .... 1920260.561411: mm_filemap_add_to_page_cache: dev 0:64771 ino 11e page=0000000000000000 pfn=1099910 ofs=16384
<unknown>-12683 (-----) [006] .... 1920260.561598: mm_filemap_add_to_page_cache: dev 0:64771 ino 11e page=0000000000000000 pfn=1099905 ofs=20480
<unknown>-12683 (-----) [006] .... 1920260.561758: mm_filemap_add_to_page_cache: dev 0:64771 ino 11e page=0000000000000000 pfn=1099883 ofs=32768
<unknown>-12683 (-----) [006] .... 1920260.562088: mm_filemap_add_to_page_cache: dev 0:64771 ino 11e page=0000000000000000 pfn=1099809 ofs=36864
<unknown>-12683 (-----) [006] .... 1920260.562325: mm_filemap_add_to_page_cache: dev 0:64771 ino 11e page=0000000000000000 pfn=1099803 ofs=98304
<unknown>-12683 (-----) [006] .... 1920260.562516: mm_filemap_add_to_page_cache: dev 0:64771 ino 11e page=0000000000000000 pfn=1099795 ofs=102400
<unknown>-12683 (-----) [006] .... 1920260.563094: mm_filemap_add_to_page_cache: dev 0:64768 ino 5f3 page=0000000000000000 pfn=1107649 ofs=12288
<unknown>-12683 (-----) [006] .... 1920260.563105: mm_filemap_add_to_page_cache: dev 0:64768 ino 5f3 page=0000000000000000 pfn=1269029 ofs=16384
<unknown>-12683 (-----) [006] .... 1920260.563785: mm_filemap_add_to_page_cache: dev 0:64768 ino 4da page=0000000000000000 pfn=1451096 ofs=8192
<unknown>-12683 (-----) [006] .... 1920260.563790: mm_filemap_add_to_page_cache: dev 0:64768 ino 4da page=0000000000000000 pfn=1301480 ofs=12288
<unknown>-12683 (-----) [006] .... 1920260.563790: mm_filemap_add_to_page_cache: dev 0:64768 ino 4da page=0000000000000000 pfn=1314353 ofs=16384
<unknown>-12683 (-----) [006] .... 1920260.563791: mm_filemap_add_to_page_cache: dev 0:64768 ino 4da page=0000000000000000 pfn=1216744 ofs=24576
<unknown>-12683 (-----) [006] .... 1920260.564309: mm_filemap_add_to_page_cache: dev 0:64771 ino 11e page=0000000000000000 pfn=1099787 ofs=49152
<unknown>-12683 (-----) [006] .... 1920260.564514: mm_filemap_add_to_page_cache: dev 0:64771 ino 11e page=0000000000000000 pfn=1099778 ofs=53248
<unknown>-12683 (-----) [005] .... 1920260.564756: mm_filemap_add_to_page_cache: dev 0:64771 ino 11e page=0000000000000000 pfn=1148849 ofs=114688
<unknown>-12683 (-----) [005] .... 1920260.564973: mm_filemap_add_to_page_cache: dev 0:64771 ino 11e page=0000000000000000 pfn=1164731 ofs=118784
<unknown>-12683 (-----) [005] .... 1920260.565000: mm_filemap_add_to_page_cache: dev 0:2053 ino 1a page=0000000000000000 pfn=1170255 ofs=0
<unknown>-12683 (-----) [005] .... 1920260.565003: mm_filemap_add_to_page_cache: dev 0:2053 ino 1a page=0000000000000000 pfn=1181043 ofs=4096
<unknown>-12683 (-----) [005] .... 1920260.565004: mm_filemap_add_to_page_cache: dev 0:2053 ino 1a page=0000000000000000 pfn=1296004 ofs=8192
<unknown>-12683 (-----) [005] .... 1920260.565004: mm_filemap_add_to_page_cache: dev 0:2053 ino 1a page=0000000000000000 pfn=1102004 ofs=12288
<unknown>-12683 (-----) [005] .... 1920260.565626: mm_filemap_add_to_page_cache: dev 0:3 ino 0 page=0000000000000000 pfn=1351232 ofs=470597632
<unknown>-12683 (-----) [005] .... 1920260.565982: mm_filemap_add_to_page_cache: dev 0:64771 ino 1 page=0000000000000000 pfn=1391336 ofs=40210432
<unknown>-12683 (-----) [005] .... 1920260.565985: mm_filemap_add_to_page_cache: dev 0:64771 ino 2 page=0000000000000000 pfn=1267536 ofs=12668928
<unknown>-27388 (-----) [007] .... 1920260.566082: mm_filemap_add_to_page_cache: dev 0:64768 ino 588 page=0000000000000000 pfn=1256752 ofs=43921408
<unknown>-12683 (-----) [005] .... 1920260.566516: mm_filemap_add_to_page_cache: dev 0:64771 ino 1 page=0000000000000000 pfn=1110966 ofs=176226304
<unknown>-12683 (-----) [005] .... 1920260.566519: mm_filemap_add_to_page_cache: dev 0:64771 ino 2 page=0000000000000000 pfn=1060586 ofs=12967936
<unknown>-12683 (-----) [004] .... 1920260.567773: mm_filemap_add_to_page_cache: dev 0:64771 ino 1 page=0000000000000000 pfn=1117234 ofs=421888
<unknown>-12683 (-----) [005] .... 1920260.568604: mm_filemap_add_to_page_cache: dev 0:64771 ino 1 page=0000000000000000 pfn=1210571 ofs=430080
<unknown>-12683 (-----) [005] .... 1920260.568887: mm_filemap_add_to_page_cache: dev 0:64771 ino 69 page=0000000000000000 pfn=1055640 ofs=0
<unknown>-12683 (-----) [005] .... 1920260.568908: mm_filemap_add_to_page_cache: dev 0:64771 ino 49 page=0000000000000000 pfn=1142694 ofs=0
<unknown>-12683 (-----) [005] .... 1920260.568910: mm_filemap_add_to_page_cache: dev 0:64771 ino 1 page=0000000000000000 pfn=1060788 ofs=299008
<unknown>-12683 (-----) [005] .... 1920260.569418: mm_filemap_add_to_page_cache: dev 0:64771 ino 49 page=0000000000000000 pfn=1085046 ofs=4096
<unknown>-12683 (-----) [005] .... 1920260.569640: mm_filemap_add_to_page_cache: dev 0:64771 ino 49 page=0000000000000000 pfn=1057135 ofs=8192
<unknown>-12683 (-----) [005] .... 1920260.569833: mm_filemap_add_to_page_cache: dev 0:64771 ino 1 page=0000000000000000 pfn=1058976 ofs=19406848
<unknown>-12683 (-----) [005] .... 1920260.569835: mm_filemap_add_to_page_cache: dev 0:64771 ino 2 page=0000000000000000 pfn=1477947 ofs=10526720
<unknown>-12683 (-----) [005] .... 1920260.572285: mm_filemap_add_to_page_cache: dev 0:64768 ino 61d page=0000000000000000 pfn=1237492 ofs=299008
<unknown>-12683 (-----) [005] .... 1920260.572297: mm_filemap_add_to_page_cache: dev 0:64768 ino 61d page=0000000000000000 pfn=1264914 ofs=339968
<unknown>-12683 (-----) [005] .... 1920260.572314: mm_filemap_add_to_page_cache: dev 0:64768 ino 61d page=0000000000000000 pfn=1434748 ofs=348160
<unknown>-12683 (-----) [005] .... 1920260.572316: mm_filemap_add_to_page_cache: dev 0:64768 ino 61d page=0000000000000000 pfn=1372959 ofs=352256
<unknown>-12683 (-----) [005] .... 1920260.572317: mm_filemap_add_to_page_cache: dev 0:64768 ino 61d page=0000000000000000 pfn=1258955 ofs=356352
<unknown>-12683 (-----) [005] .... 1920260.572317: mm_filemap_add_to_page_cache: dev 0:64768 ino 61d page=0000000000000000 pfn=1113420 ofs=360448
<unknown>-12683 (-----) [005] .... 1920260.572318: mm_filemap_add_to_page_cache: dev 0:64768 ino 61d page=0000000000000000 pfn=1137083 ofs=364544
<unknown>-12683 (-----) [004] .... 1920260.575490: mm_filemap_add_to_page_cache: dev 0:64771 ino 11e page=0000000000000000 pfn=1379679 ofs=65536
<unknown>-12683 (-----) [006] .... 1920260.576194: mm_filemap_add_to_page_cache: dev 0:64771 ino 11e page=0000000000000000 pfn=1323898 ofs=69632
<unknown>-12683 (-----) [006] .... 1920260.576248: mm_filemap_add_to_page_cache: dev 0:64771 ino 1 page=0000000000000000 pfn=1323895 ofs=262623232
<unknown>-12683 (-----) [006] .... 1920260.576251: mm_filemap_add_to_page_cache: dev 0:64771 ino 2 page=0000000000000000 pfn=1323861 ofs=13156352
<unknown>-12683 (-----) [005] .... 1920260.576810: mm_filemap_add_to_page_cache: dev 0:64771 ino 1 page=0000000000000000 pfn=1477585 ofs=262590464
<unknown>-12683 (-----) [004] .... 1920260.577197: mm_filemap_add_to_page_cache: dev 0:64771 ino 1 page=0000000000000000 pfn=1267617 ofs=25206784
<unknown>-12683 (-----) [004] .... 1920260.577200: mm_filemap_add_to_page_cache: dev 0:64771 ino 2 page=0000000000000000 pfn=1267618 ofs=12636160
<unknown>-12683 (-----) [005] .... 1920260.577725: mm_filemap_add_to_page_cache: dev 0:64771 ino 1 page=0000000000000000 pfn=1056225 ofs=228618240
<unknown>-12683 (-----) [005] .... 1920260.577727: mm_filemap_add_to_page_cache: dev 0:64771 ino 2 page=0000000000000000 pfn=1164942 ofs=13082624
<unknown>-12683 (-----) [007] .... 1920260.578411: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1372616 ofs=0
<unknown>-12683 (-----) [007] .... 1920260.578422: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1307468 ofs=4096
<unknown>-12683 (-----) [007] .... 1920260.578428: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1120117 ofs=8192
<unknown>-12683 (-----) [007] .... 1920260.578428: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1217989 ofs=12288
<unknown>-12683 (-----) [007] .... 1920260.578650: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1475011 ofs=5419008
<unknown>-12683 (-----) [007] .... 1920260.578653: mm_filemap_add_to_page_cache: dev 0:64771 ino 1 page=0000000000000000 pfn=1066084 ofs=236453888
<unknown>-12683 (-----) [007] .... 1920260.578654: mm_filemap_add_to_page_cache: dev 0:64771 ino 2 page=0000000000000000 pfn=1100271 ofs=13099008
<unknown>-12683 (-----) [004] .... 1920260.579004: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1485156 ofs=5423104
<unknown>-12683 (-----) [004] .... 1920260.579005: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1124212 ofs=5427200
<unknown>-12683 (-----) [004] .... 1920260.579006: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1195377 ofs=5431296
<unknown>-12683 (-----) [004] .... 1920260.579006: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1265888 ofs=5435392
<unknown>-12683 (-----) [004] .... 1920260.579007: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1170194 ofs=5439488
<unknown>-12683 (-----) [004] .... 1920260.579007: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1403742 ofs=5443584
<unknown>-12683 (-----) [004] .... 1920260.579008: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1123826 ofs=5447680
<unknown>-12683 (-----) [004] .... 1920260.579008: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1255034 ofs=5451776
<unknown>-12683 (-----) [004] .... 1920260.579011: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1190447 ofs=5455872
<unknown>-12683 (-----) [004] .... 1920260.579011: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1286864 ofs=5459968
<unknown>-12683 (-----) [004] .... 1920260.579012: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1428535 ofs=5464064
<unknown>-12683 (-----) [004] .... 1920260.579012: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1184092 ofs=5468160
<unknown>-12683 (-----) [004] .... 1920260.579013: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1411906 ofs=5472256
<unknown>-12683 (-----) [004] .... 1920260.579013: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1342349 ofs=5476352
<unknown>-12683 (-----) [004] .... 1920260.579013: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1188185 ofs=5480448
<unknown>-12683 (-----) [004] .... 1920260.579014: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1158702 ofs=5484544
<unknown>-12683 (-----) [005] .... 1920260.579430: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1299421 ofs=5230592
<unknown>-12683 (-----) [005] .... 1920260.579435: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1317097 ofs=5234688
<unknown>-12683 (-----) [005] .... 1920260.579435: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1441714 ofs=5238784
<unknown>-12683 (-----) [005] .... 1920260.579438: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1081974 ofs=5242880
<unknown>-12683 (-----) [005] .... 1920260.579439: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1128684 ofs=5246976
<unknown>-12683 (-----) [005] .... 1920260.579439: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1447381 ofs=5251072
<unknown>-12683 (-----) [005] .... 1920260.579440: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1466410 ofs=5255168
<unknown>-12683 (-----) [005] .... 1920260.579440: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1259909 ofs=5259264
<unknown>-12683 (-----) [005] .... 1920260.579441: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1125784 ofs=5263360
<unknown>-12683 (-----) [005] .... 1920260.579441: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1270592 ofs=5267456
<unknown>-12683 (-----) [005] .... 1920260.579442: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1246070 ofs=5271552
<unknown>-12683 (-----) [005] .... 1920260.579442: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1472544 ofs=5275648
<unknown>-12683 (-----) [005] .... 1920260.579442: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1113357 ofs=5279744
<unknown>-12683 (-----) [005] .... 1920260.579443: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1202021 ofs=5283840
<unknown>-12683 (-----) [005] .... 1920260.579443: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1078639 ofs=5287936
<unknown>-12683 (-----) [005] .... 1920260.579449: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1176171 ofs=5292032
<unknown>-12683 (-----) [005] .... 1920260.579450: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1089516 ofs=5296128
<unknown>-12683 (-----) [005] .... 1920260.579451: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1400065 ofs=5300224
<unknown>-12683 (-----) [005] .... 1920260.579452: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1300489 ofs=5304320
<unknown>-12683 (-----) [005] .... 1920260.579452: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1452081 ofs=5308416
<unknown>-12683 (-----) [005] .... 1920260.579452: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1161862 ofs=5312512
<unknown>-12683 (-----) [005] .... 1920260.579453: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1161871 ofs=5316608
<unknown>-12683 (-----) [005] .... 1920260.579453: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1263798 ofs=5320704
<unknown>-12683 (-----) [005] .... 1920260.579454: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1126887 ofs=5324800
<unknown>-12683 (-----) [005] .... 1920260.579454: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1375498 ofs=5328896
<unknown>-12683 (-----) [005] .... 1920260.579455: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1328067 ofs=5332992
<unknown>-12683 (-----) [005] .... 1920260.579455: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1420691 ofs=5337088
<unknown>-12683 (-----) [005] .... 1920260.579456: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1298707 ofs=5341184
<unknown>-12683 (-----) [005] .... 1920260.579456: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1078670 ofs=5345280
<unknown>-12683 (-----) [005] .... 1920260.579457: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1430498 ofs=5349376
<unknown>-12683 (-----) [005] .... 1920260.579458: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1338720 ofs=5353472
<unknown>-12683 (-----) [005] .... 1920260.579476: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1452611 ofs=5357568
<unknown>-12683 (-----) [006] .... 1920260.580451: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1241967 ofs=0
<unknown>-12683 (-----) [006] .... 1920260.580454: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1116541 ofs=4096
<unknown>-12683 (-----) [006] .... 1920260.580461: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1145049 ofs=8192
<unknown>-12683 (-----) [006] .... 1920260.580462: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1277255 ofs=12288
<unknown>-12683 (-----) [006] .... 1920260.580462: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1098037 ofs=16384
<unknown>-12683 (-----) [006] .... 1920260.580463: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1135986 ofs=20480
<unknown>-12683 (-----) [006] .... 1920260.580464: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1154455 ofs=24576
<unknown>-12683 (-----) [006] .... 1920260.580464: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1221822 ofs=28672
<unknown>-12683 (-----) [006] .... 1920260.580465: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1078684 ofs=32768
<unknown>-12683 (-----) [006] .... 1920260.580465: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1158876 ofs=36864
<unknown>-12683 (-----) [006] .... 1920260.580465: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1289644 ofs=40960
<unknown>-12683 (-----) [006] .... 1920260.580466: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1289386 ofs=45056
<unknown>-12683 (-----) [006] .... 1920260.580466: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1131002 ofs=49152
<unknown>-12683 (-----) [006] .... 1920260.580467: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1464335 ofs=53248
<unknown>-12683 (-----) [006] .... 1920260.580468: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1135789 ofs=57344
<unknown>-12683 (-----) [006] .... 1920260.580469: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1240897 ofs=61440
<unknown>-12683 (-----) [006] .... 1920260.580469: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1241770 ofs=65536
<unknown>-12683 (-----) [006] .... 1920260.580470: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1421959 ofs=69632
<unknown>-12683 (-----) [006] .... 1920260.580470: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1230007 ofs=73728
<unknown>-12683 (-----) [006] .... 1920260.580471: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1109271 ofs=77824
<unknown>-12683 (-----) [006] .... 1920260.580471: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1159974 ofs=81920
<unknown>-12683 (-----) [006] .... 1920260.580471: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1154528 ofs=86016
<unknown>-12683 (-----) [006] .... 1920260.580472: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1315790 ofs=90112
<unknown>-12683 (-----) [006] .... 1920260.580473: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1185583 ofs=94208
<unknown>-12683 (-----) [006] .... 1920260.580473: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1253153 ofs=98304
<unknown>-12683 (-----) [006] .... 1920260.580473: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1103982 ofs=102400
<unknown>-12683 (-----) [006] .... 1920260.580474: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1284589 ofs=106496
<unknown>-12683 (-----) [006] .... 1920260.580474: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1169601 ofs=110592
<unknown>-12683 (-----) [006] .... 1920260.580476: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1206248 ofs=114688
<unknown>-12683 (-----) [006] .... 1920260.580476: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1261161 ofs=118784
<unknown>-12683 (-----) [006] .... 1920260.580477: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1305841 ofs=122880
<unknown>-12683 (-----) [006] .... 1920260.580477: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1468293 ofs=126976
<unknown>-12683 (-----) [004] .... 1920260.580646: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1318816 ofs=16384
<unknown>-12683 (-----) [004] .... 1920260.580649: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1472922 ofs=20480
<unknown>-12683 (-----) [004] .... 1920260.580650: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1473229 ofs=24576
<unknown>-12683 (-----) [004] .... 1920260.580650: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1524262 ofs=28672
<unknown>-12683 (-----) [004] .... 1920260.580656: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1205714 ofs=32768
<unknown>-12683 (-----) [004] .... 1920260.580657: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1310560 ofs=36864
<unknown>-12683 (-----) [004] .... 1920260.580658: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1295070 ofs=40960
<unknown>-12683 (-----) [004] .... 1920260.580659: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1404093 ofs=45056
<unknown>-12683 (-----) [004] .... 1920260.580659: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1435814 ofs=49152
<unknown>-12683 (-----) [004] .... 1920260.580660: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1435442 ofs=53248
<unknown>-12683 (-----) [004] .... 1920260.580660: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1096077 ofs=57344
<unknown>-12683 (-----) [004] .... 1920260.580661: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1483793 ofs=61440
<unknown>-12683 (-----) [004] .... 1920260.580661: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1231298 ofs=65536
<unknown>-12683 (-----) [004] .... 1920260.580661: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1215648 ofs=69632
<unknown>-12683 (-----) [004] .... 1920260.580662: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1327326 ofs=73728
<unknown>-12683 (-----) [004] .... 1920260.580662: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1108894 ofs=77824
<unknown>-12683 (-----) [004] .... 1920260.580663: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1327545 ofs=81920
<unknown>-12683 (-----) [004] .... 1920260.580663: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1328804 ofs=86016
<unknown>-12683 (-----) [004] .... 1920260.580664: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1300171 ofs=90112
<unknown>-12683 (-----) [004] .... 1920260.580664: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1353250 ofs=94208
<unknown>-12683 (-----) [004] .... 1920260.580668: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1333681 ofs=98304
<unknown>-12683 (-----) [004] .... 1920260.580668: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1144969 ofs=102400
<unknown>-12683 (-----) [004] .... 1920260.580669: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1450962 ofs=106496
<unknown>-12683 (-----) [004] .... 1920260.580669: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1255701 ofs=110592
<unknown>-12683 (-----) [004] .... 1920260.580670: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1294782 ofs=114688
<unknown>-12683 (-----) [004] .... 1920260.580670: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1226912 ofs=118784
<unknown>-12683 (-----) [004] .... 1920260.580671: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1294579 ofs=122880
<unknown>-12683 (-----) [004] .... 1920260.580671: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1246960 ofs=126976
<unknown>-12683 (-----) [004] .... 1920260.580671: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1199086 ofs=131072
<unknown>-12683 (-----) [004] .... 1920260.580672: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1449590 ofs=135168
<unknown>-12683 (-----) [004] .... 1920260.580672: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1276363 ofs=139264
<unknown>-12683 (-----) [004] .... 1920260.580675: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1389998 ofs=143360
<unknown>-12683 (-----) [004] .... 1920260.580739: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1423031 ofs=1249280
<unknown>-12683 (-----) [004] .... 1920260.580741: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1171032 ofs=1253376
<unknown>-12683 (-----) [004] .... 1920260.580742: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1320946 ofs=1257472
<unknown>-12683 (-----) [004] .... 1920260.580743: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1314696 ofs=1261568
<unknown>-12683 (-----) [004] .... 1920260.580743: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1414864 ofs=1265664
<unknown>-12683 (-----) [004] .... 1920260.580744: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1334933 ofs=1269760
<unknown>-12683 (-----) [004] .... 1920260.580744: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1242845 ofs=1273856
<unknown>-12683 (-----) [004] .... 1920260.580747: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1289488 ofs=1277952
<unknown>-12683 (-----) [004] .... 1920260.580748: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1335445 ofs=1282048
<unknown>-12683 (-----) [004] .... 1920260.580748: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1289663 ofs=1286144
<unknown>-12683 (-----) [004] .... 1920260.580749: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1080462 ofs=1290240
<unknown>-12683 (-----) [004] .... 1920260.580749: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1286303 ofs=1294336
<unknown>-12683 (-----) [004] .... 1920260.580750: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1353531 ofs=1298432
<unknown>-12683 (-----) [004] .... 1920260.580750: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1280701 ofs=1302528
<unknown>-12683 (-----) [004] .... 1920260.580751: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1107730 ofs=1306624
<unknown>-12683 (-----) [004] .... 1920260.580752: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1242729 ofs=1310720
<unknown>-12683 (-----) [004] .... 1920260.580753: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1078336 ofs=1314816
<unknown>-12683 (-----) [004] .... 1920260.580753: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1372425 ofs=1318912
<unknown>-12683 (-----) [004] .... 1920260.580754: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1248813 ofs=1323008
<unknown>-12683 (-----) [004] .... 1920260.580754: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1201155 ofs=1327104
<unknown>-12683 (-----) [004] .... 1920260.580755: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1250103 ofs=1331200
<unknown>-12683 (-----) [004] .... 1920260.580755: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1359710 ofs=1335296
<unknown>-12683 (-----) [004] .... 1920260.580756: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1272462 ofs=1339392
<unknown>-12683 (-----) [004] .... 1920260.580758: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1097035 ofs=1343488
<unknown>-12683 (-----) [004] .... 1920260.580759: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1233124 ofs=1347584
<unknown>-12683 (-----) [004] .... 1920260.580759: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1455812 ofs=1351680
<unknown>-12683 (-----) [004] .... 1920260.580759: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1355689 ofs=1355776
<unknown>-12683 (-----) [004] .... 1920260.580760: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1263593 ofs=1359872
<unknown>-12683 (-----) [004] .... 1920260.580760: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1230789 ofs=1363968
<unknown>-12683 (-----) [004] .... 1920260.580761: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1143766 ofs=1368064
<unknown>-12683 (-----) [004] .... 1920260.580762: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1269666 ofs=1372160
<unknown>-12683 (-----) [004] .... 1920260.580762: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1353022 ofs=1376256
<unknown>-12683 (-----) [004] .... 1920260.581613: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1355509 ofs=258048
<unknown>-12683 (-----) [004] .... 1920260.581615: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1178902 ofs=262144
<unknown>-12683 (-----) [004] .... 1920260.581616: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1193649 ofs=266240
<unknown>-12683 (-----) [004] .... 1920260.581618: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1225497 ofs=270336
<unknown>-12683 (-----) [004] .... 1920260.581618: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1228259 ofs=274432
<unknown>-12683 (-----) [004] .... 1920260.581635: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1309674 ofs=278528
<unknown>-12683 (-----) [004] .... 1920260.581635: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1239390 ofs=282624
<unknown>-12683 (-----) [004] .... 1920260.581636: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1468083 ofs=286720
<unknown>-12683 (-----) [004] .... 1920260.581636: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1292751 ofs=290816
<unknown>-12683 (-----) [004] .... 1920260.581637: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1318066 ofs=294912
<unknown>-12683 (-----) [004] .... 1920260.581637: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1489314 ofs=299008
<unknown>-12683 (-----) [004] .... 1920260.581637: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1169867 ofs=303104
<unknown>-12683 (-----) [004] .... 1920260.581639: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1314256 ofs=307200
<unknown>-12683 (-----) [004] .... 1920260.581639: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1310230 ofs=311296
<unknown>-12683 (-----) [004] .... 1920260.581640: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1356180 ofs=315392
<unknown>-12683 (-----) [004] .... 1920260.581640: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1419179 ofs=319488
<unknown>-12683 (-----) [004] .... 1920260.581641: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1307265 ofs=323584
<unknown>-12683 (-----) [004] .... 1920260.581641: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1218590 ofs=327680
<unknown>-12683 (-----) [004] .... 1920260.581642: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1447586 ofs=331776
<unknown>-12683 (-----) [004] .... 1920260.581642: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1209382 ofs=335872
<unknown>-12683 (-----) [004] .... 1920260.581642: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1072148 ofs=339968
<unknown>-12683 (-----) [004] .... 1920260.581645: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1227195 ofs=344064
<unknown>-12683 (-----) [004] .... 1920260.581646: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1246369 ofs=348160
<unknown>-12683 (-----) [004] .... 1920260.581646: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1193845 ofs=352256
<unknown>-12683 (-----) [004] .... 1920260.581647: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1137553 ofs=356352
<unknown>-12683 (-----) [004] .... 1920260.581647: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1475215 ofs=360448
<unknown>-12683 (-----) [004] .... 1920260.581648: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1258935 ofs=364544
<unknown>-12683 (-----) [004] .... 1920260.581649: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1448788 ofs=368640
<unknown>-12683 (-----) [004] .... 1920260.581649: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1447611 ofs=372736
<unknown>-12683 (-----) [004] .... 1920260.581650: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1290842 ofs=376832
<unknown>-12683 (-----) [004] .... 1920260.581650: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1447826 ofs=380928
<unknown>-12683 (-----) [004] .... 1920260.581650: mm_filemap_add_to_page_cache: dev 0:64771 ino da07 page=0000000000000000 pfn=1181016 ofs=385024
<unknown>-12683 (-----) [005] .... 1920260.582230: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1216810 ofs=1662976
<unknown>-12683 (-----) [005] .... 1920260.582234: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1175966 ofs=1667072
<unknown>-12683 (-----) [005] .... 1920260.582235: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1449798 ofs=1671168
<unknown>-12683 (-----) [005] .... 1920260.582236: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1273480 ofs=1675264
<unknown>-12683 (-----) [005] .... 1920260.582236: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1152779 ofs=1679360
<unknown>-12683 (-----) [005] .... 1920260.582237: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1272810 ofs=1683456
<unknown>-12683 (-----) [005] .... 1920260.582237: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1248634 ofs=1687552
<unknown>-12683 (-----) [005] .... 1920260.582237: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1203376 ofs=1691648
<unknown>-12683 (-----) [005] .... 1920260.582238: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1138880 ofs=1695744
<unknown>-12683 (-----) [005] .... 1920260.582238: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1344591 ofs=1699840
<unknown>-12683 (-----) [005] .... 1920260.582239: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1416060 ofs=1703936
<unknown>-12683 (-----) [005] .... 1920260.582246: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1128676 ofs=1708032
<unknown>-12683 (-----) [005] .... 1920260.582247: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1301921 ofs=1712128
<unknown>-12683 (-----) [005] .... 1920260.582248: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1384569 ofs=1716224
<unknown>-12683 (-----) [005] .... 1920260.582248: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1249106 ofs=1720320
<unknown>-12683 (-----) [005] .... 1920260.582249: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1206596 ofs=1724416
<unknown>-12683 (-----) [005] .... 1920260.582249: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1429831 ofs=1728512
<unknown>-12683 (-----) [005] .... 1920260.582252: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1107796 ofs=1732608
<unknown>-12683 (-----) [005] .... 1920260.582255: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1098336 ofs=1736704
<unknown>-12683 (-----) [005] .... 1920260.582255: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1230286 ofs=1740800
<unknown>-12683 (-----) [005] .... 1920260.582256: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1100370 ofs=1744896
<unknown>-12683 (-----) [005] .... 1920260.582256: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1241930 ofs=1748992
<unknown>-12683 (-----) [005] .... 1920260.582257: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1366807 ofs=1753088
<unknown>-12683 (-----) [005] .... 1920260.582257: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1136252 ofs=1757184
<unknown>-12683 (-----) [005] .... 1920260.582258: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1274291 ofs=1761280
<unknown>-12683 (-----) [005] .... 1920260.582258: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1254775 ofs=1765376
<unknown>-12683 (-----) [005] .... 1920260.582259: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1194679 ofs=1769472
<unknown>-12683 (-----) [005] .... 1920260.582262: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1177090 ofs=1773568
<unknown>-12683 (-----) [005] .... 1920260.582263: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1343925 ofs=1777664
<unknown>-12683 (-----) [005] .... 1920260.582263: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1159217 ofs=1781760
<unknown>-12683 (-----) [005] .... 1920260.582263: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1435471 ofs=1785856
<unknown>-12683 (-----) [005] .... 1920260.582264: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1435529 ofs=1789952
<unknown>-12683 (-----) [004] .... 1920260.582524: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1181910 ofs=0
<unknown>-12683 (-----) [004] .... 1920260.582528: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1212021 ofs=4096
<unknown>-12683 (-----) [004] .... 1920260.582529: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1162778 ofs=8192
<unknown>-12683 (-----) [004] .... 1920260.582529: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1107700 ofs=12288
<unknown>-12683 (-----) [004] .... 1920260.583553: mm_filemap_add_to_page_cache: dev 0:64771 ino df31 page=0000000000000000 pfn=1093394 ofs=3399680
<unknown>-12683 (-----) [004] .... 1920260.583984: mm_filemap_add_to_page_cache: dev 0:64771 ino 1 page=0000000000000000 pfn=1121431 ofs=242503680
<unknown>-12683 (-----) [004] .... 1920260.583986: mm_filemap_add_to_page_cache: dev 0:64771 ino 2 page=0000000000000000 pfn=1168551 ofs=13115392
<unknown>-12683 (-----) [004] .... 1920260.584304: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1347409 ofs=0
<unknown>-12683 (-----) [004] .... 1920260.584307: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1428681 ofs=4096
<unknown>-12683 (-----) [004] .... 1920260.584307: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1259106 ofs=8192
<unknown>-12683 (-----) [004] .... 1920260.584308: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1343229 ofs=12288
<unknown>-12694 (-----) [005] .... 1920260.584622: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1098733 ofs=1531904
<unknown>-12696 (-----) [006] .... 1920260.584626: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1331319 ofs=1536000
<unknown>-12694 (-----) [005] .... 1920260.584626: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1278537 ofs=1540096
<unknown>-12696 (-----) [006] .... 1920260.584631: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1492534 ofs=1544192
<unknown>-12694 (-----) [005] .... 1920260.584636: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1460878 ofs=1548288
<unknown>-12694 (-----) [005] .... 1920260.584640: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1092973 ofs=1552384
<unknown>-12694 (-----) [005] .... 1920260.584641: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1103200 ofs=1556480
<unknown>-12694 (-----) [005] .... 1920260.584642: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1257426 ofs=1560576
<unknown>-12694 (-----) [005] .... 1920260.584642: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1219424 ofs=1564672
<unknown>-12683 (-----) [004] .... 1920260.584660: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1279352 ofs=1568768
<unknown>-12696 (-----) [006] .... 1920260.584662: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1260572 ofs=1572864
<unknown>-12683 (-----) [004] .... 1920260.584663: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1225809 ofs=1576960
<unknown>-12696 (-----) [006] .... 1920260.584665: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1350766 ofs=1585152
<unknown>-12697 (-----) [007] .... 1920260.584666: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1107173 ofs=1581056
<unknown>-12683 (-----) [004] .... 1920260.584668: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1305885 ofs=1589248
<unknown>-12694 (-----) [005] .... 1920260.584669: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1293385 ofs=1593344
<unknown>-12696 (-----) [006] .... 1920260.584670: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1173841 ofs=1597440
<unknown>-12697 (-----) [007] .... 1920260.584670: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1080021 ofs=1601536
<unknown>-12683 (-----) [004] .... 1920260.584673: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1147419 ofs=1605632
<unknown>-12696 (-----) [006] .... 1920260.584673: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1252762 ofs=1609728
<unknown>-12694 (-----) [005] .... 1920260.584674: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1323916 ofs=1613824
<unknown>-12683 (-----) [004] .... 1920260.584675: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1155631 ofs=1617920
<unknown>-12696 (-----) [006] .... 1920260.584676: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1449815 ofs=1622016
<unknown>-12694 (-----) [005] .... 1920260.584678: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1227069 ofs=1626112
<unknown>-12696 (-----) [006] .... 1920260.584680: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1317692 ofs=1630208
<unknown>-12694 (-----) [005] .... 1920260.584681: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1492244 ofs=1634304
<unknown>-12683 (-----) [004] .... 1920260.584682: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1241876 ofs=1638400
<unknown>-12697 (-----) [007] .... 1920260.585446: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1402958 ofs=167936
<unknown>-12697 (-----) [007] .... 1920260.585449: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1133263 ofs=172032
<unknown>-12697 (-----) [007] .... 1920260.585450: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1295502 ofs=176128
<unknown>-12697 (-----) [007] .... 1920260.585450: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1249495 ofs=180224
<unknown>-12697 (-----) [007] .... 1920260.585451: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1237999 ofs=184320
<unknown>-12697 (-----) [007] .... 1920260.585451: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1280965 ofs=188416
<unknown>-12697 (-----) [007] .... 1920260.585454: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1208361 ofs=192512
<unknown>-12697 (-----) [007] .... 1920260.585454: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1308840 ofs=196608
<unknown>-12695 (-----) [004] .... 1920260.585455: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1138875 ofs=569344
<unknown>-12695 (-----) [004] .... 1920260.585458: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1314886 ofs=573440
<unknown>-12697 (-----) [007] .... 1920260.585458: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1242734 ofs=200704
<unknown>-12695 (-----) [004] .... 1920260.585458: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1447386 ofs=577536
<unknown>-12697 (-----) [007] .... 1920260.585459: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1241302 ofs=204800
<unknown>-12695 (-----) [004] .... 1920260.585459: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1328663 ofs=581632
<unknown>-12697 (-----) [007] .... 1920260.585459: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1476101 ofs=208896
<unknown>-12695 (-----) [004] .... 1920260.585460: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1209461 ofs=585728
<unknown>-12697 (-----) [007] .... 1920260.585460: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1080147 ofs=212992
<unknown>-12697 (-----) [007] .... 1920260.585461: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1128509 ofs=217088
<unknown>-12697 (-----) [007] .... 1920260.585461: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1371915 ofs=221184
<unknown>-12697 (-----) [007] .... 1920260.585461: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1264015 ofs=225280
<unknown>-12697 (-----) [007] .... 1920260.585462: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1211695 ofs=229376
<unknown>-12697 (-----) [007] .... 1920260.585462: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1150386 ofs=233472
<unknown>-12697 (-----) [007] .... 1920260.585463: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1135747 ofs=237568
<unknown>-12697 (-----) [007] .... 1920260.585463: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1128230 ofs=241664
<unknown>-12697 (-----) [007] .... 1920260.585464: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1155451 ofs=245760
<unknown>-12697 (-----) [007] .... 1920260.585465: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1246841 ofs=249856
<unknown>-12697 (-----) [007] .... 1920260.585465: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1462971 ofs=253952
<unknown>-12697 (-----) [007] .... 1920260.585466: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1131333 ofs=258048
<unknown>-12697 (-----) [007] .... 1920260.585466: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1289407 ofs=262144
<unknown>-12695 (-----) [004] .... 1920260.585467: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1134730 ofs=589824
<unknown>-12697 (-----) [007] .... 1920260.585467: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1289873 ofs=266240
<unknown>-12697 (-----) [007] .... 1920260.585468: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1448734 ofs=270336
<unknown>-12695 (-----) [004] .... 1920260.585468: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1129776 ofs=593920
<unknown>-12697 (-----) [007] .... 1920260.585468: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1524090 ofs=274432
<unknown>-12695 (-----) [004] .... 1920260.585468: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1399725 ofs=598016
<unknown>-12697 (-----) [007] .... 1920260.585469: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1524081 ofs=278528
<unknown>-12695 (-----) [004] .... 1920260.585469: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1276535 ofs=602112
<unknown>-12697 (-----) [007] .... 1920260.585469: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1524060 ofs=282624
<unknown>-12695 (-----) [004] .... 1920260.585470: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1449847 ofs=606208
<unknown>-12697 (-----) [007] .... 1920260.585470: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1158944 ofs=286720
<unknown>-12695 (-----) [004] .... 1920260.585470: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1384536 ofs=610304
<unknown>-12697 (-----) [007] .... 1920260.585470: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1116785 ofs=290816
<unknown>-12695 (-----) [004] .... 1920260.585471: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1308118 ofs=614400
<unknown>-12697 (-----) [007] .... 1920260.585471: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1448669 ofs=294912
<unknown>-12695 (-----) [004] .... 1920260.585471: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1227050 ofs=618496
<unknown>-12695 (-----) [004] .... 1920260.585473: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1289324 ofs=622592
<unknown>-12695 (-----) [004] .... 1920260.585473: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1187869 ofs=626688
<unknown>-12695 (-----) [004] .... 1920260.585474: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1400523 ofs=630784
<unknown>-12695 (-----) [004] .... 1920260.585474: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1344176 ofs=634880
<unknown>-12695 (-----) [004] .... 1920260.585475: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1092871 ofs=638976
<unknown>-12695 (-----) [004] .... 1920260.585475: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1092021 ofs=643072
<unknown>-12695 (-----) [004] .... 1920260.585476: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1198169 ofs=647168
<unknown>-12695 (-----) [004] .... 1920260.585476: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1371540 ofs=651264
<unknown>-12683 (-----) [005] .... 1920260.585476: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1195003 ofs=348160
<unknown>-12695 (-----) [004] .... 1920260.585477: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1228787 ofs=655360
<unknown>-12695 (-----) [004] .... 1920260.585477: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1236123 ofs=659456
<unknown>-12695 (-----) [004] .... 1920260.585477: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1137213 ofs=663552
<unknown>-12695 (-----) [004] .... 1920260.585478: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1294618 ofs=667648
<unknown>-12695 (-----) [004] .... 1920260.585478: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1241048 ofs=671744
<unknown>-12695 (-----) [004] .... 1920260.585479: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1228779 ofs=675840
<unknown>-12683 (-----) [005] .... 1920260.585479: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1199292 ofs=352256
<unknown>-12683 (-----) [005] .... 1920260.585480: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1200861 ofs=356352
<unknown>-12695 (-----) [004] .... 1920260.585480: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1309572 ofs=679936
<unknown>-12683 (-----) [005] .... 1920260.585480: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1215770 ofs=360448
<unknown>-12695 (-----) [004] .... 1920260.585481: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1409002 ofs=684032
<unknown>-12683 (-----) [005] .... 1920260.585481: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1151883 ofs=364544
<unknown>-12695 (-----) [004] .... 1920260.585481: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1103729 ofs=688128
<unknown>-12683 (-----) [005] .... 1920260.585482: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1468126 ofs=368640
<unknown>-12695 (-----) [004] .... 1920260.585482: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1162720 ofs=692224
<unknown>-12683 (-----) [005] .... 1920260.585482: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1251672 ofs=372736
<unknown>-12695 (-----) [004] .... 1920260.585482: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1199221 ofs=696320
<unknown>-12683 (-----) [005] .... 1920260.585483: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1283325 ofs=376832
<unknown>-12683 (-----) [005] .... 1920260.585483: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1190489 ofs=380928
<unknown>-12683 (-----) [005] .... 1920260.585484: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1489117 ofs=385024
<unknown>-12683 (-----) [005] .... 1920260.585484: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1273899 ofs=389120
<unknown>-12683 (-----) [005] .... 1920260.585485: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1274459 ofs=393216
<unknown>-12683 (-----) [005] .... 1920260.585486: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1316649 ofs=397312
<unknown>-12683 (-----) [005] .... 1920260.585491: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1375678 ofs=401408
<unknown>-12683 (-----) [005] .... 1920260.585491: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1483317 ofs=405504
<unknown>-12683 (-----) [005] .... 1920260.585492: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1240286 ofs=409600
<unknown>-12683 (-----) [005] .... 1920260.585492: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1131345 ofs=413696
<unknown>-12683 (-----) [005] .... 1920260.585493: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1200483 ofs=417792
<unknown>-12683 (-----) [005] .... 1920260.585493: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1384693 ofs=421888
<unknown>-12683 (-----) [005] .... 1920260.585493: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1161385 ofs=425984
<unknown>-12683 (-----) [005] .... 1920260.585494: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1452025 ofs=430080
<unknown>-12683 (-----) [005] .... 1920260.585495: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1253654 ofs=434176
<unknown>-12683 (-----) [005] .... 1920260.585495: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1116697 ofs=438272
<unknown>-12683 (-----) [005] .... 1920260.585495: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1432645 ofs=442368
<unknown>-12694 (-----) [006] .... 1920260.585495: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1337397 ofs=16384
<unknown>-12683 (-----) [005] .... 1920260.585496: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1304229 ofs=446464
<unknown>-12683 (-----) [005] .... 1920260.585496: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1419147 ofs=450560
<unknown>-12683 (-----) [005] .... 1920260.585498: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1349246 ofs=454656
<unknown>-12683 (-----) [005] .... 1920260.585499: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1128519 ofs=458752
<unknown>-12683 (-----) [005] .... 1920260.585499: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1125168 ofs=462848
<unknown>-12694 (-----) [006] .... 1920260.585509: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1081031 ofs=20480
<unknown>-12694 (-----) [006] .... 1920260.585509: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1293022 ofs=24576
<unknown>-12694 (-----) [006] .... 1920260.585510: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1113007 ofs=28672
<unknown>-12694 (-----) [006] .... 1920260.585510: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1339312 ofs=32768
<unknown>-12694 (-----) [006] .... 1920260.585511: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1412311 ofs=36864
<unknown>-12694 (-----) [006] .... 1920260.585511: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1260960 ofs=40960
<unknown>-12694 (-----) [006] .... 1920260.585512: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1189529 ofs=45056
<unknown>-12694 (-----) [006] .... 1920260.585512: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1412184 ofs=49152
<unknown>-12694 (-----) [006] .... 1920260.585513: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1481227 ofs=53248
<unknown>-12694 (-----) [006] .... 1920260.585513: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1455940 ofs=57344
<unknown>-12694 (-----) [006] .... 1920260.585514: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1299132 ofs=61440
<unknown>-12694 (-----) [006] .... 1920260.585514: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1337375 ofs=65536
<unknown>-12694 (-----) [006] .... 1920260.585529: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1328742 ofs=69632
<unknown>-12694 (-----) [006] .... 1920260.585529: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1315646 ofs=73728
<unknown>-12694 (-----) [006] .... 1920260.585531: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1225475 ofs=77824
<unknown>-12694 (-----) [006] .... 1920260.585531: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1146097 ofs=81920
<unknown>-12694 (-----) [006] .... 1920260.585532: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1318775 ofs=86016
<unknown>-12694 (-----) [006] .... 1920260.585532: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1448391 ofs=90112
<unknown>-12694 (-----) [006] .... 1920260.585532: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1441412 ofs=94208
<unknown>-12694 (-----) [006] .... 1920260.585533: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1138111 ofs=98304
<unknown>-12694 (-----) [006] .... 1920260.585533: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1143223 ofs=102400
<unknown>-12683 (-----) [005] .... 1920260.585534: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1079876 ofs=466944
<unknown>-12694 (-----) [006] .... 1920260.585534: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1447637 ofs=106496
<unknown>-12694 (-----) [006] .... 1920260.585534: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1220585 ofs=110592
<unknown>-12694 (-----) [006] .... 1920260.585535: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1449051 ofs=114688
<unknown>-12694 (-----) [006] .... 1920260.585535: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1313180 ofs=118784
<unknown>-12694 (-----) [006] .... 1920260.585535: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1313166 ofs=122880
<unknown>-12694 (-----) [006] .... 1920260.585536: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1313154 ofs=126976
<unknown>-12683 (-----) [005] .... 1920260.585536: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1218394 ofs=471040
<unknown>-12694 (-----) [006] .... 1920260.585536: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1144047 ofs=131072
<unknown>-12683 (-----) [005] .... 1920260.585537: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1218579 ofs=475136
<unknown>-12694 (-----) [006] .... 1920260.585543: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1241332 ofs=135168
<unknown>-12694 (-----) [006] .... 1920260.585543: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1097199 ofs=139264
<unknown>-12694 (-----) [006] .... 1920260.585545: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1214197 ofs=143360
<unknown>-12694 (-----) [006] .... 1920260.585645: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1197633 ofs=147456
<unknown>-12694 (-----) [006] .... 1920260.585647: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1311536 ofs=151552
<unknown>-12694 (-----) [006] .... 1920260.585647: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1322952 ofs=155648
<unknown>-12694 (-----) [006] .... 1920260.585647: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1346974 ofs=159744
<unknown>-12694 (-----) [006] .... 1920260.585648: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1257232 ofs=163840
<unknown>-12695 (-----) [004] .... 1920260.586355: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1204484 ofs=700416
<unknown>-12695 (-----) [004] .... 1920260.586357: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1326426 ofs=704512
<unknown>-12695 (-----) [004] .... 1920260.586358: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1151808 ofs=708608
<unknown>-12695 (-----) [004] .... 1920260.586358: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1209422 ofs=712704
<unknown>-12695 (-----) [004] .... 1920260.586359: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1408387 ofs=716800
<unknown>-12695 (-----) [004] .... 1920260.586359: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1197336 ofs=720896
<unknown>-12695 (-----) [004] .... 1920260.586363: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1205652 ofs=724992
<unknown>-12695 (-----) [004] .... 1920260.586363: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1133421 ofs=729088
<unknown>-12695 (-----) [004] .... 1920260.586364: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1092173 ofs=733184
<unknown>-12695 (-----) [004] .... 1920260.586365: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1124430 ofs=737280
<unknown>-12695 (-----) [004] .... 1920260.586365: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1143926 ofs=741376
<unknown>-12695 (-----) [004] .... 1920260.586366: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1090109 ofs=745472
<unknown>-12695 (-----) [004] .... 1920260.586366: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1102012 ofs=749568
<unknown>-12695 (-----) [004] .... 1920260.586367: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1154930 ofs=753664
<unknown>-12695 (-----) [004] .... 1920260.586368: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1132993 ofs=757760
<unknown>-12695 (-----) [004] .... 1920260.586369: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1430780 ofs=761856
<unknown>-12695 (-----) [004] .... 1920260.586369: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1197452 ofs=765952
<unknown>-12695 (-----) [004] .... 1920260.586369: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1075111 ofs=770048
<unknown>-12695 (-----) [004] .... 1920260.586370: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1275616 ofs=774144
<unknown>-12695 (-----) [004] .... 1920260.586370: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1444981 ofs=778240
<unknown>-12695 (-----) [004] .... 1920260.586371: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1452592 ofs=782336
<unknown>-12695 (-----) [004] .... 1920260.586374: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1102857 ofs=786432
<unknown>-12695 (-----) [004] .... 1920260.586376: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1406969 ofs=790528
<unknown>-12695 (-----) [004] .... 1920260.586378: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1522553 ofs=794624
<unknown>-12695 (-----) [004] .... 1920260.586378: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1260771 ofs=798720
<unknown>-12695 (-----) [004] .... 1920260.586379: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1474649 ofs=802816
<unknown>-12695 (-----) [004] .... 1920260.586379: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1268708 ofs=806912
<unknown>-12695 (-----) [004] .... 1920260.586379: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1346144 ofs=811008
<unknown>-12695 (-----) [004] .... 1920260.586380: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1081167 ofs=815104
<unknown>-12695 (-----) [004] .... 1920260.586380: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1137677 ofs=819200
<unknown>-12695 (-----) [004] .... 1920260.586381: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1161175 ofs=823296
<unknown>-12695 (-----) [004] .... 1920260.586381: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1461331 ofs=827392
<unknown>-12695 (-----) [004] .... 1920260.586492: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1347219 ofs=831488
<unknown>-12695 (-----) [004] .... 1920260.586494: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1290004 ofs=835584
<unknown>-12695 (-----) [004] .... 1920260.586494: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1299174 ofs=839680
<unknown>-12695 (-----) [004] .... 1920260.586496: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1317595 ofs=843776
<unknown>-12695 (-----) [004] .... 1920260.586496: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1484924 ofs=847872
<unknown>-12695 (-----) [004] .... 1920260.586497: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1169920 ofs=851968
<unknown>-12695 (-----) [004] .... 1920260.586501: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1359189 ofs=856064
<unknown>-12695 (-----) [004] .... 1920260.586501: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1307842 ofs=860160
<unknown>-12695 (-----) [004] .... 1920260.586502: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1237858 ofs=864256
<unknown>-12695 (-----) [004] .... 1920260.586502: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1189461 ofs=868352
<unknown>-12695 (-----) [004] .... 1920260.586503: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1223232 ofs=872448
<unknown>-12695 (-----) [004] .... 1920260.586503: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1104076 ofs=876544
<unknown>-12695 (-----) [004] .... 1920260.586504: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1079223 ofs=880640
<unknown>-12695 (-----) [004] .... 1920260.586504: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1092537 ofs=884736
<unknown>-12695 (-----) [004] .... 1920260.586505: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1353960 ofs=888832
<unknown>-12695 (-----) [004] .... 1920260.586505: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1346330 ofs=892928
<unknown>-12695 (-----) [004] .... 1920260.586506: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1345764 ofs=897024
<unknown>-12695 (-----) [004] .... 1920260.586507: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1363913 ofs=901120
<unknown>-12695 (-----) [004] .... 1920260.586508: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1319570 ofs=905216
<unknown>-12695 (-----) [004] .... 1920260.586508: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1367024 ofs=909312
<unknown>-12695 (-----) [004] .... 1920260.586508: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1333808 ofs=913408
<unknown>-12695 (-----) [004] .... 1920260.586509: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1158627 ofs=917504
<unknown>-12695 (-----) [004] .... 1920260.586509: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1300368 ofs=921600
<unknown>-12695 (-----) [004] .... 1920260.586510: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1245363 ofs=925696
<unknown>-12695 (-----) [004] .... 1920260.586510: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1345609 ofs=929792
<unknown>-12695 (-----) [004] .... 1920260.586510: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1393826 ofs=933888
<unknown>-12695 (-----) [004] .... 1920260.586511: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1200552 ofs=937984
<unknown>-12695 (-----) [004] .... 1920260.586511: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1170885 ofs=942080
<unknown>-12695 (-----) [004] .... 1920260.586512: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1536209 ofs=946176
<unknown>-12695 (-----) [004] .... 1920260.586512: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1189630 ofs=950272
<unknown>-12695 (-----) [004] .... 1920260.586513: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1121010 ofs=954368
<unknown>-12695 (-----) [004] .... 1920260.586514: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1324474 ofs=958464
<unknown>-12697 (-----) [007] .... 1920260.586578: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1129628 ofs=299008
<unknown>-12697 (-----) [007] .... 1920260.586579: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1307120 ofs=303104
<unknown>-12697 (-----) [007] .... 1920260.586580: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1347284 ofs=307200
<unknown>-12697 (-----) [007] .... 1920260.586580: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1312996 ofs=311296
<unknown>-12697 (-----) [007] .... 1920260.586581: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1170623 ofs=315392
<unknown>-12697 (-----) [007] .... 1920260.586581: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1359281 ofs=319488
<unknown>-12697 (-----) [007] .... 1920260.586582: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1180021 ofs=323584
<unknown>-12697 (-----) [007] .... 1920260.586582: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1195728 ofs=327680
<unknown>-12697 (-----) [007] .... 1920260.586582: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1163642 ofs=331776
<unknown>-12697 (-----) [007] .... 1920260.586587: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1152538 ofs=335872
<unknown>-12697 (-----) [007] .... 1920260.586589: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1345922 ofs=339968
<unknown>-12697 (-----) [007] .... 1920260.586589: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1343604 ofs=344064
<unknown>-12697 (-----) [007] .... 1920260.586721: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1399371 ofs=479232
<unknown>-12697 (-----) [007] .... 1920260.586723: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1106549 ofs=483328
<unknown>-12697 (-----) [007] .... 1920260.586724: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1331546 ofs=487424
<unknown>-12697 (-----) [007] .... 1920260.586724: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1299299 ofs=491520
<unknown>-12697 (-----) [007] .... 1920260.586725: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1288883 ofs=495616
<unknown>-12697 (-----) [007] .... 1920260.586725: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1399049 ofs=499712
<unknown>-12697 (-----) [007] .... 1920260.586726: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1146931 ofs=503808
<unknown>-12697 (-----) [007] .... 1920260.586726: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1296592 ofs=507904
<unknown>-12697 (-----) [007] .... 1920260.586727: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1468397 ofs=512000
<unknown>-12697 (-----) [007] .... 1920260.586727: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1215698 ofs=516096
<unknown>-12697 (-----) [007] .... 1920260.586727: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1177341 ofs=520192
<unknown>-12697 (-----) [007] .... 1920260.586731: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1189162 ofs=524288
<unknown>-12697 (-----) [007] .... 1920260.586732: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1435997 ofs=528384
<unknown>-12697 (-----) [007] .... 1920260.586732: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1209896 ofs=532480
<unknown>-12697 (-----) [007] .... 1920260.586733: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1255888 ofs=536576
<unknown>-12697 (-----) [007] .... 1920260.586734: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1234200 ofs=540672
<unknown>-12697 (-----) [007] .... 1920260.586734: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1422854 ofs=544768
<unknown>-12697 (-----) [007] .... 1920260.586735: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1435794 ofs=548864
<unknown>-12697 (-----) [007] .... 1920260.586735: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1236279 ofs=552960
<unknown>-12697 (-----) [007] .... 1920260.586736: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1485732 ofs=557056
<unknown>-12683 (-----) [005] .... 1920260.586743: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1417198 ofs=561152
<unknown>-12683 (-----) [005] .... 1920260.586746: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1469450 ofs=565248
<unknown>-12696 (-----) [004] .... 1920260.587465: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1489023 ofs=1040384
<unknown>-12696 (-----) [004] .... 1920260.587469: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1449498 ofs=1044480
<unknown>-12696 (-----) [004] .... 1920260.587469: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1447737 ofs=1048576
<unknown>-12696 (-----) [004] .... 1920260.587470: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1124530 ofs=1052672
<unknown>-12696 (-----) [004] .... 1920260.587476: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1246743 ofs=1056768
<unknown>-12696 (-----) [004] .... 1920260.587476: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1441927 ofs=1060864
<unknown>-12696 (-----) [004] .... 1920260.587477: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1280581 ofs=1064960
<unknown>-12696 (-----) [004] .... 1920260.587477: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1289438 ofs=1069056
<unknown>-12696 (-----) [004] .... 1920260.587477: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1464236 ofs=1073152
<unknown>-12696 (-----) [004] .... 1920260.587478: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1125808 ofs=1077248
<unknown>-12696 (-----) [004] .... 1920260.587478: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1329385 ofs=1081344
<unknown>-12696 (-----) [004] .... 1920260.587480: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1314093 ofs=1085440
<unknown>-12696 (-----) [004] .... 1920260.587480: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1201837 ofs=1089536
<unknown>-12696 (-----) [004] .... 1920260.587481: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1327734 ofs=1093632
<unknown>-12696 (-----) [004] .... 1920260.587481: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1406568 ofs=1097728
<unknown>-12696 (-----) [004] .... 1920260.587481: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1331873 ofs=1101824
<unknown>-12696 (-----) [004] .... 1920260.587482: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1331898 ofs=1105920
<unknown>-12696 (-----) [004] .... 1920260.587482: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1331917 ofs=1110016
<unknown>-12696 (-----) [004] .... 1920260.587483: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1332091 ofs=1114112
<unknown>-12696 (-----) [004] .... 1920260.587483: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1108186 ofs=1118208
<unknown>-12696 (-----) [004] .... 1920260.587486: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1182631 ofs=1122304
<unknown>-12696 (-----) [004] .... 1920260.587486: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1085941 ofs=1126400
<unknown>-12696 (-----) [004] .... 1920260.587487: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1433982 ofs=1130496
<unknown>-12696 (-----) [004] .... 1920260.587487: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1387028 ofs=1134592
<unknown>-12696 (-----) [004] .... 1920260.587488: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1353117 ofs=1138688
<unknown>-12696 (-----) [004] .... 1920260.587489: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1352364 ofs=1142784
<unknown>-12696 (-----) [004] .... 1920260.587489: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1144513 ofs=1146880
<unknown>-12696 (-----) [004] .... 1920260.587490: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1403984 ofs=1150976
<unknown>-12696 (-----) [004] .... 1920260.587490: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1278970 ofs=1155072
<unknown>-12696 (-----) [004] .... 1920260.587491: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1326743 ofs=1159168
<unknown>-12696 (-----) [004] .... 1920260.587491: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1221809 ofs=1163264
<unknown>-12696 (-----) [004] .... 1920260.587492: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1268668 ofs=1167360
<unknown>-12695 (-----) [005] .... 1920260.587502: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1074544 ofs=962560
<unknown>-12695 (-----) [005] .... 1920260.587506: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1074294 ofs=966656
<unknown>-12695 (-----) [005] .... 1920260.587506: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1075097 ofs=970752
<unknown>-12695 (-----) [005] .... 1920260.587507: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1162407 ofs=974848
<unknown>-12695 (-----) [005] .... 1920260.587507: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1141370 ofs=978944
<unknown>-12695 (-----) [005] .... 1920260.587508: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1306487 ofs=983040
<unknown>-12695 (-----) [005] .... 1920260.587508: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1306434 ofs=987136
<unknown>-12695 (-----) [005] .... 1920260.587514: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1306347 ofs=991232
<unknown>-12695 (-----) [005] .... 1920260.587514: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1306247 ofs=995328
<unknown>-12695 (-----) [005] .... 1920260.587515: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1306195 ofs=999424
<unknown>-12695 (-----) [005] .... 1920260.587516: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1306039 ofs=1003520
<unknown>-12695 (-----) [005] .... 1920260.587516: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1305983 ofs=1007616
<unknown>-12694 (-----) [006] .... 1920260.587701: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1216391 ofs=1171456
<unknown>-12694 (-----) [006] .... 1920260.587705: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1262462 ofs=1175552
<unknown>-12694 (-----) [006] .... 1920260.587706: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1358114 ofs=1179648
<unknown>-12694 (-----) [006] .... 1920260.587706: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1357898 ofs=1183744
<unknown>-12694 (-----) [006] .... 1920260.587707: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1237003 ofs=1187840
<unknown>-12694 (-----) [006] .... 1920260.587707: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1126319 ofs=1191936
<unknown>-12694 (-----) [006] .... 1920260.587708: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1415489 ofs=1196032
<unknown>-12694 (-----) [006] .... 1920260.587708: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1279558 ofs=1200128
<unknown>-12694 (-----) [006] .... 1920260.587708: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1434022 ofs=1204224
<unknown>-12694 (-----) [006] .... 1920260.587709: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1220130 ofs=1208320
<unknown>-12694 (-----) [006] .... 1920260.587710: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1163037 ofs=1212416
<unknown>-12694 (-----) [006] .... 1920260.587711: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1404501 ofs=1216512
<unknown>-12694 (-----) [006] .... 1920260.587711: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1406287 ofs=1220608
<unknown>-12697 (-----) [007] .... 1920260.588132: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1355143 ofs=1376256
<unknown>-12697 (-----) [007] .... 1920260.588136: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1213923 ofs=1380352
<unknown>-12697 (-----) [007] .... 1920260.588136: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1243190 ofs=1384448
<unknown>-12697 (-----) [007] .... 1920260.588143: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1300698 ofs=1388544
<unknown>-12697 (-----) [007] .... 1920260.588144: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1482568 ofs=1392640
<unknown>-12697 (-----) [007] .... 1920260.588144: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1461789 ofs=1396736
<unknown>-12697 (-----) [007] .... 1920260.588145: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1242314 ofs=1400832
<unknown>-12697 (-----) [007] .... 1920260.588145: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1471996 ofs=1404928
<unknown>-12697 (-----) [007] .... 1920260.588146: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1242742 ofs=1409024
<unknown>-12697 (-----) [007] .... 1920260.588146: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1242579 ofs=1413120
<unknown>-12697 (-----) [007] .... 1920260.588148: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1242553 ofs=1417216
<unknown>-12697 (-----) [007] .... 1920260.588148: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1457332 ofs=1421312
<unknown>-12697 (-----) [007] .... 1920260.588149: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1315431 ofs=1425408
<unknown>-12697 (-----) [007] .... 1920260.588149: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1080653 ofs=1429504
<unknown>-12697 (-----) [007] .... 1920260.588149: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1324174 ofs=1433600
<unknown>-12697 (-----) [007] .... 1920260.588150: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1324142 ofs=1437696
<unknown>-12697 (-----) [007] .... 1920260.588150: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1157760 ofs=1441792
<unknown>-12697 (-----) [007] .... 1920260.588151: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1075059 ofs=1445888
<unknown>-12683 (-----) [006] .... 1920260.589785: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1279192 ofs=1486848
<unknown>-12683 (-----) [006] .... 1920260.589790: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1278527 ofs=1490944
<unknown>-12683 (-----) [006] .... 1920260.589791: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1091778 ofs=1495040
<unknown>-12683 (-----) [006] .... 1920260.589791: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1339447 ofs=1499136
<unknown>-12683 (-----) [006] .... 1920260.589792: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1254007 ofs=1503232
<unknown>-12683 (-----) [006] .... 1920260.589793: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1115173 ofs=1507328
<unknown>-12683 (-----) [006] .... 1920260.589793: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1393985 ofs=1511424
<unknown>-12683 (-----) [006] .... 1920260.589794: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1369123 ofs=1515520
<unknown>-12683 (-----) [006] .... 1920260.589794: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1314257 ofs=1519616
<unknown>-12683 (-----) [006] .... 1920260.589802: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1404487 ofs=1523712
<unknown>-12683 (-----) [006] .... 1920260.589803: mm_filemap_add_to_page_cache: dev 0:64771 ino e745 page=0000000000000000 pfn=1354554 ofs=1527808
<unknown>-12683 (-----) [006] .... 1920260.594312: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1141445 ofs=9801728
<unknown>-12683 (-----) [006] .... 1920260.594322: mm_filemap_add_to_page_cache: dev 0:64771 ino 1 page=0000000000000000 pfn=1323774 ofs=231460864
<unknown>-12683 (-----) [006] .... 1920260.594326: mm_filemap_add_to_page_cache: dev 0:64771 ino 2 page=0000000000000000 pfn=1323772 ofs=10993664
<unknown>-12683 (-----) [006] .... 1920260.595212: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1481305 ofs=9805824
<unknown>-12683 (-----) [006] .... 1920260.595214: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1481306 ofs=9809920
<unknown>-12683 (-----) [006] .... 1920260.595214: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1481316 ofs=9814016
<unknown>-12683 (-----) [006] .... 1920260.595215: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1481340 ofs=9818112
<unknown>-12683 (-----) [006] .... 1920260.595216: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1394587 ofs=9822208
<unknown>-12683 (-----) [006] .... 1920260.595216: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1103455 ofs=9826304
<unknown>-12683 (-----) [006] .... 1920260.595217: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1103271 ofs=9830400
<unknown>-12683 (-----) [006] .... 1920260.595218: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1103168 ofs=9834496
<unknown>-12683 (-----) [006] .... 1920260.595218: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1103145 ofs=9838592
<unknown>-12683 (-----) [006] .... 1920260.595219: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1103115 ofs=9842688
<unknown>-12683 (-----) [006] .... 1920260.595222: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1103057 ofs=9846784
<unknown>-12683 (-----) [006] .... 1920260.595222: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1331958 ofs=9850880
<unknown>-12683 (-----) [006] .... 1920260.595227: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1356305 ofs=9854976
<unknown>-12683 (-----) [006] .... 1920260.595228: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1103708 ofs=9859072
<unknown>-12683 (-----) [006] .... 1920260.595228: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1099286 ofs=9863168
<unknown>-12683 (-----) [006] .... 1920260.595229: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1435190 ofs=9867264
<unknown>-12683 (-----) [006] .... 1920260.595229: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1395504 ofs=9871360
<unknown>-12683 (-----) [006] .... 1920260.595230: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1352916 ofs=9875456
<unknown>-12683 (-----) [006] .... 1920260.595231: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1255529 ofs=9879552
<unknown>-12683 (-----) [006] .... 1920260.595231: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1336145 ofs=9883648
<unknown>-12683 (-----) [006] .... 1920260.595232: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1334143 ofs=9887744
<unknown>-12683 (-----) [006] .... 1920260.595232: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1328548 ofs=9891840
<unknown>-12683 (-----) [006] .... 1920260.595232: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1222215 ofs=9895936
<unknown>-12683 (-----) [006] .... 1920260.595233: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1461056 ofs=9900032
<unknown>-12683 (-----) [006] .... 1920260.595234: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1228276 ofs=9904128
<unknown>-12683 (-----) [006] .... 1920260.595235: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1151188 ofs=9908224
<unknown>-12683 (-----) [006] .... 1920260.595236: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1443605 ofs=9912320
<unknown>-12683 (-----) [006] .... 1920260.595236: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1146821 ofs=9916416
<unknown>-12683 (-----) [006] .... 1920260.595237: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1103669 ofs=9920512
<unknown>-12683 (-----) [006] .... 1920260.595238: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1103744 ofs=9924608
<unknown>-12683 (-----) [006] .... 1920260.595238: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1103868 ofs=9928704
<unknown>-12683 (-----) [006] .... 1920260.595789: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1465942 ofs=15855616
<unknown>-12683 (-----) [006] .... 1920260.595792: mm_filemap_add_to_page_cache: dev 0:64771 ino 1 page=0000000000000000 pfn=1323712 ofs=261189632
<unknown>-12683 (-----) [006] .... 1920260.595998: mm_filemap_add_to_page_cache: dev 0:64771 ino 1 page=0000000000000000 pfn=1323701 ofs=262094848
<unknown>-12683 (-----) [006] .... 1920260.596191: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1222287 ofs=15859712
<unknown>-12683 (-----) [006] .... 1920260.596192: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1213146 ofs=15863808
<unknown>-12683 (-----) [006] .... 1920260.596192: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1310396 ofs=15867904
<unknown>-12683 (-----) [006] .... 1920260.596193: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1310177 ofs=15872000
<unknown>-12683 (-----) [006] .... 1920260.596194: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1187914 ofs=15876096
<unknown>-12683 (-----) [006] .... 1920260.596195: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1322409 ofs=15880192
<unknown>-12683 (-----) [006] .... 1920260.596195: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1282484 ofs=15884288
<unknown>-12683 (-----) [006] .... 1920260.596200: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1097245 ofs=15888384
<unknown>-12683 (-----) [006] .... 1920260.596200: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1416816 ofs=15892480
<unknown>-12683 (-----) [006] .... 1920260.596201: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1257125 ofs=15896576
<unknown>-12683 (-----) [006] .... 1920260.596201: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1403527 ofs=15900672
<unknown>-12683 (-----) [006] .... 1920260.596202: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1218006 ofs=15904768
<unknown>-12683 (-----) [006] .... 1920260.596202: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1153893 ofs=15908864
<unknown>-12683 (-----) [006] .... 1920260.596202: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1328023 ofs=15912960
<unknown>-12683 (-----) [006] .... 1920260.596203: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1465412 ofs=15917056
<unknown>-12683 (-----) [006] .... 1920260.596203: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1092448 ofs=15921152
<unknown>-12683 (-----) [006] .... 1920260.596204: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1239220 ofs=15925248
<unknown>-12683 (-----) [006] .... 1920260.596204: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1276491 ofs=15929344
<unknown>-12683 (-----) [006] .... 1920260.596205: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1262240 ofs=15933440
<unknown>-12683 (-----) [006] .... 1920260.596206: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1323793 ofs=15937536
<unknown>-12683 (-----) [006] .... 1920260.596206: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1074937 ofs=15941632
<unknown>-12683 (-----) [006] .... 1920260.596207: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1311157 ofs=15945728
<unknown>-12683 (-----) [006] .... 1920260.596207: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1308442 ofs=15949824
<unknown>-12683 (-----) [006] .... 1920260.596210: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1467709 ofs=15953920
<unknown>-12683 (-----) [006] .... 1920260.596211: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1394299 ofs=15958016
<unknown>-12683 (-----) [004] .... 1920260.612586: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1316156 ofs=344064
<unknown>-12683 (-----) [004] .... 1920260.612591: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1406323 ofs=348160
<unknown>-12683 (-----) [004] .... 1920260.612601: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1216972 ofs=352256
<unknown>-12683 (-----) [004] .... 1920260.612605: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1271924 ofs=356352
<unknown>-12683 (-----) [004] .... 1920260.612605: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1369225 ofs=360448
<unknown>-12683 (-----) [004] .... 1920260.612608: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1318474 ofs=364544
<unknown>-12683 (-----) [004] .... 1920260.612609: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1227283 ofs=368640
<unknown>-12683 (-----) [004] .... 1920260.612613: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1364376 ofs=372736
<unknown>-12683 (-----) [004] .... 1920260.612613: mm_filemap_add_to_page_cache: dev 0:64771 ino 180a page=0000000000000000 pfn=1073400 ofs=376832

View File

@ -1,184 +0,0 @@
#!/usr/bin/env python3
#
# Copyright 2019, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper util libraries for command line operations."""
import asyncio
import sys
import time
from typing import Tuple, Optional, List
import lib.print_utils as print_utils
TIMEOUT = 50
SIMULATE = False
def run_command_nofail(cmd: List[str], **kwargs) -> None:
"""Runs cmd list with default timeout.
Throws exception if the execution fails.
"""
my_kwargs = {"timeout": TIMEOUT, "shell": False, "simulate": False}
my_kwargs.update(kwargs)
passed, out = execute_arbitrary_command(cmd, **my_kwargs)
if not passed:
raise RuntimeError(
"Failed to execute %s (kwargs=%s), output=%s" % (cmd, kwargs, out))
def run_adb_shell_command(cmd: str) -> Tuple[bool, str]:
"""Runs command using adb shell.
Returns:
A tuple of running status (True=succeeded, False=failed or timed out) and
std output (string contents of stdout with trailing whitespace removed).
"""
return run_shell_command('adb shell "{}"'.format(cmd))
def run_shell_func(script_path: str,
func: str,
args: List[str]) -> Tuple[bool, str]:
"""Runs shell function with default timeout.
Returns:
A tuple of running status (True=succeeded, False=failed or timed out) and
std output (string contents of stdout with trailing whitespace removed).
"""
if args:
cmd = 'bash -c "source {script_path}; {func} {args}"'.format(
script_path=script_path,
func=func,
args=' '.join("'{}'".format(arg) for arg in args))
else:
cmd = 'bash -c "source {script_path}; {func}"'.format(
script_path=script_path,
func=func)
print_utils.debug_print(cmd)
return run_shell_command(cmd)
def run_shell_command(cmd: str) -> Tuple[bool, str]:
"""Runs shell command with default timeout.
Returns:
A tuple of running status (True=succeeded, False=failed or timed out) and
std output (string contents of stdout with trailing whitespace removed).
"""
return execute_arbitrary_command([cmd],
TIMEOUT,
shell=True,
simulate=SIMULATE)
def execute_arbitrary_command(cmd: List[str],
timeout: int,
shell: bool,
simulate: bool) -> Tuple[bool, str]:
"""Run arbitrary shell command with default timeout.
Mostly copy from
frameworks/base/startop/scripts/app_startup/app_startup_runner.py.
Args:
cmd: list of cmd strings.
timeout: the time limit of running cmd.
shell: indicate if the cmd is a shell command.
simulate: if it's true, do not run the command and assume the running is
successful.
Returns:
A tuple of running status (True=succeeded, False=failed or timed out) and
std output (string contents of stdout with trailing whitespace removed) .
"""
if simulate:
print(cmd)
return True, ''
print_utils.debug_print('[EXECUTE]', cmd)
# block until either command finishes or the timeout occurs.
loop = asyncio.get_event_loop()
(return_code, script_output) = loop.run_until_complete(
_run_command(*cmd, shell=shell, timeout=timeout))
script_output = script_output.decode() # convert bytes to str
passed = (return_code == 0)
print_utils.debug_print('[$?]', return_code)
if not passed:
print('[FAILED, code:%s]' % (return_code), script_output, file=sys.stderr)
return passed, script_output.rstrip()
async def _run_command(*args: List[str],
shell: bool = False,
timeout: Optional[int] = None) -> Tuple[int, bytes]:
if shell:
process = await asyncio.create_subprocess_shell(
*args, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.STDOUT)
else:
process = await asyncio.create_subprocess_exec(
*args, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.STDOUT)
script_output = b''
print_utils.debug_print('[PID]', process.pid)
timeout_remaining = timeout
time_started = time.time()
# read line (sequence of bytes ending with b'\n') asynchronously
while True:
try:
line = await asyncio.wait_for(process.stdout.readline(),
timeout_remaining)
print_utils.debug_print('[STDOUT]', line)
script_output += line
if timeout_remaining:
time_elapsed = time.time() - time_started
timeout_remaining = timeout - time_elapsed
except asyncio.TimeoutError:
print_utils.debug_print('[TIMEDOUT] Process ', process.pid)
print_utils.debug_print('[TIMEDOUT] Sending SIGTERM.')
process.terminate()
# 5 second timeout for process to handle SIGTERM nicely.
try:
(remaining_stdout,
remaining_stderr) = await asyncio.wait_for(process.communicate(), 5)
script_output += remaining_stdout
except asyncio.TimeoutError:
print_utils.debug_print('[TIMEDOUT] Sending SIGKILL.')
process.kill()
# 5 second timeout to finish with SIGKILL.
try:
(remaining_stdout,
remaining_stderr) = await asyncio.wait_for(process.communicate(), 5)
script_output += remaining_stdout
except asyncio.TimeoutError:
# give up, this will leave a zombie process.
print_utils.debug_print('[TIMEDOUT] SIGKILL failed for process ',
process.pid)
time.sleep(100)
return -1, script_output
else:
if not line: # EOF
break
code = await process.wait() # wait for child process to exit
return code, script_output
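A minimal usage sketch of the helpers above, assuming the module is importable as lib.cmd_utils (matching the lib.print_utils import it uses) and that an adb device is attached; the commands themselves are placeholders:

import lib.cmd_utils as cmd_utils

# Returns (passed, stdout) instead of raising on failure.
passed, output = cmd_utils.run_adb_shell_command('getprop ro.build.version.sdk')
if passed:
  print('device sdk:', output)

# Raises RuntimeError if the command fails or times out.
cmd_utils.run_command_nofail(['ls', '/system'], timeout=10)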

View File

@ -1,104 +0,0 @@
#!/usr/bin/env python3
#
# Copyright 2019, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper util libraries for parsing logcat logs."""
import asyncio
import re
from datetime import datetime
from typing import Optional, Pattern
# local import
import lib.print_utils as print_utils
def parse_logcat_datetime(timestamp: str) -> Optional[datetime]:
"""Parses the timestamp of logcat.
Params:
timestamp: for example "2019-07-01 16:13:55.221".
Returns:
a datetime parsed from the timestamp, or None if the format is invalid.
"""
try:
# Match the logcat timestamp format, for example "2019-07-01 16:13:55.221"
# (the '-v year' output format, so the year is parsed directly from the line).
timestamp = datetime.strptime(timestamp,
'%Y-%m-%d %H:%M:%S.%f')
return timestamp
except ValueError as ve:
print_utils.debug_print('Invalid line: ' + timestamp)
return None
def _is_time_out(timeout: datetime, line: str) -> bool:
"""Checks if the timestamp of this line exceeds the timeout.
Returns:
true if the timestamp exceeds the timeout.
"""
# Get the timestamp string.
cur_timestamp_str = ' '.join(re.split(r'\s+', line)[0:2])
timestamp = parse_logcat_datetime(cur_timestamp_str)
if not timestamp:
return False
return timestamp > timeout
async def _blocking_wait_for_logcat_pattern(timestamp: datetime,
pattern: Pattern,
timeout: datetime) -> Optional[str]:
# Show the year in the timestamp.
logcat_cmd = 'adb logcat -v UTC -v year -v threadtime -T'.split()
logcat_cmd.append(str(timestamp))
print_utils.debug_print('[LOGCAT]:' + ' '.join(logcat_cmd))
# Create subprocess
process = await asyncio.create_subprocess_exec(
*logcat_cmd,
# stdout must be a pipe to be accessible as process.stdout
stdout=asyncio.subprocess.PIPE)
while True:
# Read one line of output.
data = await process.stdout.readline()
line = data.decode('utf-8').rstrip()
# 2019-07-01 14:54:21.946 27365 27392 I ActivityTaskManager: Displayed
# com.android.settings/.Settings: +927ms
# TODO: Detect timeouts even when there is no logcat output.
if _is_time_out(timeout, line):
print_utils.debug_print('DID TIMEOUT BEFORE SEEING ANYTHING '
'(timeout={timeout} << {pattern} >>)'.format(timeout=timeout,
pattern=pattern))
return None
if pattern.match(line):
print_utils.debug_print(
'WE DID SEE PATTERN << "{}" >>.'.format(pattern))
return line
def blocking_wait_for_logcat_pattern(timestamp: datetime,
pattern: Pattern,
timeout: datetime) -> Optional[str]:
"""Selects the line that matches the pattern and within the timeout.
Returns:
the line that matches the pattern and within the timeout.
"""
loop = asyncio.get_event_loop()
result = loop.run_until_complete(
_blocking_wait_for_logcat_pattern(timestamp, pattern, timeout))
return result
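A usage sketch for the pattern-waiting helper above, assuming it is importable as lib.logcat_utils; the package in the pattern and the 10-second deadline are placeholders:

import datetime
import re
import lib.logcat_utils as logcat_utils

start = datetime.datetime.now()
deadline = start + datetime.timedelta(seconds=10)
pattern = re.compile('.*ActivityTaskManager: Displayed com.android.settings.*')
# Blocks until a matching logcat line appears, or a line past the deadline is seen.
line = logcat_utils.blocking_wait_for_logcat_pattern(start, pattern, deadline)
print(line)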

View File

@ -1,88 +0,0 @@
#!/usr/bin/env python3
#
# Copyright 2019, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Unit tests for the logcat_utils.py script."""
import asyncio
import datetime
import re
import logcat_utils
from mock import MagicMock, patch
def test_parse_logcat_datetime():
# Act
result = logcat_utils.parse_logcat_datetime('2019-07-01 16:13:55.221')
# Assert
assert result == datetime.datetime(2019, 7, 1, 16, 13, 55, 221000)
class AsyncMock(MagicMock):
async def __call__(self, *args, **kwargs):
return super(AsyncMock, self).__call__(*args, **kwargs)
def _async_return():
f = asyncio.Future()
f.set_result(
b'2019-07-01 15:51:53.290 27365 27392 I ActivityTaskManager: '
b'Displayed com.google.android.music/com.android.music.activitymanagement.'
b'TopLevelActivity: +1s7ms')
return f
def test_parse_displayed_time_succeed():
# Act
with patch('asyncio.create_subprocess_exec',
new_callable=AsyncMock) as asyncio_mock:
asyncio_mock.return_value.stdout.readline = _async_return
timestamp = datetime.datetime(datetime.datetime.now().year, 7, 1, 16, 13,
55, 221000)
timeout_dt = timestamp + datetime.timedelta(0, 10)
pattern = re.compile('.*ActivityTaskManager: Displayed '
'com.google.android.music/com.android.music.*')
result = logcat_utils.blocking_wait_for_logcat_pattern(timestamp,
pattern,
timeout_dt)
# Assert
assert result == '2019-07-01 15:51:53.290 27365 27392 I ' \
'ActivityTaskManager: ' \
'Displayed com.google.android.music/com.android.music.' \
'activitymanagement.TopLevelActivity: +1s7ms'
def _async_timeout_return():
f = asyncio.Future()
f.set_result(
b'2019-07-01 17:51:53.290 27365 27392 I ActivityTaskManager: '
b'Displayed com.google.android.music/com.android.music.activitymanagement.'
b'TopLevelActivity: +1s7ms')
return f
def test_parse_displayed_time_timeout():
# Act
with patch('asyncio.create_subprocess_exec',
new_callable=AsyncMock) as asyncio_mock:
asyncio_mock.return_value.stdout.readline = _async_timeout_return
timestamp = datetime.datetime(datetime.datetime.now().year,
7, 1, 16, 13, 55, 221000)
timeout_dt = timestamp + datetime.timedelta(0, 10)
pattern = re.compile('.*ActivityTaskManager: Displayed '
'com.google.android.music/com.android.music.*')
result = logcat_utils.blocking_wait_for_logcat_pattern(timestamp,
pattern,
timeout_dt)
# Assert
assert result is None

View File

@ -1,67 +0,0 @@
#!/usr/bin/env python3
#
# Copyright 2019, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper util libraries for debug printing."""
import sys
DEBUG = False
def debug_print(*args, **kwargs):
"""Prints the args to sys.stderr if the DEBUG is set."""
if DEBUG:
print(*args, **kwargs, file=sys.stderr)
def error_print(*args, **kwargs):
print('[ERROR]:', *args, file=sys.stderr, **kwargs)
def _expand_gen_repr(args):
"""Like repr but any generator-like object has its iterator consumed
and then called repr on."""
new_args_list = []
for i in args:
# detect iterable objects that do not have their own override of __str__
if hasattr(i, '__iter__'):
to_str = getattr(i, '__str__')
if to_str.__objclass__ == object:
# the repr for a generator is just type+address, expand it out instead.
new_args_list.append([_expand_gen_repr([j])[0] for j in i])
continue
# normal case: uses the built-in to-string
new_args_list.append(i)
return new_args_list
def debug_print_gen(*args, **kwargs):
"""Like _debug_print but will turn any iterable args into a list."""
if not DEBUG:
return
new_args_list = _expand_gen_repr(args)
debug_print(*new_args_list, **kwargs)
def debug_print_nd(*args, **kwargs):
"""Like _debug_print but will turn any NamedTuple-type args into a string."""
if not DEBUG:
return
new_args_list = []
for i in args:
if hasattr(i, '_field_types'):
new_args_list.append("%s: %s" % (i.__name__, i._field_types))
else:
new_args_list.append(i)
debug_print(*new_args_list, **kwargs)
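A small sketch of how these debug helpers are used, assuming the module is importable as lib.print_utils; output only appears once the module-level DEBUG flag is set:

import lib.print_utils as print_utils

print_utils.DEBUG = True
print_utils.debug_print('parsed', 3, 'entries')  # plain print to stderr
# Generator arguments are expanded into lists before printing.
print_utils.debug_print_gen('squares:', (x * x for x in range(3)))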

View File

@ -1,355 +0,0 @@
#!/usr/bin/python3
# Copyright (C) 2019 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import sys
from sqlalchemy import create_engine
from sqlalchemy import Column, Date, Integer, Float, String, ForeignKey
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy.orm import sessionmaker
import sqlalchemy
from typing import Optional, Tuple
_DEBUG = False # print sql commands to console
_FLUSH_LIMIT = 10000 # how many entries are parsed before flushing to DB from memory
Base = declarative_base()
class RawFtraceEntry(Base):
__tablename__ = 'raw_ftrace_entries'
id = Column(Integer, primary_key=True)
task_name = Column(String, nullable=True) # <...> -> None.
task_pid = Column(String, nullable=False)
tgid = Column(Integer, nullable=True) # ----- -> None.
cpu = Column(Integer, nullable=False)
timestamp = Column(Float, nullable=False)
function = Column(String, nullable=False)
function_args = Column(String, nullable=False)
# 1:1 relation with MmFilemapAddToPageCache.
mm_filemap_add_to_page_cache = relationship("MmFilemapAddToPageCache",
back_populates="raw_ftrace_entry")
@staticmethod
def parse_dict(line):
# ' <...>-5521 (-----) [003] ...1 17148.446877: tracing_mark_write: trace_event_clock_sync: parent_ts=17148.447266'
m = re.match('\s*(.*)-(\d+)\s+\(([^\)]+)\)\s+\[(\d+)\]\s+([\w.]{4})\s+(\d+[.]\d+):\s+(\w+):\s+(.*)', line)
if not m:
return None
groups = m.groups()
# groups example:
# ('<...>',
# '5521',
# '-----',
# '003',
# '...1',
# '17148.446877',
# 'tracing_mark_write',
# 'trace_event_clock_sync: parent_ts=17148.447266')
task_name = groups[0]
if task_name == '<...>':
task_name = None
task_pid = int(groups[1])
tgid = groups[2]
if tgid == '-----':
tgid = None
cpu = int(groups[3])
# irq_flags = groups[4]
timestamp = float(groups[5])
function = groups[6]
function_args = groups[7]
return {'task_name': task_name, 'task_pid': task_pid, 'tgid': tgid, 'cpu': cpu, 'timestamp': timestamp, 'function': function, 'function_args': function_args}
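# Illustrative sketch (not in the original source): for the sample line quoted
# in the comment above, parse_dict would produce roughly:
#   {'task_name': None, 'task_pid': 5521, 'tgid': None, 'cpu': 3,
#    'timestamp': 17148.446877, 'function': 'tracing_mark_write',
#    'function_args': 'trace_event_clock_sync: parent_ts=17148.447266'}
# ('<...>' and '-----' are normalized to None for task_name and tgid.)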
class SchedSwitch(Base):
__tablename__ = 'sched_switches'
id = Column(Integer, ForeignKey('raw_ftrace_entries.id'), primary_key=True)
prev_comm = Column(String, nullable=False)
prev_pid = Column(Integer, nullable=False)
prev_prio = Column(Integer, nullable=False)
prev_state = Column(String, nullable=False)
next_comm = Column(String, nullable=False)
next_pid = Column(Integer, nullable=False)
next_prio = Column(Integer, nullable=False)
@staticmethod
def parse_dict(function_args, id = None):
# 'prev_comm=kworker/u16:5 prev_pid=13971 prev_prio=120 prev_state=S ==> next_comm=swapper/4 next_pid=0 next_prio=120'
m = re.match("prev_comm=(.*) prev_pid=(\d+) prev_prio=(\d+) prev_state=(.*) ==> next_comm=(.*) next_pid=(\d+) next_prio=(\d+) ?", function_args)
if not m:
return None
groups = m.groups()
# ('kworker/u16:5', '13971', '120', 'S', 'swapper/4', '0', '120')
d = {}
if id is not None:
d['id'] = id
d['prev_comm'] = groups[0]
d['prev_pid'] = int(groups[1])
d['prev_prio'] = int(groups[2])
d['prev_state'] = groups[3]
d['next_comm'] = groups[4]
d['next_pid'] = int(groups[5])
d['next_prio'] = int(groups[6])
return d
class SchedBlockedReason(Base):
__tablename__ = 'sched_blocked_reasons'
id = Column(Integer, ForeignKey('raw_ftrace_entries.id'), primary_key=True)
pid = Column(Integer, nullable=False)
iowait = Column(Integer, nullable=False)
caller = Column(String, nullable=False)
@staticmethod
def parse_dict(function_args, id = None):
# 'pid=2289 iowait=1 caller=wait_on_page_bit_common+0x2a8/0x5f8'
m = re.match("pid=(\d+) iowait=(\d+) caller=(.*) ?", function_args)
if not m:
return None
groups = m.groups()
# ('2289', '1', 'wait_on_page_bit_common+0x2a8/0x5f8')
d = {}
if id is not None:
d['id'] = id
d['pid'] = int(groups[0])
d['iowait'] = int(groups[1])
d['caller'] = groups[2]
return d
class MmFilemapAddToPageCache(Base):
__tablename__ = 'mm_filemap_add_to_page_caches'
id = Column(Integer, ForeignKey('raw_ftrace_entries.id'), primary_key=True)
dev = Column(Integer, nullable=False) # decoded from ${major}:${minor} syntax.
dev_major = Column(Integer, nullable=False) # original ${major} value.
dev_minor = Column(Integer, nullable=False) # original ${minor} value.
ino = Column(Integer, nullable=False) # decoded from hex to base 10
page = Column(Integer, nullable=False) # decoded from hex to base 10
pfn = Column(Integer, nullable=False)
ofs = Column(Integer, nullable=False)
# 1:1 relation with RawFtraceEntry.
raw_ftrace_entry = relationship("RawFtraceEntry", uselist=False)
@staticmethod
def parse_dict(function_args, id = None):
# dev 253:6 ino b2c7 page=00000000ec787cd9 pfn=1478539 ofs=4096
m = re.match("dev (\d+):(\d+) ino ([0-9a-fA-F]+) page=([0-9a-fA-F]+) pfn=(\d+) ofs=(\d+)", function_args)
if not m:
return None
groups = m.groups()
# ('253', '6', 'b2c7', '00000000ec787cd9', '1478539', '4096')
d = {}
if id is not None:
d['id'] = id
device_major = d['dev_major'] = int(groups[0])
device_minor = d['dev_minor'] = int(groups[1])
d['dev'] = device_major << 8 | device_minor
d['ino'] = int(groups[2], 16)
d['page'] = int(groups[3], 16)
d['pfn'] = int(groups[4])
d['ofs'] = int(groups[5])
return d
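# Illustrative note (not in the original source): 'dev' packs major:minor as
# (major << 8) | minor. For the sample args above, "dev 253:6" decodes to
# (253 << 8) | 6 == 64774, the value the trace2db unit tests compare against.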
class Trace2Db:
def __init__(self, db_filename: str):
(s, e) = self._init_sqlalchemy(db_filename)
self._session = s
self._engine = e
self._raw_ftrace_entry_filter = lambda x: True
def set_raw_ftrace_entry_filter(self, flt):
"""
Install a filter of type dict(RawFtraceEntry) -> bool.
If the filter returns False, the RawFtraceEntry is skipped and not added to the database.
"""
self._raw_ftrace_entry_filter = flt
@staticmethod
def _init_sqlalchemy(db_filename: str) -> Tuple[object, object]:
global _DEBUG
engine = create_engine('sqlite:///' + db_filename, echo=_DEBUG)
# CREATE ... (tables)
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine)
session = Session()
return (session, engine)
def parse_file_into_db(self, filename: str, limit: Optional[int] = None):
"""
Parse the ftrace/systrace at 'filename',
inserting the values into the current sqlite database.
:return: number of RawFtraceEntry inserted.
"""
return parse_file(filename, self._session, self._engine, self._raw_ftrace_entry_filter, limit)
def parse_file_buf_into_db(self, file_buf, limit: Optional[int] = None):
"""
Parse the ftrace/systrace from the already-open file buffer 'file_buf',
inserting the values into the current sqlite database.
:return: number of RawFtraceEntry inserted.
"""
return parse_file_buf(file_buf, self._session, self._engine, self._raw_ftrace_entry_filter, limit)
@property
def session(self):
return self._session
def insert_pending_entries(engine, kls, lst):
if len(lst) > 0:
# for some reason, it tries to generate an empty INSERT statement with len=0,
# which of course violates the first non-null constraint.
try:
# Performance-sensitive parsing according to:
# https://docs.sqlalchemy.org/en/13/faq/performance.html#i-m-inserting-400-000-rows-with-the-orm-and-it-s-really-slow
engine.execute(kls.__table__.insert(), lst)
lst.clear()
except sqlalchemy.exc.IntegrityError as err:
# possibly violating some SQL constraint, print data here.
print(err)
print(lst)
raise
def parse_file(filename: str, *args, **kwargs) -> int:
# use explicit encoding to avoid UnicodeDecodeError.
with open(filename, encoding="ISO-8859-1") as f:
return parse_file_buf(f, *args, **kwargs)
def parse_file_buf(filebuf, session, engine, raw_ftrace_entry_filter, limit=None) -> int:
global _FLUSH_LIMIT
count = 0
# count and id are not equal, because count still increases for invalid lines.
id = 0
pending_entries = []
pending_sched_switch = []
pending_sched_blocked_reasons = []
pending_mm_filemap_add_to_pagecaches = []
def insert_all_pending_entries():
insert_pending_entries(engine, RawFtraceEntry, pending_entries)
insert_pending_entries(engine, SchedSwitch, pending_sched_switch)
insert_pending_entries(engine, SchedBlockedReason, pending_sched_blocked_reasons)
insert_pending_entries(engine, MmFilemapAddToPageCache, pending_mm_filemap_add_to_pagecaches)
# for trace.html files produced by systrace,
# the actual ftrace is in the 'second' trace-data script class.
parsing_trace_data = 0
parsing_systrace_file = False
f = filebuf
for l in f:
if parsing_trace_data == 0 and l == "<!DOCTYPE html>\n":
parsing_systrace_file = True
continue
if parsing_trace_data != 2 and parsing_systrace_file:
if l == ' <script class="trace-data" type="application/text">\n':
parsing_trace_data = parsing_trace_data + 1
continue
if parsing_systrace_file and parsing_trace_data != 2:
continue
elif parsing_systrace_file and parsing_trace_data == 2 and l == " </script>\n":
# the rest of this file is just random html
break
# now parsing the ftrace data.
if len(l) > 1 and l[0] == '#':
continue
count = count + 1
if limit and count >= limit:
break
raw_ftrace_entry = RawFtraceEntry.parse_dict(l)
if not raw_ftrace_entry:
print("WARNING: Failed to parse raw ftrace entry: " + l)
continue
if not raw_ftrace_entry_filter(raw_ftrace_entry):
# Skip processing raw ftrace entries that don't match a filter.
# This is an optimization for when Trace2Db is used programmatically
# to avoid having an overly large database.
continue
pending_entries.append(raw_ftrace_entry)
id = id + 1
if raw_ftrace_entry['function'] == 'sched_switch':
sched_switch = SchedSwitch.parse_dict(raw_ftrace_entry['function_args'], id)
if not sched_switch:
print("WARNING: Failed to parse sched_switch: " + l)
else:
pending_sched_switch.append(sched_switch)
elif raw_ftrace_entry['function'] == 'sched_blocked_reason':
sbr = SchedBlockedReason.parse_dict(raw_ftrace_entry['function_args'], id)
if not sbr:
print("WARNING: Failed to parse sched_blocked_reason: " + l)
else:
pending_sched_blocked_reasons.append(sbr)
elif raw_ftrace_entry['function'] == 'mm_filemap_add_to_page_cache':
d = MmFilemapAddToPageCache.parse_dict(raw_ftrace_entry['function_args'],
id)
if not d:
print("WARNING: Failed to parse mm_filemap_add_to_page_cache: " + l)
else:
pending_mm_filemap_add_to_pagecaches.append(d)
# Objects are cached in python memory, not yet sent to SQL database.
# Send INSERT/UPDATE/etc statements to the underlying SQL database.
if count % _FLUSH_LIMIT == 0:
insert_all_pending_entries()
insert_all_pending_entries()
# Ensure underlying database commits changes from memory to disk.
session.commit()
return count
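A minimal sketch of driving Trace2Db end to end; the trace file name here is a placeholder:

from trace2db import MmFilemapAddToPageCache, Trace2Db

t2d = Trace2Db(':memory:')                     # or a path to an on-disk sqlite file
count = t2d.parse_file_into_db('trace.html')   # hypothetical systrace/ftrace capture
page_cache_adds = t2d.session.query(MmFilemapAddToPageCache).count()
print(count, 'raw ftrace entries,', page_cache_adds, 'page cache additions')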

View File

@ -1,222 +0,0 @@
#!/usr/bin/env python3
#
# Copyright 2019, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Unit tests for inode2filename module.
Install:
$> sudo apt-get install python3-pytest ## OR
$> pip install -U pytest
See also https://docs.pytest.org/en/latest/getting-started.html
Usage:
$> ./inode2filename_test.py
$> pytest inode2filename_test.py
$> python -m pytest inode2filename_test.py
See also https://docs.pytest.org/en/latest/usage.html
"""
# global imports
import io
from copy import deepcopy
# pip imports
import pytest
# local imports
from trace2db import *
# This pretty-prints the raw dictionary of the sqlalchemy object if it fails.
class EqualsSqlAlchemyObject:
# For convenience to write shorter tests, we also add 'ignore_fields' which allow us to specify
# which fields to ignore when doing the comparison.
def __init__(self_, self, ignore_fields=[]):
self_.self = self
self_.ignore_fields = ignore_fields
# Do field-by-field comparison.
# It seems that SQLAlchemy does not implement __eq__ itself so we have to do it ourselves.
def __eq__(self_, other):
if isinstance(other, EqualsSqlAlchemyObject):
other = other.self
self = self_.self
classes_match = isinstance(other, self.__class__)
a, b = deepcopy(self.__dict__), deepcopy(other.__dict__)
#compare based on equality our attributes, ignoring SQLAlchemy internal stuff
a.pop('_sa_instance_state', None)
b.pop('_sa_instance_state', None)
for f in self_.ignore_fields:
a.pop(f, None)
b.pop(f, None)
attrs_match = (a == b)
return classes_match and attrs_match
def __repr__(self):
return repr(self.self.__dict__)
def assert_eq_ignore_id(left, right):
# This pretty-prints the raw dictionary of the sqlalchemy object if it fails.
# It does field-by-field comparison, but ignores the 'id' field.
assert EqualsSqlAlchemyObject(left, ignore_fields=['id']) == EqualsSqlAlchemyObject(right)
def parse_trace_file_to_db(*contents):
"""
Make temporary in-memory sqlite3 database by parsing the string contents as a trace.
:return: Trace2Db instance
"""
buf = io.StringIO()
for c in contents:
buf.write(c)
buf.write("\n")
buf.seek(0)
t2d = Trace2Db(":memory:")
t2d.parse_file_buf_into_db(buf)
buf.close()
return t2d
def test_ftrace_mm_filemap_add_to_pagecache():
test_contents = """
MediaStoreImpor-27212 (27176) [000] .... 16136.595194: mm_filemap_add_to_page_cache: dev 253:6 ino 7580 page=0000000060e990c7 pfn=677646 ofs=159744
MediaStoreImpor-27212 (27176) [000] .... 16136.595920: mm_filemap_add_to_page_cache: dev 253:6 ino 7580 page=0000000048e2e156 pfn=677645 ofs=126976
MediaStoreImpor-27212 (27176) [000] .... 16136.597793: mm_filemap_add_to_page_cache: dev 253:6 ino 7580 page=0000000051eabfb2 pfn=677644 ofs=122880
MediaStoreImpor-27212 (27176) [000] .... 16136.597815: mm_filemap_add_to_page_cache: dev 253:6 ino 7580 page=00000000ce7cd606 pfn=677643 ofs=131072
MediaStoreImpor-27212 (27176) [000] .... 16136.603732: mm_filemap_add_to_page_cache: dev 253:6 ino 1 page=000000008ffd3030 pfn=730119 ofs=186482688
MediaStoreImpor-27212 (27176) [000] .... 16136.604126: mm_filemap_add_to_page_cache: dev 253:6 ino b1d8 page=0000000098d4d2e2 pfn=829676 ofs=0
<...>-27197 (-----) [002] .... 16136.613471: mm_filemap_add_to_page_cache: dev 253:6 ino 7580 page=00000000aca88a97 pfn=743346 ofs=241664
<...>-27197 (-----) [002] .... 16136.615979: mm_filemap_add_to_page_cache: dev 253:6 ino 7580 page=00000000351f2bc1 pfn=777799 ofs=106496
<...>-27224 (-----) [006] .... 16137.400090: mm_filemap_add_to_page_cache: dev 253:6 ino 712d page=000000006ff7ffdb pfn=754861 ofs=0
<...>-1396 (-----) [000] .... 16137.451660: mm_filemap_add_to_page_cache: dev 253:6 ino 1 page=00000000ba0cbb34 pfn=769173 ofs=187191296
<...>-1396 (-----) [000] .... 16137.453020: mm_filemap_add_to_page_cache: dev 253:6 ino b285 page=00000000f6ef038e pfn=820291 ofs=0
<...>-1396 (-----) [000] .... 16137.453067: mm_filemap_add_to_page_cache: dev 253:6 ino b285 page=0000000083ebc446 pfn=956463 ofs=4096
<...>-1396 (-----) [000] .... 16137.453101: mm_filemap_add_to_page_cache: dev 253:6 ino b285 page=000000009dc2cd25 pfn=822813 ofs=8192
<...>-1396 (-----) [000] .... 16137.453113: mm_filemap_add_to_page_cache: dev 253:6 ino b285 page=00000000a11167fb pfn=928650 ofs=12288
<...>-1396 (-----) [000] .... 16137.453126: mm_filemap_add_to_page_cache: dev 253:6 ino b285 page=00000000c1c3311b pfn=621110 ofs=16384
<...>-1396 (-----) [000] .... 16137.453139: mm_filemap_add_to_page_cache: dev 253:6 ino b285 page=000000009aa78342 pfn=689370 ofs=20480
<...>-1396 (-----) [000] .... 16137.453151: mm_filemap_add_to_page_cache: dev 253:6 ino b285 page=0000000082cddcd6 pfn=755584 ofs=24576
<...>-1396 (-----) [000] .... 16137.453162: mm_filemap_add_to_page_cache: dev 253:6 ino b285 page=00000000b0249bc7 pfn=691431 ofs=28672
<...>-1396 (-----) [000] .... 16137.453183: mm_filemap_add_to_page_cache: dev 253:6 ino b285 page=000000006a776ff0 pfn=795084 ofs=32768
<...>-1396 (-----) [000] .... 16137.453203: mm_filemap_add_to_page_cache: dev 253:6 ino b285 page=000000001a4918a7 pfn=806998 ofs=36864
<...>-2578 (-----) [002] .... 16137.561871: mm_filemap_add_to_page_cache: dev 253:6 ino 1 page=00000000d65af9d2 pfn=719246 ofs=187015168
<...>-2578 (-----) [002] .... 16137.562846: mm_filemap_add_to_page_cache: dev 253:6 ino b25a page=000000002f6ba74f pfn=864982 ofs=0
<...>-2578 (-----) [000] .... 16138.104500: mm_filemap_add_to_page_cache: dev 253:6 ino 1 page=00000000f888d0f6 pfn=805812 ofs=192794624
<...>-2578 (-----) [000] .... 16138.105836: mm_filemap_add_to_page_cache: dev 253:6 ino b7dd page=000000003749523b pfn=977196 ofs=0
<...>-27215 (-----) [001] .... 16138.256881: mm_filemap_add_to_page_cache: dev 253:6 ino 758f page=000000001b375de1 pfn=755928 ofs=0
<...>-27215 (-----) [001] .... 16138.257526: mm_filemap_add_to_page_cache: dev 253:6 ino 7591 page=000000004e039481 pfn=841534 ofs=0
NonUserFacing6-5246 ( 1322) [005] .... 16138.356491: mm_filemap_add_to_page_cache: dev 253:6 ino 1 page=00000000d65af9d2 pfn=719246 ofs=161890304
NonUserFacing6-5246 ( 1322) [005] .... 16138.357538: mm_filemap_add_to_page_cache: dev 253:6 ino 9a64 page=000000002f6ba74f pfn=864982 ofs=0
NonUserFacing6-5246 ( 1322) [005] .... 16138.357581: mm_filemap_add_to_page_cache: dev 253:6 ino 9a64 page=000000006e0f8322 pfn=797894 ofs=4096
<...>-27197 (-----) [005] .... 16140.143224: mm_filemap_add_to_page_cache: dev 253:6 ino 7580 page=00000000a42527c6 pfn=1076669 ofs=32768
"""
t2d = parse_trace_file_to_db(test_contents)
session = t2d.session
first_row = session.query(MmFilemapAddToPageCache).order_by(MmFilemapAddToPageCache.id).first()
#dev 253:6 ino 7580 page=0000000060e990c7 pfn=677646 ofs=159744
assert_eq_ignore_id(MmFilemapAddToPageCache(dev=64774, dev_major=253, dev_minor=6,
ino=0x7580, page=0x0000000060e990c7, pfn=677646, ofs=159744), first_row)
second_to_last_row = session.query(MmFilemapAddToPageCache).filter(MmFilemapAddToPageCache.page.in_([0x000000006e0f8322])).first()
# dev 253:6 ino 9a64 page=000000006e0f8322 pfn=797894 ofs=4096
assert_eq_ignore_id(MmFilemapAddToPageCache(dev=64774, dev_major=253, dev_minor=6,
ino=0x9a64, page=0x000000006e0f8322, pfn=797894, ofs=4096), second_to_last_row)
def test_systrace_mm_filemap_add_to_pagecache():
test_contents = """
<!DOCTYPE html>
<html>
<head i18n-values="dir:textdirection;">
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<meta charset="utf-8"/>
<title>Android System Trace</title>
<script class="trace-data" type="application/text">
PROCESS DUMP
USER PID PPID VSZ RSS WCHAN PC S NAME COMM
root 1 0 62148 5976 0 0 S init [init]
root 2 0 0 0 0 0 S [kthreadd] [kthreadd]
</script>
<script class="trace-data" type="application/text">
MediaStoreImpor-27212 (27176) [000] .... 16136.595194: mm_filemap_add_to_page_cache: dev 253:6 ino 7580 page=0000000060e990c7 pfn=677646 ofs=159744
NonUserFacing6-5246 ( 1322) [005] .... 16138.357581: mm_filemap_add_to_page_cache: dev 253:6 ino 9a64 page=000000006e0f8322 pfn=797894 ofs=4096
</script>
<script class="trace-data" type="application/text">
{"traceEvents": [{"category": "process_argv", "name": "process_argv", "args": {"argv": ["/mnt/ssd3/workspace/master/external/chromium-trace/systrace.py", "-t", "5", "pagecache"]}, "pid": 160383, "ts": 1037300940509.7991, "tid": 139628672526080, "ph": "M"}, {"category": "python", "name": "clock_sync", "args": {"issue_ts": 1037307346185.212, "sync_id": "9a7e4fe3-89ad-441f-8226-8fe533fe973e"}, "pid": 160383, "ts": 1037307351643.906, "tid": 139628726089536, "ph": "c"}], "metadata": {"clock-domain": "SYSTRACE"}}
</script>
<!-- END TRACE -->
"""
  t2d = parse_trace_file_to_db(test_contents)
  session = t2d.session

  first_row = session.query(MmFilemapAddToPageCache).order_by(MmFilemapAddToPageCache.id).first()
  # dev 253:6 ino 7580 page=0000000060e990c7 pfn=677646 ofs=159744
  assert_eq_ignore_id(MmFilemapAddToPageCache(dev=64774, dev_major=253, dev_minor=6,
                                              ino=0x7580, page=0x0000000060e990c7,
                                              pfn=677646, ofs=159744), first_row)

  second_to_last_row = session.query(MmFilemapAddToPageCache).filter(
      MmFilemapAddToPageCache.page.in_([0x000000006e0f8322])).first()
  # dev 253:6 ino 9a64 page=000000006e0f8322 pfn=797894 ofs=4096
  assert_eq_ignore_id(MmFilemapAddToPageCache(dev=64774, dev_major=253, dev_minor=6,
                                              ino=0x9a64, page=0x000000006e0f8322,
                                              pfn=797894, ofs=4096), second_to_last_row)


def test_timestamp_filter():
  test_contents = """
MediaStoreImpor-27212 (27176) [000] .... 16136.595194: mm_filemap_add_to_page_cache: dev 253:6 ino 7580 page=0000000060e990c7 pfn=677646 ofs=159744
NonUserFacing6-5246 ( 1322) [005] .... 16139.357581: mm_filemap_add_to_page_cache: dev 253:6 ino 9a64 page=000000006e0f8322 pfn=797894 ofs=4096
MediaStoreImpor-27212 (27176) [000] .... 16136.604126: mm_filemap_add_to_page_cache: dev 253:6 ino b1d8 page=0000000098d4d2e2 pfn=829676 ofs=0
"""
  t2d = parse_trace_file_to_db(test_contents)
  session = t2d.session

  end_time = 16137.0

  results = session.query(MmFilemapAddToPageCache).join(
      MmFilemapAddToPageCache.raw_ftrace_entry).filter(
          RawFtraceEntry.timestamp <= end_time).order_by(
              MmFilemapAddToPageCache.id).all()

  assert len(results) == 2
  assert_eq_ignore_id(
      MmFilemapAddToPageCache(dev=64774, dev_major=253, dev_minor=6,
                              ino=0x7580, page=0x0000000060e990c7, pfn=677646,
                              ofs=159744), results[0])
  assert_eq_ignore_id(
      MmFilemapAddToPageCache(dev=64774, dev_major=253, dev_minor=6,
                              ino=0xb1d8, page=0x0000000098d4d2e2, pfn=829676,
                              ofs=0), results[1])


if __name__ == '__main__':
  pytest.main()
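
A note on the dev values asserted in these tests: the dev column packs the block device's major:minor pair as major * 256 + minor, so 253:6 becomes 64774, which is exactly what the assertions above check. A tiny standalone sketch of that field extraction (the regex and helper name below are illustrative, not the removed parser's actual code):

import re

# Matches the argument tail of an mm_filemap_add_to_page_cache ftrace event.
_MM_FILEMAP_RE = re.compile(
    r'dev (\d+):(\d+) ino ([0-9a-f]+) page=([0-9a-f]+) pfn=(\d+) ofs=(\d+)')

def parse_mm_filemap_args(function_args):
  """Illustrative parse of 'dev 253:6 ino 7580 page=... pfn=677646 ofs=159744'."""
  major, minor, ino, page, pfn, ofs = _MM_FILEMAP_RE.search(function_args).groups()
  return {
      'dev': int(major) * 256 + int(minor),  # 253:6 -> 64774
      'dev_major': int(major),
      'dev_minor': int(minor),
      'ino': int(ino, 16),
      'page': int(page, 16),
      'pfn': int(pfn),
      'ofs': int(ofs),
  }

fields = parse_mm_filemap_args(
    'dev 253:6 ino 7580 page=0000000060e990c7 pfn=677646 ofs=159744')
assert fields['dev'] == 64774 and fields['ino'] == 0x7580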


@ -1,57 +0,0 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-- filter for atrace writes
CREATE VIEW IF NOT EXISTS tracing_mark_writes AS
SELECT *
FROM raw_ftrace_entries
WHERE function = 'tracing_mark_write';
-- split the tracing_mark_write function args by ||s
DROP TABLE IF exists tracing_mark_write_split_array;
CREATE TABLE tracing_mark_write_split_array (
predictorset_id INT REFERENCES raw_ftrace_entries (id),
predictor_name,
rest,
gen,
UNIQUE(predictorset_id, gen) -- drops redundant inserts into table
);
CREATE INDEX "tracing_mark_write_split_array_id" ON tracing_mark_write_split_array (
predictorset_id COLLATE BINARY
);
INSERT INTO tracing_mark_write_split_array
WITH
split(predictorset_id, predictor_name, rest, gen) AS (
-- split by |
SELECT id, '', function_args || '|', 0 FROM tracing_mark_writes WHERE id
UNION ALL
SELECT predictorset_id,
substr(rest, 0, instr(rest, '|')),
substr(rest, instr(rest, '|')+1),
gen + 1
FROM split
WHERE rest <> ''),
split_results AS (
SELECT * FROM split WHERE predictor_name <> ''
)
SELECT * from split_results
;
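
The recursive CTE above peels one '|'-separated field off function_args per generation: gen 1 ends up holding the atrace type, gen 2 the pid, gen 3 the message, and gen 4 an optional counter (the later queries_mark_write_join.sql pivot relies on exactly this numbering). A pure-Python sketch of the same per-generation split, standalone and only for illustration:

def split_by_pipe(function_args):
  """Yields (predictor_name, gen) pairs the way the recursive split CTE does."""
  rest = function_args + '|'   # the seed row appends a trailing '|'
  gen = 0
  while rest:
    field, _, rest = rest.partition('|')
    gen += 1
    if field:  # split_results keeps only non-empty predictor_names
      yield field, gen

print(list(split_by_pipe('S|1368|launching: com.google.android.dialer|0')))
# [('S', 1), ('1368', 2), ('launching: com.google.android.dialer', 3), ('0', 4)]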


@ -1,44 +0,0 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-- use the 'launching: $process_name' async slice to figure out launch duration.
DROP VIEW IF EXISTS launch_durations_named;
CREATE VIEW launch_durations_named AS
WITH
launch_traces_raw AS (
SELECT *
FROM tracing_mark_write_split AS tmw,
raw_ftrace_entries AS rfe
WHERE atrace_message LIKE 'launching: %' AND rfe.id = tmw.raw_ftrace_entry_id
),
launch_traces_joined AS (
SELECT started.timestamp AS started_timestamp,
finished.timestamp AS finished_timestamp,
started.id AS started_id,
finished.id AS finished_id,
SUBSTR(started.atrace_message, 12) AS proc_name -- crop out "launching: " from the string.
FROM launch_traces_raw AS started,
launch_traces_raw AS finished
-- async slices ('S' -> 'F') have matching counters given the same PID.
WHERE started.atrace_type == 'S'
AND finished.atrace_type == 'F'
AND started.atrace_count == finished.atrace_count
AND started.atrace_pid == finished.atrace_pid
)
SELECT * from launch_traces_joined;
SELECT * FROM launch_durations_named;
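
The self-join above pairs each 'S' (async start) slice with its 'F' (finish) slice by requiring the same atrace pid and the same counter value, and the process name is whatever follows the 'launching: ' prefix. A standalone Python sketch of the same pairing over already-split slices; the row layout and the finish timestamp in the demo are illustrative:

from collections import namedtuple

Slice = namedtuple('Slice', 'timestamp atrace_type atrace_pid atrace_message atrace_count')

def launch_durations(slices):
  """Pairs S/F 'launching: <proc>' slices by (pid, count); yields (proc, seconds)."""
  starts = {}
  for s in slices:
    if not s.atrace_message.startswith('launching: '):
      continue
    key = (s.atrace_pid, s.atrace_count)
    if s.atrace_type == 'S':
      starts[key] = s
    elif s.atrace_type == 'F' and key in starts:
      begun = starts.pop(key)
      yield s.atrace_message[len('launching: '):], s.timestamp - begun.timestamp

demo = [
    Slice(14594.219856, 'S', 1368, 'launching: com.google.android.dialer', '0'),
    Slice(14595.342000, 'F', 1368, 'launching: com.google.android.dialer', '0'),  # made-up finish time
]
print(list(launch_durations(demo)))  # [('com.google.android.dialer', ~1.12)]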


@ -1,52 +0,0 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
DROP VIEW IF EXISTS blocked_iowait_for_app_launches;
CREATE VIEW blocked_iowait_for_app_launches AS
WITH
block_launch_join AS (
SELECT *
FROM blocking_durations AS bd,
launch_durations_named AS ld
WHERE bd.block_timestamp >= ld.started_timestamp
AND bd.unblock_timestamp <= ld.finished_timestamp
),
blocked_ui_threads AS (
SELECT *
FROM start_process_ui_threads AS sp,
block_launch_join AS blj
WHERE sp.atm_ui_thread_tid == unblock_pid
AND sp.process_name = blj.proc_name
),
summed_raw AS (
SELECT SUM(unblock_timestamp-block_timestamp)*1000 AS sum_block_duration_ms,
*
FROM blocked_ui_threads
GROUP BY unblock_pid
),
summed_neat AS (
SELECT sum_block_duration_ms AS blocked_iowait_duration_ms,
process_name,
(finished_timestamp - started_timestamp) * 1000 AS launching_duration_ms,
started_timestamp * 1000 AS launching_started_timestamp_ms,
finished_timestamp * 1000 AS launching_finished_timestamp_ms
-- filter out the rest because it's just selecting 1 arbitrary row (due to the SUM aggregate).
FROM summed_raw
)
SELECT * FROM summed_neat;
SELECT * FROM blocked_iowait_for_app_launches;
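
Note the units here: trace timestamps are in seconds, so the * 1000 factors turn both the summed block time and the launch span into milliseconds. A compact Python sketch of the same per-launch aggregation (the row shapes and variable names are illustrative):

def blocked_iowait_ms(blocking_durations, launch_span, ui_thread_tid):
  """Sums iowait blocking (seconds) inside [started, finished] for one UI thread, in ms."""
  started, finished = launch_span
  total_s = sum(
      unblock - block
      for block, unblock, pid in blocking_durations
      if pid == ui_thread_tid and block >= started and unblock <= finished)
  return total_s * 1000.0  # same scaling as blocked_iowait_duration_ms

# (block_timestamp, unblock_timestamp, unblock_pid) triples, in seconds.
blocks = [(17149.90, 17149.95, 14607), (17150.10, 17150.18, 14607), (17150.30, 17150.31, 999)]
print(blocked_iowait_ms(blocks, (17149.896822, 17150.919306), 14607))  # ~130.0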


@ -1,101 +0,0 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
DROP VIEW IF EXISTS sched_switch_iowaits_pre;
-- scan for the closest pair such that:
-- sched_block_reason pid=$PID iowait=1 ...
-- ...
-- sched_switch next_pid=$PID
CREATE VIEW sched_switch_iowaits_pre AS
SELECT MAX(sbr.id) AS blocked_id,
ss.id AS sched_switch_id,
pid, -- iow.pid
iowait, -- iowait=0 or iowait=1
caller,
sbr_f.timestamp AS blocked_timestamp,
ss_f.timestamp AS sched_switch_timestamp,
next_comm, -- name of next_pid
next_pid -- same as iow.pid
FROM sched_blocked_reasons AS sbr,
raw_ftrace_entries AS sbr_f,
sched_switches AS ss,
raw_ftrace_entries AS ss_f
WHERE sbr_f.id == sbr.id
AND ss_f.id == ss.id
AND sbr.pid == ss.next_pid
AND sbr.iowait = 1
AND sbr_f.timestamp < ss_f.timestamp -- ensures the 'closest' sched_blocked_reason is selected.
GROUP BY ss.id
;
DROP VIEW IF EXISTS sched_switch_iowaits;
CREATE VIEW sched_switch_iowaits AS
SELECT *, MIN(sched_switch_timestamp) AS ss_timestamp -- drop all of the 'too large' sched_switch entries except the closest one.
FROM sched_switch_iowaits_pre
GROUP BY blocked_id;
SELECT * FROM sched_switch_iowaits;
-- use a real table here instead of a view, otherwise SQLiteStudio segfaults for some reason.
DROP TABLE IF EXISTS blocking_durations;
CREATE TABLE blocking_durations AS
WITH
blocking_durations_raw AS (
SELECT MAX(ss.id) AS block_id,
ssf.timestamp AS block_timestamp,
iow.sched_switch_timestamp AS unblock_timestamp,
ss.prev_comm as block_prev_comm,
iow.next_comm AS unblock_next_comm,
ss.prev_state AS block_prev_state,
iow.sched_switch_id AS unblock_id,
iow.pid AS unblock_pid,
iow.caller AS unblock_caller
FROM sched_switches AS ss, -- this is the sched_switch that caused a block (in the future when it unblocks, the reason is iowait=1).
sched_switch_iowaits AS iow, -- this is the sched_switch that removes the block (it is now running again).
raw_ftrace_entries AS ssf
WHERE ssf.id = ss.id AND ss.prev_pid == iow.next_pid AND ssf.timestamp < iow.sched_switch_timestamp
GROUP BY unblock_timestamp
),
blocking_durations_tmp AS (
SELECT block_id,
unblock_timestamp,
block_timestamp,
block_prev_comm as comm,
block_prev_state as block_state,
unblock_id,
unblock_pid,
unblock_caller
FROM blocking_durations_raw
)
SELECT * FROM blocking_durations_tmp;-- ORDER BY block_id ASC;
--SELECT SUM(block_duration_ms) AS sum, * FROM blocking_durations GROUP BY unblock_pid ORDER BY sum DESC;
DROP INDEX IF EXISTS "blocking_durations_block_timestamp";
CREATE INDEX "blocking_durations_block_timestamp" ON blocking_durations (
block_timestamp COLLATE BINARY
);
DROP INDEX IF EXISTS "blocking_durations_unblock_timestamp";
CREATE INDEX "blocking_durations_unblock_timestamp" ON blocking_durations (
unblock_timestamp COLLATE BINARY
);
SELECT * FROM blocking_durations;
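
The two views above implement a closest-pair scan: for every sched_switch that puts a pid back on a cpu, take the most recent earlier sched_blocked_reason with iowait=1 for that pid, then keep only the nearest such switch per blocked event. A standalone Python sketch of the same pairing; the event tuples are illustrative:

def pair_iowait_blocks(blocked, switches):
  """blocked: (ts, pid) events with iowait=1; switches: (ts, next_pid) events."""
  pairs = []
  for b_ts, b_pid in blocked:
    # nearest later sched_switch that runs the same pid again
    later = [s_ts for s_ts, next_pid in switches if next_pid == b_pid and s_ts > b_ts]
    if later:
      pairs.append((b_pid, b_ts, min(later)))  # (pid, block_timestamp, unblock_timestamp)
  return pairs

blocked = [(14594.228442, 1398)]                       # sched_blocked_reason pid=1398 iowait=1
switches = [(14594.221816, 2007), (14594.228447, 1398)]
print(pair_iowait_blocks(blocked, switches))           # [(1398, 14594.228442, 14594.228447)]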


@ -1,42 +0,0 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
DROP VIEW IF EXISTS sched_switch_next_comm_pids;
CREATE VIEW IF NOT EXISTS sched_switch_next_comm_pids AS
-- TODO: switch to using sched_switches table.
WITH
sched_switchs AS (
SELECT * FROM raw_ftrace_entries WHERE function = 'sched_switch' AND function_args LIKE '% next_pid=%' AND function_args NOT LIKE '% next_comm=main %'
),
comm_and_pids_raws AS (
SELECT id,
SUBSTR(function_args, instr(function_args, "next_comm="), instr(function_args, "next_pid=") - instr(function_args, "next_comm=")) AS next_comm_raw,
SUBSTR(function_args, instr(function_args, "next_pid="), instr(function_args, "next_prio=") - instr(function_args, "next_pid=")) AS next_pid_raw
FROM sched_switchs
),
comm_and_pids AS (
SELECT id,
id AS raw_ftrace_entry_id,
TRIM(SUBSTR(next_pid_raw, 10)) AS next_pid, -- len("next_pid=") is 9, so the value starts at index 10
TRIM(SUBSTR(next_comm_raw, 11)) AS next_comm -- len("next_comm=") is 10, so the value starts at index 11
FROM comm_and_pids_raws
)
SELECT * from comm_and_pids;
SELECT * from sched_switch_next_comm_pids;
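
The SUBSTR/INSTR arithmetic above simply pulls the next_comm= and next_pid= values out of the flattened sched_switch argument string. For comparison, a regex-based sketch of the same extraction in Python (standalone, illustrative):

import re

_NEXT_RE = re.compile(r'next_comm=(?P<next_comm>.*) next_pid=(?P<next_pid>\d+) next_prio=')

def parse_next_comm_pid(function_args):
  """Returns (next_comm, next_pid) from a sched_switch argument string, or None."""
  m = _NEXT_RE.search(function_args)
  return (m.group('next_comm'), int(m.group('next_pid'))) if m else None

args = ('prev_comm=kworker/u16:18 prev_pid=18150 prev_prio=120 prev_state=D '
        '==> next_comm=kworker/u16:16 next_pid=23269 next_prio=120')
print(parse_next_comm_pid(args))  # ('kworker/u16:16', 23269)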


@ -1,30 +0,0 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
DROP VIEW IF EXISTS start_procs;
CREATE VIEW IF NOT EXISTS start_procs AS
WITH
start_procs_raw AS (
SELECT * from tracing_mark_write_split WHERE atrace_message LIKE 'Start proc: %'
),
start_procs_substr AS (
-- note: "12" is len("Start proc: ")+1. sqlite indices start at 1.
SELECT raw_ftrace_entry_id, atrace_pid, SUBSTR(atrace_message, 13) AS process_name FROM start_procs_raw
)
SELECT * from start_procs_substr;
SELECT * from start_procs;
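
Equivalently in Python, the process name is just the text after the 'Start proc: ' prefix (Python slicing is 0-based where SQLite SUBSTR is 1-based), for example:

msg = 'Start proc: com.google.android.dialer'   # an atrace_message from the sample trace below
prefix = 'Start proc: '
process_name = msg[len(prefix):] if msg.startswith(prefix) else None
print(process_name)  # com.google.android.dialer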


@ -1,88 +0,0 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-- note: These queries do comparisons based on raw_ftrace_entries.id by treating it as if it was equivalent to the temporal timestamp.
-- in practice, the ID of raw_ftrace_entries follows its order in the ftrace buffer [and, on the same cpu, that order matches timestamp order].
-- we can always resort raw_ftrace_entries to ensure id order matches timestamp order. We should rarely need to compare by timestamp directly.
-- accessing 'floats' is inferior as they are harder to index, and will result in slower queries.
--
-- Naming convention note: '_fid' corresponds to 'raw_ftrace_entry.id'.
DROP VIEW IF EXISTS start_process_ui_threads;
-- Map of started process names to their UI thread's TID (as returned by gettid).
CREATE VIEW IF NOT EXISTS start_process_ui_threads AS
WITH
start_proc_tids AS (
SELECT sp.raw_ftrace_entry_id AS start_proc_fid,
sp.atrace_pid AS atrace_pid,
sp.process_name AS process_name,
--MIN(nc.raw_ftrace_entry_id) as next_comm_fid,
nc.raw_ftrace_entry_id AS next_comm_fid,
nc.next_pid as next_pid,
nc.next_comm as next_comm,
SUBSTR(sp.process_name, -15) AS cut -- why -15? See TASK_COMM_LEN (16 bytes including the trailing NUL) in the kernel; sched_switch comm names are truncated to 15 visible characters.
FROM start_procs AS sp,
sched_switch_next_comm_pids AS nc
WHERE sp.process_name LIKE '%' || nc.next_comm -- kernel truncates the sched_switch::next_comm event, so we must match the prefix of the full name.
--WHERE SUBSTR(sp.process_name, -16) == nc.next_comm
--WHERE cut == nc.next_comm
),
start_proc_tids_filtered AS (
SELECT *
FROM start_proc_tids
WHERE next_comm_fid > start_proc_fid -- safeguard that avoids choosing "earlier" sched_switch before process was even started.
--ORDER BY start_proc_fid, next_comm_fid
),
start_proc_all_threads AS (
SELECT DISTINCT
start_proc_fid, -- this is the ftrace entry of the system server 'Start proc: $process_name'. only need this to join for timestamp.
process_name, -- this is the '$process_name' from the system server entry.
-- next up we have all the possible thread IDs, as parsed from sched_switch, that correspond most closely to the start proc.
next_pid AS ui_thread_tpid, -- sched_switch.next_pid. This can be any of the threads in that process, it's not necessarily the main UI thread yet.
next_comm,
MIN(next_comm_fid) AS next_comm_fid -- don't pick the 'later' next_comm_fid because it could correspond to another app start.
FROM start_proc_tids_filtered
GROUP BY start_proc_fid, ui_thread_tpid
),
activity_thread_mains AS (
SELECT * FROM tracing_mark_write_split WHERE atrace_message = 'ActivityThreadMain'
),
start_proc_ui_threads AS (
SELECT start_proc_fid,
process_name,
ui_thread_tpid,
next_comm,
next_comm_fid,
atm.raw_ftrace_entry_id as atm_fid,
atm.atrace_pid as atm_ui_thread_tid
FROM start_proc_all_threads AS spt,
activity_thread_mains AS atm
WHERE atm.atrace_pid == spt.ui_thread_tpid AND atm.raw_ftrace_entry_id > spt.start_proc_fid -- Ensure we ignore earlier ActivityThreadMains prior to their Start proc.
),
start_proc_ui_threads_filtered AS (
SELECT start_proc_fid,
process_name, -- e.g. 'com.android.settings'
--ui_thread_tpid,
--next_comm,
--next_comm_fid,
MIN(atm_fid) AS atm_fid,
atm_ui_thread_tid -- equivalent to gettid() for the process's UI thread.
FROM start_proc_ui_threads
GROUP BY start_proc_fid, atm_ui_thread_tid -- find the temporally closest ActivityThreadMain to a "Start proc: $process_name"
)
SELECT * FROM start_proc_ui_threads_filtered;
SELECT * FROM start_process_ui_threads;
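
The suffix match (process_name LIKE '%' || next_comm) is needed because the kernel truncates task comm names to 15 visible characters, so a newly started process appears in sched_switch as the tail of its full package name. A tiny standalone check of that matching rule, using names that occur in the sample trace later in this change:

TASK_COMM_LEN_VISIBLE = 15  # the kernel comm buffer is 16 bytes including the trailing NUL

def comm_matches_process(process_name, next_comm):
  """True if a (possibly truncated) sched_switch comm could belong to process_name."""
  return len(next_comm) <= TASK_COMM_LEN_VISIBLE and process_name.endswith(next_comm)

# 'com.google.android.dialer' shows up in sched_switch as '.android.dialer' (its last 15 chars).
assert comm_matches_process('com.google.android.dialer', '.android.dialer')
assert not comm_matches_process('com.android.settings', '.android.dialer')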


@ -1,53 +0,0 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
DROP TABLE IF EXISTS tracing_mark_write_split;
CREATE TABLE tracing_mark_write_split (
raw_ftrace_entry_id INT REFERENCES raw_ftrace_entries (id),
atrace_type CHAR(1), -- only null for the first 2 sync timers. usually 'B', 'C', 'E', ...
atrace_pid INT, -- only null for first 2 sync timers
atrace_message, -- usually null for type='E' etc.
atrace_count, -- usually non-null only for 'C'
UNIQUE(raw_ftrace_entry_id) -- drops redundant inserts into table
);
INSERT INTO tracing_mark_write_split
WITH
pivoted AS (
SELECT tx.predictorset_id,
--ty.predictorset_id,
--tz.predictorset_id,
--tzz.predictorset_id,
tx.predictor_name AS atrace_type,
CAST(ty.predictor_name AS integer) AS atrace_pid,
tz.predictor_name AS atrace_message,
CAST(tzz.predictor_name AS integer) AS atrace_count
FROM (SELECT * from tracing_mark_write_split_array WHERE gen = 1) AS tx
LEFT JOIN
(SELECT * FROM tracing_mark_write_split_array WHERE gen = 2) AS ty
ON tx.predictorset_id = ty.predictorset_id
LEFT JOIN
(SELECT * FROM tracing_mark_write_split_array WHERE gen = 3) AS tz
ON tx.predictorset_id = tz.predictorset_id
LEFT JOIN
(SELECT * FROM tracing_mark_write_split_array WHERE gen = 4) AS tzz
ON tx.predictorset_id = tzz.predictorset_id
)
SELECT * from pivoted ORDER BY predictorset_id;-- LIMIT 100;
SELECT * FROM tracing_mark_write_split;
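
The chain of LEFT JOINs above pivots the per-generation rows back into one row per ftrace entry: gen 1 becomes atrace_type, gen 2 the pid, gen 3 the message, and gen 4 the counter (NULL when a slice has fewer fields). A dictionary-based sketch of the same pivot, standalone and illustrative only:

def pivot_split_rows(split_rows):
  """split_rows: (predictorset_id, predictor_name, gen) tuples -> one dict per entry."""
  by_id = {}
  for entry_id, name, gen in split_rows:
    by_id.setdefault(entry_id, {})[gen] = name
  for entry_id, gens in sorted(by_id.items()):
    yield {
        'raw_ftrace_entry_id': entry_id,
        'atrace_type': gens.get(1),
        'atrace_pid': int(gens[2]) if 2 in gens else None,
        'atrace_message': gens.get(3),
        'atrace_count': int(gens[4]) if 4 in gens else None,
    }

rows = [(10, 'S', 1), (10, '1368', 2), (10, 'launching: com.google.android.dialer', 3), (10, '0', 4)]
print(next(pivot_split_rows(rows)))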


@ -1,28 +0,0 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
.headers on
.mode quote
SELECT * FROM blocked_iowait_for_app_launches;
/*
Example output (.mode quote, roughly CSV):
'blocked_iowait_duration_ms','process_name','launching_duration_ms','launching_started_timestamp_ms','launching_finished_timestamp_ms'
125.33199995596078224,'com.android.settings',1022.4840000009862706,17149896.822000000626,17150919.305999998003
*/


@ -1,79 +0,0 @@
#!/bin/bash
# Copyright (C) 2019 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
DIR="$( cd "$(dirname "$0")" ; pwd -P )"
if [[ $# -lt 1 ]]; then
  echo "Usage: $0 <db-file>" >&2
  exit 1
fi
DB_TARGET=$1
if ! [[ -f $DB_TARGET ]]; then
echo "ERROR: File '$DB_TARGET' does not exist." >&2
exit 1
fi
exec_sql_file() {
local filename="$1"
if ! [[ -f $filename ]]; then
echo "ERROR: Can't exec SQL file, '$filename' does not exist." >&2
return 1
fi
sqlite3 "$DB_TARGET" < "$DIR"/"$filename"
}
exec_sql_file_quiet() {
exec_sql_file "$@" > /dev/null
}
# Some views/tables need other views already created, so order does matter.
# x -> y means x depends on y.
# View: tracing_mark_writes
# Table: tracing_mark_write_split_array -> tracing_mark_writes
exec_sql_file_quiet "queries_all.sql"
# Table: tracing_mark_write_split -> tracing_mark_write_split_array
exec_sql_file_quiet "queries_mark_write_join.sql"
# View: start_procs -> tracing_mark_write_split
exec_sql_file_quiet "queries_get_procs.sql"
# View: sched_switch_next_comm_pids
exec_sql_file_quiet "queries_get_comm_and_pids.sql"
# View: start_process_ui_threads -> start_procs, sched_switch_next_comm_pids
exec_sql_file_quiet "queries_get_ui_threads.sql"
# View: launch_durations_named -> tracing_mark_write_split
exec_sql_file_quiet "queries_app_launch_spans_with_name.sql"
# View: sched_switch_iowaits_pre
# View: sched_switch_iowaits -> sched_switch_iowaits_pre
# Table: blocking_durations -> sched_switch_iowaits
exec_sql_file_quiet "queries_find_sched_switch_unblocked.sql"
# View: blocked_iowait_for_app_launches -> launch_durations_named, blocking_durations
exec_sql_file_quiet "queries_block_launch.sql"
#####
#####
#####
# Final queries
exec_sql_file "queries_pretty_print_block_launch.sql"


@ -1,518 +0,0 @@
# tracer: nop
#
# entries-in-buffer/entries-written: 411983/411983 #P:8
#
# _-----=> irqs-off
# / _----=> need-resched
# | / _---=> hardirq/softirq
# || / _--=> preempt-depth
# ||| / delay
# TASK-PID TGID CPU# |||| TIMESTAMP FUNCTION
# | | | | |||| | |
<...>-14603 (-----) [000] ...1 14592.893157: tracing_mark_write: trace_event_clock_sync: parent_ts=14592.892578
<...>-14603 (-----) [000] ...1 14592.893172: tracing_mark_write: trace_event_clock_sync: realtime_ts=1557129597951
<...>-18150 (-----) [004] d..2 14594.182110: sched_switch: prev_comm=kworker/u16:18 prev_pid=18150 prev_prio=120 prev_state=D ==> next_comm=kworker/u16:16 next_pid=23269 next_prio=120
kworker/u16:16-23269 (23269) [004] d.h3 14594.182228: sched_blocked_reason: pid=18150 iowait=0 caller=a6xx_oob_set+0x194/0x3dc
kworker/u16:16-23269 (23269) [004] d..2 14594.182248: sched_switch: prev_comm=kworker/u16:16 prev_pid=23269 prev_prio=120 prev_state=D ==> next_comm=kworker/u16:18 next_pid=18150 next_prio=120
<...>-18150 (-----) [004] d..2 14594.182312: sched_switch: prev_comm=kworker/u16:18 prev_pid=18150 prev_prio=120 prev_state=D ==> next_comm=swapper/4 next_pid=0 next_prio=120
<...>-18150 (-----) [004] d..2 14594.182488: sched_switch: prev_comm=kworker/u16:18 prev_pid=18150 prev_prio=120 prev_state=D ==> next_comm=swapper/4 next_pid=0 next_prio=120
kworker/u16:16-23269 (23269) [005] d..2 14594.182610: sched_switch: prev_comm=kworker/u16:16 prev_pid=23269 prev_prio=120 prev_state=S ==> next_comm=swapper/5 next_pid=0 next_prio=120
<...>-18150 (-----) [004] d..2 14594.182626: sched_switch: prev_comm=kworker/u16:18 prev_pid=18150 prev_prio=120 prev_state=D ==> next_comm=swapper/4 next_pid=0 next_prio=120
<...>-18150 (-----) [004] d..2 14594.182755: sched_switch: prev_comm=kworker/u16:18 prev_pid=18150 prev_prio=120 prev_state=D ==> next_comm=swapper/4 next_pid=0 next_prio=120
<...>-18150 (-----) [004] d..2 14594.182975: sched_switch: prev_comm=kworker/u16:18 prev_pid=18150 prev_prio=120 prev_state=D ==> next_comm=swapper/4 next_pid=0 next_prio=120
<...>-18150 (-----) [004] d..2 14594.183209: sched_switch: prev_comm=kworker/u16:18 prev_pid=18150 prev_prio=120 prev_state=D ==> next_comm=swapper/4 next_pid=0 next_prio=120
<...>-18150 (-----) [004] d..2 14594.183371: sched_switch: prev_comm=kworker/u16:18 prev_pid=18150 prev_prio=120 prev_state=D ==> next_comm=swapper/4 next_pid=0 next_prio=120
<...>-18150 (-----) [004] d..2 14594.184286: sched_switch: prev_comm=kworker/u16:18 prev_pid=18150 prev_prio=120 prev_state=S ==> next_comm=swapper/4 next_pid=0 next_prio=120
kworker/u16:16-23269 (23269) [005] d..2 14594.184495: sched_switch: prev_comm=kworker/u16:16 prev_pid=23269 prev_prio=120 prev_state=S ==> next_comm=swapper/5 next_pid=0 next_prio=120
<...>-18150 (-----) [004] d..2 14594.184498: sched_switch: prev_comm=kworker/u16:18 prev_pid=18150 prev_prio=120 prev_state=S ==> next_comm=swapper/4 next_pid=0 next_prio=120
ksoftirqd/4-47 ( 47) [004] d..2 14594.185678: sched_switch: prev_comm=ksoftirqd/4 prev_pid=47 prev_prio=120 prev_state=S ==> next_comm=swapper/4 next_pid=0 next_prio=120
kworker/6:2-10610 (10610) [006] d..2 14594.186012: sched_switch: prev_comm=kworker/6:2 prev_pid=10610 prev_prio=120 prev_state=S ==> next_comm=swapper/6 next_pid=0 next_prio=120
<...>-656 (-----) [001] .... 14594.219464: binder_set_priority: proc=625 thread=656 old=110 => new=120 desired=120
<...>-1803 (-----) [000] d..2 14594.219595: sched_switch: prev_comm=ndroid.systemui prev_pid=1803 prev_prio=120 prev_state=S ==> next_comm=swapper/0 next_pid=0 next_prio=120
<...>-3359 (-----) [001] ...1 14594.219856: tracing_mark_write: S|1368|launching: com.google.android.dialer|0
<...>-3359 (-----) [001] ...1 14594.219863: tracing_mark_write: B|1368|MetricsLogger:launchObserverNotifyActivityLaunched
<...>-3359 (-----) [001] ...1 14594.219869: tracing_mark_write: B|1368|MetricsLogger:convertActivityRecordToProto
<...>-1398 (-----) [006] ...1 14594.220160: tracing_mark_write: B|1368|updateInputWindows
<...>-3359 (-----) [001] .... 14594.220230: binder_set_priority: proc=1368 thread=3359 old=110 => new=120 desired=120
<...>-1398 (-----) [006] ...1 14594.220588: tracing_mark_write: B|1368|android.os.Handler: com.android.server.wm.AppWindowToken$1
<...>-1398 (-----) [006] ...1 14594.220722: tracing_mark_write: B|1368|ResourcesManager#getResources
<...>-1052 (-----) [002] d..2 14594.220884: sched_switch: prev_comm=statsd.writer prev_pid=1052 prev_prio=120 prev_state=S ==> next_comm=UiThreadHelper next_pid=2045 next_prio=118
<...>-1398 (-----) [006] ...1 14594.220926: tracing_mark_write: B|1368|Theme::ApplyStyle
<...>-1398 (-----) [006] ...1 14594.220929: tracing_mark_write: B|1368|AssetManager::GetBag
<...>-2007 (-----) [007] ...1 14594.220996: tracing_mark_write: B|2007|Choreographer#doFrame
<...>-2007 (-----) [007] ...1 14594.221005: tracing_mark_write: B|2007|animation
<...>-1398 (-----) [006] ...1 14594.221015: tracing_mark_write: B|1368|ResourcesManager#getResources
<...>-2045 (-----) [002] ...2 14594.221035: binder_set_priority: proc=1368 thread=1903 old=120 => new=118 desired=118
<...>-2045 (-----) [002] d..2 14594.221065: sched_switch: prev_comm=UiThreadHelper prev_pid=2045 prev_prio=118 prev_state=S ==> next_comm=Binder:1368_4 next_pid=1903 next_prio=118
<...>-1398 (-----) [006] ...1 14594.221080: tracing_mark_write: B|1368|AssetManager::SetApkAssets
<...>-2007 (-----) [007] ...1 14594.221110: tracing_mark_write: B|2007|traversal
<...>-656 (-----) [000] ...1 14594.221137: tracing_mark_write: B|625|requestNextVsync
<...>-656 (-----) [000] ...1 14594.221141: tracing_mark_write: B|625|resetIdleTimer
<...>-2007 (-----) [007] ...1 14594.221146: tracing_mark_write: B|2007|draw
<...>-2007 (-----) [007] ...1 14594.221160: tracing_mark_write: B|2007|Record View#draw()
<...>-660 (-----) [005] d..2 14594.221285: sched_switch: prev_comm=app prev_pid=660 prev_prio=97 prev_state=S ==> next_comm=RenderThread next_pid=2738 next_prio=110
<...>-658 (-----) [004] d..2 14594.221327: sched_switch: prev_comm=DispSync prev_pid=658 prev_prio=97 prev_state=S ==> next_comm=android.display next_pid=1397 next_prio=117
<...>-2738 (-----) [005] ...1 14594.221342: tracing_mark_write: B|2007|notifyFramePending
<...>-2738 (-----) [005] ...1 14594.221362: tracing_mark_write: B|2007|DrawFrame
<...>-2738 (-----) [005] ...1 14594.221369: tracing_mark_write: B|2007|query
<...>-2007 (-----) [007] d..2 14594.221369: sched_switch: prev_comm=s.nexuslauncher prev_pid=2007 prev_prio=110 prev_state=S ==> next_comm=swapper/7 next_pid=0 next_prio=120
<...>-1903 (-----) [002] .... 14594.221397: binder_set_priority: proc=1368 thread=1903 old=118 => new=120 desired=120
<...>-2738 (-----) [005] ...2 14594.221400: binder_set_priority: proc=625 thread=656 old=120 => new=110 desired=110
<...>-2738 (-----) [005] d..2 14594.221430: sched_switch: prev_comm=RenderThread prev_pid=2738 prev_prio=110 prev_state=S ==> next_comm=Binder:625_1 next_pid=656 next_prio=110
<...>-1368 (-----) [003] ...1 14594.221431: tracing_mark_write: B|1368|Lock contention on GC thread flip lock (owner tid: 0)
<...>-656 (-----) [005] ...1 14594.221460: tracing_mark_write: B|625|query
<...>-656 (-----) [005] .... 14594.221528: binder_set_priority: proc=625 thread=656 old=110 => new=120 desired=120
<...>-2738 (-----) [007] ...1 14594.221552: tracing_mark_write: B|2007|query
<...>-2738 (-----) [007] ...2 14594.221563: binder_set_priority: proc=625 thread=656 old=120 => new=110 desired=110
<...>-2738 (-----) [007] d..2 14594.221600: sched_switch: prev_comm=RenderThread prev_pid=2738 prev_prio=110 prev_state=S ==> next_comm=Binder:625_1 next_pid=656 next_prio=110
<...>-1368 (-----) [003] d..2 14594.221623: sched_switch: prev_comm=system_server prev_pid=1368 prev_prio=118 prev_state=S ==> next_comm=swapper/3 next_pid=0 next_prio=120
<...>-656 (-----) [007] ...1 14594.221628: tracing_mark_write: B|625|query
<...>-23031 (-----) [001] d..2 14594.221643: sched_switch: prev_comm=UiAutomation prev_pid=23031 prev_prio=120 prev_state=S ==> next_comm=swapper/1 next_pid=0 next_prio=120
<...>-2738 (-----) [007] ...1 14594.221664: tracing_mark_write: B|2007|syncFrameState
<...>-2738 (-----) [007] ...1 14594.221697: tracing_mark_write: B|2007|prepareTree
<...>-23008 (-----) [005] d..2 14594.221706: sched_switch: prev_comm=hub.uiautomator prev_pid=23008 prev_prio=120 prev_state=S ==> next_comm=swapper/5 next_pid=0 next_prio=120
<...>-656 (-----) [000] .... 14594.221737: binder_set_priority: proc=625 thread=656 old=110 => new=120 desired=120
<...>-1803 (-----) [003] d..2 14594.221747: sched_switch: prev_comm=ndroid.systemui prev_pid=1803 prev_prio=120 prev_state=S ==> next_comm=swapper/3 next_pid=0 next_prio=120
<...>-1397 (-----) [004] d..2 14594.221806: sched_switch: prev_comm=android.display prev_pid=1397 prev_prio=117 prev_state=S ==> next_comm=Binder:2007_A next_pid=4180 next_prio=120
<...>-1398 (-----) [006] d..2 14594.221816: sched_switch: prev_comm=android.anim prev_pid=1398 prev_prio=110 prev_state=R ==> next_comm=s.nexuslauncher next_pid=2007 next_prio=110
<...>-2738 (-----) [007] ...1 14594.221824: tracing_mark_write: B|2007|query
<...>-2738 (-----) [007] ...1 14594.221830: tracing_mark_write: B|2007|query
<...>-2738 (-----) [007] ...1 14594.221834: tracing_mark_write: B|2007|query
<...>-2738 (-----) [007] ...1 14594.221841: tracing_mark_write: B|2007|query
<...>-2738 (-----) [007] ...1 14594.221843: tracing_mark_write: B|2007|query
<...>-2738 (-----) [007] ...1 14594.221846: tracing_mark_write: B|2007|query
<...>-2738 (-----) [007] ...1 14594.221850: tracing_mark_write: B|2007|dequeueBuffer
<...>-2738 (-----) [007] ...2 14594.221864: binder_set_priority: proc=625 thread=656 old=120 => new=110 desired=110
<...>-2738 (-----) [007] d..2 14594.221985: sched_switch: prev_comm=RenderThread prev_pid=2738 prev_prio=110 prev_state=R+ ==> next_comm=crtc_event:97 next_pid=303 next_prio=83
<...>-2007 (-----) [006] ...1 14594.221989: tracing_mark_write: B|2007|topResumedActivityChangeItem
<...>-303 (-----) [007] d..2 14594.222016: sched_switch: prev_comm=crtc_event:97 prev_pid=303 prev_prio=83 prev_state=S ==> next_comm=rcu_preempt next_pid=7 next_prio=120
rcu_preempt-7 ( 7) [007] d..2 14594.222035: sched_switch: prev_comm=rcu_preempt prev_pid=7 prev_prio=120 prev_state=S ==> next_comm=RenderThread next_pid=2738 next_prio=110
migration/4-46 ( 46) [004] d..2 14594.222037: sched_switch: prev_comm=migration/4 prev_pid=46 prev_prio=0 prev_state=S ==> next_comm=Binder:625_1 next_pid=656 next_prio=110
<...>-2738 (-----) [007] d..2 14594.222039: sched_switch: prev_comm=RenderThread prev_pid=2738 prev_prio=110 prev_state=S ==> next_comm=kworker/u16:18 next_pid=18150 next_prio=120
<...>-656 (-----) [004] ...1 14594.222100: tracing_mark_write: B|625|dequeueBuffer
<...>-656 (-----) [004] ...1 14594.222114: tracing_mark_write: B|625|com.google.android.apps.nexuslauncher/com.google.android.apps.nexuslauncher.NexusLauncherActivity#1: 2
<...>-2007 (-----) [006] ...2 14594.222131: binder_set_priority: proc=1368 thread=1903 old=120 => new=110 desired=110
<...>-2007 (-----) [006] d..2 14594.222143: sched_switch: prev_comm=s.nexuslauncher prev_pid=2007 prev_prio=110 prev_state=S ==> next_comm=UiThreadHelper next_pid=2045 next_prio=118
<...>-2613 (-----) [001] d..2 14594.222158: sched_switch: prev_comm=ogle.android.as prev_pid=2613 prev_prio=120 prev_state=S ==> next_comm=swapper/1 next_pid=0 next_prio=120
<...>-18150 (-----) [007] d..2 14594.222193: sched_switch: prev_comm=kworker/u16:18 prev_pid=18150 prev_prio=120 prev_state=S ==> next_comm=swapper/7 next_pid=0 next_prio=120
<...>-656 (-----) [004] .... 14594.222220: binder_set_priority: proc=625 thread=656 old=110 => new=120 desired=120
<...>-2738 (-----) [007] ...1 14594.222267: tracing_mark_write: B|2007|HWC release fence 36027 has signaled
<...>-656 (-----) [007] ...1 14594.223842: tracing_mark_write: B|625|queueBuffer
<...>-656 (-----) [007] ...1 14594.223845: tracing_mark_write: B|625|com.google.android.apps.nexuslauncher/com.google.android.apps.nexuslauncher.NexusLauncherActivity#1: 2
<...>-656 (-----) [007] ...1 14594.223871: tracing_mark_write: B|625|requestNextVsync
<...>-656 (-----) [007] ...1 14594.223873: tracing_mark_write: B|625|resetIdleTimer
<...>-656 (-----) [007] ...1 14594.223881: tracing_mark_write: B|625|addAndGetFrameTimestamps
<...>-1395 (-----) [001] d..2 14594.223909: sched_switch: prev_comm=android.ui prev_pid=1395 prev_prio=118 prev_state=S ==> next_comm=swapper/1 next_pid=0 next_prio=120
<...>-2738 (-----) [007] ...1 14594.223959: tracing_mark_write: B|2007|Trace GPU completion fence 36027
<...>-11799 (-----) [006] ...1 14594.224006: tracing_mark_write: B|2007|waiting for GPU completion 36027
<...>-11799 (-----) [006] ...1 14594.224009: tracing_mark_write: B|2007|waitForever
<...>-2613 (-----) [004] d..2 14594.224014: sched_switch: prev_comm=ogle.android.as prev_pid=2613 prev_prio=120 prev_state=S ==> next_comm=Binder:1803_6 next_pid=2173 next_prio=120
<...>-11799 (-----) [006] d..1 14594.224014: fence_enable_signal: driver=kgsl-timeline timeline=kgsl-3d0_13-s.nexuslauncher(200 context=27 seqno=78002
<...>-11799 (-----) [006] d..2 14594.224021: sched_switch: prev_comm=GPU completion prev_pid=11799 prev_prio=110 prev_state=S ==> next_comm=rcuop/6 next_pid=68 next_prio=120
rcuop/6-68 ( 68) [006] d..2 14594.224044: sched_switch: prev_comm=rcuop/6 prev_pid=68 prev_prio=120 prev_state=S ==> next_comm=swapper/6 next_pid=0 next_prio=120
<...>-259 (-----) [006] d..2 14594.224132: sched_switch: prev_comm=kgsl_worker_thr prev_pid=259 prev_prio=97 prev_state=S ==> next_comm=Binder:2007_A next_pid=4180 next_prio=120
<...>-3206 (-----) [001] d..2 14594.224167: sched_switch: prev_comm=aiai-vc-0 prev_pid=3206 prev_prio=139 prev_state=R ==> next_comm=ndroid.systemui next_pid=1803 next_prio=120
lowpool[847]-14589 ( 2446) [005] d..1 14594.224300: mm_filemap_delete_from_page_cache: dev 0:1 ino 3d0034 page=000000008247d586 pfn=676904 ofs=0
<...>-1803 (-----) [001] d..2 14594.224302: sched_switch: prev_comm=ndroid.systemui prev_pid=1803 prev_prio=120 prev_state=S ==> next_comm=aiai-vc-0 next_pid=3206 next_prio=139
<...>-3206 (-----) [001] d..2 14594.224433: sched_switch: prev_comm=aiai-vc-0 prev_pid=3206 prev_prio=139 prev_state=S ==> next_comm=swapper/1 next_pid=0 next_prio=120
<...>-1903 (-----) [003] ...1 14594.224490: tracing_mark_write: B|1368|dispatchingStartProcess:com.google.android.dialer
<...>-1903 (-----) [003] ...1 14594.224659: tracing_mark_write: B|1368|wmLayout
<...>-1903 (-----) [003] ...1 14594.224666: tracing_mark_write: B|1368|performSurfacePlacement
<...>-1903 (-----) [003] ...1 14594.224683: tracing_mark_write: B|1368|applySurfaceChanges
<...>-1903 (-----) [003] ...1 14594.224688: tracing_mark_write: B|1368|openSurfaceTransaction
<...>-2738 (-----) [007] ...1 14594.224711: tracing_mark_write: B|2007|query
<...>-1903 (-----) [003] ...1 14594.224714: tracing_mark_write: B|1368|performLayout
<...>-2738 (-----) [007] ...1 14594.224714: tracing_mark_write: B|2007|query
<...>-1903 (-----) [003] ...1 14594.224723: tracing_mark_write: B|1368|applyPostLayoutPolicy
<...>-2738 (-----) [007] d..2 14594.224752: sched_switch: prev_comm=RenderThread prev_pid=2738 prev_prio=110 prev_state=S ==> next_comm=Binder:625_1 next_pid=656 next_prio=110
<...>-656 (-----) [007] .... 14594.224766: binder_set_priority: proc=625 thread=656 old=110 => new=120 desired=120
<...>-1398 (-----) [002] ...1 14594.224801: tracing_mark_write: B|1368|Theme::ApplyStyle
<...>-1398 (-----) [002] ...1 14594.224805: tracing_mark_write: B|1368|AssetManager::GetBag
<...>-1398 (-----) [002] ...1 14594.224820: tracing_mark_write: B|1368|AssetManager::GetBag
<...>-1398 (-----) [002] ...1 14594.224826: tracing_mark_write: B|1368|AssetManager::GetBag
<...>-1398 (-----) [002] ...1 14594.224833: tracing_mark_write: B|1368|AssetManager::GetBag
<...>-1398 (-----) [002] ...1 14594.224838: tracing_mark_write: B|1368|AssetManager::GetBag
<...>-1398 (-----) [002] ...1 14594.224846: tracing_mark_write: B|1368|AssetManager::GetBag
<...>-1398 (-----) [002] ...1 14594.224853: tracing_mark_write: B|1368|AssetManager::GetBag
<...>-1398 (-----) [002] ...1 14594.224859: tracing_mark_write: B|1368|AssetManager::GetBag
<...>-1398 (-----) [002] ...1 14594.224864: tracing_mark_write: B|1368|AssetManager::GetBag
<...>-18150 (-----) [006] d..2 14594.228407: sched_switch: prev_comm=kworker/u16:18 prev_pid=18150 prev_prio=120 prev_state=R+ ==> next_comm=mmc-cmdqd/0 next_pid=440 next_prio=98
<...>-2738 (-----) [007] d..2 14594.228411: sched_switch: prev_comm=RenderThread prev_pid=2738 prev_prio=110 prev_state=R+ ==> next_comm=kworker/7:0H next_pid=76 next_prio=100
<...>-1409 (-----) [004] ...1 14594.228417: tracing_mark_write: B|1368|Start proc: com.google.android.dialer
<...>-440 (-----) [006] d..2 14594.228418: sched_switch: prev_comm=mmc-cmdqd/0 prev_pid=440 prev_prio=98 prev_state=D ==> next_comm=kworker/u16:18 next_pid=18150 next_prio=120
<...>-76 (-----) [007] d..2 14594.228430: sched_switch: prev_comm=kworker/7:0H prev_pid=76 prev_prio=100 prev_state=R+ ==> next_comm=mmc-cmdqd/0 next_pid=440 next_prio=98
<...>-440 (-----) [007] d..2 14594.228434: sched_switch: prev_comm=mmc-cmdqd/0 prev_pid=440 prev_prio=98 prev_state=D ==> next_comm=kworker/7:0H next_pid=76 next_prio=100
<...>-18150 (-----) [006] d..3 14594.228442: sched_blocked_reason: pid=1398 iowait=1 caller=wait_on_page_bit_common+0x2a8/0x5f8
<...>-76 (-----) [007] d..2 14594.228442: sched_switch: prev_comm=kworker/7:0H prev_pid=76 prev_prio=100 prev_state=S ==> next_comm=RenderThread next_pid=2738 next_prio=110
<...>-2738 (-----) [007] ...2 14594.228446: binder_set_priority: proc=625 thread=656 old=120 => new=110 desired=110
<...>-18150 (-----) [006] d..2 14594.228447: sched_switch: prev_comm=kworker/u16:18 prev_pid=18150 prev_prio=120 prev_state=R+ ==> next_comm=android.anim next_pid=1398 next_prio=110
<...>-2738 (-----) [007] d..2 14594.228479: sched_switch: prev_comm=RenderThread prev_pid=2738 prev_prio=110 prev_state=S ==> next_comm=Binder:625_1 next_pid=656 next_prio=110
<...>-1409 (-----) [004] d..2 14594.228499: sched_switch: prev_comm=ActivityManager prev_pid=1409 prev_prio=118 prev_state=D ==> next_comm=Binder:965_2 next_pid=1041 next_prio=120
<...>-625 (-----) [003] ...1 14594.229271: tracing_mark_write: B|625|handleTransaction
<...>-1773 (-----) [004] .... 14594.229285: binder_set_priority: proc=625 thread=1773 old=110 => new=120 desired=120
<...>-440 (-----) [007] d..2 14594.229301: sched_switch: prev_comm=mmc-cmdqd/0 prev_pid=440 prev_prio=98 prev_state=D ==> next_comm=RenderThread next_pid=2738 next_prio=110
<...>-2738 (-----) [007] ...1 14594.229318: tracing_mark_write: B|2007|HWC release fence 36028 has signaled
<...>-2738 (-----) [007] ...1 14594.229331: tracing_mark_write: B|2007|query
<...>-2738 (-----) [007] ...1 14594.229337: tracing_mark_write: B|2007|eglBeginFrame
<...>-2738 (-----) [007] ...1 14594.229352: tracing_mark_write: B|2007|query
<...>-2738 (-----) [007] ...1 14594.229354: tracing_mark_write: B|2007|query
<...>-791 (-----) [000] d..2 14594.229357: sched_switch: prev_comm=main prev_pid=791 prev_prio=120 prev_state=S ==> next_comm=swapper/0 next_pid=0 next_prio=120
<...>-625 (-----) [003] ...1 14594.229440: tracing_mark_write: B|625|doTransaction
<...>-13916 (-----) [002] d..2 14594.229482: sched_switch: prev_comm=HeapTaskDaemon prev_pid=13916 prev_prio=124 prev_state=D|K ==> next_comm=swapper/2 next_pid=0 next_prio=120
<...>-13917 (-----) [001] d..2 14594.229492: sched_blocked_reason: pid=13916 iowait=0 caller=__rwsem_down_write_failed_common+0x3e8/0x754
<...>-625 (-----) [003] ...1 14594.229492: tracing_mark_write: B|625|doTransaction
<...>-625 (-----) [003] ...1 14594.229507: tracing_mark_write: B|625|doTransaction
<...>-13917 (-----) [001] d..2 14594.229523: sched_switch: prev_comm=ReferenceQueueD prev_pid=13917 prev_prio=124 prev_state=D ==> next_comm=swapper/1 next_pid=0 next_prio=120
<...>-13916 (-----) [002] d..2 14594.229535: sched_blocked_reason: pid=13917 iowait=0 caller=do_page_fault+0x550/0x5fc
<...>-625 (-----) [003] ...1 14594.229538: tracing_mark_write: B|625|doTransaction
<...>-2738 (-----) [007] ...1 14594.229543: tracing_mark_write: B|2007|flush commands
<...>-13916 (-----) [002] .... 14594.229562: sched_process_exit: comm=HeapTaskDaemon pid=13916 prio=124
<...>-625 (-----) [003] ...1 14594.229567: tracing_mark_write: B|625|doTransaction
<...>-625 (-----) [003] ...1 14594.229588: tracing_mark_write: B|625|doTransaction
<...>-625 (-----) [003] ...1 14594.229628: tracing_mark_write: B|625|doTransaction
<...>-625 (-----) [003] ...1 14594.229652: tracing_mark_write: B|625|doTransaction
<...>-13916 (-----) [002] d..2 14594.229676: sched_switch: prev_comm=HeapTaskDaemon prev_pid=13916 prev_prio=124 prev_state=x ==> next_comm=swapper/2 next_pid=0 next_prio=120
<...>-625 (-----) [003] ...1 14594.229676: tracing_mark_write: B|625|doTransaction
<...>-2007 (-----) [006] d..2 14594.229688: sched_switch: prev_comm=s.nexuslauncher prev_pid=2007 prev_prio=110 prev_state=S ==> next_comm=swapper/6 next_pid=0 next_prio=120
<...>-625 (-----) [003] ...1 14594.229703: tracing_mark_write: B|625|doTransaction
<...>-625 (-----) [003] ...1 14594.229725: tracing_mark_write: B|625|doTransaction
<...>-625 (-----) [003] ...1 14594.229750: tracing_mark_write: B|625|doTransaction
<...>-625 (-----) [003] ...1 14594.229772: tracing_mark_write: B|625|doTransaction
<...>-625 (-----) [003] ...1 14594.229792: tracing_mark_write: B|625|doTransaction
<...>-791 (-----) [000] d..2 14594.229811: sched_switch: prev_comm=main prev_pid=791 prev_prio=120 prev_state=S ==> next_comm=swapper/0 next_pid=0 next_prio=120
<...>-625 (-----) [003] ...1 14594.229824: tracing_mark_write: B|625|doTransaction
<...>-2738 (-----) [007] ...1 14594.229827: tracing_mark_write: B|2007|eglSwapBuffersWithDamageKHR
<...>-13917 (-----) [001] d..2 14594.229836: sched_switch: prev_comm=ReferenceQueueD prev_pid=13917 prev_prio=124 prev_state=D ==> next_comm=swapper/1 next_pid=0 next_prio=120
<...>-2738 (-----) [007] ...1 14594.229837: tracing_mark_write: B|2007|setSurfaceDamage
<...>-625 (-----) [003] ...1 14594.229850: tracing_mark_write: B|625|doTransaction
<...>-13918 (-----) [002] d..2 14594.229856: sched_blocked_reason: pid=13917 iowait=0 caller=SyS_madvise+0xd34/0xd3c
<...>-5281 (-----) [001] d..2 14594.229932: sched_switch: prev_comm=writer prev_pid=5281 prev_prio=96 prev_state=D ==> next_comm=swapper/1 next_pid=0 next_prio=120
<...>-89 (-----) [006] d..2 14594.229951: sched_switch: prev_comm=lpass_smem_glin prev_pid=89 prev_prio=98 prev_state=S ==> next_comm=swapper/6 next_pid=0 next_prio=120
<...>-625 (-----) [003] ...1 14594.229982: tracing_mark_write: B|625|handleMessageInvalidate
<...>-625 (-----) [003] ...1 14594.229984: tracing_mark_write: B|625|handlePageFlip
<...>-625 (-----) [003] ...1 14594.230013: tracing_mark_write: B|625|latchBuffer
<...>-13917 (-----) [000] .... 14594.230015: sched_process_exit: comm=ReferenceQueueD pid=13917 prio=124
<...>-625 (-----) [003] ...1 14594.230020: tracing_mark_write: B|625|query
<...>-625 (-----) [003] ...1 14594.230028: tracing_mark_write: B|625|updateTexImage
<...>-625 (-----) [003] ...1 14594.230035: tracing_mark_write: B|625|acquireBuffer
<...>-625 (-----) [003] ...1 14594.230044: tracing_mark_write: B|625|com.google.android.apps.nexuslauncher/com.google.android.apps.nexuslauncher.NexusLauncherActivity#1: 2
<...>-2738 (-----) [007] d..2 14594.230057: sched_switch: prev_comm=RenderThread prev_pid=2738 prev_prio=110 prev_state=D ==> next_comm=smem_native_lpa next_pid=88 next_prio=120
<...>-14607 (-----) [000] d..2 14594.259609: sched_blocked_reason: pid=14624 iowait=0 caller=__rwsem_down_write_failed_common+0x3e8/0x754
<...>-2738 (-----) [005] d..2 14594.259620: sched_switch: prev_comm=RenderThread prev_pid=2738 prev_prio=120 prev_state=S ==> next_comm=Binder:625_4 next_pid=1773 next_prio=120
<...>-1773 (-----) [005] ...1 14594.259649: tracing_mark_write: B|625|query
<...>-2738 (-----) [005] ...1 14594.259714: tracing_mark_write: B|2007|query
<...>-2738 (-----) [005] d..2 14594.259743: sched_switch: prev_comm=RenderThread prev_pid=2738 prev_prio=120 prev_state=S ==> next_comm=Binder:625_4 next_pid=1773 next_prio=120
<...>-1773 (-----) [005] ...1 14594.259757: tracing_mark_write: B|625|query
<...>-2738 (-----) [005] ...1 14594.259810: tracing_mark_write: B|2007|syncFrameState
<...>-2738 (-----) [005] ...1 14594.259856: tracing_mark_write: B|2007|prepareTree
Binder:14607_1-14624 (14607) [002] ...1 14594.259863: tracing_mark_write: B|14607|AttachCurrentThread
Binder:14607_1-14624 (14607) [002] ...1 14594.259869: tracing_mark_write: B|14607|Thread::Attach
Binder:14607_1-14624 (14607) [002] ...1 14594.259873: tracing_mark_write: B|14607|Thread birth
Binder:14607_1-14624 (14607) [002] ...1 14594.259916: tracing_mark_write: B|14607|Thread::Init
Binder:14607_1-14624 (14607) [002] ...1 14594.259920: tracing_mark_write: B|14607|InitStackHwm
<...>-14607 (-----) [000] d..2 14594.259932: sched_switch: prev_comm=.android.dialer prev_pid=14607 prev_prio=120 prev_state=D ==> next_comm=swapper/0 next_pid=0 next_prio=120
Binder:14607_1-14624 (14607) [002] d..2 14594.259941: sched_blocked_reason: pid=14607 iowait=0 caller=do_page_fault+0x550/0x5fc
<...>-3198 (-----) [001] ...1 14594.259942: tracing_mark_write: B|2007|Update SurfaceView position
Binder:14607_1-14624 (14607) [002] ...1 14594.259963: tracing_mark_write: B|14607|InitTlsEntryPoints
Binder:14607_1-14624 (14607) [002] ...1 14594.259974: tracing_mark_write: B|14607|InitInterpreterTls
<...>-14607 (-----) [000] d..2 14594.260005: sched_blocked_reason: pid=14624 iowait=0 caller=__rwsem_down_write_failed_common+0x3e8/0x754
<...>-3198 (-----) [001] d..2 14594.260007: sched_switch: prev_comm=hwuiTask1 prev_pid=3198 prev_prio=118 prev_state=S ==> next_comm=swapper/1 next_pid=0 next_prio=120
<...>-14607 (-----) [000] d..2 14594.260024: sched_switch: prev_comm=.android.dialer prev_pid=14607 prev_prio=120 prev_state=D ==> next_comm=swapper/0 next_pid=0 next_prio=120
Binder:14607_1-14624 (14607) [002] d..2 14594.260038: sched_blocked_reason: pid=14607 iowait=0 caller=do_page_fault+0x550/0x5fc
<...>-14607 (-----) [000] d..2 14594.260064: sched_blocked_reason: pid=14624 iowait=0 caller=__rwsem_down_write_failed_common+0x3e8/0x754
Binder:14607_1-14624 (14607) [002] ...1 14594.260101: tracing_mark_write: B|14607|ThreadList::Register
<...>-2738 (-----) [005] ...1 14594.260128: tracing_mark_write: B|2007|query
<...>-2738 (-----) [005] ...1 14594.260140: tracing_mark_write: B|2007|query
<...>-2738 (-----) [005] ...1 14594.260148: tracing_mark_write: B|2007|query
<...>-2738 (-----) [005] ...1 14594.260155: tracing_mark_write: B|2007|query
<...>-2738 (-----) [005] ...1 14594.260161: tracing_mark_write: B|2007|query
<...>-2738 (-----) [005] ...1 14594.260167: tracing_mark_write: B|2007|query
<...>-2738 (-----) [005] ...1 14594.260173: tracing_mark_write: B|2007|dequeueBuffer
<...>-2007 (-----) [001] d..2 14594.260201: sched_switch: prev_comm=s.nexuslauncher prev_pid=2007 prev_prio=120 prev_state=S ==> next_comm=swapper/1 next_pid=0 next_prio=120
<...>-2738 (-----) [005] d..2 14594.260214: sched_switch: prev_comm=RenderThread prev_pid=2738 prev_prio=120 prev_state=S ==> next_comm=Binder:625_4 next_pid=1773 next_prio=120
<...>-1773 (-----) [005] ...1 14594.260236: tracing_mark_write: B|625|dequeueBuffer
<...>-1773 (-----) [005] ...1 14594.260249: tracing_mark_write: B|625|com.google.android.apps.nexuslauncher/com.google.android.apps.nexuslauncher.NexusLauncherActivity#1: 2
<...>-14607 (-----) [000] d..2 14594.260334: sched_switch: prev_comm=.android.dialer prev_pid=14607 prev_prio=120 prev_state=D ==> next_comm=swapper/0 next_pid=0 next_prio=120
Binder:14607_1-14624 (14607) [002] d..2 14594.260343: sched_blocked_reason: pid=14607 iowait=0 caller=do_page_fault+0x550/0x5fc
<...>-14607 (-----) [000] d..2 14594.260376: sched_blocked_reason: pid=14624 iowait=0 caller=__rwsem_down_write_failed_common+0x3e8/0x754
<...>-14607 (-----) [000] d..2 14594.260387: sched_switch: prev_comm=.android.dialer prev_pid=14607 prev_prio=120 prev_state=D ==> next_comm=swapper/0 next_pid=0 next_prio=120
<...>-2738 (-----) [005] ...1 14594.260401: tracing_mark_write: B|2007|HWC release fence 36030 has signaled
Binder:14607_1-14624 (14607) [002] d..2 14594.260407: sched_blocked_reason: pid=14607 iowait=0 caller=do_page_fault+0x550/0x5fc
<...>-2738 (-----) [005] ...1 14594.260419: tracing_mark_write: B|2007|query
<...>-2738 (-----) [005] ...1 14594.260427: tracing_mark_write: B|2007|eglBeginFrame
<...>-2738 (-----) [005] ...1 14594.260445: tracing_mark_write: B|2007|query
<...>-2738 (-----) [005] ...1 14594.260450: tracing_mark_write: B|2007|query
Binder:14607_1-14624 (14607) [002] .... 14594.260472: task_newtask: pid=14625 comm=Binder:14607_1 clone_flags=3d0f00 oom_score_adj=-1000
<...>-14607 (-----) [000] d..2 14594.260517: sched_switch: prev_comm=.android.dialer prev_pid=14607 prev_prio=120 prev_state=D ==> next_comm=swapper/0 next_pid=0 next_prio=120
Binder:14607_2-14625 (14607) [001] d..2 14594.260525: sched_blocked_reason: pid=14607 iowait=0 caller=do_page_fault+0x550/0x5fc
<...>-14607 (-----) [000] d..2 14594.260555: sched_blocked_reason: pid=14625 iowait=0 caller=__rwsem_down_write_failed_common+0x3e8/0x754
<...>-14607 (-----) [000] ...1 14594.260569: tracing_mark_write: B|14607|ActivityThreadMain
<...>-14607 (-----) [000] d..2 14594.260581: sched_switch: prev_comm=.android.dialer prev_pid=14607 prev_prio=120 prev_state=D ==> next_comm=swapper/0 next_pid=0 next_prio=120
Binder:14607_2-14625 (14607) [001] d..2 14594.260588: sched_blocked_reason: pid=14607 iowait=0 caller=do_page_fault+0x550/0x5fc
<...>-14607 (-----) [000] d..2 14594.260611: sched_blocked_reason: pid=14625 iowait=0 caller=__rwsem_down_write_failed_common+0x3e8/0x754
<...>-14607 (-----) [000] d..2 14594.260623: sched_switch: prev_comm=.android.dialer prev_pid=14607 prev_prio=120 prev_state=D ==> next_comm=swapper/0 next_pid=0 next_prio=120
Binder:14607_2-14625 (14607) [001] d..2 14594.260636: sched_blocked_reason: pid=14607 iowait=0 caller=do_page_fault+0x550/0x5fc
<...>-14607 (-----) [000] d..2 14594.260663: sched_blocked_reason: pid=14625 iowait=0 caller=__rwsem_down_write_failed_common+0x3e8/0x754
<...>-14607 (-----) [000] d..2 14594.260674: sched_switch: prev_comm=.android.dialer prev_pid=14607 prev_prio=120 prev_state=D ==> next_comm=swapper/0 next_pid=0 next_prio=120
Binder:14607_2-14625 (14607) [001] d..2 14594.260694: sched_blocked_reason: pid=14607 iowait=0 caller=do_page_fault+0x550/0x5fc
<...>-14607 (-----) [000] d..2 14594.260724: sched_blocked_reason: pid=14625 iowait=0 caller=__rwsem_down_write_failed_common+0x3e8/0x754
<...>-2738 (-----) [005] ...1 14594.260734: tracing_mark_write: B|2007|flush commands
<...>-14607 (-----) [000] d..2 14594.260735: sched_switch: prev_comm=.android.dialer prev_pid=14607 prev_prio=120 prev_state=D ==> next_comm=swapper/0 next_pid=0 next_prio=120
Binder:14607_2-14625 (14607) [001] d..2 14594.260753: sched_blocked_reason: pid=14607 iowait=0 caller=do_page_fault+0x550/0x5fc
Binder:14607_2-14625 (14607) [001] ...1 14594.260925: tracing_mark_write: B|14607|AttachCurrentThread
Binder:14607_2-14625 (14607) [001] ...1 14594.260930: tracing_mark_write: B|14607|Thread::Attach
Binder:14607_2-14625 (14607) [001] ...1 14594.260933: tracing_mark_write: B|14607|Thread birth
Binder:14607_2-14625 (14607) [001] ...1 14594.260973: tracing_mark_write: B|14607|Thread::Init
Binder:14607_2-14625 (14607) [001] ...1 14594.260977: tracing_mark_write: B|14607|InitStackHwm
<...>-14607 (-----) [000] d..2 14594.260990: sched_switch: prev_comm=.android.dialer prev_pid=14607 prev_prio=120 prev_state=D ==> next_comm=swapper/0 next_pid=0 next_prio=120
Binder:14607_2-14625 (14607) [001] d..2 14594.260998: sched_blocked_reason: pid=14607 iowait=0 caller=do_page_fault+0x550/0x5fc
Binder:14607_2-14625 (14607) [001] ...1 14594.261023: tracing_mark_write: B|14607|InitTlsEntryPoints
Binder:14607_2-14625 (14607) [001] ...1 14594.261034: tracing_mark_write: B|14607|InitInterpreterTls
<...>-14607 (-----) [000] d..2 14594.261064: sched_blocked_reason: pid=14625 iowait=0 caller=__rwsem_down_write_failed_common+0x3e8/0x754
<...>-14607 (-----) [000] d..2 14594.261075: sched_switch: prev_comm=.android.dialer prev_pid=14607 prev_prio=120 prev_state=D ==> next_comm=swapper/0 next_pid=0 next_prio=120
Binder:14607_2-14625 (14607) [001] d..2 14594.261094: sched_blocked_reason: pid=14607 iowait=0 caller=do_page_fault+0x550/0x5fc
<...>-14607 (-----) [000] d..2 14594.261120: sched_blocked_reason: pid=14625 iowait=0 caller=__rwsem_down_write_failed_common+0x3e8/0x754
<...>-14607 (-----) [000] d..2 14594.261132: sched_switch: prev_comm=.android.dialer prev_pid=14607 prev_prio=120 prev_state=D ==> next_comm=swapper/0 next_pid=0 next_prio=120
Binder:14607_2-14625 (14607) [001] d..2 14594.261146: sched_blocked_reason: pid=14607 iowait=0 caller=do_page_fault+0x550/0x5fc
Binder:14607_2-14625 (14607) [001] ...1 14594.261167: tracing_mark_write: B|14607|ThreadList::Register
<...>-14607 (-----) [000] d..2 14594.261209: sched_blocked_reason: pid=14625 iowait=0 caller=__rwsem_down_write_failed_common+0x3e8/0x754
<...>-2738 (-----) [005] ...1 14594.261212: tracing_mark_write: B|2007|waitOnFences
<...>-14607 (-----) [000] d..2 14594.261220: sched_switch: prev_comm=.android.dialer prev_pid=14607 prev_prio=120 prev_state=D ==> next_comm=swapper/0 next_pid=0 next_prio=120
<...>-2738 (-----) [005] ...1 14594.261232: tracing_mark_write: B|2007|eglSwapBuffersWithDamageKHR
<...>-2738 (-----) [005] ...1 14594.261244: tracing_mark_write: B|2007|setSurfaceDamage
Binder:14607_2-14625 (14607) [001] d..2 14594.261246: sched_blocked_reason: pid=14607 iowait=0 caller=do_page_fault+0x550/0x5fc
<...>-14607 (-----) [000] ...1 14594.261326: tracing_mark_write: B|14607|VerifyClass com.android.org.conscrypt.TrustedCertificateStore$PreloadHolder
<...>-2738 (-----) [005] .... 14594.261621: fence_init: driver=kgsl-timeline timeline=kgsl-3d0_13-s.nexuslauncher(200 context=27 seqno=78005
<...>-625 (-----) [003] ...1 14594.263903: tracing_mark_write: B|625|resetIdleTimer
<...>-625 (-----) [003] ...1 14594.263912: tracing_mark_write: B|625|rebuildLayerStacks
<...>-625 (-----) [003] ...1 14594.263915: tracing_mark_write: B|625|rebuildLayerStacks VR Dirty
<...>-625 (-----) [003] ...1 14594.263919: tracing_mark_write: B|625|computeVisibleRegions
<...>-1398 (-----) [006] d..2 14594.263966: sched_switch: prev_comm=android.anim prev_pid=1398 prev_prio=110 prev_state=S ==> next_comm=Binder:625_4 next_pid=1773 next_prio=120
<...>-1695 (-----) [001] d..2 14594.264086: sched_switch: prev_comm=InputDispatcher prev_pid=1695 prev_prio=112 prev_state=S ==> next_comm=Binder:1368_14 next_pid=3253 next_prio=120
<...>-625 (-----) [003] ...1 14594.264293: tracing_mark_write: B|625|calculateWorkingSet
<...>-625 (-----) [003] ...1 14594.264500: tracing_mark_write: B|625|prepare
<...>-625 (-----) [003] ...1 14594.264513: tracing_mark_write: B|625|HIDL::IComposerClient::executeCommands_2_2::client
<...>-625 (-----) [003] ...2 14594.264584: binder_set_priority: proc=627 thread=627 old=97 => new=98 desired=98
<...>-625 (-----) [003] d..2 14594.264617: sched_switch: prev_comm=surfaceflinger prev_pid=625 prev_prio=98 prev_state=S ==> next_comm=logd.writer next_pid=588 next_prio=130
<...>-588 (-----) [003] d..2 14594.264851: sched_switch: prev_comm=logd.writer prev_pid=588 prev_prio=130 prev_state=S ==> next_comm=swapper/3 next_pid=0 next_prio=120
rcu_preempt-7 ( 7) [007] d..2 14594.265273: sched_switch: prev_comm=rcu_preempt prev_pid=7 prev_prio=120 prev_state=S ==> next_comm=kworker/u16:3 next_pid=18008 next_prio=120
<...>-18008 (-----) [007] d..2 14594.265404: sched_switch: prev_comm=kworker/u16:3 prev_pid=18008 prev_prio=120 prev_state=D ==> next_comm=swapper/7 next_pid=0 next_prio=120
<...>-18008 (-----) [007] d..2 14594.265471: sched_switch: prev_comm=kworker/u16:3 prev_pid=18008 prev_prio=120 prev_state=S ==> next_comm=swapper/7 next_pid=0 next_prio=120
<...>-625 (-----) [003] ...1 14594.265496: tracing_mark_write: B|625|doComposition
<...>-625 (-----) [003] ...1 14594.265507: tracing_mark_write: B|625|doComposeSurfaces
<...>-625 (-----) [003] ...1 14594.265552: tracing_mark_write: B|625|acquireBuffer
<...>-625 (-----) [003] ...1 14594.265563: tracing_mark_write: B|625|postFramebuffer
<...>-625 (-----) [003] ...1 14594.265567: tracing_mark_write: B|625|presentAndGetReleaseFences
<...>-625 (-----) [003] d..1 14594.265601: fence_enable_signal: driver=sde_fence:crtc97:91650 timeline=crtc97 context=3 seqno=91650
<...>-625 (-----) [003] ...1 14594.265735: tracing_mark_write: B|625|logLayerStats
<...>-625 (-----) [003] ...1 14594.265744: tracing_mark_write: B|625|postComposition
<...>-625 (-----) [003] ...1 14594.265749: tracing_mark_write: B|625|releaseBuffer
<...>-625 (-----) [003] ...1 14594.265753: tracing_mark_write: B|625|com.google.android.apps.nexuslauncher/com.google.android.apps.nexuslauncher.NexusLauncherActivity#1: 1
<...>-625 (-----) [003] ...1 14594.265791: tracing_mark_write: B|625|releaseBuffer
<...>-440 (-----) [007] d..2 14594.342366: sched_switch: prev_comm=mmc-cmdqd/0 prev_pid=440 prev_prio=98 prev_state=D ==> next_comm=kworker/u17:2 next_pid=1778 next_prio=100
<...>-2007 (-----) [006] ...1 14594.342375: tracing_mark_write: B|2007|input
<...>-2007 (-----) [006] ...1 14594.342399: tracing_mark_write: B|2007|animation
<...>-625 (-----) [003] ...1 14594.342447: tracing_mark_write: B|625|doTransaction
<...>-625 (-----) [003] ...1 14594.342489: tracing_mark_write: B|625|doTransaction
kworker/u17:2-1778 ( 1778) [007] d..3 14594.342532: sched_blocked_reason: pid=14607 iowait=1 caller=wait_on_page_bit_common+0x2a8/0x5f8
kworker/u17:2-1778 ( 1778) [007] d..2 14594.342544: sched_switch: prev_comm=kworker/u17:2 prev_pid=1778 prev_prio=100 prev_state=S ==> next_comm=kworker/u16:2 next_pid=27544 next_prio=120
<...>-1773 (-----) [000] ...1 14594.342575: tracing_mark_write: B|625|requestNextVsync
<...>-1773 (-----) [000] ...1 14594.342579: tracing_mark_write: B|625|resetIdleTimer
<...>-27544 (-----) [007] d..2 14594.342589: sched_switch: prev_comm=kworker/u16:2 prev_pid=27544 prev_prio=120 prev_state=S ==> next_comm=swapper/7 next_pid=0 next_prio=120
<...>-656 (-----) [002] d.h3 14594.342604: sched_blocked_reason: pid=1233 iowait=0 caller=geni_i2c_xfer+0x4d8/0x1398
<...>-1803 (-----) [001] d..2 14594.342605: sched_switch: prev_comm=ndroid.systemui prev_pid=1803 prev_prio=120 prev_state=S ==> next_comm=swapper/1 next_pid=0 next_prio=120
<...>-625 (-----) [003] ...1 14594.342632: tracing_mark_write: B|625|handleMessageInvalidate
<...>-625 (-----) [003] ...1 14594.342634: tracing_mark_write: B|625|handlePageFlip
<...>-2738 (-----) [007] ...1 14594.342641: tracing_mark_write: B|2007|notifyFramePending
<...>-658 (-----) [002] d..2 14594.342653: sched_switch: prev_comm=DispSync prev_pid=658 prev_prio=97 prev_state=S ==> next_comm=Binder:625_1 next_pid=656 next_prio=120
<...>-656 (-----) [002] ...1 14594.342656: tracing_mark_write: B|625|requestNextVsync
<...>-2738 (-----) [007] d..2 14594.342658: sched_switch: prev_comm=RenderThread prev_pid=2738 prev_prio=110 prev_state=S ==> next_comm=swapper/7 next_pid=0 next_prio=120
<...>-656 (-----) [002] ...1 14594.342660: tracing_mark_write: B|625|resetIdleTimer
<...>-660 (-----) [005] d..2 14594.342663: sched_switch: prev_comm=app prev_pid=660 prev_prio=97 prev_state=S ==> next_comm=swapper/5 next_pid=0 next_prio=120
<...>-625 (-----) [003] ...1 14594.342665: tracing_mark_write: B|625|latchBuffer
<...>-625 (-----) [003] ...1 14594.342673: tracing_mark_write: B|625|query
<...>-625 (-----) [003] ...1 14594.342682: tracing_mark_write: B|625|updateTexImage
<...>-625 (-----) [003] ...1 14594.342693: tracing_mark_write: B|625|acquireBuffer
<...>-625 (-----) [003] ...1 14594.342703: tracing_mark_write: B|625|com.google.android.apps.nexuslauncher/com.google.android.apps.nexuslauncher.NexusLauncherActivity#1: 1
<...>-660 (-----) [005] d..2 14594.342709: sched_switch: prev_comm=app prev_pid=660 prev_prio=97 prev_state=S ==> next_comm=swapper/5 next_pid=0 next_prio=120
<...>-2007 (-----) [006] ...1 14594.342733: tracing_mark_write: B|2007|traversal
<...>-2007 (-----) [006] ...1 14594.342776: tracing_mark_write: B|2007|draw
<...>-2007 (-----) [006] ...1 14594.342791: tracing_mark_write: B|2007|Record View#draw()
<...>-625 (-----) [003] ...1 14594.342849: tracing_mark_write: B|625|updateInputFlinger
<...>-2007 (-----) [006] d..2 14594.342903: sched_switch: prev_comm=s.nexuslauncher prev_pid=2007 prev_prio=110 prev_state=S ==> next_comm=kworker/6:2H next_pid=24261 next_prio=100
<...>-2738 (-----) [007] ...1 14594.342910: tracing_mark_write: B|2007|DrawFrame
<...>-2738 (-----) [007] d..2 14594.342917: sched_switch: prev_comm=RenderThread prev_pid=2738 prev_prio=110 prev_state=R+ ==> next_comm=mmc-cmdqd/0 next_pid=440 next_prio=98
<...>-24261 (-----) [006] d..2 14594.342918: sched_switch: prev_comm=kworker/6:2H prev_pid=24261 prev_prio=100 prev_state=S ==> next_comm=.android.dialer next_pid=14607 next_prio=110
<...>-440 (-----) [007] d..2 14594.342926: sched_switch: prev_comm=mmc-cmdqd/0 prev_pid=440 prev_prio=98 prev_state=D ==> next_comm=RenderThread next_pid=2738 next_prio=110
<...>-2738 (-----) [007] ...1 14594.342927: tracing_mark_write: B|2007|query
<...>-2738 (-----) [007] ...2 14594.342959: binder_set_priority: proc=625 thread=656 old=120 => new=110 desired=110
<...>-2738 (-----) [007] d..2 14594.342975: sched_switch: prev_comm=RenderThread prev_pid=2738 prev_prio=110 prev_state=R+ ==> next_comm=Binder:625_1 next_pid=656 next_prio=110
<...>-656 (-----) [007] ...1 14594.343021: tracing_mark_write: B|625|query
<...>-656 (-----) [007] .... 14594.343033: binder_set_priority: proc=625 thread=656 old=110 => new=120 desired=120
<...>-2738 (-----) [007] ...1 14594.343070: tracing_mark_write: B|2007|query
<...>-1233 (-----) [004] d..2 14594.343074: sched_switch: prev_comm=sound trigger c prev_pid=1233 prev_prio=120 prev_state=R+ ==> next_comm=irq/144-1436400 next_pid=2522 next_prio=49
<...>-2738 (-----) [007] ...2 14594.343078: binder_set_priority: proc=625 thread=656 old=120 => new=110 desired=110
<...>-625 (-----) [003] ...1 14594.343084: tracing_mark_write: B|625|onMessageReceived
<...>-625 (-----) [003] ...1 14594.343087: tracing_mark_write: B|625|handleMessageRefresh
<...>-625 (-----) [003] ...1 14594.343090: tracing_mark_write: B|625|preComposition
<...>-2738 (-----) [007] d..2 14594.343090: sched_switch: prev_comm=RenderThread prev_pid=2738 prev_prio=110 prev_state=R+ ==> next_comm=Binder:625_1 next_pid=656 next_prio=110
<...>-625 (-----) [003] ...1 14594.343122: tracing_mark_write: B|625|rebuildLayerStacks
<...>-625 (-----) [003] ...1 14594.343124: tracing_mark_write: B|625|rebuildLayerStacks VR Dirty
<...>-89 (-----) [007] d..2 14594.343126: sched_switch: prev_comm=lpass_smem_glin prev_pid=89 prev_prio=98 prev_state=S ==> next_comm=Binder:625_1 next_pid=656 next_prio=110
<...>-625 (-----) [003] ...1 14594.343129: tracing_mark_write: B|625|computeVisibleRegions
<...>-656 (-----) [007] ...1 14594.343136: tracing_mark_write: B|625|query
<...>-14607 (-----) [006] ...2 14594.343141: binder_set_priority: proc=1368 thread=3253 old=120 => new=110 desired=110
<...>-2965 (-----) [001] .... 14596.746610: mm_filemap_add_to_page_cache: dev 253:6 ino a359 page=000000002ae8fcff pfn=1522884 ofs=188416
<idle>-0 (-----) [002] d..2 14596.746619: sched_switch: prev_comm=swapper/2 prev_pid=0 prev_prio=120 prev_state=R ==> next_comm=mmc-cmdqd/0 next_pid=440 next_prio=98
<...>-2965 (-----) [001] .... 14596.746629: mm_filemap_add_to_page_cache: dev 253:6 ino a359 page=00000000679ee1ec pfn=1299913 ofs=192512
<...>-2965 (-----) [001] .... 14596.746664: mm_filemap_add_to_page_cache: dev 253:6 ino a359 page=0000000006cd2fb7 pfn=1296251 ofs=196608
<...>-2965 (-----) [001] .... 14596.746677: mm_filemap_add_to_page_cache: dev 253:6 ino a359 page=00000000af82f3d6 pfn=1419330 ofs=200704
<...>-2965 (-----) [001] .... 14596.746693: mm_filemap_add_to_page_cache: dev 253:6 ino a359 page=000000002840f054 pfn=1304928 ofs=204800
<...>-2965 (-----) [001] .... 14596.746706: mm_filemap_add_to_page_cache: dev 253:6 ino a359 page=000000004a59da17 pfn=1288069 ofs=208896
<...>-2965 (-----) [001] .... 14596.746717: mm_filemap_add_to_page_cache: dev 253:6 ino a359 page=0000000023a80dca pfn=1419686 ofs=212992
<...>-2965 (-----) [001] .... 14596.746730: mm_filemap_add_to_page_cache: dev 253:6 ino a359 page=000000001cf89eab pfn=1315372 ofs=217088
<...>-2965 (-----) [001] .... 14596.746743: mm_filemap_add_to_page_cache: dev 253:6 ino a359 page=000000005b4c6cb6 pfn=1380698 ofs=221184
<...>-2965 (-----) [001] .... 14596.746760: mm_filemap_add_to_page_cache: dev 253:6 ino a359 page=00000000f8304ae7 pfn=1206753 ofs=225280
<...>-2965 (-----) [001] .... 14596.746773: mm_filemap_add_to_page_cache: dev 253:6 ino a359 page=00000000cb912305 pfn=1325465 ofs=229376
<...>-2965 (-----) [001] .... 14596.746785: mm_filemap_add_to_page_cache: dev 253:6 ino a359 page=00000000f16f3774 pfn=1408056 ofs=233472
<...>-2965 (-----) [001] .... 14596.746801: mm_filemap_add_to_page_cache: dev 253:6 ino a359 page=0000000056d4c926 pfn=1418352 ofs=237568
<...>-2965 (-----) [001] .... 14596.746815: mm_filemap_add_to_page_cache: dev 253:6 ino a359 page=00000000f3eeb42c pfn=1320957 ofs=241664
<...>-440 (-----) [002] d..2 14596.746916: sched_switch: prev_comm=mmc-cmdqd/0 prev_pid=440 prev_prio=98 prev_state=D ==> next_comm=swapper/2 next_pid=0 next_prio=120
<...>-656 (-----) [007] .... 14594.343145: binder_set_priority: proc=625 thread=656 old=110 => new=120 desired=120
<...>-14607 (-----) [006] d..2 14594.343164: sched_switch: prev_comm=.android.dialer prev_pid=14607 prev_prio=110 prev_state=S ==> next_comm=swapper/6 next_pid=0 next_prio=120
<...>-5281 (-----) [002] d..2 14594.343177: sched_switch: prev_comm=writer prev_pid=5281 prev_prio=96 prev_state=S ==> next_comm=RenderThread next_pid=2738 next_prio=110
irq/144-1436400-2522 ( 2522) [004] d..2 14594.343223: sched_switch: prev_comm=irq/144-1436400 prev_pid=2522 prev_prio=49 prev_state=D ==> next_comm=sound trigger c next_pid=1233 next_prio=120
<...>-88 (-----) [006] d..2 14594.343240: sched_switch: prev_comm=smem_native_lpa prev_pid=88 prev_prio=98 prev_state=S ==> next_comm=swapper/6 next_pid=0 next_prio=120
<...>-1238 (-----) [001] d..2 14594.343243: sched_switch: prev_comm=FastMixer prev_pid=1238 prev_prio=96 prev_state=S ==> next_comm=swapper/1 next_pid=0 next_prio=120
<...>-2738 (-----) [002] ...1 14594.343244: tracing_mark_write: B|2007|syncFrameState
<...>-2738 (-----) [002] ...1 14594.343293: tracing_mark_write: B|2007|prepareTree
<...>-1695 (-----) [001] d..2 14594.343318: sched_switch: prev_comm=InputDispatcher prev_pid=1695 prev_prio=112 prev_state=R+ ==> next_comm=FastMixer next_pid=1238 next_prio=96
<...>-5281 (-----) [005] d..2 14594.343322: sched_switch: prev_comm=writer prev_pid=5281 prev_prio=96 prev_state=S ==> next_comm=Binder:1368_14 next_pid=3253 next_prio=110
<...>-1238 (-----) [001] d..2 14594.343442: sched_switch: prev_comm=FastMixer prev_pid=1238 prev_prio=96 prev_state=S ==> next_comm=InputDispatcher next_pid=1695 next_prio=112
<...>-1695 (-----) [001] d..2 14594.343467: sched_switch: prev_comm=InputDispatcher prev_pid=1695 prev_prio=112 prev_state=S ==> next_comm=swapper/1 next_pid=0 next_prio=120
<...>-5281 (-----) [000] d..2 14594.343484: sched_switch: prev_comm=writer prev_pid=5281 prev_prio=96 prev_state=S ==> next_comm=swapper/0 next_pid=0 next_prio=120
<...>-625 (-----) [003] ...1 14594.343519: tracing_mark_write: B|625|calculateWorkingSet
<...>-2738 (-----) [002] ...1 14594.343568: tracing_mark_write: B|2007|query
<...>-2738 (-----) [002] ...1 14594.343577: tracing_mark_write: B|2007|query
<...>-2738 (-----) [002] ...1 14594.343586: tracing_mark_write: B|2007|query
<...>-2738 (-----) [002] ...1 14594.343591: tracing_mark_write: B|2007|query
<...>-2738 (-----) [002] ...1 14594.343597: tracing_mark_write: B|2007|query
<...>-2738 (-----) [002] ...1 14594.343602: tracing_mark_write: B|2007|query
<...>-2738 (-----) [002] ...1 14594.343609: tracing_mark_write: B|2007|dequeueBuffer
<...>-2007 (-----) [006] d..2 14594.343612: sched_switch: prev_comm=s.nexuslauncher prev_pid=2007 prev_prio=110 prev_state=S ==> next_comm=swapper/6 next_pid=0 next_prio=120
<...>-2738 (-----) [002] ...2 14594.343633: binder_set_priority: proc=625 thread=656 old=120 => new=110 desired=110
<...>-2738 (-----) [002] d..2 14594.343683: sched_switch: prev_comm=RenderThread prev_pid=2738 prev_prio=110 prev_state=R+ ==> next_comm=Binder:625_1 next_pid=656 next_prio=110
<...>-625 (-----) [003] ...1 14594.343704: tracing_mark_write: B|625|prepare
<...>-656 (-----) [002] ...1 14594.343707: tracing_mark_write: B|625|dequeueBuffer
<...>-625 (-----) [004] ...1 14594.812869: tracing_mark_write: B|625|com.google.android.dialer/com.google.android.dialer.extensions.GoogleDialtactsActivity#0: 2
<...>-2048 (-----) [000] d..2 14594.812895: sched_switch: prev_comm=RenderThread prev_pid=2048 prev_prio=120 prev_state=R+ ==> next_comm=Binder:625_3 next_pid=1431 next_prio=120
<...>-1431 (-----) [000] ...1 14594.812911: tracing_mark_write: B|625|query
<...>-625 (-----) [004] ...1 14594.812914: tracing_mark_write: B|625|latchBuffer
<...>-625 (-----) [004] ...1 14594.812919: tracing_mark_write: B|625|query
<...>-625 (-----) [004] ...1 14594.812925: tracing_mark_write: B|625|updateTexImage
<...>-625 (-----) [004] ...1 14594.812928: tracing_mark_write: B|625|acquireBuffer
<...>-625 (-----) [004] ...1 14594.812934: tracing_mark_write: B|625|StatusBar#0: 1
<...>-2048 (-----) [000] ...1 14594.812962: tracing_mark_write: B|1803|syncFrameState
<...>-656 (-----) [002] ...1 14594.813044: tracing_mark_write: B|625|setTransactionState
<...>-14607 (-----) [007] ...2 14594.813083: binder_set_priority: proc=10691 thread=18733 old=120 => new=110 desired=110
<...>-14607 (-----) [007] d..2 14594.813114: sched_switch: prev_comm=.android.dialer prev_pid=14607 prev_prio=110 prev_state=S ==> next_comm=kworker/7:1 next_pid=7092 next_prio=120
<...>-14655 (-----) [006] d..2 14594.813128: sched_switch: prev_comm=DialerExecutors prev_pid=14655 prev_prio=130 prev_state=R ==> next_comm=lpass_smem_glin next_pid=89 next_prio=98
<...>-89 (-----) [006] d..2 14594.813163: sched_switch: prev_comm=lpass_smem_glin prev_pid=89 prev_prio=98 prev_state=S ==> next_comm=DialerExecutors next_pid=14655 next_prio=130
<...>-656 (-----) [002] ...1 14594.813218: tracing_mark_write: B|625|requestNextVsync
<...>-656 (-----) [002] ...1 14594.813222: tracing_mark_write: B|625|resetIdleTimer
kworker/7:1-7092 ( 7092) [007] d..2 14594.813239: sched_switch: prev_comm=kworker/7:1 prev_pid=7092 prev_prio=120 prev_state=R+ ==> next_comm=smem_native_lpa next_pid=88 next_prio=98
<...>-5281 (-----) [001] d..2 14594.813245: sched_switch: prev_comm=writer prev_pid=5281 prev_prio=96 prev_state=S ==> next_comm=Binder:10691_B next_pid=18733 next_prio=110
<...>-88 (-----) [007] d..2 14594.813248: sched_switch: prev_comm=smem_native_lpa prev_pid=88 prev_prio=98 prev_state=R ==> next_comm=kgsl_worker_thr next_pid=259 next_prio=97
<...>-2048 (-----) [000] d..2 14594.813249: sched_switch: prev_comm=RenderThread prev_pid=2048 prev_prio=120 prev_state=R+ ==> next_comm=FastMixer next_pid=1238 next_prio=96
<...>-14655 (-----) [006] d..2 14594.813263: sched_switch: prev_comm=DialerExecutors prev_pid=14655 prev_prio=130 prev_state=R+ ==> next_comm=smem_native_lpa next_pid=88 next_prio=98
<...>-661 (-----) [002] d..2 14594.813265: sched_switch: prev_comm=sf prev_pid=661 prev_prio=97 prev_state=S ==> next_comm=Binder:625_1 next_pid=656 next_prio=116
<...>-259 (-----) [007] d..2 14594.813265: sched_switch: prev_comm=kgsl_worker_thr prev_pid=259 prev_prio=97 prev_state=S ==> next_comm=kworker/7:1 next_pid=7092 next_prio=120
kworker/7:1-7092 ( 7092) [007] d..2 14594.813271: sched_switch: prev_comm=kworker/7:1 prev_pid=7092 prev_prio=120 prev_state=S ==> next_comm=system next_pid=108 next_prio=120
<...>-108 (-----) [007] .... 14594.813275: ion_heap_shrink: heap_name=system, len=9469952, total_allocated=189620224
<...>-88 (-----) [006] d..2 14594.813294: sched_switch: prev_comm=smem_native_lpa prev_pid=88 prev_prio=98 prev_state=S ==> next_comm=DialerExecutors next_pid=14655 next_prio=130
<...>-625 (-----) [004] ...1 14594.813310: tracing_mark_write: B|625|updateInputFlinger
<...>-1238 (-----) [000] d..2 14594.813312: sched_switch: prev_comm=FastMixer prev_pid=1238 prev_prio=96 prev_state=S ==> next_comm=RenderThread next_pid=2048 next_prio=120
<...>-661 (-----) [002] d..2 14594.813317: sched_switch: prev_comm=sf prev_pid=661 prev_prio=97 prev_state=S ==> next_comm=Binder:625_1 next_pid=656 next_prio=116
<...>-14640 (-----) [005] d..2 14594.813319: sched_switch: prev_comm=DialerExecutors prev_pid=14640 prev_prio=130 prev_state=R ==> next_comm=DispSync next_pid=658 next_prio=97
<...>-656 (-----) [002] ...1 14594.813336: tracing_mark_write: B|625|~GraphicBuffer
<...>-658 (-----) [005] d..2 14594.813345: sched_switch: prev_comm=DispSync prev_pid=658 prev_prio=97 prev_state=S ==> next_comm=DialerExecutors next_pid=14640 next_prio=130
<...>-656 (-----) [002] ...1 14594.813345: tracing_mark_write: B|625|~GraphicBuffer
<...>-656 (-----) [002] ...1 14594.813353: tracing_mark_write: B|625|~GraphicBuffer
<...>-2048 (-----) [000] d..2 14594.813358: sched_switch: prev_comm=RenderThread prev_pid=2048 prev_prio=120 prev_state=R+ ==> next_comm=FastMixer next_pid=1238 next_prio=96
<...>-656 (-----) [002] ...1 14594.813364: tracing_mark_write: B|625|~GraphicBuffer
<...>-5281 (-----) [001] d..2 14594.813369: sched_switch: prev_comm=writer prev_pid=5281 prev_prio=96 prev_state=S ==> next_comm=Binder:10691_B next_pid=18733 next_prio=110
<...>-656 (-----) [002] ...1 14594.813372: tracing_mark_write: B|625|~GraphicBuffer
<...>-656 (-----) [002] ...1 14594.813380: tracing_mark_write: B|625|~GraphicBuffer
<...>-656 (-----) [002] ...1 14594.813391: tracing_mark_write: B|625|~GraphicBuffer
<...>-656 (-----) [002] ...1 14594.813398: tracing_mark_write: B|625|~GraphicBuffer
<...>-656 (-----) [002] ...1 14594.813408: tracing_mark_write: B|625|~GraphicBuffer
<...>-656 (-----) [002] ...1 14594.813416: tracing_mark_write: B|625|~GraphicBuffer
<...>-656 (-----) [002] ...1 14594.813424: tracing_mark_write: B|625|~GraphicBuffer
<...>-656 (-----) [002] ...1 14594.813432: tracing_mark_write: B|625|~GraphicBuffer
<...>-656 (-----) [002] .n.1 14594.813443: tracing_mark_write: B|625|~GraphicBuffer
<...>-1238 (-----) [000] d..2 14594.813464: sched_switch: prev_comm=FastMixer prev_pid=1238 prev_prio=96 prev_state=S ==> next_comm=RenderThread next_pid=2048 next_prio=120
<...>-5281 (-----) [002] d..2 14594.813525: sched_switch: prev_comm=writer prev_pid=5281 prev_prio=96 prev_state=S ==> next_comm=Binder:625_1 next_pid=656 next_prio=116
<...>-656 (-----) [002] ...1 14594.813544: tracing_mark_write: B|625|~GraphicBuffer
<...>-656 (-----) [002] ...1 14594.813557: tracing_mark_write: B|625|~GraphicBuffer
<...>-2048 (-----) [000] d..2 14594.813594: sched_switch: prev_comm=RenderThread prev_pid=2048 prev_prio=120 prev_state=R+ ==> next_comm=Binder:1368_15 next_pid=3359 next_prio=120
<...>-18733 (-----) [001] ...2 14594.813635: binder_set_priority: proc=1368 thread=3514 old=120 => new=110 desired=110
<...>-656 (-----) [002] .... 14594.813637: binder_set_priority: proc=625 thread=656 old=116 => new=120 desired=120
<...>-108 (-----) [007] d..2 14594.813646: sched_switch: prev_comm=system prev_pid=108 prev_prio=120 prev_state=R+ ==> next_comm=android.anim next_pid=1398 next_prio=116
<...>-625 (-----) [004] ...1 14594.813646: tracing_mark_write: B|625|onMessageReceived
<...>-625 (-----) [004] ...1 14594.813649: tracing_mark_write: B|625|handleMessageRefresh
<...>-625 (-----) [004] ...1 14594.813651: tracing_mark_write: B|625|preComposition
<...>-625 (-----) [004] ...1 14594.813693: tracing_mark_write: B|625|rebuildLayerStacks
<...>-625 (-----) [004] ...1 14594.813696: tracing_mark_write: B|625|rebuildLayerStacks VR Dirty
<...>-625 (-----) [004] ...1 14594.813701: tracing_mark_write: B|625|computeVisibleRegions
<...>-1398 (-----) [007] d..2 14594.813718: sched_switch: prev_comm=android.anim prev_pid=1398 prev_prio=116 prev_state=S ==> next_comm=system next_pid=108 next_prio=120
<...>-108 (-----) [007] d..2 14594.813739: sched_switch: prev_comm=system prev_pid=108 prev_prio=120 prev_state=R+ ==> next_comm=android.anim next_pid=1398 next_prio=116
<...>-1695 (-----) [002] d..2 14594.813970: sched_switch: prev_comm=InputDispatcher prev_pid=1695 prev_prio=112 prev_state=S ==> next_comm=system next_pid=108 next_prio=120
<...>-1398 (-----) [007] ...1 14594.814029: tracing_mark_write: B|1368|wmLayout
<...>-1398 (-----) [007] ...1 14594.814033: tracing_mark_write: B|1368|performSurfacePlacement
<...>-1398 (-----) [007] ...1 14594.814040: tracing_mark_write: B|1368|applySurfaceChanges
<...>-1398 (-----) [007] ...1 14594.814043: tracing_mark_write: B|1368|openSurfaceTransaction
<...>-1398 (-----) [007] ...1 14594.814063: tracing_mark_write: B|1368|performLayout
<...>-625 (-----) [004] ...1 14594.814119: tracing_mark_write: B|625|calculateWorkingSet
<...>-1398 (-----) [007] ...1 14594.814241: tracing_mark_write: B|1368|layoutInputConsumer
<...>-2048 (-----) [000] ...1 14594.814260: tracing_mark_write: B|1803|prepareTree
<...>-1398 (-----) [007] ...1 14594.814263: tracing_mark_write: B|1368|applyPostLayoutPolicy
<...>-2048 (-----) [000] d..2 14594.814408: sched_switch: prev_comm=RenderThread prev_pid=2048 prev_prio=120 prev_state=R ==> next_comm=Binder:1368_15 next_pid=3359 next_prio=120
<...>-625 (-----) [004] ...1 14594.814411: tracing_mark_write: B|625|prepare
<...>-625 (-----) [004] ...1 14594.814428: tracing_mark_write: B|625|HIDL::IComposerClient::executeCommands_2_2::client
<...>-2048 (-----) [000] d..2 14594.814533: sched_switch: prev_comm=RenderThread prev_pid=2048 prev_prio=120 prev_state=R+ ==> next_comm=ndroid.systemui next_pid=1803 next_prio=120
<...>-1803 (-----) [000] d..2 14594.814558: sched_switch: prev_comm=ndroid.systemui prev_pid=1803 prev_prio=120 prev_state=S ==> next_comm=RenderThread next_pid=2048 next_prio=120
<...>-2048 (-----) [000] d..2 14594.814572: sched_switch: prev_comm=RenderThread prev_pid=2048 prev_prio=120 prev_state=R+ ==> next_comm=ndroid.systemui next_pid=1803 next_prio=120
<...>-625 (-----) [004] ...2 14594.814589: binder_set_priority: proc=627 thread=627 old=97 => new=98 desired=98
<...>-108 (-----) [002] d..2 14594.814650: sched_switch: prev_comm=system prev_pid=108 prev_prio=120 prev_state=R+ ==> next_comm=composer@2.2-se next_pid=627 next_prio=98
<...>-625 (-----) [004] d..2 14594.814664: sched_switch: prev_comm=surfaceflinger prev_pid=625 prev_prio=98 prev_state=S ==> next_comm=ashmemd next_pid=854 next_prio=129
<...>-1398 (-----) [007] ...1 14594.814723: tracing_mark_write: B|1368|applyWindowSurfaceChanges
<...>-854 (-----) [004] .... 14594.814746: binder_set_priority: proc=854 thread=854 old=129 => new=120 desired=120
<...>-854 (-----) [004] d..2 14594.814757: sched_switch: prev_comm=ashmemd prev_pid=854 prev_prio=120 prev_state=R+ ==> next_comm=highpool[0] next_pid=3493 next_prio=129
<...>-1803 (-----) [000] d..2 14594.814763: sched_switch: prev_comm=ndroid.systemui prev_pid=1803 prev_prio=120 prev_state=S ==> next_comm=RenderThread next_pid=2048 next_prio=120
<...>-18733 (-----) [001] d..1 14594.814819: mm_filemap_delete_from_page_cache: dev 0:1 ino 3ce5e7 page=0000000083f10c7a pfn=1298474 ofs=0
<...>-2048 (-----) [000] ...1 14594.814842: tracing_mark_write: B|1803|dequeueBuffer
<...>-1398 (-----) [007] ...1 14594.814850: tracing_mark_write: F|1368|launching: com.google.android.dialer|0
<...>-1398 (-----) [007] ...1 14594.814855: tracing_mark_write: B|1368|MetricsLogger:launchObserverNotifyActivityLaunchFinished
<...>-1398 (-----) [007] ...1 14594.814857: tracing_mark_write: B|1368|MetricsLogger:convertActivityRecordToProto
<...>-2048 (-----) [000] d..2 14594.814905: sched_switch: prev_comm=RenderThread prev_pid=2048 prev_prio=120 prev_state=R+ ==> next_comm=Binder:625_1 next_pid=656 next_prio=120
<...>-1410 (-----) [006] .... 14592.997816: mm_filemap_add_to_page_cache: dev 253:6 ino b785 page=00000000615a8f24 pfn=1134764 ofs=0
<...>-1410 (-----) [006] .... 14592.997831: mm_filemap_add_to_page_cache: dev 253:6 ino b785 page=000000008768a58f pfn=1134751 ofs=4096
<...>-18733 (-----) [001] .... 14594.814914: binder_set_priority: proc=10691 thread=18733 old=110 => new=120 desired=120
<...>-14655 (-----) [006] d..2 14594.814932: sched_switch: prev_comm=DialerExecutors prev_pid=14655 prev_prio=130 prev_state=R ==> next_comm=.android.dialer next_pid=14607 next_prio=110
<...>-656 (-----) [000] ...1 14594.814948: tracing_mark_write: B|625|dequeueBuffer
<...>-3514 (-----) [001] .... 14594.814954: binder_set_priority: proc=1368 thread=3514 old=110 => new=120 desired=120
<...>-656 (-----) [000] ...1 14594.814963: tracing_mark_write: B|625|NavigationBar0#0: 2
<...>-14607 (-----) [006] ...2 14594.815022: binder_set_priority: proc=1368 thread=3514 old=120 => new=110 desired=110
<...>-1398 (-----) [007] ...1 14594.815039: tracing_mark_write: B|1368|prepareSurfaces
<...>-14607 (-----) [006] d..2 14594.815041: sched_switch: prev_comm=.android.dialer prev_pid=14607 prev_prio=110 prev_state=S ==> next_comm=DialerExecutors next_pid=14655 next_prio=130
<...>-3493 (-----) [004] d..2 14594.815057: sched_switch: prev_comm=highpool[0] prev_pid=3493 prev_prio=129 prev_state=R ==> next_comm=Binder:1368_18 next_pid=3514 next_prio=110
<...>-2048 (-----) [000] ...1 14594.815088: tracing_mark_write: B|1803|HWC release fence 45750 has signaled
<...>-2048 (-----) [000] ...1 14594.815119: tracing_mark_write: B|1803|eglBeginFrame
<...>-14655 (-----) [006] d..2 14594.815190: sched_switch: prev_comm=DialerExecutors prev_pid=14655 prev_prio=130 prev_state=R ==> next_comm=crtc_commit:97 next_pid=301 next_prio=83
<...>-3514 (-----) [004] .... 14594.815193: binder_set_priority: proc=1368 thread=3514 old=110 => new=120 desired=120
<...>-1398 (-----) [007] ...1 14594.815322: tracing_mark_write: B|1368|closeSurfaceTransaction
<...>-3493 (-----) [004] .... 14594.815353: mm_filemap_add_to_page_cache: dev 253:6 ino 113b page=0000000069e2b98a pfn=628464 ofs=2723840
<...>-1398 (-----) [007] ...2 14594.815393: binder_set_priority: proc=625 thread=656 old=120 => new=116 desired=116
rcu_sched-8 ( 8) [007] d..2 14594.815449: sched_switch: prev_comm=rcu_sched prev_pid=8 prev_prio=120 prev_state=S ==> next_comm=Binder:625_1 next_pid=656 next_prio=116

View File

@ -1,42 +0,0 @@
#!/bin/bash
# Copyright (C) 2019 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
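# Directory containing this script; used below to locate the sibling
# trace_analyzer.py and run-sql-queries helpers.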
DIR="$( cd "$(dirname "$0")" ; pwd -P )"
if [[ "$#" -lt 2 ]]; then
echo "Usage: $0 <filename.trace> <sqlite-filename.db>" >&2
exit 1
fi
TRACE_FILENAME="$1"
SQLITE_FILENAME="$2"
#echo "Trace filename: $TRACE_FILENAME"
#echo "SQLite filename: $SQLITE_FILENAME"
if ! [[ -f "$TRACE_FILENAME" ]]; then
echo "Error: Trace '$TRACE_FILENAME' does not exist." >&2
exit 1
fi
if ! "$DIR/trace_analyzer.py" "$SQLITE_FILENAME" "$TRACE_FILENAME" > /dev/null; then
echo "Fatal: trace_analyzer.py failed, aborting." >&2
exit 1
fi
if ! "$DIR/run-sql-queries" "$SQLITE_FILENAME"; then
echo "Fatal: Failed to run sql queries, aborting." >&2
exit 1
fi

View File

@ -1,51 +0,0 @@
#!/usr/bin/python3
# Copyright (C) 2019 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import sys
import argparse
from lib.trace2db import Trace2Db
# This script requires 'sqlalchemy' to access the sqlite3 database.
#
# $> sudo apt-get install python3-pip
# $> pip3 install --user sqlalchemy
#
def main(argv):
    parser = argparse.ArgumentParser(description='Convert ftrace/systrace file into sqlite3 db.')
    parser.add_argument('db_filename', metavar='sql_filename.db', type=str,
                        help='path to sqlite3 db filename')
    parser.add_argument('trace_filename', metavar='systrace.ftrace', type=str,
                        help='path to ftrace/systrace filename')
    parser.add_argument('--limit', type=int, help='limit the number of entries parsed [for debugging]')
    args = parser.parse_args()

    db_filename = args.db_filename
    trace_filename = args.trace_filename

    trace2db = Trace2Db(db_filename)
    print("SQL Alchemy db initialized")

    # parse 'raw_ftrace_entries' table
    count = trace2db.parse_file_into_db(trace_filename, limit=args.limit)
    print("Count was ", count)

    return 0

if __name__ == '__main__':
    main(sys.argv)

View File

@ -1,78 +0,0 @@
#!/bin/bash
# Copyright (C) 2019 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
DIR="$( cd "$(dirname "$0")" ; pwd -P )"
if [[ "$#" -lt 3 ]]; then
echo "Usage: $0 <trace-dir> <db-dir> <output.csv>" >&2
exit 1
fi
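# When simulate is set to "y", the commands below are only echoed instead of
# being executed (dry run).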
simulate="n"
TRACE_DIRNAME="$1"
SQLITE_DIRNAME="$2"
OUTPUT_FILENAME="$3"
echo "Trace filename: $TRACE_DIRNAME"
echo "SQLite filename: $SQLITE_DIRNAME"
if ! [[ -d "$TRACE_DIRNAME" ]]; then
echo "Error: Trace '$TRACE_DIRNAME' does not exist." >&2
exit 1
fi
process_trace_file() {
local trace_filename="$1"
local db_dirname="$2"
local output_file="$3"
local db_filename="$db_dirname/$(basename "$trace_filename").db"
if [[ $simulate == y ]]; then
echo "$DIR/trace_analyzer.py" "$db_filename" "$trace_filename" "> /dev/null"
else
if ! "$DIR/trace_analyzer.py" "$db_filename" "$trace_filename" > /dev/null; then
echo "Fatal: trace_analyzer.py failed, aborting." >&2
return 1
fi
fi
if [[ $simulate == y ]]; then
echo "$DIR/run-sql-queries" "$db_filename" ">> '$output_file'"
else
# append name of trace to CSV, so we can see where data came from
echo "; $trace_filename" >> "$output_file"
if ! "$DIR/run-sql-queries" "$db_filename" >> "$output_file"; then
echo "Fatal: Failed to run sql queries, aborting." >&2
return 1
fi
fi
return 0
}
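# Walk every *.trace file under the trace directory. Trace names containing a
# '#<n>' suffix are skipped unless <n> is 1, so only the first capture of each
# set is analyzed.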
find "$TRACE_DIRNAME" -type f -name '*.trace' -print0 |
while IFS= read -r -d '' file; do
if [[ $file == *#*.trace && $file != *#1.trace ]]; then
echo "Skip $file"
continue
fi
printf '%s\n' "$file"
process_trace_file "$file" "$SQLITE_DIRNAME" "$OUTPUT_FILENAME"
done
echo "Done"

View File

@ -1,66 +0,0 @@
#!/usr/bin/env python3
#
# Copyright 2019, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Unit tests for trace_analyzer module.
Install:
$> sudo apt-get install python3-pytest ## OR
$> pip install -U pytest
See also https://docs.pytest.org/en/latest/getting-started.html
Usage:
$> pytest trace_analyzer_test.py
See also https://docs.pytest.org/en/latest/usage.html
"""
# global imports
import os
import sys
DIR = os.path.abspath(os.path.dirname(__file__))
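# Make the parent directory importable so that 'lib.cmd_utils' resolves when
# this test is run directly.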
sys.path.append(os.path.dirname(DIR))
import lib.cmd_utils as cmd_utils
def test_trace_analyzer(tmpdir):
    # Setup
    bin = os.path.join(DIR, 'trace_analyzer')
    systrace = os.path.join(DIR, 'test_fixtures/common_systrace')
    db_file = tmpdir.mkdir('trace_analyzer').join('test.db')

    # Act
    passed, output = cmd_utils.execute_arbitrary_command([bin, systrace,
                                                          str(db_file)],
                                                         timeout=300,
                                                         shell=False,
                                                         simulate=False)

    # Assert
    assert passed
    assert output == """\
'blocked_iowait_duration_ms',\
'process_name',\
'launching_duration_ms',\
'launching_started_timestamp_ms',\
'launching_finished_timestamp_ms'
81.697999999960302375,\
'com.google.android.dialer',\
594.99400000095192808,\
14594219.85600000061,\
14594814.85000000149"""