# pagecache.py - pagecache usage analyzer (recovered from GreenShell GitHub wiki export)
#!/usr/bin/env python
# Parses atrace/ftrace mm_filemap_* events and reports per-file pagecache
# add/remove statistics, optionally writing an xlsx report.
import curses
import operator
import optparse
import os
import re
import subprocess
import sys
import threading
import time

# The Queue module was renamed to queue in Python 3; keep the Python 2 name
# used throughout this file.
try:
    import Queue
except ImportError:
    import queue as Queue

import xlsxwriter

# Python 2 only: force UTF-8 as the default string encoding.
if sys.version_info[0] == 2:
    reload(sys)
    sys.setdefaultencoding('utf8')

# Seconds between curses screen refreshes in live mode.
STATS_UPDATE_INTERVAL = 0.2
# Page size assumed for all traced devices, in bytes.
PAGE_SIZE = 4096

# Module-level state shared between parse_atrace_line() and its callers.
# SLICE_LENHTH (sic) is the IO-statistics time slice length, in seconds.
output_list_duration = {}   # pid -> count of pages removed within [TRUNC_START, TRUNC_END]
input_list_duration = {}    # pid -> count of pages added within [TRUNC_START, TRUNC_END]
SLICE_LENHTH = 0.1
add_info_list = []          # parsed add events in the currently open time slice
delete_info_list = []       # parsed delete events in the currently open time slice
slice_end = 0               # end timestamp of the open slice (0 = no open slice)
tidMap = {}                 # tid -> pid, built from tracing_mark_write events
total_pagecache = 0.0       # running pagecache delta across slices, in KB
class PagecacheStats():
    """Holds pagecache stats by accounting for pages added and removed."""

    def __init__(self, inode_to_filename):
        # inode_to_filename: dict of (device_number, inode) -> (filename, filesize).
        self._inode_to_filename = inode_to_filename
        self._file_size = {}    # filename -> file size in bytes (as reported by stat)
        self._file_pages = {}   # filename -> [pages_added, pages_removed]
        self._total_pages_added = 0
        self._total_pages_removed = 0

    def _resolve(self, device_number, inode):
        """Return (filename, filesize) for an inode; fall back to 'dev->inode' / 0."""
        if (device_number, inode) in self._inode_to_filename:
            return self._inode_to_filename[(device_number, inode)]
        return str(device_number) + '->' + str(inode), 0

    def _count(self, device_number, inode, slot):
        """Bump the add (slot 0) or remove (slot 1) counter for an inode's file."""
        filename, filesize = self._resolve(device_number, inode)
        if filename not in self._file_pages:
            counts = [0, 0]
            counts[slot] = 1
            self._file_pages[filename] = counts
        else:
            self._file_pages[filename][slot] += 1
        if filename not in self._file_size:
            self._file_size[filename] = filesize

    def add_page(self, device_number, inode, offset):
        """Record one page added to the cache for (device_number, inode)."""
        self._count(device_number, inode, 0)
        self._total_pages_added += 1

    def remove_page(self, device_number, inode, offset):
        """Record one page removed from the cache for (device_number, inode)."""
        self._count(device_number, inode, 1)
        self._total_pages_removed += 1

    # NOTE: despite the *_mb names these return KB strings; the commented-out
    # bodies in the original divided by 1024 twice for real MB. All call sites
    # label the result "KB", so the KB behavior is kept.
    def pages_to_mb(self, num_pages):
        return "%.2f" % round(num_pages * PAGE_SIZE / 1024.0, 2)

    def bytes_to_mb(self, num_bytes):
        return "%.2f" % round(int(num_bytes) / 1024.0, 2)

    def pages_to_kb(self, num_pages):
        return "%.2f" % round(num_pages * PAGE_SIZE / 1024.0, 2)

    def bytes_to_kb(self, num_bytes):
        return "%.2f" % round(int(num_bytes) / 1024.0, 2)

    def print_pages_and_mb(self, num_pages):
        """Format a page count with its size in KB, e.g. '3 (12.00 KB)'."""
        return str(num_pages) + ' (' + str(self.pages_to_mb(num_pages)) + ' KB)'

    def get_filename_and_size(self, device_number, inode):
        """Return 'filename-<size> KB' for an inode (or 'dev->inode-0.00 KB')."""
        filename, filesize = self._resolve(device_number, inode)
        return filename + '-' + str(self.bytes_to_kb(filesize)) + ' KB'

    def reset_stats(self):
        """Clear per-file counters and totals (inode lookup table is kept)."""
        self._file_pages.clear()
        self._total_pages_added = 0
        self._total_pages_removed = 0

    def print_stats(self):
        """Print per-file added/removed KB, sorted by [added, removed] counts."""
        sorted_added = sorted(self._file_pages.items(), key=operator.itemgetter(1), reverse=True)
        row_format = "{:<70}{:<12}{:<14}{:<9}"
        print(row_format.format('NAME', 'ADDED (KB)', 'REMOVED (KB)', 'SIZE (KB)'))
        for filename, counts in sorted_added:
            filesize = self._file_size[filename]
            added, removed = counts
            # Truncate long paths to their last 64 characters.
            # (Fixed: original compared `filename > 64`, a str/int comparison.)
            if len(filename) > 64:
                filename = filename[-64:]
            print(row_format.format(filename, self.pages_to_mb(added),
                                    self.pages_to_mb(removed), self.bytes_to_mb(filesize)))
        print(row_format.format('TOTAL', self.pages_to_mb(self._total_pages_added),
                                self.pages_to_mb(self._total_pages_removed), ''))

    def print_stats_curses(self, pad):
        """Render the same per-file table into a curses pad (live mode)."""
        sorted_added = sorted(self._file_pages.items(), key=operator.itemgetter(1), reverse=True)
        height, width = pad.getmaxyx()
        pad.clear()
        pad.addstr(0, 2, 'NAME'.ljust(68), curses.A_REVERSE)
        pad.addstr(0, 70, 'ADDED (KB)'.ljust(12), curses.A_REVERSE)
        pad.addstr(0, 82, 'REMOVED (KB)'.ljust(14), curses.A_REVERSE)
        pad.addstr(0, 96, 'SIZE (KB)'.ljust(9), curses.A_REVERSE)
        y = 1
        for filename, counts in sorted_added:
            filesize = self._file_size[filename]
            added, removed = counts
            # Truncate long paths (fixed str/int comparison, as in print_stats).
            if len(filename) > 64:
                filename = filename[-64:]
            pad.addstr(y, 2, filename)
            pad.addstr(y, 70, self.pages_to_mb(added).rjust(10))
            pad.addstr(y, 80, self.pages_to_mb(removed).rjust(14))
            pad.addstr(y, 96, self.bytes_to_mb(filesize).rjust(9))
            y += 1
            if y == height - 2:
                pad.addstr(y, 4, "<more...>")
                break
        y += 1
        pad.addstr(y, 2, 'TOTAL'.ljust(74), curses.A_REVERSE)
        pad.addstr(y, 70, str(self.pages_to_mb(self._total_pages_added)).rjust(10), curses.A_REVERSE)
        pad.addstr(y, 80, str(self.pages_to_mb(self._total_pages_removed)).rjust(14), curses.A_REVERSE)
        pad.refresh(0, 0, 0, 0, height, width)

    def print_top_io_pid(self):
        """Print the top-20 pids by pages added/removed.

        Reads the module-level input_list_duration / output_list_duration
        dicts populated by parse_atrace_line().
        """
        print(" ")
        input_top20 = sorted(input_list_duration.items(), key=lambda d: d[1], reverse=True)[0:20]
        print("Top 20 INPUT pid:")
        row_format = "{:<50}{:<12}"
        print(row_format.format('pid', 'ADDED (KB)'))
        for in_pidname, in_count in input_top20:
            print(row_format.format(in_pidname, self.pages_to_mb(in_count)))
        output_top20 = sorted(output_list_duration.items(), key=lambda d: d[1], reverse=True)[0:20]
        print("Top 20 OUTPUT pid:")
        print(row_format.format('pid', 'REMOVED (KB)'))
        for out_pidname, out_count in output_top20:
            print(row_format.format(out_pidname, self.pages_to_mb(out_count)))
        print(" ")
class FileReaderThread(threading.Thread):
    """Reads data from a file/pipe on a worker thread.

    Use the standard threading.Thread object API to start and interact with
    the thread (start(), join(), etc.).
    """

    def __init__(self, file_object, output_queue, text_file, chunk_size=-1):
        """Initializes a FileReaderThread.

        Args:
          file_object: The file or pipe to read from.
          output_queue: A Queue.Queue object that will receive the data.
          text_file: If True, the file will be read one line at a time, and
            chunk_size will be ignored. If False, line breaks are ignored and
            chunk_size must be set to a positive integer.
          chunk_size: When processing a non-text file (text_file = False),
            chunk_size is the amount of data to copy into the queue with each
            read operation. For text files, this parameter is ignored.
        """
        threading.Thread.__init__(self)
        self._file_object = file_object
        self._output_queue = output_queue
        self._text_file = text_file
        self._chunk_size = chunk_size
        assert text_file or chunk_size > 0

    def run(self):
        """Overrides Thread's run() function.

        Returns when an EOF is encountered.
        """
        if self._text_file:
            # Read a text file one line at a time.
            for line in self._file_object:
                self._output_queue.put(line)
        else:
            # Read binary or text data until we get to EOF.
            while True:
                chunk = self._file_object.read(self._chunk_size)
                if not chunk:
                    break
                self._output_queue.put(chunk)

    def set_chunk_size(self, chunk_size):
        """Change the read chunk size.

        This function can only be called if the FileReaderThread object was
        created with an initial chunk_size > 0.

        Args:
          chunk_size: the new chunk size for this file. Must be > 0.
        """
        # The chunk size can be changed asynchronously while a file is being
        # read in a worker thread. However, the type of file can not be
        # changed after the FileReaderThread has been created. These asserts
        # verify that we are only changing the chunk size, not the file type.
        assert not self._text_file
        assert chunk_size > 0
        self._chunk_size = chunk_size
class AdbUtils():
    """Helpers for building and running 'adb shell' commands."""

    @staticmethod
    def add_adb_serial(adb_command, device_serial):
        """Insert '-s <serial>' after 'adb' when a device serial is given.

        Mutates adb_command in place.
        """
        if device_serial is not None:
            adb_command.insert(1, device_serial)
            adb_command.insert(1, '-s')

    @staticmethod
    def construct_adb_shell_command(shell_args, device_serial):
        """Build ['adb', ('-s', serial,)? 'shell', '<joined args>']."""
        adb_command = ['adb', 'shell', ' '.join(shell_args)]
        AdbUtils.add_adb_serial(adb_command, device_serial)
        return adb_command

    @staticmethod
    def run_adb_shell(shell_args, device_serial):
        """Runs "adb shell" with the given arguments.

        Args:
          shell_args: array of arguments to pass to adb shell.
          device_serial: if not empty, will add the appropriate command-line
              parameters so that adb targets the given device.
        Returns:
          A tuple containing the adb output (stdout & stderr) and the return
          code from adb. Will exit if adb fails to start.
        """
        adb_command = AdbUtils.construct_adb_shell_command(shell_args, device_serial)
        adb_output = []
        adb_return_code = 0
        try:
            adb_output = subprocess.check_output(adb_command, stderr=subprocess.STDOUT,
                                                 shell=False, universal_newlines=True)
        except OSError as error:
            # This usually means that the adb executable was not found in the path.
            sys.stderr.write('\nThe command "%s" failed with the following error:\n'
                             % ' '.join(adb_command))
            sys.stderr.write('  %s\n' % str(error))
            sys.stderr.write('Is adb in your path?\n')
            adb_return_code = error.errno
            adb_output = error
        except subprocess.CalledProcessError as error:
            # The process exited with an error.
            adb_return_code = error.returncode
            adb_output = error.output
        return (adb_output, adb_return_code)

    @staticmethod
    def do_preprocess_adb_cmd(command, serial):
        """Run a single adb shell command; return its output or None on error."""
        args = [command]
        dump, ret_code = AdbUtils.run_adb_shell(args, serial)
        if ret_code != 0:
            return None
        return ''.join(dump)
def parse_atrace_line(line, pagecache_stats, app_name, pid, tid, build_file):
    """Parses one trace line, updating stats, counters and optional outputs.

    Args:
      line: raw ftrace/atrace text line.
      pagecache_stats: PagecacheStats-like object receiving add/remove pages.
      app_name / pid / tid: optional filters; non-matching lines are skipped.
      build_file: optional Build_File collecting per-slice xlsx data.

    Side effects: updates module globals (slice_end, add/delete_info_list,
    input/output_list_duration) and writes to the global updated_trace file
    when it is open.
    """
    global updated_trace
    global slice_end
    # global list_time_lim_title
    global add_info_list
    global delete_info_list
    global add_delete
    global SLICE_LENHTH
    global tidMap
    global START_TIME
    global TRUNC_START
    global TRUNC_END
    global output_list_duration
    global input_list_duration
    # Find a mm_filemap_add_to_page_cache entry, e.g.:
    # <2288>-2288 ( 1887) [001] .... 360374.896660: mm_filemap_add_to_page_cache: dev 179:64 ino 26b7c0 page=fffffff6988c7680 pfn=319668 ofs=487424
    # NOTE(review): regex reconstructed from a wiki export that stripped
    # backslashes/asterisks; group numbers match the group(N) usage below.
    line_match = re.match(r'(.*)-(\d+)\s+\((\s*\d+|\s*-----)\)\s+\[(\d+)\]\s+(.{4})\s+'
                          r'(\d+\.\d+):\s+'
                          r'(mm_filemap_add_to_page_cache|mm_filemap_delete_from_page_cache):\s+'
                          r'dev\s+(\d+:\d+)\s+ino\s+([0-9a-z]+)\s+page=([0-9a-z]+)\s+'
                          r'pfn=(\d+)\s+ofs=(\d+)', line)
    if line_match is not None:
        # Device number packs major:minor into one int.
        devs = str(line_match.group(8)).split(':')
        device_number = int(devs[0]) << 8 | int(devs[1])
        if device_number == 0:
            return
        inode = int(line_match.group(9), 16)
        # Parse the matched fields into a dict.
        line_info = {}
        line_info['task'] = line_match.group(1)              # task name
        line_info['tid'] = line_match.group(2)               # tid
        if line_match.group(3).strip() == '-----':
            # No tgid in the trace line; recover the pid via tidMap, falling
            # back to the tid itself.
            if tidMap.get(line_info['tid']) is not None:
                line_info['pid'] = tidMap[line_info['tid']]
            else:
                line_info['pid'] = line_info['tid']
        else:
            line_info['pid'] = line_match.group(3).strip()   # pid
        line_info['cpu#'] = line_match.group(4)              # [001] cpu
        line_info['mode'] = line_match.group(5)              # d..3 mode flags
        line_info['time'] = line_match.group(6)              # timestamp
        line_info['page_cache_type'] = line_match.group(7)   # add/delete event
        line_info['dev'] = line_match.group(8)               # dev 179:65
        line_info['ino'] = line_match.group(9)               # ino 5dff
        line_info['page'] = line_match.group(10)             # page=ffffffde97ec8440
        line_info['pfn'] = line_match.group(11)              # pfn=636113
        line_info['ofs'] = line_match.group(12)              # ofs=5107712
        line_info['filename_and_size'] = pagecache_stats.get_filename_and_size(device_number, inode)
        # Timestamp relative to the first trace line, rounded to 10 ms.
        timestamp = float("{:.2f}".format(float(line_match.group(6)) - START_TIME))
        if timestamp < TRUNC_START:
            return
        # -----------------------------------------------------------------
        # Per-slice xlsx accounting.
        if build_file is not None:
            if slice_end == 0:
                # Open a new time slice and its worksheet.
                slice_end = timestamp + SLICE_LENHTH
                build_file.add_sub_sheet(str(timestamp) + '-' + str(slice_end))
            if timestamp >= slice_end or timestamp >= TRUNC_END:
                # Slice boundary reached: record this event, flush the slice.
                build_file.get_data_line(line_info)
                if line_match.group(7) == 'mm_filemap_add_to_page_cache':      # I/O input
                    add_info_list.append(line_info)
                elif line_match.group(7) == 'mm_filemap_delete_from_page_cache':  # I/O output
                    delete_info_list.append(line_info)
                span = str(slice_end - SLICE_LENHTH) + '-' + str(slice_end)
                build_file.build_io_data_sheet(span, len(add_info_list), len(delete_info_list))
                build_file.add_sub_input_data_chart(span, build_file.input)
                build_file.add_sub_output_data_chart(span, build_file.output)
                add_info_list = []
                delete_info_list = []
                slice_end = 0
            else:
                # Accumulate inside the open slice.
                build_file.get_data_line(line_info)
                if line_match.group(7) == 'mm_filemap_add_to_page_cache':      # I/O input
                    add_info_list.append(line_info)
                elif line_match.group(7) == 'mm_filemap_delete_from_page_cache':  # I/O output
                    delete_info_list.append(line_info)
        # -----------------------------------------------------------------
        # Per-pid counters inside the [TRUNC_START, TRUNC_END] window.
        if TRUNC_START <= timestamp <= TRUNC_END:
            key = line_info['pid']
            if line_match.group(7) == 'mm_filemap_add_to_page_cache':
                if key in input_list_duration:
                    input_list_duration[key] += 1
                else:
                    input_list_duration[key] = 1
            elif line_match.group(7) == 'mm_filemap_delete_from_page_cache':
                if key in output_list_duration:
                    output_list_duration[key] += 1
                else:
                    output_list_duration[key] = 1
        # -----------------------------------------------------------------
        if timestamp > TRUNC_END:
            return
        if updated_trace is not None:
            # Rewrite the event as a begin/end systrace slice pair.
            updated_trace.write(line_info['task'] + '-' + line_info['tid'] + ' (' + line_match.group(3) + ') [' + line_info['cpu#'] + '] ' + line_info['mode'] + ' ' + line_info['time']
                                + ': tracing_mark_write: B|' + line_info['pid'] + '|' + line_info['page_cache_type']
                                + ': dev ' + line_info['dev'] + ' ino ' + line_info['ino'] + ' page=' + line_info['page'] + ' pfn=' + line_info['pfn'] + ' ofs=' + line_info['ofs']
                                + '--' + pagecache_stats.get_filename_and_size(device_number, inode) + '\n'
                                + line_info['task'] + '-' + line_info['tid'] + ' (' + line_match.group(3) + ') [' + line_info['cpu#'] + '] ' + line_info['mode'] + ' ' + line_info['time'] + ': tracing_mark_write: E|' + line_info['pid'] + '\n')
        # Apply the -a/-p/-t filters before touching the stats.
        if app_name is not None and not (app_name in line_match.group(0)):
            return
        if pid is not None and pid != int(line_info['pid']):
            return
        if tid is not None and tid != int(line_info['tid']):
            return
        if line_info['page_cache_type'] == 'mm_filemap_add_to_page_cache':
            pagecache_stats.add_page(device_number, inode, line_info['ino'])
        elif line_info['page_cache_type'] == 'mm_filemap_delete_from_page_cache':
            pagecache_stats.remove_page(device_number, inode, line_info['ino'])
    elif updated_trace is not None:
        # Non-pagecache lines are copied through unchanged so the rewritten
        # systrace stays complete.
        updated_trace.write(line)
def build_inode_lookup_table(inode_dump):
    """Builds {(device, inode): (filename, size)} from 'stat' output lines.

    Each input line is expected as '<dev> <inode> <size> <path>' (an optional
    'd' suffix on the device number is tolerated); non-matching lines are
    skipped.
    """
    inode2filename = {}
    for line in inode_dump.splitlines():
        result = re.match('([0-9]+)d? ([0-9]+) ([0-9]+) (.*)', line)
        if result:
            # Size is kept as a string; consumers convert with int() as needed.
            inode2filename[(int(result.group(1)), int(result.group(2)))] = (result.group(4), result.group(3))
    return inode2filename
def get_inode_data(datafile, dumpfile, adb_serial):
    """Returns raw 'stat' output, from a cache file or from the device.

    Args:
      datafile: if set and existing, read the cached dump from this path.
      dumpfile: if set, also write the dump to this path for later reuse.
      adb_serial: adb device serial (used only when downloading).
    Exits the process if the device query fails.
    """
    if datafile is not None and os.path.isfile(datafile):
        print('Using cached inode data from ' + datafile)
        with open(datafile, 'r') as f:
            stat_dump = f.read()
    else:
        # Build inode maps if we were tracing page cache.
        print('Downloading inode data from device')
        stat_dump = AdbUtils.do_preprocess_adb_cmd('find /system /data /vendor '
                                                   '-exec stat -c "%d %i %s %n" {} ;', adb_serial)
        if stat_dump is None:
            print('Could not retrieve inode data from device.')
            sys.exit(1)
    if dumpfile is not None:
        print('Storing inode data in ' + dumpfile)
        with open(dumpfile, 'w') as f:
            f.write(stat_dump)
    sys.stdout.write('Done.\n')
    return stat_dump
def read_and_parse_trace_file(trace_file, pagecache_stats, app_name, pid, tid):
    """Parses a saved trace file and prints the resulting statistics.

    When the module-level data_file is set, also builds the xlsx report via
    Build_File, flushing the final (possibly partial) time slice at EOF.
    """
    global data_file
    global slice_end
    global add_info_list
    global delete_info_list
    global SLICE_LENHTH
    global slice_start
    build_file = None
    if data_file is not None:
        build_file = Build_File(data_file)
    for line in trace_file:
        parse_atrace_line(line, pagecache_stats, app_name, pid, tid, build_file)
    if build_file is not None:
        # Flush the last open slice, if any.
        if slice_end != 0:
            span = str(slice_end - SLICE_LENHTH) + '-' + str(slice_end)
            build_file.build_io_data_sheet(span, len(add_info_list), len(delete_info_list))
            build_file.add_sub_input_data_chart(span, build_file.input)
            build_file.add_sub_output_data_chart(span, build_file.output)
        build_file.build_io_chart()
        build_file.workbook.close()
    pagecache_stats.print_stats()
    pagecache_stats.print_top_io_pid()
    print('cost time ' + '{:.2f}'.format(time.time() - slice_start) + 's')
def read_and_parse_trace_data_live(stdout, stderr, pagecache_stats, app_name, pid, tid):
    """Streams live atrace output and shows pagecache stats in a curses UI.

    Reader threads copy the adb stdout/stderr pipes into queues; the main
    loop drains them, updates pagecache_stats and redraws the screen every
    STATS_UPDATE_INTERVAL seconds. Press 'r' to reset stats, CTRL-C to quit.
    """
    # Start reading trace data.
    stdout_queue = Queue.Queue(maxsize=128)
    stderr_queue = Queue.Queue()
    stdout_thread = FileReaderThread(stdout, stdout_queue,
                                     text_file=True, chunk_size=64)
    stderr_thread = FileReaderThread(stderr, stderr_queue,
                                     text_file=True)
    stdout_thread.start()
    stderr_thread.start()
    stdscr = curses.initscr()
    try:
        height, width = stdscr.getmaxyx()
        curses.noecho()
        curses.cbreak()
        stdscr.keypad(True)
        stdscr.nodelay(True)
        stdscr.refresh()
        # We need at least a 30x100 window.
        used_width = max(width, 100)
        used_height = max(height, 30)
        # Create a pad for pagecache stats.
        pagecache_pad = curses.newpad(used_height - 2, used_width)
        stdscr.addstr(used_height - 1, 0, 'KEY SHORTCUTS: (r)eset stats, CTRL-c to quit')
        # is_alive() replaces isAlive(), which was removed in Python 3.9.
        while (stdout_thread.is_alive() or stderr_thread.is_alive() or
               not stdout_queue.empty() or not stderr_queue.empty()):
            while not stderr_queue.empty():
                # Pass along errors from adb.
                line = stderr_queue.get()
                sys.stderr.write(line)
            while True:
                try:
                    line = stdout_queue.get(True, STATS_UPDATE_INTERVAL)
                    parse_atrace_line(line, pagecache_stats, app_name, pid, tid, None)
                except Queue.Empty:
                    break
            key = ''
            try:
                key = stdscr.getkey()
            except curses.error:
                # No key pressed within the nodelay window.
                pass
            if key == 'r':
                pagecache_stats.reset_stats()
            pagecache_stats.print_stats_curses(pagecache_pad)
    except Exception as e:
        curses.endwin()
        print(e)
    finally:
        curses.endwin()
        # The threads should already have stopped, so this is just for cleanup.
        stdout_thread.join()
        stderr_thread.join()
        stdout.close()
        stderr.close()
class Build_File():
    """Builds the xlsx workbook holding per-slice IO data and charts."""

    # NOTE: the wiki export stripped the double underscores; callers invoke
    # Build_File(file_name), so this must be __init__.
    def __init__(self, file_name):
        self.workbook = xlsxwriter.Workbook(file_name)
        self.worksheet_io = self.workbook.add_worksheet('io_chart')
        # Cell formats reused across sheets.
        self.format_tit = self.workbook.add_format(
            {'bold': True, 'border': 1, 'font_color': 'black', 'font_size': 12, 'align': 'center', 'bg_color': '#FFFF00'})
        self.format_sub_r = self.workbook.add_format({'align': 'center', 'border': 1, 'bg_color': '#FFFAFA'})
        self.format_sub_b = self.workbook.add_format({'align': 'center', 'border': 1, 'bg_color': '#F0F8FF'})
        self.format_sub = self.workbook.add_format({'align': 'center'})
        self.format_red = self.workbook.add_format(
            {'bold': True, 'border': 1, 'font_color': 'black', 'font_size': 12, 'align': 'center', 'bg_color': '#FF0000'})
        self.format_blue = self.workbook.add_format(
            {'bold': True, 'border': 1, 'font_color': 'black', 'font_size': 12, 'align': 'center', 'bg_color': '#1E90FF'})
        self.format_purple = self.workbook.add_format(
            {'bold': True, 'border': 1, 'font_color': 'black', 'font_size': 12, 'align': 'center', 'bg_color': '#FF00FF'})
        # Header row of the summary sheet.
        self.worksheet_io.write(0, 0, 'time', self.format_tit)
        self.worksheet_io.write(0, 1, 'add(page_number)', self.format_red)
        self.worksheet_io.write(0, 2, 'add_speed(MB/s)', self.format_red)
        self.worksheet_io.write(0, 3, 'delete(page_number)', self.format_blue)
        self.worksheet_io.write(0, 4, 'delete_speed(MB/s)', self.format_blue)
        self.worksheet_io.write(0, 5, 'total_pagecache(KB)', self.format_purple)
        self.input = {}      # pid -> add count, for the current slice
        self.output = {}     # pid -> delete count, for the current slice
        self.sub_sheet = {}  # current per-slice worksheet

    def base2order(self, number):
        """Return the English ordinal string for a number (1st, 2nd, ...)."""
        if number > 3 or number < 1:
            return (str(number) + 'th')
        elif number == 1:
            return (str(number) + 'st')
        elif number == 2:
            return (str(number) + 'nd')
        elif number == 3:
            return (str(number) + 'rd')

    def build_io_data_sheet(self, time, add_numb, delete_numb):
        """Append one summary row (counts, speeds, running total) for a slice.

        Args:
          time: slice label, also the per-slice sheet name linked from col A.
          add_numb / delete_numb: event counts in the slice.
        """
        global total_pagecache
        rowmax = self.worksheet_io.dim_rowmax
        sys.stdout.write('processing the ' + self.base2order(rowmax) + ' slice...\r')
        sys.stdout.flush()  # fixed: original was missing the call parentheses
        self.worksheet_io.write_url(rowmax + 1, 0, 'internal:\'' + time + '\'!A1:B2', string=time)
        rowmax = self.worksheet_io.dim_rowmax
        # 4 KB per page; running delta of the pagecache footprint.
        total_pagecache = total_pagecache + float("{:.2f}".format((add_numb - delete_numb) * 4))
        self.worksheet_io.write_row('B' + str(rowmax + 1),
                                    [add_numb,
                                     float("{:.2f}".format(add_numb * 4 / SLICE_LENHTH / 1024)),
                                     delete_numb,
                                     float("{:.2f}".format(delete_numb * 4 / SLICE_LENHTH / 1024)),
                                     total_pagecache])

    def build_io_chart(self):
        """Insert the add/delete speed chart and the total-pagecache chart."""
        rowmax = self.worksheet_io.dim_rowmax
        chart = self.workbook.add_chart({'type': 'line'})
        chart.add_series({
            'categories': ['io_chart', 1, 0, rowmax, 0],
            'values': ['io_chart', 1, 2, rowmax, 2],
            'line': {'color': 'red'},
            'name': 'add'
        })
        chart.add_series({
            'categories': ['io_chart', 1, 0, rowmax, 0],
            'values': ['io_chart', 1, 4, rowmax, 4],
            'line': {'color': 'blue'},
            'name': 'delete'
        })
        chart.set_y_axis({
            'name': 'speed(MB/s)',
            'name_font': {'size': 14, 'bold': True},
            'num_font': {'italic': True},
        })
        self.worksheet_io.insert_chart('G4', chart)
        chart1 = self.workbook.add_chart({'type': 'line'})
        chart1.add_series({
            'categories': ['io_chart', 1, 0, rowmax, 0],
            'values': ['io_chart', 1, 5, rowmax, 5],
            'line': {'color': 'purple'},
            'name': 'total_pagecache'
        })
        chart1.set_y_axis({
            'name': 'KB',
            'name_font': {'size': 14, 'bold': True},
            'num_font': {'italic': True},
        })
        self.worksheet_io.insert_chart('G24', chart1)

    def add_sub_sheet(self, sheet_name):
        """Start a new per-slice worksheet and reset the per-slice counters."""
        self.sub_sheet = self.workbook.add_worksheet(sheet_name)
        title_tuple = ('task', 'pid', 'dev', 'page', 'ofs', 'filename_and_size', 'ino', 'page_cache_type', 'mode',
                       'time', 'tid', 'pfn', 'cpu#')
        self.sub_sheet.write_row('A1', title_tuple, self.format_tit)
        self.input = {}
        self.output = {}

    def get_data_line(self, line_info):
        """Append one parsed event row and bump the per-pid slice counters.

        NOTE(review): the row is written as tuple(line_info.values()), so the
        column order depends on dict ordering matching the title row - holds
        on Python 3.7+/CPython insertion order, but is not guaranteed on
        older interpreters.
        """
        line_info_tuple = tuple(line_info.values())
        rowmax = self.sub_sheet.dim_rowmax
        self.sub_sheet.write_row('A' + str(rowmax + 2), line_info_tuple)
        # key = line_info['pid'] + '-' + line_info['task']
        key = line_info['pid']
        if line_info['page_cache_type'] == 'mm_filemap_add_to_page_cache':
            if key in self.input:
                self.input[key] += 1
            else:
                self.input[key] = 1
        elif line_info['page_cache_type'] == 'mm_filemap_delete_from_page_cache':
            if key in self.output:
                self.output[key] += 1
            else:
                self.output[key] = 1

    def add_sub_input_data_chart(self, sheet_name, input):
        """Write the slice's top-20 input pids and insert their column chart.

        NOTE(review): chart category/value columns (14/16) are hard-coded to
        the layout produced by add_sub_sheet - confirm if columns change.
        """
        colmax = self.sub_sheet.dim_colmax
        self.sub_sheet.merge_range(0, colmax + 2, 0, colmax + 4, "input_TOP20_LIST", self.format_red)
        self.sub_sheet.write(1, colmax + 2, "pid", self.format_tit)
        self.sub_sheet.write(1, colmax + 3, "in_count", self.format_tit)
        self.sub_sheet.write(1, colmax + 4, "input_speed(MB/s)", self.format_tit)
        input_top20 = sorted(input.items(), key=lambda d: d[1], reverse=True)[0:20]
        for x in range(0, len(input_top20)):
            self.sub_sheet.write(2 + x, colmax + 2, input_top20[x][0])
            self.sub_sheet.write(2 + x, colmax + 3, input_top20[x][1], self.format_sub)
            in_speed = int(input_top20[x][1]) * 4 / SLICE_LENHTH / 1024
            self.sub_sheet.write(2 + x, colmax + 4, float("{:.2f}".format(in_speed)), self.format_sub)
        chart = self.workbook.add_chart({'type': 'column'})
        chart.add_series({
            'categories': [sheet_name, 2, 14, 22, 14],
            'values': [sheet_name, 2, 16, 22, 16],
            'fill': {'color': '#FF9900'},
            'name': 'input_TOP20'
        })
        chart.set_x_axis({
            'name': 'pid',
            'name_font': {'size': 12, 'bold': True},
        })
        chart.set_y_axis({
            'name': 'speed(MB/s)',
            'name_font': {'size': 14, 'bold': True},
            'num_font': {'italic': True},
        })
        self.sub_sheet.insert_chart('N24', chart)

    def add_sub_output_data_chart(self, sheet_name, output):
        """Write the slice's top-20 output pids and insert their column chart."""
        colmax = self.sub_sheet.dim_colmax
        self.sub_sheet.merge_range(0, colmax + 6, 0, colmax + 8, "output_TOP20_LIST", self.format_blue)
        self.sub_sheet.write(1, colmax + 6, "pid", self.format_tit)
        self.sub_sheet.write(1, colmax + 7, "out_count", self.format_tit)
        self.sub_sheet.write(1, colmax + 8, "output_speed(MB/s)", self.format_tit)
        output_top20 = sorted(output.items(), key=lambda d: d[1], reverse=True)[0:20]
        for x in range(0, len(output_top20)):
            self.sub_sheet.write(2 + x, colmax + 6, output_top20[x][0])
            self.sub_sheet.write(2 + x, colmax + 7, output_top20[x][1], self.format_sub)
            out_speed = int(output_top20[x][1]) * 4 / SLICE_LENHTH / 1024
            self.sub_sheet.write(2 + x, colmax + 8, float("{:.2f}".format(out_speed)), self.format_sub)
        chart = self.workbook.add_chart({'type': 'column'})
        chart.add_series({
            'categories': [sheet_name, 2, 22, 22, 22],
            'values': [sheet_name, 2, 24, 22, 24],
            'fill': {'color': '#4169E1'},
            'name': 'output_TOP20'
        })
        chart.set_x_axis({
            'name': 'pid',
            'name_font': {'size': 12, 'bold': True},
        })
        chart.set_y_axis({
            'name': 'speed(MB/s)',
            'name_font': {'size': 14, 'bold': True},
            'num_font': {'italic': True},
        })
        self.sub_sheet.insert_chart('V24', chart)
def buildTidMap(trace_file):
    """Builds the global tid -> pid map from tracing_mark_write events.

    Exits with an error if a line's tgid disagrees with the pid embedded in
    the marker, or if a tid maps to two different pids within one trace.
    NOTE(review): regex reconstructed from a wiki export that stripped
    backslashes; group numbers match the group(N) usage below.
    """
    global tidMap
    for line in trace_file:
        line_match = re.match(r'.*-(\d+)\s+\((\s*\d+|\s*-----)\)\s+\[\d+\]\s+.{4}\s+'
                              r'(\d+\.\d+):\s+tracing_mark_write:\s+[B|E]\|(\d+)', line)
        if line_match is not None:
            tid = line_match.group(1)
            tgid = '-1'
            if '-----' != line_match.group(2).strip():
                tgid = line_match.group(2).strip()
            pid = line_match.group(4)
            if tgid != '-1' and tgid != pid:
                sys.stderr.write('Error: different tgid and pid\n')
                sys.exit(1)
            if tidMap.get(tid) is not None and tidMap[tid] != pid:
                sys.stderr.write('Error: pid changed, we need a separator!\n')
                sys.exit(1)
            tidMap[tid] = pid
def getStartTime(trace_file):
    """Sets the global START_TIME to the timestamp of the first trace line.

    Example lines:
      <idle>-0     (-----) [000] d..2 16638.088423: cpu_idle: state=4294967295 cpu_id=0
      <280>-280    ( 280)  [004] d..3 16638.088544: sched_waking: comm=kswapd0 pid=139 prio=120 target_cpu=007
    NOTE(review): regex reconstructed from a wiki export; the pid group is
    non-capturing so group(1) is the timestamp, matching the code below.
    """
    global START_TIME
    for line in trace_file:
        line_match = re.match(r'\s*.*-\d+\s+\((?:\s*\d+|\s*-----)\)\s+\[\d+\]\s+.{4}\s+'
                              r'(\d+\.\d+):\s+', line)
        if line_match is not None:
            START_TIME = float(line_match.group(1).strip())
            break
def parse_options(argv):
    """Parses and validates command-line options.

    Returns (options, categories) from optparse. Errors out when -d/-i are
    combined or when more than one of the -a/-p/-t filters is given.
    """
    usage = 'Usage: %prog [options]'
    desc = 'Example: %prog'
    parser = optparse.OptionParser(usage=usage, description=desc)
    parser.add_option('-d', dest='inode_dump_file', metavar='FILE',
                      help='Dump the inode data read from a device to a file.'
                           ' This file can then be reused with the -i option to speed'
                           ' up future invocations of this script.')
    parser.add_option('-i', dest='inode_data_file', metavar='FILE',
                      help='Read cached inode data from a file saved earlier with the'
                           ' -d option.')
    parser.add_option('-s', '--serial', dest='device_serial', type='string',
                      help='adb device serial number')
    parser.add_option('-f', dest='trace_file', metavar='FILE',
                      help='Show stats from a trace file, instead of running live.')
    parser.add_option('-a', dest='app_name', type='string',
                      help='filter a particular app')
    parser.add_option('-p', dest='pid', type='int',
                      help='filter a particular process')
    parser.add_option('-t', dest='tid', type='int',
                      help='filter a particular thread')
    parser.add_option('-u', dest='updated_trace', metavar='FILE',
                      help='Transform pagecache record to systrace slices, output'
                           ' the new systrace to a file')
    parser.add_option('-r', dest='slice_length',
                      help='Set time slice of IO statistics. The unit is second, default 0.1')
    parser.add_option('-o', '--out', dest='data_file',
                      help='output IO statistics xlsx. -o ***.xlsx')
    parser.add_option('--trunc_start', dest='trunc_start',
                      help='filter records after the specified time')
    # Fixed copy-paste in the help text: trunc_end drops records AFTER it.
    parser.add_option('--trunc_end', dest='trunc_end',
                      help='filter records before the specified time')
    options, categories = parser.parse_args(argv[1:])
    if options.inode_dump_file and options.inode_data_file:
        parser.error('options -d and -i can\'t be used at the same time')
    # -a/-p/-t are mutually exclusive filters.
    num_of_filters = 0
    if options.app_name:
        num_of_filters += 1
    if options.pid:
        num_of_filters += 1
    if options.tid:
        num_of_filters += 1
    if num_of_filters > 1:
        parser.error('options -a/-p/-t, can\'t be used at the same time')
    return (options, categories)
def main():
    """Entry point: wires options to the file-based or live analysis path."""
    global slice_start
    global SLICE_LENHTH
    global TRUNC_START
    global TRUNC_END
    global data_file
    TRUNC_START = 0.0
    # sys.maxint is Python 2 only; sys.maxsize is the same value on CPython 2
    # and also exists on Python 3.
    TRUNC_END = sys.maxsize
    data_file = None
    slice_start = time.time()
    options, categories = parse_options(sys.argv)
    if options.slice_length is not None:
        SLICE_LENHTH = float(options.slice_length)
    if options.trunc_start is not None:
        TRUNC_START = float(options.trunc_start)
    if options.trunc_end is not None:
        TRUNC_END = float(options.trunc_end)
    if options.data_file is not None:
        data_file = options.data_file
    # Load inode data for this device.
    inode_data = get_inode_data(options.inode_data_file, options.inode_dump_file,
                                options.device_serial)
    # Build (dev, inode) -> filename hash.
    inode_lookup_table = build_inode_lookup_table(inode_data)
    # Init pagecache stats.
    pagecache_stats = PagecacheStats(inode_lookup_table)
    global updated_trace
    updated_trace = None
    if options.updated_trace is not None:
        print('create new systrace file ' + options.updated_trace)
        updated_trace = open(options.updated_trace, 'wb')
    if options.trace_file is not None:
        # Offline mode: analyze a saved trace file.
        if not os.path.isfile(options.trace_file):
            sys.stderr.write('Couldn\'t load trace file.\n')
            sys.exit(1)
        trace_file = open(options.trace_file, 'r')
        getStartTime(trace_file)
        trace_file.seek(0, 0)
        buildTidMap(trace_file)
        trace_file.seek(0, 0)
        read_and_parse_trace_file(trace_file, pagecache_stats,
                                  options.app_name, options.pid, options.tid)
    else:
        # Live mode: construct and execute the streaming trace command.
        trace_cmd = AdbUtils.construct_adb_shell_command(['atrace', '--stream', 'pagecache'],
                                                         options.device_serial)
        try:
            atrace = subprocess.Popen(trace_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                                      stderr=subprocess.PIPE)
        except OSError as error:
            sys.stderr.write('The command failed\n')
            sys.exit(1)
        read_and_parse_trace_data_live(atrace.stdout, atrace.stderr, pagecache_stats,
                                       options.app_name, options.pid, options.tid)
    if options.updated_trace is not None:
        print('finish writing new systrace file ' + options.updated_trace)
        print('cost time ' + '{:.2f}'.format(time.time() - slice_start) + ' seconds')
        updated_trace.close()


# The wiki export stripped the double underscores from the standard guard.
if __name__ == "__main__":
    main()