Commit 315ae2d4 authored by Yan Wang

Add support for a trace duration in the host Python compiler.

The basic idea is to give each MmFilemapAddToPageCache event a timestamp.
The timestamp of the first MmFilemapAddToPageCache event received is treated
as the start time, and the end time is the start time plus the duration.
Any MmFilemapAddToPageCache event after the end time is filtered out.

Test: pytest trace2db_test.py
Bug: 137398235
Change-Id: Ib9c439f3ae0ca666eacb08492361217d89adec34
parent ede8b187
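
To make the filtering rule concrete, here is a minimal standalone sketch of the idea described above. It is not the committed code (which works against the SQLAlchemy session, as the diff below shows); the events list and its timestamp attribute are hypothetical stand-ins, and times are in milliseconds.

# Standalone sketch of the duration filter described in the commit message.
# 'events' and '.timestamp' are hypothetical stand-ins for the
# MmFilemapAddToPageCache rows and their raw ftrace entry timestamps.
def filter_by_duration(events, trace_duration=None):
  if trace_duration is None:
    return list(events)  # no duration given: keep every event
  start_time = min(e.timestamp for e in events)  # first event received
  end_time = start_time + trace_duration
  return [e for e in events if e.timestamp <= end_time]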
+33 −8
@@ -23,20 +23,18 @@
 # $> pip3 install --user protobuf sqlalchemy sqlite3
 #
 
-import collections
 import optparse
 import os
 import re
 import sys
+from typing import Iterable, Optional
 
-from typing import Iterable
-
-from lib.inode2filename import Inode2Filename
 from generated.TraceFile_pb2 import *
+from lib.inode2filename import Inode2Filename
 
 parent_dir_name = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
 sys.path.append(parent_dir_name + "/trace_analyzer")
-from lib.trace2db import Trace2Db, MmFilemapAddToPageCache
+from lib.trace2db import Trace2Db, MmFilemapAddToPageCache, RawFtraceEntry
 
 _PAGE_SIZE = 4096 # adb shell getconf PAGESIZE ## size of a memory page in bytes.
 
@@ -165,9 +163,32 @@ def build_protobuf(page_runs, inode2filename, filters=[]):
 
   return trace_file
 
-def query_add_to_page_cache(trace2db: Trace2Db):
+def calc_trace_end_time(trace2db: Trace2Db,
+                        trace_duration: Optional[int]) -> float:
+  """
+  Calculates the end time based on the trace duration.
+  The start time is the first receiving mm file map event.
+  The end time is the start time plus the trace duration.
+  All of them are in milliseconds.
+  """
+  # If the duration is not set, assume all time is acceptable.
+  if trace_duration is None:
+    # float('inf')
+    return RawFtraceEntry.__table__.c.timestamp.type.python_type('inf')
+
+  first_event = trace2db.session.query(MmFilemapAddToPageCache).join(
+      MmFilemapAddToPageCache.raw_ftrace_entry).order_by(
+      RawFtraceEntry.timestamp).first()
+
+  return first_event.raw_ftrace_entry.timestamp + trace_duration
+
+def query_add_to_page_cache(trace2db: Trace2Db, trace_duration: Optional[int]):
+  end_time = calc_trace_end_time(trace2db, trace_duration)
   # SELECT * FROM tbl ORDER BY id;
-  return trace2db.session.query(MmFilemapAddToPageCache).order_by(MmFilemapAddToPageCache.id).all()
+  return trace2db.session.query(MmFilemapAddToPageCache).join(
+      MmFilemapAddToPageCache.raw_ftrace_entry).filter(
+      RawFtraceEntry.timestamp <= end_time).order_by(
+      MmFilemapAddToPageCache.id).all()
 
 def main(argv):
   parser = optparse.OptionParser(usage="Usage: %prog [options]", description="Compile systrace file into TraceFile.pb")
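
A note on the RawFtraceEntry.__table__.c.timestamp.type.python_type('inf') expression above: for a SQLAlchemy Float column, python_type is the built-in float, so the call amounts to float('inf'), exactly as the inline comment hints. A minimal standalone illustration, assuming the timestamp column is declared as Float (which that comment suggests):

# Standalone illustration, not part of the commit.
from sqlalchemy import Column, Float

timestamp = Column(Float, nullable=False)
assert timestamp.type.python_type is float
assert timestamp.type.python_type('inf') == float('inf')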
@@ -188,6 +209,9 @@ def main(argv):
   parser.add_option('-o', dest='output_file', metavar='FILE',
                     help='Output protobuf file')
 
+  parser.add_option('--duration', dest='trace_duration', action="store",
+                    type=int, help='The duration of trace in milliseconds.')
+
   options, categories = parser.parse_args(argv[1:])
 
   # TODO: OptionParser should have some flags to make these mandatory.
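
With the new option, an invocation might look like the line below. Only -o and --duration appear in this diff, so the script name and the trace-input flag are hypothetical placeholders for whatever populates options.trace_file:

$ ./compile_trace.py -t systrace.html -o TraceFile.pb --duration 5000   # hypothetical names; keep only the first 5 seconds of the trace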
@@ -217,7 +241,8 @@
   # TODO: parse multiple trace files here.
   parse_count = trace2db.parse_file_into_db(options.trace_file)
 
-  mm_filemap_add_to_page_cache_rows = query_add_to_page_cache(trace2db)
+  mm_filemap_add_to_page_cache_rows = query_add_to_page_cache(trace2db,
+                                                              options.trace_duration)
   print("DONE. Parsed %d entries into sql db." %(len(mm_filemap_add_to_page_cache_rows)))
 
   page_runs = page_cache_entries_to_runs(mm_filemap_add_to_page_cache_rows)
+15 −3
@@ -19,6 +19,7 @@ import sys
 from sqlalchemy import create_engine
 from sqlalchemy import Column, Date, Integer, Float, String, ForeignKey
 from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.orm import relationship
 
 from sqlalchemy.orm import sessionmaker
 
@@ -43,6 +44,10 @@ class RawFtraceEntry(Base):
   function = Column(String, nullable=False)
   function_args = Column(String, nullable=False)
 
+  # 1:1 relation with MmFilemapAddToPageCache.
+  mm_filemap_add_to_page_cache = relationship("MmFilemapAddToPageCache",
+                                              back_populates="raw_ftrace_entry")
+
   @staticmethod
   def parse_dict(line):
     # '           <...>-5521  (-----) [003] ...1 17148.446877: tracing_mark_write: trace_event_clock_sync: parent_ts=17148.447266'
@@ -155,6 +160,9 @@ class MmFilemapAddToPageCache(Base):
   pfn = Column(Integer, nullable=False)
   ofs = Column(Integer, nullable=False)
 
+  # 1:1 relation with RawFtraceEntry.
+  raw_ftrace_entry = relationship("RawFtraceEntry", uselist=False)
+
   @staticmethod
   def parse_dict(function_args, id = None):
     # dev 253:6 ino b2c7 page=00000000ec787cd9 pfn=1478539 ofs=4096
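
These relationships are what let a page-cache row reach the timestamp of its owning ftrace entry, which the compiler change above and the new test below rely on. A small usage sketch, assuming a Trace2Db session already populated via parse_file_into_db:

# Usage sketch, not part of the commit; assumes a populated Trace2Db session.
row = trace2db.session.query(MmFilemapAddToPageCache).first()
print(row.raw_ftrace_entry.timestamp)  # timestamp of the owning RawFtraceEntry

# The same relationship drives the join/filter used in the compiler above:
rows = trace2db.session.query(MmFilemapAddToPageCache).join(
    MmFilemapAddToPageCache.raw_ftrace_entry).filter(
    RawFtraceEntry.timestamp <= 16137.0).all()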
@@ -251,6 +259,8 @@ def parse_file(filename: str, *args, **kwargs) -> int:
 def parse_file_buf(filebuf, session, engine, raw_ftrace_entry_filter, limit=None) -> int:
   global _FLUSH_LIMIT
   count = 0
+  # count and id are not equal, because count still increases for invalid lines.
+  id = 0
 
   pending_entries = []
   pending_sched_switch = []
@@ -305,9 +315,10 @@ def parse_file_buf(filebuf, session, engine, raw_ftrace_entry_filter, limit=None
       continue
 
     pending_entries.append(raw_ftrace_entry)
+    id = id + 1
 
     if raw_ftrace_entry['function'] == 'sched_switch':
-      sched_switch = SchedSwitch.parse_dict(raw_ftrace_entry['function_args'], count)
+      sched_switch = SchedSwitch.parse_dict(raw_ftrace_entry['function_args'], id)
 
       if not sched_switch:
         print("WARNING: Failed to parse sched_switch: " + l)
@@ -315,7 +326,7 @@ def parse_file_buf(filebuf, session, engine, raw_ftrace_entry_filter, limit=None
         pending_sched_switch.append(sched_switch)
 
     elif raw_ftrace_entry['function'] == 'sched_blocked_reason':
-      sbr = SchedBlockedReason.parse_dict(raw_ftrace_entry['function_args'], count)
+      sbr = SchedBlockedReason.parse_dict(raw_ftrace_entry['function_args'], id)
 
       if not sbr:
         print("WARNING: Failed to parse sched_blocked_reason: " + l)
@@ -323,7 +334,8 @@ def parse_file_buf(filebuf, session, engine, raw_ftrace_entry_filter, limit=None
         pending_sched_blocked_reasons.append(sbr)
 
     elif raw_ftrace_entry['function'] == 'mm_filemap_add_to_page_cache':
-      d = MmFilemapAddToPageCache.parse_dict(raw_ftrace_entry['function_args'], count)
+      d = MmFilemapAddToPageCache.parse_dict(raw_ftrace_entry['function_args'],
+                                             id)
       if not d:
         print("WARNING: Failed to parse mm_filemap_add_to_page_cache: " + l)
       else:
+27 −7
@@ -32,17 +32,10 @@ See also https://docs.pytest.org/en/latest/usage.html
 """
 
 # global imports
-from contextlib import contextmanager
 import io
-import shlex
-import sys
-import typing
-
 from copy import deepcopy
 
 # pip imports
-import pytest
-
 # local imports
 from trace2db import *
 
@@ -197,6 +190,33 @@ NonUserFacing6-5246 ( 1322) [005] .... 16138.357581: mm_filemap_add_to_page_cac
   assert_eq_ignore_id(MmFilemapAddToPageCache(dev=64774, dev_major=253, dev_minor=6,
       ino=0x9a64, page=0x000000006e0f8322, pfn=797894, ofs=4096), second_to_last_row)
 
+def test_timestamp_filter():
+  test_contents = """
+    MediaStoreImpor-27212 (27176) [000] .... 16136.595194: mm_filemap_add_to_page_cache: dev 253:6 ino 7580 page=0000000060e990c7 pfn=677646 ofs=159744
+    NonUserFacing6-5246  ( 1322) [005] .... 16139.357581: mm_filemap_add_to_page_cache: dev 253:6 ino 9a64 page=000000006e0f8322 pfn=797894 ofs=4096
+    MediaStoreImpor-27212 (27176) [000] .... 16136.604126: mm_filemap_add_to_page_cache: dev 253:6 ino b1d8 page=0000000098d4d2e2 pfn=829676 ofs=0
+  """
+
+  t2d = parse_trace_file_to_db(test_contents)
+  session = t2d.session
+
+  end_time = 16137.0
+
+  results = session.query(MmFilemapAddToPageCache).join(
+      MmFilemapAddToPageCache.raw_ftrace_entry).filter(
+      RawFtraceEntry.timestamp <= end_time).order_by(
+      MmFilemapAddToPageCache.id).all()
+
+  assert len(results) == 2
+  assert_eq_ignore_id(
+      MmFilemapAddToPageCache(dev=64774, dev_major=253, dev_minor=6,
+                              ino=0x7580, page=0x0000000060e990c7, pfn=677646,
+                              ofs=159744), results[0])
+  assert_eq_ignore_id(
+      MmFilemapAddToPageCache(dev=64774, dev_major=253, dev_minor=6,
+                              ino=0xb1d8, page=0x0000000098d4d2e2, pfn=829676,
+                              ofs=0), results[1])
+
 
 if __name__ == '__main__':
   pytest.main()
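
To run only the new test, standard pytest selection works (the commit's Test: line runs the whole file):

$ pytest trace2db_test.py -k test_timestamp_filter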