bitbake: cooker/utils: Improve parsing profiling

Currently, each of the cooker's parsing processes dumps an individual
profile. This is workable, but it means the absolute number of function
calls for a given workload can be tricky to determine, since the parsing
of a particular recipe may land on a different pool thread from run to
run.

This change collects the individual per-process parsing profiles and
processes them into a single profile output. The profile processing
function in utils needed tweaks to allow this to work.
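
For reference, the merge relies on pstats being able to load several dump
files into one Stats object. A minimal standalone sketch of the idea
(illustrative only, not the bitbake code; the worker names and the summing
workload are made up):

    import cProfile
    import glob
    import pstats

    def fake_parse(name):
        # Stand-in for one parser process: profile some work and dump the
        # raw stats to a per-process file, much as the cooker workers do.
        prof = cProfile.Profile()
        prof.runcall(sum, range(1000000))
        prof.dump_stats("profile-parse-%s.log" % name)

    for name in ("PoolWorker-1", "PoolWorker-2"):
        fake_parse(name)

    # Load every dump into a single pstats.Stats object so the call counts
    # are summed across workers, then write one processed report.
    logs = glob.glob("profile-parse-*.log")
    with open("profile-parse.log.processed", "w") as pout:
        p = pstats.Stats(*logs, stream=pout)
        p.sort_stats("time")
        p.print_stats()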

(Bitbake rev: d3d2541aacd1ea560da0d8b25a3ea3f0563dee70)

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Author: Richard Purdie <richard.purdie@linuxfoundation.org>
Date:   2015-05-27 17:31:10 +01:00
Commit: 08b77c8cef (parent: 553267d8d9)

2 changed files with 21 additions and 5 deletions

--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py

@@ -1798,8 +1798,6 @@ class Parser(multiprocessing.Process):
         finally:
             logfile = "profile-parse-%s.log" % multiprocessing.current_process().name
             prof.dump_stats(logfile)
-            bb.utils.process_profilelog(logfile)
-            print("Raw profiling information saved to %s and processed statistics to %s.processed" % (logfile, logfile))
 
     def realrun(self):
         if self.init:
@@ -1869,6 +1867,7 @@ class CookerParser(object):
         self.current = 0
         self.num_processes = int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS", True) or
                                  multiprocessing.cpu_count())
+        self.process_names = []
 
         self.bb_cache = bb.cache.Cache(self.cfgdata, self.cfghash, cooker.caches_array)
         self.fromcache = []
@@ -1904,6 +1903,7 @@ class CookerParser(object):
             for i in range(0, self.num_processes):
                 parser = Parser(self.jobs, self.result_queue, self.parser_quit, init, self.cooker.configuration.profile)
                 parser.start()
+                self.process_names.append(parser.name)
                 self.processes.append(parser)
 
         self.results = itertools.chain(self.results, self.parse_generator())
@@ -1947,6 +1947,16 @@ class CookerParser(object):
         multiprocessing.util.Finalize(None, sync.join, exitpriority=-100)
         bb.codeparser.parser_cache_savemerge(self.cooker.data)
         bb.fetch.fetcher_parse_done(self.cooker.data)
+        if self.cooker.configuration.profile:
+            profiles = []
+            for i in self.process_names:
+                logfile = "profile-parse-%s.log" % i
+                if os.path.exists(logfile):
+                    profiles.append(logfile)
+
+            pout = "profile-parse.log.processed"
+            bb.utils.process_profilelog(profiles, pout = pout)
+            print("Processed parsing statistics saved to %s" % (pout))
 
     def load_cached(self):
         for filename, appends in self.fromcache:

--- a/bitbake/lib/bb/utils.py
+++ b/bitbake/lib/bb/utils.py

@@ -908,11 +908,17 @@ def cpu_count():
 def nonblockingfd(fd):
     fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)
 
-def process_profilelog(fn):
-    pout = open(fn + '.processed', 'w')
+def process_profilelog(fn, pout = None):
+    # Either call with a list of filenames and set pout or a filename and optionally pout.
+    if not pout:
+        pout = fn + '.processed'
+    pout = open(pout, 'w')
 
     import pstats
-    p = pstats.Stats(fn, stream=pout)
+    if isinstance(fn, list):
+        p = pstats.Stats(*fn, stream=pout)
+    else:
+        p = pstats.Stats(fn, stream=pout)
     p.sort_stats('time')
     p.print_stats()
     p.print_callers()
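
With this tweak, process_profilelog() keeps its old single-file behaviour
and gains the list form used by the cooker code above. A rough usage
sketch, assuming bitbake's lib directory is on sys.path and that the
example log file names exist:

    import bb.utils

    # Single filename, as before: the report is written to <file>.processed.
    bb.utils.process_profilelog("profile-parse-PoolWorker-1.log")

    # List of filenames: pout must be given; the dumps are merged into one
    # set of statistics before the report is written out.
    logs = ["profile-parse-PoolWorker-1.log", "profile-parse-PoolWorker-2.log"]
    bb.utils.process_profilelog(logs, pout="profile-parse.log.processed")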