Add script to compare a Frame Profile JSON stats file vs. baseline.

Extract the `latest_file()` logic replicated in profile_pretty.py and
profile_csv.py out to logsdir.py, and use it for the new profile_cmp.py.

(cherry picked from commit 439cfc97a81f221daaf8ba13aa5daa87e8511047)

parent 725e1b7d6f
commit 705ec153c5
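For reference, the new script takes a baseline stats file and, optionally, a test file. A minimal sketch of driving it from Python, since main() simply forwards its arguments to argparse; the baseline path below is a made-up placeholder and this snippet is illustrative, not part of the commit:

# Illustrative only: compare the newest Frame Profile stats file in the
# viewer logs directory against a saved baseline, treating differences
# under 5% as ignorable. 'old/profile.baseline.json' is a placeholder path.
import profile_cmp
profile_cmp.main('--epsilon', '5', 'old/profile.baseline.json')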
logsdir.py (new file)
@@ -0,0 +1,46 @@
#!/usr/bin/env python3
"""\
@file   logsdir.py
@author Nat Goodspeed
@date   2024-09-12
@brief  Locate the Second Life logs directory for the current user on the
        current platform.

$LicenseInfo:firstyear=2024&license=viewerlgpl$
Copyright (c) 2024, Linden Research, Inc.
$/LicenseInfo$
"""

import os
from pathlib import Path
import platform

class Error(Exception):
    pass

# logic used by SLVersionChecker
def logsdir():
    app = 'SecondLife'
    system = platform.system()
    if (system == 'Darwin'):
        base_dir = os.path.join(os.path.expanduser('~'),
                                'Library','Application Support',app)
    elif (system == 'Linux'):
        base_dir = os.path.join(os.path.expanduser('~'),
                                '.' + app.lower())
    elif (system == 'Windows'):
        appdata = os.getenv('APPDATA')
        base_dir = os.path.join(appdata, app)
    else:
        raise ValueError("Unsupported platform '%s'" % system)

    return os.path.join(base_dir, 'logs')

def latest_file(dirpath, pattern):
    files = Path(dirpath).glob(pattern)
    sort = [(p.stat().st_mtime, p) for p in files if p.is_file()]
    sort.sort(reverse=True)
    try:
        return sort[0][1]
    except IndexError:
        raise Error(f'No {pattern} files in {dirpath}')
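A small usage sketch for the helpers above (illustrative, not part of the commit); this is the same call pattern the profile scripts below now use:

# Illustrative only: locate the newest Frame Profile stats file in the
# current user's Second Life logs directory.
from logsdir import Error, latest_file, logsdir

try:
    newest = latest_file(logsdir(), 'profile.*.json')
    print(f'most recent profile: {newest}')
except Error as err:
    # latest_file() raises Error when no matching files exist
    print(err)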
profile_cmp.py (new file)
@@ -0,0 +1,104 @@
#!/usr/bin/env python3
"""\
@file   profile_cmp.py
@author Nat Goodspeed
@date   2024-09-13
@brief  Compare a frame profile stats file with a similar baseline file.

$LicenseInfo:firstyear=2024&license=viewerlgpl$
Copyright (c) 2024, Linden Research, Inc.
$/LicenseInfo$
"""

from datetime import datetime
import json
from logsdir import Error, latest_file, logsdir
from pathlib import Path
import sys

# variance that's ignorable
DEFAULT_EPSILON = 0.03          # 3%

def compare(baseline, test, epsilon=DEFAULT_EPSILON):
    if Path(baseline).samefile(test):
        print(f'{baseline} same as\n{test}\nAnalysis moot.')
        return

    with open(baseline) as inf:
        bdata = json.load(inf)
    with open(test) as inf:
        tdata = json.load(inf)
    print(f'baseline {baseline}\ntestfile {test}')

    for k, tv in tdata['context'].items():
        bv = bdata['context'].get(k)
        if bv != tv:
            print(f'baseline {k}={bv} vs.\ntestfile {k}={tv}')

    btime = bdata['context'].get('time')
    ttime = tdata['context'].get('time')
    if btime and ttime:
        print('testfile newer by',
              datetime.fromisoformat(ttime) - datetime.fromisoformat(btime))

    # The following ignores totals and unused shaders, except to the extent
    # that some shaders were used in the baseline but not in the recent test
    # or vice-versa. While the viewer considers that a shader has been used if
    # 'binds' is nonzero, we exclude any whose 'time' is zero to avoid zero
    # division.
    bshaders = {s['name']: s for s in bdata['shaders'] if s['time'] and s['samples']}
    tshaders = {s['name']: s for s in tdata['shaders'] if s['time']}

    bothshaders = set(bshaders).intersection(tshaders)
    deltas = []
    for shader in bothshaders:
        bshader = bshaders[shader]
        tshader = tshaders[shader]
        bthruput = bshader['samples']/bshader['time']
        tthruput = tshader['samples']/tshader['time']
        delta = (tthruput - bthruput)/bthruput
        if abs(delta) > epsilon:
            deltas.append((delta, shader, bthruput, tthruput))

    # descending order of performance gain
    deltas.sort(reverse=True)
    print(f'{len(deltas)} shaders showed nontrivial performance differences '
          '(million samples/sec):')
    namelen = max(len(s[1]) for s in deltas) if deltas else 0
    for delta, shader, bthruput, tthruput in deltas:
        print(f'  {shader.rjust(namelen)} {delta*100:6.1f}% '
              f'{bthruput/1000000:8.2f} -> {tthruput/1000000:8.2f}')

    tunused = set(bshaders).difference(tshaders)
    print(f'{len(tunused)} baseline shaders not used in test:')
    for s in tunused:
        print(f'  {s}')
    bunused = set(tshaders).difference(bshaders)
    print(f'{len(bunused)} shaders newly used in test:')
    for s in bunused:
        print(f'  {s}')

def main(*raw_args):
    from argparse import ArgumentParser
    parser = ArgumentParser(description="""
%(prog)s compares a baseline JSON file from Develop -> Render Tests -> Frame
Profile to another such file from a more recent test. It identifies shaders
that have gained and lost in throughput.
""")
    parser.add_argument('-e', '--epsilon', type=float, default=int(DEFAULT_EPSILON*100),
                        help="""percent variance considered ignorable (default %(default)s%%)""")
    parser.add_argument('baseline',
                        help="""baseline profile filename to compare against""")
    parser.add_argument('test', nargs='?',
                        help="""test profile filename to compare
                        (default is most recent)""")
    args = parser.parse_args(raw_args)
    compare(args.baseline,
            args.test or latest_file(logsdir(), 'profile.*.json'),
            epsilon=(args.epsilon / 100.))

if __name__ == "__main__":
    try:
        sys.exit(main(*sys.argv[1:]))
    except (Error, OSError, json.JSONDecodeError) as err:
        sys.exit(str(err))
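For orientation, compare() relies on only a few fields of each stats file. Judging from the code above, a profile file looks roughly like the sketch below; the values are invented placeholders, not real viewer output:

# Assumed minimal shape of a Frame Profile stats file, inferred from the
# fields compare() reads; every value here is a made-up placeholder.
example_stats = {
    'context': {
        # compared key-by-key; 'time' is parsed with datetime.fromisoformat()
        'time': '2024-09-13T12:00:00',
    },
    'shaders': [
        # throughput is computed as samples/time for each shader by name
        {'name': 'exampleShader', 'binds': 12, 'time': 0.004, 'samples': 2500000},
    ],
}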
profile_csv.py
@@ -10,17 +10,16 @@ Copyright (c) 2024, Linden Research, Inc.
 $/LicenseInfo$
 """
 
-import logsdir
 import json
-from pathlib import Path
+from logsdir import Error, latest_file, logsdir
 import sys
 
-class Error(Exception):
-    pass
-
 def convert(path, totals=True, unused=True, file=sys.stdout):
     with open(path) as inf:
         data = json.load(inf)
+    # print path to sys.stderr in case user is redirecting stdout
+    print(path, file=sys.stderr)
 
     print('"name", "file1", "file2", "time", "binds", "samples", "triangles"', file=file)
 
     if totals:
@@ -51,19 +50,8 @@ shaders list to full shaders lines.
                         help="""profile filename to convert (default is most recent)""")
 
     args = parser.parse_args(raw_args)
-    if not args.path:
-        logs = logsdir.logsdir()
-        profiles = Path(logs).glob('profile.*.json')
-        sort = [(p.stat().st_mtime, p) for p in profiles]
-        sort.sort(reverse=True)
-        try:
-            args.path = sort[0][1]
-        except IndexError:
-            raise Error(f'No profile.*.json files in {logs}')
-    # print path to sys.stderr in case user is redirecting stdout
-    print(args.path, file=sys.stderr)
-
-    convert(args.path, totals=args.totals, unused=args.unused)
+    convert(args.path or latest_file(logsdir(), 'profile.*.json'),
+            totals=args.totals, unused=args.unused)
 
 if __name__ == "__main__":
     try:
profile_pretty.py
@@ -10,17 +10,15 @@ Copyright (c) 2024, Linden Research, Inc.
 $/LicenseInfo$
 """
 
-import logsdir
 import json
-from pathlib import Path
+from logsdir import Error, latest_file, logsdir
 import sys
 
-class Error(Exception):
-    pass
-
 def pretty(path):
     with open(path) as inf:
         data = json.load(inf)
+    # print path to sys.stderr in case user is redirecting stdout
+    print(path, file=sys.stderr)
     json.dump(data, sys.stdout, indent=4)
 
 def main(*raw_args):
@@ -33,19 +31,7 @@ The file produced by the viewer is a single dense line of JSON.
                         help="""profile filename to pretty-print (default is most recent)""")
 
     args = parser.parse_args(raw_args)
-    if not args.path:
-        logs = logsdir.logsdir()
-        profiles = Path(logs).glob('profile.*.json')
-        sort = [(p.stat().st_mtime, p) for p in profiles]
-        sort.sort(reverse=True)
-        try:
-            args.path = sort[0][1]
-        except IndexError:
-            raise Error(f'No profile.*.json files in {logs}')
-    # print path to sys.stderr in case user is redirecting stdout
-    print(args.path, file=sys.stderr)
-
-    pretty(args.path)
+    pretty(args.path or latest_file(logsdir(), 'profile.*.json'))
 
 if __name__ == "__main__":
     try: