Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Extend the final report generation script to output a json file of results #1825

Open
This pull request wants to merge 12 commits into the base branch `master`.
Open
78 changes: 72 additions & 6 deletions tools/submission/generate_final_report.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
import re
import numpy as np
import pandas as pd

import json

def get_args():
"""Parse commandline."""
Expand Down Expand Up @@ -73,6 +73,7 @@ def main():

output = args.input[:-4]
writer = pd.ExcelWriter(output + '.xlsx', engine='xlsxwriter')
outjsondata = []

indices = {}
indices['closed'] = [
Expand Down Expand Up @@ -153,15 +154,18 @@ def main():
}
}

def MakeWorksheet(df, index, filter_dict, sheet_name, outjsondata=None):
    """Filter `df`, collect the surviving rows as JSON records, and write a
    pivoted worksheet named `sheet_name`.

    Args:
        df: results DataFrame, one row per logged submission result.
        index: column name(s) used as the pivot-table row index.
        filter_dict: mapping of column name (or tuple of column names) to a
            predicate; each predicate receives df[key] and returns a boolean
            mask used to subset `df`.
        sheet_name: name of the Excel sheet written through the enclosing
            `writer`.
        outjsondata: optional list that accumulates the filtered rows as
            dicts. Defaults to a fresh list per call — NOT a shared mutable
            default argument, which would silently accumulate rows across
            calls that omit the parameter.

    Uses `columns`, `columns_order`, `writer`, and `json` from the enclosing
    scope. Returns None; writes nothing when the filters leave no rows.
    """
    if outjsondata is None:
        outjsondata = []
    for key, value in filter_dict.items():
        if type(key) == tuple:
            key = list(key)
        df = df[value(df[key])]
    if df.size == 0:
        return
    # Export the flat (pre-pivot) rows so the JSON output keeps one dict
    # per result; the pivot below is for the spreadsheet only.
    json_df = df.to_json(orient='records')
    outjsondata += json.loads(json_df)

    df = df.pivot_table(index=index, columns=columns, values=['Result'])
    df = df.fillna('')
    for i, order in enumerate(columns_order):
        df = df.reindex(columns=order, level=i)
    df.to_excel(writer, sheet_name=sheet_name)
Expand Down Expand Up @@ -222,16 +226,78 @@ def MakeUniqueID(x):
NotEqual('millijoules/Stream')),
('Scenario', 'Model'):
Apply(FilterScenario, suite)
}, suite + ' - ' + category)
}, suite + ' - ' + category, outjsondata)

MakeWorksheet(
df, indices[category], {
'Category': Equal(category),
'Suite': Contain(suite),
'has_power': Equal(True),
('Scenario', 'Model'): Apply(FilterScenario, suite)
}, suite + ' - ' + category + ' - power')
}, suite + ' - ' + category + ' - power', outjsondata)

def reformatlink(data, key):
details = data[key]
details = details[details.find("(")+2:details.find(",")-1]
return details

# --- Post-process collected JSON rows and write <output>_results.json ---
# NOTE(review): this statement run sits inside main(); `outjsondata` was
# filled by the MakeWorksheet() calls above via df.to_json(orient='records').
# Indentation below is reconstructed from a whitespace-stripped diff —
# verify nesting of the merge/cleanup statements against the original PR.

# Reduce the HYPERLINK formulas in 'Details' and 'Code' to bare URLs, and
# rebuild each dict with 'ID' first (dicts preserve insertion order, so
# 'ID' leads each record in the emitted JSON).
for i,result in enumerate(outjsondata):
    result['Details'] = reformatlink(result, "Details")
    result['Code'] = reformatlink(result, "Code")
    result_id = result.pop('ID')
    outjsondata[i] = {'ID': result_id, **result}

# Stable sorts applied least- to most-significant: final order is by ID,
# then UsedModel, then Scenario, then Units, so rows to deduplicate or
# merge end up adjacent.
outjsondata.sort(key=lambda x:x["Units"])
outjsondata.sort(key=lambda x:x["Scenario"])
outjsondata.sort(key=lambda x:x["UsedModel"])
outjsondata.sort(key=lambda x:x["ID"])

#remove duplicate perf results
# Adjacent rows equal on all four keys are duplicates (the same result was
# collected for more than one worksheet); keep only the first of each run.
keystomatch = ['ID', 'UsedModel', 'Scenario', 'Units']
i = 0
n = len(outjsondata)
while i < n:
    result = outjsondata[i]
    # Collapse the whole run of rows identical to outjsondata[i].
    while i < n - 1 and all(result[key] == outjsondata[i+1][key] for key in keystomatch):
        del(outjsondata[i+1])
        n -= 1
    i += 1

#merge perf and power results
# Drop 'Units' from the match keys: a power row and its performance row
# share ID/UsedModel/Scenario but differ in units.
keystomatch.pop()

for i in range(len(outjsondata)):
    result = outjsondata[i]
    if not result:
        # Row already emptied by a previous merge iteration.
        continue
    if i < len(outjsondata) - 1:
        if all(result[key] == outjsondata[i+1][key] for key in keystomatch):
            #print(result)
            #print(outjsondata[i+1])
            # The pair holds one power metric and one performance metric;
            # which is which is decided by the units of the current row.
            if "Watts" in result['Units'] or "joules" in result['Units']:
                result['Performance_Result'] = outjsondata[i+1]['Result']
                result['Performance_Units'] = outjsondata[i+1]['Units']
                result['Power_Result'] = result['Result']
                result['Power_Units'] = result['Units']
            else:
                result['Power_Result'] = outjsondata[i+1]['Result']
                result['Power_Units'] = outjsondata[i+1]['Units']
                result['Performance_Result'] = result['Result']
                result['Performance_Units'] = result['Units']
            # Mark the partner row for removal; the raw Result/Units of the
            # current row were renamed into the *_Result / *_Units pairs.
            outjsondata[i+1] = {}
            del(result['Result'])
            del(result['Units'])

# Rows that never merged with a power partner: move their result under the
# Performance_* names too, so every record has a uniform schema.
for i,result in enumerate(outjsondata):
    if result.get('Result'):
        result['Performance_Result'] = result['Result']
        result['Performance_Units'] = result['Units']
        del(result['Result'])
        del(result['Units'])

# Drop the rows emptied during the merge, then write the JSON report next
# to the .xlsx (same basename plus '_results.json').
outjsondata = [ i for i in outjsondata if i != {}]
with open(f"{output}_results.json", "w") as f:
    f.write(json.dumps(outjsondata, indent=2))
score_format = writer.book.add_format({'num_format': '#,##0.00'})
bg_format = writer.book.add_format({'bg_color': '#efefef'})
for ws in writer.book.worksheets():
Expand Down
18 changes: 14 additions & 4 deletions tools/submission/submission_checker.py
Original file line number Diff line number Diff line change
Expand Up @@ -1562,6 +1562,7 @@ def check_results_dir(
"inferred",
"has_power",
"Units",
"weight_data_types"
]
fmt = ",".join(["{}"] * len(head)) + "\n"
csv.write(",".join(head) + "\n")
Expand All @@ -1587,6 +1588,7 @@ def log_result(
config,
inferred=0,
power_metric=0,
weight_data_types="fp32"
):
notes = system_json.get("hw_notes", "")
if system_json.get("sw_notes"):
Expand Down Expand Up @@ -1636,7 +1638,10 @@ def log_result(
"Offline": "Watts",
"Server": "Watts",
}
unit = special_unit_dict.get(model_name, unit_dict)[scenario_fixed]
if config.version == "v4.0":
unit = unit_dict[scenario_fixed]
else:
unit = special_unit_dict.get(model_name, unit_dict)[scenario_fixed]
power_unit = power_unit_dict[scenario_fixed]

csv.write(
Expand Down Expand Up @@ -1668,6 +1673,7 @@ def log_result(
inferred,
power_metric > 0,
unit,
'"' + weight_data_types + '"',
)
)

Expand Down Expand Up @@ -1701,6 +1707,7 @@ def log_result(
inferred,
power_metric > 0,
power_unit,
'"' + weight_data_types + '"',
)
)

Expand All @@ -1718,7 +1725,7 @@ def log_result(
]
if len(files_outside_division) > 0 and not skip_extra_files_in_root_check:
log.error(
"Root contains files outside division folder %s",
"Root contains files outside division folder %s. You can use '--skip-extra-files-in-root-check' to skip this check temporarily",
division,
files_outside_division,
)
Expand Down Expand Up @@ -1999,7 +2006,7 @@ def log_result(
errors += 1
continue
else:
measurement_check, conf_equal_issue_check = check_measurement_dir(
measurement_check, conf_equal_issue_check, weight_data_types = check_measurement_dir(
config,
measurement_dir,
name,
Expand Down Expand Up @@ -2254,6 +2261,7 @@ def log_result(
config,
inferred=inferred,
power_metric=power_metric,
weight_data_types=weight_data_types
)
else:
results[name] = None
Expand Down Expand Up @@ -2450,9 +2458,11 @@ def check_measurement_dir(
end = len(".json")
break

weight_data_types = None
if system_file:
with open(os.path.join(measurement_dir, system_file), "r") as f:
j = json.load(f)
weight_data_types = j['weight_data_types']
for k in SYSTEM_IMP_REQUIRED_FILES:
if k not in j:
is_valid = False
Expand Down Expand Up @@ -2531,7 +2541,7 @@ def check_measurement_dir(
log.error("%s is missing %s*.json", fname, system_desc)
is_valid = False

return is_valid, equal_issue_used
return is_valid, equal_issue_used, weight_data_types


def check_compliance_perf_dir(test_dir):
Expand Down
Loading