Faster --stats

This commit is contained in:
augustin64 2024-07-04 09:20:54 +02:00
parent dee9f37a17
commit e610acfc8f

View File

@@ -163,9 +163,10 @@ columns = [
     ("helper_core_fixed", "helper_core", "core"),
     ("helper_ht", "helper_core", "hthread"),
 ]
-for (col, icol, key) in columns:
-    df[col] = df[icol].apply(remap_core(key))
-    print_timed(f"Column {col} added")
+if not args.stats:
+    for (col, icol, key) in columns:
+        df[col] = df[icol].apply(remap_core(key))
+        print_timed(f"Column {col} added")

 if args.slice_remap:
@@ -244,13 +245,13 @@ def export_stats_csv():
     """
     Compute the statistic for 1 helper core/main core/slice/column
     - median : default, not influenced by errors
-    - average : better accuracy when observing floor steps in the results
+    - average : better precision when observing floor steps in the results
     """
     # return wq.median(x["time"], x[key])
     return np.average(x[key], weights=x["time"])
 df_grouped = df.groupby(["main_core", "helper_core", "hash"])
 miss = df_grouped.apply(lambda x: compute_stat(x, "clflush_miss_n"))
 hit_remote = df_grouped.apply(lambda x: compute_stat(x, "clflush_remote_hit"))
 hit_local = df_grouped.apply(lambda x: compute_stat(x, "clflush_local_hit_n"))