Compare commits
No commits in common. "ead00b5e8af85d2971af02dd1b5047ac48a45efc" and "4817065712b0f17f3ea77bd029170250a45e1489" have entirely different histories.
ead00b5e8a ... 4817065712
.gitignore (vendored, 2 lines changed)

@@ -21,5 +21,3 @@ tftpboot
pxelinux.cfg
serial.out
memdisk
**/venv
cache_utils/results*/
@@ -7,24 +7,13 @@ import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
#import tikzplotlib
from sys import exit
import wquantiles as wq
import numpy as np

from functools import partial

import sys
import os

import json
import warnings

warnings.filterwarnings('ignore')
print("warnings are filtered, enable them back if you are having some trouble")

def dict_to_json(d):
    if isinstance(d, dict):
        return json.dumps(d)
    return d

# For cyber cobay sanity check :
# from gmpy2 import popcount
@@ -52,10 +41,6 @@ def convert8(x):
    return np.array(int(x, base=16)).astype(np.int64)
    # return np.int8(int(x, base=16))

assert os.path.exists(sys.argv[1] + ".slices.csv")
assert os.path.exists(sys.argv[1] + ".cores.csv")
assert os.path.exists(sys.argv[1] + "-results_lite.csv.bz2")

df = pd.read_csv(sys.argv[1] + "-results_lite.csv.bz2",
    dtype={
        "main_core": np.int8,
@@ -76,8 +61,6 @@ df = pd.read_csv(sys.argv[1] + "-results_lite.csv.bz2",
    converters={'address': convert64, 'hash': convert8},
    )

print(f"Loaded columns : {list(df.keys())}")

sample_columns = [
    "clflush_remote_hit",
    "clflush_shared_hit",
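The converters above parse hex-encoded columns at load time. Below is a minimal sketch of the idea, assuming convert64 mirrors the convert8 shown earlier in the diff; the inline CSV and its values are made up for illustration:

    import io
    import numpy as np
    import pandas as pd

    def convert64(x):
        # assumed to parse a 64-bit address written as a hex string
        return np.int64(int(x, base=16))

    def convert8(x):
        # as in the diff: parse a small hex field (the slice hash)
        return np.array(int(x, base=16)).astype(np.int64)

    # hypothetical two-line CSV standing in for *-results_lite.csv.bz2
    csv = io.StringIO("address,hash,time\n0xdeadbeef,0x3,250\n0xdeadbef0,0x1,180\n")
    df = pd.read_csv(csv, converters={'address': convert64, 'hash': convert8})
    print(df.dtypes)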
@@ -134,11 +117,13 @@ print(*[bin(a) for a in addresses], sep='\n')

print(df.head())

print(df["hash"].unique())

min_time = df["time"].min()
max_time = df["time"].max()

q10s = [wq.quantile(df["time"], df[col], 0.1) for col in sample_flush_columns if col in df]
q90s = [wq.quantile(df["time"], df[col], 0.9) for col in sample_flush_columns if col in df]
q10s = [wq.quantile(df["time"], df[col], 0.1) for col in sample_flush_columns]
q90s = [wq.quantile(df["time"], df[col], 0.9) for col in sample_flush_columns]

graph_upper = int(((max(q90s) + 19) // 10) * 10)
graph_lower = int(((min(q10s) - 10) // 10) * 10)
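Each sample column holds per-row hit counts, so wq.quantile acts as a weighted quantile with df["time"] as the values and the count column as the weights; the plot bounds are then snapped to multiples of 10. A small self-contained sketch of that calculation, using made-up histogram data:

    import numpy as np
    import wquantiles as wq

    # hypothetical histogram: access times in cycles, and how many
    # "clflush_remote_hit" samples landed in each time bin
    times = np.array([150, 160, 170, 180, 190, 200])
    hits = np.array([1, 10, 40, 30, 5, 1])

    q10 = wq.quantile(times, hits, 0.1)   # weighted 10th percentile
    q90 = wq.quantile(times, hits, 0.9)   # weighted 90th percentile

    # same rounding as the diff: snap the plotted time range to multiples of 10
    graph_upper = int(((q90 + 19) // 10) * 10)
    graph_lower = int(((q10 - 10) // 10) * 10)
    print(q10, q90, graph_lower, graph_upper)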
@@ -183,9 +168,6 @@ custom_hist(df_ax_vx_sx["time"], df_ax_vx_sx["clflush_miss_n"], df_ax_vx_sx["clf
#tikzplotlib.save("fig-hist-bad-A{}V{}S{}.tex".format(attacker,victim,slice))#, axis_width=r'0.175\textwidth', axis_height=r'0.25\textwidth')
plt.show()

# Fix np.darray is unhashable
df_main_core_0.loc[:, ('hash',)] = df_main_core_0['hash'].apply(dict_to_json)
df.loc[:, ('hash',)] = df['hash'].apply(dict_to_json)

g = sns.FacetGrid(df_main_core_0, col="helper_core", row="hash", legend_out=True)
g2 = sns.FacetGrid(df, col="main_core", row="hash", legend_out=True)
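FacetGrid groups rows by the "hash" column, which fails when the cells hold unhashable objects, so dict_to_json serializes them to strings first. A sketch of the workaround with a toy DataFrame (sns.histplot assumes a recent seaborn; the real script maps a custom histogram function):

    import json
    import pandas as pd
    import seaborn as sns

    def dict_to_json(d):
        # serialize unhashable cell values so FacetGrid/groupby can use them as keys
        if isinstance(d, dict):
            return json.dumps(d)
        return d

    # toy frame: the real "hash" column comes from the calibration results
    df = pd.DataFrame({
        "time": [150, 160, 200, 210],
        "hash": [{"slice": 0}, {"slice": 0}, {"slice": 1}, {"slice": 1}],
    })
    df["hash"] = df["hash"].apply(dict_to_json)

    g = sns.FacetGrid(df, row="hash", legend_out=True)
    g.map(sns.histplot, "time")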
@@ -212,7 +194,7 @@ g2.map(custom_hist, "time", "clflush_miss_n", "clflush_remote_hit", "clflush_loc
#g3.map(custom_hist, "time", "clflush_miss_n", "clflush_remote_hit", "clflush_local_hit_n", "clflush_shared_hit")

#g4 = sns.FacetGrid(df_mcf6_slg7, row="helper_core_fixed", col="helper_ht")
#g4.map(custom_hist, "time", "clflush_miss_n", "clflush_remote_hit", "clflush_local_hit_n", "clflush_shared_hit")
g#4.map(custom_hist, "time", "clflush_miss_n", "clflush_remote_hit", "clflush_local_hit_n", "clflush_shared_hit")

def stat(x, key):
    return wq.median(x["time"], x[key])
@@ -234,7 +216,7 @@ stats.to_csv(sys.argv[1] + ".stats.csv", index=False)
#print(stats.to_string())

plt.show()
sys.exit(0)
exit(0)
g = sns.FacetGrid(stats, row="Core")

g.map(sns.distplot, 'Miss', bins=range(100, 480), color="r")
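stat() reduces each group's timing histogram to a weighted median, which is what ends up in the .stats.csv written just above. A sketch with hypothetical grouping and counter columns:

    import pandas as pd
    import wquantiles as wq

    def stat(x, key):
        # weighted median: "time" holds the bin values, x[key] the per-bin counts
        return wq.median(x["time"], x[key])

    # hypothetical per-(core, time) histogram of clflush miss counts
    df = pd.DataFrame({
        "main_core": [0, 0, 0, 1, 1, 1],
        "time": [150, 160, 170, 150, 160, 170],
        "clflush_miss_n": [2, 30, 5, 1, 25, 8],
    })

    stats = df.groupby("main_core").apply(stat, key="clflush_miss_n").reset_index(name="Miss")
    print(stats)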
@@ -250,4 +232,4 @@ plt.show()
# plt.figure()
# sns.distplot(test["value"], hist_kws={"weights": test["weight"]}, kde=False)

sys.exit(0)
exit(0)
@@ -10,12 +10,6 @@ from sys import exit
import numpy as np
from scipy import optimize
import sys
import os

import warnings

warnings.filterwarnings('ignore')
print("warnings are filtered, enable them back if you are having some trouble")

# TODO
# sys.argv[1] should be the root
@@ -29,9 +23,6 @@ print("warnings are filtered, enable them back if you are having some trouble")
# each row is an origin core
# each column a helper core if applicable

assert os.path.exists(sys.argv[1] + ".stats.csv")
assert os.path.exists(sys.argv[1] + ".slices.csv")
assert os.path.exists(sys.argv[1] + ".cores.csv")

stats = pd.read_csv(sys.argv[1] + ".stats.csv",
    dtype={
@@ -243,9 +234,9 @@ figure_median_I.map(sns.lineplot, 'slice_group', 'predicted_miss', color="b")
figure_median_I.set_titles(col_template="$A$ = {col_name}")
figure_median_I.tight_layout()

# import tikzplotlib
import tikzplotlib

# tikzplotlib.save("fig-median-I.tex", axis_width=r'0.175\textwidth', axis_height=r'0.25\textwidth')
tikzplotlib.save("fig-median-I.tex", axis_width=r'0.175\textwidth', axis_height=r'0.25\textwidth')
plt.show()

#stats["predicted_remote_hit_no_gpu"] = exclusive_hit_topology_nogpu_df(stats, *(res_no_gpu[0]))
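This hunk re-enables tikzplotlib so the figure is exported as TikZ for LaTeX in addition to being shown on screen. A minimal usage sketch; the plotted data here is only a placeholder:

    import matplotlib.pyplot as plt
    import tikzplotlib

    plt.plot([0, 1, 2, 3], [120, 135, 150, 160])
    # write the current figure as a TikZ/PGFPlots picture for inclusion in LaTeX
    tikzplotlib.save("fig-median-I.tex",
                     axis_width=r'0.175\textwidth',
                     axis_height=r'0.25\textwidth')
    plt.show()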
@@ -259,7 +250,7 @@ figure_median_E_A0.map(sns.scatterplot, 'helper_core_fixed', 'clflush_remote_hit
figure_median_E_A0.map(sns.lineplot, 'helper_core_fixed', 'predicted_remote_hit_gpu', color="r")
figure_median_E_A0.set_titles(col_template="$S$ = {col_name}")

# tikzplotlib.save("fig-median-E-A0.tex", axis_width=r'0.175\textwidth', axis_height=r'0.25\textwidth')
tikzplotlib.save("fig-median-E-A0.tex", axis_width=r'0.175\textwidth', axis_height=r'0.25\textwidth')
plt.show()

g = sns.FacetGrid(stats, row="main_core_fixed")
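The predicted_remote_hit_gpu curve plotted above comes from a topology model fitted elsewhere in the script with scipy.optimize; the sketch below only illustrates the general fit pattern with a made-up linear model and synthetic data, not the repository's actual model:

    import numpy as np
    from scipy import optimize

    def predicted_remote_hit(helper_core, base, per_hop):
        # made-up linear model: latency grows with the helper core index
        return base + per_hop * helper_core

    helper_cores = np.arange(8)
    measured = 120 + 6 * helper_cores + np.random.default_rng(0).normal(0, 2, 8)

    params, _ = optimize.curve_fit(predicted_remote_hit, helper_cores, measured)
    print("fitted base latency and per-hop cost:", params)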
@@ -111,10 +111,9 @@ fn main() {

    let core_per_socket_str = from_utf8(&core_per_socket_out.stdout).unwrap();

    //println!("Number of cores per socket (str): {}", core_per_socket_str);
    //println!("Number of cores per socket: {}", cps_str);

    let core_per_socket: u8 = core_per_socket_str[0..(core_per_socket_str.len() - 1)]
        .trim()
        .parse()
        .unwrap_or(0);
@@ -157,7 +156,7 @@ fn main() {
            display_name: "clflush remote hit",
            t: &(),
        },
        CalibrateOperation2T {
        /* CalibrateOperation2T {
            prepare: maccess::<u8>,
            op: load_and_flush_wrap,
            name: "clflush_shared_hit",
@@ -177,7 +176,7 @@ fn main() {
            name: "clflush_local_hit_f",
            display_name: "clflush local hit - f",
            t: &(),
        },
        },*/
        CalibrateOperation2T {
            prepare: noop::<u8>,
            op: only_flush_wrap,
@@ -185,7 +184,7 @@ fn main() {
            display_name: "clflush miss - n",
            t: &(),
        },
        CalibrateOperation2T {
        /* CalibrateOperation2T {
            prepare: noop::<u8>,
            op: load_and_flush_wrap,
            name: "clflush_local_hit_n",
@@ -219,7 +218,7 @@ fn main() {
            name: "reload_local_hit",
            display_name: "reload local hit",
            t: &(),
        },
        },*/
    ];

    let r = unsafe {
@@ -4,6 +4,5 @@ wquantiles==0.5

matplotlib~=3.2.1
numpy~=1.18.2
#gmpy2~=2.0.8
gmpy2~=2.0.8
scipy~=1.4.1
PyQt5