Add analysis scripts, format and fix spelling of prefetcher_reverse/Readme.md

Squash of 4 cherry-picked commits:
569f3aaf26469c6f37ecf338f32fd8d6222575fb
1b93a2a951a9a6eea123806b1d557634e9333665
6e5b5c5807a83758ba321e405901377a532734c1
25ccd3248fa0a87e454363698d2ad2bba0588e37
Author: Guillaume DIDIER, 2022-09-21 11:06:09 +02:00
parent 0765552240
commit bc684eca89
7 changed files with 1521 additions and 0 deletions

analysis/change_colour_map.sh (new executable file)
@@ -0,0 +1,7 @@
#!/bin/bash
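# Give each class of generated .tikz figure its own named colormap by renaming
# the default "plots1" colormap; sed -i.old keeps a .old backup of each file.
# Note: the brace expansions below require bash rather than plain POSIX sh.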
sed -i.old 's/plots1/squarecolourmap/g' */_/1/*{FF,SF,SR}.tikz
sed -i.old 's/plots1/cubecolourmap/g' */*/*/*AllProbes.tikz
sed -i.old 's/plots1/slicecolourmap/g' */*/*/*Slice_*.tikz
sed -i.old 's/plots1/maxcolourmap/g' max_*/*.tikz
sed -i.old 's/plots1/diffcolourmap/g' diff_*/*.tikz

analysis/extract_analysis_csv.sh (new executable file)
@@ -0,0 +1,128 @@
#!/bin/sh
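# Usage: extract_analysis_csv.sh <basename>
# Extracts one CSV per experiment from <basename>.log: each grep selects the
# lines for one experiment tag, and cut strips the "SingleIP...:"/"UniqueIPs...:"
# prefix, leaving only the CSV payload.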
grep '^SingleIP0:' "$1.log" | cut -b 11- > "$1.S0.csv"
grep '^UniqueIPs0:' "$1.log" | cut -b 12- > "$1.U0.csv"
grep '^SingleIP1:' "$1.log" | cut -b 11- > "$1.S1.csv"
grep '^UniqueIPs1:' "$1.log" | cut -b 12- > "$1.U1.csv"
grep '^SingleIP2:' "$1.log" | cut -b 11- > "$1.S2.csv"
grep '^UniqueIPs2:' "$1.log" | cut -b 12- > "$1.U2.csv"
grep '^SingleIPA1:' "$1.log" | cut -b 12- > "$1.SA1.csv"
grep '^UniqueIPsA1:' "$1.log" | cut -b 13- > "$1.UA1.csv"
grep '^SingleIPA2:' "$1.log" | cut -b 12- > "$1.SA2.csv"
grep '^UniqueIPsA2:' "$1.log" | cut -b 13- > "$1.UA2.csv"
grep '^SingleIPA3:' "$1.log" | cut -b 12- > "$1.SA3.csv"
grep '^UniqueIPsA3:' "$1.log" | cut -b 13- > "$1.UA3.csv"
grep '^SingleIPA4:' "$1.log" | cut -b 12- > "$1.SA4.csv"
grep '^UniqueIPsA4:' "$1.log" | cut -b 13- > "$1.UA4.csv"
grep '^SingleIPA8:' "$1.log" | cut -b 12- > "$1.SA8.csv"
grep '^UniqueIPsA8:' "$1.log" | cut -b 13- > "$1.UA8.csv"
grep '^SingleIPB1:' "$1.log" | cut -b 12- > "$1.SB1.csv"
grep '^UniqueIPsB1:' "$1.log" | cut -b 13- > "$1.UB1.csv"
grep '^SingleIPB2:' "$1.log" | cut -b 12- > "$1.SB2.csv"
grep '^UniqueIPsB2:' "$1.log" | cut -b 13- > "$1.UB2.csv"
grep '^SingleIPB3:' "$1.log" | cut -b 12- > "$1.SB3.csv"
grep '^UniqueIPsB3:' "$1.log" | cut -b 13- > "$1.UB3.csv"
grep '^SingleIPB4:' "$1.log" | cut -b 12- > "$1.SB4.csv"
grep '^UniqueIPsB4:' "$1.log" | cut -b 13- > "$1.UB4.csv"
grep '^SingleIPB8:' "$1.log" | cut -b 12- > "$1.SB8.csv"
grep '^UniqueIPsB8:' "$1.log" | cut -b 13- > "$1.UB8.csv"
grep '^SingleIPC1:' "$1.log" | cut -b 12- > "$1.SC1.csv"
grep '^UniqueIPsC1:' "$1.log" | cut -b 13- > "$1.UC1.csv"
grep '^SingleIPC2:' "$1.log" | cut -b 12- > "$1.SC2.csv"
grep '^UniqueIPsC2:' "$1.log" | cut -b 13- > "$1.UC2.csv"
grep '^SingleIPC3:' "$1.log" | cut -b 12- > "$1.SC3.csv"
grep '^UniqueIPsC3:' "$1.log" | cut -b 13- > "$1.UC3.csv"
grep '^SingleIPC4:' "$1.log" | cut -b 12- > "$1.SC4.csv"
grep '^UniqueIPsC4:' "$1.log" | cut -b 13- > "$1.UC4.csv"
grep '^SingleIPC8:' "$1.log" | cut -b 12- > "$1.SC8.csv"
grep '^UniqueIPsC8:' "$1.log" | cut -b 13- > "$1.UC8.csv"
grep '^SingleIPD1:' "$1.log" | cut -b 12- > "$1.SD1.csv"
grep '^UniqueIPsD1:' "$1.log" | cut -b 13- > "$1.UD1.csv"
grep '^SingleIPD2:' "$1.log" | cut -b 12- > "$1.SD2.csv"
grep '^UniqueIPsD2:' "$1.log" | cut -b 13- > "$1.UD2.csv"
grep '^SingleIPD3:' "$1.log" | cut -b 12- > "$1.SD3.csv"
grep '^UniqueIPsD3:' "$1.log" | cut -b 13- > "$1.UD3.csv"
grep '^SingleIPD4:' "$1.log" | cut -b 12- > "$1.SD4.csv"
grep '^UniqueIPsD4:' "$1.log" | cut -b 13- > "$1.UD4.csv"
grep '^SingleIPD8:' "$1.log" | cut -b 12- > "$1.SD8.csv"
grep '^UniqueIPsD8:' "$1.log" | cut -b 13- > "$1.UD8.csv"
grep '^SingleIPE2:' "$1.log" | cut -b 12- > "$1.SE2.csv"
grep '^UniqueIPsE2:' "$1.log" | cut -b 13- > "$1.UE2.csv"
grep '^SingleIPE3:' "$1.log" | cut -b 12- > "$1.SE3.csv"
grep '^UniqueIPsE3:' "$1.log" | cut -b 13- > "$1.UE3.csv"
grep '^SingleIPE4:' "$1.log" | cut -b 12- > "$1.SE4.csv"
grep '^UniqueIPsE4:' "$1.log" | cut -b 13- > "$1.UE4.csv"
grep '^SingleIPF1:' "$1.log" | cut -b 12- > "$1.SF1.csv"
grep '^SingleIPF-1:' "$1.log" | cut -b 13- > "$1.SF-1.csv"
grep '^SingleIPF2:' "$1.log" | cut -b 12- > "$1.SF2.csv"
grep '^SingleIPF-2:' "$1.log" | cut -b 13- > "$1.SF-2.csv"
grep '^SingleIPF3:' "$1.log" | cut -b 12- > "$1.SF3.csv"
grep '^SingleIPF-3:' "$1.log" | cut -b 13- > "$1.SF-3.csv"
grep '^SingleIPF4:' "$1.log" | cut -b 12- > "$1.SF4.csv"
grep '^SingleIPF-4:' "$1.log" | cut -b 13- > "$1.SF-4.csv"
grep '^UniqueIPsF1:' "$1.log" | cut -b 13- > "$1.UF1.csv"
grep '^UniqueIPsF-1:' "$1.log" | cut -b 14- > "$1.UF-1.csv"
grep '^UniqueIPsF2:' "$1.log" | cut -b 13- > "$1.UF2.csv"
grep '^UniqueIPsF-2:' "$1.log" | cut -b 14- > "$1.UF-2.csv"
grep '^UniqueIPsF3:' "$1.log" | cut -b 13- > "$1.UF3.csv"
grep '^UniqueIPsF-3:' "$1.log" | cut -b 14- > "$1.UF-3.csv"
grep '^UniqueIPsF4:' "$1.log" | cut -b 13- > "$1.UF4.csv"
grep '^UniqueIPsF-4:' "$1.log" | cut -b 14- > "$1.UF-4.csv"
#SingleIPA1Functions:i,Addr
#UniqueIPsA1Functions:i,Addr
#SingleIPA2Functions:i,Addr
#UniqueIPsA2Functions:i,Addr
#SingleIPA3Functions:i,Addr
#UniqueIPsA3Functions:i,Addr
#SingleIPA4Functions:i,Addr
#UniqueIPsA4Functions:i,Addr
#SingleIPA8Functions:i,Addr
#UniqueIPsA8Functions:i,Addr
#SingleIPB1Functions:i,Addr
#UniqueIPsB1Functions:i,Addr
#SingleIPB2Functions:i,Addr
#UniqueIPsB2Functions:i,Addr
#SingleIPB3Functions:i,Addr
#UniqueIPsB3Functions:i,Addr
#SingleIPB4Functions:i,Addr
#UniqueIPsB4Functions:i,Addr
#SingleIPB8Functions:i,Addr
#UniqueIPsB8Functions:i,Addr
#SingleIPC1Functions:i,Addr
#UniqueIPsC1Functions:i,Addr
#SingleIPC2Functions:i,Addr
#UniqueIPsC2Functions:i,Addr
#SingleIPC3Functions:i,Addr
#UniqueIPsC3Functions:i,Addr
#SingleIPC4Functions:i,Addr
#UniqueIPsC4Functions:i,Addr
#SingleIPC8Functions:i,Addr
#UniqueIPsC8Functions:i,Addr
#SingleIPD1Functions:i,Addr
#UniqueIPsD1Functions:i,Addr
#SingleIPD2Functions:i,Addr
#UniqueIPsD2Functions:i,Addr
#SingleIPD3Functions:i,Addr
#UniqueIPsD3Functions:i,Addr
#SingleIPD4Functions:i,Addr
#UniqueIPsD4Functions:i,Addr
#SingleIPD8Functions:i,Addr
#UniqueIPsD8Functions:i,Addr
#SingleIPE2Functions:i,Addr
#UniqueIPsE2Functions:i,Addr
#SingleIPE3Functions:i,Addr
#UniqueIPsE3Functions:i,Addr
#SingleIPE4Functions:i,Addr
#UniqueIPsE4Functions:i,Addr
#grep '^SingleIP0:' "$1.log" | cut -b 11- > "$1.S0.csv"
#grep '^UniqueIPs0:' "$1.log" | cut -b 12- > "$1.U0.csv"
#grep '^SingleIP1:' "$1.log" | cut -b 11- > "$1.S1.csv"
#grep '^UniqueIPs1:' "$1.log" | cut -b 12- > "$1.U1.csv"
#grep '^SingleIP2:' "$1.log" | cut -b 11- > "$1.S2.csv"
#grep '^UniqueIPs2:' "$1.log" | cut -b 12- > "$1.U2.csv"

analysis/makeplots-bonus.jl (new file)
@@ -0,0 +1,414 @@
using CSV
using Plots
pgfplotsx()
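# Like fill(), but deep-copies the element into every cell so the entries can be
# mutated independently (fill() would alias a single mutable object everywhere).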
function myfill(element, dimensions)
res = fill(element, dimensions)
res = map(x -> deepcopy(x), res)
res
end
# General TODOs: fix the ticks, add legends
#eaps = [12,13]
eaps = [14,15]
#eaps = [0,12,13,14,15]
len_eaps = length(eaps)
#types = ["S","U"]
types = ["S"]
#functions_identifier = ["A", "B", "C", "D", "E"]
functions_identifier = ["F"]
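# Each *_index function asserts that a CSV row's offsets satisfy the pattern
# relation selected by k, then maps the row to the (x, y) cell of the 2D plot.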
function A_index(k, x, y, z)
if (x + k) % 64 != y
print(string("Argh, k = ", k, ", x = ", x, ", y = ", y, ", z = ", z))
@assert false
end
(x,z)
end
function B_index(k, x, y, z)
@assert (x - k + 64) % 64 == y
(x,z)
end
function C_index(k, x, y, z)
@assert (y + k) % 64 == z
(x,y)
end
function D_index(k, x, y, z)
@assert (y - k + 64) % 64 == z
(x,y)
end
function E_index(k, x, y, z)
@assert (y - x + 64) % 64 == (z - y + 64) % 64
(x, (y - x + 64) % 64)
end
function F_index(k, x1, x2, x3, x4)
@assert (x1 + k + 64) % 64 == x2
@assert (x2 + k + 64) % 64 == x4
(x1, x3)
end
#functions_index = [A_index, B_index, C_index, D_index, E_index]
functions_index = [F_index]
#types = ["S"]
ks = [[1, 2, 3, 4, 8],
[1, 2, 3, 4, 8],
[1, 2, 3, 4, 8],
[1, 2, 3, 4, 8],
[2, 3, 4]
]
ks = [[4, 3, 2, 1, -1, -2, -3, -4]]
#ks = [[1]]
methods = ["SF", "SR", "FF"]
plot_lock = ReentrantLock()
slices_offset_0 = [0, 1, 2, 8, 14, 15, 30, 31, 32, 55, 56, 61, 62, 63]
#slices_offset_0 = []
#diff_slices_offset_0 = [0, 1, 2, 61, 62, 63]
function make_name(eap, type, f, k)
string("bonusap/bonusap-with-", eap, "-prefetcher.", type, f, k, ".csv")
end
all_file_names = myfill((0,0,0,[]), (length(eaps), length(types), length(functions_index)))
for x in 1:len_eaps
for (y,type) in enumerate(types)
for (z,f) in enumerate(functions_identifier)
all_file_names[x,y,z] = (x,y,z,[])
for (i,k) in enumerate(ks[z])
# change me : insert file names into list
push!(all_file_names[x,y,z][4] , (x, y, z, k, make_name(eaps[x], type, f, k) ) )
end
end
end
end
print(all_file_names)
#files = Matrix(CSV, length(eaps), length(types), length(levels))
files = Array{
Union{
Nothing,
Tuple{Int64, Int64, Int64, Vector{
Tuple{ Int64, Int64, Int64, Int64, CSV.File }
}}
},3
}(nothing, length(eaps), length(types), length(functions_identifier))
Threads.@threads for f in all_file_names
x = f[1]
y = f[2]
z = f[3]
files[x,y,z] = (x,y,z,[])
for (x,y,z,k,name) in f[4]
push!(files[x,y,z][4], (x,y,z,k, CSV.File(name)))
end
end
# TODO:
#
# - Split this function into one that loads the data into a square/cube structure and one that does the plotting.
# - Refactor the code below to compute the various squares/cubes first and then do the plots.
# - Refactor the slicing function too.
# - Create a custom diagonal slice function?
preamble_printed = false
push!(PGFPlotsX.CUSTOM_PREAMBLE,raw"\newcommand{\gdfigurewidth}{150mm}")
push!(PGFPlotsX.CUSTOM_PREAMBLE,raw"\newcommand{\gdfigureheight}{100mm}")
function graph2d(name, matrix, xlabel, ylabel)
x = range(0, 63)
y = range(0, 63)
function hmp2d(x, y)
matrix[x + 1, y + 1]
end
lock(plot_lock) do
graph = heatmap(x, y, hmp2d, minorgrid=true, height = raw"{\gdfigureheight}}, width = {{\gdfigurewidth}", xlabel = xlabel, ylabel = ylabel, c = :blues, extra_kwargs =:subplot)
if !preamble_printed
global preamble_printed = true
print(Plots.pgfx_preamble(graph))
end
savefig(graph, string(name, ".tikz"))
savefig(graph, string(name, ".pdf"))
end
end
function graph2dclims(name, matrix, clims, xlabel, ylabel)
x = range(0, 63)
y = range(0, 63)
function hmp2d(x, y)
matrix[x + 1, y + 1]
end
lock(plot_lock) do
graph = heatmap(x, y, hmp2d, clims = clims, minorgrid=true, height = raw"{\gdfigureheight}}, width = {{\gdfigurewidth}", xlabel = xlabel, ylabel = ylabel, extra_kwargs =:subplot)
savefig(graph, string(name, ".tikz"))
savefig(graph, string(name, ".pdf"))
end
end
function cube_flatten_z(cubes)
len = length(cubes)
res = myfill(myfill(0.0,(64,64)), len)
for k in range(1,64)
Threads.@threads for i in range(1,64)
for j in range(1,64)
for l in range(1,len)
res[l][i,j] += cubes[l][i,j,k]
end
end
end
end
res
end
function slice_extract_x(cubes, slices)
slice_length = length(slices)
cube_length = length(cubes)
res = myfill(myfill(myfill(0.0, (64, 64)), slice_length), cube_length)
for i in range(1,64)
for j in range(1,64)
for (k,slice) in enumerate(slices)
for l in range(1, cube_length)
res[l][k][i, j] = cubes[l][slice+1, i, j]
end
end
end
end
res
end
function graph_2(basename, csv, k, index_function)
result = fill(-1.0, (3, 64,64,64))
# Fill in the 3D cube, then create the various slices and flattenings
# Flattened Cube with x = first addr, y = second addr, compute the sum of prefetches ?
# Grab a few random first addresses and look at them with x = second addr, y = probe addr
# 0,1, 62,63 14, 15 plus one other depending on what appears
for row in csv
probe = row.ProbeAddr
offset_0 = Int64(row.Offset_0)
offset_1 = Int64(row.Offset_1)
offset_2 = Int64(row.Offset_2)
offset_3 = Int64(row.Offset_3)
index = index_function(k, offset_0, offset_1, offset_2, offset_3)
i = index[1] + 1
j = index[2] + 1
@assert result[:, i, j, probe + 1] == [-1.0,-1.0,-1.0]
result[1, i, j, probe + 1] = row.Probe_SF_HR
result[2, i, j, probe + 1] = row.Probe_SR_HR
result[3, i, j, probe + 1] = row.Probe_FF_HR
end
allprobes = cube_flatten_z([result[1,:,:,:], result[2,:,:,:], result[3,:,:,:]])
sf_probe_heatmap_allprobes = allprobes[1]
sr_probe_heatmap_allprobes = allprobes[2]
ff_probe_heatmap_allprobes = allprobes[3]
all_slices = slice_extract_x([result[1,:,:,:], result[2,:,:,:], result[3,:,:,:]], slices_offset_0)
sf_probe_slices_heatmaps = all_slices[1]
sr_probe_slices_heatmaps = all_slices[2]
ff_probe_slices_heatmaps = all_slices[3]
graph2d(string(basename, "_SF_AllProbes"), sf_probe_heatmap_allprobes, "i", "j")
graph2d(string(basename, "_SR_AllProbes"), sr_probe_heatmap_allprobes, "i", "j")
graph2d(string(basename, "_FF_AllProbes"), ff_probe_heatmap_allprobes, "i", "j")
for (i, offset_0) in enumerate(slices_offset_0)
print(offset_0)
data = sf_probe_slices_heatmaps[i]
graph2dclims(string(basename, "_SF_Slice_", offset_0),sf_probe_slices_heatmaps[i],(0,1), "j", "probe")
graph2dclims(string(basename, "_SR_Slice_", offset_0),sr_probe_slices_heatmaps[i],(0,1), "j", "probe")
graph2dclims(string(basename, "_FF_Slice_", offset_0),ff_probe_slices_heatmaps[i],(0,1), "j", "probe")
end
result
end
cubes = myfill([], (length(eaps), length(types), length(functions_identifier)))
# need to push (k, cube)
Threads.@threads for experiment in files
for (eap, type, f, k, file) in experiment[4]
name = string(eaps[eap], "/julia_eap_", eaps[eap], "_", types[type], functions_identifier[f], k)
print(string(name,"\n"))
cube_3 = graph_2(name, file, k, functions_index[f])
push!(cubes[eap, type, f], cube_3)
end
end
if false
print("Computing 14 union 13...")
function cube_max(cubes_1, cubes_2)
@assert size(cubes_1) == size(cubes_2)
sizes = size(cubes_1)
@assert length(sizes) == 5
res = fill(0.0, sizes)
for i in range(1,sizes[1])
for j in range(1,sizes[2])
Threads.@threads for k in range(1,64)
for l in range(1, 64)
for m in range(1, 64)
res[i,j,k,l,m] = max(cubes_1[i,j,k,l,m], cubes_2[i,j,k,l,m])
end
end
end
end
end
res
end
index_0 = findfirst(isequal(0), eaps)
index_12 = findfirst(isequal(12), eaps)
index_13 = findfirst(isequal(13), eaps)
index_14 = findfirst(isequal(14), eaps)
cube_max_13_14 = cube_max(cubes[index_13,:,:,:,:,:], cubes[index_14,:,:,:,:,:])
function do_cubes(name, cubes)
cube_list = []
index_list = []
for type in range(1,length(types))
for method in range(1,3)
push!(cube_list, cubes[type,method,:,:,:])
push!(index_list, (type, method))
end
end
allgraphs = cube_flatten_z(cube_list)
for (i,(type,method)) in enumerate(index_list)
graph2d(string(name, "_", types[type], "2_", methods[method], "_AllProbes"), allgraphs[i], "i", "j")
for slice in diff_slices_offset_0
graph2d(string(name,"_", types[type], "2_", methods[method], "_Slice_", slice), cubes[type, method, slice+1,:,:], "j", "probe")
end
end
end
graph_13_14 = @task begin
do_cubes("julia_max_13_14", cube_max_13_14)
#cube_list = []
#index_list = []
#for type in range(1,length(types))
# for method in range(1,3)
# push!(cube_list, cube_max_13_14[type,method,:,:,:])
# push!(index_list, (type, method))
# end
#end
#allgraphs = cube_flatten_z(cube_list)
#for (i,(type,method)) in enumerate(index_list)
# graph2d(string("julia_max_13_14_", types[type], "2_", methods[method], "_AllProbes"), allgraphs[i], "i", "j")
#end
end
schedule(graph_13_14)
print(" OK\n")
print("Computing Any difference between 0 and 12...")
function cube_differences(cubes_1, cubes_2)
@assert size(cubes_1) == size(cubes_2)
sizes = size(cubes_1)
@assert length(sizes) == 5
res = fill(0.0, sizes)
for i in range(1,sizes[1])
for j in range(1,sizes[2])
Threads.@threads for k in range(1,64)
for l in range(1, 64)
for m in range(1, 64)
res[i,j,k,l,m] = abs(cubes_1[i,j,k,l,m] - cubes_2[i,j,k,l,m])
end
end
end
end
end
res
end
cube_diff_0_12 = cube_differences(cubes[index_0,:,:,:,:,:], cubes[index_12,:,:,:,:,:])
graph_0_12 = @task begin
do_cubes("julia_diff_0_12", cube_diff_0_12)
#cube_list = []
#index_list = []
#for type in range(1,length(types))
# for method in range(1,3)
# push!(cube_list, cube_diff_0_12[type,method,:,:,:])
# push!(index_list, (type, method))
# end
#end
#allgraphs = cube_flatten_z(cube_list)
#for (i,(type,method)) in enumerate(index_list)
# graph2d(string("julia_diff_0_12_", types[type], "2_", methods[method], "_AllProbes"), allgraphs[i], "i", "j")
#end
end
schedule(graph_0_12)
print(" OK\n")
print("Computing Differences between 12 and (13 union 14)...")
cube_diff_12_1314 = cube_differences(cubes[index_0,:,:,:,:,:], cube_max_13_14)
graph_12_1314 = @task begin
do_cubes("julia_diff_12_1314", cube_diff_12_1314)
#cube_list = []
#index_list = []
#for type in range(1,length(types))
# for method in range(1,3)
# push!(cube_list, cube_diff_12_1314[type,method,:,:,:])
# push!(index_list, (type, method))
# end
#end
#allgraphs = cube_flatten_z(cube_list)
#for (i,(type,method)) in enumerate(index_list)
# graph2d(string("julia_diff_12_1314", types[type], "2_", methods[method], "_AllProbes"), allgraphs[i], "i", "j")
# for slice in diff_slices_offset_0
#
# end
#end
end
schedule(graph_12_1314)
wait(graph_13_14)
wait(graph_0_12)
wait(graph_12_1314)
print("done\n")
end

analysis/makeplots-extra.jl (new file)
@@ -0,0 +1,403 @@
using CSV
using Plots
pgfplotsx()
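# Like fill(), but deep-copies the element into every cell so the entries can be
# mutated independently (fill() would alias a single mutable object everywhere).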
function myfill(element, dimensions)
res = fill(element, dimensions)
res = map(x -> deepcopy(x), res)
res
end
# General TODOs: fix the ticks, add legends
#eaps = [12,13]
#eaps = [0,14,15]
eaps = [0,12,13,14,15]
len_eaps = length(eaps)
types = ["S","U"]
functions_identifier = ["A", "B", "C", "D", "E"]
#functions_identifier = ["B"]
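# Each *_index function asserts that a CSV row's offsets satisfy the pattern
# relation selected by k, then maps the row to the (x, y) cell of the 2D plot.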
function A_index(k, x, y, z)
if (x + k) % 64 != y
print(string("Argh, k = ", k, ", x = ", x, ", y = ", y, ", z = ", z))
@assert false
end
(x,z)
end
function B_index(k, x, y, z)
@assert (x - k + 64) % 64 == y
(x,z)
end
function C_index(k, x, y, z)
@assert (y + k) % 64 == z
(x,y)
end
function D_index(k, x, y, z)
@assert (y - k + 64) % 64 == z
(x,y)
end
function E_index(k, x, y, z)
@assert (y - x + 64) % 64 == (z - y + 64) % 64
(x, (y - x + 64) % 64)
end
functions_index = [A_index, B_index, C_index, D_index, E_index]
#functions_index = [B_index]
#types = ["S"]
ks = [[1, 2, 3, 4, 8],
[1, 2, 3, 4, 8],
[1, 2, 3, 4, 8],
[1, 2, 3, 4, 8],
[2, 3, 4]
]
#ks = [[8]]
methods = ["SF", "SR", "FF"]
plot_lock = ReentrantLock()
slices_offset_0 = [0, 1, 2, 8, 14, 15, 30, 31, 32, 55, 56, 61, 62, 63]
#slices_offset_0 = []
diff_slices_offset_0 = [0, 1, 2, 61, 62, 63]
function make_name(eap, type, f, k)
string("extrap/extrap-with-", eap, "-prefetcher.", type, f, k, ".csv")
end
all_file_names = myfill((0,0,0,[]), (length(eaps), length(types), length(functions_index)))
for x in 1:len_eaps
for (y,type) in enumerate(types)
for (z,f) in enumerate(functions_identifier)
all_file_names[x,y,z] = (x,y,z,[])
for (i,k) in enumerate(ks[z])
# change me : insert file names into list
push!(all_file_names[x,y,z][4] , (x, y, z, k, make_name(eaps[x], type, f, k) ) )
end
end
end
end
print(all_file_names)
#files = Matrix(CSV, length(eaps), length(types), length(levels))
files = Array{
Union{
Nothing,
Tuple{Int64, Int64, Int64, Vector{
Tuple{ Int64, Int64, Int64, Int64, CSV.File }
}}
},3
}(nothing, length(eaps), length(types), length(functions_identifier))
Threads.@threads for f in all_file_names
x = f[1]
y = f[2]
z = f[3]
files[x,y,z] = (x,y,z,[])
for (x,y,z,k,name) in f[4]
push!(files[x,y,z][4], (x,y,z,k, CSV.File(name)))
end
end
# TODO:
#
# - Split this function into one that loads the data into a square/cube structure and one that does the plotting.
# - Refactor the code below to compute the various squares/cubes first and then do the plots.
# - Refactor the slicing function too.
# - Create a custom diagonal slice function?
preamble_printed = false
push!(PGFPlotsX.CUSTOM_PREAMBLE,raw"\newcommand{\gdfigurewidth}{150mm}")
push!(PGFPlotsX.CUSTOM_PREAMBLE,raw"\newcommand{\gdfigureheight}{100mm}")
function graph2d(name, matrix, xlabel, ylabel)
x = range(0, 63)
y = range(0, 63)
function hmp2d(x, y)
matrix[x + 1, y + 1]
end
lock(plot_lock) do
graph = heatmap(x, y, hmp2d, minorgrid=true, height = raw"{\gdfigureheight}}, width = {{\gdfigurewidth}", xlabel = xlabel, ylabel = ylabel, c = :blues, extra_kwargs =:subplot)
if !preamble_printed
global preamble_printed = true
print(Plots.pgfx_preamble(graph))
end
savefig(graph, string(name, ".tikz"))
savefig(graph, string(name, ".pdf"))
end
end
function graph2dclims(name, matrix, clims, xlabel, ylabel)
x = range(0, 63)
y = range(0, 63)
function hmp2d(x, y)
matrix[x + 1, y + 1]
end
lock(plot_lock) do
graph = heatmap(x, y, hmp2d, clims = clims, minorgrid=true, height = raw"{\gdfigureheight}}, width = {{\gdfigurewidth}", xlabel = xlabel, ylabel = ylabel, extra_kwargs =:subplot)
savefig(graph, string(name, ".tikz"))
savefig(graph, string(name, ".pdf"))
end
end
function cube_flatten_z(cubes)
len = length(cubes)
res = myfill(myfill(0.0,(64,64)), len)
for k in range(1,64)
Threads.@threads for i in range(1,64)
for j in range(1,64)
for l in range(1,len)
res[l][i,j] += cubes[l][i,j,k]
end
end
end
end
res
end
function slice_extract_x(cubes, slices)
slice_length = length(slices)
cube_length = length(cubes)
res = myfill(myfill(myfill(0.0, (64, 64)), slice_length), cube_length)
for i in range(1,64)
for j in range(1,64)
for (k,slice) in enumerate(slices)
for l in range(1, cube_length)
res[l][k][i, j] = cubes[l][slice+1, i, j]
end
end
end
end
res
end
function graph_2(basename, csv, k, index_function)
result = fill(-1.0, (3, 64,64,64))
# Fill in the 3D cube, then create the various slices and flattenings
# Flattened Cube with x = first addr, y = second addr, compute the sum of prefetches ?
# Grab a few random first addresses and look at them with x = second addr, y = probe addr
# 0,1, 62,63 14, 15 plus one other depending on what appears
for row in csv
probe = row.ProbeAddr
offset_0 = row.Offset_0
offset_1 = row.Offset_1
offset_2 = row.Offset_2
index = index_function(k, offset_0, offset_1, offset_2)
i = index[1] + 1
j = index[2] + 1
@assert result[:, i, j, probe + 1] == [-1.0,-1.0,-1.0]
result[1, i, j, probe + 1] = row.Probe_SF_HR
result[2, i, j, probe + 1] = row.Probe_SR_HR
result[3, i, j, probe + 1] = row.Probe_FF_HR
end
allprobes = cube_flatten_z([result[1,:,:,:], result[2,:,:,:], result[3,:,:,:]])
sf_probe_heatmap_allprobes = allprobes[1]
sr_probe_heatmap_allprobes = allprobes[2]
ff_probe_heatmap_allprobes = allprobes[3]
all_slices = slice_extract_x([result[1,:,:,:], result[2,:,:,:], result[3,:,:,:]], slices_offset_0)
sf_probe_slices_heatmaps = all_slices[1]
sr_probe_slices_heatmaps = all_slices[2]
ff_probe_slices_heatmaps = all_slices[3]
graph2d(string(basename, "_SF_AllProbes"), sf_probe_heatmap_allprobes, "i", "j")
graph2d(string(basename, "_SR_AllProbes"), sr_probe_heatmap_allprobes, "i", "j")
graph2d(string(basename, "_FF_AllProbes"), ff_probe_heatmap_allprobes, "i", "j")
for (i, offset_0) in enumerate(slices_offset_0)
print(offset_0)
data = sf_probe_slices_heatmaps[i]
graph2dclims(string(basename, "_SF_Slice_", offset_0),sf_probe_slices_heatmaps[i],(0,1), "j", "probe")
graph2dclims(string(basename, "_SR_Slice_", offset_0),sr_probe_slices_heatmaps[i],(0,1), "j", "probe")
graph2dclims(string(basename, "_FF_Slice_", offset_0),ff_probe_slices_heatmaps[i],(0,1), "j", "probe")
end
result
end
cubes = myfill([], (length(eaps), length(types), length(functions_identifier)))
# need to push (k, cube)
Threads.@threads for experiment in files
for (eap, type, f, k, file) in experiment[4]
name = string(eaps[eap], "/julia_eap_", eaps[eap], "_", types[type], functions_identifier[f], k)
print(string(name,"\n"))
cube_3 = graph_2(name, file, k, functions_index[f])
push!(cubes[eap, type, f], cube_3)
end
end
print("Computing 14 union 13...")
function cube_max(cubes_1, cubes_2)
@assert size(cubes_1) == size(cubes_2)
sizes = size(cubes_1)
@assert length(sizes) == 5
res = fill(0.0, sizes)
for i in range(1,sizes[1])
for j in range(1,sizes[2])
Threads.@threads for k in range(1,64)
for l in range(1, 64)
for m in range(1, 64)
res[i,j,k,l,m] = max(cubes_1[i,j,k,l,m], cubes_2[i,j,k,l,m])
end
end
end
end
end
res
end
index_0 = findfirst(isequal(0), eaps)
index_12 = findfirst(isequal(12), eaps)
index_13 = findfirst(isequal(13), eaps)
index_14 = findfirst(isequal(14), eaps)
cube_max_13_14 = cube_max(cubes[index_13,:,:,:,:,:], cubes[index_14,:,:,:,:,:])
function do_cubes(name, cubes)
cube_list = []
index_list = []
for type in range(1,length(types))
for method in range(1,3)
push!(cube_list, cubes[type,method,:,:,:])
push!(index_list, (type, method))
end
end
allgraphs = cube_flatten_z(cube_list)
for (i,(type,method)) in enumerate(index_list)
graph2d(string(name, "_", types[type], "2_", methods[method], "_AllProbes"), allgraphs[i], "i", "j")
for slice in diff_slices_offset_0
graph2d(string(name,"_", types[type], "2_", methods[method], "_Slice_", slice), cubes[type, method, slice+1,:,:], "j", "probe")
end
end
end
graph_13_14 = @task begin
do_cubes("julia_max_13_14", cube_max_13_14)
cube_list = []
index_list = []
for type in range(1,length(types))
for method in range(1,3)
push!(cube_list, cube_max_13_14[type,method,:,:,:])
push!(index_list, (type, method))
end
end
allgraphs = cube_flatten_z(cube_list)
for (i,(type,method)) in enumerate(index_list)
graph2d(string("julia_max_13_14_", types[type], "2_", methods[method], "_AllProbes"), allgraphs[i], "i", "j")
end
end
schedule(graph_13_14)
print(" OK\n")
print("Computing Any difference between 0 and 12...")
function cube_differences(cubes_1, cubes_2)
@assert size(cubes_1) == size(cubes_2)
sizes = size(cubes_1)
@assert length(sizes) == 5
res = fill(0.0, sizes)
for i in range(1,sizes[1])
for j in range(1,sizes[2])
Threads.@threads for k in range(1,64)
for l in range(1, 64)
for m in range(1, 64)
res[i,j,k,l,m] = abs(cubes_1[i,j,k,l,m] - cubes_2[i,j,k,l,m])
end
end
end
end
end
res
end
cube_diff_0_12 = cube_differences(cubes[index_0,:,:,:,:,:], cubes[index_12,:,:,:,:,:])
graph_0_12 = @task begin
do_cubes("julia_diff_0_12", cube_diff_0_12)
cube_list = []
index_list = []
for type in range(1,length(types))
for method in range(1,3)
push!(cube_list, cube_diff_0_12[type,method,:,:,:])
push!(index_list, (type, method))
end
end
allgraphs = cube_flatten_z(cube_list)
for (i,(type,method)) in enumerate(index_list)
graph2d(string("julia_diff_0_12_", types[type], "2_", methods[method], "_AllProbes"), allgraphs[i], "i", "j")
end
end
schedule(graph_0_12)
print(" OK\n")
print("Computing Differences between 12 and (13 union 14)...")
cube_diff_12_1314 = cube_differences(cubes[index_0,:,:,:,:,:], cube_max_13_14)
graph_12_1314 = @task begin
do_cubes("julia_diff_12_1314", cube_diff_12_1314)
cube_list = []
index_list = []
for type in range(1,length(types))
for method in range(1,3)
push!(cube_list, cube_diff_12_1314[type,method,:,:,:])
push!(index_list, (type, method))
end
end
allgraphs = cube_flatten_z(cube_list)
for (i,(type,method)) in enumerate(index_list)
graph2d(string("julia_diff_12_1314", types[type], "2_", methods[method], "_AllProbes"), allgraphs[i], "i", "j")
for slice in diff_slices_offset_0
end
end
end
schedule(graph_12_1314)
wait(graph_13_14)
wait(graph_0_12)
wait(graph_12_1314)
print("done\n")

analysis/makeplots.jl (new file)
@@ -0,0 +1,419 @@
using CSV
using Plots
pgfplotsx()
# General TODOs: fix the ticks, add legends
#eaps = [0,12,13,14]
eaps = [0,12,13,14,15]
len_eaps = length(eaps)
types = ["S","U"]
#types = ["S"]
levels = [0,1,2]
methods = ["SF", "SR", "FF"]
plot_lock = ReentrantLock()
slices_offset_0 = [0, 1, 2, 8, 14, 15, 30, 31, 32, 55, 56, 61, 62, 63]
#slices_offset_0 = []
diff_slices_offset_0 = [0, 1, 2, 61, 62, 63]
function make_name(eap, type, level)
string("eap/eap-with-", eap, "-prefetcher.", type, level, ".csv")
end
all_file_names = fill((0,0,0,""), length(eaps), length(types), length(levels))
Threads.@threads for x in 1:len_eaps
for (y,type) in enumerate(types)
for (z,level) in enumerate(levels)
all_file_names[x,y,z] = (x,y,z,make_name(eaps[x], type, level))
end
end
end
#files = Matrix(CSV, length(eaps), length(types), length(levels))
files = Array{Union{Nothing, Tuple{Int64,Int64,Int64,CSV.File}},3}(nothing, length(eaps), length(types), length(levels))
Threads.@threads for f in all_file_names
x = f[1]
y = f[2]
z = f[3]
name = f[4]
files[x,y,z] = (x,y,z,CSV.File(name))
end
function graph_0(name, csv)
data = [csv.Probe_FF_HR, csv.Probe_SR_HR, csv.Probe_SF_HR]
x = range(0, 63)
y = range(0, 2)
function f(x, y)
data[y + 1][x + 1]
end
lock(plot_lock) do
graph = heatmap(x, y, f, yticks = ([0,1,2], ["FF", "SR", "SF"]), clims = (0, 1), xlabel="probe")
savefig(graph, string("julia_", name, ".tikz"))
savefig(graph, string("julia_", name, ".pdf"))
end
end # TODO: double-check whether something better can be done w.r.t. the y tick names
# TODO:
#
# - Split this function into one that loads the data into a square/cube structure and one that does the plotting.
# - Refactor the code below to compute the various squares/cubes first and then do the plots.
# - Refactor the slicing function too.
# - Create a custom diagonal slice function?
preamble_printed = false
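# Emit LaTeX macros for the figure dimensions into the PGFPlotsX preamble, so the
# final size can be adjusted from the including document; the preamble is printed
# once (see preamble_printed) so it can be copied into the paper.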
push!(PGFPlotsX.CUSTOM_PREAMBLE,raw"\newcommand{\gdfigurewidth}{150mm}")
push!(PGFPlotsX.CUSTOM_PREAMBLE,raw"\newcommand{\gdfigureheight}{100mm}")
function graph2d(name, matrix, xlabel, ylabel)
x = range(0, 63)
y = range(0, 63)
function hmp2d(x, y)
matrix[x + 1, y + 1]
end
lock(plot_lock) do
graph = heatmap(x, y, hmp2d, minorgrid=true, height = raw"{\gdfigureheight}}, width = {{\gdfigurewidth}", xlabel = xlabel, ylabel = ylabel, c = :blues, extra_kwargs =:subplot)
if !preamble_printed
global preamble_printed = true
print(Plots.pgfx_preamble(graph))
end
savefig(graph, string(name, ".tikz"))
savefig(graph, string(name, ".pdf"))
end
end
function graph2dclims(name, matrix, clims, xlabel, ylabel)
x = range(0, 63)
y = range(0, 63)
function hmp2d(x, y)
matrix[x + 1, y + 1]
end
lock(plot_lock) do
graph = heatmap(x, y, hmp2d, clims = clims, minorgrid=true, height = raw"{\gdfigureheight}}, width = {{\gdfigurewidth}", xlabel = xlabel, ylabel = ylabel, extra_kwargs =:subplot)
savefig(graph, string(name, ".tikz"))
savefig(graph, string(name, ".pdf"))
end
end
function graph_1(basename, csv)
# define the 2D arrays for the 3 heatmaps
sf_probe_heatmap = fill(-1.0, 64, 64)
sr_probe_heatmap = fill(-1.0, 64, 64)
ff_probe_heatmap = fill(-1.0, 64, 64)
# define 3 1D arrays to build the heatmap for average time of the first access in FF/SR and SF modes
sf_offset_hit_time = fill(-1.0, 64)
sr_offset_hit_time = fill(-1.0, 64)
ff_offset_hit_time = fill(-1.0, 64)
# iterates on the rows and fill in the 2D arrays.
for row in csv
offset = row.Offset_0
probe = row.ProbeAddr
@assert sf_probe_heatmap[offset+1,probe+1] == -1.0
sf_probe_heatmap[offset + 1, probe + 1] = row.Probe_SF_HR
sr_probe_heatmap[offset + 1, probe + 1] = row.Probe_SR_HR
ff_probe_heatmap[offset + 1, probe + 1] = row.Probe_FF_HR
if probe == 0
@assert sf_offset_hit_time[offset + 1] == -1.0
sf_offset_hit_time[offset + 1] = 0.0
end
sf_offset_hit_time[offset + 1] += row.Offset_0_SF_HR
sr_offset_hit_time[offset + 1] += row.Offset_0_SR_HR
ff_offset_hit_time[offset + 1] += row.Offset_0_FF_HR
if probe == 63
sf_offset_hit_time[offset + 1] /= 64
sr_offset_hit_time[offset + 1] /= 64
ff_offset_hit_time[offset + 1] /= 64
end
end
graph2dclims(string("julia_", basename, "_SF"), sf_probe_heatmap, (0,1), "i", "probe")
graph2dclims(string("julia_", basename, "_SR"), sr_probe_heatmap, (0,1), "i", "probe")
graph2dclims(string("julia_", basename, "_FF"), ff_probe_heatmap, (0,1), "i", "probe")
data = [ff_offset_hit_time, sr_offset_hit_time, sf_offset_hit_time]
x = range(0, 63)
y = range(0, 2)
function f(x, y)
data[y + 1][x + 1]
end
lock(plot_lock) do
graph = heatmap(x, y, f)
savefig(graph, string("julia_", basename, "_Offset_0_HT.tikz"))
savefig(graph, string("julia_", basename, "_Offset_0_HT.pdf"))
end
end
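# Like fill(), but deep-copies the element into every cell so the entries can be
# mutated independently (fill() would alias a single mutable object everywhere).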
function myfill(element, dimensions)
res = fill(element, dimensions)
res = map(x -> deepcopy(x), res)
res
end
function cube_flatten_z(cubes)
len = length(cubes)
res = myfill(myfill(0.0,(64,64)), len)
for k in range(1,64)
Threads.@threads for i in range(1,64)
for j in range(1,64)
for l in range(1,len)
res[l][i,j] += cubes[l][i,j,k]
end
end
end
end
res
end
function slice_extract_x(cubes, slices)
slice_length = length(slices)
cube_length = length(cubes)
res = myfill(myfill(myfill(0.0, (64, 64)), slice_length), cube_length)
for i in range(1,64)
for j in range(1,64)
for (k,slice) in enumerate(slices)
for l in range(1, cube_length)
res[l][k][i, j] = cubes[l][slice+1, i, j]
end
end
end
end
res
end
function graph_2(basename, csv)
# First define a 3D cube for the resulting data ?
sf_probe_heatmap = myfill(-1.0, (64, 64, 64))
sr_probe_heatmap = myfill(-1.0, (64, 64, 64))
ff_probe_heatmap = myfill(-1.0, (64, 64, 64))
# Fill in the 3D cube, then create the various slices and flattenings
# Flattened Cube with x = first addr, y = second addr, compute the sum of prefetches ?
# Grab a few random first addresses and look at them with x = second addr, y = probe addr
# 0,1, 62,63 14, 15 plus one other depending on what appears
# Also define and fill in a 2D matrix of offset1-offset2 hit time.
sf_offset_hit_time = myfill(-1.0, (64, 64))
sr_offset_hit_time = myfill(-1.0, (64, 64))
ff_offset_hit_time = myfill(-1.0, (64, 64))
for row in csv
probe = row.ProbeAddr
offset_0 = row.Offset_0
offset_1 = row.Offset_1
@assert sf_probe_heatmap[offset_0 + 1, offset_1 + 1, probe + 1] == -1.0
sf_probe_heatmap[offset_0 + 1, offset_1 + 1, probe + 1] = row.Probe_SF_HR
sr_probe_heatmap[offset_0 + 1, offset_1 + 1, probe + 1] = row.Probe_SR_HR
ff_probe_heatmap[offset_0 + 1, offset_1 + 1, probe + 1] = row.Probe_FF_HR
if probe == 0
@assert sf_offset_hit_time[offset_0 + 1, offset_1 + 1] == -1.0
sf_offset_hit_time[offset_0 + 1, offset_1 + 1] = 0.0
end
sf_offset_hit_time[offset_0 + 1, offset_1 + 1] += row.Offset_1_SF_HR
sr_offset_hit_time[offset_0 + 1, offset_1 + 1] += row.Offset_1_SR_HR
ff_offset_hit_time[offset_0 + 1, offset_1 + 1] += row.Offset_1_FF_HR
if probe == 63
sf_offset_hit_time[offset_0 + 1, offset_1 + 1] /= 64
sr_offset_hit_time[offset_0 + 1, offset_1 + 1] /= 64
ff_offset_hit_time[offset_0 + 1, offset_1 + 1] /= 64
end
end
allprobes = cube_flatten_z([sf_probe_heatmap, sr_probe_heatmap, ff_probe_heatmap])
sf_probe_heatmap_allprobes = allprobes[1]
sr_probe_heatmap_allprobes = allprobes[2]
ff_probe_heatmap_allprobes = allprobes[3]
all_slices = slice_extract_x([sf_probe_heatmap, sr_probe_heatmap, ff_probe_heatmap], slices_offset_0)
sf_probe_slices_heatmaps = all_slices[1]
sr_probe_slices_heatmaps = all_slices[2]
ff_probe_slices_heatmaps = all_slices[3]
graph2d(string("julia_", basename, "_SF_AllProbes"), sf_probe_heatmap_allprobes, "i", "j")
graph2d(string("julia_", basename, "_SR_AllProbes"), sr_probe_heatmap_allprobes, "i", "j")
graph2d(string("julia_", basename, "_FF_AllProbes"), ff_probe_heatmap_allprobes, "i", "j")
for (i, offset_0) in enumerate(slices_offset_0)
print(offset_0)
data = sf_probe_slices_heatmaps[i]
graph2dclims(string("julia_", basename, "_SF_Slice_", offset_0),sf_probe_slices_heatmaps[i],(0,1), "j", "probe")
graph2dclims(string("julia_", basename, "_SR_Slice_", offset_0),sr_probe_slices_heatmaps[i],(0,1), "j", "probe")
graph2dclims(string("julia_", basename, "_FF_Slice_", offset_0),ff_probe_slices_heatmaps[i],(0,1), "j", "probe")
end
[sf_probe_heatmap, sr_probe_heatmap, ff_probe_heatmap]
end
Threads.@threads for file in files[:,:,1]
name = string("eap_",eaps[file[1]],"_",types[file[2]],levels[file[3]])
graph_0(name, file[4])
print(string(name,"\n"))
end
Threads.@threads for file in files[:,:,2]
name = string("eap_",eaps[file[1]],"_",types[file[2]],levels[file[3]])
graph_1(name, file[4])
print(string(name,"\n"))
end
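# cubes[eap, type, method, offset_0, offset_1, probe] holds the probe hit rate,
# with methods 1..3 = SF, SR, FF (the order returned by graph_2)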
cubes = fill(0.0, length(eaps), length(types), 3, 64, 64, 64)
Threads.@threads for file in files[:,:,3]
name = string("eap_",eaps[file[1]],"_",types[file[2]],levels[file[3]])
(sf,sr,ff) = graph_2(name, file[4])
cubes[file[1], file[2], 1, :, :, :] = sf
cubes[file[1], file[2], 2, :, :, :] = sr
cubes[file[1], file[2], 3, :, :, :] = ff
print(string(name,"\n"))
end
print("Computing 14 union 13...")
function cube_max(cubes_1, cubes_2)
@assert size(cubes_1) == size(cubes_2)
sizes = size(cubes_1)
@assert length(sizes) == 5
res = fill(0.0, sizes)
for i in range(1,sizes[1])
for j in range(1,sizes[2])
Threads.@threads for k in range(1,64)
for l in range(1, 64)
for m in range(1, 64)
res[i,j,k,l,m] = max(cubes_1[i,j,k,l,m], cubes_2[i,j,k,l,m])
end
end
end
end
end
res
end
index_0 = findfirst(isequal(0), eaps)
index_12 = findfirst(isequal(12), eaps)
index_13 = findfirst(isequal(13), eaps)
index_14 = findfirst(isequal(14), eaps)
cube_max_13_14 = cube_max(cubes[index_13,:,:,:,:,:], cubes[index_14,:,:,:,:,:])
function do_cubes(name, cubes)
cube_list = []
index_list = []
for type in range(1,length(types))
for method in range(1,3)
push!(cube_list, cubes[type,method,:,:,:])
push!(index_list, (type, method))
end
end
allgraphs = cube_flatten_z(cube_list)
for (i,(type,method)) in enumerate(index_list)
graph2d(string(name, "_", types[type], "2_", methods[method], "_AllProbes"), allgraphs[i], "i", "j")
for slice in diff_slices_offset_0
graph2d(string(name,"_", types[type], "2_", methods[method], "_Slice_", slice), cubes[type, method, slice+1,:,:], "j", "probe")
end
end
end
graph_13_14 = @task begin
do_cubes("julia_max_13_14", cube_max_13_14)
cube_list = []
index_list = []
for type in range(1,length(types))
for method in range(1,3)
push!(cube_list, cube_max_13_14[type,method,:,:,:])
push!(index_list, (type, method))
end
end
allgraphs = cube_flatten_z(cube_list)
for (i,(type,method)) in enumerate(index_list)
graph2d(string("julia_max_13_14_", types[type], "2_", methods[method], "_AllProbes"), allgraphs[i], "i", "j")
end
end
schedule(graph_13_14)
print(" OK\n")
print("Computing Any difference between 0 and 12...")
function cube_differences(cubes_1, cubes_2)
@assert size(cubes_1) == size(cubes_2)
sizes = size(cubes_1)
@assert length(sizes) == 5
res = fill(0.0, sizes)
for i in range(1,sizes[1])
for j in range(1,sizes[2])
Threads.@threads for k in range(1,64)
for l in range(1, 64)
for m in range(1, 64)
res[i,j,k,l,m] = abs(cubes_1[i,j,k,l,m] - cubes_2[i,j,k,l,m])
end
end
end
end
end
res
end
cube_diff_0_12 = cube_differences(cubes[index_0,:,:,:,:,:], cubes[index_12,:,:,:,:,:])
graph_0_12 = @task begin
do_cubes("julia_diff_0_12", cube_diff_0_12)
cube_list = []
index_list = []
for type in range(1,length(types))
for method in range(1,3)
push!(cube_list, cube_diff_0_12[type,method,:,:,:])
push!(index_list, (type, method))
end
end
allgraphs = cube_flatten_z(cube_list)
for (i,(type,method)) in enumerate(index_list)
graph2d(string("julia_diff_0_12_", types[type], "2_", methods[method], "_AllProbes"), allgraphs[i], "i", "j")
end
end
schedule(graph_0_12)
print(" OK\n")
print("Computing Differences between 12 and (13 union 14)...")
cube_diff_12_1314 = cube_differences(cubes[index_0,:,:,:,:,:], cube_max_13_14)
graph_12_1314 = @task begin
do_cubes("julia_diff_12_1314", cube_diff_12_1314)
cube_list = []
index_list = []
for type in range(1,length(types))
for method in range(1,3)
push!(cube_list, cube_diff_12_1314[type,method,:,:,:])
push!(index_list, (type, method))
end
end
allgraphs = cube_flatten_z(cube_list)
for (i,(type,method)) in enumerate(index_list)
graph2d(string("julia_diff_12_1314", types[type], "2_", methods[method], "_AllProbes"), allgraphs[i], "i", "j")
for slice in diff_slices_offset_0
end
end
end
schedule(graph_12_1314)
wait(graph_13_14)
wait(graph_0_12)
wait(graph_12_1314)
print("done\n")

analysis/sort.sh (new executable file)
@@ -0,0 +1,74 @@
#!/bin/bash
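# Sort the generated figures into subfolders: _/<level> for the per-level S/U
# plots, <pattern letter>/<k> for the rest. The {S,U} and similar brace
# expansions below require bash rather than plain POSIX sh.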
mkdir -p _/0
mkdir -p _/1
mkdir -p _/2
mv *{S,U}0* _/0/
mv *{S,U}1* _/1/
mv *{S,U}2* _/2/
mkdir -p A/1/
mkdir -p A/2/
mkdir -p A/3/
mkdir -p A/4/
mkdir -p A/8/
mkdir -p B/8/
mkdir -p B/4/
mkdir -p B/3/
mkdir -p B/2/
mkdir -p B/1/
mkdir -p C/1/
mkdir -p C/2/
mkdir -p C/3/
mkdir -p C/4/
mkdir -p C/8/
mkdir -p D/8/
mkdir -p D/1/
mkdir -p D/2/
mkdir -p D/3/
mkdir -p D/4/
mkdir -p E/4/
mkdir -p E/3/
mkdir -p E/2/
mkdir -p F/1/
mkdir -p F/-1/
mkdir -p F/2/
mkdir -p F/-2/
mkdir -p F/3/
mkdir -p F/-3/
mkdir -p F/4/
mkdir -p F/-4/
mv *A1_* A/1/
mv *A2_* A/2/
mv *A3_* A/3/
mv *A4_* A/4/
mv *A8_* A/8/
mv *B8_* B/8/
mv *B4_* B/4/
mv *B3_* B/3/
mv *B2_* B/2/
mv *B1_* B/1/
mv *C1_* C/1/
mv *C2_* C/2/
mv *C3_* C/3/
mv *C4_* C/4/
mv *C8_* C/8/
mv *D8_* D/8/
mv *D1_* D/1/
mv *D2_* D/2/
mv *D3_* D/3/
mv *D4_* D/4/
mv *E4_* E/4/
mv *E3_* E/3/
mv *E2_* E/2/
mv *F1_* F/1/
mv *F-1_* F/-1/
mv *F2_* F/2/
mv *F-2_* F/-2/
mv *F3_* F/3/
mv *F-3_* F/-3/
mv *F4_* F/4/
mv *F-4_* F/-4/

prefetcher_reverse/Readme.md (new file)
@@ -0,0 +1,76 @@
CacheObserver - monitor what happens in the cache when doing memory accesses
============================================================================
This framework, derived from https://github.com/MIAOUS-group/calibration-done-right,
is built to help reverse engineer prefetchers on Intel CPUs.
The main entry point of the framework is the `prefetcher_reverse` crate.
The code presented runs under Fedora 30, and can also be made to run on Ubuntu 18.04 LTS with minor tweaks
(notably, libcpupower may also be called libcpufreq).
## Usage
Requires Rust nightly features. Install Rust nightly using rustup;
known working versions are listed at the end of this document.
This tool needs access to MSRs and thus requires sudo.
The setup.sh script disables turbo boost and makes sure the frequency is set to the max
non-boosted frequency.
One can run all the experiments with the following instructions:
```
cd prefetcher_reverse
mkdir results-xxx
cd results-xxx
sudo ../setup.sh
../run-msr-all.sh 15
../run-msr-all.sh 14
../run-msr-all.sh 13
../run-msr-all.sh 12
../run-msr-all.sh 0
# Do not forget to re-enable turbo-boost and set the cpupower frequency governor back
```
This results in a set of log files that can then be analyzed.
**Note: with the default settings, this results in several GB worth of logs.**
## General Architecture
`prefetcher_reverse` is where the experiments used to reverse engineer the prefetchers live.
It contains the `Prober` structure, along with binaries that generate patterns for the experiments
to run and feed them to the `Prober` struct.
The `analysis` folder contains the scripts we used to turn the logs into figures (these are still
to be fully documented). We used Julia with Plots and the PGFPlotsX backend to generate the figures.
The flow is to first use `extract_analysis_csv.sh` to extract the CSVs for each experiment from the logs.
Then one can run the makeplots Julia scripts (these are unfortunately not optimized and may run for several hours, as the LaTeX backend is not thread-safe and generates many figures).
Those scripts expect to find the CSVs at a specific path, and require one output folder per
MSR 420 (0x1A4) value to exist beforehand (so folders 15, 14, 13, 12 and 0 must exist).
They are still quite rough and undocumented; rough edges are to be expected.
(A better version could be released if the paper is accepted.)
The resulting figures can then be sorted into subfolders with `sort.sh` for easier browsing, and `change_colour_map.sh` can be used to tweak the colormaps of the .tikz files for use in papers.
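For illustration, here is a minimal sketch of that flow, assuming the logs sit in the `eap/`, `extrap/` and `bonusap/` subfolders whose paths the Julia scripts hard-code (adapt the paths to your actual run; the same applies to `makeplots-extra.jl` and `makeplots-bonus.jl`):

```
cd results-xxx
# extract_analysis_csv.sh takes the log basename, without the .log extension
for log in eap/*.log extrap/*.log bonusap/*.log; do
    ../analysis/extract_analysis_csv.sh "${log%.log}"
done
# one output folder per MSR 0x1A4 value must exist before plotting
mkdir -p 0 12 13 14 15
JULIA_NUM_THREADS=$(nproc) julia ../analysis/makeplots.jl  # may run for hours
# sort the figures inside each per-MSR folder, then rewrite the colormap names
for d in 0 12 13 14 15; do (cd "$d" && ../../analysis/sort.sh); done
../analysis/change_colour_map.sh
```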
Crates originally from the *Calibration done right* framework, slightly modified:
- `basic_timing_cache_channel` contains generic implementations of Naive and Optimised cache side channels, that just require providing the actual operation used
- `cache_side_channel` defines the interface cache side channels have to implement
- `cache_utils` contains utilities related to cache attacks
- `cpuid` is a small crate that handles CPU microarchitecture identification and provides info about what is known about it
- `flush_flush` and `flush_reload` are tiny crates that use `basic_timing_cache_channel` to export Flush+Flush and Flush+Reload primitives
- `turn_lock` is the synchronization primitive used by `cache_utils`
### Rust versions
Known good nightly versions:
- rustc 1.54.0-nightly (eab201df7 2021-06-09)
- rustc 1.55.0-nightly (885399992 2021-07-06)