Final experiments

Code for the final covert-channel experiments: benchmark the naive and calibrated Flush+Flush channels and the naive Flush+Reload channel over 1 to 32 shared pages, and add capacity and true-capacity statistics (mean and variance) to the evaluation.
This commit is contained in:
Guillaume DIDIER 2020-11-24 10:25:32 +01:00
parent 236b8bee48
commit 168f81a19e
7 changed files with 363 additions and 40 deletions

Cargo.lock generated
View File

@@ -114,6 +114,7 @@ version = "0.1.0"
dependencies = [
"covert_channels_evaluation",
"flush_flush",
"flush_reload",
]
[[package]]

View File

@@ -7,5 +7,6 @@ edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
covert_channels_evaluation = {path = "../covert_channels_evaluation"}
flush_flush = {path = "../flush_flush"}
covert_channels_evaluation = { path = "../covert_channels_evaluation" }
flush_flush = { path = "../flush_flush" }
flush_reload = { path = "../flush_reload" }

View File

@@ -1,21 +1,140 @@
#![feature(unsafe_block_in_unsafe_fn)]
#![deny(unsafe_op_in_unsafe_fn)]
use covert_channels_evaluation::benchmark_channel;
use flush_flush::FlushAndFlush;
use std::io::{stdout, Write};
use covert_channels_evaluation::{benchmark_channel, CovertChannel, CovertChannelBenchmarkResult};
use flush_flush::naive::NaiveFlushAndFlush;
use flush_flush::{FlushAndFlush, SingleFlushAndFlush};
use flush_reload::naive::NaiveFlushAndReload;
const NUM_BYTES: usize = 1 << 14; //20
const NUM_PAGES: usize = 1;
const NUM_PAGES_2: usize = 4;
const NUM_PAGE_MAX: usize = 32;
const NUM_ITER: usize = 32;
struct BenchmarkStats {
raw_res: Vec<CovertChannelBenchmarkResult>,
average_p: f64,
var_p: f64,
average_C: f64,
var_C: f64,
average_T: f64,
var_T: f64,
}
fn run_benchmark<T: CovertChannel + 'static>(
name: &str,
constructor: impl Fn() -> T,
num_iter: usize,
num_pages: usize,
) -> BenchmarkStats {
let mut results = Vec::new();
print!("Benchmarking {} with {} pages", name, num_pages);
for _ in 0..num_iter {
print!(".");
stdout().flush().expect("Failed to flush");
let channel = constructor();
let r = benchmark_channel(channel, num_pages, NUM_BYTES);
results.push(r);
}
println!();
let mut average_p = 0.0;
let mut average_C = 0.0;
let mut average_T = 0.0;
for result in results.iter() {
println!("{:?}", result);
println!("C: {}, T: {}", result.capacity(), result.true_capacity());
average_p += result.error_rate;
average_C += result.capacity();
average_T += result.true_capacity()
}
average_p /= num_iter as f64;
average_C /= num_iter as f64;
average_T /= num_iter as f64;
println!(
"{} - {} Average p: {} C: {}, T: {}",
name, num_pages, average_p, average_C, average_T
);
let mut var_p = 0.0;
let mut var_C = 0.0;
let mut var_T = 0.0;
for result in results.iter() {
let p = result.error_rate - average_p;
var_p += p * p;
let C = result.capacity() - average_C;
var_C += C * C;
let T = result.true_capacity() - average_T;
var_T += T * T;
}
var_p /= num_iter as f64;
var_C /= num_iter as f64;
var_T /= num_iter as f64;
println!(
"{} - {} Variance of p: {}, C: {}, T:{}",
name, num_pages, var_p, var_C, var_T
);
BenchmarkStats {
raw_res: results,
average_p,
var_p,
average_C,
var_C,
average_T,
var_T,
}
}
fn main() {
for num_pages in 1..=32 {
/*println!("Benchmarking F+F");
for _ in 0..16 {
//let sender = 0;
//let receiver = 2;
let (channel, old, receiver, sender) = match FlushAndFlush::new_any_two_core(true) {
// TODO Use the best possible ASV, not best possible AV
let (channel, old, receiver, sender) = match SingleFlushAndFlush::new_any_two_core(true) {
Err(e) => {
panic!("{:?}", e);
}
Ok(r) => r,
};
let r = benchmark_channel(channel, 1, 1 << 15);
let r = benchmark_channel(channel, NUM_PAGES, NUM_BYTES);
println!("{:?}", r);
println!("C: {}, T: {}", r.capacity(), r.true_capacity());
}*/
let naive_ff = run_benchmark(
"Naive F+F",
|| NaiveFlushAndFlush::from_threshold(202),
NUM_ITER << 4,
num_pages,
);
let better_ff = run_benchmark(
"Better F+F",
|| {
match FlushAndFlush::new_any_two_core(true) {
Err(e) => {
panic!("{:?}", e);
}
Ok(r) => r,
}
.0
},
NUM_ITER,
num_pages,
);
let fr = run_benchmark(
"F+R",
|| NaiveFlushAndReload::from_threshold(230),
NUM_ITER,
num_pages,
);
}
}
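As a side note on the statistics gathered here: run_benchmark accumulates, for each configuration, the mean and the population variance of the error rate p, the raw capacity C and the true capacity T over its iterations. A minimal self-contained sketch of that aggregation, with made-up sample values (illustrative only, not part of the commit):

/// Mean and population variance, matching the aggregation run_benchmark does inline.
fn mean_and_variance(samples: &[f64]) -> (f64, f64) {
    let n = samples.len() as f64;
    let mean = samples.iter().sum::<f64>() / n;
    let var = samples.iter().map(|x| (x - mean) * (x - mean)).sum::<f64>() / n;
    (mean, var)
}

// Example: hypothetical capacities (bit/s) from four iterations.
// let (mean, var) = mean_and_variance(&[1200.0, 1180.0, 1215.0, 1190.0]);

Dividing by n rather than n - 1 matches the code above, which reports the population variance of the measured iterations.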

View File

@@ -43,7 +43,18 @@ pub struct CovertChannelBenchmarkResult {
pub num_bit_errors: usize,
pub error_rate: f64,
pub time_rdtsc: u64,
//pub time_seconds: todo
pub time_seconds: std::time::Duration,
}
impl CovertChannelBenchmarkResult {
pub fn capacity(&self) -> f64 {
(self.num_bytes_transmitted * 8) as f64 / self.time_seconds.as_secs_f64()
}
pub fn true_capacity(&self) -> f64 {
let p = self.error_rate;
self.capacity() * (1.0 + ((1.0 - p) * f64::log2(1.0 - p) + p * f64::log2(p)))
}
}
pub struct BitIterator<'a> {
@@ -97,7 +108,7 @@ unsafe impl<T: 'static + CovertChannel + Send> Send for CovertChannelParams<T> {
fn transmit_thread<T: CovertChannel>(
num_bytes: usize,
mut params: CovertChannelParams<T>,
) -> (u64, Vec<u8>) {
) -> (u64, std::time::Instant, Vec<u8>) {
let old_affinity = set_affinity(&(*params.covert_channel).helper_core());
let mut result = Vec::new();
@@ -109,6 +120,7 @@ fn transmit_thread<T: CovertChannel>(
let mut bit_sent = 0;
let mut bit_iter = BitIterator::new(&result);
let start_time = std::time::Instant::now();
let start = unsafe { rdtsc_fence() };
while !bit_iter.atEnd() {
for page in params.pages.iter_mut() {
@@ -121,7 +133,7 @@
}
}
}
(start, result)
(start, start_time, result)
}
pub fn benchmark_channel<T: 'static + Send + CovertChannel>(
@@ -194,8 +206,9 @@ pub fn benchmark_channel<T: 'static + Send + CovertChannel>(
}
let stop = unsafe { rdtsc_fence() };
let stop_time = std::time::Instant::now();
let r = helper.join();
let (start, sent_bytes) = match r {
let (start, start_time, sent_bytes) = match r {
Ok(r) => r,
Err(e) => panic!("Join Error: {:?#}"),
};
@@ -216,6 +229,7 @@ pub fn benchmark_channel<T: 'static + Send + CovertChannel>(
num_bit_errors: num_bit_error,
error_rate,
time_rdtsc: stop - start,
time_seconds: stop_time - start_time,
}
}
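For reference, the two methods added to CovertChannelBenchmarkResult compute the raw bit rate and the capacity of a binary symmetric channel with crossover probability p = error_rate:

C = 8 * num_bytes_transmitted / time_seconds            (bit/s)
T = C * (1 - H(p)),   where H(p) = -p * log2(p) - (1 - p) * log2(1 - p)

One caveat: for an error-free run (p = 0), the p * log2(p) term evaluates to 0 * -inf = NaN in floating point, so true_capacity returns NaN instead of C; callers that can reach p = 0 may want to special-case it.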

View File

@@ -516,6 +516,67 @@ impl MultipleAddrCacheSideChannel for FlushAndFlush {
unsafe impl Send for FlushAndFlush {}
unsafe impl Sync for FlushAndFlush {}
impl CovertChannel for SingleFlushAndFlush {
const BIT_PER_PAGE: usize = 1; //PAGE_SHIFT - 6; // FIXME MAGIC cache line size
unsafe fn transmit<'a>(&self, page: *const u8, bits: &mut BitIterator<'a>) {
let mut offset = 0;
let page = self.0.preferred_address[&page];
if let Some(b) = bits.next() {
//println!("Transmitting {} on page {:p}", b, page);
if b {
unsafe { only_reload(page) };
} else {
unsafe { only_flush(page) };
}
}
}
unsafe fn receive(&self, page: *const u8) -> Vec<bool> {
let addresses: Vec<*const u8> = vec![self.0.preferred_address[&page]];
let r = unsafe { self.0.test_impl(&mut addresses.iter(), u32::max_value()) };
match r {
Err(e) => panic!("{:?}", e),
Ok(status_vec) => {
assert_eq!(status_vec.len(), 1);
let received = status_vec[0].1 == Hit;
return vec![received];
}
}
}
unsafe fn ready_page(&mut self, page: *const u8) {
let r = unsafe { self.0.calibrate(vec![page].into_iter()) }.unwrap();
let mut best_error_rate = 1.0;
let mut best_slice = 0;
for (sp, threshold_error) in self
.0
.thresholds
.iter()
.filter(|kv| kv.0.page == page as VPN)
{
if threshold_error.error.error_rate() < best_error_rate {
best_error_rate = threshold_error.error.error_rate();
best_slice = sp.slice;
}
}
for i in 0..PAGE_LEN {
let addr = unsafe { page.offset(i as isize) };
if self.0.get_slice(addr) == best_slice {
self.0.preferred_address.insert(page, addr);
let r = unsafe {
self.0
.prepare_impl(&mut vec![addr].iter(), u32::max_value())
}
.unwrap();
break;
}
}
}
}
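ready_page above selects, among the calibrated (page, slice) thresholds, the LLC slice with the lowest measured error rate, then scans the page for the first address mapping to that slice and records it as the page's preferred address. A simplified sketch of that selection logic, using hypothetical stand-ins (the per-slice error-rate list and the slice_of closure are assumptions standing in for self.0.thresholds and get_slice, not the crate's actual API):

// Illustrative only: pick the slice with the lowest calibration error rate,
// then return the first offset within the page that maps to that slice.
fn pick_offset(
    error_rate_per_slice: &[(u8, f64)],  // (slice index, calibrated error rate)
    slice_of: impl Fn(usize) -> u8,      // page offset -> LLC slice
    page_len: usize,                     // PAGE_LEN in the real code
) -> Option<usize> {
    let best_slice = error_rate_per_slice
        .iter()
        .min_by(|a, b| a.1.partial_cmp(&b.1).unwrap())
        .map(|kv| kv.0)?;
    (0..page_len).find(|&off| slice_of(off) == best_slice)
}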
impl CovertChannel for FlushAndFlush {
const BIT_PER_PAGE: usize = 1; //PAGE_SHIFT - 6; // FIXME MAGIC cache line size

View File

@@ -0,0 +1,114 @@
use cache_side_channel::{
CacheStatus, ChannelFatalError, CoreSpec, SideChannelError, SingleAddrCacheSideChannel,
};
use cache_utils::calibration::{get_vpn, only_flush, only_reload, VPN};
use cache_utils::flush;
use covert_channels_evaluation::{BitIterator, CovertChannel};
use nix::sched::{sched_getaffinity, CpuSet};
use nix::unistd::Pid;
use std::collections::HashMap;
use std::thread::current;
#[derive(Debug)]
pub struct NaiveFlushAndFlush {
pub threshold: u64,
current: HashMap<VPN, *const u8>,
}
impl NaiveFlushAndFlush {
pub fn from_threshold(threshold: u64) -> Self {
NaiveFlushAndFlush {
threshold,
current: Default::default(),
}
}
unsafe fn test_impl(&self, addr: *const u8) -> Result<CacheStatus, SideChannelError> {
let vpn = get_vpn(addr);
if self.current.get(&vpn) != Some(&addr) {
return Err(SideChannelError::AddressNotReady(addr));
}
let t = unsafe { only_flush(addr) };
if t < self.threshold {
Ok(CacheStatus::Miss)
} else {
Ok(CacheStatus::Hit)
}
}
}
impl SingleAddrCacheSideChannel for NaiveFlushAndFlush {
/// # Safety
///
/// addr needs to be a valid pointer
unsafe fn test_single(&mut self, addr: *const u8) -> Result<CacheStatus, SideChannelError> {
unsafe { self.test_impl(addr) }
}
/// # Safety:
///
/// addr needs to be a valid pointer
unsafe fn prepare_single(&mut self, addr: *const u8) -> Result<(), SideChannelError> {
unsafe { flush(addr) };
let vpn = get_vpn(addr);
self.current.insert(vpn, addr);
Ok(())
}
fn victim_single(&mut self, operation: &dyn Fn()) {
operation()
}
/// # Safety
///
/// addr needs to be a valid pointer
unsafe fn calibrate_single(
&mut self,
_addresses: impl IntoIterator<Item = *const u8>,
) -> Result<(), ChannelFatalError> {
Ok(())
}
}
unsafe impl Send for NaiveFlushAndFlush {}
unsafe impl Sync for NaiveFlushAndFlush {}
impl CoreSpec for NaiveFlushAndFlush {
fn main_core(&self) -> CpuSet {
sched_getaffinity(Pid::from_raw(0)).unwrap()
}
fn helper_core(&self) -> CpuSet {
sched_getaffinity(Pid::from_raw(0)).unwrap()
}
}
impl CovertChannel for NaiveFlushAndFlush {
const BIT_PER_PAGE: usize = 1;
unsafe fn transmit<'a>(&self, page: *const u8, bits: &mut BitIterator<'a>) {
let vpn = get_vpn(page);
let addr = self.current.get(&vpn).unwrap();
if let Some(b) = bits.next() {
if b {
unsafe { only_reload(*addr) };
} else {
unsafe { only_flush(*addr) };
}
}
}
unsafe fn receive(&self, page: *const u8) -> Vec<bool> {
let r = unsafe { self.test_impl(page) };
match r {
Err(e) => panic!("{:?}", e),
Ok(status) => match status {
CacheStatus::Hit => vec![true],
CacheStatus::Miss => vec![false],
},
}
}
unsafe fn ready_page(&mut self, page: *const u8) {
unsafe { self.prepare_single(page) };
}
}
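The fixed 202-cycle threshold that main.rs passes to NaiveFlushAndFlush::from_threshold has to come from a prior timing calibration: a clflush of a cached line takes measurably longer than a clflush of an uncached one. A minimal sketch of how such a threshold could be estimated with the primitives this file already imports (the midpoint heuristic and the round count are assumptions, not the calibration cache_utils actually performs):

// Illustrative calibration sketch (not part of this commit).
// Times clflush on a line that was just reloaded (cached -> slow flush) versus
// one that was just flushed (uncached -> fast flush), then takes the midpoint
// of the two averages as a crude Flush+Flush threshold.
use cache_utils::calibration::{only_flush, only_reload};
use cache_utils::flush;

pub unsafe fn estimate_ff_threshold(addr: *const u8, rounds: u64) -> u64 {
    let mut hit_total = 0u64;
    let mut miss_total = 0u64;
    for _ in 0..rounds {
        unsafe { only_reload(addr) };              // bring the line into the cache
        hit_total += unsafe { only_flush(addr) };  // clflush latency of a cached line
        unsafe { flush(addr) };                    // make sure the line is evicted
        miss_total += unsafe { only_flush(addr) }; // clflush latency of an uncached line
    }
    (hit_total / rounds + miss_total / rounds) / 2
}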

View File

@@ -1,23 +1,38 @@
use cache_side_channel::{
CacheStatus, ChannelFatalError, CoreSpec, SideChannelError, SingleAddrCacheSideChannel,
};
use cache_utils::calibration::only_reload;
use cache_utils::calibration::{get_vpn, only_flush, only_reload, VPN};
use cache_utils::flush;
use covert_channels_evaluation::{BitIterator, CovertChannel};
use nix::sched::{sched_getaffinity, CpuSet};
use nix::unistd::Pid;
use std::collections::HashMap;
use std::thread::current;
#[derive(Debug)]
pub struct NaiveFlushAndReload {
pub threshold: u64,
current: Option<*const u8>,
current: HashMap<VPN, *const u8>,
}
impl NaiveFlushAndReload {
pub fn from_threshold(threshold: u64) -> Self {
NaiveFlushAndReload {
threshold,
current: None,
current: Default::default(),
}
}
unsafe fn test_impl(&self, addr: *const u8) -> Result<CacheStatus, SideChannelError> {
let vpn = get_vpn(addr);
if self.current.get(&vpn) != Some(&addr) {
return Err(SideChannelError::AddressNotReady(addr));
}
let t = unsafe { only_reload(addr) };
unsafe { flush(addr) };
if t > self.threshold {
Ok(CacheStatus::Miss)
} else {
Ok(CacheStatus::Hit)
}
}
}
@@ -27,15 +42,7 @@ impl SingleAddrCacheSideChannel for NaiveFlushAndReload {
///
/// addr needs to be a valid pointer
unsafe fn test_single(&mut self, addr: *const u8) -> Result<CacheStatus, SideChannelError> {
if self.current != Some(addr) {
return Err(SideChannelError::AddressNotReady(addr));
}
let t = unsafe { only_reload(addr) };
if t > self.threshold {
Ok(CacheStatus::Miss)
} else {
Ok(CacheStatus::Hit)
}
unsafe { self.test_impl(addr) }
}
/// # Safety:
@@ -43,7 +50,8 @@ impl SingleAddrCacheSideChannel for NaiveFlushAndReload {
/// addr needs to be a valid pointer
unsafe fn prepare_single(&mut self, addr: *const u8) -> Result<(), SideChannelError> {
unsafe { flush(addr) };
self.current = Some(addr);
let vpn = get_vpn(addr);
self.current.insert(vpn, addr);
Ok(())
}
@@ -79,24 +87,29 @@ impl CovertChannel for NaiveFlushAndReload {
const BIT_PER_PAGE: usize = 1;
unsafe fn transmit<'a>(&self, page: *const u8, bits: &mut BitIterator<'a>) {
unimplemented!()
let vpn = get_vpn(page);
let addr = self.current.get(&vpn).unwrap();
if let Some(b) = bits.next() {
if b {
unsafe { only_reload(*addr) };
} else {
unsafe { only_flush(*addr) };
}
}
}
unsafe fn receive(&self, page: *const u8) -> Vec<bool> {
unimplemented!()
/*
let r = self.test_single(page);
let r = unsafe { self.test_impl(page) };
match r {
Err(e) => unimplemented!(),
Err(e) => panic!("{:?}", e),
Ok(status) => match status {
CacheStatus::Hit => vec![true],
CacheStatus::Miss => vec![false],
},
}
*/
}
unsafe fn ready_page(&mut self, page: *const u8) {
unimplemented!()
unsafe { self.prepare_single(page) };
}
}
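Taken together, the naive channel plugs into the evaluation harness exactly like the calibrated ones in main.rs. A minimal usage sketch, reusing the 230-cycle threshold and the 1 << 14-byte message size from above (single run, no statistics loop):

use covert_channels_evaluation::benchmark_channel;
use flush_reload::naive::NaiveFlushAndReload;

fn main() {
    // Single benchmark run of the naive Flush+Reload channel on one shared page.
    let channel = NaiveFlushAndReload::from_threshold(230);
    let result = benchmark_channel(channel, 1, 1 << 14);
    println!("error rate: {}", result.error_rate);
    println!("C: {} bit/s, T: {} bit/s", result.capacity(), result.true_capacity());
}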