Skip to content
Snippets Groups Projects
Commit 593e7bd8 authored by Bartolomeo Berend Müller's avatar Bartolomeo Berend Müller
Browse files

Done

parent 315ca061
No related branches found
No related tags found
No related merge requests found
...@@ -201,13 +201,16 @@ fn prepare_handshake( ...@@ -201,13 +201,16 @@ fn prepare_handshake(
quiche::connect(Some(SERVER_NAME), &scid, local_addr, peer_addr, &mut config).unwrap(); quiche::connect(Some(SERVER_NAME), &scid, local_addr, peer_addr, &mut config).unwrap();
if let Some(dir) = std::env::var_os("QLOGDIR") { if let Some(dir) = std::env::var_os("QLOGDIR") {
let id = format!("{scid:?}"); let mut log_id = [0; quiche::MAX_CONN_ID_LEN];
let writer = make_qlog_writer(&dir, "client", &id); SystemRandom::new().fill(&mut log_id[..]).unwrap();
let log_id = quiche::ConnectionId::from_ref(&log_id);
let log_id = format!("{log_id:?}");
let writer = make_qlog_writer(&dir, "client", &log_id);
conn.set_qlog_with_level( conn.set_qlog_with_level(
std::boxed::Box::new(writer), std::boxed::Box::new(writer),
"cquiche_s_timer qlog".to_string(), "cquiche_s_timer qlog".to_string(),
format!("{} id={}", "cquiche_s_timer qlog", id), format!("{} id={}", "cquiche_s_timer qlog", log_id),
quiche::QlogLevel::Extra, quiche::QlogLevel::Extra,
); );
} }
......
...@@ -12,8 +12,8 @@ def main(): ...@@ -12,8 +12,8 @@ def main():
data = pd.read_feather(f"{FEATHERS_DIR}/data.feather") data = pd.read_feather(f"{FEATHERS_DIR}/data.feather")
# data = pd.read_feather(f"{FEATHERS_DIR}/data_run_20241028.feather") # data = pd.read_feather(f"{FEATHERS_DIR}/data_run_20241028.feather")
bandwith_calcs() # bandwith_calcs()
# loss_calculations() loss_calculations()
# static_scenario_statistical_analysis(data) # static_scenario_statistical_analysis(data)
# median_of_all_static_runs_per_algorithm(data) # median_of_all_static_runs_per_algorithm(data)
# stats_of_qtl95_of_packetloss(data) # stats_of_qtl95_of_packetloss(data)
...@@ -79,20 +79,21 @@ def loss_calculations(): ...@@ -79,20 +79,21 @@ def loss_calculations():
return (1 - l) ** (cic + sic) return (1 - l) ** (cic + sic)
def calc_p_no_one_sec_delay(cic, sic, l): def calc_p_no_one_sec_delay(cic, sidc, l):
""" """
Calculates the probability p_noOneSec. Calculates the probability p_noOneSec.
Args: Args:
cic: client initial count. cic: client initial count.
sic: server initial count. sidc: server initial decryptable count, without last packet of sic if length < 1200.
lsf: last server ethernet frame length.
l: loss probability. l: loss probability.
Returns: Returns:
p_noOneSec as defined in the thesis. p_noOneSec as defined in the thesis.
""" """
term1 = (1 - l) ** cic * (1 - l ** (sic + (cic - 1))) term1 = (1 - l) ** cic * (1 - l ** (sidc + (cic - 1)))
term2 = 0 term2 = 0
# range: upper is not inclusive, in math symbol SUM it is inclusive # range: upper is not inclusive, in math symbol SUM it is inclusive
...@@ -118,12 +119,20 @@ def loss_calculations(): ...@@ -118,12 +119,20 @@ def loss_calculations():
return 1 - (p ** (1 / (cic + sic))) return 1 - (p ** (1 / (cic + sic)))
df["sidc"] = df.apply(
lambda row: (
row["sic"] - 1 if row["server_last_packet_length"] < 1200 else row["sic"]
),
axis=1,
)
for l in [0.01, 0.05, 0.10, 0.20]: for l in [0.01, 0.05, 0.10, 0.20]:
df[f"p_noLoss_{l}"] = df.apply( df[f"p_noLoss_{l}"] = df.apply(
lambda row: calc_p_no_loss(row["cic"], row["sic"], l), axis=1 lambda row: calc_p_no_loss(row["cic"], row["sic"], l), axis=1
) )
df[f"p_noOneSec_{l}"] = df.apply( df[f"p_noOneSec_{l}"] = df.apply(
lambda row: calc_p_no_one_sec_delay(row["cic"], row["sic"], l), axis=1 lambda row: calc_p_no_one_sec_delay(row["cic"], row["sidc"], l),
axis=1,
) )
df["l_for_noLoss_p50"] = df.apply( df["l_for_noLoss_p50"] = df.apply(
......
0% Loading or unable to load — try again.
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment