diff --git a/pq-tls-benchmark-framework/emulation-exp/code/cquiche_s_timer/src/main.rs b/pq-tls-benchmark-framework/emulation-exp/code/cquiche_s_timer/src/main.rs
index 94b0359c91c0da7decb11c5adda57efe87c3ed25..157b31fddb1ff279da187e656d14f10c01b28830 100644
--- a/pq-tls-benchmark-framework/emulation-exp/code/cquiche_s_timer/src/main.rs
+++ b/pq-tls-benchmark-framework/emulation-exp/code/cquiche_s_timer/src/main.rs
@@ -201,13 +201,17 @@ fn prepare_handshake(
         quiche::connect(Some(SERVER_NAME), &scid, local_addr, peer_addr, &mut config).unwrap();
 
     if let Some(dir) = std::env::var_os("QLOGDIR") {
-        let id = format!("{scid:?}");
-        let writer = make_qlog_writer(&dir, "client", &id);
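+        // Name the qlog after a fresh random connection id instead of the scid.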
+        let mut log_id = [0; quiche::MAX_CONN_ID_LEN];
+        SystemRandom::new().fill(&mut log_id[..]).unwrap();
+        let log_id = quiche::ConnectionId::from_ref(&log_id);
+        let log_id = format!("{log_id:?}");
+        let writer = make_qlog_writer(&dir, "client", &log_id);
 
         conn.set_qlog_with_level(
             std::boxed::Box::new(writer),
             "cquiche_s_timer qlog".to_string(),
-            format!("{} id={}", "cquiche_s_timer qlog", id),
+            format!("{} id={}", "cquiche_s_timer qlog", log_id),
             quiche::QlogLevel::Extra,
         );
     }
diff --git a/pq-tls-benchmark-framework/emulation-exp/code/kex/scripts/queries.py b/pq-tls-benchmark-framework/emulation-exp/code/kex/scripts/queries.py
index 8b129a8f5586ffa43097a1bd778139970b256d75..3ee5105580f02146108ac3e404428bb9ea06fe89 100644
--- a/pq-tls-benchmark-framework/emulation-exp/code/kex/scripts/queries.py
+++ b/pq-tls-benchmark-framework/emulation-exp/code/kex/scripts/queries.py
@@ -12,8 +12,8 @@ def main():
     data = pd.read_feather(f"{FEATHERS_DIR}/data.feather")
     # data = pd.read_feather(f"{FEATHERS_DIR}/data_run_20241028.feather")
 
-    bandwith_calcs()
-    # loss_calculations()
+    # bandwith_calcs()
+    loss_calculations()
     # static_scenario_statistical_analysis(data)
     # median_of_all_static_runs_per_algorithm(data)
     # stats_of_qtl95_of_packetloss(data)
@@ -79,20 +79,21 @@ def loss_calculations():
 
         return (1 - l) ** (cic + sic)
 
-    def calc_p_no_one_sec_delay(cic, sic, l):
+    def calc_p_no_one_sec_delay(cic, sidc, l):
         """
         Calculates the probability p_noOneSec.
 
         Args:
             cic: client initial count.
-            sic: server initial count.
+            sidc: server initial decryptable count; sic minus the last server initial
+                packet when its ethernet frame length is below 1200 bytes.
             l: loss probability.
 
         Returns:
             p_noOneSec as defined in the thesis.
         """
 
-        term1 = (1 - l) ** cic * (1 - l ** (sic + (cic - 1)))
+        term1 = (1 - l) ** cic * (1 - l ** (sidc + (cic - 1)))
         term2 = 0
 
         # range: upper is not inclusive, in math symbol SUM it is inclusive
@@ -118,12 +119,21 @@
 
         return 1 - (p ** (1 / (cic + sic)))
 
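+    # Drop the server's last initial packet from sic when shorter than 1200 bytes.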
+    df["sidc"] = df.apply(
+        lambda row: (
+            row["sic"] - 1 if row["server_last_packet_length"] < 1200 else row["sic"]
+        ),
+        axis=1,
+    )
+
     for l in [0.01, 0.05, 0.10, 0.20]:
         df[f"p_noLoss_{l}"] = df.apply(
             lambda row: calc_p_no_loss(row["cic"], row["sic"], l), axis=1
         )
         df[f"p_noOneSec_{l}"] = df.apply(
-            lambda row: calc_p_no_one_sec_delay(row["cic"], row["sic"], l), axis=1
+            lambda row: calc_p_no_one_sec_delay(row["cic"], row["sidc"], l),
+            axis=1,
         )
 
     df["l_for_noLoss_p50"] = df.apply(