Commit b179793b authored by Pascal Engélibert

Style, deps, clippy

parent cb744a87
@@ -6,15 +6,13 @@ authors = ["tuxmain <t@txmn.tk>"]
edition = "2018"
license = "AGPL3"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
bincode = "1.2.1"
indicatif = "0.14.0"
plotters = "0.2.12"
rand = "0.7.3"
rayon = "1.3.0"
serde = { version = "1.0.106", features = ["derive"] }
serde_cbor = "0.11.1"
serde_json = "1.0.51"
structopt = "0.3.13"
bincode = "1.3.3"
indicatif = "0.16.2"
plotters = "0.3.1"
rand = "0.8.4"
rayon = "1.5.1"
serde = { version = "1.0.130", features = ["derive"] }
serde_cbor = "0.11.2"
serde_json = "1.0.68"
structopt = "0.3.23"
use crate::common::*;
use crate::utils::cli;
use bincode;
use plotters::prelude::*;
use serde_cbor;
use serde_json;
use std::{collections::HashMap, io::Read};
use std::{
collections::{hash_map::Entry as HashMapEntry, HashMap},
io::Read,
};
pub fn run_charts(opt: cli::ChartsSubcommand) {
let stdin = std::io::stdin();
@@ -15,7 +15,7 @@ pub fn run_charts(opt: cli::ChartsSubcommand) {
let export_results: ExportResults = match opt.input_format {
ExportFormat::Bincode => bincode::deserialize(&raw).unwrap(),
ExportFormat::Cbor => serde_cbor::from_slice(&raw).unwrap(),
ExportFormat::Json => serde_json::from_str(&std::str::from_utf8(&raw).unwrap()).unwrap(),
ExportFormat::Json => serde_json::from_str(std::str::from_utf8(&raw).unwrap()).unwrap(),
};
let root = BitMapBackend::new("security.png", (1280, 720)).into_drawing_area();
@@ -49,7 +49,7 @@ pub fn run_charts(opt: cli::ChartsSubcommand) {
.x_label_area_size(30)
.y_label_area_size(40)
.right_y_label_area_size(40)
.build_ranged(0u32..export_results.settings.nb_blocks, mean_min..mean_max)
.build_cartesian_2d(0u32..export_results.settings.nb_blocks, mean_min..mean_max)
.unwrap()
.set_secondary_coord(0u32..export_results.settings.nb_blocks, 0f32..std_dev_max);
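Note on the change above: plotters 0.3 renamed `build_ranged` to `build_cartesian_2d`, and (later in this file) the old `LogRange(range)` wrapper becomes `range.log_scale()`. A minimal sketch of the new builder call; the file name and ranges here are illustrative, not taken from this project:

```rust
use plotters::prelude::*;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let root = BitMapBackend::new("example.png", (640, 480)).into_drawing_area();
    root.fill(&WHITE)?;
    let mut chart = ChartBuilder::on(&root)
        .margin(10)
        .x_label_area_size(30)
        .y_label_area_size(40)
        // plotters 0.2: .build_ranged(0u32..100u32, 0f32..1f32)
        .build_cartesian_2d(0u32..100u32, 0f32..1f32)?;
    chart.configure_mesh().draw()?;
    root.present()?;
    Ok(())
}
```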
@@ -75,9 +75,7 @@ pub fn run_charts(opt: cli::ChartsSubcommand) {
.collect::<Vec<(u32, f32)>>(),
1,
&RED,
&|c, s, st| {
return EmptyElement::at(c) + Circle::new((0, 0), s, st.filled());
},
&|c, s, st| EmptyElement::at(c) + Circle::new((0, 0), s, st.filled()),
))
.unwrap()
.label("Mean (left)")
@@ -92,9 +90,7 @@ pub fn run_charts(opt: cli::ChartsSubcommand) {
.collect::<Vec<(u32, f32)>>(),
1,
&BLUE,
&|c, s, st| {
return EmptyElement::at(c) + Circle::new((0, 0), s, st.filled());
},
&|c, s, st| EmptyElement::at(c) + Circle::new((0, 0), s, st.filled()),
))
.unwrap()
.label("Std dev (right)")
@@ -130,10 +126,11 @@ pub fn run_charts(opt: cli::ChartsSubcommand) {
.iter()
.for_each(|(block_id, tx_result)| {
let class_id = block_id.0.checked_sub(block_id.0 % 288).unwrap();
if tx_classes.contains_key(&class_id) {
tx_classes.get_mut(&class_id).unwrap().push(tx_result);
} else {
tx_classes.insert(class_id, vec![tx_result]);
match tx_classes.entry(class_id) {
HashMapEntry::Occupied(mut e) => e.get_mut().push(tx_result),
HashMapEntry::Vacant(e) => {
e.insert(vec![tx_result]);
}
}
});
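Note: the `contains_key` + `get_mut`/`insert` pattern is replaced here by the `Entry` API (clippy's `map_entry` lint), which does a single hash lookup. For `Vec` values the same thing can be written even more compactly; a small sketch with made-up keys and values:

```rust
use std::collections::HashMap;

fn main() {
    let mut groups: HashMap<u32, Vec<&str>> = HashMap::new();
    // Single-lookup insert-or-append, equivalent to matching on the Entry.
    groups.entry(288).or_default().push("tx");
    groups.entry(288).or_insert_with(Vec::new).push("tx2"); // same idea
    assert_eq!(groups[&288].len(), 2);
}
```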
@@ -149,9 +146,9 @@ pub fn run_charts(opt: cli::ChartsSubcommand) {
.x_label_area_size(30)
.y_label_area_size(40)
.right_y_label_area_size(40)
.build_ranged(
.build_cartesian_2d(
0u32..export_results.settings.nb_blocks,
LogRange(duration_min as f32..duration_max as f32),
(duration_min as f32..duration_max as f32).log_scale(),
)
.unwrap()
.set_secondary_coord(
@@ -182,9 +179,7 @@ pub fn run_charts(opt: cli::ChartsSubcommand) {
.collect::<Vec<(u32, f32)>>(),
1,
&BLUE.mix(0.25),
&|c, s, st| {
return EmptyElement::at(c) + Circle::new((0, 0), s, st.filled());
},
&|c, s, st| EmptyElement::at(c) + Circle::new((0, 0), s, st.filled()),
))
.unwrap();
@@ -207,7 +202,7 @@ pub fn run_charts(opt: cli::ChartsSubcommand) {
chart
.draw_secondary_series(
Histogram::vertical(&chart.borrow_secondary())
Histogram::vertical(chart.borrow_secondary())
.style(RED.mix(0.25).filled())
.data(
(0u32..)
......
@@ -6,17 +6,26 @@ use structopt::StructOpt;
pub const VERSION: u32 = 0;
#[derive(PartialEq, PartialOrd)]
#[derive(PartialEq)]
pub struct NonNan<T>(pub T);
impl<T> Eq for NonNan<T> where T: PartialEq {}
impl<T> PartialOrd for NonNan<T>
where
T: PartialOrd,
{
fn partial_cmp(&self, other: &NonNan<T>) -> Option<Ordering> {
self.0.partial_cmp(&other.0)
}
}
impl<T> Ord for NonNan<T>
where
T: PartialOrd,
{
fn cmp(&self, other: &NonNan<T>) -> Ordering {
self.partial_cmp(other).unwrap()
unsafe { self.partial_cmp(other).unwrap_unchecked() }
}
}
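Note: dropping `derive(PartialOrd)` in favour of manual `PartialOrd`/`Ord` impls gives `NonNan<T>` a total order under the assumption that the wrapped value is never NaN; the `unwrap_unchecked` call relies on the nightly feature enabled in main.rs below and is undefined behaviour if that assumption is broken. A self-contained sketch of such a wrapper, using the safe `expect` variant for illustration:

```rust
use std::cmp::Ordering;

// Illustrative stand-alone version of the wrapper; `expect` here is the
// safe counterpart of the `unwrap_unchecked` used in the commit.
#[derive(PartialEq)]
struct NonNan(f32);

impl Eq for NonNan {}

impl PartialOrd for NonNan {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        self.0.partial_cmp(&other.0)
    }
}

impl Ord for NonNan {
    fn cmp(&self, other: &Self) -> Ordering {
        self.partial_cmp(other).expect("NaN in NonNan")
    }
}

fn main() {
    // With Ord available, floats can be used with max(), sort(), BTreeMap keys, etc.
    let max = [0.5f32, 2.0, 1.25].iter().copied().map(NonNan).max().unwrap();
    assert_eq!(max.0, 2.0);
}
```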
......
use crate::cli;
use crate::common::*;
use bincode;
use serde_cbor;
use serde_json;
use std::io::{Read, Write};
pub fn run_convert(opt: cli::ConvertSubcommand) {
@@ -14,13 +11,13 @@ pub fn run_convert(opt: cli::ConvertSubcommand) {
let export_results: ExportResults = match opt.input_format {
ExportFormat::Bincode => bincode::deserialize(&raw).unwrap(),
ExportFormat::Cbor => serde_cbor::from_slice(&raw).unwrap(),
ExportFormat::Json => serde_json::from_str(&std::str::from_utf8(&raw).unwrap()).unwrap(),
ExportFormat::Json => serde_json::from_str(std::str::from_utf8(&raw).unwrap()).unwrap(),
};
let stdout = std::io::stdout();
let mut handle = stdout.lock();
handle
.write(&match opt.output_format {
.write_all(&match opt.output_format {
ExportFormat::Bincode => bincode::serialize(&export_results).unwrap(),
ExportFormat::Cbor => serde_cbor::to_vec(&export_results).unwrap(),
ExportFormat::Json => serde_json::to_string(&export_results).unwrap().into_bytes(),
......
@@ -16,7 +16,7 @@ pub fn run_local(opt: cli::LocalSubcommand) {
let stdout = io::stdout();
let mut handle = stdout.lock();
handle
.write(&match opt.export {
.write_all(&match opt.export {
ExportFormat::Bincode => bincode::serialize(&export_results).unwrap(),
ExportFormat::Cbor => serde_cbor::to_vec(&export_results).unwrap(),
ExportFormat::Json => serde_json::to_string(&export_results).unwrap().into_bytes(),
......
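Note: the switch from `write` to `write_all` in both files addresses clippy's `unused_io_amount` lint: `write` may perform a short write and returns how many bytes went out, while `write_all` keeps writing until the whole buffer is written or an error occurs. Minimal sketch:

```rust
use std::io::Write;

fn main() -> std::io::Result<()> {
    let stdout = std::io::stdout();
    let mut handle = stdout.lock();
    // `write` could stop after part of the buffer; `write_all` retries to completion.
    handle.write_all(b"serialized bytes\n")?;
    Ok(())
}
```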
#![feature(option_result_unwrap_unchecked)]
mod charts;
mod client;
mod common;
......
@@ -10,7 +10,7 @@ fn analyze_tx(
nodes_index: &HashMap<Pubkey, Node>,
settings: &Settings,
tx: &Tx,
txs: &Vec<Tx>,
txs: &[Tx],
) -> TxResult {
if let TxType::Output(input_block) = tx.tx_type {
let path = forensics::retro_tx(
@@ -43,7 +43,7 @@ pub fn analyze(
clients_index: &HashMap<Pubkey, Client>,
nodes_index: &HashMap<Pubkey, Node>,
settings: &Settings,
txs: &Vec<Tx>,
txs: &[Tx],
) -> Vec<(BlockId, TxResult)> {
let progress_bar = ProgressBar::new(txs.len() as u64);
progress_bar.set_style(
......
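Note: the `&Vec<Tx>` → `&[Tx]` parameter changes here and in forensics.rs follow clippy's `ptr_arg` lint: a slice parameter accepts a `Vec`, an array or a sub-slice without constraining the caller. Illustrative sketch (names are made up):

```rust
// A slice parameter is strictly more flexible than &Vec<T>.
fn total_len(items: &[String]) -> usize {
    items.iter().map(|s| s.len()).sum()
}

fn main() {
    let v = vec!["a".to_string(), "bc".to_string()];
    assert_eq!(total_len(&v), 3);      // &Vec<String> coerces to &[String]
    assert_eq!(total_len(&v[..1]), 1); // sub-slices work too
}
```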
use crate::common::*;
use std::collections::HashMap;
use std::collections::{hash_map::Entry as HashMapEntry, HashMap};
/// Assumes txs is ordered by block_id
pub fn retro_tx(
@@ -9,14 +9,9 @@ pub fn retro_tx(
nodes_index: &HashMap<Pubkey, Node>,
settings: &Settings,
tx: &Tx,
txs: &Vec<Tx>,
txs: &[Tx],
) -> PathResult {
let from_block = BlockId(
tx.block_id
.0
.checked_sub(settings.expire_local)
.unwrap_or(0),
);
let from_block = BlockId(tx.block_id.0.saturating_sub(settings.expire_local));
if layers == 1 {
let mut result: Vec<MixId> = vec![];
for txi in txs {
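Note: for unsigned integers, `checked_sub(x).unwrap_or(0)` and `saturating_sub(x)` are equivalent, both clamping at zero. Quick sketch:

```rust
fn main() {
    let block: u32 = 100;
    // Both forms clamp at zero when the subtraction would underflow.
    assert_eq!(block.checked_sub(250).unwrap_or(0), block.saturating_sub(250)); // both 0
    assert_eq!(block.saturating_sub(40), 60);
}
```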
@@ -61,10 +56,14 @@ pub fn count_possible_inputs(mix_ids: &mut HashMap<MixId, usize>, path: PathResu
match path {
PathResult::Input(v) => {
for mix_id in v {
if mix_ids.contains_key(&mix_id) {
mix_ids.insert(mix_id, mix_ids.get(&mix_id).unwrap() + 1);
} else {
mix_ids.insert(mix_id, 1);
match mix_ids.entry(mix_id) {
HashMapEntry::Occupied(e) => {
let e = *e.get();
mix_ids.insert(mix_id, e + 1);
}
HashMapEntry::Vacant(e) => {
e.insert(1);
}
}
}
}
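Note: the counting loop above now goes through the `Entry` API; the usual shorthand for such occurrence counters is `*map.entry(k).or_insert(0) += 1`, sketched here with made-up data:

```rust
use std::collections::HashMap;

fn main() {
    let mut counts: HashMap<&str, usize> = HashMap::new();
    for key in ["a", "b", "a"].iter().copied() {
        // One lookup per key: insert 0 if absent, then increment in place.
        *counts.entry(key).or_insert(0) += 1;
    }
    assert_eq!(counts["a"], 2);
}
```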
......
use crate::common::*;
use rand::Rng;
use std::collections::HashMap;
use std::collections::{hash_map::Entry as HashMapEntry, HashMap};
pub fn prepare(
clients_index: &mut HashMap<Pubkey, Client>,
@@ -31,8 +31,8 @@ pub fn simulate(
) -> Vec<usize> {
let mut rng = rand::thread_rng();
let clients_pubkeys: Vec<Pubkey> = clients_index.iter().map(|x| x.1.pubkey.clone()).collect();
let mut nodes_pubkeys: Vec<Pubkey> = nodes_index.iter().map(|x| x.1.pubkey.clone()).collect();
let clients_pubkeys: Vec<Pubkey> = clients_index.iter().map(|x| x.1.pubkey).collect();
let mut nodes_pubkeys: Vec<Pubkey> = nodes_index.iter().map(|x| x.1.pubkey).collect();
let mut mix_id = MixId(0);
let mut expired: Vec<usize> = Vec::new();
@@ -61,14 +61,12 @@ pub fn simulate(
for node in nodes_index.values_mut() {
for pending_txs in node.pending_txs.values_mut() {
let mut to_remove: Vec<usize> = vec![];
let mut i: usize = 0;
for pending_tx in pending_txs.iter() {
for (i, pending_tx) in pending_txs.iter().enumerate() {
if pending_tx.expire_local.0 <= block_id
|| pending_tx.expire_global.0 <= block_id
{
to_remove.push(i);
}
i += 1;
}
current_expired += to_remove.len();
for i in to_remove.iter().rev() {
@@ -79,7 +77,7 @@ pub fn simulate(
let mut to_send: Vec<Amount> = vec![];
for (amount, pending_txs) in node.pending_txs.iter() {
if pending_txs.len() >= settings.txmin {
to_send.push(amount.clone());
to_send.push(*amount);
}
}
for amount in to_send {
@@ -93,13 +91,11 @@ pub fn simulate(
mix_id: pending_tx.mix_id,
path: pending_tx.path[1..].to_vec(),
};
if new_pending_txs.contains_key(&pending_tx.path[0]) {
new_pending_txs
.get_mut(&pending_tx.path[0])
.unwrap()
.push(new_pending_tx);
} else {
new_pending_txs.insert(pending_tx.path[0], vec![new_pending_tx]);
match new_pending_txs.entry(pending_tx.path[0]) {
HashMapEntry::Occupied(mut e) => e.get_mut().push(new_pending_tx),
HashMapEntry::Vacant(e) => {
e.insert(vec![new_pending_tx]);
}
}
}
txs.push(Tx {
@@ -120,13 +116,11 @@ pub fn simulate(
for (pubkey, pending_txs) in new_pending_txs {
let node = nodes_index.get_mut(&pubkey).unwrap();
for pending_tx in pending_txs {
if node.pending_txs.contains_key(&pending_tx.amount) {
node.pending_txs
.get_mut(&pending_tx.amount)
.unwrap()
.push(pending_tx);
} else {
node.pending_txs.insert(pending_tx.amount, vec![pending_tx]);
match node.pending_txs.entry(pending_tx.amount) {
HashMapEntry::Occupied(mut e) => e.get_mut().push(pending_tx),
HashMapEntry::Vacant(e) => {
e.insert(vec![pending_tx]);
}
}
}
}
@@ -141,26 +135,24 @@ pub fn simulate(
if settings.smart_clients {
// Smart client: pick in priority nodes with pending_txs < txmin
let mut path2: Vec<Pubkey> = Vec::new();
let mut i: usize = 0;
for (pubkey, node) in nodes_index.iter() {
let r: usize = rand::Rng::gen_range(&mut rng, i, nodes_pubkeys.len());
for (i, (pubkey, node)) in nodes_index.iter().enumerate() {
let r: usize = rand::Rng::gen_range(&mut rng, i..nodes_pubkeys.len());
if let Some(pending_txs) = node.pending_txs.get(&amount) {
if pending_txs.len() < settings.txmin {
path.push(pubkey.clone());
path.push(*pubkey);
if path.len() == settings.layers {
break;
}
} else if path2.len() < settings.layers {
path2.push(pubkey.clone());
path2.push(*pubkey);
}
} else {
path.push(pubkey.clone());
path.push(*pubkey);
if path.len() == settings.layers {
break;
}
}
nodes_pubkeys.swap(i, r);
i += 1;
}
while path.len() < settings.layers {
path.push(path2.pop().expect("Not enough nodes"));
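Note: rand 0.8 changed `gen_range` to take a single range argument instead of two bounds, hence `gen_range(&mut rng, i..nodes_pubkeys.len())` here and in the hunks below. Minimal sketch against rand 0.8:

```rust
use rand::Rng;

fn main() {
    let mut rng = rand::thread_rng();
    // rand 0.7: rng.gen_range(0, 10);  rand 0.8 takes a range instead:
    let r: usize = rng.gen_range(0..10);
    assert!(r < 10);
}
```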
@@ -168,12 +160,12 @@ pub fn simulate(
} else {
// Simple client: equiprobably pick nodes
for layer in 0..settings.layers {
let r: usize = rand::Rng::gen_range(&mut rng, layer, nodes_pubkeys.len());
path.push(nodes_pubkeys[r].clone());
let r: usize = rand::Rng::gen_range(&mut rng, layer..nodes_pubkeys.len());
path.push(nodes_pubkeys[r]);
nodes_pubkeys.swap(layer, r);
}
}
path.push(clients_pubkeys[rand::Rng::gen_range(&mut rng, 0, clients_index.len())]);
path.push(clients_pubkeys[rand::Rng::gen_range(&mut rng, 0..clients_index.len())]);
let tx = Tx {
block_id: BlockId(block_id),
amount,
@@ -194,13 +186,11 @@ pub fn simulate(
mix_id,
path: next_path,
};
if node.pending_txs.contains_key(&tx.amount) {
node.pending_txs
.get_mut(&tx.amount)
.unwrap()
.push(pending_tx);
} else {
node.pending_txs.insert(tx.amount, vec![pending_tx]);
match node.pending_txs.entry(tx.amount) {
HashMapEntry::Occupied(mut e) => e.get_mut().push(pending_tx),
HashMapEntry::Vacant(e) => {
e.insert(vec![pending_tx]);
}
}
}
mix_id.0 += 1;
......