From 9ff1cc0c85da8d0f2b635504c3335adfd9d17615 Mon Sep 17 00:00:00 2001
From: chriseth
Date: Sat, 8 Oct 2022 12:11:26 +0200
Subject: [PATCH] Fix warnings.

---
 src/flow/adjacencies.rs |  95 -------
 src/flow/flow.rs        | 590 ----------------------------------------
 src/flow/mod.rs         |  38 ---
 src/lib.rs              |   2 +-
 src/main.rs             |   2 +-
 src/server.rs           |   4 +-
 src/types/u256.rs       |   3 +-
 tests/integration.rs    |   2 +-
 8 files changed, 7 insertions(+), 729 deletions(-)
 delete mode 100644 src/flow/adjacencies.rs
 delete mode 100644 src/flow/flow.rs
 delete mode 100644 src/flow/mod.rs

diff --git a/src/flow/adjacencies.rs b/src/flow/adjacencies.rs
deleted file mode 100644
index 0f0979a..0000000
--- a/src/flow/adjacencies.rs
+++ /dev/null
@@ -1,95 +0,0 @@
-use crate::flow::Node;
-use crate::types::{Address, Edge, U256};
-use std::cmp::Reverse;
-use std::collections::HashMap;
-
-pub struct Adjacencies<'a> {
-    edges: &'a HashMap<Address, Vec<Edge>>,
-    lazy_adjacencies: HashMap<Node, HashMap<Node, U256>>,
-    capacity_adjustments: HashMap<Node, HashMap<Node, U256>>,
-}
-
-fn pseudo_node(edge: Edge) -> Node {
-    Node::TokenEdge(edge.from, edge.token)
-}
-
-fn source_address_of(node: &Node) -> &Address {
-    match node {
-        Node::Node(addr) => addr,
-        Node::TokenEdge(from, _) => from,
-    }
-}
-
-impl<'a> Adjacencies<'a> {
-    pub fn new(edges: &'a HashMap<Address, Vec<Edge>>) -> Self {
-        Adjacencies {
-            edges,
-            lazy_adjacencies: HashMap::new(),
-            capacity_adjustments: HashMap::new(),
-        }
-    }
-
-    pub fn outgoing_edges_sorted_by_capacity(&mut self, from: &Node) -> Vec<(Node, U256)> {
-        let mut adjacencies = self.adjacencies_from(from);
-        if let Some(adjustments) = self.capacity_adjustments.get(from) {
-            for (node, c) in adjustments {
-                *adjacencies.entry(node.clone()).or_default() += *c;
-            }
-        }
-        let mut result = adjacencies.into_iter().collect::<Vec<(Node, U256)>>();
-        result.sort_unstable_by_key(|(_, capacity)| Reverse(*capacity));
-        result
-    }
-
-    pub fn adjust_capacity(&mut self, from: &Node, to: &Node, adjustment: U256) {
-        *self
-            .capacity_adjustments
-            .entry(from.clone())
-            .or_default()
-            .entry(to.clone())
-            .or_default() += adjustment;
-    }
-
-    pub fn is_adjacent(&mut self, from: &Node, to: &Node) -> bool {
-        // TODO More efficiently?
-        if let Some(capacity) = self.adjacencies_from(from).get(to) {
-            *capacity > U256::from(0)
-        } else {
-            false
-        }
-    }
-
-    fn adjacencies_from(&mut self, from: &Node) -> HashMap<Node, U256> {
-        self.lazy_adjacencies
-            .entry(from.clone())
-            .or_insert_with(|| {
-                let mut result: HashMap<Node, U256> = HashMap::new();
-                // Plain edges are (from, to, token) labeled with capacity
-                for edge in self.edges.get(source_address_of(from)).unwrap_or(&vec![]) {
-                    match from {
-                        Node::Node(_) => {
-                            // One edge from "from" to "from x token" with a capacity
-                            // as the max over all "to" addresses (the balance of the sender)
-                            result
-                                .entry(pseudo_node(*edge))
-                                .and_modify(|c| {
-                                    if edge.capacity > *c {
-                                        *c = edge.capacity;
-                                    }
-                                })
-                                .or_insert(edge.capacity);
-                        }
-                        Node::TokenEdge(_, _) => {
-                            // Another edge from "from x token" to "to" with its
-                            // own capacity (based on the trust)
-                            if pseudo_node(*edge) == *from {
-                                result.insert(Node::Node(edge.to), edge.capacity);
-                            }
-                        }
-                    }
-                }
-                result
-            })
-            .clone()
-    }
-}
diff --git a/src/flow/flow.rs b/src/flow/flow.rs
deleted file mode 100644
index 92e363b..0000000
--- a/src/flow/flow.rs
+++ /dev/null
@@ -1,590 +0,0 @@
-use crate::flow::adjacencies::Adjacencies;
-use crate::flow::{node_as_address, node_as_token_edge, Node};
-use crate::types::{Address, Edge, U256};
-use std::cmp::min;
-use std::collections::VecDeque;
-use std::collections::{BTreeMap, HashMap, HashSet};
-use std::fmt::Write;
-
-pub fn compute_flow(
-    source: &Address,
-    sink: &Address,
-    edges: &HashMap<Address, Vec<Edge>>,
-    requested_flow: U256,
-    max_distance: Option<u64>,
-) -> (U256, Vec<Edge>) {
-    let mut adjacencies = Adjacencies::new(edges);
-    let mut used_edges: HashMap<Node, HashMap<Node, U256>> = HashMap::new();
-
-    let mut flow = U256::default();
-    loop {
-        let (new_flow, parents) = augmenting_path(source, sink, &mut adjacencies, max_distance);
-        if new_flow == U256::default() {
-            break;
-        }
-        flow += new_flow;
-        for window in parents.windows(2) {
-            if let [node, prev] = window {
-                adjacencies.adjust_capacity(prev, node, -new_flow);
-                adjacencies.adjust_capacity(node, prev, new_flow);
-                if adjacencies.is_adjacent(node, prev) {
-                    *used_edges
-                        .entry(node.clone())
-                        .or_default()
-                        .entry(prev.clone())
-                        .or_default() -= new_flow;
-                } else {
-                    *used_edges
-                        .entry(prev.clone())
-                        .or_default()
-                        .entry(node.clone())
-                        .or_default() += new_flow;
-                }
-            } else {
-                panic!();
-            }
-        }
-    }
-
-    println!("Max flow: {flow}");
-
-    if flow > requested_flow {
-        let still_to_prune = prune_flow(source, sink, flow - requested_flow, &mut used_edges);
-        flow = requested_flow + still_to_prune;
-    }
-
-    let transfers = if flow == U256::from(0) {
-        vec![]
-    } else {
-        extract_transfers(source, sink, &flow, used_edges)
-    };
-    // TODO We can simplify the transfers:
-    // If we have a transfer (A, B, T) and a transfer (B, C, T),
-    // We can always replace both by (A, C, T).
-    println!("Num transfers: {}", transfers.len());
-    (flow, transfers)
-}
-
-fn augmenting_path(
-    source: &Address,
-    sink: &Address,
-    adjacencies: &mut Adjacencies,
-    max_distance: Option<u64>,
-) -> (U256, Vec<Node>) {
-    let mut parent = HashMap::new();
-    if *source == *sink {
-        return (U256::default(), vec![]);
-    }
-    let mut queue = VecDeque::<(Node, (u64, U256))>::new();
-    queue.push_back((Node::Node(*source), (0, U256::default() - U256::from(1))));
-    while let Some((node, (depth, flow))) = queue.pop_front() {
-        if let Some(max) = max_distance {
-            if depth >= max {
-                continue;
-            }
-        }
-        for (target, capacity) in adjacencies.outgoing_edges_sorted_by_capacity(&node) {
-            if !parent.contains_key(&target) && capacity > U256::default() {
-                parent.insert(target.clone(), node.clone());
-                let new_flow = min(flow, capacity);
-                if target == Node::Node(*sink) {
-                    return (
-                        new_flow,
-                        trace(parent, &Node::Node(*source), &Node::Node(*sink)),
-                    );
-                }
-                queue.push_back((target, (depth + 1, new_flow)));
-            }
-        }
-    }
-    (U256::default(), vec![])
-}
-
-fn trace(parent: HashMap<Node, Node>, source: &Node, sink: &Node) -> Vec<Node> {
-    let mut t = vec![sink.clone()];
-    let mut node = sink;
-    loop {
-        node = parent.get(node).unwrap();
-        t.push(node.clone());
-        if *node == *source {
-            break;
-        }
-    }
-    t
-}
-
-fn to_dot(
-    edges: &HashMap<Node, HashMap<Node, U256>>,
-    account_balances: &HashMap<Address, U256>,
-) -> String {
-    let mut out = String::new();
-    writeln!(out, "digraph used_edges {{").expect("");
-
-    for (address, balance) in account_balances {
-        writeln!(
-            out,
-            " \"{}\" [label=\"{}: {}\"];",
-            address, address, balance
-        )
-        .expect("");
-    }
-    for (from, out_edges) in edges {
-        for (to, capacity) in out_edges {
-            writeln!(
-                out,
-                " \"{}\" -> \"{}\" [label=\"{}\"];",
-                from, to, capacity
-            )
-            .expect("");
-        }
-    }
-    writeln!(out, "}}").expect("");
-    out
-}
-
-fn prune_flow(
-    source: &Address,
-    sink: &Address,
-    mut flow_to_prune: U256,
-    used_edges: &mut HashMap<Node, HashMap<Node, U256>>,
-) -> U256 {
-    // Note the path length is negative to sort by longest shortest path first.
-    let edges_by_path_length = compute_edges_by_path_length(source, sink, used_edges);
-
-    for edges_here in edges_by_path_length.values() {
-        //println!("Shorter path.");
-        // As long as `edges` contain an edge with smaller weight than the weight still to prune:
-        // take the smallest such edge and prune it.
-        while flow_to_prune > U256::from(0) && !edges_here.is_empty() {
-            //println!("Still to prune: {}", flow_to_prune);
-            if let Some((s, t)) = smallest_edge_in_set(used_edges, edges_here) {
-                if used_edges[&s][&t] > flow_to_prune {
-                    break;
-                };
-                flow_to_prune = prune_edge(used_edges, (&s, &t), flow_to_prune);
-            } else {
-                break;
-            }
-        }
-    }
-    // If there is still flow to prune, take the first element in edgesByPathLength
-    // and partially prune its path.
-    if flow_to_prune > U256::from(0) {
-        //println!("Final stage: Still to prune: {}", flow_to_prune);
-        for edges_here in edges_by_path_length.values() {
-            for (a, b) in edges_here {
-                if !used_edges.contains_key(a) || !used_edges[a].contains_key(b) {
-                    continue;
-                }
-                flow_to_prune = prune_edge(used_edges, (a, b), flow_to_prune);
-                if flow_to_prune == U256::from(0) {
-                    return U256::from(0);
-                }
-            }
-            if flow_to_prune == U256::from(0) {
-                return U256::from(0);
-            }
-        }
-    }
-    flow_to_prune
-}
-
-/// Returns a map from the negative shortest path length to the edge.
-/// The shortest path length is negative so that it is sorted by
-/// longest paths first - those are the ones we want to eliminate first.
-fn compute_edges_by_path_length(
-    source: &Address,
-    sink: &Address,
-    used_edges: &HashMap<Node, HashMap<Node, U256>>,
-) -> BTreeMap<i64, HashSet<(Node, Node)>> {
-    let mut result = BTreeMap::<i64, HashSet<(Node, Node)>>::new();
-    let from_source = distance_from_source(&Node::Node(*source), used_edges);
-    let to_sink = distance_to_sink(&Node::Node(*sink), used_edges);
-    for (s, edges) in used_edges {
-        for t in edges.keys() {
-            let path_length = from_source[s] + 1 + to_sink[t];
-            result
-                .entry(-path_length)
-                .or_default()
-                .insert((s.clone(), t.clone()));
-        }
-    }
-    result
-}
-
-fn distance_from_source(
-    source: &Node,
-    used_edges: &HashMap<Node, HashMap<Node, U256>>,
-) -> HashMap<Node, i64> {
-    let mut distances = HashMap::<Node, i64>::new();
-    let mut to_process = VecDeque::<Node>::new();
-    distances.insert(source.clone(), 0);
-    to_process.push_back(source.clone());
-
-    while let Some(n) = to_process.pop_front() {
-        for (t, capacity) in used_edges.get(&n).unwrap_or(&HashMap::new()) {
-            if *capacity > U256::from(0) && !distances.contains_key(t) {
-                distances.insert(t.clone(), distances[&n] + 1);
-                to_process.push_back(t.clone());
-            }
-        }
-    }
-
-    distances
-}
-
-fn distance_to_sink(
-    sink: &Node,
-    used_edges: &HashMap<Node, HashMap<Node, U256>>,
-) -> HashMap<Node, i64> {
-    distance_from_source(sink, &reverse_edges(used_edges))
-}
-
-fn reverse_edges(
-    used_edges: &HashMap<Node, HashMap<Node, U256>>,
-) -> HashMap<Node, HashMap<Node, U256>> {
-    let mut reversed: HashMap<Node, HashMap<Node, U256>> = HashMap::new();
-    for (n, edges) in used_edges {
-        for (t, capacity) in edges {
-            reversed
-                .entry(t.clone())
-                .or_default()
-                .insert(n.clone(), *capacity);
-        }
-    }
-    reversed
-}
-
-fn smallest_edge_in_set(
-    all_edges: &HashMap<Node, HashMap<Node, U256>>,
-    edge_set: &HashSet<(Node, Node)>,
-) -> Option<(Node, Node)> {
-    if let Some((a, b, _)) = edge_set
-        .iter()
-        .map(|(a, b)| {
-            let capacity = if let Some(out) = all_edges.get(a) {
-                if let Some(capacity) = out.get(b) {
-                    assert!(*capacity != U256::from(0));
-                    Some(capacity)
-                } else {
-                    None
-                }
-            } else {
-                None
-            };
-            (a, b, capacity)
-        })
-        .filter(|(_, _, capacity)| capacity.is_some())
-        .min_by_key(|(_, _, capacity)| capacity.unwrap())
-    {
-        Some((a.clone(), b.clone()))
-    } else {
-        None
-    }
-}
-
-fn smallest_edge_from(
-    used_edges: &HashMap<Node, HashMap<Node, U256>>,
-    n: &Node,
-) -> Option<(Node, U256)> {
-    used_edges.get(n).and_then(|out| {
-        out.iter()
-            .min_by_key(|(_, c)| {
-                assert!(**c != U256::from(0));
-                *c
-            })
-            .map(|(t, c)| (t.clone(), *c))
-    })
-}
-
-fn smallest_edge_to(
-    used_edges: &HashMap<Node, HashMap<Node, U256>>,
-    n: &Node,
-) -> Option<(Node, U256)> {
-    used_edges
-        .iter()
-        .filter(|(_, out)| out.contains_key(n))
-        .map(|(t, out)| (t, out[n]))
-        .min_by_key(|(_, c)| {
-            assert!(*c != U256::from(0));
-            *c
-        })
-        .map(|(t, c)| (t.clone(), c))
-}
-
-/// Removes the edge (potentially partially), removing a given amount of flow.
-/// Returns the remaining flow to prune if the edge was too small.
-fn prune_edge(
-    used_edges: &mut HashMap<Node, HashMap<Node, U256>>,
-    edge: (&Node, &Node),
-    flow_to_prune: U256,
-) -> U256 {
-    let edge_size = min(flow_to_prune, used_edges[edge.0][edge.1]);
-    reduce_capacity(used_edges, edge, &edge_size);
-    prune_path(used_edges, edge.1, edge_size, PruneDirection::Forwards);
-    prune_path(used_edges, edge.0, edge_size, PruneDirection::Backwards);
-    flow_to_prune - edge_size
-}
-
-fn reduce_capacity(
-    used_edges: &mut HashMap<Node, HashMap<Node, U256>>,
-    (a, b): (&Node, &Node),
-    reduction: &U256,
-) {
-    let out_edges = used_edges.get_mut(a).unwrap();
-    *out_edges.get_mut(b).unwrap() -= *reduction;
-    if out_edges[b] == U256::from(0) {
-        out_edges.remove_entry(b);
-    }
-}
-
-#[derive(Clone, Copy)]
-enum PruneDirection {
-    Forwards,
-    Backwards,
-}
-
-fn prune_path(
-    used_edges: &mut HashMap<Node, HashMap<Node, U256>>,
-    n: &Node,
-    mut flow_to_prune: U256,
-    direction: PruneDirection,
-) {
-    while let Some((next, mut capacity)) = match direction {
-        PruneDirection::Forwards => smallest_edge_from(used_edges, n),
-        PruneDirection::Backwards => smallest_edge_to(used_edges, n),
-    } {
-        capacity = min(flow_to_prune, capacity);
-        match direction {
-            PruneDirection::Forwards => reduce_capacity(used_edges, (n, &next), &capacity),
-            PruneDirection::Backwards => reduce_capacity(used_edges, (&next, n), &capacity),
-        };
-        prune_path(used_edges, &next, capacity, direction);
-        flow_to_prune -= capacity;
-        if flow_to_prune == U256::from(0) {
-            return;
-        }
-    }
-}
-
-fn extract_transfers(
-    source: &Address,
-    sink: &Address,
-    amount: &U256,
-    mut used_edges: HashMap<Node, HashMap<Node, U256>>,
-) -> Vec<Edge> {
-    let mut transfers: Vec<Edge> = Vec::new();
-    let mut account_balances: HashMap<Address, U256> = HashMap::new();
-    account_balances.insert(*source, *amount);
-
-    while !account_balances.is_empty()
-        && (account_balances.len() > 1 || *account_balances.iter().next().unwrap().0 != *sink)
-    {
-        let edge = next_full_capacity_edge(&used_edges, &account_balances);
-        assert!(account_balances[&edge.from] >= edge.capacity);
-        account_balances
-            .entry(edge.from)
-            .and_modify(|balance| *balance -= edge.capacity);
-        *account_balances.entry(edge.to).or_default() += edge.capacity;
-        account_balances.retain(|_account, balance| balance > &mut U256::from(0));
-        assert!(used_edges.contains_key(&Node::TokenEdge(edge.from, edge.token)));
-        used_edges
-            .entry(Node::TokenEdge(edge.from, edge.token))
-            .and_modify(|outgoing| {
-                assert!(outgoing.contains_key(&Node::Node(edge.to)));
-                outgoing.remove(&Node::Node(edge.to));
-            });
-        transfers.push(edge);
-    }
-
-    transfers
-}
-
-fn next_full_capacity_edge(
-    used_edges: &HashMap<Node, HashMap<Node, U256>>,
-    account_balances: &HashMap<Address, U256>,
-) -> Edge {
-    for (account, balance) in account_balances {
-        for intermediate in used_edges
-            .get(&Node::Node(*account))
-            .unwrap_or(&HashMap::new())
-            .keys()
-        {
-            for (to_node, capacity) in &used_edges[intermediate] {
-                let (from, token) = node_as_token_edge(intermediate);
-                if *balance >= *capacity {
-                    return Edge {
-                        from: *from,
-                        to: *node_as_address(to_node),
-                        token: *token,
-                        capacity: *capacity,
-                    };
-                }
-            }
-        }
-    }
-    panic!();
-}
-
-#[cfg(test)]
-mod test {
-    use super::*;
-
-    fn addresses() -> (Address, Address, Address, Address, Address, Address) {
-        (
-            Address::from("0x11C7e86fF693e9032A0F41711b5581a04b26Be2E"),
-            Address::from("0x22cEDde51198D1773590311E2A340DC06B24cB37"),
-            Address::from("0x33cEDde51198D1773590311E2A340DC06B24cB37"),
-            Address::from("0x447EDde51198D1773590311E2A340DC06B24cB37"),
-            Address::from("0x55c16ce62d26fd51582a646e2e30a3267b1e6d7e"),
-            Address::from("0x66c16ce62d26fd51582a646e2e30a3267b1e6d7e"),
-        )
-    }
-    fn build_edges(input: Vec<Edge>) -> HashMap<Address, Vec<Edge>> {
-        let mut output: HashMap<Address, Vec<Edge>> = HashMap::new();
-        for e in input {
-            output.entry(e.from).or_default().push(e);
-        }
-        output
-    }
-
-    #[test]
-    fn direct() {
-        let (a, b, t, ..) = addresses();
-        let edges = build_edges(vec![Edge {
-            from: a,
-            to: b,
-            token: t,
-            capacity: U256::from(10),
-        }]);
-        let flow = compute_flow(&a, &b, &edges, U256::MAX, None);
-        assert_eq!(flow, (U256::from(10), edges[&a].clone()));
-    }
-
-    #[test]
-    fn one_hop() {
-        let (a, b, c, t1, t2, ..) = addresses();
-        let edges = build_edges(vec![
-            Edge {
-                from: a,
-                to: b,
-                token: t1,
-                capacity: U256::from(10),
-            },
-            Edge {
-                from: b,
-                to: c,
-                token: t2,
-                capacity: U256::from(8),
-            },
-        ]);
-        let flow = compute_flow(&a, &c, &edges, U256::MAX, None);
-        assert_eq!(
-            flow,
-            (
-                U256::from(8),
-                vec![
-                    Edge {
-                        from: a,
-                        to: b,
-                        token: t1,
-                        capacity: U256::from(8)
-                    },
-                    Edge {
-                        from: b,
-                        to: c,
-                        token: t2,
-                        capacity: U256::from(8)
-                    },
-                ]
-            )
-        );
-    }
-
-    #[test]
-    fn diamond() {
-        let (a, b, c, d, t1, t2) = addresses();
-        let edges = build_edges(vec![
-            Edge {
-                from: a,
-                to: b,
-                token: t1,
-                capacity: U256::from(10),
-            },
-            Edge {
-                from: a,
-                to: c,
-                token: t2,
-                capacity: U256::from(7),
-            },
-            Edge {
-                from: b,
-                to: d,
-                token: t2,
-                capacity: U256::from(9),
-            },
-            Edge {
-                from: c,
-                to: d,
-                token: t1,
-                capacity: U256::from(8),
-            },
-        ]);
-        let mut flow = compute_flow(&a, &d, &edges, U256::MAX, None);
-        flow.1.sort();
-        assert_eq!(
-            flow,
-            (
-                U256::from(16),
-                vec![
-                    Edge {
-                        from: a,
-                        to: b,
-                        token: t1,
-                        capacity: U256::from(9)
-                    },
-                    Edge {
-                        from: a,
-                        to: c,
-                        token: t2,
-                        capacity: U256::from(7)
-                    },
-                    Edge {
-                        from: b,
-                        to: d,
-                        token: t2,
-                        capacity: U256::from(9)
-                    },
-                    Edge {
-                        from: c,
-                        to: d,
-                        token: t1,
-                        capacity: U256::from(7)
-                    },
-                ]
-            )
-        );
-        let mut pruned_flow = compute_flow(&a, &d, &edges, U256::from(6), None);
-        pruned_flow.1.sort();
-        assert_eq!(
-            pruned_flow,
-            (
-                U256::from(6),
-                vec![
-                    Edge {
-                        from: a,
-                        to: b,
-                        token: t1,
-                        capacity: U256::from(6)
-                    },
-                    Edge {
-                        from: b,
-                        to: d,
-                        token: t2,
-                        capacity: U256::from(6)
-                    },
-                ]
-            )
-        );
-    }
-}
diff --git a/src/flow/mod.rs b/src/flow/mod.rs
deleted file mode 100644
index 5d04719..0000000
--- a/src/flow/mod.rs
+++ /dev/null
@@ -1,38 +0,0 @@
-use crate::types::Address;
-use std::fmt::{Display, Formatter};
-
-mod adjacencies;
-mod flow;
-
-#[derive(Debug, Eq, PartialEq, Hash, Clone)]
-pub enum Node {
-    Node(Address),
-    TokenEdge(Address, Address),
-}
-
-pub fn node_as_address(node: &Node) -> &Address {
-    if let Node::Node(address) = node {
-        address
-    } else {
-        panic!()
-    }
-}
-
-pub fn node_as_token_edge(node: &Node) -> (&Address, &Address) {
-    if let Node::TokenEdge(from, token) = node {
-        (from, token)
-    } else {
-        panic!()
-    }
-}
-
-impl Display for Node {
-    fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
-        match self {
-            Node::Node(address) => write!(f, "{address}"),
-            Node::TokenEdge(address, token) => write!(f, "({address} x {token})"),
-        }
-    }
-}
-
-pub use crate::flow::flow::compute_flow;
diff --git a/src/lib.rs b/src/lib.rs
index 3252967..77ce04b 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,3 +1,3 @@
-pub mod flow;
+pub mod graph;
 pub mod io;
 pub mod types;
diff --git a/src/main.rs b/src/main.rs
index 239e6bf..3e71dc5 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,6 +1,6 @@
 use std::env;
 
-mod flow;
+mod graph;
 mod io;
 mod server;
 mod types;
diff --git a/src/server.rs b/src/server.rs
index 553ce82..d06bc20 100644
--- a/src/server.rs
+++ b/src/server.rs
@@ -1,4 +1,4 @@
-use crate::flow;
+use crate::graph;
 use crate::io::read_edges_binary;
 use crate::types::{Address, Edge, U256};
 use json::JsonValue;
@@ -92,7 +92,7 @@ fn compute_transfer(
         vec![None]
     };
     for max_distance in max_distances {
-        let (flow, transfers) = flow::compute_flow(
+        let (flow, transfers) = graph::compute_flow(
            &Address::from(request.params["from"].to_string().as_str()),
            &Address::from(request.params["to"].to_string().as_str()),
            edges,
diff --git a/src/types/u256.rs b/src/types/u256.rs
index e6e11ee..352f0b2 100644
--- a/src/types/u256.rs
+++ b/src/types/u256.rs
@@ -12,7 +12,8 @@ impl U256 {
         U256([high, low])
     }
     pub const MAX: U256 = U256::new(u128::MAX, u128::MAX);
-    pub fn to_decimal(&self) -> String {
+    #[allow(dead_code)]
+    pub fn to_decimal(self) -> String {
         let value = BigUint::from(self.0[0]) << 128 | BigUint::from(self.0[1]);
         format!("{}", value)
     }
diff --git a/tests/integration.rs b/tests/integration.rs
index 901bd5b..e2307db 100644
--- a/tests/integration.rs
+++ b/tests/integration.rs
@@ -1,4 +1,4 @@
-use pathfinder2::flow::compute_flow;
+use pathfinder2::graph::compute_flow;
 use pathfinder2::io::read_edges_binary;
 use pathfinder2::types::{Address, Edge, U256};
 use std::collections::HashMap;
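
Note: after this change the public entry point is pathfinder2::graph::compute_flow, as the updated src/server.rs and tests/integration.rs show. The snippet below is a minimal usage sketch mirroring the removed "direct" test; it is not part of the patch, the addresses are the placeholder values from that test, and it assumes graph::compute_flow keeps the signature of the removed flow::compute_flow (which the otherwise unchanged call sites suggest).

use pathfinder2::graph::compute_flow;
use pathfinder2::types::{Address, Edge, U256};
use std::collections::HashMap;

fn main() {
    // Placeholder addresses taken from the removed test module.
    let a = Address::from("0x11C7e86fF693e9032A0F41711b5581a04b26Be2E");
    let b = Address::from("0x22cEDde51198D1773590311E2A340DC06B24cB37");
    let t = Address::from("0x33cEDde51198D1773590311E2A340DC06B24cB37");

    // Edges are keyed by sender, like build_edges in the removed tests.
    let mut edges: HashMap<Address, Vec<Edge>> = HashMap::new();
    edges.entry(a).or_default().push(Edge {
        from: a,
        to: b,
        token: t,
        capacity: U256::from(10),
    });

    // U256::MAX requests the maximum transferable amount; the last argument
    // is the optional hop limit (max_distance).
    let (flow, transfers) = compute_flow(&a, &b, &edges, U256::MAX, None);
    println!("flow: {flow}, transfers: {}", transfers.len());
}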
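
Note: the removed compute_flow carried a TODO about merging a transfer (A, B, T) followed by (B, C, T) into a single (A, C, T). The sketch below is one conservative reading of that TODO, merging only pairs that move the same amount of the same token; simplify_transfers is a hypothetical helper, not code from this repository, and it assumes the public Edge type from pathfinder2::types with Copy fields.

use pathfinder2::types::Edge;

// Hypothetical post-processing step for the transfer list returned by
// compute_flow: a transfer (A, B, T) followed by (B, C, T) of the same
// amount is replaced by a single transfer (A, C, T).
fn simplify_transfers(mut transfers: Vec<Edge>) -> Vec<Edge> {
    loop {
        // Find a pair where one transfer's receiver is the other's sender,
        // the token matches and the full amount is passed on.
        let pair = transfers.iter().enumerate().find_map(|(i, a)| {
            transfers.iter().enumerate().find_map(|(j, b)| {
                (i != j && a.to == b.from && a.token == b.token && a.capacity == b.capacity)
                    .then_some((i, j))
            })
        });
        match pair {
            Some((i, j)) => {
                let merged = Edge {
                    from: transfers[i].from,
                    to: transfers[j].to,
                    token: transfers[i].token,
                    capacity: transfers[i].capacity,
                };
                // Remove the larger index first so the smaller one stays valid.
                transfers.remove(i.max(j));
                transfers.remove(i.min(j));
                transfers.push(merged);
            }
            None => return transfers,
        }
    }
}

Applied repeatedly after extract_transfers, this shortens chains in which an intermediate account only forwards what it receives, at the cost of a quadratic scan per merge.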