From c8d27825b92b4e4e81f50a8865c4a1b0137190e5 Mon Sep 17 00:00:00 2001 From: richardb Date: Tue, 4 Jun 2024 18:00:01 +0200 Subject: [PATCH 01/43] initial version --- .gitignore | 5 ++ Cargo.toml | 30 +++++++ src/main.rs | 10 +++ src/server/config.rs | 54 ++++++++++++ src/server/grpc.rs | 205 +++++++++++++++++++++++++++++++++++++++++++ src/server/mod.rs | 8 ++ src/server/p2p.rs | 60 +++++++++++++ src/server/server.rs | 130 +++++++++++++++++++++++++++ 8 files changed, 502 insertions(+) create mode 100644 Cargo.toml create mode 100644 src/main.rs create mode 100644 src/server/config.rs create mode 100644 src/server/grpc.rs create mode 100644 src/server/mod.rs create mode 100644 src/server/p2p.rs create mode 100644 src/server/server.rs diff --git a/.gitignore b/.gitignore index 6985cf1b..196e176d 100644 --- a/.gitignore +++ b/.gitignore @@ -12,3 +12,8 @@ Cargo.lock # MSVC Windows builds of rustc generate these, which store debugging information *.pdb + + +# Added by cargo + +/target diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 00000000..b56d69e5 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,30 @@ +[package] +name = "sha-p2pool" +version = "0.1.0" +edition = "2021" + +[dependencies] +libp2p = { version = "0.53.2", features = [ + "dns", + "identify", + "macros", + "tokio", + "serde", + "noise", + "tcp", + "yamux", + "mdns", + "gossipsub" +] } +tokio = { version = "1.38.0", features = ["full"] } +thiserror = "1.0" +serde = "1.0.203" +anyhow = "1.0" +log = "0.4.21" +env_logger = "0.11.3" +minotari_app_grpc = { git = "https://github.com/tari-project/tari.git" } +minotari_node_grpc_client = { git = "https://github.com/tari-project/tari.git" } +tonic = "0.8.3" +#tari_common_types = "0.8.1" + + diff --git a/src/main.rs b/src/main.rs new file mode 100644 index 00000000..2a77bf74 --- /dev/null +++ b/src/main.rs @@ -0,0 +1,10 @@ +mod server; + +#[tokio::main] +async fn main() -> anyhow::Result<()> { + env_logger::init(); + let config = 
server::Config::builder().with_p2p_port(9999).build(); + let mut server = server::Server::new(config).await?; + server.start().await?; + Ok(()) +} diff --git a/src/server/config.rs b/src/server/config.rs new file mode 100644 index 00000000..27546126 --- /dev/null +++ b/src/server/config.rs @@ -0,0 +1,54 @@ +use std::time::Duration; + +/// Config is the server configuration struct. +#[derive(Clone)] +pub struct Config { + pub base_node_address: String, + pub p2p_port: u64, + pub grpc_port: u64, + pub idle_connection_timeout: Duration, +} + +impl Default for Config { + fn default() -> Self { + Self { + base_node_address: String::from("http://127.0.0.1:18142"), + p2p_port: 0, // bind to any free port + grpc_port: 18143, // default local base node port + 1 + idle_connection_timeout: Duration::from_secs(30), + } + } +} + +impl Config { + pub fn builder() -> ConfigBuilder { + ConfigBuilder { + config: Config::default(), + } + } +} + +pub struct ConfigBuilder { + config: Config, +} + +impl ConfigBuilder { + pub fn with_p2p_port(&mut self, port: u64) -> &mut Self { + self.config.p2p_port = port; + self + } + + pub fn with_grpc_port(&mut self, port: u64) -> &mut Self { + self.config.grpc_port = port; + self + } + + pub fn with_idle_connection_timeout(&mut self, timeout: Duration) -> &Self { + self.config.idle_connection_timeout = timeout; + self + } + + pub fn build(&self) -> Config { + self.config.clone() + } +} diff --git a/src/server/grpc.rs b/src/server/grpc.rs new file mode 100644 index 00000000..3920f416 --- /dev/null +++ b/src/server/grpc.rs @@ -0,0 +1,205 @@ +use std::sync::{Arc, Mutex}; + +use libp2p::futures::channel::mpsc; +use log::info; +use minotari_app_grpc::tari_rpc; +use minotari_app_grpc::tari_rpc::{Block, BlockBlobRequest, BlockGroupRequest, BlockGroupResponse, BlockHeaderResponse, BlockHeight, BlockTimingResponse, ConsensusConstants, Empty, FetchMatchingUtxosRequest, GetActiveValidatorNodesRequest, GetBlocksRequest, GetHeaderByHashRequest, 
GetMempoolTransactionsRequest, GetNewBlockBlobResult, GetNewBlockResult, GetNewBlockTemplateWithCoinbasesRequest, GetNewBlockWithCoinbasesRequest, GetPeersRequest, GetShardKeyRequest, GetShardKeyResponse, GetSideChainUtxosRequest, GetTemplateRegistrationsRequest, HeightRequest, ListConnectedPeersResponse, ListHeadersRequest, MempoolStatsResponse, NetworkStatusResponse, NewBlockTemplate, NewBlockTemplateRequest, NewBlockTemplateResponse, NodeIdentity, SearchKernelsRequest, SearchUtxosRequest, SoftwareUpdate, StringValue, SubmitBlockResponse, SubmitTransactionRequest, SubmitTransactionResponse, SyncInfoResponse, SyncProgressResponse, TipInfoResponse, TransactionStateRequest, TransactionStateResponse}; +use minotari_app_grpc::tari_rpc::base_node_client::BaseNodeClient; +use minotari_node_grpc_client::BaseNodeGrpcClient; +use thiserror::Error; +use tonic::{Request, Response, Status}; + +pub struct TariBaseNodeGrpc { + client: Arc>>, +} + +#[derive(Error, Debug)] +pub enum Error { + #[error("Tonic error: {0}")] + Tonic(#[from] TonicError), +} + +#[derive(Error, Debug)] +pub enum TonicError { + #[error("Transport error: {0}")] + Transport(#[from] tonic::transport::Error), +} + +impl TariBaseNodeGrpc { + pub async fn new(base_node_address: String) -> Result { + let client = BaseNodeGrpcClient::connect(base_node_address) + .await + .map_err(|e| Error::Tonic(TonicError::Transport(e)))?; + + Ok(Self { client: Arc::new(Mutex::new(client)) }) + } +} + +#[tonic::async_trait] +impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc { + type FetchMatchingUtxosStream = mpsc::Receiver>; + type GetActiveValidatorNodesStream = mpsc::Receiver>; + type GetBlocksStream = mpsc::Receiver>; + type GetMempoolTransactionsStream = mpsc::Receiver>; + type GetNetworkDifficultyStream = mpsc::Receiver>; + type GetPeersStream = mpsc::Receiver>; + type GetSideChainUtxosStream = mpsc::Receiver>; + type GetTemplateRegistrationsStream = mpsc::Receiver>; + type GetTokensInCirculationStream = 
mpsc::Receiver>; + type ListHeadersStream = mpsc::Receiver>; + type SearchKernelsStream = mpsc::Receiver>; + type SearchUtxosStream = mpsc::Receiver>; + + async fn get_new_block_template(&self, request: Request) -> Result, Status> { + info!("get_new_block_template called!"); + if let Ok(mut client) = self.client.lock() { + let result = client + .get_new_block_template(request.into_inner().clone()) + .await; + } + + Err(Status::internal("")) + } + + async fn get_new_block(&self, request: Request) -> Result, Status> { + info!("get_new_block called!"); + todo!() + } + + + async fn list_headers(&self, request: Request) -> Result, Status> { + todo!() + } + + async fn get_header_by_hash(&self, request: Request) -> Result, Status> { + todo!() + } + + async fn get_blocks(&self, request: Request) -> Result, Status> { + todo!() + } + + async fn get_block_timing(&self, request: Request) -> Result, Status> { + todo!() + } + + async fn get_constants(&self, request: Request) -> Result, Status> { + todo!() + } + + async fn get_block_size(&self, request: Request) -> Result, Status> { + todo!() + } + + async fn get_block_fees(&self, request: Request) -> Result, Status> { + todo!() + } + + async fn get_version(&self, request: Request) -> Result, Status> { + todo!() + } + + async fn check_for_updates(&self, request: Request) -> Result, Status> { + todo!() + } + + async fn get_tokens_in_circulation(&self, request: Request) -> Result, Status> { + todo!() + } + + async fn get_network_difficulty(&self, request: Request) -> Result, Status> { + todo!() + } + + async fn get_new_block_with_coinbases(&self, request: Request) -> Result, Status> { + todo!() + } + + async fn get_new_block_template_with_coinbases(&self, request: Request) -> Result, Status> { + todo!() + } + + async fn get_new_block_blob(&self, request: Request) -> Result, Status> { + todo!() + } + + async fn submit_block(&self, request: Request) -> Result, Status> { + todo!() + } + + async fn submit_block_blob(&self, request: 
Request) -> Result, Status> { + todo!() + } + + async fn submit_transaction(&self, request: Request) -> Result, Status> { + todo!() + } + + async fn get_sync_info(&self, request: Request) -> Result, Status> { + todo!() + } + + async fn get_sync_progress(&self, request: Request) -> Result, Status> { + todo!() + } + + async fn get_tip_info(&self, request: Request) -> Result, Status> { + todo!() + } + + async fn search_kernels(&self, request: Request) -> Result, Status> { + todo!() + } + + async fn search_utxos(&self, request: Request) -> Result, Status> { + todo!() + } + + async fn fetch_matching_utxos(&self, request: Request) -> Result, Status> { + todo!() + } + + async fn get_peers(&self, request: Request) -> Result, Status> { + todo!() + } + + async fn get_mempool_transactions(&self, request: Request) -> Result, Status> { + todo!() + } + + async fn transaction_state(&self, request: Request) -> Result, Status> { + todo!() + } + + async fn identify(&self, request: Request) -> Result, Status> { + todo!() + } + + async fn get_network_status(&self, request: Request) -> Result, Status> { + todo!() + } + + async fn list_connected_peers(&self, request: Request) -> Result, Status> { + todo!() + } + + async fn get_mempool_stats(&self, request: Request) -> Result, Status> { + todo!() + } + + async fn get_active_validator_nodes(&self, request: Request) -> Result, Status> { + todo!() + } + + async fn get_shard_key(&self, request: Request) -> Result, Status> { + todo!() + } + + async fn get_template_registrations(&self, request: Request) -> Result, Status> { + todo!() + } + + async fn get_side_chain_utxos(&self, request: Request) -> Result, Status> { + todo!() + } +} \ No newline at end of file diff --git a/src/server/mod.rs b/src/server/mod.rs new file mode 100644 index 00000000..dd175871 --- /dev/null +++ b/src/server/mod.rs @@ -0,0 +1,8 @@ +pub use config::*; +pub use server::*; + +mod config; +mod server; +mod p2p; + +pub mod grpc; diff --git a/src/server/p2p.rs 
b/src/server/p2p.rs new file mode 100644 index 00000000..c2ec4863 --- /dev/null +++ b/src/server/p2p.rs @@ -0,0 +1,60 @@ +use std::hash::{DefaultHasher, Hash, Hasher}; +use std::time::Duration; + +use libp2p::{gossipsub, mdns, noise, Swarm, tcp, yamux}; +use libp2p::mdns::tokio::Tokio; +use libp2p::swarm::NetworkBehaviour; +use tokio::io; + +use crate::server::{config, Error, LibP2PError}; + +#[derive(NetworkBehaviour)] +pub struct ServerNetworkBehaviour { + pub mdns: mdns::Behaviour, + pub gossipsub: gossipsub::Behaviour, +} + +pub fn swarm(config: &config::Config) -> Result, Error> { + let swarm = libp2p::SwarmBuilder::with_new_identity() + .with_tokio() + .with_tcp( + tcp::Config::default(), + noise::Config::new, + yamux::Config::default, + ) + .map_err(|e| Error::LibP2P(LibP2PError::Noise(e)))? + .with_behaviour(move |key_pair| { + let message_id_fn = |message: &gossipsub::Message| { + let mut s = DefaultHasher::new(); + message.data.hash(&mut s); + gossipsub::MessageId::from(s.finish().to_string()) + }; + + let gossipsub_config = gossipsub::ConfigBuilder::default() + .heartbeat_interval(Duration::from_secs(10)) + .validation_mode(gossipsub::ValidationMode::Strict) + .message_id_fn(message_id_fn) + .build() + .map_err(|msg| io::Error::new(io::ErrorKind::Other, msg))?; // Temporary hack because `build` does not return a proper `std::error::Error`. + + let gossipsub = gossipsub::Behaviour::new( + gossipsub::MessageAuthenticity::Signed(key_pair.clone()), + gossipsub_config, + )?; + + Ok(ServerNetworkBehaviour { + gossipsub, + mdns: mdns::Behaviour::new( + mdns::Config::default(), + key_pair.public().to_peer_id(), + ) + .map_err(|e| Error::LibP2P(LibP2PError::IO(e)))?, + }) + }) + .map_err(|e| Error::LibP2P(LibP2PError::Behaviour(e.to_string())))? 
+ .with_swarm_config(|c| c.with_idle_connection_timeout(config.idle_connection_timeout)) + .build(); + + Ok(swarm) +} + diff --git a/src/server/server.rs b/src/server/server.rs new file mode 100644 index 00000000..7bcc2295 --- /dev/null +++ b/src/server/server.rs @@ -0,0 +1,130 @@ +use std::convert::Infallible; +use std::hash::{DefaultHasher, Hash, Hasher}; +use std::net::{AddrParseError, SocketAddr}; +use std::str::FromStr; +use std::time::Duration; + +use libp2p::{gossipsub, mdns, multiaddr, noise, PeerId, Swarm, tcp, TransportError, yamux}; +use libp2p::futures::StreamExt; +use libp2p::mdns::tokio::Tokio; +use libp2p::swarm::{NetworkBehaviour, SwarmEvent}; +use log::{error, info}; +use minotari_app_grpc::tari_rpc::base_node_server::BaseNodeServer; +use thiserror::Error; +use tokio::{io, io::AsyncBufReadExt, select}; + +use crate::server::{config, grpc, p2p}; +use crate::server::grpc::{TariBaseNodeGrpc, TonicError}; +use crate::server::p2p::{ServerNetworkBehaviour, ServerNetworkBehaviourEvent}; + +#[derive(Error, Debug)] +pub enum Error { + #[error("LibP2P error: {0}")] + LibP2P(#[from] LibP2PError), + #[error("gRPC error: {0}")] + GRPC(#[from] grpc::Error), + #[error("Socket address parse error: {0}")] + AddrParse(#[from] AddrParseError), +} + +#[derive(Error, Debug)] +pub enum LibP2PError { + #[error("Noise error: {0}")] + Noise(#[from] noise::Error), + #[error("Multi address parse error: {0}")] + MultiAddrParse(#[from] multiaddr::Error), + #[error("Transport error: {0}")] + Transport(#[from] TransportError), + #[error("I/O error: {0}")] + IO(#[from] std::io::Error), + #[error("Behaviour error: {0}")] + Behaviour(String), +} + +/// Server represents the server running all the necessary components for sha-p2pool. 
+pub struct Server { + config: config::Config, + swarm: Swarm, + grpc_server: BaseNodeServer, +} + +impl Server { + pub async fn new(config: config::Config) -> Result { + let swarm = p2p::swarm(&config)?; + let grpc_service = TariBaseNodeGrpc::new(config.base_node_address.clone()).await.map_err(Error::GRPC)?; + let grpc_server = BaseNodeServer::new(grpc_service); + + Ok(Self { config, swarm, grpc_server }) + } + + pub async fn start_grpc(service: BaseNodeServer, grpc_port: u64) -> Result<(), Error> { + info!("Starting gRPC server on port {}!", &grpc_port); + + tonic::transport::Server::builder() + .add_service(service) + .serve( + SocketAddr::from_str( + format!("0.0.0.0:{}", grpc_port).as_str() + ).map_err(Error::AddrParse)? + ) + .await + .map_err(|err| { + error!("GRPC encountered an error: {:?}", err); + Error::GRPC(grpc::Error::Tonic(TonicError::Transport(err))) + })?; + + info!("gRPC server stopped!"); + + Ok(()) + } + + pub async fn start(&mut self) -> Result<(), Error> { + self.swarm + .listen_on( + format!("/ip4/0.0.0.0/tcp/{}", self.config.p2p_port) + .parse() + .map_err(|e| Error::LibP2P(LibP2PError::MultiAddrParse(e)))?, + ) + .map_err(|e| Error::LibP2P(LibP2PError::Transport(e)))?; + + info!("Starting Tari SHA-3 mining P2Pool..."); + + // grpc serve + let grpc_service = self.grpc_server.clone(); + let grpc_port = self.config.grpc_port; + tokio::spawn(async move { + Self::start_grpc(grpc_service, grpc_port).await; + }); + + // main loop + loop { + select! { + next = self.swarm.select_next_some() => match next { + SwarmEvent::NewListenAddr { address, .. 
} => { + info!("Listening on {address:?}"); + }, + SwarmEvent::Behaviour(event) => match event { + ServerNetworkBehaviourEvent::Mdns(mdns_event) => match mdns_event { + mdns::Event::Discovered(peers) => { + for (peer, addr) in peers { + info!("Discovered new peer {} at {}", peer, addr); + self.swarm.behaviour_mut().gossipsub.add_explicit_peer(&peer); + } + }, + mdns::Event::Expired(peers) => { + for (peer, addr) in peers { + info!("Expired peer {} at {}", peer, addr); + self.swarm.behaviour_mut().gossipsub.remove_explicit_peer(&peer); + } + }, + }, + ServerNetworkBehaviourEvent::Gossipsub(event) => { + info!("GOSSIP: {event:?}"); + }, + }, + _ => {} + } + } + } + } +} From f2233a3a58a437130f5a124836d08bfc5100234f Mon Sep 17 00:00:00 2001 From: richardb Date: Wed, 5 Jun 2024 15:03:31 +0200 Subject: [PATCH 02/43] added new grpc service to handle new logic for p2pool block template generation --- .gitmodules | 3 + Cargo.toml | 37 ++----- crates/grpc/.gitignore | 19 ++++ crates/grpc/Cargo.toml | 12 +++ crates/grpc/build.rs | 17 +++ crates/grpc/proto/p2pool.proto | 13 +++ crates/grpc/src/lib.rs | 4 + crates/sha_p2pool/.gitignore | 19 ++++ crates/sha_p2pool/Cargo.toml | 33 ++++++ {src => crates/sha_p2pool/src}/main.rs | 2 +- .../sha_p2pool/src}/server/config.rs | 2 +- .../sha_p2pool/src/server/grpc/base_node.rs | 100 +++++++++++++----- crates/sha_p2pool/src/server/grpc/error.rs | 13 +++ crates/sha_p2pool/src/server/grpc/mod.rs | 4 + {src => crates/sha_p2pool/src}/server/mod.rs | 0 {src => crates/sha_p2pool/src}/server/p2p.rs | 0 .../sha_p2pool/src}/server/server.rs | 27 +++-- 17 files changed, 238 insertions(+), 67 deletions(-) create mode 100644 .gitmodules create mode 100644 crates/grpc/.gitignore create mode 100644 crates/grpc/Cargo.toml create mode 100644 crates/grpc/build.rs create mode 100644 crates/grpc/proto/p2pool.proto create mode 100644 crates/grpc/src/lib.rs create mode 100644 crates/sha_p2pool/.gitignore create mode 100644 crates/sha_p2pool/Cargo.toml rename 
{src => crates/sha_p2pool/src}/main.rs (72%) rename {src => crates/sha_p2pool/src}/server/config.rs (94%) rename src/server/grpc.rs => crates/sha_p2pool/src/server/grpc/base_node.rs (68%) create mode 100644 crates/sha_p2pool/src/server/grpc/error.rs create mode 100644 crates/sha_p2pool/src/server/grpc/mod.rs rename {src => crates/sha_p2pool/src}/server/mod.rs (100%) rename {src => crates/sha_p2pool/src}/server/p2p.rs (100%) rename {src => crates/sha_p2pool/src}/server/server.rs (80%) diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 00000000..d422d81e --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "crates/grpc/submodules/tari"] + path = crates/grpc/submodules/tari + url = https://github.com/tari-project/tari diff --git a/Cargo.toml b/Cargo.toml index b56d69e5..d51d64fe 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,30 +1,11 @@ -[package] -name = "sha-p2pool" -version = "0.1.0" -edition = "2021" - -[dependencies] -libp2p = { version = "0.53.2", features = [ - "dns", - "identify", - "macros", - "tokio", - "serde", - "noise", - "tcp", - "yamux", - "mdns", - "gossipsub" -] } -tokio = { version = "1.38.0", features = ["full"] } -thiserror = "1.0" -serde = "1.0.203" -anyhow = "1.0" -log = "0.4.21" -env_logger = "0.11.3" -minotari_app_grpc = { git = "https://github.com/tari-project/tari.git" } -minotari_node_grpc_client = { git = "https://github.com/tari-project/tari.git" } -tonic = "0.8.3" -#tari_common_types = "0.8.1" +[workspace] +resolver = "2" +members = [ + "crates/grpc", + "crates/sha_p2pool" +] +[workspace.dependencies] +tonic = "0.8.3" +tonic-build = "0.8.4" \ No newline at end of file diff --git a/crates/grpc/.gitignore b/crates/grpc/.gitignore new file mode 100644 index 00000000..196e176d --- /dev/null +++ b/crates/grpc/.gitignore @@ -0,0 +1,19 @@ +# Generated by Cargo +# will have compiled files and executables +debug/ +target/ + +# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries +# More 
information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html +Cargo.lock + +# These are backup files generated by rustfmt +**/*.rs.bk + +# MSVC Windows builds of rustc generate these, which store debugging information +*.pdb + + +# Added by cargo + +/target diff --git a/crates/grpc/Cargo.toml b/crates/grpc/Cargo.toml new file mode 100644 index 00000000..eef795d2 --- /dev/null +++ b/crates/grpc/Cargo.toml @@ -0,0 +1,12 @@ +[package] +name = "sha_p2pool_grpc" +version = "0.1.0" +edition = "2021" + +[dependencies] +tonic = { workspace = true } +prost = "0.11.9" +prost-types = "0.11.9" + +[build-dependencies] +tonic-build = { workspace = true } diff --git a/crates/grpc/build.rs b/crates/grpc/build.rs new file mode 100644 index 00000000..fa8fd0db --- /dev/null +++ b/crates/grpc/build.rs @@ -0,0 +1,17 @@ +// Copyright 2024 The Tari Project +// SPDX-License-Identifier: BSD-3-Clause + +fn main() -> Result<(), Box> { + tonic_build::configure() + .build_client(true) + .build_server(true) + .include_file("tari.sha_p2pool.rs") + .compile( + &[ + "proto/p2pool.proto", + ], + &["proto", "submodules/tari/applications/minotari_app_grpc/proto/"], + )?; + + Ok(()) +} diff --git a/crates/grpc/proto/p2pool.proto b/crates/grpc/proto/p2pool.proto new file mode 100644 index 00000000..8f24064d --- /dev/null +++ b/crates/grpc/proto/p2pool.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; + +package tari.sha_p2pool.rpc; + +import "base_node.proto"; + +service ShaP2Pool { + rpc GetNewBlockTemplate(GetNewBlockTemplateRequest) returns(tari.rpc.NewBlockTemplateResponse); +} + +message GetNewBlockTemplateRequest { + bytes wallet_payment_address = 1; +} \ No newline at end of file diff --git a/crates/grpc/src/lib.rs b/crates/grpc/src/lib.rs new file mode 100644 index 00000000..a71a74bc --- /dev/null +++ b/crates/grpc/src/lib.rs @@ -0,0 +1,4 @@ +#[allow(clippy::all, clippy::pedantic)] +pub mod tari_sha_p2pool_rpc { + tonic::include_proto!("tari.sha_p2pool"); +} \ No newline at end 
of file diff --git a/crates/sha_p2pool/.gitignore b/crates/sha_p2pool/.gitignore new file mode 100644 index 00000000..196e176d --- /dev/null +++ b/crates/sha_p2pool/.gitignore @@ -0,0 +1,19 @@ +# Generated by Cargo +# will have compiled files and executables +debug/ +target/ + +# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries +# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html +Cargo.lock + +# These are backup files generated by rustfmt +**/*.rs.bk + +# MSVC Windows builds of rustc generate these, which store debugging information +*.pdb + + +# Added by cargo + +/target diff --git a/crates/sha_p2pool/Cargo.toml b/crates/sha_p2pool/Cargo.toml new file mode 100644 index 00000000..508d2027 --- /dev/null +++ b/crates/sha_p2pool/Cargo.toml @@ -0,0 +1,33 @@ +[package] +name = "sha_p2pool" +version = "0.1.0" +edition = "2021" + +[dependencies] +sha_p2pool_grpc = { path = "../grpc" } +minotari_app_grpc = { git = "https://github.com/tari-project/tari.git" } +minotari_node_grpc_client = { git = "https://github.com/tari-project/tari.git" } +tari_common_types = { git = "https://github.com/tari-project/tari.git" } + +libp2p = { version = "0.53.2", features = [ + "dns", + "identify", + "macros", + "tokio", + "serde", + "noise", + "tcp", + "yamux", + "mdns", + "gossipsub", + "kad" +] } +tokio = { version = "1.38.0", features = ["full"] } +thiserror = "1.0" +serde = "1.0.203" +anyhow = "1.0" +log = "0.4.21" +env_logger = "0.11.3" +tonic = { workspace = true } + + diff --git a/src/main.rs b/crates/sha_p2pool/src/main.rs similarity index 72% rename from src/main.rs rename to crates/sha_p2pool/src/main.rs index 2a77bf74..0b07b4a7 100644 --- a/src/main.rs +++ b/crates/sha_p2pool/src/main.rs @@ -3,7 +3,7 @@ mod server; #[tokio::main] async fn main() -> anyhow::Result<()> { env_logger::init(); - let config = server::Config::builder().with_p2p_port(9999).build(); + let config = server::Config::builder().build(); 
let mut server = server::Server::new(config).await?; server.start().await?; Ok(()) diff --git a/src/server/config.rs b/crates/sha_p2pool/src/server/config.rs similarity index 94% rename from src/server/config.rs rename to crates/sha_p2pool/src/server/config.rs index 27546126..4da991f1 100644 --- a/src/server/config.rs +++ b/crates/sha_p2pool/src/server/config.rs @@ -14,7 +14,7 @@ impl Default for Config { Self { base_node_address: String::from("http://127.0.0.1:18142"), p2p_port: 0, // bind to any free port - grpc_port: 18143, // default local base node port + 1 + grpc_port: 18145, idle_connection_timeout: Duration::from_secs(30), } } diff --git a/src/server/grpc.rs b/crates/sha_p2pool/src/server/grpc/base_node.rs similarity index 68% rename from src/server/grpc.rs rename to crates/sha_p2pool/src/server/grpc/base_node.rs index 3920f416..19a76220 100644 --- a/src/server/grpc.rs +++ b/crates/sha_p2pool/src/server/grpc/base_node.rs @@ -1,28 +1,25 @@ -use std::sync::{Arc, Mutex}; +use std::future::Future; +use std::sync::Arc; use libp2p::futures::channel::mpsc; -use log::info; +use libp2p::futures::channel::mpsc::SendError; +use libp2p::futures::SinkExt; +use log::{error, info, warn}; use minotari_app_grpc::tari_rpc; -use minotari_app_grpc::tari_rpc::{Block, BlockBlobRequest, BlockGroupRequest, BlockGroupResponse, BlockHeaderResponse, BlockHeight, BlockTimingResponse, ConsensusConstants, Empty, FetchMatchingUtxosRequest, GetActiveValidatorNodesRequest, GetBlocksRequest, GetHeaderByHashRequest, GetMempoolTransactionsRequest, GetNewBlockBlobResult, GetNewBlockResult, GetNewBlockTemplateWithCoinbasesRequest, GetNewBlockWithCoinbasesRequest, GetPeersRequest, GetShardKeyRequest, GetShardKeyResponse, GetSideChainUtxosRequest, GetTemplateRegistrationsRequest, HeightRequest, ListConnectedPeersResponse, ListHeadersRequest, MempoolStatsResponse, NetworkStatusResponse, NewBlockTemplate, NewBlockTemplateRequest, NewBlockTemplateResponse, NodeIdentity, SearchKernelsRequest, 
SearchUtxosRequest, SoftwareUpdate, StringValue, SubmitBlockResponse, SubmitTransactionRequest, SubmitTransactionResponse, SyncInfoResponse, SyncProgressResponse, TipInfoResponse, TransactionStateRequest, TransactionStateResponse}; +use minotari_app_grpc::tari_rpc::{Block, BlockBlobRequest, BlockGroupRequest, BlockGroupResponse, BlockHeaderResponse, BlockHeight, BlockTimingResponse, ConsensusConstants, Empty, FetchMatchingUtxosRequest, GetActiveValidatorNodesRequest, GetBlocksRequest, GetHeaderByHashRequest, GetMempoolTransactionsRequest, GetNewBlockBlobResult, GetNewBlockResult, GetNewBlockTemplateWithCoinbasesRequest, GetNewBlockWithCoinbasesRequest, GetPeersRequest, GetShardKeyRequest, GetShardKeyResponse, GetSideChainUtxosRequest, GetTemplateRegistrationsRequest, HeightRequest, ListConnectedPeersResponse, ListHeadersRequest, MempoolStatsResponse, NetworkStatusResponse, NewBlockCoinbase, NewBlockTemplate, NewBlockTemplateRequest, NewBlockTemplateResponse, NodeIdentity, PowAlgo, SearchKernelsRequest, SearchUtxosRequest, SoftwareUpdate, StringValue, SubmitBlockResponse, SubmitTransactionRequest, SubmitTransactionResponse, SyncInfoResponse, SyncProgressResponse, TipInfoResponse, TransactionStateRequest, TransactionStateResponse}; use minotari_app_grpc::tari_rpc::base_node_client::BaseNodeClient; +use minotari_app_grpc::tari_rpc::pow_algo::PowAlgos; use minotari_node_grpc_client::BaseNodeGrpcClient; -use thiserror::Error; +use tari_common_types::tari_address::TariAddress; +use tokio::sync::Mutex; use tonic::{Request, Response, Status}; -pub struct TariBaseNodeGrpc { - client: Arc>>, -} +use crate::server::grpc::error::{Error, TonicError}; -#[derive(Error, Debug)] -pub enum Error { - #[error("Tonic error: {0}")] - Tonic(#[from] TonicError), -} +const LIST_HEADERS_PAGE_SIZE: usize = 10; -#[derive(Error, Debug)] -pub enum TonicError { - #[error("Transport error: {0}")] - Transport(#[from] tonic::transport::Error), +pub struct TariBaseNodeGrpc { + client: Arc>>, } impl 
TariBaseNodeGrpc { @@ -52,23 +49,72 @@ impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc { async fn get_new_block_template(&self, request: Request) -> Result, Status> { info!("get_new_block_template called!"); - if let Ok(mut client) = self.client.lock() { - let result = client - .get_new_block_template(request.into_inner().clone()) - .await; - } - - Err(Status::internal("")) + self.client.lock().await.get_new_block_template(request.into_inner()).await } async fn get_new_block(&self, request: Request) -> Result, Status> { info!("get_new_block called!"); + // TODO: remove extra logic and only proxy, move logic to the new p2pool grpc handler + let origin_block_template = request.into_inner(); + let origin = origin_block_template.clone(); + if let Some(header) = origin_block_template.header { + if let Some(body) = origin_block_template.body { + if let Some(pow) = header.pow { + + // simply proxy the request if pow algo is not supported + if pow.pow_algo != PowAlgos::Sha3x as u64 { + warn!("Only SHA3x PoW supported!"); + return self.client.lock().await.get_new_block(origin).await; + } + + // requesting new block template which includes all shares + let mut new_block_template_req = GetNewBlockTemplateWithCoinbasesRequest::default(); + let mut new_pow_algo = PowAlgo::default(); + new_pow_algo.set_pow_algo(PowAlgos::Sha3x); + new_block_template_req.algo = Some(new_pow_algo); + new_block_template_req.coinbases = vec![ + NewBlockCoinbase { + address: TariAddress::from_hex("30a815df7b8d7f653ce3252f08a21d570b1ac44958cb4d7af0e0ef124f89b11943") + .unwrap() + .to_hex(), + value: 1, + stealth_payment: false, + revealed_value_proof: true, + coinbase_extra: Vec::new(), + }, + ]; + if let Ok(response) = self.client.lock().await + .get_new_block_template_with_coinbases(new_block_template_req).await {} + } + } + } todo!() + // self.client.lock().await.get_new_block(request.into_inner()).await } + async fn submit_block(&self, request: Request) -> Result, Status> { + 
info!("submit_block called!"); + self.client.lock().await.submit_block(request.into_inner()).await + } async fn list_headers(&self, request: Request) -> Result, Status> { - todo!() + match self.client.lock().await.list_headers(request.into_inner()).await { + Ok(response) => { + let (mut tx, rx) = mpsc::channel(LIST_HEADERS_PAGE_SIZE); + tokio::spawn(async move { + let mut stream = response.into_inner(); + tokio::spawn(async move { + while let Ok(Some(next_message)) = stream.message().await { + if let Err(e) = tx.send(Ok(next_message)).await { + error!("failed to send 'list_headers' response message: {e}"); + } + } + }); + }); + Ok(Response::new(rx)) + } + Err(status) => Err(status) + } } async fn get_header_by_hash(&self, request: Request) -> Result, Status> { @@ -123,10 +169,6 @@ impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc { todo!() } - async fn submit_block(&self, request: Request) -> Result, Status> { - todo!() - } - async fn submit_block_blob(&self, request: Request) -> Result, Status> { todo!() } @@ -144,7 +186,7 @@ impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc { } async fn get_tip_info(&self, request: Request) -> Result, Status> { - todo!() + self.client.lock().await.get_tip_info(request.into_inner()).await } async fn search_kernels(&self, request: Request) -> Result, Status> { diff --git a/crates/sha_p2pool/src/server/grpc/error.rs b/crates/sha_p2pool/src/server/grpc/error.rs new file mode 100644 index 00000000..0f17335d --- /dev/null +++ b/crates/sha_p2pool/src/server/grpc/error.rs @@ -0,0 +1,13 @@ +use thiserror::Error; + +#[derive(Error, Debug)] +pub enum Error { + #[error("Tonic error: {0}")] + Tonic(#[from] TonicError), +} + +#[derive(Error, Debug)] +pub enum TonicError { + #[error("Transport error: {0}")] + Transport(#[from] tonic::transport::Error), +} \ No newline at end of file diff --git a/crates/sha_p2pool/src/server/grpc/mod.rs b/crates/sha_p2pool/src/server/grpc/mod.rs new file mode 100644 index 
00000000..47336320 --- /dev/null +++ b/crates/sha_p2pool/src/server/grpc/mod.rs @@ -0,0 +1,4 @@ +use thiserror::Error; + +pub mod base_node; +pub mod error; \ No newline at end of file diff --git a/src/server/mod.rs b/crates/sha_p2pool/src/server/mod.rs similarity index 100% rename from src/server/mod.rs rename to crates/sha_p2pool/src/server/mod.rs diff --git a/src/server/p2p.rs b/crates/sha_p2pool/src/server/p2p.rs similarity index 100% rename from src/server/p2p.rs rename to crates/sha_p2pool/src/server/p2p.rs diff --git a/src/server/server.rs b/crates/sha_p2pool/src/server/server.rs similarity index 80% rename from src/server/server.rs rename to crates/sha_p2pool/src/server/server.rs index 7bcc2295..0ebf0d3c 100644 --- a/src/server/server.rs +++ b/crates/sha_p2pool/src/server/server.rs @@ -13,8 +13,10 @@ use minotari_app_grpc::tari_rpc::base_node_server::BaseNodeServer; use thiserror::Error; use tokio::{io, io::AsyncBufReadExt, select}; +use sha_p2pool_grpc::tari_sha_p2pool_rpc::tari::sha_p2pool::rpc::sha_p2_pool_server::ShaP2PoolServer; + use crate::server::{config, grpc, p2p}; -use crate::server::grpc::{TariBaseNodeGrpc, TonicError}; +use crate::server::grpc::base_node::{TariBaseNodeGrpc, TonicError}; use crate::server::p2p::{ServerNetworkBehaviour, ServerNetworkBehaviourEvent}; #[derive(Error, Debug)] @@ -22,7 +24,7 @@ pub enum Error { #[error("LibP2P error: {0}")] LibP2P(#[from] LibP2PError), #[error("gRPC error: {0}")] - GRPC(#[from] grpc::Error), + GRPC(#[from] grpc::error::Error), #[error("Socket address parse error: {0}")] AddrParse(#[from] AddrParseError), } @@ -45,16 +47,19 @@ pub enum LibP2PError { pub struct Server { config: config::Config, swarm: Swarm, - grpc_server: BaseNodeServer, + base_node_grpc_server: BaseNodeServer, } impl Server { pub async fn new(config: config::Config) -> Result { let swarm = p2p::swarm(&config)?; - let grpc_service = TariBaseNodeGrpc::new(config.base_node_address.clone()).await.map_err(Error::GRPC)?; - let grpc_server = 
BaseNodeServer::new(grpc_service); + let base_node_grpc_service = TariBaseNodeGrpc::new(config.base_node_address.clone()).await.map_err(Error::GRPC)?; + let base_node_grpc_server = BaseNodeServer::new(base_node_grpc_service); + + // TODO: continue + // let p2pool_server = ShaP2PoolServer::new() - Ok(Self { config, swarm, grpc_server }) + Ok(Self { config, swarm, base_node_grpc_server }) } pub async fn start_grpc(service: BaseNodeServer, grpc_port: u64) -> Result<(), Error> { @@ -70,7 +75,7 @@ impl Server { .await .map_err(|err| { error!("GRPC encountered an error: {:?}", err); - Error::GRPC(grpc::Error::Tonic(TonicError::Transport(err))) + Error::GRPC(grpc::error::Error::Tonic(TonicError::Transport(err))) })?; info!("gRPC server stopped!"); @@ -90,7 +95,7 @@ impl Server { info!("Starting Tari SHA-3 mining P2Pool..."); // grpc serve - let grpc_service = self.grpc_server.clone(); + let grpc_service = self.base_node_grpc_server.clone(); let grpc_port = self.config.grpc_port; tokio::spawn(async move { Self::start_grpc(grpc_service, grpc_port).await; @@ -109,6 +114,12 @@ impl Server { for (peer, addr) in peers { info!("Discovered new peer {} at {}", peer, addr); self.swarm.behaviour_mut().gossipsub.add_explicit_peer(&peer); + match self.swarm.dial(addr) { + Ok(_) => { + info!("Dial success!"); + }, + Err(_) => {}, + } } }, mdns::Event::Expired(peers) => { From 72d889e400a959b378f91257d2e63f8f30da4a8f Mon Sep 17 00:00:00 2001 From: richardb Date: Thu, 6 Jun 2024 15:05:35 +0200 Subject: [PATCH 03/43] impl in progress --- Cargo.toml | 4 +- crates/grpc/.gitignore | 19 ----- crates/grpc/Cargo.toml | 12 --- crates/grpc/build.rs | 17 ---- crates/grpc/proto/p2pool.proto | 13 --- crates/grpc/src/lib.rs | 4 - crates/sha_p2pool/Cargo.toml | 12 ++- .../sha_p2pool/src/server/grpc/base_node.rs | 71 ++++++++-------- crates/sha_p2pool/src/server/grpc/mod.rs | 5 +- crates/sha_p2pool/src/server/grpc/p2pool.rs | 85 +++++++++++++++++++ crates/sha_p2pool/src/server/server.rs | 30 ++++--- 11 
files changed, 151 insertions(+), 121 deletions(-) delete mode 100644 crates/grpc/.gitignore delete mode 100644 crates/grpc/Cargo.toml delete mode 100644 crates/grpc/build.rs delete mode 100644 crates/grpc/proto/p2pool.proto delete mode 100644 crates/grpc/src/lib.rs create mode 100644 crates/sha_p2pool/src/server/grpc/p2pool.rs diff --git a/Cargo.toml b/Cargo.toml index d51d64fe..dbe09487 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -2,10 +2,8 @@ resolver = "2" members = [ - "crates/grpc", "crates/sha_p2pool" ] [workspace.dependencies] -tonic = "0.8.3" -tonic-build = "0.8.4" \ No newline at end of file +tonic = "0.8.3" \ No newline at end of file diff --git a/crates/grpc/.gitignore b/crates/grpc/.gitignore deleted file mode 100644 index 196e176d..00000000 --- a/crates/grpc/.gitignore +++ /dev/null @@ -1,19 +0,0 @@ -# Generated by Cargo -# will have compiled files and executables -debug/ -target/ - -# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries -# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html -Cargo.lock - -# These are backup files generated by rustfmt -**/*.rs.bk - -# MSVC Windows builds of rustc generate these, which store debugging information -*.pdb - - -# Added by cargo - -/target diff --git a/crates/grpc/Cargo.toml b/crates/grpc/Cargo.toml deleted file mode 100644 index eef795d2..00000000 --- a/crates/grpc/Cargo.toml +++ /dev/null @@ -1,12 +0,0 @@ -[package] -name = "sha_p2pool_grpc" -version = "0.1.0" -edition = "2021" - -[dependencies] -tonic = { workspace = true } -prost = "0.11.9" -prost-types = "0.11.9" - -[build-dependencies] -tonic-build = { workspace = true } diff --git a/crates/grpc/build.rs b/crates/grpc/build.rs deleted file mode 100644 index fa8fd0db..00000000 --- a/crates/grpc/build.rs +++ /dev/null @@ -1,17 +0,0 @@ -// Copyright 2024 The Tari Project -// SPDX-License-Identifier: BSD-3-Clause - -fn main() -> Result<(), Box> { - tonic_build::configure() - 
.build_client(true) - .build_server(true) - .include_file("tari.sha_p2pool.rs") - .compile( - &[ - "proto/p2pool.proto", - ], - &["proto", "submodules/tari/applications/minotari_app_grpc/proto/"], - )?; - - Ok(()) -} diff --git a/crates/grpc/proto/p2pool.proto b/crates/grpc/proto/p2pool.proto deleted file mode 100644 index 8f24064d..00000000 --- a/crates/grpc/proto/p2pool.proto +++ /dev/null @@ -1,13 +0,0 @@ -syntax = "proto3"; - -package tari.sha_p2pool.rpc; - -import "base_node.proto"; - -service ShaP2Pool { - rpc GetNewBlockTemplate(GetNewBlockTemplateRequest) returns(tari.rpc.NewBlockTemplateResponse); -} - -message GetNewBlockTemplateRequest { - bytes wallet_payment_address = 1; -} \ No newline at end of file diff --git a/crates/grpc/src/lib.rs b/crates/grpc/src/lib.rs deleted file mode 100644 index a71a74bc..00000000 --- a/crates/grpc/src/lib.rs +++ /dev/null @@ -1,4 +0,0 @@ -#[allow(clippy::all, clippy::pedantic)] -pub mod tari_sha_p2pool_rpc { - tonic::include_proto!("tari.sha_p2pool"); -} \ No newline at end of file diff --git a/crates/sha_p2pool/Cargo.toml b/crates/sha_p2pool/Cargo.toml index 508d2027..d4f68b6d 100644 --- a/crates/sha_p2pool/Cargo.toml +++ b/crates/sha_p2pool/Cargo.toml @@ -4,10 +4,14 @@ version = "0.1.0" edition = "2021" [dependencies] -sha_p2pool_grpc = { path = "../grpc" } -minotari_app_grpc = { git = "https://github.com/tari-project/tari.git" } -minotari_node_grpc_client = { git = "https://github.com/tari-project/tari.git" } -tari_common_types = { git = "https://github.com/tari-project/tari.git" } +#sha_p2pool_grpc = { path = "../sha_p2pool_grpc" } +#minotari_app_grpc = { git = "https://github.com/tari-project/tari.git" } +#minotari_node_grpc_client = { git = "https://github.com/tari-project/tari.git" } +#tari_common_types = { git = "https://github.com/tari-project/tari.git" } + +minotari_app_grpc = { git = "https://github.com/ksrichard/tari.git", branch = "p2pool" } +minotari_node_grpc_client = { git = 
"https://github.com/ksrichard/tari.git", branch = "p2pool" } +tari_common_types = { git = "https://github.com/ksrichard/tari.git", branch = "p2pool" } libp2p = { version = "0.53.2", features = [ "dns", diff --git a/crates/sha_p2pool/src/server/grpc/base_node.rs b/crates/sha_p2pool/src/server/grpc/base_node.rs index 19a76220..4cd70c29 100644 --- a/crates/sha_p2pool/src/server/grpc/base_node.rs +++ b/crates/sha_p2pool/src/server/grpc/base_node.rs @@ -24,6 +24,7 @@ pub struct TariBaseNodeGrpc { impl TariBaseNodeGrpc { pub async fn new(base_node_address: String) -> Result { + // TODO: add retry mechanism to try at least 3 times before failing let client = BaseNodeGrpcClient::connect(base_node_address) .await .map_err(|e| Error::Tonic(TonicError::Transport(e)))?; @@ -55,41 +56,41 @@ impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc { async fn get_new_block(&self, request: Request) -> Result, Status> { info!("get_new_block called!"); // TODO: remove extra logic and only proxy, move logic to the new p2pool grpc handler - let origin_block_template = request.into_inner(); - let origin = origin_block_template.clone(); - if let Some(header) = origin_block_template.header { - if let Some(body) = origin_block_template.body { - if let Some(pow) = header.pow { - - // simply proxy the request if pow algo is not supported - if pow.pow_algo != PowAlgos::Sha3x as u64 { - warn!("Only SHA3x PoW supported!"); - return self.client.lock().await.get_new_block(origin).await; - } - - // requesting new block template which includes all shares - let mut new_block_template_req = GetNewBlockTemplateWithCoinbasesRequest::default(); - let mut new_pow_algo = PowAlgo::default(); - new_pow_algo.set_pow_algo(PowAlgos::Sha3x); - new_block_template_req.algo = Some(new_pow_algo); - new_block_template_req.coinbases = vec![ - NewBlockCoinbase { - address: TariAddress::from_hex("30a815df7b8d7f653ce3252f08a21d570b1ac44958cb4d7af0e0ef124f89b11943") - .unwrap() - .to_hex(), - value: 1, - 
stealth_payment: false, - revealed_value_proof: true, - coinbase_extra: Vec::new(), - }, - ]; - if let Ok(response) = self.client.lock().await - .get_new_block_template_with_coinbases(new_block_template_req).await {} - } - } - } - todo!() - // self.client.lock().await.get_new_block(request.into_inner()).await + // let origin_block_template = request.into_inner(); + // let origin = origin_block_template.clone(); + // if let Some(header) = origin_block_template.header { + // if let Some(body) = origin_block_template.body { + // if let Some(pow) = header.pow { + // + // // simply proxy the request if pow algo is not supported + // if pow.pow_algo != PowAlgos::Sha3x as u64 { + // warn!("Only SHA3x PoW supported!"); + // return self.client.lock().await.get_new_block(origin).await; + // } + // + // // requesting new block template which includes all shares + // let mut new_block_template_req = GetNewBlockTemplateWithCoinbasesRequest::default(); + // let mut new_pow_algo = PowAlgo::default(); + // new_pow_algo.set_pow_algo(PowAlgos::Sha3x); + // new_block_template_req.algo = Some(new_pow_algo); + // new_block_template_req.coinbases = vec![ + // NewBlockCoinbase { + // address: TariAddress::from_hex("30a815df7b8d7f653ce3252f08a21d570b1ac44958cb4d7af0e0ef124f89b11943") + // .unwrap() + // .to_hex(), + // value: 1, + // stealth_payment: false, + // revealed_value_proof: true, + // coinbase_extra: Vec::new(), + // }, + // ]; + // if let Ok(response) = self.client.lock().await + // .get_new_block_template_with_coinbases(new_block_template_req).await {} + // } + // } + // } + // todo!() + self.client.lock().await.get_new_block(request.into_inner()).await } async fn submit_block(&self, request: Request) -> Result, Status> { diff --git a/crates/sha_p2pool/src/server/grpc/mod.rs b/crates/sha_p2pool/src/server/grpc/mod.rs index 47336320..9e3fee3b 100644 --- a/crates/sha_p2pool/src/server/grpc/mod.rs +++ b/crates/sha_p2pool/src/server/grpc/mod.rs @@ -1,4 +1,3 @@ -use 
thiserror::Error; - pub mod base_node; -pub mod error; \ No newline at end of file +pub mod error; +pub mod p2pool; \ No newline at end of file diff --git a/crates/sha_p2pool/src/server/grpc/p2pool.rs b/crates/sha_p2pool/src/server/grpc/p2pool.rs new file mode 100644 index 00000000..30f06483 --- /dev/null +++ b/crates/sha_p2pool/src/server/grpc/p2pool.rs @@ -0,0 +1,85 @@ +use std::collections::HashMap; +use std::sync::Arc; + +use log::info; +use minotari_app_grpc::tari_rpc::{GetNewBlockRequest, GetNewBlockResponse, GetNewBlockTemplateWithCoinbasesRequest, NewBlockCoinbase, NewBlockTemplateRequest, PowAlgo}; +use minotari_app_grpc::tari_rpc::base_node_client::BaseNodeClient; +use minotari_app_grpc::tari_rpc::pow_algo::PowAlgos; +use minotari_app_grpc::tari_rpc::sha_p2_pool_server::ShaP2Pool; +use minotari_node_grpc_client::BaseNodeGrpcClient; +use tokio::sync::Mutex; +use tonic::{Request, Response, Status}; + +use crate::server::grpc::error::Error; +use crate::server::grpc::error::TonicError; + +pub struct ShaP2PoolGrpc { + client: Arc>>, +} + +impl ShaP2PoolGrpc { + pub async fn new(base_node_address: String) -> Result { + // TODO: add retry mechanism to try at least 3 times before failing + let client = BaseNodeGrpcClient::connect(base_node_address) + .await + .map_err(|e| Error::Tonic(TonicError::Transport(e)))?; + + Ok(Self { client: Arc::new(Mutex::new(client)) }) + } + + // TODO: complete implementation to find the right shares + async fn generate_shares(&self, request: &GetNewBlockRequest, reward: u64) -> Vec { + let mut miners = HashMap::::new(); // target wallet address -> hash rate + + // TODO: remove, only for testing now, get miners from outside of this module using P2P network/sharechain + miners.insert(request.wallet_payment_address.clone(), 100); + miners.insert("6ee38cf177a8fbf818d93ba5bbca6078efd88cef5c57927ce65dd0716ca3ee655a".to_string(), 100); + + // calculate full hash rate and shares + let full_hash_rate: u64 = miners.iter() + .map(|(_, rate)| 
rate) + .sum(); + miners.iter() + .map(|(addr, rate)| (addr, rate / full_hash_rate)) + .for_each(|(addr, share)| { + info!("{addr} -> {share:?}"); + }); + + todo!() + } +} + +#[tonic::async_trait] +impl ShaP2Pool for ShaP2PoolGrpc { + async fn get_new_block(&self, request: Request) -> Result, Status> { + let template_request = request.into_inner(); + let mut pow_algo = PowAlgo::default(); + pow_algo.set_pow_algo(PowAlgos::Sha3x); + + // request original block template to get reward + let req = NewBlockTemplateRequest { + algo: Some(pow_algo.clone()), + max_weight: 0, + }; + let template_response = self.client.lock().await + .get_new_block_template(req) + .await? + .into_inner(); + let miner_data = template_response.miner_data.ok_or_else(|| Status::internal("missing miner data"))?; + let reward = miner_data.reward; + + // request new block template with shares as coinbases + let mut new_block_template_req = GetNewBlockTemplateWithCoinbasesRequest::default(); + new_block_template_req.algo = Some(pow_algo); + new_block_template_req.coinbases = self.generate_shares(&template_request, reward).await; + let response = self.client.lock().await + .get_new_block_template_with_coinbases(new_block_template_req).await?.into_inner(); + let miner_data = response.clone().miner_data.ok_or_else(|| Status::internal("missing miner data"))?; + let target_difficulty = miner_data.target_difficulty; + + Ok(Response::new(GetNewBlockResponse { + block: Some(response), + target_difficulty, + })) + } +} \ No newline at end of file diff --git a/crates/sha_p2pool/src/server/server.rs b/crates/sha_p2pool/src/server/server.rs index 0ebf0d3c..64276985 100644 --- a/crates/sha_p2pool/src/server/server.rs +++ b/crates/sha_p2pool/src/server/server.rs @@ -10,13 +10,14 @@ use libp2p::mdns::tokio::Tokio; use libp2p::swarm::{NetworkBehaviour, SwarmEvent}; use log::{error, info}; use minotari_app_grpc::tari_rpc::base_node_server::BaseNodeServer; +use 
minotari_app_grpc::tari_rpc::sha_p2_pool_server::ShaP2PoolServer; use thiserror::Error; use tokio::{io, io::AsyncBufReadExt, select}; -use sha_p2pool_grpc::tari_sha_p2pool_rpc::tari::sha_p2pool::rpc::sha_p2_pool_server::ShaP2PoolServer; - use crate::server::{config, grpc, p2p}; -use crate::server::grpc::base_node::{TariBaseNodeGrpc, TonicError}; +use crate::server::grpc::base_node::TariBaseNodeGrpc; +use crate::server::grpc::error::TonicError; +use crate::server::grpc::p2pool::ShaP2PoolGrpc; use crate::server::p2p::{ServerNetworkBehaviour, ServerNetworkBehaviourEvent}; #[derive(Error, Debug)] @@ -47,7 +48,8 @@ pub enum LibP2PError { pub struct Server { config: config::Config, swarm: Swarm, - base_node_grpc_server: BaseNodeServer, + base_node_grpc_service: BaseNodeServer, + p2pool_grpc_service: ShaP2PoolServer, } impl Server { @@ -56,17 +58,22 @@ impl Server { let base_node_grpc_service = TariBaseNodeGrpc::new(config.base_node_address.clone()).await.map_err(Error::GRPC)?; let base_node_grpc_server = BaseNodeServer::new(base_node_grpc_service); - // TODO: continue - // let p2pool_server = ShaP2PoolServer::new() + let p2pool_grpc_service = ShaP2PoolGrpc::new(config.base_node_address.clone()).await.map_err(Error::GRPC)?; + let p2pool_server = ShaP2PoolServer::new(p2pool_grpc_service); - Ok(Self { config, swarm, base_node_grpc_server }) + Ok(Self { config, swarm, base_node_grpc_service: base_node_grpc_server, p2pool_grpc_service: p2pool_server }) } - pub async fn start_grpc(service: BaseNodeServer, grpc_port: u64) -> Result<(), Error> { + pub async fn start_grpc( + base_node_service: BaseNodeServer, + p2pool_service: ShaP2PoolServer, + grpc_port: u64, + ) -> Result<(), Error> { info!("Starting gRPC server on port {}!", &grpc_port); tonic::transport::Server::builder() - .add_service(service) + .add_service(base_node_service) + .add_service(p2pool_service) .serve( SocketAddr::from_str( format!("0.0.0.0:{}", grpc_port).as_str() @@ -95,10 +102,11 @@ impl Server { 
info!("Starting Tari SHA-3 mining P2Pool..."); // grpc serve - let grpc_service = self.base_node_grpc_server.clone(); + let base_node_grpc_service = self.base_node_grpc_service.clone(); + let p2pool_grpc_service = self.p2pool_grpc_service.clone(); let grpc_port = self.config.grpc_port; tokio::spawn(async move { - Self::start_grpc(grpc_service, grpc_port).await; + Self::start_grpc(base_node_grpc_service, p2pool_grpc_service, grpc_port).await; }); // main loop From 1f2030fe4e256bcf4dcd152950ce917f673bf58d Mon Sep 17 00:00:00 2001 From: richardb Date: Fri, 7 Jun 2024 17:16:35 +0200 Subject: [PATCH 04/43] refactor + handle properly when a block with lower difficulty not accepted on network --- Cargo.toml | 2 +- crates/sha_p2pool/src/server/config.rs | 10 +- .../sha_p2pool/src/server/grpc/base_node.rs | 275 ++++++++++-------- crates/sha_p2pool/src/server/grpc/p2pool.rs | 44 ++- crates/sha_p2pool/src/server/server.rs | 15 +- 5 files changed, 203 insertions(+), 143 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index dbe09487..6dc89a22 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -6,4 +6,4 @@ members = [ ] [workspace.dependencies] -tonic = "0.8.3" \ No newline at end of file +tonic = "0.8.3" diff --git a/crates/sha_p2pool/src/server/config.rs b/crates/sha_p2pool/src/server/config.rs index 4da991f1..83787261 100644 --- a/crates/sha_p2pool/src/server/config.rs +++ b/crates/sha_p2pool/src/server/config.rs @@ -4,8 +4,8 @@ use std::time::Duration; #[derive(Clone)] pub struct Config { pub base_node_address: String, - pub p2p_port: u64, - pub grpc_port: u64, + pub p2p_port: u16, + pub grpc_port: u16, pub idle_connection_timeout: Duration, } @@ -14,7 +14,7 @@ impl Default for Config { Self { base_node_address: String::from("http://127.0.0.1:18142"), p2p_port: 0, // bind to any free port - grpc_port: 18145, + grpc_port: 18145, // to possibly not collide with any other ports idle_connection_timeout: Duration::from_secs(30), } } @@ -33,12 +33,12 @@ pub struct ConfigBuilder { } 
impl ConfigBuilder { - pub fn with_p2p_port(&mut self, port: u64) -> &mut Self { + pub fn with_p2p_port(&mut self, port: u16) -> &mut Self { self.config.p2p_port = port; self } - pub fn with_grpc_port(&mut self, port: u64) -> &mut Self { + pub fn with_grpc_port(&mut self, port: u16) -> &mut Self { self.config.grpc_port = port; self } diff --git a/crates/sha_p2pool/src/server/grpc/base_node.rs b/crates/sha_p2pool/src/server/grpc/base_node.rs index 4cd70c29..fbcebeb5 100644 --- a/crates/sha_p2pool/src/server/grpc/base_node.rs +++ b/crates/sha_p2pool/src/server/grpc/base_node.rs @@ -1,24 +1,55 @@ use std::future::Future; +use std::ops::Deref; use std::sync::Arc; use libp2p::futures::channel::mpsc; -use libp2p::futures::channel::mpsc::SendError; use libp2p::futures::SinkExt; use log::{error, info, warn}; use minotari_app_grpc::tari_rpc; -use minotari_app_grpc::tari_rpc::{Block, BlockBlobRequest, BlockGroupRequest, BlockGroupResponse, BlockHeaderResponse, BlockHeight, BlockTimingResponse, ConsensusConstants, Empty, FetchMatchingUtxosRequest, GetActiveValidatorNodesRequest, GetBlocksRequest, GetHeaderByHashRequest, GetMempoolTransactionsRequest, GetNewBlockBlobResult, GetNewBlockResult, GetNewBlockTemplateWithCoinbasesRequest, GetNewBlockWithCoinbasesRequest, GetPeersRequest, GetShardKeyRequest, GetShardKeyResponse, GetSideChainUtxosRequest, GetTemplateRegistrationsRequest, HeightRequest, ListConnectedPeersResponse, ListHeadersRequest, MempoolStatsResponse, NetworkStatusResponse, NewBlockCoinbase, NewBlockTemplate, NewBlockTemplateRequest, NewBlockTemplateResponse, NodeIdentity, PowAlgo, SearchKernelsRequest, SearchUtxosRequest, SoftwareUpdate, StringValue, SubmitBlockResponse, SubmitTransactionRequest, SubmitTransactionResponse, SyncInfoResponse, SyncProgressResponse, TipInfoResponse, TransactionStateRequest, TransactionStateResponse}; +use minotari_app_grpc::tari_rpc::{Block, BlockBlobRequest, BlockGroupRequest, BlockGroupResponse, BlockHeaderResponse, BlockHeight, 
BlockTimingResponse, ConsensusConstants, Empty, FetchMatchingUtxosRequest, GetActiveValidatorNodesRequest, GetBlocksRequest, GetHeaderByHashRequest, GetMempoolTransactionsRequest, GetNewBlockBlobResult, GetNewBlockResult, GetNewBlockTemplateWithCoinbasesRequest, GetNewBlockWithCoinbasesRequest, GetPeersRequest, GetShardKeyRequest, GetShardKeyResponse, GetSideChainUtxosRequest, GetTemplateRegistrationsRequest, HeightRequest, HistoricalBlock, ListConnectedPeersResponse, ListHeadersRequest, MempoolStatsResponse, NetworkStatusResponse, NewBlockCoinbase, NewBlockTemplate, NewBlockTemplateRequest, NewBlockTemplateResponse, NodeIdentity, PowAlgo, SearchKernelsRequest, SearchUtxosRequest, SoftwareUpdate, StringValue, SubmitBlockResponse, SubmitTransactionRequest, SubmitTransactionResponse, SyncInfoResponse, SyncProgressResponse, TipInfoResponse, TransactionStateRequest, TransactionStateResponse, ValueAtHeightResponse}; use minotari_app_grpc::tari_rpc::base_node_client::BaseNodeClient; -use minotari_app_grpc::tari_rpc::pow_algo::PowAlgos; use minotari_node_grpc_client::BaseNodeGrpcClient; -use tari_common_types::tari_address::TariAddress; use tokio::sync::Mutex; -use tonic::{Request, Response, Status}; +use tonic::{Request, Response, Status, Streaming}; use crate::server::grpc::error::{Error, TonicError}; const LIST_HEADERS_PAGE_SIZE: usize = 10; +const GET_BLOCKS_PAGE_SIZE: usize = 10; +const GET_TOKENS_IN_CIRCULATION_PAGE_SIZE: usize = 1_000; + +const GET_DIFFICULTY_PAGE_SIZE: usize = 1_000; + +macro_rules! proxy_simple_result { + ($self:ident, $call:ident, $request:ident) => { + match $self.client.lock().await.$call($request.into_inner()).await { + Ok(resp) => Ok(resp), + Err(error) => { + error!("Error while calling {:?} on base node: {:?}", stringify!($call), error); + Err(error) + } + } + }; +} + +macro_rules! 
proxy_stream_result { + ($self:ident, $call:ident, $request:ident, $page_size:ident) => { + TariBaseNodeGrpc::streaming_response(String::from(stringify!($call)), + $self.client.lock().await.$call($request.into_inner()).await, + $page_size, + ).await + }; + + ($self:ident, $call:ident, $request:ident, $page_size:expr) => { + TariBaseNodeGrpc::streaming_response(String::from(stringify!($call)), + $self.client.lock().await.$call($request.into_inner()).await, + $page_size, + ).await + }; +} pub struct TariBaseNodeGrpc { + // TODO: check if 1 shared client is enough or we need a pool of clients to operate faster client: Arc>>, } @@ -31,83 +62,23 @@ impl TariBaseNodeGrpc { Ok(Self { client: Arc::new(Mutex::new(client)) }) } -} - -#[tonic::async_trait] -impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc { - type FetchMatchingUtxosStream = mpsc::Receiver>; - type GetActiveValidatorNodesStream = mpsc::Receiver>; - type GetBlocksStream = mpsc::Receiver>; - type GetMempoolTransactionsStream = mpsc::Receiver>; - type GetNetworkDifficultyStream = mpsc::Receiver>; - type GetPeersStream = mpsc::Receiver>; - type GetSideChainUtxosStream = mpsc::Receiver>; - type GetTemplateRegistrationsStream = mpsc::Receiver>; - type GetTokensInCirculationStream = mpsc::Receiver>; - type ListHeadersStream = mpsc::Receiver>; - type SearchKernelsStream = mpsc::Receiver>; - type SearchUtxosStream = mpsc::Receiver>; - - async fn get_new_block_template(&self, request: Request) -> Result, Status> { - info!("get_new_block_template called!"); - self.client.lock().await.get_new_block_template(request.into_inner()).await - } - async fn get_new_block(&self, request: Request) -> Result, Status> { - info!("get_new_block called!"); - // TODO: remove extra logic and only proxy, move logic to the new p2pool grpc handler - // let origin_block_template = request.into_inner(); - // let origin = origin_block_template.clone(); - // if let Some(header) = origin_block_template.header { - // if let 
Some(body) = origin_block_template.body { - // if let Some(pow) = header.pow { - // - // // simply proxy the request if pow algo is not supported - // if pow.pow_algo != PowAlgos::Sha3x as u64 { - // warn!("Only SHA3x PoW supported!"); - // return self.client.lock().await.get_new_block(origin).await; - // } - // - // // requesting new block template which includes all shares - // let mut new_block_template_req = GetNewBlockTemplateWithCoinbasesRequest::default(); - // let mut new_pow_algo = PowAlgo::default(); - // new_pow_algo.set_pow_algo(PowAlgos::Sha3x); - // new_block_template_req.algo = Some(new_pow_algo); - // new_block_template_req.coinbases = vec![ - // NewBlockCoinbase { - // address: TariAddress::from_hex("30a815df7b8d7f653ce3252f08a21d570b1ac44958cb4d7af0e0ef124f89b11943") - // .unwrap() - // .to_hex(), - // value: 1, - // stealth_payment: false, - // revealed_value_proof: true, - // coinbase_extra: Vec::new(), - // }, - // ]; - // if let Ok(response) = self.client.lock().await - // .get_new_block_template_with_coinbases(new_block_template_req).await {} - // } - // } - // } - // todo!() - self.client.lock().await.get_new_block(request.into_inner()).await - } - - async fn submit_block(&self, request: Request) -> Result, Status> { - info!("submit_block called!"); - self.client.lock().await.submit_block(request.into_inner()).await - } - - async fn list_headers(&self, request: Request) -> Result, Status> { - match self.client.lock().await.list_headers(request.into_inner()).await { + async fn streaming_response( + call: String, + result: Result>, Status>, + page_size: usize) + -> Result>>, Status> + where R: Send + Sync + 'static, + { + match result { Ok(response) => { - let (mut tx, rx) = mpsc::channel(LIST_HEADERS_PAGE_SIZE); + let (mut tx, rx) = mpsc::channel(page_size); tokio::spawn(async move { let mut stream = response.into_inner(); tokio::spawn(async move { while let Ok(Some(next_message)) = stream.message().await { if let Err(e) = 
tx.send(Ok(next_message)).await { - error!("failed to send 'list_headers' response message: {e}"); + error!("failed to send '{call}' response message: {e}"); } } }); @@ -117,132 +88,208 @@ impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc { Err(status) => Err(status) } } +} +#[tonic::async_trait] +impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc { + type ListHeadersStream = mpsc::Receiver>; + async fn list_headers(&self, request: Request) -> Result, Status> { + proxy_stream_result!(self, list_headers, request, LIST_HEADERS_PAGE_SIZE) + } async fn get_header_by_hash(&self, request: Request) -> Result, Status> { - todo!() + proxy_simple_result!(self, get_header_by_hash, request) } - + type GetBlocksStream = mpsc::Receiver>; async fn get_blocks(&self, request: Request) -> Result, Status> { - todo!() + proxy_stream_result!(self, get_blocks, request, GET_BLOCKS_PAGE_SIZE) } - async fn get_block_timing(&self, request: Request) -> Result, Status> { - todo!() + proxy_simple_result!(self, get_block_timing, request) } - async fn get_constants(&self, request: Request) -> Result, Status> { - todo!() + proxy_simple_result!(self, get_constants, request) } - async fn get_block_size(&self, request: Request) -> Result, Status> { - todo!() + proxy_simple_result!(self, get_block_size, request) } - async fn get_block_fees(&self, request: Request) -> Result, Status> { - todo!() + proxy_simple_result!(self, get_block_fees, request) } - async fn get_version(&self, request: Request) -> Result, Status> { - todo!() + proxy_simple_result!(self, get_version, request) } - async fn check_for_updates(&self, request: Request) -> Result, Status> { - todo!() + proxy_simple_result!(self, check_for_updates, request) } + type GetTokensInCirculationStream = mpsc::Receiver>; async fn get_tokens_in_circulation(&self, request: Request) -> Result, Status> { - todo!() + proxy_stream_result!(self, get_tokens_in_circulation, request, GET_TOKENS_IN_CIRCULATION_PAGE_SIZE) } + type 
GetNetworkDifficultyStream = mpsc::Receiver>; + async fn get_network_difficulty(&self, request: Request) -> Result, Status> { - todo!() + proxy_stream_result!(self, get_network_difficulty, request, GET_DIFFICULTY_PAGE_SIZE) + } + + async fn get_new_block_template(&self, request: Request) -> Result, Status> { + proxy_simple_result!(self, get_new_block_template, request) + } + + async fn get_new_block(&self, request: Request) -> Result, Status> { + proxy_simple_result!(self, get_new_block, request) } async fn get_new_block_with_coinbases(&self, request: Request) -> Result, Status> { - todo!() + proxy_simple_result!(self, get_new_block_with_coinbases, request) } async fn get_new_block_template_with_coinbases(&self, request: Request) -> Result, Status> { - todo!() + proxy_simple_result!(self, get_new_block_template_with_coinbases, request) } async fn get_new_block_blob(&self, request: Request) -> Result, Status> { - todo!() + proxy_simple_result!(self, get_new_block_blob, request) + } + + async fn submit_block(&self, request: Request) -> Result, Status> { + // TODO: Revisit this part whether this check needed as faster to send to node and see error, + // TODO: than checking for difficulty, bigger the chance to get accepted! + // TODO: Maybe worth checking for the last difficulty on network and if current block's difficulty is bigger + // TODO: than the last difficulty on network, then try to send block. 
+ + // TODO: add logic to check new block before sending to upstream blockchain whether difficulty matches + // TODO: checking current network difficulty on base node + // let request_block_header = request.get_ref().clone() + // .header.ok_or_else(|| Status::internal("height not present"))?; + // let mut network_difficulty_stream = self.client.lock().await.get_network_difficulty(HeightRequest { + // from_tip: 0, + // start_height: request_block_header.height - 1, + // end_height: request_block_header.height, + // }).await?.into_inner(); + // let mut network_difficulty_matches = false; + // while let Ok(Some(diff_resp)) = network_difficulty_stream.message().await { + // if request_block_header.height == diff_resp.height + 1 { // TODO: compare block.difficulty with diff_resp.difficulty + // network_difficulty_matches = true; + // } + // } + // + // if network_difficulty_matches { // TODO: !network_difficulty_matches + // info!("Difficulties do not match (block <-> network)!"); + // // TODO: simply append new block if valid to sharechain showing that it is not accepted by base node + // // TODO: but still need to present on sharechain + // return Ok(Response::new(SubmitBlockResponse { + // block_hash: vec![], // TODO: get from sharechain + // })); + // } + + match proxy_simple_result!(self, submit_block, request) { + Ok(resp) => { + info!("Block sent successfully!"); + // TODO: append new block if valid to sharechain with a flag or something that shows + // TODO: that this block is accepted, so paid out + Ok(resp) + } + Err(_) => { + // TODO: simply append new block if valid to sharechain showing that it is not accepted by base node + // TODO: but still need to present on sharechain + Ok(Response::new(SubmitBlockResponse { + block_hash: vec![], // TODO: get from sharechain + })) + } + } } async fn submit_block_blob(&self, request: Request) -> Result, Status> { - todo!() + proxy_simple_result!(self, submit_block_blob, request) } async fn submit_transaction(&self, 
request: Request) -> Result, Status> { - todo!() + proxy_simple_result!(self, submit_transaction, request) } async fn get_sync_info(&self, request: Request) -> Result, Status> { - todo!() + proxy_simple_result!(self, get_sync_info, request) } async fn get_sync_progress(&self, request: Request) -> Result, Status> { - todo!() + proxy_simple_result!(self, get_sync_progress, request) } async fn get_tip_info(&self, request: Request) -> Result, Status> { - self.client.lock().await.get_tip_info(request.into_inner()).await + proxy_simple_result!(self, get_tip_info, request) } + type SearchKernelsStream = mpsc::Receiver>; + async fn search_kernels(&self, request: Request) -> Result, Status> { - todo!() + proxy_stream_result!(self, search_kernels, request, GET_BLOCKS_PAGE_SIZE) } + type SearchUtxosStream = mpsc::Receiver>; + async fn search_utxos(&self, request: Request) -> Result, Status> { - todo!() + proxy_stream_result!(self, search_utxos, request, GET_BLOCKS_PAGE_SIZE) } + type FetchMatchingUtxosStream = mpsc::Receiver>; + async fn fetch_matching_utxos(&self, request: Request) -> Result, Status> { - todo!() + proxy_stream_result!(self, fetch_matching_utxos, request, GET_BLOCKS_PAGE_SIZE) } + type GetPeersStream = mpsc::Receiver>; + async fn get_peers(&self, request: Request) -> Result, Status> { - todo!() + proxy_stream_result!(self, get_peers, request, GET_BLOCKS_PAGE_SIZE) } + type GetMempoolTransactionsStream = mpsc::Receiver>; + async fn get_mempool_transactions(&self, request: Request) -> Result, Status> { - todo!() + proxy_stream_result!(self, get_mempool_transactions, request, GET_BLOCKS_PAGE_SIZE) } async fn transaction_state(&self, request: Request) -> Result, Status> { - todo!() + proxy_simple_result!(self, transaction_state, request) } async fn identify(&self, request: Request) -> Result, Status> { - todo!() + proxy_simple_result!(self, identify, request) } async fn get_network_status(&self, request: Request) -> Result, Status> { - todo!() + 
proxy_simple_result!(self, get_network_status, request) } async fn list_connected_peers(&self, request: Request) -> Result, Status> { - todo!() + proxy_simple_result!(self, list_connected_peers, request) } async fn get_mempool_stats(&self, request: Request) -> Result, Status> { - todo!() + proxy_simple_result!(self, get_mempool_stats, request) } + type GetActiveValidatorNodesStream = mpsc::Receiver>; + async fn get_active_validator_nodes(&self, request: Request) -> Result, Status> { - todo!() + proxy_stream_result!(self, get_active_validator_nodes, request, 1000) } async fn get_shard_key(&self, request: Request) -> Result, Status> { - todo!() + proxy_simple_result!(self, get_shard_key, request) } + type GetTemplateRegistrationsStream = mpsc::Receiver>; + async fn get_template_registrations(&self, request: Request) -> Result, Status> { - todo!() + proxy_stream_result!(self, get_template_registrations, request, 10) } + type GetSideChainUtxosStream = mpsc::Receiver>; + async fn get_side_chain_utxos(&self, request: Request) -> Result, Status> { - todo!() + proxy_stream_result!(self, get_side_chain_utxos, request, 10) } } \ No newline at end of file diff --git a/crates/sha_p2pool/src/server/grpc/p2pool.rs b/crates/sha_p2pool/src/server/grpc/p2pool.rs index 30f06483..c761887b 100644 --- a/crates/sha_p2pool/src/server/grpc/p2pool.rs +++ b/crates/sha_p2pool/src/server/grpc/p2pool.rs @@ -29,23 +29,31 @@ impl ShaP2PoolGrpc { // TODO: complete implementation to find the right shares async fn generate_shares(&self, request: &GetNewBlockRequest, reward: u64) -> Vec { - let mut miners = HashMap::::new(); // target wallet address -> hash rate + let mut result = vec![]; + let mut miners = HashMap::::new(); // target wallet address -> hash rate // TODO: remove, only for testing now, get miners from outside of this module using P2P network/sharechain - miners.insert(request.wallet_payment_address.clone(), 100); - 
miners.insert("6ee38cf177a8fbf818d93ba5bbca6078efd88cef5c57927ce65dd0716ca3ee655a".to_string(), 100); + miners.insert(request.wallet_payment_address.clone(), 100.0); + miners.insert("260304a3699f8911c3d949b2eb0394595c8041a36fa13320fa2395b4090ae573a430ac21c5d087ecfcd1922e6ef58cd3f2a1eef2fcbd17e2374a09e0c68036fe6c5f91".to_string(), 100.0); // calculate full hash rate and shares - let full_hash_rate: u64 = miners.iter() - .map(|(_, rate)| rate) - .sum(); + let full_hash_rate: f64 = miners.values().sum(); miners.iter() .map(|(addr, rate)| (addr, rate / full_hash_rate)) + .filter(|(_, share)| *share > 0.0) .for_each(|(addr, share)| { - info!("{addr} -> {share:?}"); + let curr_reward = ((reward as f64) * share) as u64; + info!("{addr} -> SHARE: {share:?}, REWARD: {curr_reward:?}"); + result.push(NewBlockCoinbase { + address: addr.clone(), + value: curr_reward, + stealth_payment: false, + revealed_value_proof: true, + coinbase_extra: vec![], + }); }); - todo!() + result } } @@ -69,13 +77,19 @@ impl ShaP2Pool for ShaP2PoolGrpc { let reward = miner_data.reward; // request new block template with shares as coinbases - let mut new_block_template_req = GetNewBlockTemplateWithCoinbasesRequest::default(); - new_block_template_req.algo = Some(pow_algo); - new_block_template_req.coinbases = self.generate_shares(&template_request, reward).await; - let response = self.client.lock().await - .get_new_block_template_with_coinbases(new_block_template_req).await?.into_inner(); - let miner_data = response.clone().miner_data.ok_or_else(|| Status::internal("missing miner data"))?; - let target_difficulty = miner_data.target_difficulty; + let shares = self.generate_shares(&template_request, reward).await; + let share_count = shares.len(); + let mut response = self.client.lock().await + .get_new_block_template_with_coinbases(GetNewBlockTemplateWithCoinbasesRequest { + algo: Some(pow_algo), + max_weight: 0, + coinbases: shares, + }).await?.into_inner(); + + // set target difficulty + let mut 
miner_data = response.clone().miner_data.ok_or_else(|| Status::internal("missing miner data"))?; + // target difficulty is always: `original difficulty` / `number of shares` + let target_difficulty = miner_data.target_difficulty / share_count as u64; Ok(Response::new(GetNewBlockResponse { block: Some(response), diff --git a/crates/sha_p2pool/src/server/server.rs b/crates/sha_p2pool/src/server/server.rs index 64276985..79748b11 100644 --- a/crates/sha_p2pool/src/server/server.rs +++ b/crates/sha_p2pool/src/server/server.rs @@ -67,7 +67,7 @@ impl Server { pub async fn start_grpc( base_node_service: BaseNodeServer, p2pool_service: ShaP2PoolServer, - grpc_port: u64, + grpc_port: u16, ) -> Result<(), Error> { info!("Starting gRPC server on port {}!", &grpc_port); @@ -106,7 +106,12 @@ impl Server { let p2pool_grpc_service = self.p2pool_grpc_service.clone(); let grpc_port = self.config.grpc_port; tokio::spawn(async move { - Self::start_grpc(base_node_grpc_service, p2pool_grpc_service, grpc_port).await; + match Self::start_grpc(base_node_grpc_service, p2pool_grpc_service, grpc_port).await { + Ok(_) => {} + Err(error) => { + error!("GRPC Server encountered an error: {:?}", error); + } + } }); // main loop @@ -122,12 +127,6 @@ impl Server { for (peer, addr) in peers { info!("Discovered new peer {} at {}", peer, addr); self.swarm.behaviour_mut().gossipsub.add_explicit_peer(&peer); - match self.swarm.dial(addr) { - Ok(_) => { - info!("Dial success!"); - }, - Err(_) => {}, - } } }, mdns::Event::Expired(peers) => { From 7d21c943c83f8727c29ca9382e852f8a32e4f523 Mon Sep 17 00:00:00 2001 From: richardb Date: Mon, 10 Jun 2024 11:55:46 +0200 Subject: [PATCH 05/43] handle cases when a submitted block's difficulty is lower than latest network's one, so do not try to send to base node --- crates/sha_p2pool/Cargo.toml | 1 + .../sha_p2pool/src/server/grpc/base_node.rs | 64 ++++++++++--------- crates/sha_p2pool/src/server/grpc/p2pool.rs | 11 ++-- 3 files changed, 40 insertions(+), 36 
deletions(-) diff --git a/crates/sha_p2pool/Cargo.toml b/crates/sha_p2pool/Cargo.toml index d4f68b6d..418fcb8e 100644 --- a/crates/sha_p2pool/Cargo.toml +++ b/crates/sha_p2pool/Cargo.toml @@ -12,6 +12,7 @@ edition = "2021" minotari_app_grpc = { git = "https://github.com/ksrichard/tari.git", branch = "p2pool" } minotari_node_grpc_client = { git = "https://github.com/ksrichard/tari.git", branch = "p2pool" } tari_common_types = { git = "https://github.com/ksrichard/tari.git", branch = "p2pool" } +tari_core = { git = "https://github.com/ksrichard/tari.git", branch = "p2pool" } libp2p = { version = "0.53.2", features = [ "dns", diff --git a/crates/sha_p2pool/src/server/grpc/base_node.rs b/crates/sha_p2pool/src/server/grpc/base_node.rs index fbcebeb5..aa44d1c3 100644 --- a/crates/sha_p2pool/src/server/grpc/base_node.rs +++ b/crates/sha_p2pool/src/server/grpc/base_node.rs @@ -5,12 +5,15 @@ use std::sync::Arc; use libp2p::futures::channel::mpsc; use libp2p::futures::SinkExt; use log::{error, info, warn}; +use minotari_app_grpc::conversions::*; use minotari_app_grpc::tari_rpc; use minotari_app_grpc::tari_rpc::{Block, BlockBlobRequest, BlockGroupRequest, BlockGroupResponse, BlockHeaderResponse, BlockHeight, BlockTimingResponse, ConsensusConstants, Empty, FetchMatchingUtxosRequest, GetActiveValidatorNodesRequest, GetBlocksRequest, GetHeaderByHashRequest, GetMempoolTransactionsRequest, GetNewBlockBlobResult, GetNewBlockResult, GetNewBlockTemplateWithCoinbasesRequest, GetNewBlockWithCoinbasesRequest, GetPeersRequest, GetShardKeyRequest, GetShardKeyResponse, GetSideChainUtxosRequest, GetTemplateRegistrationsRequest, HeightRequest, HistoricalBlock, ListConnectedPeersResponse, ListHeadersRequest, MempoolStatsResponse, NetworkStatusResponse, NewBlockCoinbase, NewBlockTemplate, NewBlockTemplateRequest, NewBlockTemplateResponse, NodeIdentity, PowAlgo, SearchKernelsRequest, SearchUtxosRequest, SoftwareUpdate, StringValue, SubmitBlockResponse, SubmitTransactionRequest, 
SubmitTransactionResponse, SyncInfoResponse, SyncProgressResponse, TipInfoResponse, TransactionStateRequest, TransactionStateResponse, ValueAtHeightResponse}; use minotari_app_grpc::tari_rpc::base_node_client::BaseNodeClient; use minotari_node_grpc_client::BaseNodeGrpcClient; +use tari_core::blocks; +use tari_core::proof_of_work::sha3x_difficulty; use tokio::sync::Mutex; -use tonic::{Request, Response, Status, Streaming}; +use tonic::{IntoRequest, Request, Response, Status, Streaming}; use crate::server::grpc::error::{Error, TonicError}; @@ -154,39 +157,38 @@ impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc { } async fn submit_block(&self, request: Request) -> Result, Status> { - // TODO: Revisit this part whether this check needed as faster to send to node and see error, - // TODO: than checking for difficulty, bigger the chance to get accepted! - // TODO: Maybe worth checking for the last difficulty on network and if current block's difficulty is bigger - // TODO: than the last difficulty on network, then try to send block. 
- - // TODO: add logic to check new block before sending to upstream blockchain whether difficulty matches - // TODO: checking current network difficulty on base node - // let request_block_header = request.get_ref().clone() - // .header.ok_or_else(|| Status::internal("height not present"))?; - // let mut network_difficulty_stream = self.client.lock().await.get_network_difficulty(HeightRequest { - // from_tip: 0, - // start_height: request_block_header.height - 1, - // end_height: request_block_header.height, - // }).await?.into_inner(); - // let mut network_difficulty_matches = false; - // while let Ok(Some(diff_resp)) = network_difficulty_stream.message().await { - // if request_block_header.height == diff_resp.height + 1 { // TODO: compare block.difficulty with diff_resp.difficulty - // network_difficulty_matches = true; - // } - // } - // - // if network_difficulty_matches { // TODO: !network_difficulty_matches - // info!("Difficulties do not match (block <-> network)!"); - // // TODO: simply append new block if valid to sharechain showing that it is not accepted by base node - // // TODO: but still need to present on sharechain - // return Ok(Response::new(SubmitBlockResponse { - // block_hash: vec![], // TODO: get from sharechain - // })); - // } + // Check block's difficulty compared to the latest network one to increase the probability + // to get the block accepted (and also a block with lower difficulty than latest one is invalid anyway). 
+ let grpc_block = request.into_inner(); + let block = blocks::Block::try_from(grpc_block.clone()) + .map_err(|e| { Status::internal(e) })?; + let request_block_difficulty = sha3x_difficulty(&block.header) + .map_err(|error| { Status::internal(error.to_string()) })?; + let mut network_difficulty_stream = self.client.lock().await.get_network_difficulty(HeightRequest { + from_tip: 0, + start_height: block.header.height - 1, + end_height: block.header.height, + }).await?.into_inner(); + let mut network_difficulty_matches = false; + while let Ok(Some(diff_resp)) = network_difficulty_stream.message().await { + if block.header.height == diff_resp.height + 1 + && request_block_difficulty.as_u64() > diff_resp.difficulty { + network_difficulty_matches = true; + } + } + + if !network_difficulty_matches { + // TODO: simply append new block if valid to sharechain showing that it is not accepted by base node + // TODO: but still need to present on sharechain + return Ok(Response::new(SubmitBlockResponse { + block_hash: vec![], // TODO: get from sharechain + })); + } + let request = grpc_block.into_request(); match proxy_simple_result!(self, submit_block, request) { Ok(resp) => { - info!("Block sent successfully!"); + info!("Block found and sent successfully! 
(rewards will be paid out)"); // TODO: append new block if valid to sharechain with a flag or something that shows // TODO: that this block is accepted, so paid out Ok(resp) diff --git a/crates/sha_p2pool/src/server/grpc/p2pool.rs b/crates/sha_p2pool/src/server/grpc/p2pool.rs index c761887b..d1e68898 100644 --- a/crates/sha_p2pool/src/server/grpc/p2pool.rs +++ b/crates/sha_p2pool/src/server/grpc/p2pool.rs @@ -1,7 +1,6 @@ use std::collections::HashMap; use std::sync::Arc; -use log::info; use minotari_app_grpc::tari_rpc::{GetNewBlockRequest, GetNewBlockResponse, GetNewBlockTemplateWithCoinbasesRequest, NewBlockCoinbase, NewBlockTemplateRequest, PowAlgo}; use minotari_app_grpc::tari_rpc::base_node_client::BaseNodeClient; use minotari_app_grpc::tari_rpc::pow_algo::PowAlgos; @@ -43,7 +42,8 @@ impl ShaP2PoolGrpc { .filter(|(_, share)| *share > 0.0) .for_each(|(addr, share)| { let curr_reward = ((reward as f64) * share) as u64; - info!("{addr} -> SHARE: {share:?}, REWARD: {curr_reward:?}"); + // TODO: check if still needed + // info!("{addr} -> SHARE: {share:?}, REWARD: {curr_reward:?}"); result.push(NewBlockCoinbase { address: addr.clone(), value: curr_reward, @@ -79,7 +79,7 @@ impl ShaP2Pool for ShaP2PoolGrpc { // request new block template with shares as coinbases let shares = self.generate_shares(&template_request, reward).await; let share_count = shares.len(); - let mut response = self.client.lock().await + let response = self.client.lock().await .get_new_block_template_with_coinbases(GetNewBlockTemplateWithCoinbasesRequest { algo: Some(pow_algo), max_weight: 0, @@ -87,9 +87,10 @@ impl ShaP2Pool for ShaP2PoolGrpc { }).await?.into_inner(); // set target difficulty - let mut miner_data = response.clone().miner_data.ok_or_else(|| Status::internal("missing miner data"))?; + let miner_data = response.clone().miner_data.ok_or_else(|| Status::internal("missing miner data"))?; // target difficulty is always: `original difficulty` / `number of shares` - let target_difficulty 
= miner_data.target_difficulty / share_count as u64; + // let target_difficulty = miner_data.target_difficulty / share_count as u64; // TODO: uncomment this + let target_difficulty = miner_data.target_difficulty / (share_count as u64 * 10); // TODO: remove this Ok(Response::new(GetNewBlockResponse { block: Some(response), From a197e8b2d63c5afa684f144224e8d73f43de2b19 Mon Sep 17 00:00:00 2001 From: richardb Date: Mon, 10 Jun 2024 15:05:12 +0200 Subject: [PATCH 06/43] share chain implementation in progress --- crates/sha_p2pool/Cargo.toml | 7 ++ crates/sha_p2pool/build.rs | 13 +++ crates/sha_p2pool/proto/block.proto | 88 +++++++++++++++++++ crates/sha_p2pool/proto/sharechain.proto | 23 +++++ crates/sha_p2pool/src/main.rs | 1 + crates/sha_p2pool/src/server/server.rs | 1 + crates/sha_p2pool/src/sharechain/grpc/mod.rs | 33 +++++++ crates/sha_p2pool/src/sharechain/in_memory.rs | 16 ++++ crates/sha_p2pool/src/sharechain/mod.rs | 26 ++++++ 9 files changed, 208 insertions(+) create mode 100644 crates/sha_p2pool/build.rs create mode 100644 crates/sha_p2pool/proto/block.proto create mode 100644 crates/sha_p2pool/proto/sharechain.proto create mode 100644 crates/sha_p2pool/src/sharechain/grpc/mod.rs create mode 100644 crates/sha_p2pool/src/sharechain/in_memory.rs create mode 100644 crates/sha_p2pool/src/sharechain/mod.rs diff --git a/crates/sha_p2pool/Cargo.toml b/crates/sha_p2pool/Cargo.toml index 418fcb8e..672b56ed 100644 --- a/crates/sha_p2pool/Cargo.toml +++ b/crates/sha_p2pool/Cargo.toml @@ -32,7 +32,14 @@ thiserror = "1.0" serde = "1.0.203" anyhow = "1.0" log = "0.4.21" +prost = "0.11.9" +prost-types = "0.11.9" env_logger = "0.11.3" tonic = { workspace = true } +async-trait = "0.1.80" +libp2p-grpc-rs = "0.1.2" + +[build-dependencies] +tonic-build = "0.8.4" diff --git a/crates/sha_p2pool/build.rs b/crates/sha_p2pool/build.rs new file mode 100644 index 00000000..f1afa221 --- /dev/null +++ b/crates/sha_p2pool/build.rs @@ -0,0 +1,13 @@ +// Copyright 2022 The Tari Project +// 
SPDX-License-Identifier: BSD-3-Clause + +fn main() -> Result<(), Box> { + tonic_build::configure().build_client(true).build_server(true).compile( + &[ + "proto/sharechain.proto", + ], + &["proto"], + )?; + + Ok(()) +} \ No newline at end of file diff --git a/crates/sha_p2pool/proto/block.proto b/crates/sha_p2pool/proto/block.proto new file mode 100644 index 00000000..09cd2150 --- /dev/null +++ b/crates/sha_p2pool/proto/block.proto @@ -0,0 +1,88 @@ +syntax = "proto3"; + +package tari.p2pool.sharechain.rpc; + +// The BlockHeader contains all the metadata for the block, including proof of work, a link to the previous block +// and the transaction kernels. +message TariBlockHeader { + // The hash of the block + bytes hash = 1; + // Version of the block + uint32 version = 2; + // Height of this block since the genesis block (height 0) + uint64 height = 3; + // Hash of the block previous to this in the chain. + bytes prev_hash = 4; + // Timestamp at which the block was built. + uint64 timestamp = 5; + // This is the UTXO merkle root of the outputs + // This is calculated as Hash (txo MMR root || roaring bitmap hash of UTXO indices) + bytes output_mr = 6; + // This is the MMR root of the kernels + bytes kernel_mr = 8; + // This is the Merkle root of the inputs in this block + bytes input_mr = 9; + // Total accumulated sum of kernel offsets since genesis block. We can derive the kernel offset sum for *this* + // block from the total kernel offset of the previous block header. + bytes total_kernel_offset = 10; + // Nonce increment used to mine this block. + uint64 nonce = 11; + // Proof of work metadata + ProofOfWork pow = 12; + // Kernel MMR size + uint64 kernel_mmr_size = 13; + // Output MMR size + uint64 output_mmr_size = 14; + // Sum of script offsets for all kernels in this block. 
+ bytes total_script_offset = 15; + // Merkle root of validator nodes + bytes validator_node_mr = 16; + // Validator size + uint64 validator_node_size = 17; +} + +// The proof of work data structure that is included in the block header. +message ProofOfWork { + // The algorithm used to mine this block + // 0 = Monero + // 1 = Sha3X + uint64 pow_algo = 1; + // Supplemental proof of work data. For example for Sha3x, this would be empty (only the block header is + // required), but for Monero merge mining we need the Monero block header and RandomX seed hash. + bytes pow_data = 4; +} + +//This is used to request the which pow algo should be used with the block template +message PowAlgo { + // The permitted pow algorithms + enum PowAlgos { + POW_ALGOS_RANDOMX = 0; // Accessible as `grpc::pow_algo::PowAlgos::Randomx` + POW_ALGOS_SHA3X = 1; // Accessible as `grpc::pow_algo::PowAlgos::Sha3x` + } + // The pow algo to use + PowAlgos pow_algo = 1; +} + + +// A Share chain block. Blocks are linked together into a blockchain. +message Block { + // The hash of the block + bytes hash = 1; + + // Hash of the block previous to this in the share chain. + bytes prev_hash = 2; + + // Height of this block since the genesis block (height 0) + uint64 height = 3; + + // The original header of the block on the main Tari network, we need this to validate original block + // and calculate current block's hash to do another round of validation. + TariBlockHeader original_block_header = 4; + + // All the miners working on this block, all of them included in the resulting block if mined and submitted t mai chain. 
+ repeated string miners = 5; + + // TODO: add other relevant fields +} + + diff --git a/crates/sha_p2pool/proto/sharechain.proto b/crates/sha_p2pool/proto/sharechain.proto new file mode 100644 index 00000000..42da9681 --- /dev/null +++ b/crates/sha_p2pool/proto/sharechain.proto @@ -0,0 +1,23 @@ +syntax = "proto3"; + +package tari.p2pool.sharechain.rpc; + +import "block.proto"; + +service ShareChain { + // Get tip height of share chain of the curren node. + rpc GetBlockHeightTip(GetBlockHeightTipRequest) returns(GetBlockHeightTipResponse); + + // Sync will return a stream of share chain blocks starting from `from_height`. + rpc Sync(SyncRequest) returns(stream Block); +} + +message SyncRequest { + uint64 from_height = 1; +} + +message GetBlockHeightTipRequest {} + +message GetBlockHeightTipResponse { + uint64 height = 1; +} \ No newline at end of file diff --git a/crates/sha_p2pool/src/main.rs b/crates/sha_p2pool/src/main.rs index 0b07b4a7..5f654201 100644 --- a/crates/sha_p2pool/src/main.rs +++ b/crates/sha_p2pool/src/main.rs @@ -1,4 +1,5 @@ mod server; +mod sharechain; #[tokio::main] async fn main() -> anyhow::Result<()> { diff --git a/crates/sha_p2pool/src/server/server.rs b/crates/sha_p2pool/src/server/server.rs index 79748b11..ec062bc0 100644 --- a/crates/sha_p2pool/src/server/server.rs +++ b/crates/sha_p2pool/src/server/server.rs @@ -52,6 +52,7 @@ pub struct Server { p2pool_grpc_service: ShaP2PoolServer, } +// TODO: add graceful shutdown impl Server { pub async fn new(config: config::Config) -> Result { let swarm = p2p::swarm(&config)?; diff --git a/crates/sha_p2pool/src/sharechain/grpc/mod.rs b/crates/sha_p2pool/src/sharechain/grpc/mod.rs new file mode 100644 index 00000000..f13840d9 --- /dev/null +++ b/crates/sha_p2pool/src/sharechain/grpc/mod.rs @@ -0,0 +1,33 @@ +use tonic::{Request, Response, Status}; + +use crate::sharechain::grpc::rpc::{GetBlockHeightTipRequest, GetBlockHeightTipResponse, SyncRequest}; +use 
crate::sharechain::grpc::rpc::share_chain_server::ShareChain as GrpcShareChain; +use crate::sharechain::ShareChain; + +pub mod rpc { + tonic::include_proto!("tari.p2pool.sharechain.rpc"); +} + +#[derive(Debug)] +pub struct ShareChainGrpc + where T: ShareChain + Send + Sync + 'static, +{ + blockchain: T, +} + +#[tonic::async_trait] +impl GrpcShareChain for ShareChainGrpc + where T: ShareChain + Send + Sync + 'static, +{ + async fn get_block_height_tip(&self, request: Request) -> Result, Status> { + Ok(Response::new(GetBlockHeightTipResponse { + height: self.blockchain.tip_height().await, + })) + } + + type SyncStream = (); + + async fn sync(&self, request: Request) -> Result, Status> { + todo!() + } +} \ No newline at end of file diff --git a/crates/sha_p2pool/src/sharechain/in_memory.rs b/crates/sha_p2pool/src/sharechain/in_memory.rs new file mode 100644 index 00000000..0d591583 --- /dev/null +++ b/crates/sha_p2pool/src/sharechain/in_memory.rs @@ -0,0 +1,16 @@ +use async_trait::async_trait; + +use crate::sharechain::{Block, ShareChain, ShareChainError}; + +pub struct InMemoryShareChain {} + +#[async_trait] +impl ShareChain for InMemoryShareChain { + async fn submit_block(&self, block: Block) -> ShareChainError<()> { + todo!() + } + + async fn tip_height(&self) -> ShareChainError { + todo!() + } +} \ No newline at end of file diff --git a/crates/sha_p2pool/src/sharechain/mod.rs b/crates/sha_p2pool/src/sharechain/mod.rs new file mode 100644 index 00000000..3371ad6a --- /dev/null +++ b/crates/sha_p2pool/src/sharechain/mod.rs @@ -0,0 +1,26 @@ +use async_trait::async_trait; +use tari_core::blocks::BlockHeader; +use thiserror::Error; + +mod grpc; +mod in_memory; + +pub struct Block { + original_header: BlockHeader, + +} + +#[derive(Error, Debug)] +pub enum Error { + #[error("Internal error: {0}")] + Internal(String), +} + +pub type ShareChainError = Result; + +#[async_trait] +pub trait ShareChain { + async fn submit_block(&self, block: Block) -> ShareChainError<()>; + + 
async fn tip_height(&self) -> ShareChainError; +} \ No newline at end of file From e07ca29efd7f50dc95b7bd0a83cec28ca72d1f40 Mon Sep 17 00:00:00 2001 From: richardb Date: Tue, 11 Jun 2024 15:19:03 +0200 Subject: [PATCH 07/43] refactor + node info publishing almost done via kademlia --- Cargo.toml | 1 + crates/sha_p2pool/Cargo.toml | 9 +- crates/sha_p2pool/proto/sharechain.proto | 9 - crates/sha_p2pool/src/main.rs | 5 +- crates/sha_p2pool/src/server/mod.rs | 2 +- crates/sha_p2pool/src/server/p2p.rs | 60 ----- crates/sha_p2pool/src/server/p2p/error.rs | 30 +++ crates/sha_p2pool/src/server/p2p/mod.rs | 6 + crates/sha_p2pool/src/server/p2p/p2p.rs | 212 ++++++++++++++++++ crates/sha_p2pool/src/server/server.rs | 80 ++----- crates/sha_p2pool/src/sharechain/grpc/mod.rs | 20 +- crates/sha_p2pool/src/sharechain/in_memory.rs | 19 +- crates/sha_p2pool/src/sharechain/mod.rs | 18 +- 13 files changed, 316 insertions(+), 155 deletions(-) delete mode 100644 crates/sha_p2pool/src/server/p2p.rs create mode 100644 crates/sha_p2pool/src/server/p2p/error.rs create mode 100644 crates/sha_p2pool/src/server/p2p/mod.rs create mode 100644 crates/sha_p2pool/src/server/p2p/p2p.rs diff --git a/Cargo.toml b/Cargo.toml index 6dc89a22..839a03e4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -7,3 +7,4 @@ members = [ [workspace.dependencies] tonic = "0.8.3" +tonic-build = "0.8.4" diff --git a/crates/sha_p2pool/Cargo.toml b/crates/sha_p2pool/Cargo.toml index 672b56ed..f2f4a009 100644 --- a/crates/sha_p2pool/Cargo.toml +++ b/crates/sha_p2pool/Cargo.toml @@ -25,7 +25,9 @@ libp2p = { version = "0.53.2", features = [ "yamux", "mdns", "gossipsub", - "kad" + "kad", + "request-response", + "json" ] } tokio = { version = "1.38.0", features = ["full"] } thiserror = "1.0" @@ -37,9 +39,10 @@ prost-types = "0.11.9" env_logger = "0.11.3" tonic = { workspace = true } async-trait = "0.1.80" -libp2p-grpc-rs = "0.1.2" +serde_cbor = "0.11.2" +rand = "0.8.5" [build-dependencies] -tonic-build = "0.8.4" +tonic-build = { 
workspace = true } diff --git a/crates/sha_p2pool/proto/sharechain.proto b/crates/sha_p2pool/proto/sharechain.proto index 42da9681..3636817d 100644 --- a/crates/sha_p2pool/proto/sharechain.proto +++ b/crates/sha_p2pool/proto/sharechain.proto @@ -5,19 +5,10 @@ package tari.p2pool.sharechain.rpc; import "block.proto"; service ShareChain { - // Get tip height of share chain of the curren node. - rpc GetBlockHeightTip(GetBlockHeightTipRequest) returns(GetBlockHeightTipResponse); - // Sync will return a stream of share chain blocks starting from `from_height`. rpc Sync(SyncRequest) returns(stream Block); } message SyncRequest { uint64 from_height = 1; -} - -message GetBlockHeightTipRequest {} - -message GetBlockHeightTipResponse { - uint64 height = 1; } \ No newline at end of file diff --git a/crates/sha_p2pool/src/main.rs b/crates/sha_p2pool/src/main.rs index 5f654201..6ad88f82 100644 --- a/crates/sha_p2pool/src/main.rs +++ b/crates/sha_p2pool/src/main.rs @@ -1,3 +1,5 @@ +use crate::sharechain::in_memory::InMemoryShareChain; + mod server; mod sharechain; @@ -5,7 +7,8 @@ mod sharechain; async fn main() -> anyhow::Result<()> { env_logger::init(); let config = server::Config::builder().build(); - let mut server = server::Server::new(config).await?; + let share_chain = InMemoryShareChain::new(); + let mut server = server::Server::new(config, share_chain).await?; server.start().await?; Ok(()) } diff --git a/crates/sha_p2pool/src/server/mod.rs b/crates/sha_p2pool/src/server/mod.rs index dd175871..355266da 100644 --- a/crates/sha_p2pool/src/server/mod.rs +++ b/crates/sha_p2pool/src/server/mod.rs @@ -3,6 +3,6 @@ pub use server::*; mod config; mod server; -mod p2p; pub mod grpc; +mod p2p; diff --git a/crates/sha_p2pool/src/server/p2p.rs b/crates/sha_p2pool/src/server/p2p.rs deleted file mode 100644 index c2ec4863..00000000 --- a/crates/sha_p2pool/src/server/p2p.rs +++ /dev/null @@ -1,60 +0,0 @@ -use std::hash::{DefaultHasher, Hash, Hasher}; -use std::time::Duration; - -use 
libp2p::{gossipsub, mdns, noise, Swarm, tcp, yamux}; -use libp2p::mdns::tokio::Tokio; -use libp2p::swarm::NetworkBehaviour; -use tokio::io; - -use crate::server::{config, Error, LibP2PError}; - -#[derive(NetworkBehaviour)] -pub struct ServerNetworkBehaviour { - pub mdns: mdns::Behaviour, - pub gossipsub: gossipsub::Behaviour, -} - -pub fn swarm(config: &config::Config) -> Result, Error> { - let swarm = libp2p::SwarmBuilder::with_new_identity() - .with_tokio() - .with_tcp( - tcp::Config::default(), - noise::Config::new, - yamux::Config::default, - ) - .map_err(|e| Error::LibP2P(LibP2PError::Noise(e)))? - .with_behaviour(move |key_pair| { - let message_id_fn = |message: &gossipsub::Message| { - let mut s = DefaultHasher::new(); - message.data.hash(&mut s); - gossipsub::MessageId::from(s.finish().to_string()) - }; - - let gossipsub_config = gossipsub::ConfigBuilder::default() - .heartbeat_interval(Duration::from_secs(10)) - .validation_mode(gossipsub::ValidationMode::Strict) - .message_id_fn(message_id_fn) - .build() - .map_err(|msg| io::Error::new(io::ErrorKind::Other, msg))?; // Temporary hack because `build` does not return a proper `std::error::Error`. - - let gossipsub = gossipsub::Behaviour::new( - gossipsub::MessageAuthenticity::Signed(key_pair.clone()), - gossipsub_config, - )?; - - Ok(ServerNetworkBehaviour { - gossipsub, - mdns: mdns::Behaviour::new( - mdns::Config::default(), - key_pair.public().to_peer_id(), - ) - .map_err(|e| Error::LibP2P(LibP2PError::IO(e)))?, - }) - }) - .map_err(|e| Error::LibP2P(LibP2PError::Behaviour(e.to_string())))? 
- .with_swarm_config(|c| c.with_idle_connection_timeout(config.idle_connection_timeout)) - .build(); - - Ok(swarm) -} - diff --git a/crates/sha_p2pool/src/server/p2p/error.rs b/crates/sha_p2pool/src/server/p2p/error.rs new file mode 100644 index 00000000..a8989eed --- /dev/null +++ b/crates/sha_p2pool/src/server/p2p/error.rs @@ -0,0 +1,30 @@ +use libp2p::{kad, multiaddr, noise, TransportError}; +use thiserror::Error; + +use crate::sharechain; + +#[derive(Error, Debug)] +pub enum Error { + #[error("LibP2P error: {0}")] + LibP2P(#[from] LibP2PError), + #[error("CBOR serialize error: {0}")] + Serialize(#[from] serde_cbor::Error), + #[error("Share chain error: {0}")] + ShareChain(#[from] sharechain::Error), +} + +#[derive(Error, Debug)] +pub enum LibP2PError { + #[error("Noise error: {0}")] + Noise(#[from] noise::Error), + #[error("Multi address parse error: {0}")] + MultiAddrParse(#[from] multiaddr::Error), + #[error("Transport error: {0}")] + Transport(#[from] TransportError), + #[error("I/O error: {0}")] + IO(#[from] std::io::Error), + #[error("Behaviour error: {0}")] + Behaviour(String), + #[error("Kademlia record store error: {0}")] + KadRecord(#[from] kad::store::Error), +} \ No newline at end of file diff --git a/crates/sha_p2pool/src/server/p2p/mod.rs b/crates/sha_p2pool/src/server/p2p/mod.rs new file mode 100644 index 00000000..bd8a5374 --- /dev/null +++ b/crates/sha_p2pool/src/server/p2p/mod.rs @@ -0,0 +1,6 @@ +pub use error::*; +pub use p2p::*; + +mod p2p; +mod error; + diff --git a/crates/sha_p2pool/src/server/p2p/p2p.rs b/crates/sha_p2pool/src/server/p2p/p2p.rs new file mode 100644 index 00000000..bbdb4c42 --- /dev/null +++ b/crates/sha_p2pool/src/server/p2p/p2p.rs @@ -0,0 +1,212 @@ +use std::hash::{DefaultHasher, Hash, Hasher}; +use std::sync::Arc; +use std::time::Duration; + +use libp2p::{gossipsub, kad, mdns, noise, PeerId, Swarm, tcp, yamux}; +use libp2p::futures::{StreamExt, TryFutureExt}; +use libp2p::kad::{Mode, Quorum, Record}; +use 
libp2p::kad::store::MemoryStore; +use libp2p::mdns::tokio::Tokio; +use libp2p::swarm::{NetworkBehaviour, SwarmEvent}; +use log::{error, info}; +use serde::{Deserialize, Serialize}; +use tokio::{io, select}; +use tokio::sync::Mutex; + +use crate::server::config; +use crate::server::p2p::{Error, LibP2PError}; +use crate::sharechain::ShareChain; + +#[derive(NetworkBehaviour)] +pub struct ServerNetworkBehaviour { + pub mdns: mdns::Behaviour, + pub gossipsub: gossipsub::Behaviour, + pub kademlia: kad::Behaviour, + // pub request_response: json::Behaviour, +} + +#[derive(Serialize, Deserialize)] +pub struct NodeInfo { + pub current_height: u64, +} + +pub struct Service + where S: ShareChain + Send + Sync + 'static, +{ + swarm: Arc>>, + port: u16, + share_chain: Arc, +} + +impl Service + where S: ShareChain + Send + Sync + 'static, +{ + fn new_swarm(config: &config::Config) -> Result, Error> { + let mut swarm = libp2p::SwarmBuilder::with_new_identity() + .with_tokio() + .with_tcp( + tcp::Config::default(), + noise::Config::new, + yamux::Config::default, + ) + .map_err(|e| Error::LibP2P(LibP2PError::Noise(e)))? + .with_behaviour(move |key_pair| { + // gossipsub + let message_id_fn = |message: &gossipsub::Message| { + let mut s = DefaultHasher::new(); + message.data.hash(&mut s); + gossipsub::MessageId::from(s.finish().to_string()) + }; + let gossipsub_config = gossipsub::ConfigBuilder::default() + .heartbeat_interval(Duration::from_secs(10)) + .validation_mode(gossipsub::ValidationMode::Strict) + .message_id_fn(message_id_fn) + .build() + .map_err(|msg| io::Error::new(io::ErrorKind::Other, msg))?; // Temporary hack because `build` does not return a proper `std::error::Error`. 
+ let gossipsub = gossipsub::Behaviour::new( + gossipsub::MessageAuthenticity::Signed(key_pair.clone()), + gossipsub_config, + )?; + + // grpc + // let router = Server::builder().add_service( + // ShareChainServer::new(ShareChainGrpc::new(InMemoryShareChain::new())) + // ).into_service(); + // let behaviour = json::Behaviour::::new( + // [(StreamProtocol::new("/grpc"), ProtocolSupport::Full)], + // request_response::Config::default(), + // ); + + + Ok(ServerNetworkBehaviour { + gossipsub, + mdns: mdns::Behaviour::new( + mdns::Config::default(), + key_pair.public().to_peer_id(), + ) + .map_err(|e| Error::LibP2P(LibP2PError::IO(e)))?, + kademlia: kad::Behaviour::new( + key_pair.public().to_peer_id(), + MemoryStore::new(key_pair.public().to_peer_id()), + ), + }) + }) + .map_err(|e| Error::LibP2P(LibP2PError::Behaviour(e.to_string())))? + .with_swarm_config(|c| c.with_idle_connection_timeout(config.idle_connection_timeout)) + .build(); + + swarm.behaviour_mut().kademlia.set_mode(Some(Mode::Server)); + + Ok(swarm) + } + pub fn new(config: &config::Config, share_chain: Arc) -> Result { + Ok(Self { + swarm: Arc::new(Mutex::new(Self::new_swarm(config)?)), + port: config.p2p_port, + share_chain, + }) + } + + async fn start_publish_node_info(&self) { + let swarm = self.swarm.clone(); + let share_chain = self.share_chain.clone(); + tokio::spawn(async move { + let mut interval = tokio::time::interval(Duration::from_secs(5)); + loop { + select! 
{ + _ = interval.tick() => { + // TODO: somehow update always the node info value to the latest + info!("Publishing node info..."); + + // store current node info in Kademlia DHT + let key = swarm.lock().await.local_peer_id().to_base58().into_bytes(); + let current_height_result = share_chain.tip_height().await; + if let Err(error) = current_height_result { + error!("Failed to get tip of local share chain: {error:?}"); + continue; + } + let node_info_result = serde_cbor::to_vec(&NodeInfo { + current_height: current_height_result.unwrap(), + }).map_err(Error::Serialize); + if let Err(error) = node_info_result { + error!("Failed to serialize Node Info: {error:?}"); + continue; + } + let query_id_result = swarm.lock().await.behaviour_mut().kademlia.put_record( + Record::new( + key.clone(), + node_info_result.unwrap(), + ), + Quorum::All, + ); + if let Err(error) = query_id_result { + error!("Failed to put kademlia record: {error:?}"); + continue; + } + + // set ourself as a provider + let provide_result = + swarm.lock().await.behaviour_mut().kademlia.start_providing(kad::RecordKey::new(&key)); + if let Err(error) = provide_result { + error!("Failed to start providing kademlia record: {error:?}"); + } + } + } + } + }); + } + + async fn event_loop(&self) -> Result<(), Error> { + loop { + let mut swarm = self.swarm.lock().await; + select! { + next = swarm.select_next_some() => match next { + SwarmEvent::NewListenAddr { address, .. 
} => { + info!("Listening on {address:?}"); + }, + SwarmEvent::Behaviour(event) => match event { + ServerNetworkBehaviourEvent::Mdns(mdns_event) => match mdns_event { + mdns::Event::Discovered(peers) => { + for (peer, addr) in peers { + info!("Discovered new peer {} at {}", peer, addr); + swarm.behaviour_mut().gossipsub.add_explicit_peer(&peer); + swarm.behaviour_mut().kademlia.add_address(&peer, addr); + } + }, + mdns::Event::Expired(peers) => { + for (peer, addr) in peers { + info!("Expired peer {} at {}", peer, addr); + swarm.behaviour_mut().gossipsub.remove_explicit_peer(&peer); + swarm.behaviour_mut().kademlia.remove_address(&peer, &addr); + } + }, + }, + ServerNetworkBehaviourEvent::Gossipsub(event) => { + info!("[GOSSIP] {event:?}"); + }, + ServerNetworkBehaviourEvent::Kademlia(event) => { + info!("[Kademlia] {event:?}"); + }}, + _ => {} + } + } + } + } + + pub async fn start(&self) -> Result<(), Error> { + self.swarm + .lock() + .await + .listen_on( + format!("/ip4/0.0.0.0/tcp/{}", self.port) + .parse() + .map_err(|e| Error::LibP2P(LibP2PError::MultiAddrParse(e)))?, + ) + .map_err(|e| Error::LibP2P(LibP2PError::Transport(e)))?; + + self.start_publish_node_info().await; + + self.event_loop().await + } +} + diff --git a/crates/sha_p2pool/src/server/server.rs b/crates/sha_p2pool/src/server/server.rs index ec062bc0..49855a42 100644 --- a/crates/sha_p2pool/src/server/server.rs +++ b/crates/sha_p2pool/src/server/server.rs @@ -2,10 +2,12 @@ use std::convert::Infallible; use std::hash::{DefaultHasher, Hash, Hasher}; use std::net::{AddrParseError, SocketAddr}; use std::str::FromStr; +use std::sync::Arc; use std::time::Duration; use libp2p::{gossipsub, mdns, multiaddr, noise, PeerId, Swarm, tcp, TransportError, yamux}; use libp2p::futures::StreamExt; +use libp2p::gossipsub::Topic; use libp2p::mdns::tokio::Tokio; use libp2p::swarm::{NetworkBehaviour, SwarmEvent}; use log::{error, info}; @@ -19,50 +21,43 @@ use crate::server::grpc::base_node::TariBaseNodeGrpc; use 
crate::server::grpc::error::TonicError; use crate::server::grpc::p2pool::ShaP2PoolGrpc; use crate::server::p2p::{ServerNetworkBehaviour, ServerNetworkBehaviourEvent}; +use crate::sharechain::in_memory::InMemoryShareChain; +use crate::sharechain::ShareChain; #[derive(Error, Debug)] pub enum Error { - #[error("LibP2P error: {0}")] - LibP2P(#[from] LibP2PError), + #[error("P2P service error: {0}")] + P2PService(#[from] p2p::Error), #[error("gRPC error: {0}")] GRPC(#[from] grpc::error::Error), #[error("Socket address parse error: {0}")] AddrParse(#[from] AddrParseError), } -#[derive(Error, Debug)] -pub enum LibP2PError { - #[error("Noise error: {0}")] - Noise(#[from] noise::Error), - #[error("Multi address parse error: {0}")] - MultiAddrParse(#[from] multiaddr::Error), - #[error("Transport error: {0}")] - Transport(#[from] TransportError), - #[error("I/O error: {0}")] - IO(#[from] std::io::Error), - #[error("Behaviour error: {0}")] - Behaviour(String), -} - /// Server represents the server running all the necessary components for sha-p2pool. 
-pub struct Server { +pub struct Server + where S: ShareChain + Send + Sync + 'static +{ config: config::Config, - swarm: Swarm, + p2p_service: p2p::Service, base_node_grpc_service: BaseNodeServer, p2pool_grpc_service: ShaP2PoolServer, } // TODO: add graceful shutdown -impl Server { - pub async fn new(config: config::Config) -> Result { - let swarm = p2p::swarm(&config)?; +impl Server + where S: ShareChain + Send + Sync + 'static +{ + pub async fn new(config: config::Config, share_chain: S) -> Result { let base_node_grpc_service = TariBaseNodeGrpc::new(config.base_node_address.clone()).await.map_err(Error::GRPC)?; let base_node_grpc_server = BaseNodeServer::new(base_node_grpc_service); let p2pool_grpc_service = ShaP2PoolGrpc::new(config.base_node_address.clone()).await.map_err(Error::GRPC)?; let p2pool_server = ShaP2PoolServer::new(p2pool_grpc_service); - Ok(Self { config, swarm, base_node_grpc_service: base_node_grpc_server, p2pool_grpc_service: p2pool_server }) + let p2p_service: p2p::Service = p2p::Service::new(&config, Arc::new(share_chain)).map_err(Error::P2PService)?; + + Ok(Self { config, p2p_service, base_node_grpc_service: base_node_grpc_server, p2pool_grpc_service: p2pool_server }) } pub async fn start_grpc( @@ -92,17 +87,9 @@ impl Server { } pub async fn start(&mut self) -> Result<(), Error> { - self.swarm - .listen_on( - format!("/ip4/0.0.0.0/tcp/{}", self.config.p2p_port) - .parse() - .map_err(|e| Error::LibP2P(LibP2PError::MultiAddrParse(e)))?, - ) - .map_err(|e| Error::LibP2P(LibP2PError::Transport(e)))?; - info!("Starting Tari SHA-3 mining P2Pool..."); - // grpc serve + // local base node and p2pool node grpc services let base_node_grpc_service = self.base_node_grpc_service.clone(); let p2pool_grpc_service = self.p2pool_grpc_service.clone(); let grpc_port = self.config.grpc_port; @@ -115,35 +102,6 @@ impl Server { } }); - // main loop - loop { - select! 
{ - next = self.swarm.select_next_some() => match next { - SwarmEvent::NewListenAddr { address, .. } => { - info!("Listening on {address:?}"); - }, - SwarmEvent::Behaviour(event) => match event { - ServerNetworkBehaviourEvent::Mdns(mdns_event) => match mdns_event { - mdns::Event::Discovered(peers) => { - for (peer, addr) in peers { - info!("Discovered new peer {} at {}", peer, addr); - self.swarm.behaviour_mut().gossipsub.add_explicit_peer(&peer); - } - }, - mdns::Event::Expired(peers) => { - for (peer, addr) in peers { - info!("Expired peer {} at {}", peer, addr); - self.swarm.behaviour_mut().gossipsub.remove_explicit_peer(&peer); - } - }, - }, - ServerNetworkBehaviourEvent::Gossipsub(event) => { - info!("GOSSIP: {event:?}"); - }, - }, - _ => {} - } - } - } + self.p2p_service.start().await.map_err(Error::P2PService) } } diff --git a/crates/sha_p2pool/src/sharechain/grpc/mod.rs b/crates/sha_p2pool/src/sharechain/grpc/mod.rs index f13840d9..d4932d5a 100644 --- a/crates/sha_p2pool/src/sharechain/grpc/mod.rs +++ b/crates/sha_p2pool/src/sharechain/grpc/mod.rs @@ -1,6 +1,7 @@ +use libp2p::futures::channel::mpsc; use tonic::{Request, Response, Status}; -use crate::sharechain::grpc::rpc::{GetBlockHeightTipRequest, GetBlockHeightTipResponse, SyncRequest}; +use crate::sharechain::grpc::rpc::{Block, SyncRequest}; use crate::sharechain::grpc::rpc::share_chain_server::ShareChain as GrpcShareChain; use crate::sharechain::ShareChain; @@ -15,17 +16,20 @@ pub struct ShareChainGrpc blockchain: T, } +impl ShareChainGrpc + where T: ShareChain + Send + Sync + 'static { + pub fn new(blockchain: T) -> Self { + Self { + blockchain + } + } +} + #[tonic::async_trait] impl GrpcShareChain for ShareChainGrpc where T: ShareChain + Send + Sync + 'static, { - async fn get_block_height_tip(&self, request: Request) -> Result, Status> { - Ok(Response::new(GetBlockHeightTipResponse { - height: self.blockchain.tip_height().await, - })) - } - - type SyncStream = (); + type SyncStream = 
mpsc::Receiver>; async fn sync(&self, request: Request) -> Result, Status> { todo!() diff --git a/crates/sha_p2pool/src/sharechain/in_memory.rs b/crates/sha_p2pool/src/sharechain/in_memory.rs index 0d591583..7941e35e 100644 --- a/crates/sha_p2pool/src/sharechain/in_memory.rs +++ b/crates/sha_p2pool/src/sharechain/in_memory.rs @@ -1,16 +1,25 @@ use async_trait::async_trait; +use rand::random; -use crate::sharechain::{Block, ShareChain, ShareChainError}; +use crate::sharechain::{Block, ShareChain, ShareChainResult}; pub struct InMemoryShareChain {} +impl InMemoryShareChain { + pub fn new() -> Self { + Self {} + } +} + #[async_trait] impl ShareChain for InMemoryShareChain { - async fn submit_block(&self, block: Block) -> ShareChainError<()> { - todo!() + async fn submit_block(&self, block: Block) -> ShareChainResult<()> { + //TODO: implement + Ok(()) } - async fn tip_height(&self) -> ShareChainError { - todo!() + async fn tip_height(&self) -> ShareChainResult { + //TODO: implement + Ok(random()) } } \ No newline at end of file diff --git a/crates/sha_p2pool/src/sharechain/mod.rs b/crates/sha_p2pool/src/sharechain/mod.rs index 3371ad6a..6c164208 100644 --- a/crates/sha_p2pool/src/sharechain/mod.rs +++ b/crates/sha_p2pool/src/sharechain/mod.rs @@ -1,13 +1,17 @@ use async_trait::async_trait; +use tari_common_types::types::BlockHash; use tari_core::blocks::BlockHeader; use thiserror::Error; -mod grpc; -mod in_memory; +pub mod grpc; +pub mod in_memory; pub struct Block { - original_header: BlockHeader, - + hash: BlockHash, + prev_hash: BlockHash, + height: u64, + original_block_header: BlockHeader, + miners: Vec, } #[derive(Error, Debug)] @@ -16,11 +20,11 @@ pub enum Error { Internal(String), } -pub type ShareChainError = Result; +pub type ShareChainResult = Result; #[async_trait] pub trait ShareChain { - async fn submit_block(&self, block: Block) -> ShareChainError<()>; + async fn submit_block(&self, block: Block) -> ShareChainResult<()>; - async fn tip_height(&self) -> 
ShareChainError; + async fn tip_height(&self) -> ShareChainResult; } \ No newline at end of file From 027ddef2bcac9bd83ab6cb49eb0ea3fefbb60b35 Mon Sep 17 00:00:00 2001 From: richardb Date: Wed, 12 Jun 2024 13:55:58 +0200 Subject: [PATCH 08/43] automatic broadcasting of peer info implemented + tracking all of the peers and its info --- crates/sha_p2pool/Cargo.toml | 2 +- crates/sha_p2pool/src/server/p2p/error.rs | 11 +- crates/sha_p2pool/src/server/p2p/messages.rs | 34 +++ crates/sha_p2pool/src/server/p2p/mod.rs | 2 + crates/sha_p2pool/src/server/p2p/p2p.rs | 212 +++++++++--------- .../sha_p2pool/src/server/p2p/peer_store.rs | 88 ++++++++ 6 files changed, 235 insertions(+), 114 deletions(-) create mode 100644 crates/sha_p2pool/src/server/p2p/messages.rs create mode 100644 crates/sha_p2pool/src/server/p2p/peer_store.rs diff --git a/crates/sha_p2pool/Cargo.toml b/crates/sha_p2pool/Cargo.toml index f2f4a009..13af320e 100644 --- a/crates/sha_p2pool/Cargo.toml +++ b/crates/sha_p2pool/Cargo.toml @@ -25,7 +25,6 @@ libp2p = { version = "0.53.2", features = [ "yamux", "mdns", "gossipsub", - "kad", "request-response", "json" ] } @@ -41,6 +40,7 @@ tonic = { workspace = true } async-trait = "0.1.80" serde_cbor = "0.11.2" rand = "0.8.5" +dashmap = "5.5.3" [build-dependencies] tonic-build = { workspace = true } diff --git a/crates/sha_p2pool/src/server/p2p/error.rs b/crates/sha_p2pool/src/server/p2p/error.rs index a8989eed..b0aa8eb5 100644 --- a/crates/sha_p2pool/src/server/p2p/error.rs +++ b/crates/sha_p2pool/src/server/p2p/error.rs @@ -1,4 +1,5 @@ -use libp2p::{kad, multiaddr, noise, TransportError}; +use libp2p::{multiaddr, noise, TransportError}; +use libp2p::gossipsub::PublishError; use thiserror::Error; use crate::sharechain; @@ -7,8 +8,8 @@ use crate::sharechain; pub enum Error { #[error("LibP2P error: {0}")] LibP2P(#[from] LibP2PError), - #[error("CBOR serialize error: {0}")] - Serialize(#[from] serde_cbor::Error), + #[error("CBOR serialize/deserialize error: {0}")] + 
SerializeDeserialize(#[from] serde_cbor::Error), #[error("Share chain error: {0}")] ShareChain(#[from] sharechain::Error), } @@ -25,6 +26,6 @@ pub enum LibP2PError { IO(#[from] std::io::Error), #[error("Behaviour error: {0}")] Behaviour(String), - #[error("Kademlia record store error: {0}")] - KadRecord(#[from] kad::store::Error), + #[error("Gossip sub publish error: {0}")] + Publish(#[from] PublishError), } \ No newline at end of file diff --git a/crates/sha_p2pool/src/server/p2p/messages.rs b/crates/sha_p2pool/src/server/p2p/messages.rs new file mode 100644 index 00000000..7406e752 --- /dev/null +++ b/crates/sha_p2pool/src/server/p2p/messages.rs @@ -0,0 +1,34 @@ +use libp2p::gossipsub::Message; +use serde::{Deserialize, Serialize}; + +use crate::server::p2p::Error; + +macro_rules! impl_message_try_from { + ($type:ty) => { + impl TryFrom for $type { + type Error = Error; + + fn try_from(message: Message) -> Result { + deserialize_message::<$type>(message.data.as_slice()) + } + } + }; +} + +#[derive(Serialize, Deserialize, Debug)] +pub struct PeerInfo { + pub current_height: u64, +} +impl_message_try_from!(PeerInfo); + +pub fn deserialize_message<'a, T>(raw_message: &'a [u8]) -> Result + where T: Deserialize<'a>, +{ + serde_cbor::from_slice(raw_message).map_err(Error::SerializeDeserialize) +} + +pub fn serialize_message(input: &T) -> Result, Error> + where T: Serialize, +{ + serde_cbor::to_vec(input).map_err(Error::SerializeDeserialize) +} \ No newline at end of file diff --git a/crates/sha_p2pool/src/server/p2p/mod.rs b/crates/sha_p2pool/src/server/p2p/mod.rs index bd8a5374..a21a44e9 100644 --- a/crates/sha_p2pool/src/server/p2p/mod.rs +++ b/crates/sha_p2pool/src/server/p2p/mod.rs @@ -3,4 +3,6 @@ pub use p2p::*; mod p2p; mod error; +mod messages; +mod peer_store; diff --git a/crates/sha_p2pool/src/server/p2p/p2p.rs b/crates/sha_p2pool/src/server/p2p/p2p.rs index bbdb4c42..d8b60fdb 100644 --- a/crates/sha_p2pool/src/server/p2p/p2p.rs +++ 
b/crates/sha_p2pool/src/server/p2p/p2p.rs @@ -1,48 +1,48 @@ +use std::collections::HashMap; use std::hash::{DefaultHasher, Hash, Hasher}; use std::sync::Arc; use std::time::Duration; -use libp2p::{gossipsub, kad, mdns, noise, PeerId, Swarm, tcp, yamux}; +use libp2p::{gossipsub, mdns, noise, PeerId, Swarm, tcp, yamux}; use libp2p::futures::{StreamExt, TryFutureExt}; -use libp2p::kad::{Mode, Quorum, Record}; -use libp2p::kad::store::MemoryStore; +use libp2p::gossipsub::{Event, IdentTopic, Message, PublishError, Topic}; use libp2p::mdns::tokio::Tokio; use libp2p::swarm::{NetworkBehaviour, SwarmEvent}; -use log::{error, info}; +use log::{error, info, warn}; +use rand::random; use serde::{Deserialize, Serialize}; use tokio::{io, select}; -use tokio::sync::Mutex; +use tokio::sync::{Mutex, MutexGuard}; use crate::server::config; -use crate::server::p2p::{Error, LibP2PError}; +use crate::server::p2p::{Error, LibP2PError, messages}; +use crate::server::p2p::messages::PeerInfo; +use crate::server::p2p::peer_store::PeerStore; use crate::sharechain::ShareChain; +const PEER_INFO_TOPIC: &str = "peer_info"; + #[derive(NetworkBehaviour)] pub struct ServerNetworkBehaviour { pub mdns: mdns::Behaviour, pub gossipsub: gossipsub::Behaviour, - pub kademlia: kad::Behaviour, // pub request_response: json::Behaviour, } -#[derive(Serialize, Deserialize)] -pub struct NodeInfo { - pub current_height: u64, -} - pub struct Service where S: ShareChain + Send + Sync + 'static, { - swarm: Arc>>, + swarm: Swarm, port: u16, share_chain: Arc, + peer_store: PeerStore, } impl Service where S: ShareChain + Send + Sync + 'static, { fn new_swarm(config: &config::Config) -> Result, Error> { - let mut swarm = libp2p::SwarmBuilder::with_new_identity() + let swarm = libp2p::SwarmBuilder::with_new_identity() .with_tokio() .with_tcp( tcp::Config::default(), @@ -68,16 +68,6 @@ impl Service gossipsub_config, )?; - // grpc - // let router = Server::builder().add_service( - // 
ShareChainServer::new(ShareChainGrpc::new(InMemoryShareChain::new())) - // ).into_service(); - // let behaviour = json::Behaviour::::new( - // [(StreamProtocol::new("/grpc"), ProtocolSupport::Full)], - // request_response::Config::default(), - // ); - - Ok(ServerNetworkBehaviour { gossipsub, mdns: mdns::Behaviour::new( @@ -85,118 +75,124 @@ impl Service key_pair.public().to_peer_id(), ) .map_err(|e| Error::LibP2P(LibP2PError::IO(e)))?, - kademlia: kad::Behaviour::new( - key_pair.public().to_peer_id(), - MemoryStore::new(key_pair.public().to_peer_id()), - ), }) }) .map_err(|e| Error::LibP2P(LibP2PError::Behaviour(e.to_string())))? .with_swarm_config(|c| c.with_idle_connection_timeout(config.idle_connection_timeout)) .build(); - swarm.behaviour_mut().kademlia.set_mode(Some(Mode::Server)); - Ok(swarm) } pub fn new(config: &config::Config, share_chain: Arc) -> Result { Ok(Self { - swarm: Arc::new(Mutex::new(Self::new_swarm(config)?)), + swarm: Self::new_swarm(config)?, port: config.p2p_port, share_chain, + peer_store: PeerStore::new(config.idle_connection_timeout), }) } - async fn start_publish_node_info(&self) { - let swarm = self.swarm.clone(); + async fn broadcast_peer_info(&mut self) -> Result<(), Error> { + // get node info let share_chain = self.share_chain.clone(); - tokio::spawn(async move { - let mut interval = tokio::time::interval(Duration::from_secs(5)); - loop { - select! 
{ - _ = interval.tick() => { - // TODO: somehow update always the node info value to the latest - info!("Publishing node info..."); - - // store current node info in Kademlia DHT - let key = swarm.lock().await.local_peer_id().to_base58().into_bytes(); - let current_height_result = share_chain.tip_height().await; - if let Err(error) = current_height_result { - error!("Failed to get tip of local share chain: {error:?}"); - continue; - } - let node_info_result = serde_cbor::to_vec(&NodeInfo { - current_height: current_height_result.unwrap(), - }).map_err(Error::Serialize); - if let Err(error) = node_info_result { - error!("Failed to serialize Node Info: {error:?}"); - continue; - } - let query_id_result = swarm.lock().await.behaviour_mut().kademlia.put_record( - Record::new( - key.clone(), - node_info_result.unwrap(), - ), - Quorum::All, - ); - if let Err(error) = query_id_result { - error!("Failed to put kademlia record: {error:?}"); - continue; - } + let current_height = share_chain.tip_height().await + .map_err(Error::ShareChain)?; + let node_info = messages::serialize_message(&PeerInfo { current_height })?; - // set ourself as a provider - let provide_result = - swarm.lock().await.behaviour_mut().kademlia.start_providing(kad::RecordKey::new(&key)); - if let Err(error) = provide_result { - error!("Failed to start providing kademlia record: {error:?}"); - } + // broadcast node info + self.swarm.behaviour_mut().gossipsub.publish(IdentTopic::new(PEER_INFO_TOPIC), node_info) + .map_err(|error| Error::LibP2P(LibP2PError::Publish(error)))?; + + Ok(()) + } + + async fn subscribe_to_peer_info(&mut self) { + self.swarm.behaviour_mut().gossipsub.subscribe(&IdentTopic::new(PEER_INFO_TOPIC)) + .expect("must be subscribed to node_info topic"); + } + + async fn handle_new_message(&mut self, message: Message) { + let peer = message.source; + if peer.is_none() { + warn!("Message source is not set! 
{:?}", message); + return; + } + let peer = peer.unwrap(); + + let topic = message.topic.as_str(); + match topic { + PEER_INFO_TOPIC => { + match messages::PeerInfo::try_from(message) { + Ok(payload) => { + self.peer_store.add(peer, payload); + } + Err(error) => { + error!("Can't deserialize node info payload: {:?}", error); } } } - }); + &_ => { + warn!("Unknown message!"); + } + } + } + + async fn handle_event(&mut self, event: SwarmEvent) { + match event { + SwarmEvent::NewListenAddr { address, .. } => { + info!("Listening on {address:?}"); + } + SwarmEvent::Behaviour(event) => match event { + ServerNetworkBehaviourEvent::Mdns(mdns_event) => match mdns_event { + mdns::Event::Discovered(peers) => { + for (peer, addr) in peers { + self.swarm.behaviour_mut().gossipsub.add_explicit_peer(&peer); + } + } + mdns::Event::Expired(peers) => { + for (peer, addr) in peers { + self.swarm.behaviour_mut().gossipsub.remove_explicit_peer(&peer); + } + } + }, + ServerNetworkBehaviourEvent::Gossipsub(event) => match event { + Event::Message { message, message_id: _message_id, propagation_source: _propagation_source } => { + self.handle_new_message(message).await; + } + Event::Subscribed { .. } => {} + Event::Unsubscribed { .. } => {} + Event::GossipsubNotSupported { .. } => {} + } + }, + _ => {} + }; } - async fn event_loop(&self) -> Result<(), Error> { + async fn main_loop(&mut self) -> Result<(), Error> { + // TODO: get from config + let mut publish_peer_info_interval = tokio::time::interval(Duration::from_secs(5)); loop { - let mut swarm = self.swarm.lock().await; select! { - next = swarm.select_next_some() => match next { - SwarmEvent::NewListenAddr { address, .. 
} => { - info!("Listening on {address:?}"); - }, - SwarmEvent::Behaviour(event) => match event { - ServerNetworkBehaviourEvent::Mdns(mdns_event) => match mdns_event { - mdns::Event::Discovered(peers) => { - for (peer, addr) in peers { - info!("Discovered new peer {} at {}", peer, addr); - swarm.behaviour_mut().gossipsub.add_explicit_peer(&peer); - swarm.behaviour_mut().kademlia.add_address(&peer, addr); + _ = publish_peer_info_interval.tick() => { + self.peer_store.cleanup(); + if let Err(error) = self.broadcast_peer_info().await { + match error { + Error::LibP2P(LibP2PError::Publish(PublishError::InsufficientPeers)) => { + warn!("No peers to broadcast peer info!"); } - }, - mdns::Event::Expired(peers) => { - for (peer, addr) in peers { - info!("Expired peer {} at {}", peer, addr); - swarm.behaviour_mut().gossipsub.remove_explicit_peer(&peer); - swarm.behaviour_mut().kademlia.remove_address(&peer, &addr); - } - }, - }, - ServerNetworkBehaviourEvent::Gossipsub(event) => { - info!("[GOSSIP] {event:?}"); - }, - ServerNetworkBehaviourEvent::Kademlia(event) => { - info!("[Kademlia] {event:?}"); - }}, - _ => {} - } + _ => { + error!("Failed to publish node info: {error:?}"); + } + } + } + }, + next = self.swarm.select_next_some() => self.handle_event(next).await, } } } - pub async fn start(&self) -> Result<(), Error> { + pub async fn start(&mut self) -> Result<(), Error> { self.swarm - .lock() - .await .listen_on( format!("/ip4/0.0.0.0/tcp/{}", self.port) .parse() @@ -204,9 +200,9 @@ impl Service ) .map_err(|e| Error::LibP2P(LibP2PError::Transport(e)))?; - self.start_publish_node_info().await; + self.subscribe_to_peer_info().await; - self.event_loop().await + self.main_loop().await } } diff --git a/crates/sha_p2pool/src/server/p2p/peer_store.rs b/crates/sha_p2pool/src/server/p2p/peer_store.rs new file mode 100644 index 00000000..761b2581 --- /dev/null +++ b/crates/sha_p2pool/src/server/p2p/peer_store.rs @@ -0,0 +1,88 @@ +use std::sync::Arc; +use std::time::{Duration, 
Instant}; + +use dashmap::DashMap; +use libp2p::PeerId; + +use crate::server::p2p::messages::PeerInfo; + +pub struct PeerStoreRecord { + peer_info: PeerInfo, + created: Instant, +} + +impl PeerStoreRecord { + pub fn new(peer_info: PeerInfo) -> Self { + Self { + peer_info, + created: Instant::now(), + } + } +} + +#[derive(Copy, Clone, Debug)] +pub struct PeerStoreBlockHeightTip { + peer_id: PeerId, + height: u64, +} + +impl PeerStoreBlockHeightTip { + pub fn new(peer_id: PeerId, height: u64) -> Self { + Self { + peer_id, + height, + } + } +} + +pub struct PeerStore { + inner: Arc>, + // Max time to live for the items to avoid non-existing peers in list. + ttl: Duration, + tip_of_block_height: Option, +} + +impl PeerStore { + pub fn new(ttl: Duration) -> Self { + Self { + inner: Arc::new(DashMap::new()), + ttl, + tip_of_block_height: None, + } + } + + pub fn add(&mut self, peer_id: PeerId, peer_info: PeerInfo) { + self.inner.insert(peer_id, PeerStoreRecord::new(peer_info)); + self.set_tip_of_block_height(); + } + + fn set_tip_of_block_height(&mut self) { + if let Some(result) = + self.inner.iter() + .max_by(|r1, r2| { + r1.peer_info.current_height.cmp(&r2.peer_info.current_height) + }) { + self.tip_of_block_height = Some( + PeerStoreBlockHeightTip::new( + *result.key(), + result.peer_info.current_height, + ), + ) + } + } + + pub fn tip_of_block_height(&self) -> Option { + self.tip_of_block_height + } + + pub fn cleanup(&mut self) { + self.inner.iter() + .filter(|record| { + let elapsed = record.created.elapsed(); + elapsed.gt(&self.ttl) + }).for_each(|record| { + self.inner.remove(record.key()); + }); + self.set_tip_of_block_height(); + } +} \ No newline at end of file From 11375bb69af2f8a26881d0cf8abd653abf7f96a0 Mon Sep 17 00:00:00 2001 From: richardb Date: Wed, 12 Jun 2024 15:01:05 +0200 Subject: [PATCH 09/43] cleanup + p2p service client impl in progress --- crates/sha_p2pool/Cargo.toml | 3 - crates/sha_p2pool/build.rs | 13 --- crates/sha_p2pool/proto/block.proto 
| 88 ------------------- crates/sha_p2pool/proto/sharechain.proto | 14 --- .../sha_p2pool/src/server/grpc/base_node.rs | 20 +++-- crates/sha_p2pool/src/server/grpc/p2pool.rs | 18 ++-- crates/sha_p2pool/src/server/p2p/messages.rs | 28 ++++-- crates/sha_p2pool/src/server/p2p/p2p.rs | 15 ++-- crates/sha_p2pool/src/server/server.rs | 15 ++-- crates/sha_p2pool/src/sharechain/grpc/mod.rs | 37 -------- crates/sha_p2pool/src/sharechain/mod.rs | 1 - 11 files changed, 66 insertions(+), 186 deletions(-) delete mode 100644 crates/sha_p2pool/build.rs delete mode 100644 crates/sha_p2pool/proto/block.proto delete mode 100644 crates/sha_p2pool/proto/sharechain.proto delete mode 100644 crates/sha_p2pool/src/sharechain/grpc/mod.rs diff --git a/crates/sha_p2pool/Cargo.toml b/crates/sha_p2pool/Cargo.toml index 13af320e..8a78aa16 100644 --- a/crates/sha_p2pool/Cargo.toml +++ b/crates/sha_p2pool/Cargo.toml @@ -42,7 +42,4 @@ serde_cbor = "0.11.2" rand = "0.8.5" dashmap = "5.5.3" -[build-dependencies] -tonic-build = { workspace = true } - diff --git a/crates/sha_p2pool/build.rs b/crates/sha_p2pool/build.rs deleted file mode 100644 index f1afa221..00000000 --- a/crates/sha_p2pool/build.rs +++ /dev/null @@ -1,13 +0,0 @@ -// Copyright 2022 The Tari Project -// SPDX-License-Identifier: BSD-3-Clause - -fn main() -> Result<(), Box> { - tonic_build::configure().build_client(true).build_server(true).compile( - &[ - "proto/sharechain.proto", - ], - &["proto"], - )?; - - Ok(()) -} \ No newline at end of file diff --git a/crates/sha_p2pool/proto/block.proto b/crates/sha_p2pool/proto/block.proto deleted file mode 100644 index 09cd2150..00000000 --- a/crates/sha_p2pool/proto/block.proto +++ /dev/null @@ -1,88 +0,0 @@ -syntax = "proto3"; - -package tari.p2pool.sharechain.rpc; - -// The BlockHeader contains all the metadata for the block, including proof of work, a link to the previous block -// and the transaction kernels. 
-message TariBlockHeader { - // The hash of the block - bytes hash = 1; - // Version of the block - uint32 version = 2; - // Height of this block since the genesis block (height 0) - uint64 height = 3; - // Hash of the block previous to this in the chain. - bytes prev_hash = 4; - // Timestamp at which the block was built. - uint64 timestamp = 5; - // This is the UTXO merkle root of the outputs - // This is calculated as Hash (txo MMR root || roaring bitmap hash of UTXO indices) - bytes output_mr = 6; - // This is the MMR root of the kernels - bytes kernel_mr = 8; - // This is the Merkle root of the inputs in this block - bytes input_mr = 9; - // Total accumulated sum of kernel offsets since genesis block. We can derive the kernel offset sum for *this* - // block from the total kernel offset of the previous block header. - bytes total_kernel_offset = 10; - // Nonce increment used to mine this block. - uint64 nonce = 11; - // Proof of work metadata - ProofOfWork pow = 12; - // Kernel MMR size - uint64 kernel_mmr_size = 13; - // Output MMR size - uint64 output_mmr_size = 14; - // Sum of script offsets for all kernels in this block. - bytes total_script_offset = 15; - // Merkle root of validator nodes - bytes validator_node_mr = 16; - // Validator size - uint64 validator_node_size = 17; -} - -// The proof of work data structure that is included in the block header. -message ProofOfWork { - // The algorithm used to mine this block - // 0 = Monero - // 1 = Sha3X - uint64 pow_algo = 1; - // Supplemental proof of work data. For example for Sha3x, this would be empty (only the block header is - // required), but for Monero merge mining we need the Monero block header and RandomX seed hash. 
- bytes pow_data = 4; -} - -//This is used to request the which pow algo should be used with the block template -message PowAlgo { - // The permitted pow algorithms - enum PowAlgos { - POW_ALGOS_RANDOMX = 0; // Accessible as `grpc::pow_algo::PowAlgos::Randomx` - POW_ALGOS_SHA3X = 1; // Accessible as `grpc::pow_algo::PowAlgos::Sha3x` - } - // The pow algo to use - PowAlgos pow_algo = 1; -} - - -// A Share chain block. Blocks are linked together into a blockchain. -message Block { - // The hash of the block - bytes hash = 1; - - // Hash of the block previous to this in the share chain. - bytes prev_hash = 2; - - // Height of this block since the genesis block (height 0) - uint64 height = 3; - - // The original header of the block on the main Tari network, we need this to validate original block - // and calculate current block's hash to do another round of validation. - TariBlockHeader original_block_header = 4; - - // All the miners working on this block, all of them included in the resulting block if mined and submitted t mai chain. - repeated string miners = 5; - - // TODO: add other relevant fields -} - - diff --git a/crates/sha_p2pool/proto/sharechain.proto b/crates/sha_p2pool/proto/sharechain.proto deleted file mode 100644 index 3636817d..00000000 --- a/crates/sha_p2pool/proto/sharechain.proto +++ /dev/null @@ -1,14 +0,0 @@ -syntax = "proto3"; - -package tari.p2pool.sharechain.rpc; - -import "block.proto"; - -service ShareChain { - // Sync will return a stream of share chain blocks starting from `from_height`. 
- rpc Sync(SyncRequest) returns(stream Block); -} - -message SyncRequest { - uint64 from_height = 1; -} \ No newline at end of file diff --git a/crates/sha_p2pool/src/server/grpc/base_node.rs b/crates/sha_p2pool/src/server/grpc/base_node.rs index aa44d1c3..a32c6107 100644 --- a/crates/sha_p2pool/src/server/grpc/base_node.rs +++ b/crates/sha_p2pool/src/server/grpc/base_node.rs @@ -16,6 +16,8 @@ use tokio::sync::Mutex; use tonic::{IntoRequest, Request, Response, Status, Streaming}; use crate::server::grpc::error::{Error, TonicError}; +use crate::server::p2p; +use crate::sharechain::ShareChain; const LIST_HEADERS_PAGE_SIZE: usize = 10; const GET_BLOCKS_PAGE_SIZE: usize = 10; @@ -51,19 +53,23 @@ macro_rules! proxy_stream_result { }; } -pub struct TariBaseNodeGrpc { +pub struct TariBaseNodeGrpc + where S: ShareChain + Send + Sync + 'static +{ // TODO: check if 1 shared client is enough or we need a pool of clients to operate faster client: Arc>>, + p2p_service: Arc>, } -impl TariBaseNodeGrpc { - pub async fn new(base_node_address: String) -> Result { +impl TariBaseNodeGrpc + where S: ShareChain + Send + Sync + 'static { + pub async fn new(base_node_address: String, p2p_service: Arc>) -> Result { // TODO: add retry mechanism to try at least 3 times before failing let client = BaseNodeGrpcClient::connect(base_node_address) .await .map_err(|e| Error::Tonic(TonicError::Transport(e)))?; - Ok(Self { client: Arc::new(Mutex::new(client)) }) + Ok(Self { client: Arc::new(Mutex::new(client)), p2p_service }) } async fn streaming_response( @@ -94,7 +100,9 @@ impl TariBaseNodeGrpc { } #[tonic::async_trait] -impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc { +impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc + where S: ShareChain + Send + Sync + 'static +{ type ListHeadersStream = mpsc::Receiver>; async fn list_headers(&self, request: Request) -> Result, Status> { proxy_stream_result!(self, list_headers, request, LIST_HEADERS_PAGE_SIZE) @@ -158,7 +166,7 @@ 
impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc { async fn submit_block(&self, request: Request) -> Result, Status> { // Check block's difficulty compared to the latest network one to increase the probability - // to get the block accepted (and also a block with lower difficulty than latest one is invalid anyway). + // to get the block accepted (and also a block with lower difficulty than latest one is invalid anyway). let grpc_block = request.into_inner(); let block = blocks::Block::try_from(grpc_block.clone()) .map_err(|e| { Status::internal(e) })?; diff --git a/crates/sha_p2pool/src/server/grpc/p2pool.rs b/crates/sha_p2pool/src/server/grpc/p2pool.rs index d1e68898..d8220125 100644 --- a/crates/sha_p2pool/src/server/grpc/p2pool.rs +++ b/crates/sha_p2pool/src/server/grpc/p2pool.rs @@ -11,19 +11,25 @@ use tonic::{Request, Response, Status}; use crate::server::grpc::error::Error; use crate::server::grpc::error::TonicError; +use crate::sharechain::ShareChain; -pub struct ShaP2PoolGrpc { +pub struct ShaP2PoolGrpc + where S: ShareChain + Send + Sync + 'static +{ client: Arc>>, + share_chain: Arc, } -impl ShaP2PoolGrpc { - pub async fn new(base_node_address: String) -> Result { +impl ShaP2PoolGrpc + where S: ShareChain + Send + Sync + 'static +{ + pub async fn new(base_node_address: String, share_chain: Arc) -> Result { // TODO: add retry mechanism to try at least 3 times before failing let client = BaseNodeGrpcClient::connect(base_node_address) .await .map_err(|e| Error::Tonic(TonicError::Transport(e)))?; - Ok(Self { client: Arc::new(Mutex::new(client)) }) + Ok(Self { client: Arc::new(Mutex::new(client)), share_chain }) } // TODO: complete implementation to find the right shares @@ -58,7 +64,9 @@ impl ShaP2PoolGrpc { } #[tonic::async_trait] -impl ShaP2Pool for ShaP2PoolGrpc { +impl ShaP2Pool for ShaP2PoolGrpc + where S: ShareChain + Send + Sync + 'static +{ async fn get_new_block(&self, request: Request) -> Result, Status> { let template_request = 
request.into_inner(); let mut pow_algo = PowAlgo::default(); diff --git a/crates/sha_p2pool/src/server/p2p/messages.rs b/crates/sha_p2pool/src/server/p2p/messages.rs index 7406e752..e2cf7f8e 100644 --- a/crates/sha_p2pool/src/server/p2p/messages.rs +++ b/crates/sha_p2pool/src/server/p2p/messages.rs @@ -3,7 +3,7 @@ use serde::{Deserialize, Serialize}; use crate::server::p2p::Error; -macro_rules! impl_message_try_from { +macro_rules! impl_conversions { ($type:ty) => { impl TryFrom for $type { type Error = Error; @@ -12,15 +12,16 @@ macro_rules! impl_message_try_from { deserialize_message::<$type>(message.data.as_slice()) } } + + impl TryInto> for $type { + type Error = Error; + + fn try_into(self) -> Result, Self::Error> { + serialize_message(&self) + } + } }; } - -#[derive(Serialize, Deserialize, Debug)] -pub struct PeerInfo { - pub current_height: u64, -} -impl_message_try_from!(PeerInfo); - pub fn deserialize_message<'a, T>(raw_message: &'a [u8]) -> Result where T: Deserialize<'a>, { @@ -31,4 +32,15 @@ pub fn serialize_message(input: &T) -> Result, Error> where T: Serialize, { serde_cbor::to_vec(input).map_err(Error::SerializeDeserialize) +} + +#[derive(Serialize, Deserialize, Debug)] +pub struct PeerInfo { + pub current_height: u64, +} +impl_conversions!(PeerInfo); +impl PeerInfo { + pub fn new(current_height: u64) -> Self { + Self { current_height } + } } \ No newline at end of file diff --git a/crates/sha_p2pool/src/server/p2p/p2p.rs b/crates/sha_p2pool/src/server/p2p/p2p.rs index d8b60fdb..4d731e72 100644 --- a/crates/sha_p2pool/src/server/p2p/p2p.rs +++ b/crates/sha_p2pool/src/server/p2p/p2p.rs @@ -29,6 +29,11 @@ pub struct ServerNetworkBehaviour { // pub request_response: json::Behaviour, } +// TODO: implement ServiceClient and wire into TariBaseNodeGrpc +pub struct ServiceClient + where S: ShareChain + Send + Sync + 'static +{} + pub struct Service where S: ShareChain + Send + Sync + 'static, { @@ -93,14 +98,14 @@ impl Service } async fn 
broadcast_peer_info(&mut self) -> Result<(), Error> { - // get node info + // get peer info let share_chain = self.share_chain.clone(); let current_height = share_chain.tip_height().await .map_err(Error::ShareChain)?; - let node_info = messages::serialize_message(&PeerInfo { current_height })?; + let peer_info_raw: Vec = PeerInfo::new(current_height).try_into()?; - // broadcast node info - self.swarm.behaviour_mut().gossipsub.publish(IdentTopic::new(PEER_INFO_TOPIC), node_info) + // broadcast peer info + self.swarm.behaviour_mut().gossipsub.publish(IdentTopic::new(PEER_INFO_TOPIC), peer_info_raw) .map_err(|error| Error::LibP2P(LibP2PError::Publish(error)))?; Ok(()) @@ -132,7 +137,7 @@ impl Service } } &_ => { - warn!("Unknown message!"); + warn!("Unknown topic {topic:?}!"); } } } diff --git a/crates/sha_p2pool/src/server/server.rs b/crates/sha_p2pool/src/server/server.rs index 49855a42..2518c665 100644 --- a/crates/sha_p2pool/src/server/server.rs +++ b/crates/sha_p2pool/src/server/server.rs @@ -39,9 +39,9 @@ pub struct Server where S: ShareChain + Send + Sync + 'static { config: config::Config, - p2p_service: p2p::Service, + p2p_service: Arc>, base_node_grpc_service: BaseNodeServer, - p2pool_grpc_service: ShaP2PoolServer, + p2pool_grpc_service: ShaP2PoolServer>, } // TODO: add graceful shutdown @@ -49,20 +49,23 @@ impl Server where S: ShareChain + Send + Sync + 'static { pub async fn new(config: config::Config, share_chain: S) -> Result { + let share_chain = Arc::new(share_chain); + let p2p_service: Arc> = Arc::new( + p2p::Service::new(&config, share_chain.clone()).map_err(Error::P2PService)? 
+ ); + let base_node_grpc_service = TariBaseNodeGrpc::new(config.base_node_address.clone()).await.map_err(Error::GRPC)?; let base_node_grpc_server = BaseNodeServer::new(base_node_grpc_service); - let p2pool_grpc_service = ShaP2PoolGrpc::new(config.base_node_address.clone()).await.map_err(Error::GRPC)?; + let p2pool_grpc_service = ShaP2PoolGrpc::new(config.base_node_address.clone(), share_chain.clone()).await.map_err(Error::GRPC)?; let p2pool_server = ShaP2PoolServer::new(p2pool_grpc_service); - let p2p_service: p2p::Service = p2p::Service::new(&config, Arc::new(share_chain)).map_err(Error::P2PService)?; - Ok(Self { config, p2p_service, base_node_grpc_service: base_node_grpc_server, p2pool_grpc_service: p2pool_server }) } pub async fn start_grpc( base_node_service: BaseNodeServer, - p2pool_service: ShaP2PoolServer, + p2pool_service: ShaP2PoolServer>, grpc_port: u16, ) -> Result<(), Error> { info!("Starting gRPC server on port {}!", &grpc_port); diff --git a/crates/sha_p2pool/src/sharechain/grpc/mod.rs b/crates/sha_p2pool/src/sharechain/grpc/mod.rs deleted file mode 100644 index d4932d5a..00000000 --- a/crates/sha_p2pool/src/sharechain/grpc/mod.rs +++ /dev/null @@ -1,37 +0,0 @@ -use libp2p::futures::channel::mpsc; -use tonic::{Request, Response, Status}; - -use crate::sharechain::grpc::rpc::{Block, SyncRequest}; -use crate::sharechain::grpc::rpc::share_chain_server::ShareChain as GrpcShareChain; -use crate::sharechain::ShareChain; - -pub mod rpc { - tonic::include_proto!("tari.p2pool.sharechain.rpc"); -} - -#[derive(Debug)] -pub struct ShareChainGrpc - where T: ShareChain + Send + Sync + 'static, -{ - blockchain: T, -} - -impl ShareChainGrpc - where T: ShareChain + Send + Sync + 'static { - pub fn new(blockchain: T) -> Self { - Self { - blockchain - } - } -} - -#[tonic::async_trait] -impl GrpcShareChain for ShareChainGrpc - where T: ShareChain + Send + Sync + 'static, -{ - type SyncStream = mpsc::Receiver>; - - async fn sync(&self, request: Request) -> Result, 
Status> { - todo!() - } -} \ No newline at end of file diff --git a/crates/sha_p2pool/src/sharechain/mod.rs b/crates/sha_p2pool/src/sharechain/mod.rs index 6c164208..3dfca5ef 100644 --- a/crates/sha_p2pool/src/sharechain/mod.rs +++ b/crates/sha_p2pool/src/sharechain/mod.rs @@ -3,7 +3,6 @@ use tari_common_types::types::BlockHash; use tari_core::blocks::BlockHeader; use thiserror::Error; -pub mod grpc; pub mod in_memory; pub struct Block { From 768d9536bb836d2085a6c33dddd2d8f58d2936ef Mon Sep 17 00:00:00 2001 From: richardb Date: Wed, 12 Jun 2024 22:33:14 +0200 Subject: [PATCH 10/43] p2p service client basics --- .../sha_p2pool/src/server/grpc/base_node.rs | 17 ++++----- crates/sha_p2pool/src/server/p2p/client.rs | 25 ++++++++++++ crates/sha_p2pool/src/server/p2p/messages.rs | 14 ++++++- crates/sha_p2pool/src/server/p2p/mod.rs | 2 + crates/sha_p2pool/src/server/p2p/p2p.rs | 38 +++++++++++-------- crates/sha_p2pool/src/server/server.rs | 14 ++++--- crates/sha_p2pool/src/sharechain/mod.rs | 2 + 7 files changed, 80 insertions(+), 32 deletions(-) create mode 100644 crates/sha_p2pool/src/server/p2p/client.rs diff --git a/crates/sha_p2pool/src/server/grpc/base_node.rs b/crates/sha_p2pool/src/server/grpc/base_node.rs index a32c6107..9860cbf4 100644 --- a/crates/sha_p2pool/src/server/grpc/base_node.rs +++ b/crates/sha_p2pool/src/server/grpc/base_node.rs @@ -4,6 +4,7 @@ use std::sync::Arc; use libp2p::futures::channel::mpsc; use libp2p::futures::SinkExt; +use libp2p::Swarm; use log::{error, info, warn}; use minotari_app_grpc::conversions::*; use minotari_app_grpc::tari_rpc; @@ -17,6 +18,7 @@ use tonic::{IntoRequest, Request, Response, Status, Streaming}; use crate::server::grpc::error::{Error, TonicError}; use crate::server::p2p; +use crate::server::p2p::ServerNetworkBehaviour; use crate::sharechain::ShareChain; const LIST_HEADERS_PAGE_SIZE: usize = 10; @@ -53,23 +55,21 @@ macro_rules! 
proxy_stream_result { }; } -pub struct TariBaseNodeGrpc - where S: ShareChain + Send + Sync + 'static +pub struct TariBaseNodeGrpc { // TODO: check if 1 shared client is enough or we need a pool of clients to operate faster client: Arc>>, - p2p_service: Arc>, + p2p_client: p2p::ServiceClient, } -impl TariBaseNodeGrpc - where S: ShareChain + Send + Sync + 'static { - pub async fn new(base_node_address: String, p2p_service: Arc>) -> Result { +impl TariBaseNodeGrpc { + pub async fn new(base_node_address: String, p2p_client: p2p::ServiceClient) -> Result { // TODO: add retry mechanism to try at least 3 times before failing let client = BaseNodeGrpcClient::connect(base_node_address) .await .map_err(|e| Error::Tonic(TonicError::Transport(e)))?; - Ok(Self { client: Arc::new(Mutex::new(client)), p2p_service }) + Ok(Self { client: Arc::new(Mutex::new(client)), p2p_client }) } async fn streaming_response( @@ -100,8 +100,7 @@ impl TariBaseNodeGrpc } #[tonic::async_trait] -impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc - where S: ShareChain + Send + Sync + 'static +impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc { type ListHeadersStream = mpsc::Receiver>; async fn list_headers(&self, request: Request) -> Result, Status> { diff --git a/crates/sha_p2pool/src/server/p2p/client.rs b/crates/sha_p2pool/src/server/p2p/client.rs new file mode 100644 index 00000000..872e6ba5 --- /dev/null +++ b/crates/sha_p2pool/src/server/p2p/client.rs @@ -0,0 +1,25 @@ +use tokio::sync::broadcast; + +use crate::server::p2p::Error; +use crate::server::p2p::messages::{ValidateBlockRequest, ValidateBlockResult}; +use crate::sharechain::Block; + +struct ServiceClientChannels { + validate_block_sender: broadcast::Sender, + validate_block_receiver: broadcast::Receiver, +} + +pub struct ServiceClient { + channels: ServiceClientChannels, +} + +impl ServiceClient { + fn new(channels: ServiceClientChannels) -> Self { + Self { channels } + } + + pub fn validate_block(&self, block: 
Block) -> Result { + // TODO: continue impl + todo!() + } +} \ No newline at end of file diff --git a/crates/sha_p2pool/src/server/p2p/messages.rs b/crates/sha_p2pool/src/server/p2p/messages.rs index e2cf7f8e..d141b32f 100644 --- a/crates/sha_p2pool/src/server/p2p/messages.rs +++ b/crates/sha_p2pool/src/server/p2p/messages.rs @@ -2,6 +2,7 @@ use libp2p::gossipsub::Message; use serde::{Deserialize, Serialize}; use crate::server::p2p::Error; +use crate::sharechain::Block; macro_rules! impl_conversions { ($type:ty) => { @@ -43,4 +44,15 @@ impl PeerInfo { pub fn new(current_height: u64) -> Self { Self { current_height } } -} \ No newline at end of file +} + +#[derive(Serialize, Deserialize, Debug)] +pub struct ValidateBlockRequest(Block); +impl_conversions!(ValidateBlockRequest); + +#[derive(Serialize, Deserialize, Debug)] +pub struct ValidateBlockResult { + block: Block, + valid: bool, +} +impl_conversions!(ValidateBlockResult); \ No newline at end of file diff --git a/crates/sha_p2pool/src/server/p2p/mod.rs b/crates/sha_p2pool/src/server/p2p/mod.rs index a21a44e9..ba9697e9 100644 --- a/crates/sha_p2pool/src/server/p2p/mod.rs +++ b/crates/sha_p2pool/src/server/p2p/mod.rs @@ -1,3 +1,4 @@ +pub use client::*; pub use error::*; pub use p2p::*; @@ -5,4 +6,5 @@ mod p2p; mod error; mod messages; mod peer_store; +mod client; diff --git a/crates/sha_p2pool/src/server/p2p/p2p.rs b/crates/sha_p2pool/src/server/p2p/p2p.rs index 4d731e72..2a473f17 100644 --- a/crates/sha_p2pool/src/server/p2p/p2p.rs +++ b/crates/sha_p2pool/src/server/p2p/p2p.rs @@ -1,5 +1,7 @@ +use std::cell::RefCell; use std::collections::HashMap; use std::hash::{DefaultHasher, Hash, Hasher}; +use std::ops::DerefMut; use std::sync::Arc; use std::time::Duration; @@ -12,10 +14,10 @@ use log::{error, info, warn}; use rand::random; use serde::{Deserialize, Serialize}; use tokio::{io, select}; -use tokio::sync::{Mutex, MutexGuard}; +use tokio::sync::{broadcast, Mutex, MutexGuard, oneshot}; use crate::server::config; 
-use crate::server::p2p::{Error, LibP2PError, messages}; +use crate::server::p2p::{Error, LibP2PError, messages, ServiceClient}; use crate::server::p2p::messages::PeerInfo; use crate::server::p2p::peer_store::PeerStore; use crate::sharechain::ShareChain; @@ -29,11 +31,6 @@ pub struct ServerNetworkBehaviour { // pub request_response: json::Behaviour, } -// TODO: implement ServiceClient and wire into TariBaseNodeGrpc -pub struct ServiceClient - where S: ShareChain + Send + Sync + 'static -{} - pub struct Service where S: ShareChain + Send + Sync + 'static, { @@ -46,6 +43,15 @@ pub struct Service impl Service where S: ShareChain + Send + Sync + 'static, { + pub fn new(config: &config::Config, share_chain: Arc) -> Result { + Ok(Self { + swarm: Self::new_swarm(config)?, + port: config.p2p_port, + share_chain, + peer_store: PeerStore::new(config.idle_connection_timeout), + }) + } + fn new_swarm(config: &config::Config) -> Result, Error> { let swarm = libp2p::SwarmBuilder::with_new_identity() .with_tokio() @@ -88,13 +94,10 @@ impl Service Ok(swarm) } - pub fn new(config: &config::Config, share_chain: Arc) -> Result { - Ok(Self { - swarm: Self::new_swarm(config)?, - port: config.p2p_port, - share_chain, - peer_store: PeerStore::new(config.idle_connection_timeout), - }) + + pub fn client(&self) -> ServiceClient { + // TODO: implement + todo!() } async fn broadcast_peer_info(&mut self) -> Result<(), Error> { @@ -176,6 +179,7 @@ impl Service async fn main_loop(&mut self) -> Result<(), Error> { // TODO: get from config let mut publish_peer_info_interval = tokio::time::interval(Duration::from_secs(5)); + loop { select! 
{ _ = publish_peer_info_interval.tick() => { @@ -190,8 +194,10 @@ impl Service } } } - }, - next = self.swarm.select_next_some() => self.handle_event(next).await, + } + event = self.swarm.select_next_some() => { + self.handle_event(event).await; + } } } } diff --git a/crates/sha_p2pool/src/server/server.rs b/crates/sha_p2pool/src/server/server.rs index 2518c665..274a0836 100644 --- a/crates/sha_p2pool/src/server/server.rs +++ b/crates/sha_p2pool/src/server/server.rs @@ -15,12 +15,13 @@ use minotari_app_grpc::tari_rpc::base_node_server::BaseNodeServer; use minotari_app_grpc::tari_rpc::sha_p2_pool_server::ShaP2PoolServer; use thiserror::Error; use tokio::{io, io::AsyncBufReadExt, select}; +use tokio::sync::Mutex; use crate::server::{config, grpc, p2p}; use crate::server::grpc::base_node::TariBaseNodeGrpc; use crate::server::grpc::error::TonicError; use crate::server::grpc::p2pool::ShaP2PoolGrpc; -use crate::server::p2p::{ServerNetworkBehaviour, ServerNetworkBehaviourEvent}; +use crate::server::p2p::{ServerNetworkBehaviour, ServerNetworkBehaviourEvent, ServiceClient}; use crate::sharechain::in_memory::InMemoryShareChain; use crate::sharechain::ShareChain; @@ -39,7 +40,7 @@ pub struct Server where S: ShareChain + Send + Sync + 'static { config: config::Config, - p2p_service: Arc>, + p2p_service: p2p::Service, base_node_grpc_service: BaseNodeServer, p2pool_grpc_service: ShaP2PoolServer>, } @@ -50,11 +51,12 @@ impl Server { pub async fn new(config: config::Config, share_chain: S) -> Result { let share_chain = Arc::new(share_chain); - let p2p_service: Arc> = Arc::new( - p2p::Service::new(&config, share_chain.clone()).map_err(Error::P2PService)? 
- ); + let p2p_service: p2p::Service = p2p::Service::new(&config, share_chain.clone()).map_err(Error::P2PService)?; - let base_node_grpc_service = TariBaseNodeGrpc::new(config.base_node_address.clone()).await.map_err(Error::GRPC)?; + let base_node_grpc_service = TariBaseNodeGrpc::new( + config.base_node_address.clone(), + p2p_service.client(), + ).await.map_err(Error::GRPC)?; let base_node_grpc_server = BaseNodeServer::new(base_node_grpc_service); let p2pool_grpc_service = ShaP2PoolGrpc::new(config.base_node_address.clone(), share_chain.clone()).await.map_err(Error::GRPC)?; diff --git a/crates/sha_p2pool/src/sharechain/mod.rs b/crates/sha_p2pool/src/sharechain/mod.rs index 3dfca5ef..023277aa 100644 --- a/crates/sha_p2pool/src/sharechain/mod.rs +++ b/crates/sha_p2pool/src/sharechain/mod.rs @@ -1,10 +1,12 @@ use async_trait::async_trait; +use serde::{Deserialize, Serialize}; use tari_common_types::types::BlockHash; use tari_core::blocks::BlockHeader; use thiserror::Error; pub mod in_memory; +#[derive(Serialize, Deserialize, Debug)] pub struct Block { hash: BlockHash, prev_hash: BlockHash, From f9583250c3cc4f106d32e4e7132990b2336ecd33 Mon Sep 17 00:00:00 2001 From: richardb Date: Thu, 13 Jun 2024 15:03:59 +0200 Subject: [PATCH 11/43] p2p block validation almost done --- .../sha_p2pool/src/server/grpc/base_node.rs | 15 +- crates/sha_p2pool/src/server/p2p/client.rs | 89 ++++++++++- crates/sha_p2pool/src/server/p2p/error.rs | 3 + crates/sha_p2pool/src/server/p2p/messages.rs | 38 ++++- crates/sha_p2pool/src/server/p2p/p2p.rs | 149 +++++++++++++++--- .../sha_p2pool/src/server/p2p/peer_store.rs | 44 ++++-- crates/sha_p2pool/src/sharechain/mod.rs | 17 +- 7 files changed, 304 insertions(+), 51 deletions(-) diff --git a/crates/sha_p2pool/src/server/grpc/base_node.rs b/crates/sha_p2pool/src/server/grpc/base_node.rs index 9860cbf4..ac81e830 100644 --- a/crates/sha_p2pool/src/server/grpc/base_node.rs +++ b/crates/sha_p2pool/src/server/grpc/base_node.rs @@ -18,7 +18,7 @@ use 
tonic::{IntoRequest, Request, Response, Status, Streaming}; use crate::server::grpc::error::{Error, TonicError}; use crate::server::p2p; -use crate::server::p2p::ServerNetworkBehaviour; +use crate::server::p2p::{ClientError, ServerNetworkBehaviour}; use crate::sharechain::ShareChain; const LIST_HEADERS_PAGE_SIZE: usize = 10; @@ -164,11 +164,19 @@ impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc } async fn submit_block(&self, request: Request) -> Result, Status> { - // Check block's difficulty compared to the latest network one to increase the probability - // to get the block accepted (and also a block with lower difficulty than latest one is invalid anyway). let grpc_block = request.into_inner(); let block = blocks::Block::try_from(grpc_block.clone()) .map_err(|e| { Status::internal(e) })?; + + // validate block + let validation_result = self.p2p_client.validate_block(block.clone().into()).await + .map_err(|error| Status::internal(error.to_string()))?; + if !validation_result { + return Err(Status::failed_precondition("invalid block")); // TODO: maybe another error would be better + } + + // Check block's difficulty compared to the latest network one to increase the probability + // to get the block accepted (and also a block with lower difficulty than latest one is invalid anyway). 
let request_block_difficulty = sha3x_difficulty(&block.header) .map_err(|error| { Status::internal(error.to_string()) })?; let mut network_difficulty_stream = self.client.lock().await.get_network_difficulty(HeightRequest { @@ -192,6 +200,7 @@ impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc })); } + let request = grpc_block.into_request(); match proxy_simple_result!(self, submit_block, request) { Ok(resp) => { diff --git a/crates/sha_p2pool/src/server/p2p/client.rs b/crates/sha_p2pool/src/server/p2p/client.rs index 872e6ba5..3be5d8ee 100644 --- a/crates/sha_p2pool/src/server/p2p/client.rs +++ b/crates/sha_p2pool/src/server/p2p/client.rs @@ -1,25 +1,100 @@ +use std::sync::Arc; +use std::time::Duration; + +use log::{info, warn}; +use thiserror::Error; +use tokio::select; use tokio::sync::broadcast; +use tokio::sync::broadcast::error::{RecvError, SendError}; +use tokio::time::sleep; -use crate::server::p2p::Error; use crate::server::p2p::messages::{ValidateBlockRequest, ValidateBlockResult}; +use crate::server::p2p::peer_store::PeerStore; use crate::sharechain::Block; -struct ServiceClientChannels { +#[derive(Error, Debug)] +pub enum ClientError { + #[error("Channel send error: {0}")] + ChannelSend(#[from] Box), + #[error("Channel receive error: {0}")] + ChannelReceive(#[from] RecvError), +} + +#[derive(Error, Debug)] +pub enum ChannelSendError { + #[error("Send ValidateBlockRequest error: {0}")] + SendValidateBlockRequest(#[from] SendError), +} + +pub struct ServiceClientChannels { validate_block_sender: broadcast::Sender, validate_block_receiver: broadcast::Receiver, } +impl ServiceClientChannels { + pub fn new( + validate_block_sender: broadcast::Sender, + validate_block_receiver: broadcast::Receiver, + ) -> Self { + Self { + validate_block_sender, + validate_block_receiver, + } + } +} + pub struct ServiceClient { channels: ServiceClientChannels, + peer_store: Arc, } impl ServiceClient { - fn new(channels: ServiceClientChannels) -> Self { - Self { 
channels } + pub fn new( + channels: ServiceClientChannels, + peer_store: Arc, + ) -> Self { + Self { channels, peer_store } } - pub fn validate_block(&self, block: Block) -> Result { - // TODO: continue impl - todo!() + pub async fn validate_block(&self, block: Block) -> Result { + info!("[CLIENT] Start block validation"); + // send request to validate block + self.channels.validate_block_sender.send(ValidateBlockRequest::new(block.clone())) + .map_err(|error| + ClientError::ChannelSend(Box::new(ChannelSendError::SendValidateBlockRequest(error))) + )?; + + // calculate how many validations we need (more than 2/3 of peers should validate) + let peer_count = self.peer_store.peer_count() + 1; // TODO: remove + 1 + info!("[CLIENT] Peer count: {peer_count:?}"); + // TODO: calculate well, if there are 3 peers (including us), then min validation count is: + // TODO: ((peer_count + 1 / 3) * 2) - 1 rounded to an int + let min_validation_count = (peer_count / 3) * 2; + info!("[CLIENT] Minimum validation count: {min_validation_count:?}"); + + // wait for the validations to come + let timeout = Duration::from_secs(30); + let mut validate_receiver = self.channels.validate_block_receiver.resubscribe(); + let mut validation_count = 0; + loop { + select! 
{ + _ = sleep(timeout) => { + warn!("Timing out waiting for validations!"); + break; + } + result = validate_receiver.recv() => { + let validate_result = result.map_err(ClientError::ChannelReceive)?; + info!("New validation: {validate_result:?}"); + if validate_result.valid && validate_result.block == block { + validation_count+=1; + } + if validation_count >= min_validation_count { + break; + } + } + } + } + + Ok(validation_count >= min_validation_count) } } \ No newline at end of file diff --git a/crates/sha_p2pool/src/server/p2p/error.rs b/crates/sha_p2pool/src/server/p2p/error.rs index b0aa8eb5..9619bf6c 100644 --- a/crates/sha_p2pool/src/server/p2p/error.rs +++ b/crates/sha_p2pool/src/server/p2p/error.rs @@ -2,6 +2,7 @@ use libp2p::{multiaddr, noise, TransportError}; use libp2p::gossipsub::PublishError; use thiserror::Error; +use crate::server::p2p; use crate::sharechain; #[derive(Error, Debug)] @@ -12,6 +13,8 @@ pub enum Error { SerializeDeserialize(#[from] serde_cbor::Error), #[error("Share chain error: {0}")] ShareChain(#[from] sharechain::Error), + #[error("Share chain error: {0}")] + Client(#[from] p2p::client::ClientError), } #[derive(Error, Debug)] diff --git a/crates/sha_p2pool/src/server/p2p/messages.rs b/crates/sha_p2pool/src/server/p2p/messages.rs index d141b32f..7e24e308 100644 --- a/crates/sha_p2pool/src/server/p2p/messages.rs +++ b/crates/sha_p2pool/src/server/p2p/messages.rs @@ -1,4 +1,5 @@ use libp2p::gossipsub::Message; +use libp2p::PeerId; use serde::{Deserialize, Serialize}; use crate::server::p2p::Error; @@ -13,10 +14,10 @@ macro_rules! 
impl_conversions { deserialize_message::<$type>(message.data.as_slice()) } } - + impl TryInto> for $type { type Error = Error; - + fn try_into(self) -> Result, Self::Error> { serialize_message(&self) } @@ -46,13 +47,36 @@ impl PeerInfo { } } -#[derive(Serialize, Deserialize, Debug)] +#[derive(Serialize, Deserialize, Debug, Clone)] pub struct ValidateBlockRequest(Block); impl_conversions!(ValidateBlockRequest); +impl ValidateBlockRequest { + pub fn new(block: Block) -> Self { + Self(block) + } + + pub fn block(&self) -> Block { + self.0.clone() + } +} -#[derive(Serialize, Deserialize, Debug)] +#[derive(Serialize, Deserialize, Debug, Clone)] pub struct ValidateBlockResult { - block: Block, - valid: bool, + pub peer_id: PeerId, + pub block: Block, + pub valid: bool, } -impl_conversions!(ValidateBlockResult); \ No newline at end of file +impl_conversions!(ValidateBlockResult); +impl ValidateBlockResult { + pub fn new( + peer_id: PeerId, + block: Block, + valid: bool, + ) -> Self { + Self { + peer_id, + block, + valid, + } + } +} \ No newline at end of file diff --git a/crates/sha_p2pool/src/server/p2p/p2p.rs b/crates/sha_p2pool/src/server/p2p/p2p.rs index 2a473f17..98be0fa9 100644 --- a/crates/sha_p2pool/src/server/p2p/p2p.rs +++ b/crates/sha_p2pool/src/server/p2p/p2p.rs @@ -1,28 +1,26 @@ -use std::cell::RefCell; -use std::collections::HashMap; use std::hash::{DefaultHasher, Hash, Hasher}; -use std::ops::DerefMut; use std::sync::Arc; use std::time::Duration; -use libp2p::{gossipsub, mdns, noise, PeerId, Swarm, tcp, yamux}; -use libp2p::futures::{StreamExt, TryFutureExt}; +use libp2p::{gossipsub, mdns, noise, Swarm, tcp, yamux}; +use libp2p::futures::StreamExt; use libp2p::gossipsub::{Event, IdentTopic, Message, PublishError, Topic}; use libp2p::mdns::tokio::Tokio; use libp2p::swarm::{NetworkBehaviour, SwarmEvent}; use log::{error, info, warn}; -use rand::random; -use serde::{Deserialize, Serialize}; use tokio::{io, select}; -use tokio::sync::{broadcast, Mutex, 
MutexGuard, oneshot}; +use tokio::sync::{broadcast, Mutex}; +use tokio::sync::broadcast::error::RecvError; use crate::server::config; -use crate::server::p2p::{Error, LibP2PError, messages, ServiceClient}; -use crate::server::p2p::messages::PeerInfo; +use crate::server::p2p::{Error, LibP2PError, messages, ServiceClient, ServiceClientChannels}; +use crate::server::p2p::messages::{PeerInfo, ValidateBlockRequest, ValidateBlockResult}; use crate::server::p2p::peer_store::PeerStore; use crate::sharechain::ShareChain; const PEER_INFO_TOPIC: &str = "peer_info"; +const BLOCK_VALIDATION_REQUESTS_TOPIC: &str = "block_validation_requests"; +const BLOCK_VALIDATION_RESULTS_TOPIC: &str = "block_validation_results"; #[derive(NetworkBehaviour)] pub struct ServerNetworkBehaviour { @@ -37,18 +35,37 @@ pub struct Service swarm: Swarm, port: u16, share_chain: Arc, - peer_store: PeerStore, + peer_store: Arc, + + // service client related channels + client_validate_block_req_tx: broadcast::Sender, + client_validate_block_req_rx: broadcast::Receiver, + client_validate_block_res_tx: broadcast::Sender, + client_validate_block_res_rx: broadcast::Receiver, } impl Service where S: ShareChain + Send + Sync + 'static, { pub fn new(config: &config::Config, share_chain: Arc) -> Result { + let swarm = Self::new_swarm(config)?; + let peer_store = Arc::new( + PeerStore::new(config.idle_connection_timeout), + ); + + // client related channels + let (validate_req_tx, validate_req_rx) = broadcast::channel::(1); + let (validate_res_tx, validate_res_rx) = broadcast::channel::(1); + Ok(Self { - swarm: Self::new_swarm(config)?, + swarm, port: config.p2p_port, share_chain, - peer_store: PeerStore::new(config.idle_connection_timeout), + peer_store, + client_validate_block_req_tx: validate_req_tx, + client_validate_block_req_rx: validate_req_rx, + client_validate_block_res_tx: validate_res_tx, + client_validate_block_res_rx: validate_res_rx, }) } @@ -96,8 +113,60 @@ impl Service } pub fn client(&self) -> 
ServiceClient { - // TODO: implement - todo!() + ServiceClient::new( + ServiceClientChannels::new( + self.client_validate_block_req_tx.clone(), + self.client_validate_block_res_rx.resubscribe(), + ), + self.peer_store.clone(), + ) + } + + async fn handle_client_validate_block_request(&mut self, result: Result) { + match result { + Ok(request) => { + let request_raw_result: Result, Error> = request.try_into(); + match request_raw_result { + Ok(request_raw) => { + match self.swarm.behaviour_mut().gossipsub.publish( + IdentTopic::new(BLOCK_VALIDATION_REQUESTS_TOPIC), + request_raw, + ) { + Ok(_) => {} + Err(error) => { + error!("Failed to send block validation request: {error:?}"); + } + } + } + Err(error) => { + error!("Failed to convert block validation request to bytes: {error:?}"); + } + } + } + Err(error) => { + error!("Block validation request receive error: {error:?}"); + } + } + } + + async fn send_block_validation_result(&mut self, result: ValidateBlockResult) { + let result_raw_result: Result, Error> = result.try_into(); + match result_raw_result { + Ok(result_raw) => { + match self.swarm.behaviour_mut().gossipsub.publish( + IdentTopic::new(BLOCK_VALIDATION_RESULTS_TOPIC), + result_raw, + ) { + Ok(_) => {} + Err(error) => { + error!("Failed to publish block validation result: {error:?}"); + } + } + } + Err(error) => { + error!("Failed to convert block validation result to bytes: {error:?}"); + } + } } async fn broadcast_peer_info(&mut self) -> Result<(), Error> { @@ -114,9 +183,15 @@ impl Service Ok(()) } - async fn subscribe_to_peer_info(&mut self) { - self.swarm.behaviour_mut().gossipsub.subscribe(&IdentTopic::new(PEER_INFO_TOPIC)) - .expect("must be subscribed to node_info topic"); + fn subscribe(&mut self, topic: &str) { + self.swarm.behaviour_mut().gossipsub.subscribe(&IdentTopic::new(topic.clone())) + .expect("must be subscribed to topic"); + } + + fn subscribe_to_topics(&mut self) { + self.subscribe(PEER_INFO_TOPIC); + 
self.subscribe(BLOCK_VALIDATION_REQUESTS_TOPIC); + self.subscribe(BLOCK_VALIDATION_RESULTS_TOPIC); } async fn handle_new_message(&mut self, message: Message) { @@ -133,9 +208,39 @@ impl Service match messages::PeerInfo::try_from(message) { Ok(payload) => { self.peer_store.add(peer, payload); + info!("[PEER STORE] Number of peers: {:?}", self.peer_store.peer_count()); } Err(error) => { - error!("Can't deserialize node info payload: {:?}", error); + error!("Can't deserialize peer info payload: {:?}", error); + } + } + } + BLOCK_VALIDATION_REQUESTS_TOPIC => { + match messages::ValidateBlockRequest::try_from(message) { + Ok(payload) => { + info!("Block validation request: {payload:?}"); + // TODO: validate block + let validate_result = ValidateBlockResult::new( + self.swarm.local_peer_id().clone(), + payload.block(), + true, // TODO: validate block + ); + self.send_block_validation_result(validate_result).await; + } + Err(error) => { + error!("Can't deserialize block validation request payload: {:?}", error); + } + } + } + BLOCK_VALIDATION_RESULTS_TOPIC => { + match messages::ValidateBlockResult::try_from(message) { + Ok(payload) => { + if let Err(error) = self.client_validate_block_res_tx.send(payload) { + error!("Failed to send block validation result to clients: {error:?}"); + } + } + Err(error) => { + error!("Can't deserialize block validation request payload: {:?}", error); } } } @@ -179,6 +284,7 @@ impl Service async fn main_loop(&mut self) -> Result<(), Error> { // TODO: get from config let mut publish_peer_info_interval = tokio::time::interval(Duration::from_secs(5)); + let mut client_validate_block_req_rx = self.client_validate_block_req_rx.resubscribe(); loop { select! 
{ @@ -195,6 +301,9 @@ impl Service } } } + result = client_validate_block_req_rx.recv() => { + self.handle_client_validate_block_request(result).await; + } event = self.swarm.select_next_some() => { self.handle_event(event).await; } @@ -211,7 +320,7 @@ impl Service ) .map_err(|e| Error::LibP2P(LibP2PError::Transport(e)))?; - self.subscribe_to_peer_info().await; + self.subscribe_to_topics(); self.main_loop().await } diff --git a/crates/sha_p2pool/src/server/p2p/peer_store.rs b/crates/sha_p2pool/src/server/p2p/peer_store.rs index 761b2581..4b18dc80 100644 --- a/crates/sha_p2pool/src/server/p2p/peer_store.rs +++ b/crates/sha_p2pool/src/server/p2p/peer_store.rs @@ -1,4 +1,4 @@ -use std::sync::Arc; +use std::sync::{Arc, RwLock}; use std::time::{Duration, Instant}; use dashmap::DashMap; @@ -39,7 +39,7 @@ pub struct PeerStore { inner: Arc>, // Max time to live for the items to avoid non-existing peers in list. ttl: Duration, - tip_of_block_height: Option, + tip_of_block_height: RwLock>, } impl PeerStore { @@ -47,35 +47,53 @@ impl PeerStore { Self { inner: Arc::new(DashMap::new()), ttl, - tip_of_block_height: None, + tip_of_block_height: RwLock::new(None), } } - pub fn add(&mut self, peer_id: PeerId, peer_info: PeerInfo) { + pub fn add(&self, peer_id: PeerId, peer_info: PeerInfo) { self.inner.insert(peer_id, PeerStoreRecord::new(peer_info)); self.set_tip_of_block_height(); } - fn set_tip_of_block_height(&mut self) { + pub fn peer_count(&self) -> usize { + self.inner.len() + } + + fn set_tip_of_block_height(&self) { if let Some(result) = self.inner.iter() .max_by(|r1, r2| { r1.peer_info.current_height.cmp(&r2.peer_info.current_height) }) { - self.tip_of_block_height = Some( - PeerStoreBlockHeightTip::new( - *result.key(), - result.peer_info.current_height, - ), - ) + // save result + if let Ok(mut tip_height_opt) = self.tip_of_block_height.write() { + if tip_height_opt.is_none() { + let _ = tip_height_opt.insert( + PeerStoreBlockHeightTip::new( + *result.key(), + 
result.peer_info.current_height, + ) + ); + } else { + let mut tip_height = tip_height_opt.unwrap(); + tip_height.peer_id = *result.key(); + tip_height.height = result.peer_info.current_height; + } + } } } pub fn tip_of_block_height(&self) -> Option { - self.tip_of_block_height + if let Ok(result) = self.tip_of_block_height.read() { + if result.is_some() { + return Some(result.unwrap()); + } + } + None } - pub fn cleanup(&mut self) { + pub fn cleanup(&self) { self.inner.iter() .filter(|record| { let elapsed = record.created.elapsed(); diff --git a/crates/sha_p2pool/src/sharechain/mod.rs b/crates/sha_p2pool/src/sharechain/mod.rs index 023277aa..78556e45 100644 --- a/crates/sha_p2pool/src/sharechain/mod.rs +++ b/crates/sha_p2pool/src/sharechain/mod.rs @@ -1,12 +1,13 @@ use async_trait::async_trait; use serde::{Deserialize, Serialize}; use tari_common_types::types::BlockHash; +use tari_core::blocks; use tari_core::blocks::BlockHeader; use thiserror::Error; pub mod in_memory; -#[derive(Serialize, Deserialize, Debug)] +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] pub struct Block { hash: BlockHash, prev_hash: BlockHash, @@ -15,6 +16,20 @@ pub struct Block { miners: Vec, } +// TODO: generate real block from share chain here +impl From for Block { + fn from(tari_block: blocks::Block) -> Self { + Self { + hash: Default::default(), + prev_hash: Default::default(), + height: tari_block.header.height, + original_block_header: tari_block.header, + miners: vec![], + } + } +} + + #[derive(Error, Debug)] pub enum Error { #[error("Internal error: {0}")] From 3011129e47275ea2d5b2bca27235ea59dacc8db6 Mon Sep 17 00:00:00 2001 From: richardb Date: Fri, 14 Jun 2024 15:11:21 +0200 Subject: [PATCH 12/43] p2p block validation in progress --- .../sha_p2pool/src/server/grpc/base_node.rs | 88 +++++++++++-------- crates/sha_p2pool/src/server/grpc/p2pool.rs | 42 ++------- crates/sha_p2pool/src/server/p2p/client.rs | 36 ++++---- crates/sha_p2pool/src/server/p2p/p2p.rs | 5 +- 
crates/sha_p2pool/src/server/server.rs | 5 +- crates/sha_p2pool/src/sharechain/in_memory.rs | 33 +++++++ crates/sha_p2pool/src/sharechain/mod.rs | 3 + 7 files changed, 118 insertions(+), 94 deletions(-) diff --git a/crates/sha_p2pool/src/server/grpc/base_node.rs b/crates/sha_p2pool/src/server/grpc/base_node.rs index ac81e830..f972f6d3 100644 --- a/crates/sha_p2pool/src/server/grpc/base_node.rs +++ b/crates/sha_p2pool/src/server/grpc/base_node.rs @@ -10,9 +10,14 @@ use minotari_app_grpc::conversions::*; use minotari_app_grpc::tari_rpc; use minotari_app_grpc::tari_rpc::{Block, BlockBlobRequest, BlockGroupRequest, BlockGroupResponse, BlockHeaderResponse, BlockHeight, BlockTimingResponse, ConsensusConstants, Empty, FetchMatchingUtxosRequest, GetActiveValidatorNodesRequest, GetBlocksRequest, GetHeaderByHashRequest, GetMempoolTransactionsRequest, GetNewBlockBlobResult, GetNewBlockResult, GetNewBlockTemplateWithCoinbasesRequest, GetNewBlockWithCoinbasesRequest, GetPeersRequest, GetShardKeyRequest, GetShardKeyResponse, GetSideChainUtxosRequest, GetTemplateRegistrationsRequest, HeightRequest, HistoricalBlock, ListConnectedPeersResponse, ListHeadersRequest, MempoolStatsResponse, NetworkStatusResponse, NewBlockCoinbase, NewBlockTemplate, NewBlockTemplateRequest, NewBlockTemplateResponse, NodeIdentity, PowAlgo, SearchKernelsRequest, SearchUtxosRequest, SoftwareUpdate, StringValue, SubmitBlockResponse, SubmitTransactionRequest, SubmitTransactionResponse, SyncInfoResponse, SyncProgressResponse, TipInfoResponse, TransactionStateRequest, TransactionStateResponse, ValueAtHeightResponse}; use minotari_app_grpc::tari_rpc::base_node_client::BaseNodeClient; +use minotari_app_grpc::tari_rpc::pow_algo::PowAlgos; use minotari_node_grpc_client::BaseNodeGrpcClient; +use tari_common_types::tari_address::TariAddress; use tari_core::blocks; use tari_core::proof_of_work::sha3x_difficulty; +use tari_core::transactions::generate_coinbase; +use 
tari_core::transactions::key_manager::{create_memory_db_key_manager_with_range_proof_size, MemoryDbKeyManager}; +use tari_core::transactions::tari_amount::MicroMinotari; use tokio::sync::Mutex; use tonic::{IntoRequest, Request, Response, Status, Streaming}; @@ -41,66 +46,75 @@ macro_rules! proxy_simple_result { macro_rules! proxy_stream_result { ($self:ident, $call:ident, $request:ident, $page_size:ident) => { - TariBaseNodeGrpc::streaming_response(String::from(stringify!($call)), + streaming_response(String::from(stringify!($call)), $self.client.lock().await.$call($request.into_inner()).await, $page_size, ).await }; ($self:ident, $call:ident, $request:ident, $page_size:expr) => { - TariBaseNodeGrpc::streaming_response(String::from(stringify!($call)), + streaming_response(String::from(stringify!($call)), $self.client.lock().await.$call($request.into_inner()).await, $page_size, ).await }; } -pub struct TariBaseNodeGrpc +async fn streaming_response( + call: String, + result: Result>, Status>, + page_size: usize) + -> Result>>, Status> + where R: Send + Sync + 'static, +{ + match result { + Ok(response) => { + let (mut tx, rx) = mpsc::channel(page_size); + tokio::spawn(async move { + let mut stream = response.into_inner(); + tokio::spawn(async move { + while let Ok(Some(next_message)) = stream.message().await { + if let Err(e) = tx.send(Ok(next_message)).await { + error!("failed to send '{call}' response message: {e}"); + } + } + }); + }); + Ok(Response::new(rx)) + } + Err(status) => Err(status) + } +} + +pub struct TariBaseNodeGrpc + where S: ShareChain + Send + Sync + 'static, { // TODO: check if 1 shared client is enough or we need a pool of clients to operate faster client: Arc>>, p2p_client: p2p::ServiceClient, + share_chain: Arc, } -impl TariBaseNodeGrpc { - pub async fn new(base_node_address: String, p2p_client: p2p::ServiceClient) -> Result { +impl TariBaseNodeGrpc + where S: ShareChain + Send + Sync + 'static, +{ + pub async fn new( + base_node_address: 
String, + p2p_client: p2p::ServiceClient, + share_chain: Arc, + ) -> Result { // TODO: add retry mechanism to try at least 3 times before failing let client = BaseNodeGrpcClient::connect(base_node_address) .await .map_err(|e| Error::Tonic(TonicError::Transport(e)))?; - Ok(Self { client: Arc::new(Mutex::new(client)), p2p_client }) - } - - async fn streaming_response( - call: String, - result: Result>, Status>, - page_size: usize) - -> Result>>, Status> - where R: Send + Sync + 'static, - { - match result { - Ok(response) => { - let (mut tx, rx) = mpsc::channel(page_size); - tokio::spawn(async move { - let mut stream = response.into_inner(); - tokio::spawn(async move { - while let Ok(Some(next_message)) = stream.message().await { - if let Err(e) = tx.send(Ok(next_message)).await { - error!("failed to send '{call}' response message: {e}"); - } - } - }); - }); - Ok(Response::new(rx)) - } - Err(status) => Err(status) - } + Ok(Self { client: Arc::new(Mutex::new(client)), p2p_client, share_chain }) } } #[tonic::async_trait] -impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc +impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc + where S: ShareChain + Send + Sync + 'static, { type ListHeadersStream = mpsc::Receiver>; async fn list_headers(&self, request: Request) -> Result, Status> { @@ -164,15 +178,15 @@ impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc } async fn submit_block(&self, request: Request) -> Result, Status> { - let grpc_block = request.into_inner(); + let grpc_block = request.get_ref(); let block = blocks::Block::try_from(grpc_block.clone()) .map_err(|e| { Status::internal(e) })?; - // validate block + // validate block with other peers let validation_result = self.p2p_client.validate_block(block.clone().into()).await .map_err(|error| Status::internal(error.to_string()))?; if !validation_result { - return Err(Status::failed_precondition("invalid block")); // TODO: maybe another error would be better + return 
Err(Status::invalid_argument("invalid block")); } // Check block's difficulty compared to the latest network one to increase the probability @@ -200,8 +214,6 @@ impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc })); } - - let request = grpc_block.into_request(); match proxy_simple_result!(self, submit_block, request) { Ok(resp) => { info!("Block found and sent successfully! (rewards will be paid out)"); diff --git a/crates/sha_p2pool/src/server/grpc/p2pool.rs b/crates/sha_p2pool/src/server/grpc/p2pool.rs index d8220125..c57bbe17 100644 --- a/crates/sha_p2pool/src/server/grpc/p2pool.rs +++ b/crates/sha_p2pool/src/server/grpc/p2pool.rs @@ -1,7 +1,6 @@ -use std::collections::HashMap; use std::sync::Arc; -use minotari_app_grpc::tari_rpc::{GetNewBlockRequest, GetNewBlockResponse, GetNewBlockTemplateWithCoinbasesRequest, NewBlockCoinbase, NewBlockTemplateRequest, PowAlgo}; +use minotari_app_grpc::tari_rpc::{GetNewBlockRequest, GetNewBlockResponse, GetNewBlockTemplateWithCoinbasesRequest, NewBlockTemplateRequest, PowAlgo}; use minotari_app_grpc::tari_rpc::base_node_client::BaseNodeClient; use minotari_app_grpc::tari_rpc::pow_algo::PowAlgos; use minotari_app_grpc::tari_rpc::sha_p2_pool_server::ShaP2Pool; @@ -31,36 +30,6 @@ impl ShaP2PoolGrpc Ok(Self { client: Arc::new(Mutex::new(client)), share_chain }) } - - // TODO: complete implementation to find the right shares - async fn generate_shares(&self, request: &GetNewBlockRequest, reward: u64) -> Vec { - let mut result = vec![]; - let mut miners = HashMap::::new(); // target wallet address -> hash rate - - // TODO: remove, only for testing now, get miners from outside of this module using P2P network/sharechain - miners.insert(request.wallet_payment_address.clone(), 100.0); - miners.insert("260304a3699f8911c3d949b2eb0394595c8041a36fa13320fa2395b4090ae573a430ac21c5d087ecfcd1922e6ef58cd3f2a1eef2fcbd17e2374a09e0c68036fe6c5f91".to_string(), 100.0); - - // calculate full hash rate and shares - let full_hash_rate: f64 
= miners.values().sum(); - miners.iter() - .map(|(addr, rate)| (addr, rate / full_hash_rate)) - .filter(|(_, share)| *share > 0.0) - .for_each(|(addr, share)| { - let curr_reward = ((reward as f64) * share) as u64; - // TODO: check if still needed - // info!("{addr} -> SHARE: {share:?}, REWARD: {curr_reward:?}"); - result.push(NewBlockCoinbase { - address: addr.clone(), - value: curr_reward, - stealth_payment: false, - revealed_value_proof: true, - coinbase_extra: vec![], - }); - }); - - result - } } #[tonic::async_trait] @@ -68,7 +37,7 @@ impl ShaP2Pool for ShaP2PoolGrpc where S: ShareChain + Send + Sync + 'static { async fn get_new_block(&self, request: Request) -> Result, Status> { - let template_request = request.into_inner(); + // TODO: revisit GetNewBlockRequest as we get shares from share chain and not including all the time the requested wallet address let mut pow_algo = PowAlgo::default(); pow_algo.set_pow_algo(PowAlgos::Sha3x); @@ -85,8 +54,9 @@ impl ShaP2Pool for ShaP2PoolGrpc let reward = miner_data.reward; // request new block template with shares as coinbases - let shares = self.generate_shares(&template_request, reward).await; + let shares = self.share_chain.generate_shares(reward); let share_count = shares.len(); + let response = self.client.lock().await .get_new_block_template_with_coinbases(GetNewBlockTemplateWithCoinbasesRequest { algo: Some(pow_algo), @@ -96,9 +66,7 @@ impl ShaP2Pool for ShaP2PoolGrpc // set target difficulty let miner_data = response.clone().miner_data.ok_or_else(|| Status::internal("missing miner data"))?; - // target difficulty is always: `original difficulty` / `number of shares` - // let target_difficulty = miner_data.target_difficulty / share_count as u64; // TODO: uncomment this - let target_difficulty = miner_data.target_difficulty / (share_count as u64 * 10); // TODO: remove this + let target_difficulty = miner_data.target_difficulty / share_count as u64; Ok(Response::new(GetNewBlockResponse { block: Some(response), 
diff --git a/crates/sha_p2pool/src/server/p2p/client.rs b/crates/sha_p2pool/src/server/p2p/client.rs index 3be5d8ee..3f29b164 100644 --- a/crates/sha_p2pool/src/server/p2p/client.rs +++ b/crates/sha_p2pool/src/server/p2p/client.rs @@ -1,7 +1,7 @@ use std::sync::Arc; -use std::time::Duration; +use std::time::{Duration, Instant}; -use log::{info, warn}; +use log::{error, info, warn}; use thiserror::Error; use tokio::select; use tokio::sync::broadcast; @@ -58,6 +58,8 @@ impl ServiceClient { pub async fn validate_block(&self, block: Block) -> Result { info!("[CLIENT] Start block validation"); + let start = Instant::now(); + // send request to validate block self.channels.validate_block_sender.send(ValidateBlockRequest::new(block.clone())) .map_err(|error| @@ -65,36 +67,40 @@ impl ServiceClient { )?; // calculate how many validations we need (more than 2/3 of peers should validate) - let peer_count = self.peer_store.peer_count() + 1; // TODO: remove + 1 - info!("[CLIENT] Peer count: {peer_count:?}"); - // TODO: calculate well, if there are 3 peers (including us), then min validation count is: - // TODO: ((peer_count + 1 / 3) * 2) - 1 rounded to an int - let min_validation_count = (peer_count / 3) * 2; + let peer_count = self.peer_store.peer_count() as f64 + 1.0; + let min_validation_count = (peer_count / 3.0) * 2.0; + let min_validation_count = min_validation_count.round() as u64; info!("[CLIENT] Minimum validation count: {min_validation_count:?}"); // wait for the validations to come let timeout = Duration::from_secs(30); let mut validate_receiver = self.channels.validate_block_receiver.resubscribe(); let mut validation_count = 0; - loop { + while validation_count < min_validation_count { select! 
{ _ = sleep(timeout) => { warn!("Timing out waiting for validations!"); break; } result = validate_receiver.recv() => { - let validate_result = result.map_err(ClientError::ChannelReceive)?; - info!("New validation: {validate_result:?}"); - if validate_result.valid && validate_result.block == block { - validation_count+=1; - } - if validation_count >= min_validation_count { - break; + match result { + Ok(validate_result) => { + info!("New validation: {validate_result:?}"); + if validate_result.valid && validate_result.block == block { + validation_count+=1; + } + } + Err(error) => { + error!("Error during receiving: {error:?}"); + } } } } } + let validation_time = Instant::now().duration_since(start); + info!("Validation took {:?}", validation_time); + Ok(validation_count >= min_validation_count) } } \ No newline at end of file diff --git a/crates/sha_p2pool/src/server/p2p/p2p.rs b/crates/sha_p2pool/src/server/p2p/p2p.rs index 98be0fa9..24128c31 100644 --- a/crates/sha_p2pool/src/server/p2p/p2p.rs +++ b/crates/sha_p2pool/src/server/p2p/p2p.rs @@ -38,6 +38,7 @@ pub struct Service peer_store: Arc, // service client related channels + // TODO: consider mpsc channels instead of broadcast to not miss any message (might drop) client_validate_block_req_tx: broadcast::Sender, client_validate_block_req_rx: broadcast::Receiver, client_validate_block_res_tx: broadcast::Sender, @@ -54,8 +55,8 @@ impl Service ); // client related channels - let (validate_req_tx, validate_req_rx) = broadcast::channel::(1); - let (validate_res_tx, validate_res_rx) = broadcast::channel::(1); + let (validate_req_tx, validate_req_rx) = broadcast::channel::(1000); + let (validate_res_tx, validate_res_rx) = broadcast::channel::(1000); Ok(Self { swarm, diff --git a/crates/sha_p2pool/src/server/server.rs b/crates/sha_p2pool/src/server/server.rs index 274a0836..c775930c 100644 --- a/crates/sha_p2pool/src/server/server.rs +++ b/crates/sha_p2pool/src/server/server.rs @@ -41,7 +41,7 @@ pub struct Server { 
config: config::Config, p2p_service: p2p::Service, - base_node_grpc_service: BaseNodeServer, + base_node_grpc_service: BaseNodeServer>, p2pool_grpc_service: ShaP2PoolServer>, } @@ -56,6 +56,7 @@ impl Server let base_node_grpc_service = TariBaseNodeGrpc::new( config.base_node_address.clone(), p2p_service.client(), + share_chain.clone(), ).await.map_err(Error::GRPC)?; let base_node_grpc_server = BaseNodeServer::new(base_node_grpc_service); @@ -66,7 +67,7 @@ impl Server } pub async fn start_grpc( - base_node_service: BaseNodeServer, + base_node_service: BaseNodeServer>, p2pool_service: ShaP2PoolServer>, grpc_port: u16, ) -> Result<(), Error> { diff --git a/crates/sha_p2pool/src/sharechain/in_memory.rs b/crates/sha_p2pool/src/sharechain/in_memory.rs index 7941e35e..a3723b8e 100644 --- a/crates/sha_p2pool/src/sharechain/in_memory.rs +++ b/crates/sha_p2pool/src/sharechain/in_memory.rs @@ -1,4 +1,7 @@ +use std::collections::HashMap; + use async_trait::async_trait; +use minotari_app_grpc::tari_rpc::NewBlockCoinbase; use rand::random; use crate::sharechain::{Block, ShareChain, ShareChainResult}; @@ -22,4 +25,34 @@ impl ShareChain for InMemoryShareChain { //TODO: implement Ok(random()) } + + fn generate_shares(&self, reward: u64) -> Vec { + let mut result = vec![]; + // TODO: get miners with hashrates from chain + let mut miners = HashMap::::new(); // target wallet address -> hash rate + + // TODO: remove, only for testing now, get miners from chain + miners.insert("260396abcc66770f67ca4cdd296cc133e63b88578f3c362d4fa0ff7b05da1bc5a74c78a415009fa49eda8fd8721c20fb4617a833aa630c9790157b6b6f716f0ac72e2e".to_string(), 100.0); + miners.insert("260304a3699f8911c3d949b2eb0394595c8041a36fa13320fa2395b4090ae573a430ac21c5d087ecfcd1922e6ef58cd3f2a1eef2fcbd17e2374a09e0c68036fe6c5f91".to_string(), 100.0); + + // calculate full hash rate and shares + let full_hash_rate: f64 = miners.values().sum(); + miners.iter() + .map(|(addr, rate)| (addr, rate / full_hash_rate)) + .filter(|(_, share)| 
*share > 0.0) + .for_each(|(addr, share)| { + let curr_reward = ((reward as f64) * share) as u64; + // TODO: check if still needed + // info!("{addr} -> SHARE: {share:?}, REWARD: {curr_reward:?}"); + result.push(NewBlockCoinbase { + address: addr.clone(), + value: curr_reward, + stealth_payment: false, + revealed_value_proof: true, + coinbase_extra: vec![], + }); + }); + + result + } } \ No newline at end of file diff --git a/crates/sha_p2pool/src/sharechain/mod.rs b/crates/sha_p2pool/src/sharechain/mod.rs index 78556e45..de3e61f6 100644 --- a/crates/sha_p2pool/src/sharechain/mod.rs +++ b/crates/sha_p2pool/src/sharechain/mod.rs @@ -1,4 +1,5 @@ use async_trait::async_trait; +use minotari_app_grpc::tari_rpc::NewBlockCoinbase; use serde::{Deserialize, Serialize}; use tari_common_types::types::BlockHash; use tari_core::blocks; @@ -43,4 +44,6 @@ pub trait ShareChain { async fn submit_block(&self, block: Block) -> ShareChainResult<()>; async fn tip_height(&self) -> ShareChainResult; + + fn generate_shares(&self, reward: u64) -> Vec; } \ No newline at end of file From 1b1302f4bb795a8fc9d636a9791cc772453e1ff9 Mon Sep 17 00:00:00 2001 From: richardb Date: Wed, 19 Jun 2024 23:40:52 +0200 Subject: [PATCH 13/43] share chain implemented + block broadcast done too --- crates/sha_p2pool/Cargo.toml | 3 + crates/sha_p2pool/src/main.rs | 2 +- .../sha_p2pool/src/server/grpc/base_node.rs | 69 +------- crates/sha_p2pool/src/server/grpc/p2pool.rs | 106 +++++++++++- crates/sha_p2pool/src/server/mod.rs | 2 +- crates/sha_p2pool/src/server/p2p/client.rs | 23 ++- crates/sha_p2pool/src/server/p2p/error.rs | 2 +- crates/sha_p2pool/src/server/p2p/messages.rs | 18 +- crates/sha_p2pool/src/server/p2p/mod.rs | 2 +- crates/sha_p2pool/src/server/p2p/p2p.rs | 90 +++++++--- crates/sha_p2pool/src/server/server.rs | 2 +- crates/sha_p2pool/src/sharechain/block.rs | 119 +++++++++++++ crates/sha_p2pool/src/sharechain/error.rs | 26 +++ crates/sha_p2pool/src/sharechain/in_memory.rs | 159 ++++++++++++++++-- 
crates/sha_p2pool/src/sharechain/mod.rs | 47 ++---- 15 files changed, 504 insertions(+), 166 deletions(-) create mode 100644 crates/sha_p2pool/src/sharechain/block.rs create mode 100644 crates/sha_p2pool/src/sharechain/error.rs diff --git a/crates/sha_p2pool/Cargo.toml b/crates/sha_p2pool/Cargo.toml index 8a78aa16..7b8d2f53 100644 --- a/crates/sha_p2pool/Cargo.toml +++ b/crates/sha_p2pool/Cargo.toml @@ -14,6 +14,7 @@ minotari_node_grpc_client = { git = "https://github.com/ksrichard/tari.git", bra tari_common_types = { git = "https://github.com/ksrichard/tari.git", branch = "p2pool" } tari_core = { git = "https://github.com/ksrichard/tari.git", branch = "p2pool" } +tari_utilities = { version = "0.7", features = ["borsh"] } libp2p = { version = "0.53.2", features = [ "dns", "identify", @@ -41,5 +42,7 @@ async-trait = "0.1.80" serde_cbor = "0.11.2" rand = "0.8.5" dashmap = "5.5.3" +blake2 = "0.10.6" +digest = "0.10.7" diff --git a/crates/sha_p2pool/src/main.rs b/crates/sha_p2pool/src/main.rs index 6ad88f82..825fc771 100644 --- a/crates/sha_p2pool/src/main.rs +++ b/crates/sha_p2pool/src/main.rs @@ -7,7 +7,7 @@ mod sharechain; async fn main() -> anyhow::Result<()> { env_logger::init(); let config = server::Config::builder().build(); - let share_chain = InMemoryShareChain::new(); + let share_chain = InMemoryShareChain::default(); let mut server = server::Server::new(config, share_chain).await?; server.start().await?; Ok(()) diff --git a/crates/sha_p2pool/src/server/grpc/base_node.rs b/crates/sha_p2pool/src/server/grpc/base_node.rs index f972f6d3..21b4f71a 100644 --- a/crates/sha_p2pool/src/server/grpc/base_node.rs +++ b/crates/sha_p2pool/src/server/grpc/base_node.rs @@ -1,29 +1,17 @@ -use std::future::Future; -use std::ops::Deref; use std::sync::Arc; use libp2p::futures::channel::mpsc; use libp2p::futures::SinkExt; -use libp2p::Swarm; -use log::{error, info, warn}; -use minotari_app_grpc::conversions::*; +use log::{error, warn}; use minotari_app_grpc::tari_rpc; use 
minotari_app_grpc::tari_rpc::{Block, BlockBlobRequest, BlockGroupRequest, BlockGroupResponse, BlockHeaderResponse, BlockHeight, BlockTimingResponse, ConsensusConstants, Empty, FetchMatchingUtxosRequest, GetActiveValidatorNodesRequest, GetBlocksRequest, GetHeaderByHashRequest, GetMempoolTransactionsRequest, GetNewBlockBlobResult, GetNewBlockResult, GetNewBlockTemplateWithCoinbasesRequest, GetNewBlockWithCoinbasesRequest, GetPeersRequest, GetShardKeyRequest, GetShardKeyResponse, GetSideChainUtxosRequest, GetTemplateRegistrationsRequest, HeightRequest, HistoricalBlock, ListConnectedPeersResponse, ListHeadersRequest, MempoolStatsResponse, NetworkStatusResponse, NewBlockCoinbase, NewBlockTemplate, NewBlockTemplateRequest, NewBlockTemplateResponse, NodeIdentity, PowAlgo, SearchKernelsRequest, SearchUtxosRequest, SoftwareUpdate, StringValue, SubmitBlockResponse, SubmitTransactionRequest, SubmitTransactionResponse, SyncInfoResponse, SyncProgressResponse, TipInfoResponse, TransactionStateRequest, TransactionStateResponse, ValueAtHeightResponse}; use minotari_app_grpc::tari_rpc::base_node_client::BaseNodeClient; -use minotari_app_grpc::tari_rpc::pow_algo::PowAlgos; use minotari_node_grpc_client::BaseNodeGrpcClient; -use tari_common_types::tari_address::TariAddress; -use tari_core::blocks; -use tari_core::proof_of_work::sha3x_difficulty; -use tari_core::transactions::generate_coinbase; -use tari_core::transactions::key_manager::{create_memory_db_key_manager_with_range_proof_size, MemoryDbKeyManager}; -use tari_core::transactions::tari_amount::MicroMinotari; use tokio::sync::Mutex; -use tonic::{IntoRequest, Request, Response, Status, Streaming}; +use tonic::{Request, Response, Status, Streaming}; use crate::server::grpc::error::{Error, TonicError}; use crate::server::p2p; -use crate::server::p2p::{ClientError, ServerNetworkBehaviour}; use crate::sharechain::ShareChain; const LIST_HEADERS_PAGE_SIZE: usize = 10; @@ -32,6 +20,7 @@ const GET_TOKENS_IN_CIRCULATION_PAGE_SIZE: usize 
= 1_000; const GET_DIFFICULTY_PAGE_SIZE: usize = 1_000; +#[macro_export] macro_rules! proxy_simple_result { ($self:ident, $call:ident, $request:ident) => { match $self.client.lock().await.$call($request.into_inner()).await { @@ -178,57 +167,7 @@ impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc } async fn submit_block(&self, request: Request) -> Result, Status> { - let grpc_block = request.get_ref(); - let block = blocks::Block::try_from(grpc_block.clone()) - .map_err(|e| { Status::internal(e) })?; - - // validate block with other peers - let validation_result = self.p2p_client.validate_block(block.clone().into()).await - .map_err(|error| Status::internal(error.to_string()))?; - if !validation_result { - return Err(Status::invalid_argument("invalid block")); - } - - // Check block's difficulty compared to the latest network one to increase the probability - // to get the block accepted (and also a block with lower difficulty than latest one is invalid anyway). - let request_block_difficulty = sha3x_difficulty(&block.header) - .map_err(|error| { Status::internal(error.to_string()) })?; - let mut network_difficulty_stream = self.client.lock().await.get_network_difficulty(HeightRequest { - from_tip: 0, - start_height: block.header.height - 1, - end_height: block.header.height, - }).await?.into_inner(); - let mut network_difficulty_matches = false; - while let Ok(Some(diff_resp)) = network_difficulty_stream.message().await { - if block.header.height == diff_resp.height + 1 - && request_block_difficulty.as_u64() > diff_resp.difficulty { - network_difficulty_matches = true; - } - } - - if !network_difficulty_matches { - // TODO: simply append new block if valid to sharechain showing that it is not accepted by base node - // TODO: but still need to present on sharechain - return Ok(Response::new(SubmitBlockResponse { - block_hash: vec![], // TODO: get from sharechain - })); - } - - match proxy_simple_result!(self, submit_block, request) { - Ok(resp) => { - 
info!("Block found and sent successfully! (rewards will be paid out)"); - // TODO: append new block if valid to sharechain with a flag or something that shows - // TODO: that this block is accepted, so paid out - Ok(resp) - } - Err(_) => { - // TODO: simply append new block if valid to sharechain showing that it is not accepted by base node - // TODO: but still need to present on sharechain - Ok(Response::new(SubmitBlockResponse { - block_hash: vec![], // TODO: get from sharechain - })) - } - } + proxy_simple_result!(self, submit_block, request) } async fn submit_block_blob(&self, request: Request) -> Result, Status> { diff --git a/crates/sha_p2pool/src/server/grpc/p2pool.rs b/crates/sha_p2pool/src/server/grpc/p2pool.rs index c57bbe17..a984258d 100644 --- a/crates/sha_p2pool/src/server/grpc/p2pool.rs +++ b/crates/sha_p2pool/src/server/grpc/p2pool.rs @@ -1,34 +1,40 @@ use std::sync::Arc; -use minotari_app_grpc::tari_rpc::{GetNewBlockRequest, GetNewBlockResponse, GetNewBlockTemplateWithCoinbasesRequest, NewBlockTemplateRequest, PowAlgo}; +use log::info; +use minotari_app_grpc::tari_rpc::{GetNewBlockRequest, GetNewBlockResponse, GetNewBlockTemplateWithCoinbasesRequest, HeightRequest, NewBlockTemplateRequest, PowAlgo, SubmitBlockRequest, SubmitBlockResponse}; use minotari_app_grpc::tari_rpc::base_node_client::BaseNodeClient; use minotari_app_grpc::tari_rpc::pow_algo::PowAlgos; use minotari_app_grpc::tari_rpc::sha_p2_pool_server::ShaP2Pool; use minotari_node_grpc_client::BaseNodeGrpcClient; +use tari_core::proof_of_work::sha3x_difficulty; use tokio::sync::Mutex; use tonic::{Request, Response, Status}; use crate::server::grpc::error::Error; use crate::server::grpc::error::TonicError; +use crate::server::p2p; use crate::sharechain::ShareChain; +const MIN_SHARE_COUNT: usize = 10; + pub struct ShaP2PoolGrpc where S: ShareChain + Send + Sync + 'static { client: Arc>>, + p2p_client: p2p::ServiceClient, share_chain: Arc, } impl ShaP2PoolGrpc where S: ShareChain + Send + Sync + 
'static { - pub async fn new(base_node_address: String, share_chain: Arc) -> Result { + pub async fn new(base_node_address: String, p2p_client: p2p::ServiceClient, share_chain: Arc) -> Result { // TODO: add retry mechanism to try at least 3 times before failing let client = BaseNodeGrpcClient::connect(base_node_address) .await .map_err(|e| Error::Tonic(TonicError::Transport(e)))?; - Ok(Self { client: Arc::new(Mutex::new(client)), share_chain }) + Ok(Self { client: Arc::new(Mutex::new(client)), p2p_client, share_chain }) } } @@ -36,8 +42,7 @@ impl ShaP2PoolGrpc impl ShaP2Pool for ShaP2PoolGrpc where S: ShareChain + Send + Sync + 'static { - async fn get_new_block(&self, request: Request) -> Result, Status> { - // TODO: revisit GetNewBlockRequest as we get shares from share chain and not including all the time the requested wallet address + async fn get_new_block(&self, _request: Request) -> Result, Status> { let mut pow_algo = PowAlgo::default(); pow_algo.set_pow_algo(PowAlgos::Sha3x); @@ -54,8 +59,12 @@ impl ShaP2Pool for ShaP2PoolGrpc let reward = miner_data.reward; // request new block template with shares as coinbases - let shares = self.share_chain.generate_shares(reward); - let share_count = shares.len(); + let shares = self.share_chain.generate_shares(reward).await; + let share_count = if shares.len() < MIN_SHARE_COUNT { + MIN_SHARE_COUNT + } else { + shares.len() + }; let response = self.client.lock().await .get_new_block_template_with_coinbases(GetNewBlockTemplateWithCoinbasesRequest { @@ -73,4 +82,87 @@ impl ShaP2Pool for ShaP2PoolGrpc target_difficulty, })) } + + async fn submit_block(&self, request: Request) -> Result, Status> { + let grpc_block = request.get_ref(); + let grpc_request_payload = grpc_block.block.clone() + .ok_or_else(|| Status::internal("missing block in request"))?; + let mut block = self.share_chain.new_block(grpc_block).await.map_err(|error| Status::internal(error.to_string()))?; + + // validate block with other peers + let 
validation_result = self.p2p_client.validate_block(&block).await + .map_err(|error| Status::internal(error.to_string()))?; + if !validation_result { + return Err(Status::invalid_argument("invalid block")); + } + + let origin_block_header = block.original_block_header().as_ref() + .ok_or_else(|| { Status::internal("missing original block header") })?; + + // Check block's difficulty compared to the latest network one to increase the probability + // to get the block accepted (and also a block with lower difficulty than latest one is invalid anyway). + let request_block_difficulty = sha3x_difficulty(&origin_block_header) + .map_err(|error| { Status::internal(error.to_string()) })?; + let mut network_difficulty_stream = self.client.lock().await.get_network_difficulty(HeightRequest { + from_tip: 0, + start_height: origin_block_header.height - 1, + end_height: origin_block_header.height, + }).await?.into_inner(); + let mut network_difficulty_matches = false; + while let Ok(Some(diff_resp)) = network_difficulty_stream.message().await { + if origin_block_header.height == diff_resp.height + 1 + && request_block_difficulty.as_u64() > diff_resp.difficulty { + network_difficulty_matches = true; + } + } + + if !network_difficulty_matches { + // TODO: simply append new block if valid to sharechain showing that it is not accepted by base node + // TODO: but still need to present on sharechain + block.set_sent_to_main_chain(false); + self.share_chain.submit_block(&block).await + .map_err(|error| Status::internal(error.to_string()))?; + info!("Broadcast block with height: {:?}", block.height()); + self.p2p_client.broadcast_block(&block).await + .map_err(|error| Status::internal(error.to_string()))?; + + return Ok(Response::new(SubmitBlockResponse { + block_hash: block.hash().to_vec(), + })); + } + + // submit block to base node + let grpc_request = Request::new(grpc_request_payload); + match self.client.lock().await.submit_block(grpc_request).await { + Ok(resp) => { + info!("Block 
found and sent successfully! (rewards will be paid out)"); + + // TODO: append new block if valid to sharechain with a flag or something that shows + // TODO: that this block is accepted, so paid out + block.set_sent_to_main_chain(true); + self.share_chain.submit_block(&block).await + .map_err(|error| Status::internal(error.to_string()))?; + info!("Broadcast block with height: {:?}", block.height()); + self.p2p_client.broadcast_block(&block).await + .map_err(|error| Status::internal(error.to_string()))?; + + Ok(resp) + } + Err(_) => { + info!("submit_block stop - block send failure"); + // TODO: simply append new block if valid to sharechain showing that it is not accepted by base node + // TODO: but still need to present on sharechain + block.set_sent_to_main_chain(false); + self.share_chain.submit_block(&block).await + .map_err(|error| Status::internal(error.to_string()))?; + info!("Broadcast block with height: {:?}", block.height()); + self.p2p_client.broadcast_block(&block).await + .map_err(|error| Status::internal(error.to_string()))?; + + Ok(Response::new(SubmitBlockResponse { + block_hash: block.hash().to_vec(), + })) + } + } + } } \ No newline at end of file diff --git a/crates/sha_p2pool/src/server/mod.rs b/crates/sha_p2pool/src/server/mod.rs index 355266da..6d9911b8 100644 --- a/crates/sha_p2pool/src/server/mod.rs +++ b/crates/sha_p2pool/src/server/mod.rs @@ -5,4 +5,4 @@ mod config; mod server; pub mod grpc; -mod p2p; +pub mod p2p; diff --git a/crates/sha_p2pool/src/server/p2p/client.rs b/crates/sha_p2pool/src/server/p2p/client.rs index 3f29b164..e325b72c 100644 --- a/crates/sha_p2pool/src/server/p2p/client.rs +++ b/crates/sha_p2pool/src/server/p2p/client.rs @@ -4,13 +4,13 @@ use std::time::{Duration, Instant}; use log::{error, info, warn}; use thiserror::Error; use tokio::select; -use tokio::sync::broadcast; +use tokio::sync::{broadcast, mpsc}; use tokio::sync::broadcast::error::{RecvError, SendError}; use tokio::time::sleep; use 
crate::server::p2p::messages::{ValidateBlockRequest, ValidateBlockResult}; use crate::server::p2p::peer_store::PeerStore; -use crate::sharechain::Block; +use crate::sharechain::block::Block; #[derive(Error, Debug)] pub enum ClientError { @@ -24,21 +24,26 @@ pub enum ClientError { pub enum ChannelSendError { #[error("Send ValidateBlockRequest error: {0}")] SendValidateBlockRequest(#[from] SendError), + #[error("Send broadcast block error: {0}")] + SendBroadcastBlock(#[from] SendError), } pub struct ServiceClientChannels { validate_block_sender: broadcast::Sender, validate_block_receiver: broadcast::Receiver, + broadcast_block_sender: broadcast::Sender, } impl ServiceClientChannels { pub fn new( validate_block_sender: broadcast::Sender, validate_block_receiver: broadcast::Receiver, + broadcast_block_sender: broadcast::Sender, ) -> Self { Self { validate_block_sender, validate_block_receiver, + broadcast_block_sender, } } } @@ -56,7 +61,16 @@ impl ServiceClient { Self { channels, peer_store } } - pub async fn validate_block(&self, block: Block) -> Result { + pub async fn broadcast_block(&self, block: &Block) -> Result<(), ClientError> { + self.channels.broadcast_block_sender.send(block.clone()) + .map_err(|error| + ClientError::ChannelSend(Box::new(ChannelSendError::SendBroadcastBlock(error))) + )?; + + Ok(()) + } + + pub async fn validate_block(&self, block: &Block) -> Result { info!("[CLIENT] Start block validation"); let start = Instant::now(); @@ -76,6 +90,7 @@ impl ServiceClient { let timeout = Duration::from_secs(30); let mut validate_receiver = self.channels.validate_block_receiver.resubscribe(); let mut validation_count = 0; + let block = block.clone(); while validation_count < min_validation_count { select! 
{ _ = sleep(timeout) => { @@ -86,7 +101,7 @@ impl ServiceClient { match result { Ok(validate_result) => { info!("New validation: {validate_result:?}"); - if validate_result.valid && validate_result.block == block { + if validate_result.valid && validate_result.block == block.clone() { validation_count+=1; } } diff --git a/crates/sha_p2pool/src/server/p2p/error.rs b/crates/sha_p2pool/src/server/p2p/error.rs index 9619bf6c..5295830c 100644 --- a/crates/sha_p2pool/src/server/p2p/error.rs +++ b/crates/sha_p2pool/src/server/p2p/error.rs @@ -12,7 +12,7 @@ pub enum Error { #[error("CBOR serialize/deserialize error: {0}")] SerializeDeserialize(#[from] serde_cbor::Error), #[error("Share chain error: {0}")] - ShareChain(#[from] sharechain::Error), + ShareChain(#[from] sharechain::error::Error), #[error("Share chain error: {0}")] Client(#[from] p2p::client::ClientError), } diff --git a/crates/sha_p2pool/src/server/p2p/messages.rs b/crates/sha_p2pool/src/server/p2p/messages.rs index 7e24e308..404d7eb8 100644 --- a/crates/sha_p2pool/src/server/p2p/messages.rs +++ b/crates/sha_p2pool/src/server/p2p/messages.rs @@ -1,25 +1,25 @@ -use libp2p::gossipsub::Message; use libp2p::PeerId; use serde::{Deserialize, Serialize}; use crate::server::p2p::Error; -use crate::sharechain::Block; +use crate::sharechain::block::Block; +#[macro_export] macro_rules! 
impl_conversions { ($type:ty) => { - impl TryFrom for $type { - type Error = Error; + impl TryFrom for $type { + type Error = $crate::server::p2p::Error; - fn try_from(message: Message) -> Result { - deserialize_message::<$type>(message.data.as_slice()) + fn try_from(message: libp2p::gossipsub::Message) -> Result { + $crate::server::p2p::messages::deserialize_message::<$type>(message.data.as_slice()) } } impl TryInto> for $type { - type Error = Error; + type Error = $crate::server::p2p::Error; fn try_into(self) -> Result, Self::Error> { - serialize_message(&self) + $crate::server::p2p::messages::serialize_message(&self) } } }; @@ -54,7 +54,7 @@ impl ValidateBlockRequest { pub fn new(block: Block) -> Self { Self(block) } - + pub fn block(&self) -> Block { self.0.clone() } diff --git a/crates/sha_p2pool/src/server/p2p/mod.rs b/crates/sha_p2pool/src/server/p2p/mod.rs index ba9697e9..4b08295f 100644 --- a/crates/sha_p2pool/src/server/p2p/mod.rs +++ b/crates/sha_p2pool/src/server/p2p/mod.rs @@ -4,7 +4,7 @@ pub use p2p::*; mod p2p; mod error; -mod messages; +pub mod messages; mod peer_store; mod client; diff --git a/crates/sha_p2pool/src/server/p2p/p2p.rs b/crates/sha_p2pool/src/server/p2p/p2p.rs index 24128c31..ecba9345 100644 --- a/crates/sha_p2pool/src/server/p2p/p2p.rs +++ b/crates/sha_p2pool/src/server/p2p/p2p.rs @@ -1,26 +1,28 @@ use std::hash::{DefaultHasher, Hash, Hasher}; use std::sync::Arc; -use std::time::Duration; +use std::time::{Duration, Instant}; use libp2p::{gossipsub, mdns, noise, Swarm, tcp, yamux}; use libp2p::futures::StreamExt; -use libp2p::gossipsub::{Event, IdentTopic, Message, PublishError, Topic}; +use libp2p::gossipsub::{Event, IdentTopic, Message, MessageId, PublishError, Topic}; use libp2p::mdns::tokio::Tokio; use libp2p::swarm::{NetworkBehaviour, SwarmEvent}; use log::{error, info, warn}; use tokio::{io, select}; -use tokio::sync::{broadcast, Mutex}; +use tokio::sync::{broadcast, mpsc, Mutex}; use tokio::sync::broadcast::error::RecvError; 
use crate::server::config; use crate::server::p2p::{Error, LibP2PError, messages, ServiceClient, ServiceClientChannels}; use crate::server::p2p::messages::{PeerInfo, ValidateBlockRequest, ValidateBlockResult}; use crate::server::p2p::peer_store::PeerStore; -use crate::sharechain::ShareChain; +use crate::sharechain::{ShareChain, ShareChainResult}; +use crate::sharechain::block::Block; const PEER_INFO_TOPIC: &str = "peer_info"; const BLOCK_VALIDATION_REQUESTS_TOPIC: &str = "block_validation_requests"; const BLOCK_VALIDATION_RESULTS_TOPIC: &str = "block_validation_results"; +const NEW_BLOCK_TOPIC: &str = "new_block"; #[derive(NetworkBehaviour)] pub struct ServerNetworkBehaviour { @@ -43,6 +45,8 @@ pub struct Service client_validate_block_req_rx: broadcast::Receiver, client_validate_block_res_tx: broadcast::Sender, client_validate_block_res_rx: broadcast::Receiver, + client_broadcast_block_tx: broadcast::Sender, + client_broadcast_block_rx: broadcast::Receiver, } impl Service @@ -57,6 +61,7 @@ impl Service // client related channels let (validate_req_tx, validate_req_rx) = broadcast::channel::(1000); let (validate_res_tx, validate_res_rx) = broadcast::channel::(1000); + let (broadcast_block_tx, broadcast_block_rx) = broadcast::channel::(1000); Ok(Self { swarm, @@ -67,6 +72,8 @@ impl Service client_validate_block_req_rx: validate_req_rx, client_validate_block_res_tx: validate_res_tx, client_validate_block_res_rx: validate_res_rx, + client_broadcast_block_tx: broadcast_block_tx, + client_broadcast_block_rx: broadcast_block_rx, }) } @@ -83,7 +90,11 @@ impl Service // gossipsub let message_id_fn = |message: &gossipsub::Message| { let mut s = DefaultHasher::new(); + if let Some(soure_peer) = message.source { + soure_peer.to_bytes().hash(&mut s); + } message.data.hash(&mut s); + Instant::now().hash(&mut s); gossipsub::MessageId::from(s.finish().to_string()) }; let gossipsub_config = gossipsub::ConfigBuilder::default() @@ -118,6 +129,7 @@ impl Service 
ServiceClientChannels::new( self.client_validate_block_req_tx.clone(), self.client_validate_block_res_rx.resubscribe(), + self.client_broadcast_block_tx.clone(), ), self.peer_store.clone(), ) @@ -184,6 +196,25 @@ impl Service Ok(()) } + async fn broadcast_block(&mut self, result: Result) { + match result { + Ok(block) => { + let block_raw_result: Result, Error> = block.try_into(); + match block_raw_result { + Ok(block_raw) => { + match self.swarm.behaviour_mut().gossipsub.publish(IdentTopic::new(NEW_BLOCK_TOPIC), block_raw) + .map_err(|error| Error::LibP2P(LibP2PError::Publish(error))) { + Ok(_) => {} + Err(error) => error!("Failed to broadcast new block: {error:?}"), + } + } + Err(error) => error!("Failed to convert block to bytes: {error:?}"), + } + } + Err(error) => error!("Failed to receive new block: {error:?}"), + } + } + fn subscribe(&mut self, topic: &str) { self.swarm.behaviour_mut().gossipsub.subscribe(&IdentTopic::new(topic.clone())) .expect("must be subscribed to topic"); @@ -193,6 +224,7 @@ impl Service self.subscribe(PEER_INFO_TOPIC); self.subscribe(BLOCK_VALIDATION_REQUESTS_TOPIC); self.subscribe(BLOCK_VALIDATION_RESULTS_TOPIC); + self.subscribe(NEW_BLOCK_TOPIC); } async fn handle_new_message(&mut self, message: Message) { @@ -245,6 +277,19 @@ impl Service } } } + NEW_BLOCK_TOPIC => { + match Block::try_from(message) { + Ok(payload) => { + info!("New block from broadcast: {:?}", &payload); + if let Err(error) = self.share_chain.submit_block(&payload).await { + error!("Could not add new block to local share chain: {error:?}"); + } + } + Err(error) => { + error!("Can't deserialize broadcast block payload: {:?}", error); + } + } + } &_ => { warn!("Unknown topic {topic:?}!"); } @@ -285,30 +330,33 @@ impl Service async fn main_loop(&mut self) -> Result<(), Error> { // TODO: get from config let mut publish_peer_info_interval = tokio::time::interval(Duration::from_secs(5)); - let mut client_validate_block_req_rx = 
self.client_validate_block_req_rx.resubscribe(); loop { select! { - _ = publish_peer_info_interval.tick() => { - self.peer_store.cleanup(); - if let Err(error) = self.broadcast_peer_info().await { - match error { - Error::LibP2P(LibP2PError::Publish(PublishError::InsufficientPeers)) => { - warn!("No peers to broadcast peer info!"); - } - _ => { - error!("Failed to publish node info: {error:?}"); - } + _ = publish_peer_info_interval.tick() => { + self.peer_store.cleanup(); + if let Err(error) = self.broadcast_peer_info().await { + match error { + Error::LibP2P(LibP2PError::Publish(PublishError::InsufficientPeers)) => { + warn!("No peers to broadcast peer info!"); + } + Error::LibP2P(LibP2PError::Publish(PublishError::Duplicate)) => {} + _ => { + error!("Failed to publish node info: {error:?}"); } } } - result = client_validate_block_req_rx.recv() => { - self.handle_client_validate_block_request(result).await; - } - event = self.swarm.select_next_some() => { - self.handle_event(event).await; - } } + event = self.swarm.select_next_some() => { + self.handle_event(event).await; + } + result = self.client_validate_block_req_rx.recv() => { + self.handle_client_validate_block_request(result).await; + } + block = self.client_broadcast_block_rx.recv() => { + self.broadcast_block(block).await; + } + } } } diff --git a/crates/sha_p2pool/src/server/server.rs b/crates/sha_p2pool/src/server/server.rs index c775930c..af429537 100644 --- a/crates/sha_p2pool/src/server/server.rs +++ b/crates/sha_p2pool/src/server/server.rs @@ -60,7 +60,7 @@ impl Server ).await.map_err(Error::GRPC)?; let base_node_grpc_server = BaseNodeServer::new(base_node_grpc_service); - let p2pool_grpc_service = ShaP2PoolGrpc::new(config.base_node_address.clone(), share_chain.clone()).await.map_err(Error::GRPC)?; + let p2pool_grpc_service = ShaP2PoolGrpc::new(config.base_node_address.clone(), p2p_service.client(), share_chain.clone()).await.map_err(Error::GRPC)?; let p2pool_server = 
ShaP2PoolServer::new(p2pool_grpc_service); Ok(Self { config, p2p_service, base_node_grpc_service: base_node_grpc_server, p2pool_grpc_service: p2pool_server }) diff --git a/crates/sha_p2pool/src/sharechain/block.rs b/crates/sha_p2pool/src/sharechain/block.rs new file mode 100644 index 00000000..bded433a --- /dev/null +++ b/crates/sha_p2pool/src/sharechain/block.rs @@ -0,0 +1,119 @@ +use blake2::Blake2b; +use digest::consts::U32; +use serde::{Deserialize, Serialize}; +use tari_common_types::tari_address::TariAddress; +use tari_common_types::types::BlockHash; +use tari_core::blocks::{BlockHeader, BlocksHashDomain}; +use tari_core::consensus::DomainSeparatedConsensusHasher; +use tari_utilities::epoch_time::EpochTime; + +use crate::impl_conversions; + +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] +pub struct Block { + hash: BlockHash, + timestamp: EpochTime, + prev_hash: BlockHash, + height: u64, + original_block_header: Option, + miner_wallet_address: Option, + sent_to_main_chain: bool, +} +impl_conversions!(Block); + +impl Block { + pub fn builder() -> BlockBuilder { + BlockBuilder::new() + } + pub fn generate_hash(&self) -> BlockHash { + let mut hash = DomainSeparatedConsensusHasher::>::new("block") + .chain(&self.prev_hash) + .chain(&self.height); + + if let Some(miner_wallet_address) = &self.miner_wallet_address { + hash = hash.chain(&miner_wallet_address.to_hex()); + } + + if let Some(original_block_header) = &self.original_block_header { + hash = hash.chain(original_block_header); + } + + hash.finalize().into() + } + pub fn timestamp(&self) -> EpochTime { + self.timestamp + } + pub fn prev_hash(&self) -> BlockHash { + self.prev_hash + } + pub fn height(&self) -> u64 { + self.height + } + pub fn original_block_header(&self) -> &Option { + &self.original_block_header + } + pub fn hash(&self) -> BlockHash { + self.hash + } + pub fn set_sent_to_main_chain(&mut self, sent_to_main_chain: bool) { + self.sent_to_main_chain = sent_to_main_chain; + } + pub fn 
miner_wallet_address(&self) -> &Option { + &self.miner_wallet_address + } + pub fn sent_to_main_chain(&self) -> bool { + self.sent_to_main_chain + } +} + +pub struct BlockBuilder { + block: Block, +} + +impl BlockBuilder { + pub fn new() -> Self { + Self { + block: Block { + hash: Default::default(), + timestamp: EpochTime::now(), + prev_hash: Default::default(), + height: 0, + original_block_header: None, + miner_wallet_address: None, + sent_to_main_chain: false, + }, + } + } + + pub fn with_timestamp(&mut self, timestamp: EpochTime) -> &mut Self { + self.block.timestamp = timestamp; + self + } + + pub fn with_prev_hash(&mut self, prev_hash: BlockHash) -> &mut Self { + self.block.prev_hash = prev_hash; + self + } + + pub fn with_height(&mut self, height: u64) -> &mut Self { + self.block.height = height; + self + } + + pub fn with_original_block_header(&mut self, original_block_header: BlockHeader) -> &mut Self { + self.block.original_block_header = Some(original_block_header); + self + } + + pub fn with_miner_wallet_address(&mut self, miner_wallet_address: TariAddress) -> &mut Self { + self.block.miner_wallet_address = Some(miner_wallet_address); + self + } + + pub fn build(&mut self) -> Block { + self.block.hash = self.block.generate_hash(); + self.block.clone() + } +} + + diff --git a/crates/sha_p2pool/src/sharechain/error.rs b/crates/sha_p2pool/src/sharechain/error.rs new file mode 100644 index 00000000..b1b82837 --- /dev/null +++ b/crates/sha_p2pool/src/sharechain/error.rs @@ -0,0 +1,26 @@ +use tari_common_types::tari_address::TariAddressError; +use thiserror::Error; + +use crate::sharechain::block::Block; + +#[derive(Error, Debug)] +pub enum Error { + #[error("Internal error: {0}")] + Internal(String), + #[error("gRPC Block conversion error: {0}")] + BlockConvert(#[from] BlockConvertError), + #[error("Share chain is empty, no genesis block found as well!")] + Empty, + #[error("Tari address error: {0}")] + TariAddress(#[from] TariAddressError), + 
#[error("Invalid block: {0:?}")] + InvalidBlock(Block), +} + +#[derive(Error, Debug)] +pub enum BlockConvertError { + #[error("Missing field: {0}")] + MissingField(String), + #[error("Converting gRPC block header error: {0}")] + GrpcBlockHeaderConvert(String), +} \ No newline at end of file diff --git a/crates/sha_p2pool/src/sharechain/in_memory.rs b/crates/sha_p2pool/src/sharechain/in_memory.rs index a3723b8e..d141728b 100644 --- a/crates/sha_p2pool/src/sharechain/in_memory.rs +++ b/crates/sha_p2pool/src/sharechain/in_memory.rs @@ -1,39 +1,138 @@ use std::collections::HashMap; +use std::sync::Arc; use async_trait::async_trait; -use minotari_app_grpc::tari_rpc::NewBlockCoinbase; -use rand::random; +use log::{info, warn}; +use minotari_app_grpc::tari_rpc::{NewBlockCoinbase, SubmitBlockRequest}; +use tari_common_types::tari_address::TariAddress; +use tari_core::blocks::BlockHeader; +use tari_utilities::epoch_time::EpochTime; +use tokio::sync::RwLock; use crate::sharechain::{Block, ShareChain, ShareChainResult}; +use crate::sharechain::error::{BlockConvertError, Error}; -pub struct InMemoryShareChain {} +const DEFAULT_MAX_BLOCKS_COUNT: usize = 5000; + +pub struct InMemoryShareChain { + max_blocks_count: usize, + blocks: Arc>>, +} + +impl Default for InMemoryShareChain { + fn default() -> Self { + Self { + max_blocks_count: DEFAULT_MAX_BLOCKS_COUNT, + blocks: Arc::new( + RwLock::new( + vec![ + // genesis block + Block::builder() + .with_height(0) + .build() + ], + ), + ), + } + } +} impl InMemoryShareChain { - pub fn new() -> Self { - Self {} + pub fn new(max_blocks_count: usize) -> Self { + Self { + max_blocks_count, + blocks: Arc::new( + RwLock::new( + vec![ + // genesis block + Block::builder() + .with_height(0) + .build() + ], + ), + ), + } + } + + async fn miners_with_hash_rates(&self) -> HashMap { + let mut result: HashMap = HashMap::new(); // target wallet address -> hash rate + let blocks_read_lock = self.blocks.read().await; + 
blocks_read_lock.iter().for_each(|block| { + if let Some(miner_wallet_address) = block.miner_wallet_address() { + let addr = miner_wallet_address.to_hex(); + if let Some(curr_hash_rate) = result.get(&addr) { + result.insert(addr, curr_hash_rate + 1.0); + } else { + result.insert(addr, 1.0); + } + } + }); + + result + } + + async fn validate_block(&self, last_block: &Block, block: &Block) -> ShareChainResult { + // check if we have this block as last + if last_block == block { + warn!("This block already added, skip"); + return Ok(false); + } + + // validate hash + if block.hash() != block.generate_hash() { + warn!("Invalid block, hashes do not match"); + return Ok(false); + } + + // validate height + info!("VALIDATION - Last block: {:?}", last_block); + if last_block.height() + 1 != block.height() { + warn!("Invalid block, invalid block height: {:?} != {:?}", last_block.height() + 1, block.height()); + return Ok(false); + } + + Ok(true) } } #[async_trait] impl ShareChain for InMemoryShareChain { - async fn submit_block(&self, block: Block) -> ShareChainResult<()> { - //TODO: implement + async fn submit_block(&self, block: &Block) -> ShareChainResult<()> { + let mut blocks_write_lock = self.blocks.write().await; + + let block = block.clone(); + + let last_block = blocks_write_lock.last().ok_or_else(|| Error::Empty)?; + + // validate + if !self.validate_block(last_block, &block).await? 
{ + return Err(Error::InvalidBlock(block)); + } + + if blocks_write_lock.len() >= self.max_blocks_count { + // remove first element to keep the maximum vector size + blocks_write_lock.remove(0); + } + + info!("New block added: {:?}", block.clone()); + + blocks_write_lock.push(block); + + let last_block = blocks_write_lock.last().ok_or_else(|| Error::Empty)?; + info!("Current height: {:?}", last_block.height()); + Ok(()) } async fn tip_height(&self) -> ShareChainResult { - //TODO: implement - Ok(random()) + let blocks_read_lock = self.blocks.read().await; + let last_block = blocks_read_lock.last().ok_or_else(|| Error::Empty)?; + Ok(last_block.height()) } - fn generate_shares(&self, reward: u64) -> Vec { + async fn generate_shares(&self, reward: u64) -> Vec { let mut result = vec![]; - // TODO: get miners with hashrates from chain - let mut miners = HashMap::::new(); // target wallet address -> hash rate - - // TODO: remove, only for testing now, get miners from chain - miners.insert("260396abcc66770f67ca4cdd296cc133e63b88578f3c362d4fa0ff7b05da1bc5a74c78a415009fa49eda8fd8721c20fb4617a833aa630c9790157b6b6f716f0ac72e2e".to_string(), 100.0); - miners.insert("260304a3699f8911c3d949b2eb0394595c8041a36fa13320fa2395b4090ae573a430ac21c5d087ecfcd1922e6ef58cd3f2a1eef2fcbd17e2374a09e0c68036fe6c5f91".to_string(), 100.0); + let miners = self.miners_with_hash_rates().await; // calculate full hash rate and shares let full_hash_rate: f64 = miners.values().sum(); @@ -42,8 +141,7 @@ impl ShareChain for InMemoryShareChain { .filter(|(_, share)| *share > 0.0) .for_each(|(addr, share)| { let curr_reward = ((reward as f64) * share) as u64; - // TODO: check if still needed - // info!("{addr} -> SHARE: {share:?}, REWARD: {curr_reward:?}"); + info!("{addr} -> SHARE: {share:?}, REWARD: {curr_reward:?}"); result.push(NewBlockCoinbase { address: addr.clone(), value: curr_reward, @@ -55,4 +153,29 @@ impl ShareChain for InMemoryShareChain { result } + + async fn new_block(&self, request: 
&SubmitBlockRequest) -> ShareChainResult { + let origin_block_grpc = request.block.as_ref() + .ok_or_else(|| BlockConvertError::MissingField("block".to_string()))?; + let origin_block_header_grpc = origin_block_grpc.header.as_ref() + .ok_or_else(|| BlockConvertError::MissingField("header".to_string()))?; + let origin_block_header = BlockHeader::try_from(origin_block_header_grpc.clone()) + .map_err(BlockConvertError::GrpcBlockHeaderConvert)?; + + let blocks_read_lock = self.blocks.read().await; + let last_block = blocks_read_lock.last().ok_or_else(|| Error::Empty)?; + + Ok( + Block::builder() + .with_timestamp(EpochTime::now()) + .with_prev_hash(last_block.generate_hash()) + .with_height(last_block.height() + 1) + .with_original_block_header(origin_block_header) + .with_miner_wallet_address( + TariAddress::from_hex(request.wallet_payment_address.as_str()) + .map_err(Error::TariAddress)? + ) + .build() + ) + } } \ No newline at end of file diff --git a/crates/sha_p2pool/src/sharechain/mod.rs b/crates/sha_p2pool/src/sharechain/mod.rs index de3e61f6..fc422f93 100644 --- a/crates/sha_p2pool/src/sharechain/mod.rs +++ b/crates/sha_p2pool/src/sharechain/mod.rs @@ -1,49 +1,22 @@ use async_trait::async_trait; -use minotari_app_grpc::tari_rpc::NewBlockCoinbase; -use serde::{Deserialize, Serialize}; -use tari_common_types::types::BlockHash; -use tari_core::blocks; -use tari_core::blocks::BlockHeader; -use thiserror::Error; +use minotari_app_grpc::tari_rpc::{NewBlockCoinbase, SubmitBlockRequest}; -pub mod in_memory; - -#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] -pub struct Block { - hash: BlockHash, - prev_hash: BlockHash, - height: u64, - original_block_header: BlockHeader, - miners: Vec, -} - -// TODO: generate real block from share chain here -impl From for Block { - fn from(tari_block: blocks::Block) -> Self { - Self { - hash: Default::default(), - prev_hash: Default::default(), - height: tari_block.header.height, - original_block_header: tari_block.header, 
- miners: vec![], - } - } -} +use crate::sharechain::block::Block; +use crate::sharechain::error::Error; - -#[derive(Error, Debug)] -pub enum Error { - #[error("Internal error: {0}")] - Internal(String), -} +pub mod in_memory; +pub mod block; +pub mod error; pub type ShareChainResult = Result; #[async_trait] pub trait ShareChain { - async fn submit_block(&self, block: Block) -> ShareChainResult<()>; + async fn submit_block(&self, block: &Block) -> ShareChainResult<()>; async fn tip_height(&self) -> ShareChainResult; - fn generate_shares(&self, reward: u64) -> Vec; + async fn generate_shares(&self, reward: u64) -> Vec; + + async fn new_block(&self, request: &SubmitBlockRequest) -> ShareChainResult; } \ No newline at end of file From 9b0fccd0bcb5a1e8af4937266669d7fa90373610 Mon Sep 17 00:00:00 2001 From: richardb Date: Thu, 20 Jun 2024 15:49:01 +0200 Subject: [PATCH 14/43] implemented share chain synchronization --- crates/sha_p2pool/Cargo.toml | 4 +- crates/sha_p2pool/src/main.rs | 20 +++- crates/sha_p2pool/src/server/grpc/p2pool.rs | 56 +++++----- crates/sha_p2pool/src/server/p2p/client.rs | 38 ++++++- crates/sha_p2pool/src/server/p2p/messages.rs | 36 ++++++- crates/sha_p2pool/src/server/p2p/p2p.rs | 101 ++++++++++++++---- .../sha_p2pool/src/server/p2p/peer_store.rs | 4 +- crates/sha_p2pool/src/sharechain/error.rs | 2 - crates/sha_p2pool/src/sharechain/in_memory.rs | 48 ++++++--- crates/sha_p2pool/src/sharechain/mod.rs | 11 ++ 10 files changed, 244 insertions(+), 76 deletions(-) diff --git a/crates/sha_p2pool/Cargo.toml b/crates/sha_p2pool/Cargo.toml index 7b8d2f53..4c2ec9ea 100644 --- a/crates/sha_p2pool/Cargo.toml +++ b/crates/sha_p2pool/Cargo.toml @@ -27,7 +27,8 @@ libp2p = { version = "0.53.2", features = [ "mdns", "gossipsub", "request-response", - "json" + "json", + "cbor" ] } tokio = { version = "1.38.0", features = ["full"] } thiserror = "1.0" @@ -44,5 +45,6 @@ rand = "0.8.5" dashmap = "5.5.3" blake2 = "0.10.6" digest = "0.10.7" +clap = { version = "4.5.7", 
features = ["derive"] } diff --git a/crates/sha_p2pool/src/main.rs b/crates/sha_p2pool/src/main.rs index 825fc771..85ab425e 100644 --- a/crates/sha_p2pool/src/main.rs +++ b/crates/sha_p2pool/src/main.rs @@ -1,12 +1,30 @@ +use clap::Parser; + use crate::sharechain::in_memory::InMemoryShareChain; mod server; mod sharechain; +#[derive(Parser)] +#[command(version, about, long_about = None)] +struct Cli { + /// Optional gRPC port to use. + #[arg(short, long, value_name = "grpc-port")] + grpc_port: Option, +} + #[tokio::main] async fn main() -> anyhow::Result<()> { env_logger::init(); - let config = server::Config::builder().build(); + + // use cli params for constructing server config + let cli = Cli::parse(); + let mut config_builder = server::Config::builder(); + if let Some(grpc_port) = cli.grpc_port { + config_builder.with_grpc_port(grpc_port); + } + + let config = config_builder.build(); let share_chain = InMemoryShareChain::default(); let mut server = server::Server::new(config, share_chain).await?; server.start().await?; diff --git a/crates/sha_p2pool/src/server/grpc/p2pool.rs b/crates/sha_p2pool/src/server/grpc/p2pool.rs index a984258d..fe752e91 100644 --- a/crates/sha_p2pool/src/server/grpc/p2pool.rs +++ b/crates/sha_p2pool/src/server/grpc/p2pool.rs @@ -1,6 +1,6 @@ use std::sync::Arc; -use log::info; +use log::{error, info, warn}; use minotari_app_grpc::tari_rpc::{GetNewBlockRequest, GetNewBlockResponse, GetNewBlockTemplateWithCoinbasesRequest, HeightRequest, NewBlockTemplateRequest, PowAlgo, SubmitBlockRequest, SubmitBlockResponse}; use minotari_app_grpc::tari_rpc::base_node_client::BaseNodeClient; use minotari_app_grpc::tari_rpc::pow_algo::PowAlgos; @@ -8,14 +8,16 @@ use minotari_app_grpc::tari_rpc::sha_p2_pool_server::ShaP2Pool; use minotari_node_grpc_client::BaseNodeGrpcClient; use tari_core::proof_of_work::sha3x_difficulty; use tokio::sync::Mutex; -use tonic::{Request, Response, Status}; +use tonic::{IntoRequest, Request, Response, Status}; use 
crate::server::grpc::error::Error; use crate::server::grpc::error::TonicError; use crate::server::p2p; -use crate::sharechain::ShareChain; +use crate::server::p2p::ClientError; +use crate::sharechain::{ShareChain, ShareChainResult}; +use crate::sharechain::block::Block; -const MIN_SHARE_COUNT: usize = 10; +const MIN_SHARE_COUNT: usize = 1_000; pub struct ShaP2PoolGrpc where S: ShareChain + Send + Sync + 'static @@ -36,6 +38,20 @@ impl ShaP2PoolGrpc Ok(Self { client: Arc::new(Mutex::new(client)), p2p_client, share_chain }) } + + pub async fn submit_share_chain_block(&self, block: &Block) -> Result<(), Status> { + if let Err(error) = self.share_chain.submit_block(&block).await { + error!("Failed to add new block: {error:?}"); + // match self.p2p_client.sync_share_chain().await { + // Ok(true) => info!("Successfully synced share chain!"), + // Ok(false) => warn!("Failed to sync share chain!"), + // Err(error) => error!("Failed to sync share chain: {error:?}"), + // } + } + info!("Broadcast block with height: {:?}", block.height()); + self.p2p_client.broadcast_block(&block).await + .map_err(|error| Status::internal(error.to_string())) + } } #[tonic::async_trait] @@ -117,48 +133,26 @@ impl ShaP2Pool for ShaP2PoolGrpc } if !network_difficulty_matches { - // TODO: simply append new block if valid to sharechain showing that it is not accepted by base node - // TODO: but still need to present on sharechain block.set_sent_to_main_chain(false); - self.share_chain.submit_block(&block).await - .map_err(|error| Status::internal(error.to_string()))?; - info!("Broadcast block with height: {:?}", block.height()); - self.p2p_client.broadcast_block(&block).await - .map_err(|error| Status::internal(error.to_string()))?; - + self.submit_share_chain_block(&block).await?; return Ok(Response::new(SubmitBlockResponse { block_hash: block.hash().to_vec(), })); } // submit block to base node - let grpc_request = Request::new(grpc_request_payload); + let (metadata, extensions, _inner) = 
request.into_parts(); + let grpc_request = Request::from_parts(metadata, extensions, grpc_request_payload); match self.client.lock().await.submit_block(grpc_request).await { Ok(resp) => { info!("Block found and sent successfully! (rewards will be paid out)"); - - // TODO: append new block if valid to sharechain with a flag or something that shows - // TODO: that this block is accepted, so paid out block.set_sent_to_main_chain(true); - self.share_chain.submit_block(&block).await - .map_err(|error| Status::internal(error.to_string()))?; - info!("Broadcast block with height: {:?}", block.height()); - self.p2p_client.broadcast_block(&block).await - .map_err(|error| Status::internal(error.to_string()))?; - + self.submit_share_chain_block(&block).await?; Ok(resp) } Err(_) => { - info!("submit_block stop - block send failure"); - // TODO: simply append new block if valid to sharechain showing that it is not accepted by base node - // TODO: but still need to present on sharechain block.set_sent_to_main_chain(false); - self.share_chain.submit_block(&block).await - .map_err(|error| Status::internal(error.to_string()))?; - info!("Broadcast block with height: {:?}", block.height()); - self.p2p_client.broadcast_block(&block).await - .map_err(|error| Status::internal(error.to_string()))?; - + self.submit_share_chain_block(&block).await?; Ok(Response::new(SubmitBlockResponse { block_hash: block.hash().to_vec(), })) diff --git a/crates/sha_p2pool/src/server/p2p/client.rs b/crates/sha_p2pool/src/server/p2p/client.rs index e325b72c..f34b79af 100644 --- a/crates/sha_p2pool/src/server/p2p/client.rs +++ b/crates/sha_p2pool/src/server/p2p/client.rs @@ -1,3 +1,4 @@ +use std::ops::Deref; use std::sync::Arc; use std::time::{Duration, Instant}; @@ -8,7 +9,7 @@ use tokio::sync::{broadcast, mpsc}; use tokio::sync::broadcast::error::{RecvError, SendError}; use tokio::time::sleep; -use crate::server::p2p::messages::{ValidateBlockRequest, ValidateBlockResult}; +use 
crate::server::p2p::messages::{ShareChainSyncResponse, ValidateBlockRequest, ValidateBlockResult}; use crate::server::p2p::peer_store::PeerStore; use crate::sharechain::block::Block; @@ -18,14 +19,41 @@ pub enum ClientError { ChannelSend(#[from] Box), #[error("Channel receive error: {0}")] ChannelReceive(#[from] RecvError), + #[error("No response for share chain sync request!")] + NoSyncShareChainResponse, } #[derive(Error, Debug)] pub enum ChannelSendError { #[error("Send ValidateBlockRequest error: {0}")] - SendValidateBlockRequest(#[from] SendError), + ValidateBlockRequest(#[from] SendError), #[error("Send broadcast block error: {0}")] - SendBroadcastBlock(#[from] SendError), + BroadcastBlock(#[from] SendError), + #[error("Send sync share chain request error: {0}")] + ClientSyncShareChainRequest(#[from] SendError), +} + +#[derive(Clone, Debug)] +pub struct ClientSyncShareChainRequest { + pub request_id: String, +} + +impl ClientSyncShareChainRequest { + pub fn new(request_id: String) -> Self { + Self { request_id } + } +} + +#[derive(Clone, Debug)] +pub struct ClientSyncShareChainResponse { + pub request_id: String, + pub success: bool, +} + +impl ClientSyncShareChainResponse { + pub fn new(request_id: String, success: bool) -> Self { + Self { request_id, success } + } } pub struct ServiceClientChannels { @@ -64,7 +92,7 @@ impl ServiceClient { pub async fn broadcast_block(&self, block: &Block) -> Result<(), ClientError> { self.channels.broadcast_block_sender.send(block.clone()) .map_err(|error| - ClientError::ChannelSend(Box::new(ChannelSendError::SendBroadcastBlock(error))) + ClientError::ChannelSend(Box::new(ChannelSendError::BroadcastBlock(error))) )?; Ok(()) @@ -77,7 +105,7 @@ impl ServiceClient { // send request to validate block self.channels.validate_block_sender.send(ValidateBlockRequest::new(block.clone())) .map_err(|error| - ClientError::ChannelSend(Box::new(ChannelSendError::SendValidateBlockRequest(error))) + 
ClientError::ChannelSend(Box::new(ChannelSendError::ValidateBlockRequest(error))) )?; // calculate how many validations we need (more than 2/3 of peers should validate) diff --git a/crates/sha_p2pool/src/server/p2p/messages.rs b/crates/sha_p2pool/src/server/p2p/messages.rs index 404d7eb8..3991cf47 100644 --- a/crates/sha_p2pool/src/server/p2p/messages.rs +++ b/crates/sha_p2pool/src/server/p2p/messages.rs @@ -1,3 +1,5 @@ +use std::time::{SystemTime, UNIX_EPOCH}; + use libp2p::PeerId; use serde::{Deserialize, Serialize}; @@ -48,15 +50,19 @@ impl PeerInfo { } #[derive(Serialize, Deserialize, Debug, Clone)] -pub struct ValidateBlockRequest(Block); +pub struct ValidateBlockRequest { + block: Block, + timestamp: u64, +} impl_conversions!(ValidateBlockRequest); impl ValidateBlockRequest { pub fn new(block: Block) -> Self { - Self(block) + let timestamp = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs(); + Self { block, timestamp } } pub fn block(&self) -> Block { - self.0.clone() + self.block.clone() } } @@ -79,4 +85,28 @@ impl ValidateBlockResult { valid, } } +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct ShareChainSyncRequest { + pub request_id: String, + pub from_height: u64, +} + +impl ShareChainSyncRequest { + pub fn new(request_id: String, from_height: u64) -> Self { + Self { request_id, from_height } + } +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct ShareChainSyncResponse { + pub request_id: String, + pub blocks: Vec, +} + +impl ShareChainSyncResponse { + pub fn new(request_id: String, blocks: Vec) -> Self { + Self { request_id, blocks } + } } \ No newline at end of file diff --git a/crates/sha_p2pool/src/server/p2p/p2p.rs b/crates/sha_p2pool/src/server/p2p/p2p.rs index ecba9345..df30a502 100644 --- a/crates/sha_p2pool/src/server/p2p/p2p.rs +++ b/crates/sha_p2pool/src/server/p2p/p2p.rs @@ -1,11 +1,13 @@ +use std::future::Future; use std::hash::{DefaultHasher, Hash, Hasher}; use std::sync::Arc; use 
std::time::{Duration, Instant}; -use libp2p::{gossipsub, mdns, noise, Swarm, tcp, yamux}; +use libp2p::{gossipsub, mdns, noise, request_response, StreamProtocol, Swarm, tcp, yamux}; use libp2p::futures::StreamExt; -use libp2p::gossipsub::{Event, IdentTopic, Message, MessageId, PublishError, Topic}; +use libp2p::gossipsub::{IdentTopic, Message, MessageId, PublishError, Topic}; use libp2p::mdns::tokio::Tokio; +use libp2p::request_response::{cbor, ResponseChannel}; use libp2p::swarm::{NetworkBehaviour, SwarmEvent}; use log::{error, info, warn}; use tokio::{io, select}; @@ -13,11 +15,11 @@ use tokio::sync::{broadcast, mpsc, Mutex}; use tokio::sync::broadcast::error::RecvError; use crate::server::config; -use crate::server::p2p::{Error, LibP2PError, messages, ServiceClient, ServiceClientChannels}; -use crate::server::p2p::messages::{PeerInfo, ValidateBlockRequest, ValidateBlockResult}; +use crate::server::p2p::{ClientSyncShareChainRequest, ClientSyncShareChainResponse, Error, LibP2PError, messages, ServiceClient, ServiceClientChannels}; +use crate::server::p2p::messages::{PeerInfo, ShareChainSyncRequest, ShareChainSyncResponse, ValidateBlockRequest, ValidateBlockResult}; use crate::server::p2p::peer_store::PeerStore; -use crate::sharechain::{ShareChain, ShareChainResult}; use crate::sharechain::block::Block; +use crate::sharechain::ShareChain; const PEER_INFO_TOPIC: &str = "peer_info"; const BLOCK_VALIDATION_REQUESTS_TOPIC: &str = "block_validation_requests"; @@ -28,7 +30,7 @@ const NEW_BLOCK_TOPIC: &str = "new_block"; pub struct ServerNetworkBehaviour { pub mdns: mdns::Behaviour, pub gossipsub: gossipsub::Behaviour, - // pub request_response: json::Behaviour, + pub share_chain_sync: cbor::Behaviour, } pub struct Service @@ -94,7 +96,7 @@ impl Service soure_peer.to_bytes().hash(&mut s); } message.data.hash(&mut s); - Instant::now().hash(&mut s); + // Instant::now().hash(&mut s); gossipsub::MessageId::from(s.finish().to_string()) }; let gossipsub_config = 
gossipsub::ConfigBuilder::default() @@ -115,6 +117,13 @@ impl Service key_pair.public().to_peer_id(), ) .map_err(|e| Error::LibP2P(LibP2PError::IO(e)))?, + share_chain_sync: cbor::Behaviour::::new( + [( + StreamProtocol::new("/share_chain_sync/1"), + request_response::ProtocolSupport::Full, + )], + request_response::Config::default(), + ), }) }) .map_err(|e| Error::LibP2P(LibP2PError::Behaviour(e.to_string())))? @@ -216,7 +225,7 @@ impl Service } fn subscribe(&mut self, topic: &str) { - self.swarm.behaviour_mut().gossipsub.subscribe(&IdentTopic::new(topic.clone())) + self.swarm.behaviour_mut().gossipsub.subscribe(&IdentTopic::new(topic)) .expect("must be subscribed to topic"); } @@ -227,7 +236,7 @@ impl Service self.subscribe(NEW_BLOCK_TOPIC); } - async fn handle_new_message(&mut self, message: Message) { + async fn handle_new_gossipsub_message(&mut self, message: Message) { let peer = message.source; if peer.is_none() { warn!("Message source is not set! {:?}", message); @@ -235,13 +244,17 @@ impl Service } let peer = peer.unwrap(); + if peer == *self.swarm.local_peer_id() { + return; + } + let topic = message.topic.as_str(); match topic { PEER_INFO_TOPIC => { match messages::PeerInfo::try_from(message) { Ok(payload) => { self.peer_store.add(peer, payload); - info!("[PEER STORE] Number of peers: {:?}", self.peer_store.peer_count()); + self.sync_share_chain(ClientSyncShareChainRequest::new(format!("{:p}", self))).await; } Err(error) => { error!("Can't deserialize peer info payload: {:?}", error); @@ -254,7 +267,7 @@ impl Service info!("Block validation request: {payload:?}"); // TODO: validate block let validate_result = ValidateBlockResult::new( - self.swarm.local_peer_id().clone(), + *self.swarm.local_peer_id(), payload.block(), true, // TODO: validate block ); @@ -296,6 +309,42 @@ impl Service } } + async fn handle_share_chain_sync_request(&mut self, channel: ResponseChannel, request: ShareChainSyncRequest) { + match 
self.share_chain.blocks(request.from_height).await { + Ok(blocks) => { + match self.swarm.behaviour_mut().share_chain_sync.send_response(channel, ShareChainSyncResponse::new(request.request_id, blocks.clone())) { + Ok(_) => {} + Err(_) => error!("Failed to send block sync response") + } + } + Err(error) => error!("Failed to get blocks from height: {error:?}"), + } + } + + async fn handle_share_chain_sync_response(&mut self, response: ShareChainSyncResponse) { + if let Err(error) = self.share_chain.submit_blocks(response.blocks).await { + error!("Failed to add synced blocks to share chain: {error:?}"); + } + } + + async fn sync_share_chain(&mut self, request: ClientSyncShareChainRequest) { + while self.peer_store.tip_of_block_height().is_none() {} // waiting for the highest blockchain + match self.peer_store.tip_of_block_height() { + Some(result) => { + match self.share_chain.tip_height().await { + Ok(tip) => { + self.swarm.behaviour_mut().share_chain_sync.send_request( + &result.peer_id, + ShareChainSyncRequest::new(request.request_id, tip), + ); + } + Err(error) => error!("Failed to get latest height of share chain: {error:?}"), + } + } + None => error!("Failed to get peer with highest share chain height!") + } + } + async fn handle_event(&mut self, event: SwarmEvent) { match event { SwarmEvent::NewListenAddr { address, .. 
} => { @@ -305,6 +354,7 @@ impl Service ServerNetworkBehaviourEvent::Mdns(mdns_event) => match mdns_event { mdns::Event::Discovered(peers) => { for (peer, addr) in peers { + self.swarm.add_peer_address(peer, addr); self.swarm.behaviour_mut().gossipsub.add_explicit_peer(&peer); } } @@ -315,12 +365,25 @@ impl Service } }, ServerNetworkBehaviourEvent::Gossipsub(event) => match event { - Event::Message { message, message_id: _message_id, propagation_source: _propagation_source } => { - self.handle_new_message(message).await; + gossipsub::Event::Message { message, message_id: _message_id, propagation_source: _propagation_source } => { + self.handle_new_gossipsub_message(message).await; + } + gossipsub::Event::Subscribed { .. } => {} + gossipsub::Event::Unsubscribed { .. } => {} + gossipsub::Event::GossipsubNotSupported { .. } => {} + }, + ServerNetworkBehaviourEvent::ShareChainSync(event) => match event { + request_response::Event::Message { peer, message } => match message { + request_response::Message::Request { request_id, request, channel } => { + self.handle_share_chain_sync_request(channel, request).await; + } + request_response::Message::Response { request_id, response } => { + self.handle_share_chain_sync_response(response).await; + } } - Event::Subscribed { .. } => {} - Event::Unsubscribed { .. } => {} - Event::GossipsubNotSupported { .. } => {} + request_response::Event::OutboundFailure { .. } => {} + request_response::Event::InboundFailure { .. } => {} + request_response::Event::ResponseSent { .. 
} => {} } }, _ => {} @@ -347,15 +410,15 @@ impl Service } } } - event = self.swarm.select_next_some() => { - self.handle_event(event).await; - } result = self.client_validate_block_req_rx.recv() => { self.handle_client_validate_block_request(result).await; } block = self.client_broadcast_block_rx.recv() => { self.broadcast_block(block).await; } + event = self.swarm.select_next_some() => { + self.handle_event(event).await; + } } } } diff --git a/crates/sha_p2pool/src/server/p2p/peer_store.rs b/crates/sha_p2pool/src/server/p2p/peer_store.rs index 4b18dc80..e32c73e3 100644 --- a/crates/sha_p2pool/src/server/p2p/peer_store.rs +++ b/crates/sha_p2pool/src/server/p2p/peer_store.rs @@ -22,8 +22,8 @@ impl PeerStoreRecord { #[derive(Copy, Clone, Debug)] pub struct PeerStoreBlockHeightTip { - peer_id: PeerId, - height: u64, + pub peer_id: PeerId, + pub height: u64, } impl PeerStoreBlockHeightTip { diff --git a/crates/sha_p2pool/src/sharechain/error.rs b/crates/sha_p2pool/src/sharechain/error.rs index b1b82837..96d68219 100644 --- a/crates/sha_p2pool/src/sharechain/error.rs +++ b/crates/sha_p2pool/src/sharechain/error.rs @@ -5,8 +5,6 @@ use crate::sharechain::block::Block; #[derive(Error, Debug)] pub enum Error { - #[error("Internal error: {0}")] - Internal(String), #[error("gRPC Block conversion error: {0}")] BlockConvert(#[from] BlockConvertError), #[error("Share chain is empty, no genesis block found as well!")] diff --git a/crates/sha_p2pool/src/sharechain/in_memory.rs b/crates/sha_p2pool/src/sharechain/in_memory.rs index d141728b..8645e92f 100644 --- a/crates/sha_p2pool/src/sharechain/in_memory.rs +++ b/crates/sha_p2pool/src/sharechain/in_memory.rs @@ -1,13 +1,16 @@ use std::collections::HashMap; +use std::fmt::Debug; +use std::future::Future; use std::sync::Arc; use async_trait::async_trait; use log::{info, warn}; use minotari_app_grpc::tari_rpc::{NewBlockCoinbase, SubmitBlockRequest}; +use prost::Message; use tari_common_types::tari_address::TariAddress; use 
tari_core::blocks::BlockHeader; use tari_utilities::epoch_time::EpochTime; -use tokio::sync::RwLock; +use tokio::sync::{Mutex, RwLock, RwLockWriteGuard}; use crate::sharechain::{Block, ShareChain, ShareChainResult}; use crate::sharechain::error::{BlockConvertError, Error}; @@ -93,36 +96,48 @@ impl InMemoryShareChain { Ok(true) } -} - -#[async_trait] -impl ShareChain for InMemoryShareChain { - async fn submit_block(&self, block: &Block) -> ShareChainResult<()> { - let mut blocks_write_lock = self.blocks.write().await; + async fn submit_block_with_lock(&self, blocks: &mut RwLockWriteGuard<'_, Vec>, block: &Block) -> ShareChainResult<()> { let block = block.clone(); - let last_block = blocks_write_lock.last().ok_or_else(|| Error::Empty)?; + let last_block = blocks.last().ok_or_else(|| Error::Empty)?; // validate if !self.validate_block(last_block, &block).await? { return Err(Error::InvalidBlock(block)); } - if blocks_write_lock.len() >= self.max_blocks_count { + if blocks.len() >= self.max_blocks_count { // remove first element to keep the maximum vector size - blocks_write_lock.remove(0); + blocks.remove(0); } info!("New block added: {:?}", block.clone()); - blocks_write_lock.push(block); + blocks.push(block); - let last_block = blocks_write_lock.last().ok_or_else(|| Error::Empty)?; + let last_block = blocks.last().ok_or_else(|| Error::Empty)?; info!("Current height: {:?}", last_block.height()); Ok(()) } +} + +#[async_trait] +impl ShareChain for InMemoryShareChain { + async fn submit_block(&self, block: &Block) -> ShareChainResult<()> { + let mut blocks_write_lock = self.blocks.write().await; + self.submit_block_with_lock(&mut blocks_write_lock, block).await + } + + async fn submit_blocks(&self, blocks: Vec) -> ShareChainResult<()> { + let mut blocks_write_lock = self.blocks.write().await; + for block in blocks { + self.submit_block_with_lock(&mut blocks_write_lock, &block).await?; + } + + Ok(()) + } async fn tip_height(&self) -> ShareChainResult { let 
blocks_read_lock = self.blocks.read().await; @@ -178,4 +193,13 @@ impl ShareChain for InMemoryShareChain { .build() ) } + + async fn blocks(&self, from_height: u64) -> ShareChainResult> { + let blocks_read_lock = self.blocks.read().await; + Ok( + blocks_read_lock.iter() + .filter(|block| block.height() > from_height).cloned() + .collect() + ) + } } \ No newline at end of file diff --git a/crates/sha_p2pool/src/sharechain/mod.rs b/crates/sha_p2pool/src/sharechain/mod.rs index fc422f93..edfe3533 100644 --- a/crates/sha_p2pool/src/sharechain/mod.rs +++ b/crates/sha_p2pool/src/sharechain/mod.rs @@ -12,11 +12,22 @@ pub type ShareChainResult = Result; #[async_trait] pub trait ShareChain { + /// Adds a new block if valid to chain. async fn submit_block(&self, block: &Block) -> ShareChainResult<()>; + /// Add multiple blocks at once. + /// While this operation runs, no other blocks can be added until it's done. + async fn submit_blocks(&self, blocks: Vec) -> ShareChainResult<()>; + + /// Returns the tip of height in chain. async fn tip_height(&self) -> ShareChainResult; + /// Generate shares based on the previous blocks. async fn generate_shares(&self, reward: u64) -> Vec; + /// Return a new block that could be added via `submit_block`. async fn new_block(&self, request: &SubmitBlockRequest) -> ShareChainResult; + + /// Returns blocks from the given height (`from_height`, exclusive). 
+ async fn blocks(&self, from_height: u64) -> ShareChainResult>; } \ No newline at end of file From 1d94a93c771146751ff97eb8008f77609fae65a6 Mon Sep 17 00:00:00 2001 From: richardb Date: Thu, 20 Jun 2024 15:50:07 +0200 Subject: [PATCH 15/43] small update --- crates/sha_p2pool/src/server/p2p/p2p.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/sha_p2pool/src/server/p2p/p2p.rs b/crates/sha_p2pool/src/server/p2p/p2p.rs index df30a502..1732a8a9 100644 --- a/crates/sha_p2pool/src/server/p2p/p2p.rs +++ b/crates/sha_p2pool/src/server/p2p/p2p.rs @@ -392,7 +392,7 @@ impl Service async fn main_loop(&mut self) -> Result<(), Error> { // TODO: get from config - let mut publish_peer_info_interval = tokio::time::interval(Duration::from_secs(5)); + let mut publish_peer_info_interval = tokio::time::interval(Duration::from_secs(10)); loop { select! { From 1868ea4583a8f632f8ad4dc321ff7cc7577bcaa6 Mon Sep 17 00:00:00 2001 From: richardb Date: Thu, 20 Jun 2024 23:54:48 +0200 Subject: [PATCH 16/43] reimplemented peer store using moka --- crates/sha_p2pool/Cargo.toml | 2 +- crates/sha_p2pool/src/server/p2p/client.rs | 3 +- crates/sha_p2pool/src/server/p2p/messages.rs | 2 +- crates/sha_p2pool/src/server/p2p/p2p.rs | 45 +++++------ .../sha_p2pool/src/server/p2p/peer_store.rs | 74 ++++++++++++------- 5 files changed, 75 insertions(+), 51 deletions(-) diff --git a/crates/sha_p2pool/Cargo.toml b/crates/sha_p2pool/Cargo.toml index 4c2ec9ea..9ab34b9d 100644 --- a/crates/sha_p2pool/Cargo.toml +++ b/crates/sha_p2pool/Cargo.toml @@ -42,9 +42,9 @@ tonic = { workspace = true } async-trait = "0.1.80" serde_cbor = "0.11.2" rand = "0.8.5" -dashmap = "5.5.3" blake2 = "0.10.6" digest = "0.10.7" clap = { version = "4.5.7", features = ["derive"] } +moka = { version = "0.12.7", features = ["future"] } diff --git a/crates/sha_p2pool/src/server/p2p/client.rs b/crates/sha_p2pool/src/server/p2p/client.rs index f34b79af..a8340e01 100644 --- a/crates/sha_p2pool/src/server/p2p/client.rs 
+++ b/crates/sha_p2pool/src/server/p2p/client.rs @@ -109,12 +109,13 @@ impl ServiceClient { )?; // calculate how many validations we need (more than 2/3 of peers should validate) - let peer_count = self.peer_store.peer_count() as f64 + 1.0; + let peer_count = self.peer_store.peer_count().await as f64 + 1.0; let min_validation_count = (peer_count / 3.0) * 2.0; let min_validation_count = min_validation_count.round() as u64; info!("[CLIENT] Minimum validation count: {min_validation_count:?}"); // wait for the validations to come + // TODO: listen here for peer_store changes, so we can recalculate min validation count and restart validation flow here let timeout = Duration::from_secs(30); let mut validate_receiver = self.channels.validate_block_receiver.resubscribe(); let mut validation_count = 0; diff --git a/crates/sha_p2pool/src/server/p2p/messages.rs b/crates/sha_p2pool/src/server/p2p/messages.rs index 3991cf47..dde218bb 100644 --- a/crates/sha_p2pool/src/server/p2p/messages.rs +++ b/crates/sha_p2pool/src/server/p2p/messages.rs @@ -38,7 +38,7 @@ pub fn serialize_message(input: &T) -> Result, Error> serde_cbor::to_vec(input).map_err(Error::SerializeDeserialize) } -#[derive(Serialize, Deserialize, Debug)] +#[derive(Serialize, Deserialize, Debug, Copy, Clone)] pub struct PeerInfo { pub current_height: u64, } diff --git a/crates/sha_p2pool/src/server/p2p/p2p.rs b/crates/sha_p2pool/src/server/p2p/p2p.rs index 1732a8a9..2f773bd0 100644 --- a/crates/sha_p2pool/src/server/p2p/p2p.rs +++ b/crates/sha_p2pool/src/server/p2p/p2p.rs @@ -57,7 +57,7 @@ impl Service pub fn new(config: &config::Config, share_chain: Arc) -> Result { let swarm = Self::new_swarm(config)?; let peer_store = Arc::new( - PeerStore::new(config.idle_connection_timeout), + PeerStore::new(Duration::from_secs(5)), ); // client related channels @@ -96,7 +96,7 @@ impl Service soure_peer.to_bytes().hash(&mut s); } message.data.hash(&mut s); - // Instant::now().hash(&mut s); + Instant::now().hash(&mut s); 
gossipsub::MessageId::from(s.finish().to_string()) }; let gossipsub_config = gossipsub::ConfigBuilder::default() @@ -145,16 +145,20 @@ impl Service } async fn handle_client_validate_block_request(&mut self, result: Result) { + info!("handle_client_validate_block_request - hit!"); match result { Ok(request) => { let request_raw_result: Result, Error> = request.try_into(); match request_raw_result { Ok(request_raw) => { + info!("handle_client_validate_block_request - before publish!"); match self.swarm.behaviour_mut().gossipsub.publish( IdentTopic::new(BLOCK_VALIDATION_REQUESTS_TOPIC), request_raw, ) { - Ok(_) => {} + Ok(res) => { + info!("handle_client_validate_block_request - published!"); + } Err(error) => { error!("Failed to send block validation request: {error:?}"); } @@ -244,16 +248,12 @@ impl Service } let peer = peer.unwrap(); - if peer == *self.swarm.local_peer_id() { - return; - } - let topic = message.topic.as_str(); match topic { PEER_INFO_TOPIC => { match messages::PeerInfo::try_from(message) { Ok(payload) => { - self.peer_store.add(peer, payload); + self.peer_store.add(peer, payload).await; self.sync_share_chain(ClientSyncShareChainRequest::new(format!("{:p}", self))).await; } Err(error) => { @@ -328,8 +328,8 @@ impl Service } async fn sync_share_chain(&mut self, request: ClientSyncShareChainRequest) { - while self.peer_store.tip_of_block_height().is_none() {} // waiting for the highest blockchain - match self.peer_store.tip_of_block_height() { + while self.peer_store.tip_of_block_height().await.is_none() {} // waiting for the highest blockchain + match self.peer_store.tip_of_block_height().await { Some(result) => { match self.share_chain.tip_height().await { Ok(tip) => { @@ -392,12 +392,24 @@ impl Service async fn main_loop(&mut self) -> Result<(), Error> { // TODO: get from config - let mut publish_peer_info_interval = tokio::time::interval(Duration::from_secs(10)); + let mut publish_peer_info_interval = tokio::time::interval(Duration::from_secs(5)); 
loop { select! { + event = self.swarm.select_next_some() => { + self.handle_event(event).await; + } + result = self.client_validate_block_req_rx.recv() => { + self.handle_client_validate_block_request(result).await; + } + block = self.client_broadcast_block_rx.recv() => { + self.broadcast_block(block).await; + } _ = publish_peer_info_interval.tick() => { - self.peer_store.cleanup(); + let expired_peers = self.peer_store.cleanup().await; + for exp_peer in expired_peers { + self.swarm.behaviour_mut().gossipsub.remove_explicit_peer(&exp_peer); + } if let Err(error) = self.broadcast_peer_info().await { match error { Error::LibP2P(LibP2PError::Publish(PublishError::InsufficientPeers)) => { @@ -410,15 +422,6 @@ impl Service } } } - result = self.client_validate_block_req_rx.recv() => { - self.handle_client_validate_block_request(result).await; - } - block = self.client_broadcast_block_rx.recv() => { - self.broadcast_block(block).await; - } - event = self.swarm.select_next_some() => { - self.handle_event(event).await; - } } } } diff --git a/crates/sha_p2pool/src/server/p2p/peer_store.rs b/crates/sha_p2pool/src/server/p2p/peer_store.rs index e32c73e3..9b839d91 100644 --- a/crates/sha_p2pool/src/server/p2p/peer_store.rs +++ b/crates/sha_p2pool/src/server/p2p/peer_store.rs @@ -1,11 +1,14 @@ -use std::sync::{Arc, RwLock}; +use std::collections::HashMap; +use std::sync::{Arc, Mutex, RwLock}; use std::time::{Duration, Instant}; -use dashmap::DashMap; use libp2p::PeerId; +use log::info; +use moka::future::{Cache, CacheBuilder}; use crate::server::p2p::messages::PeerInfo; +#[derive(Copy, Clone, Debug)] pub struct PeerStoreRecord { peer_info: PeerInfo, created: Instant, @@ -36,7 +39,7 @@ impl PeerStoreBlockHeightTip { } pub struct PeerStore { - inner: Arc>, + inner: Cache, // Max time to live for the items to avoid non-existing peers in list. 
ttl: Duration, tip_of_block_height: RwLock>, @@ -45,46 +48,48 @@ pub struct PeerStore { impl PeerStore { pub fn new(ttl: Duration) -> Self { Self { - inner: Arc::new(DashMap::new()), + inner: CacheBuilder::new(100_000) + .time_to_live(Duration::from_secs(10)) + .build(), ttl, tip_of_block_height: RwLock::new(None), } } - pub fn add(&self, peer_id: PeerId, peer_info: PeerInfo) { - self.inner.insert(peer_id, PeerStoreRecord::new(peer_info)); - self.set_tip_of_block_height(); + pub async fn add(&self, peer_id: PeerId, peer_info: PeerInfo) { + self.inner.insert(peer_id, PeerStoreRecord::new(peer_info)).await; + self.set_tip_of_block_height().await; } - pub fn peer_count(&self) -> usize { - self.inner.len() + pub async fn peer_count(&self) -> u64 { + self.inner.entry_count() } - fn set_tip_of_block_height(&self) { - if let Some(result) = + async fn set_tip_of_block_height(&self) { + if let Some((k, v)) = self.inner.iter() - .max_by(|r1, r2| { - r1.peer_info.current_height.cmp(&r2.peer_info.current_height) + .max_by(|(k1, v1), (k2, v2)| { + v1.peer_info.current_height.cmp(&v2.peer_info.current_height) }) { // save result if let Ok(mut tip_height_opt) = self.tip_of_block_height.write() { if tip_height_opt.is_none() { let _ = tip_height_opt.insert( PeerStoreBlockHeightTip::new( - *result.key(), - result.peer_info.current_height, + *k, + v.peer_info.current_height, ) ); } else { let mut tip_height = tip_height_opt.unwrap(); - tip_height.peer_id = *result.key(); - tip_height.height = result.peer_info.current_height; + tip_height.peer_id = *k; + tip_height.height = v.peer_info.current_height; } } } } - pub fn tip_of_block_height(&self) -> Option { + pub async fn tip_of_block_height(&self) -> Option { if let Ok(result) = self.tip_of_block_height.read() { if result.is_some() { return Some(result.unwrap()); @@ -93,14 +98,29 @@ impl PeerStore { None } - pub fn cleanup(&self) { - self.inner.iter() - .filter(|record| { - let elapsed = record.created.elapsed(); - 
elapsed.gt(&self.ttl) - }).for_each(|record| { - self.inner.remove(record.key()); - }); - self.set_tip_of_block_height(); + pub async fn cleanup(&self) -> Vec { + info!("PEER STORE - cleanup"); + let mut expired_peers = vec![]; + + for (k, v) in self.inner.iter() { + info!("PEER STORE - {:?} -> {:?}", k, v); + let elapsed = v.created.elapsed(); + let expired = elapsed.gt(&self.ttl); + info!("{:?} ttl elapsed: {:?} <-> {:?}, Expired: {:?}", k, elapsed, &self.ttl, expired); + if expired { + expired_peers.push(*k); + self.inner.remove(k.as_ref()).await; + info!("PEER STORE - removed!"); + } + } + + // for exp_peer in expired_peers.clone() { + // lock.remove(exp_peer); + // info!("PEER STORE - {:?} removed", exp_peer); + // } + + self.set_tip_of_block_height().await; + + expired_peers } } \ No newline at end of file From bae56f89214fb62aa41618a3292d51c162c7a68f Mon Sep 17 00:00:00 2001 From: richardb Date: Fri, 21 Jun 2024 16:01:53 +0200 Subject: [PATCH 17/43] fixed deadlocks, implemented block sync, block validation almost done --- Cargo.toml | 54 +++++++++++++-- crates/sha_p2pool/.gitignore | 19 ------ crates/sha_p2pool/Cargo.toml | 50 -------------- crates/sha_p2pool/src/server/grpc/mod.rs | 3 - {crates/sha_p2pool/src => src}/main.rs | 0 .../sha_p2pool/src => src}/server/config.rs | 0 .../src => src}/server/grpc/base_node.rs | 35 ++++------ .../src => src}/server/grpc/error.rs | 0 src/server/grpc/mod.rs | 6 ++ .../src => src}/server/grpc/p2pool.rs | 51 +++++++-------- src/server/grpc/util.rs | 34 ++++++++++ {crates/sha_p2pool/src => src}/server/mod.rs | 0 .../src => src}/server/p2p/client.rs | 51 +++++++++------ .../src => src}/server/p2p/error.rs | 0 .../src => src}/server/p2p/messages.rs | 4 +- .../sha_p2pool/src => src}/server/p2p/mod.rs | 0 .../sha_p2pool/src => src}/server/p2p/p2p.rs | 65 ++++++++++++++----- .../src => src}/server/p2p/peer_store.rs | 7 +- .../sha_p2pool/src => src}/server/server.rs | 30 ++------- .../src => src}/sharechain/block.rs | 0 .../src 
=> src}/sharechain/error.rs | 0 .../src => src}/sharechain/in_memory.rs | 10 ++- .../sha_p2pool/src => src}/sharechain/mod.rs | 3 + 23 files changed, 223 insertions(+), 199 deletions(-) delete mode 100644 crates/sha_p2pool/.gitignore delete mode 100644 crates/sha_p2pool/Cargo.toml delete mode 100644 crates/sha_p2pool/src/server/grpc/mod.rs rename {crates/sha_p2pool/src => src}/main.rs (100%) rename {crates/sha_p2pool/src => src}/server/config.rs (100%) rename {crates/sha_p2pool/src => src}/server/grpc/base_node.rs (91%) rename {crates/sha_p2pool/src => src}/server/grpc/error.rs (100%) create mode 100644 src/server/grpc/mod.rs rename {crates/sha_p2pool/src => src}/server/grpc/p2pool.rs (80%) create mode 100644 src/server/grpc/util.rs rename {crates/sha_p2pool/src => src}/server/mod.rs (100%) rename {crates/sha_p2pool/src => src}/server/p2p/client.rs (72%) rename {crates/sha_p2pool/src => src}/server/p2p/error.rs (100%) rename {crates/sha_p2pool/src => src}/server/p2p/messages.rs (94%) rename {crates/sha_p2pool/src => src}/server/p2p/mod.rs (100%) rename {crates/sha_p2pool/src => src}/server/p2p/p2p.rs (85%) rename {crates/sha_p2pool/src => src}/server/p2p/peer_store.rs (93%) rename {crates/sha_p2pool/src => src}/server/server.rs (73%) rename {crates/sha_p2pool/src => src}/sharechain/block.rs (100%) rename {crates/sha_p2pool/src => src}/sharechain/error.rs (100%) rename {crates/sha_p2pool/src => src}/sharechain/in_memory.rs (94%) rename {crates/sha_p2pool/src => src}/sharechain/mod.rs (91%) diff --git a/Cargo.toml b/Cargo.toml index 839a03e4..2452bac7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,10 +1,50 @@ -[workspace] -resolver = "2" +[package] +name = "sha_p2pool" +version = "0.1.0" +edition = "2021" -members = [ - "crates/sha_p2pool" -] +[dependencies] +#sha_p2pool_grpc = { path = "../sha_p2pool_grpc" } +#minotari_app_grpc = { git = "https://github.com/tari-project/tari.git" } +#minotari_node_grpc_client = { git = "https://github.com/tari-project/tari.git" } 
+#tari_common_types = { git = "https://github.com/tari-project/tari.git" } -[workspace.dependencies] +minotari_app_grpc = { git = "https://github.com/ksrichard/tari.git", branch = "p2pool" } +minotari_node_grpc_client = { git = "https://github.com/ksrichard/tari.git", branch = "p2pool" } +tari_common_types = { git = "https://github.com/ksrichard/tari.git", branch = "p2pool" } +tari_core = { git = "https://github.com/ksrichard/tari.git", branch = "p2pool" } + +tari_utilities = { version = "0.7", features = ["borsh"] } +libp2p = { version = "0.53.2", features = [ + "dns", + "identify", + "macros", + "tokio", + "serde", + "noise", + "tcp", + "yamux", + "mdns", + "gossipsub", + "request-response", + "json", + "cbor" +] } +tokio = { version = "1.38.0", features = ["full"] } +thiserror = "1.0" +serde = "1.0.203" +anyhow = "1.0" +log = "0.4.21" +prost = "0.11.9" +prost-types = "0.11.9" +env_logger = "0.11.3" tonic = "0.8.3" -tonic-build = "0.8.4" +async-trait = "0.1.80" +serde_cbor = "0.11.2" +rand = "0.8.5" +blake2 = "0.10.6" +digest = "0.10.7" +clap = { version = "4.5.7", features = ["derive"] } +moka = { version = "0.12.7", features = ["future"] } + + diff --git a/crates/sha_p2pool/.gitignore b/crates/sha_p2pool/.gitignore deleted file mode 100644 index 196e176d..00000000 --- a/crates/sha_p2pool/.gitignore +++ /dev/null @@ -1,19 +0,0 @@ -# Generated by Cargo -# will have compiled files and executables -debug/ -target/ - -# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries -# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html -Cargo.lock - -# These are backup files generated by rustfmt -**/*.rs.bk - -# MSVC Windows builds of rustc generate these, which store debugging information -*.pdb - - -# Added by cargo - -/target diff --git a/crates/sha_p2pool/Cargo.toml b/crates/sha_p2pool/Cargo.toml deleted file mode 100644 index 9ab34b9d..00000000 --- a/crates/sha_p2pool/Cargo.toml +++ /dev/null @@ -1,50 
+0,0 @@ -[package] -name = "sha_p2pool" -version = "0.1.0" -edition = "2021" - -[dependencies] -#sha_p2pool_grpc = { path = "../sha_p2pool_grpc" } -#minotari_app_grpc = { git = "https://github.com/tari-project/tari.git" } -#minotari_node_grpc_client = { git = "https://github.com/tari-project/tari.git" } -#tari_common_types = { git = "https://github.com/tari-project/tari.git" } - -minotari_app_grpc = { git = "https://github.com/ksrichard/tari.git", branch = "p2pool" } -minotari_node_grpc_client = { git = "https://github.com/ksrichard/tari.git", branch = "p2pool" } -tari_common_types = { git = "https://github.com/ksrichard/tari.git", branch = "p2pool" } -tari_core = { git = "https://github.com/ksrichard/tari.git", branch = "p2pool" } - -tari_utilities = { version = "0.7", features = ["borsh"] } -libp2p = { version = "0.53.2", features = [ - "dns", - "identify", - "macros", - "tokio", - "serde", - "noise", - "tcp", - "yamux", - "mdns", - "gossipsub", - "request-response", - "json", - "cbor" -] } -tokio = { version = "1.38.0", features = ["full"] } -thiserror = "1.0" -serde = "1.0.203" -anyhow = "1.0" -log = "0.4.21" -prost = "0.11.9" -prost-types = "0.11.9" -env_logger = "0.11.3" -tonic = { workspace = true } -async-trait = "0.1.80" -serde_cbor = "0.11.2" -rand = "0.8.5" -blake2 = "0.10.6" -digest = "0.10.7" -clap = { version = "4.5.7", features = ["derive"] } -moka = { version = "0.12.7", features = ["future"] } - - diff --git a/crates/sha_p2pool/src/server/grpc/mod.rs b/crates/sha_p2pool/src/server/grpc/mod.rs deleted file mode 100644 index 9e3fee3b..00000000 --- a/crates/sha_p2pool/src/server/grpc/mod.rs +++ /dev/null @@ -1,3 +0,0 @@ -pub mod base_node; -pub mod error; -pub mod p2pool; \ No newline at end of file diff --git a/crates/sha_p2pool/src/main.rs b/src/main.rs similarity index 100% rename from crates/sha_p2pool/src/main.rs rename to src/main.rs diff --git a/crates/sha_p2pool/src/server/config.rs b/src/server/config.rs similarity index 100% rename from 
crates/sha_p2pool/src/server/config.rs rename to src/server/config.rs diff --git a/crates/sha_p2pool/src/server/grpc/base_node.rs b/src/server/grpc/base_node.rs similarity index 91% rename from crates/sha_p2pool/src/server/grpc/base_node.rs rename to src/server/grpc/base_node.rs index 21b4f71a..b0347074 100644 --- a/crates/sha_p2pool/src/server/grpc/base_node.rs +++ b/src/server/grpc/base_node.rs @@ -2,17 +2,16 @@ use std::sync::Arc; use libp2p::futures::channel::mpsc; use libp2p::futures::SinkExt; -use log::{error, warn}; +use log::error; use minotari_app_grpc::tari_rpc; use minotari_app_grpc::tari_rpc::{Block, BlockBlobRequest, BlockGroupRequest, BlockGroupResponse, BlockHeaderResponse, BlockHeight, BlockTimingResponse, ConsensusConstants, Empty, FetchMatchingUtxosRequest, GetActiveValidatorNodesRequest, GetBlocksRequest, GetHeaderByHashRequest, GetMempoolTransactionsRequest, GetNewBlockBlobResult, GetNewBlockResult, GetNewBlockTemplateWithCoinbasesRequest, GetNewBlockWithCoinbasesRequest, GetPeersRequest, GetShardKeyRequest, GetShardKeyResponse, GetSideChainUtxosRequest, GetTemplateRegistrationsRequest, HeightRequest, HistoricalBlock, ListConnectedPeersResponse, ListHeadersRequest, MempoolStatsResponse, NetworkStatusResponse, NewBlockCoinbase, NewBlockTemplate, NewBlockTemplateRequest, NewBlockTemplateResponse, NodeIdentity, PowAlgo, SearchKernelsRequest, SearchUtxosRequest, SoftwareUpdate, StringValue, SubmitBlockResponse, SubmitTransactionRequest, SubmitTransactionResponse, SyncInfoResponse, SyncProgressResponse, TipInfoResponse, TransactionStateRequest, TransactionStateResponse, ValueAtHeightResponse}; use minotari_app_grpc::tari_rpc::base_node_client::BaseNodeClient; -use minotari_node_grpc_client::BaseNodeGrpcClient; use tokio::sync::Mutex; use tonic::{Request, Response, Status, Streaming}; +use tonic::transport::Channel; -use crate::server::grpc::error::{Error, TonicError}; -use crate::server::p2p; -use crate::sharechain::ShareChain; +use 
crate::server::grpc::error::Error; +use crate::server::grpc::util; const LIST_HEADERS_PAGE_SIZE: usize = 10; const GET_BLOCKS_PAGE_SIZE: usize = 10; @@ -49,6 +48,7 @@ macro_rules! proxy_stream_result { }; } +/// Returns a streaming response for any gRPC methods. async fn streaming_response( call: String, result: Result>, Status>, @@ -75,35 +75,24 @@ async fn streaming_response( } } -pub struct TariBaseNodeGrpc - where S: ShareChain + Send + Sync + 'static, +/// Base node gRPC service that proxies all the requests to base node when miner calls them. +/// This makes sure that any extra call towards base node is served. +pub struct TariBaseNodeGrpc { - // TODO: check if 1 shared client is enough or we need a pool of clients to operate faster - client: Arc>>, - p2p_client: p2p::ServiceClient, - share_chain: Arc, + client: Arc>>, } -impl TariBaseNodeGrpc - where S: ShareChain + Send + Sync + 'static, +impl TariBaseNodeGrpc { pub async fn new( base_node_address: String, - p2p_client: p2p::ServiceClient, - share_chain: Arc, ) -> Result { - // TODO: add retry mechanism to try at least 3 times before failing - let client = BaseNodeGrpcClient::connect(base_node_address) - .await - .map_err(|e| Error::Tonic(TonicError::Transport(e)))?; - - Ok(Self { client: Arc::new(Mutex::new(client)), p2p_client, share_chain }) + Ok(Self { client: Arc::new(Mutex::new(util::connect_base_node(base_node_address).await?)) }) } } #[tonic::async_trait] -impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc - where S: ShareChain + Send + Sync + 'static, +impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc { type ListHeadersStream = mpsc::Receiver>; async fn list_headers(&self, request: Request) -> Result, Status> { diff --git a/crates/sha_p2pool/src/server/grpc/error.rs b/src/server/grpc/error.rs similarity index 100% rename from crates/sha_p2pool/src/server/grpc/error.rs rename to src/server/grpc/error.rs diff --git a/src/server/grpc/mod.rs b/src/server/grpc/mod.rs new file 
mode 100644 index 00000000..58970f8a --- /dev/null +++ b/src/server/grpc/mod.rs @@ -0,0 +1,6 @@ +//! This module contains all the gRPC implementations to mimic a real Tari base node interface +//! and also expose the custom SHA-3 P2Pool related gRPC interfaces. +pub mod base_node; +pub mod error; +pub mod p2pool; +mod util; \ No newline at end of file diff --git a/crates/sha_p2pool/src/server/grpc/p2pool.rs b/src/server/grpc/p2pool.rs similarity index 80% rename from crates/sha_p2pool/src/server/grpc/p2pool.rs rename to src/server/grpc/p2pool.rs index fe752e91..920568a2 100644 --- a/crates/sha_p2pool/src/server/grpc/p2pool.rs +++ b/src/server/grpc/p2pool.rs @@ -1,23 +1,21 @@ use std::sync::Arc; -use log::{error, info, warn}; +use log::{debug, error, info}; use minotari_app_grpc::tari_rpc::{GetNewBlockRequest, GetNewBlockResponse, GetNewBlockTemplateWithCoinbasesRequest, HeightRequest, NewBlockTemplateRequest, PowAlgo, SubmitBlockRequest, SubmitBlockResponse}; use minotari_app_grpc::tari_rpc::base_node_client::BaseNodeClient; use minotari_app_grpc::tari_rpc::pow_algo::PowAlgos; use minotari_app_grpc::tari_rpc::sha_p2_pool_server::ShaP2Pool; -use minotari_node_grpc_client::BaseNodeGrpcClient; use tari_core::proof_of_work::sha3x_difficulty; use tokio::sync::Mutex; -use tonic::{IntoRequest, Request, Response, Status}; +use tonic::{Request, Response, Status}; use crate::server::grpc::error::Error; -use crate::server::grpc::error::TonicError; +use crate::server::grpc::util; use crate::server::p2p; -use crate::server::p2p::ClientError; -use crate::sharechain::{ShareChain, ShareChainResult}; use crate::sharechain::block::Block; +use crate::sharechain::ShareChain; -const MIN_SHARE_COUNT: usize = 1_000; +const MIN_DIFFICULTY: u64 = 100_000; pub struct ShaP2PoolGrpc where S: ShareChain + Send + Sync + 'static @@ -31,25 +29,16 @@ impl ShaP2PoolGrpc where S: ShareChain + Send + Sync + 'static { pub async fn new(base_node_address: String, p2p_client: p2p::ServiceClient, 
share_chain: Arc) -> Result { - // TODO: add retry mechanism to try at least 3 times before failing - let client = BaseNodeGrpcClient::connect(base_node_address) - .await - .map_err(|e| Error::Tonic(TonicError::Transport(e)))?; - - Ok(Self { client: Arc::new(Mutex::new(client)), p2p_client, share_chain }) + Ok(Self { client: Arc::new(Mutex::new(util::connect_base_node(base_node_address).await?)), p2p_client, share_chain }) } + /// Submits a new block to share chain and broadcasts to the p2p network. pub async fn submit_share_chain_block(&self, block: &Block) -> Result<(), Status> { - if let Err(error) = self.share_chain.submit_block(&block).await { + if let Err(error) = self.share_chain.submit_block(block).await { error!("Failed to add new block: {error:?}"); - // match self.p2p_client.sync_share_chain().await { - // Ok(true) => info!("Successfully synced share chain!"), - // Ok(false) => warn!("Failed to sync share chain!"), - // Err(error) => error!("Failed to sync share chain: {error:?}"), - // } } - info!("Broadcast block with height: {:?}", block.height()); - self.p2p_client.broadcast_block(&block).await + debug!("Broadcast new block with height: {:?}", block.height()); + self.p2p_client.broadcast_block(block).await .map_err(|error| Status::internal(error.to_string())) } } @@ -58,6 +47,8 @@ impl ShaP2PoolGrpc impl ShaP2Pool for ShaP2PoolGrpc where S: ShareChain + Send + Sync + 'static { + /// Returns a new block (that can be mined) which contains all the shares generated + /// from the current share chain as coinbase transactions. 
async fn get_new_block(&self, _request: Request) -> Result, Status> { let mut pow_algo = PowAlgo::default(); pow_algo.set_pow_algo(PowAlgos::Sha3x); @@ -76,11 +67,7 @@ impl ShaP2Pool for ShaP2PoolGrpc // request new block template with shares as coinbases let shares = self.share_chain.generate_shares(reward).await; - let share_count = if shares.len() < MIN_SHARE_COUNT { - MIN_SHARE_COUNT - } else { - shares.len() - }; + let share_count = shares.len(); let response = self.client.lock().await .get_new_block_template_with_coinbases(GetNewBlockTemplateWithCoinbasesRequest { @@ -92,6 +79,11 @@ impl ShaP2Pool for ShaP2PoolGrpc // set target difficulty let miner_data = response.clone().miner_data.ok_or_else(|| Status::internal("missing miner data"))?; let target_difficulty = miner_data.target_difficulty / share_count as u64; + let target_difficulty = if target_difficulty < MIN_DIFFICULTY { + MIN_DIFFICULTY + } else { + target_difficulty + }; Ok(Response::new(GetNewBlockResponse { block: Some(response), @@ -99,6 +91,9 @@ impl ShaP2Pool for ShaP2PoolGrpc })) } + /// Validates the submitted block with the p2pool network, checks for difficulty matching + /// with network (using base node), submits mined block to base node and submits new p2pool block + /// to p2pool network. async fn submit_block(&self, request: Request) -> Result, Status> { let grpc_block = request.get_ref(); let grpc_request_payload = grpc_block.block.clone() @@ -117,7 +112,7 @@ impl ShaP2Pool for ShaP2PoolGrpc // Check block's difficulty compared to the latest network one to increase the probability // to get the block accepted (and also a block with lower difficulty than latest one is invalid anyway). 
- let request_block_difficulty = sha3x_difficulty(&origin_block_header) + let request_block_difficulty = sha3x_difficulty(origin_block_header) .map_err(|error| { Status::internal(error.to_string()) })?; let mut network_difficulty_stream = self.client.lock().await.get_network_difficulty(HeightRequest { from_tip: 0, @@ -145,7 +140,7 @@ impl ShaP2Pool for ShaP2PoolGrpc let grpc_request = Request::from_parts(metadata, extensions, grpc_request_payload); match self.client.lock().await.submit_block(grpc_request).await { Ok(resp) => { - info!("Block found and sent successfully! (rewards will be paid out)"); + info!("💰 New matching block found and sent to network!"); block.set_sent_to_main_chain(true); self.submit_share_chain_block(&block).await?; Ok(resp) diff --git a/src/server/grpc/util.rs b/src/server/grpc/util.rs new file mode 100644 index 00000000..b245a0e6 --- /dev/null +++ b/src/server/grpc/util.rs @@ -0,0 +1,34 @@ +use std::time::Duration; + +use log::error; +use minotari_app_grpc::tari_rpc::base_node_client::BaseNodeClient; +use minotari_node_grpc_client::BaseNodeGrpcClient; +use tokio::time::sleep; +use tonic::transport::Channel; + +use crate::server::grpc::error::{Error, TonicError}; + +pub async fn connect_base_node(base_node_address: String) -> Result, Error> { + let client_result = BaseNodeGrpcClient::connect(base_node_address.clone()) + .await + .map_err(|e| Error::Tonic(TonicError::Transport(e))); + let client = match client_result { + Ok(client) => client, + Err(error) => { + error!("[Retry] Failed to connect to Tari base node: {:?}", error.to_string()); + let mut client = None; + while client.is_none() { + sleep(Duration::from_secs(5)).await; + match BaseNodeGrpcClient::connect(base_node_address.clone()) + .await + .map_err(|e| Error::Tonic(TonicError::Transport(e))) { + Ok(curr_client) => client = Some(curr_client), + Err(error) => error!("[Retry] Failed to connect to Tari base node: {:?}", error.to_string()), + } + } + client.unwrap() + } + }; + + 
Ok(client) +} \ No newline at end of file diff --git a/crates/sha_p2pool/src/server/mod.rs b/src/server/mod.rs similarity index 100% rename from crates/sha_p2pool/src/server/mod.rs rename to src/server/mod.rs diff --git a/crates/sha_p2pool/src/server/p2p/client.rs b/src/server/p2p/client.rs similarity index 72% rename from crates/sha_p2pool/src/server/p2p/client.rs rename to src/server/p2p/client.rs index a8340e01..7fa76641 100644 --- a/crates/sha_p2pool/src/server/p2p/client.rs +++ b/src/server/p2p/client.rs @@ -1,11 +1,12 @@ use std::ops::Deref; +use std::pin::Pin; use std::sync::Arc; -use std::time::{Duration, Instant}; +use std::time::{Duration, Instant, SystemTime, UNIX_EPOCH}; use log::{error, info, warn}; use thiserror::Error; use tokio::select; -use tokio::sync::{broadcast, mpsc}; +use tokio::sync::{broadcast, mpsc, Mutex}; use tokio::sync::broadcast::error::{RecvError, SendError}; use tokio::time::sleep; @@ -19,8 +20,6 @@ pub enum ClientError { ChannelSend(#[from] Box), #[error("Channel receive error: {0}")] ChannelReceive(#[from] RecvError), - #[error("No response for share chain sync request!")] - NoSyncShareChainResponse, } #[derive(Error, Debug)] @@ -36,11 +35,13 @@ pub enum ChannelSendError { #[derive(Clone, Debug)] pub struct ClientSyncShareChainRequest { pub request_id: String, + timestamp: u64, } impl ClientSyncShareChainRequest { pub fn new(request_id: String) -> Self { - Self { request_id } + let timestamp = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs(); + Self { request_id, timestamp } } } @@ -58,20 +59,23 @@ impl ClientSyncShareChainResponse { pub struct ServiceClientChannels { validate_block_sender: broadcast::Sender, - validate_block_receiver: broadcast::Receiver, + validate_block_receiver: Arc>>, broadcast_block_sender: broadcast::Sender, + peer_changes_receiver: broadcast::Receiver<()>, } impl ServiceClientChannels { pub fn new( validate_block_sender: broadcast::Sender, - validate_block_receiver: broadcast::Receiver, + 
validate_block_receiver: mpsc::UnboundedReceiver, broadcast_block_sender: broadcast::Sender, + peer_changes_receiver: broadcast::Receiver<()>, ) -> Self { Self { validate_block_sender, - validate_block_receiver, + validate_block_receiver: Arc::new(Mutex::new(validate_block_receiver)), broadcast_block_sender, + peer_changes_receiver, } } } @@ -115,33 +119,38 @@ impl ServiceClient { info!("[CLIENT] Minimum validation count: {min_validation_count:?}"); // wait for the validations to come - // TODO: listen here for peer_store changes, so we can recalculate min validation count and restart validation flow here let timeout = Duration::from_secs(30); - let mut validate_receiver = self.channels.validate_block_receiver.resubscribe(); + let mut validate_block_receiver = self.channels.validate_block_receiver.lock().await; + let mut peer_changes_receiver = self.channels.peer_changes_receiver.resubscribe(); + let mut peers_changed = false; let mut validation_count = 0; - let block = block.clone(); while validation_count < min_validation_count { select! 
{ _ = sleep(timeout) => { warn!("Timing out waiting for validations!"); break; } - result = validate_receiver.recv() => { - match result { - Ok(validate_result) => { - info!("New validation: {validate_result:?}"); - if validate_result.valid && validate_result.block == block.clone() { - validation_count+=1; - } - } - Err(error) => { - error!("Error during receiving: {error:?}"); + _ = peer_changes_receiver.recv() => { + peers_changed = true; + break; + } + result = validate_block_receiver.recv() => { + if let Some(validate_result) = result { + info!("New validation: {validate_result:?}"); + if validate_result.valid && validate_result.block == *block { + validation_count+=1; } + } else { + break; } } } } + if peers_changed { + return Box::pin(self.validate_block(block)).await; + } + let validation_time = Instant::now().duration_since(start); info!("Validation took {:?}", validation_time); diff --git a/crates/sha_p2pool/src/server/p2p/error.rs b/src/server/p2p/error.rs similarity index 100% rename from crates/sha_p2pool/src/server/p2p/error.rs rename to src/server/p2p/error.rs diff --git a/crates/sha_p2pool/src/server/p2p/messages.rs b/src/server/p2p/messages.rs similarity index 94% rename from crates/sha_p2pool/src/server/p2p/messages.rs rename to src/server/p2p/messages.rs index dde218bb..76303a5b 100644 --- a/crates/sha_p2pool/src/server/p2p/messages.rs +++ b/src/server/p2p/messages.rs @@ -41,11 +41,13 @@ pub fn serialize_message(input: &T) -> Result, Error> #[derive(Serialize, Deserialize, Debug, Copy, Clone)] pub struct PeerInfo { pub current_height: u64, + timestamp: u64, } impl_conversions!(PeerInfo); impl PeerInfo { pub fn new(current_height: u64) -> Self { - Self { current_height } + let timestamp = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs(); + Self { current_height, timestamp } } } diff --git a/crates/sha_p2pool/src/server/p2p/mod.rs b/src/server/p2p/mod.rs similarity index 100% rename from crates/sha_p2pool/src/server/p2p/mod.rs rename 
to src/server/p2p/mod.rs diff --git a/crates/sha_p2pool/src/server/p2p/p2p.rs b/src/server/p2p/p2p.rs similarity index 85% rename from crates/sha_p2pool/src/server/p2p/p2p.rs rename to src/server/p2p/p2p.rs index 2f773bd0..2c656a8a 100644 --- a/crates/sha_p2pool/src/server/p2p/p2p.rs +++ b/src/server/p2p/p2p.rs @@ -9,7 +9,7 @@ use libp2p::gossipsub::{IdentTopic, Message, MessageId, PublishError, Topic}; use libp2p::mdns::tokio::Tokio; use libp2p::request_response::{cbor, ResponseChannel}; use libp2p::swarm::{NetworkBehaviour, SwarmEvent}; -use log::{error, info, warn}; +use log::{debug, error, info, warn}; use tokio::{io, select}; use tokio::sync::{broadcast, mpsc, Mutex}; use tokio::sync::broadcast::error::RecvError; @@ -45,10 +45,11 @@ pub struct Service // TODO: consider mpsc channels instead of broadcast to not miss any message (might drop) client_validate_block_req_tx: broadcast::Sender, client_validate_block_req_rx: broadcast::Receiver, - client_validate_block_res_tx: broadcast::Sender, - client_validate_block_res_rx: broadcast::Receiver, + client_validate_block_res_txs: Vec>, client_broadcast_block_tx: broadcast::Sender, client_broadcast_block_rx: broadcast::Receiver, + client_peer_changes_tx: broadcast::Sender<()>, + client_peer_changes_rx: broadcast::Receiver<()>, } impl Service @@ -57,13 +58,14 @@ impl Service pub fn new(config: &config::Config, share_chain: Arc) -> Result { let swarm = Self::new_swarm(config)?; let peer_store = Arc::new( - PeerStore::new(Duration::from_secs(5)), + PeerStore::new(Duration::from_secs(10)), // TODO: get from config ); // client related channels let (validate_req_tx, validate_req_rx) = broadcast::channel::(1000); - let (validate_res_tx, validate_res_rx) = broadcast::channel::(1000); + let (broadcast_block_tx, broadcast_block_rx) = broadcast::channel::(1000); + let (peer_changes_tx, peer_changes_rx) = broadcast::channel::<()>(1000); Ok(Self { swarm, @@ -72,10 +74,11 @@ impl Service peer_store, client_validate_block_req_tx: 
validate_req_tx, client_validate_block_req_rx: validate_req_rx, - client_validate_block_res_tx: validate_res_tx, - client_validate_block_res_rx: validate_res_rx, + client_validate_block_res_txs: vec![], client_broadcast_block_tx: broadcast_block_tx, client_broadcast_block_rx: broadcast_block_rx, + client_peer_changes_tx: peer_changes_tx, + client_peer_changes_rx: peer_changes_rx, }) } @@ -96,7 +99,6 @@ impl Service soure_peer.to_bytes().hash(&mut s); } message.data.hash(&mut s); - Instant::now().hash(&mut s); gossipsub::MessageId::from(s.finish().to_string()) }; let gossipsub_config = gossipsub::ConfigBuilder::default() @@ -133,12 +135,16 @@ impl Service Ok(swarm) } - pub fn client(&self) -> ServiceClient { + pub fn client(&mut self) -> ServiceClient { + let (validate_res_tx, validate_res_rx) = mpsc::unbounded_channel::(); + self.client_validate_block_res_txs.push(validate_res_tx); + ServiceClient::new( ServiceClientChannels::new( self.client_validate_block_req_tx.clone(), - self.client_validate_block_res_rx.resubscribe(), + validate_res_rx, self.client_broadcast_block_tx.clone(), + self.client_peer_changes_rx.resubscribe(), ), self.peer_store.clone(), ) @@ -254,7 +260,13 @@ impl Service match messages::PeerInfo::try_from(message) { Ok(payload) => { self.peer_store.add(peer, payload).await; - self.sync_share_chain(ClientSyncShareChainRequest::new(format!("{:p}", self))).await; + if let Some(tip) = self.peer_store.tip_of_block_height().await { + if let Ok(curr_height) = self.share_chain.tip_height().await { + if curr_height < tip.height { + self.sync_share_chain(ClientSyncShareChainRequest::new(format!("{:p}", self))).await; + } + } + } } Err(error) => { error!("Can't deserialize peer info payload: {:?}", error); @@ -264,12 +276,23 @@ impl Service BLOCK_VALIDATION_REQUESTS_TOPIC => { match messages::ValidateBlockRequest::try_from(message) { Ok(payload) => { - info!("Block validation request: {payload:?}"); - // TODO: validate block + debug!("Block validation 
request: {payload:?}"); + + let validate_result = self.share_chain.validate_block(&payload.block()).await; + let mut valid = false; + if let Ok(is_valid) = validate_result { + valid = is_valid; + } + + // TODO: Generate partial schnorr signature to prove that current peer validated the block (using peer's private key and broadcast public key vie PeerInfo) + // TODO: to be able to verify at other peers. + // TODO: Validate whether new block includes all the shares (generate shares until height of new_block.height - 1) + // TODO: by generating a new block and check kernels/outputs whether they are the same or not. + let validate_result = ValidateBlockResult::new( *self.swarm.local_peer_id(), payload.block(), - true, // TODO: validate block + valid, ); self.send_block_validation_result(validate_result).await; } @@ -281,9 +304,14 @@ impl Service BLOCK_VALIDATION_RESULTS_TOPIC => { match messages::ValidateBlockResult::try_from(message) { Ok(payload) => { - if let Err(error) = self.client_validate_block_res_tx.send(payload) { - error!("Failed to send block validation result to clients: {error:?}"); + let mut senders_to_delete = vec![]; + for (i, sender) in self.client_validate_block_res_txs.iter().enumerate() { + if let Err(error) = sender.send(payload.clone()) { + error!("Failed to send block validation result to client: {error:?}"); + senders_to_delete.push(i); + } } + senders_to_delete.iter().for_each(|i| { self.client_validate_block_res_txs.remove(*i); }); } Err(error) => { error!("Can't deserialize block validation request payload: {:?}", error); @@ -406,10 +434,15 @@ impl Service self.broadcast_block(block).await; } _ = publish_peer_info_interval.tick() => { + // handle case when we have some peers removed let expired_peers = self.peer_store.cleanup().await; for exp_peer in expired_peers { self.swarm.behaviour_mut().gossipsub.remove_explicit_peer(&exp_peer); } + if let Err(error) = self.client_peer_changes_tx.send(()) { + error!("Failed to send peer changes 
trigger: {error:?}"); + } + if let Err(error) = self.broadcast_peer_info().await { match error { Error::LibP2P(LibP2PError::Publish(PublishError::InsufficientPeers)) => { diff --git a/crates/sha_p2pool/src/server/p2p/peer_store.rs b/src/server/p2p/peer_store.rs similarity index 93% rename from crates/sha_p2pool/src/server/p2p/peer_store.rs rename to src/server/p2p/peer_store.rs index 9b839d91..5d558509 100644 --- a/crates/sha_p2pool/src/server/p2p/peer_store.rs +++ b/src/server/p2p/peer_store.rs @@ -49,7 +49,7 @@ impl PeerStore { pub fn new(ttl: Duration) -> Self { Self { inner: CacheBuilder::new(100_000) - .time_to_live(Duration::from_secs(10)) + .time_to_live(ttl) .build(), ttl, tip_of_block_height: RwLock::new(None), @@ -114,11 +114,6 @@ impl PeerStore { } } - // for exp_peer in expired_peers.clone() { - // lock.remove(exp_peer); - // info!("PEER STORE - {:?} removed", exp_peer); - // } - self.set_tip_of_block_height().await; expired_peers diff --git a/crates/sha_p2pool/src/server/server.rs b/src/server/server.rs similarity index 73% rename from crates/sha_p2pool/src/server/server.rs rename to src/server/server.rs index af429537..ea83ed3f 100644 --- a/crates/sha_p2pool/src/server/server.rs +++ b/src/server/server.rs @@ -1,28 +1,16 @@ -use std::convert::Infallible; -use std::hash::{DefaultHasher, Hash, Hasher}; use std::net::{AddrParseError, SocketAddr}; use std::str::FromStr; use std::sync::Arc; -use std::time::Duration; -use libp2p::{gossipsub, mdns, multiaddr, noise, PeerId, Swarm, tcp, TransportError, yamux}; -use libp2p::futures::StreamExt; -use libp2p::gossipsub::Topic; -use libp2p::mdns::tokio::Tokio; -use libp2p::swarm::{NetworkBehaviour, SwarmEvent}; use log::{error, info}; use minotari_app_grpc::tari_rpc::base_node_server::BaseNodeServer; use minotari_app_grpc::tari_rpc::sha_p2_pool_server::ShaP2PoolServer; use thiserror::Error; -use tokio::{io, io::AsyncBufReadExt, select}; -use tokio::sync::Mutex; use crate::server::{config, grpc, p2p}; use 
crate::server::grpc::base_node::TariBaseNodeGrpc; use crate::server::grpc::error::TonicError; use crate::server::grpc::p2pool::ShaP2PoolGrpc; -use crate::server::p2p::{ServerNetworkBehaviour, ServerNetworkBehaviourEvent, ServiceClient}; -use crate::sharechain::in_memory::InMemoryShareChain; use crate::sharechain::ShareChain; #[derive(Error, Debug)] @@ -30,7 +18,7 @@ pub enum Error { #[error("P2P service error: {0}")] P2PService(#[from] p2p::Error), #[error("gRPC error: {0}")] - GRPC(#[from] grpc::error::Error), + Grpc(#[from] grpc::error::Error), #[error("Socket address parse error: {0}")] AddrParse(#[from] AddrParseError), } @@ -41,7 +29,7 @@ pub struct Server { config: config::Config, p2p_service: p2p::Service, - base_node_grpc_service: BaseNodeServer>, + base_node_grpc_service: BaseNodeServer, p2pool_grpc_service: ShaP2PoolServer>, } @@ -51,23 +39,19 @@ impl Server { pub async fn new(config: config::Config, share_chain: S) -> Result { let share_chain = Arc::new(share_chain); - let p2p_service: p2p::Service = p2p::Service::new(&config, share_chain.clone()).map_err(Error::P2PService)?; + let mut p2p_service: p2p::Service = p2p::Service::new(&config, share_chain.clone()).map_err(Error::P2PService)?; - let base_node_grpc_service = TariBaseNodeGrpc::new( - config.base_node_address.clone(), - p2p_service.client(), - share_chain.clone(), - ).await.map_err(Error::GRPC)?; + let base_node_grpc_service = TariBaseNodeGrpc::new(config.base_node_address.clone()).await.map_err(Error::Grpc)?; let base_node_grpc_server = BaseNodeServer::new(base_node_grpc_service); - let p2pool_grpc_service = ShaP2PoolGrpc::new(config.base_node_address.clone(), p2p_service.client(), share_chain.clone()).await.map_err(Error::GRPC)?; + let p2pool_grpc_service = ShaP2PoolGrpc::new(config.base_node_address.clone(), p2p_service.client(), share_chain.clone()).await.map_err(Error::Grpc)?; let p2pool_server = ShaP2PoolServer::new(p2pool_grpc_service); Ok(Self { config, p2p_service, 
base_node_grpc_service: base_node_grpc_server, p2pool_grpc_service: p2pool_server }) } pub async fn start_grpc( - base_node_service: BaseNodeServer>, + base_node_service: BaseNodeServer, p2pool_service: ShaP2PoolServer>, grpc_port: u16, ) -> Result<(), Error> { @@ -84,7 +68,7 @@ impl Server .await .map_err(|err| { error!("GRPC encountered an error: {:?}", err); - Error::GRPC(grpc::error::Error::Tonic(TonicError::Transport(err))) + Error::Grpc(grpc::error::Error::Tonic(TonicError::Transport(err))) })?; info!("gRPC server stopped!"); diff --git a/crates/sha_p2pool/src/sharechain/block.rs b/src/sharechain/block.rs similarity index 100% rename from crates/sha_p2pool/src/sharechain/block.rs rename to src/sharechain/block.rs diff --git a/crates/sha_p2pool/src/sharechain/error.rs b/src/sharechain/error.rs similarity index 100% rename from crates/sha_p2pool/src/sharechain/error.rs rename to src/sharechain/error.rs diff --git a/crates/sha_p2pool/src/sharechain/in_memory.rs b/src/sharechain/in_memory.rs similarity index 94% rename from crates/sha_p2pool/src/sharechain/in_memory.rs rename to src/sharechain/in_memory.rs index 8645e92f..8822c538 100644 --- a/crates/sha_p2pool/src/sharechain/in_memory.rs +++ b/src/sharechain/in_memory.rs @@ -4,7 +4,7 @@ use std::future::Future; use std::sync::Arc; use async_trait::async_trait; -use log::{info, warn}; +use log::{debug, info, warn}; use minotari_app_grpc::tari_rpc::{NewBlockCoinbase, SubmitBlockRequest}; use prost::Message; use tari_common_types::tari_address::TariAddress; @@ -156,7 +156,7 @@ impl ShareChain for InMemoryShareChain { .filter(|(_, share)| *share > 0.0) .for_each(|(addr, share)| { let curr_reward = ((reward as f64) * share) as u64; - info!("{addr} -> SHARE: {share:?}, REWARD: {curr_reward:?}"); + debug!("{addr} -> SHARE: {share:?} T, REWARD: {curr_reward:?}"); result.push(NewBlockCoinbase { address: addr.clone(), value: curr_reward, @@ -202,4 +202,10 @@ impl ShareChain for InMemoryShareChain { .collect() ) } + + 
async fn validate_block(&self, block: &Block) -> ShareChainResult { + let blocks_read_lock = self.blocks.read().await; + let last_block = blocks_read_lock.last().ok_or_else(|| Error::Empty)?; + self.validate_block(last_block, block).await + } } \ No newline at end of file diff --git a/crates/sha_p2pool/src/sharechain/mod.rs b/src/sharechain/mod.rs similarity index 91% rename from crates/sha_p2pool/src/sharechain/mod.rs rename to src/sharechain/mod.rs index edfe3533..38c9a40c 100644 --- a/crates/sha_p2pool/src/sharechain/mod.rs +++ b/src/sharechain/mod.rs @@ -30,4 +30,7 @@ pub trait ShareChain { /// Returns blocks from the given height (`from_height`, exclusive). async fn blocks(&self, from_height: u64) -> ShareChainResult>; + + /// Validates a block. + async fn validate_block(&self, block: &Block) -> ShareChainResult; } \ No newline at end of file From a7e215b029bb5fa7f0cb066255807ae921d8140f Mon Sep 17 00:00:00 2001 From: richardb Date: Fri, 21 Jun 2024 16:10:17 +0200 Subject: [PATCH 18/43] small fixes --- src/server/grpc/p2pool.rs | 10 +++++++--- src/server/p2p/peer_store.rs | 9 ++++----- 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/src/server/grpc/p2pool.rs b/src/server/grpc/p2pool.rs index 920568a2..f53a47b7 100644 --- a/src/server/grpc/p2pool.rs +++ b/src/server/grpc/p2pool.rs @@ -47,7 +47,7 @@ impl ShaP2PoolGrpc impl ShaP2Pool for ShaP2PoolGrpc where S: ShareChain + Send + Sync + 'static { - /// Returns a new block (that can be mined) which contains all the shares generated + /// Returns a new block (that can be mined) which contains all the shares generated /// from the current share chain as coinbase transactions. 
async fn get_new_block(&self, _request: Request) -> Result, Status> { let mut pow_algo = PowAlgo::default(); @@ -67,7 +67,11 @@ impl ShaP2Pool for ShaP2PoolGrpc // request new block template with shares as coinbases let shares = self.share_chain.generate_shares(reward).await; - let share_count = shares.len(); + let share_count = if shares.is_empty() { + 1 + } else { + shares.len() + }; let response = self.client.lock().await .get_new_block_template_with_coinbases(GetNewBlockTemplateWithCoinbasesRequest { @@ -91,7 +95,7 @@ impl ShaP2Pool for ShaP2PoolGrpc })) } - /// Validates the submitted block with the p2pool network, checks for difficulty matching + /// Validates the submitted block with the p2pool network, checks for difficulty matching /// with network (using base node), submits mined block to base node and submits new p2pool block /// to p2pool network. async fn submit_block(&self, request: Request) -> Result, Status> { diff --git a/src/server/p2p/peer_store.rs b/src/server/p2p/peer_store.rs index 5d558509..b839c8d2 100644 --- a/src/server/p2p/peer_store.rs +++ b/src/server/p2p/peer_store.rs @@ -3,7 +3,7 @@ use std::sync::{Arc, Mutex, RwLock}; use std::time::{Duration, Instant}; use libp2p::PeerId; -use log::info; +use log::{debug, info}; use moka::future::{Cache, CacheBuilder}; use crate::server::p2p::messages::PeerInfo; @@ -99,18 +99,17 @@ impl PeerStore { } pub async fn cleanup(&self) -> Vec { - info!("PEER STORE - cleanup"); + debug!("PEER STORE - cleanup"); let mut expired_peers = vec![]; for (k, v) in self.inner.iter() { - info!("PEER STORE - {:?} -> {:?}", k, v); + debug!("PEER STORE - {:?} -> {:?}", k, v); let elapsed = v.created.elapsed(); let expired = elapsed.gt(&self.ttl); - info!("{:?} ttl elapsed: {:?} <-> {:?}, Expired: {:?}", k, elapsed, &self.ttl, expired); + debug!("{:?} ttl elapsed: {:?} <-> {:?}, Expired: {:?}", k, elapsed, &self.ttl, expired); if expired { expired_peers.push(*k); self.inner.remove(k.as_ref()).await; - info!("PEER STORE - 
removed!"); } } From fc7a20b252c2bc90628baef43cf684efebaad75f Mon Sep 17 00:00:00 2001 From: richardb Date: Mon, 24 Jun 2024 14:40:02 +0200 Subject: [PATCH 19/43] code cleanup in progress + fixed shares count logic --- src/server/config.rs | 19 +++- src/server/grpc/p2pool.rs | 19 ++-- src/server/grpc/util.rs | 1 + src/server/p2p/client.rs | 57 +++++------ src/server/p2p/messages.rs | 10 +- src/server/p2p/mod.rs | 7 +- src/server/p2p/p2p.rs | 191 ++++++++++++++++++++--------------- src/server/p2p/peer_store.rs | 46 +++++++-- src/sharechain/in_memory.rs | 33 +++--- src/sharechain/mod.rs | 4 + 10 files changed, 224 insertions(+), 163 deletions(-) diff --git a/src/server/config.rs b/src/server/config.rs index 83787261..e413db47 100644 --- a/src/server/config.rs +++ b/src/server/config.rs @@ -1,5 +1,8 @@ use std::time::Duration; +use crate::server::p2p; +use crate::server::p2p::peer_store::PeerStoreConfig; + /// Config is the server configuration struct. #[derive(Clone)] pub struct Config { @@ -7,6 +10,8 @@ pub struct Config { pub p2p_port: u16, pub grpc_port: u16, pub idle_connection_timeout: Duration, + pub peer_store: PeerStoreConfig, + pub p2p_service: p2p::Config, } impl Default for Config { @@ -16,6 +21,8 @@ impl Default for Config { p2p_port: 0, // bind to any free port grpc_port: 18145, // to possibly not collide with any other ports idle_connection_timeout: Duration::from_secs(30), + peer_store: PeerStoreConfig::default(), + p2p_service: p2p::Config::default(), } } } @@ -43,11 +50,21 @@ impl ConfigBuilder { self } - pub fn with_idle_connection_timeout(&mut self, timeout: Duration) -> &Self { + pub fn with_idle_connection_timeout(&mut self, timeout: Duration) -> &mut Self { self.config.idle_connection_timeout = timeout; self } + pub fn with_peer_store_config(&mut self, config: PeerStoreConfig) -> &mut Self { + self.config.peer_store = config; + self + } + + pub fn with_p2p_service_config(&mut self, config: p2p::Config) -> &mut Self { + self.config.p2p_service = 
config; + self + } + pub fn build(&self) -> Config { self.config.clone() } diff --git a/src/server/grpc/p2pool.rs b/src/server/grpc/p2pool.rs index f53a47b7..39efb32d 100644 --- a/src/server/grpc/p2pool.rs +++ b/src/server/grpc/p2pool.rs @@ -13,15 +13,18 @@ use crate::server::grpc::error::Error; use crate::server::grpc::util; use crate::server::p2p; use crate::sharechain::block::Block; +use crate::sharechain::SHARE_COUNT; use crate::sharechain::ShareChain; -const MIN_DIFFICULTY: u64 = 100_000; - +/// P2Pool specific gRPC service to provide `get_new_block` and `submit_block` functionalities. pub struct ShaP2PoolGrpc where S: ShareChain + Send + Sync + 'static { + /// Base node client client: Arc>>, + /// P2P service client p2p_client: p2p::ServiceClient, + /// Current share chain share_chain: Arc, } @@ -67,11 +70,6 @@ impl ShaP2Pool for ShaP2PoolGrpc // request new block template with shares as coinbases let shares = self.share_chain.generate_shares(reward).await; - let share_count = if shares.is_empty() { - 1 - } else { - shares.len() - }; let response = self.client.lock().await .get_new_block_template_with_coinbases(GetNewBlockTemplateWithCoinbasesRequest { @@ -82,12 +80,7 @@ impl ShaP2Pool for ShaP2PoolGrpc // set target difficulty let miner_data = response.clone().miner_data.ok_or_else(|| Status::internal("missing miner data"))?; - let target_difficulty = miner_data.target_difficulty / share_count as u64; - let target_difficulty = if target_difficulty < MIN_DIFFICULTY { - MIN_DIFFICULTY - } else { - target_difficulty - }; + let target_difficulty = miner_data.target_difficulty / SHARE_COUNT; Ok(Response::new(GetNewBlockResponse { block: Some(response), diff --git a/src/server/grpc/util.rs b/src/server/grpc/util.rs index b245a0e6..4096c81d 100644 --- a/src/server/grpc/util.rs +++ b/src/server/grpc/util.rs @@ -8,6 +8,7 @@ use tonic::transport::Channel; use crate::server::grpc::error::{Error, TonicError}; +/// Utility function to connect to a Base node and try 
infinitely when it fails until gets connected. pub async fn connect_base_node(base_node_address: String) -> Result, Error> { let client_result = BaseNodeGrpcClient::connect(base_node_address.clone()) .await diff --git a/src/server/p2p/client.rs b/src/server/p2p/client.rs index 7fa76641..286b0245 100644 --- a/src/server/p2p/client.rs +++ b/src/server/p2p/client.rs @@ -1,19 +1,19 @@ -use std::ops::Deref; -use std::pin::Pin; use std::sync::Arc; -use std::time::{Duration, Instant, SystemTime, UNIX_EPOCH}; +use std::time::{Duration, Instant, UNIX_EPOCH}; -use log::{error, info, warn}; +use log::{debug, error, warn}; use thiserror::Error; use tokio::select; use tokio::sync::{broadcast, mpsc, Mutex}; use tokio::sync::broadcast::error::{RecvError, SendError}; use tokio::time::sleep; -use crate::server::p2p::messages::{ShareChainSyncResponse, ValidateBlockRequest, ValidateBlockResult}; +use crate::server::p2p::messages::{ValidateBlockRequest, ValidateBlockResult}; use crate::server::p2p::peer_store::PeerStore; use crate::sharechain::block::Block; +const LOG_TARGET: &str = "p2p_service_client"; + #[derive(Error, Debug)] pub enum ClientError { #[error("Channel send error: {0}")] @@ -28,35 +28,22 @@ pub enum ChannelSendError { ValidateBlockRequest(#[from] SendError), #[error("Send broadcast block error: {0}")] BroadcastBlock(#[from] SendError), - #[error("Send sync share chain request error: {0}")] - ClientSyncShareChainRequest(#[from] SendError), } #[derive(Clone, Debug)] -pub struct ClientSyncShareChainRequest { - pub request_id: String, - timestamp: u64, +pub struct ClientConfig { + pub block_validation_timeout: Duration, } -impl ClientSyncShareChainRequest { - pub fn new(request_id: String) -> Self { - let timestamp = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs(); - Self { request_id, timestamp } - } -} - -#[derive(Clone, Debug)] -pub struct ClientSyncShareChainResponse { - pub request_id: String, - pub success: bool, -} - -impl 
ClientSyncShareChainResponse { - pub fn new(request_id: String, success: bool) -> Self { - Self { request_id, success } +impl Default for ClientConfig { + fn default() -> Self { + Self { + block_validation_timeout: Duration::from_secs(30), + } } } +/// Contains all the channels a client needs to operate successfully. pub struct ServiceClientChannels { validate_block_sender: broadcast::Sender, validate_block_receiver: Arc>>, @@ -80,19 +67,23 @@ impl ServiceClientChannels { } } +/// P2P service client. pub struct ServiceClient { channels: ServiceClientChannels, peer_store: Arc, + config: ClientConfig, } impl ServiceClient { pub fn new( channels: ServiceClientChannels, peer_store: Arc, + config: ClientConfig, ) -> Self { - Self { channels, peer_store } + Self { channels, peer_store, config } } + /// Triggering broadcasting of a new block to p2pool network. pub async fn broadcast_block(&self, block: &Block) -> Result<(), ClientError> { self.channels.broadcast_block_sender.send(block.clone()) .map_err(|error| @@ -102,8 +93,8 @@ impl ServiceClient { Ok(()) } + /// Triggers validation of a new block and waits for the result. 
pub async fn validate_block(&self, block: &Block) -> Result { - info!("[CLIENT] Start block validation"); let start = Instant::now(); // send request to validate block @@ -116,17 +107,16 @@ impl ServiceClient { let peer_count = self.peer_store.peer_count().await as f64 + 1.0; let min_validation_count = (peer_count / 3.0) * 2.0; let min_validation_count = min_validation_count.round() as u64; - info!("[CLIENT] Minimum validation count: {min_validation_count:?}"); + debug!(target: LOG_TARGET, "Minimum validation count: {min_validation_count:?}"); // wait for the validations to come - let timeout = Duration::from_secs(30); let mut validate_block_receiver = self.channels.validate_block_receiver.lock().await; let mut peer_changes_receiver = self.channels.peer_changes_receiver.resubscribe(); let mut peers_changed = false; let mut validation_count = 0; while validation_count < min_validation_count { select! { - _ = sleep(timeout) => { + _ = sleep(self.config.block_validation_timeout) => { warn!("Timing out waiting for validations!"); break; } @@ -136,8 +126,8 @@ impl ServiceClient { } result = validate_block_receiver.recv() => { if let Some(validate_result) = result { - info!("New validation: {validate_result:?}"); if validate_result.valid && validate_result.block == *block { + debug!(target: LOG_TARGET, "New validation result: {validate_result:?}"); validation_count+=1; } } else { @@ -147,12 +137,13 @@ impl ServiceClient { } } + // TODO: add max number of retry times if peers_changed { return Box::pin(self.validate_block(block)).await; } let validation_time = Instant::now().duration_since(start); - info!("Validation took {:?}", validation_time); + debug!(target: LOG_TARGET, "Validation took {:?}", validation_time); Ok(validation_count >= min_validation_count) } diff --git a/src/server/p2p/messages.rs b/src/server/p2p/messages.rs index 76303a5b..b3529ca4 100644 --- a/src/server/p2p/messages.rs +++ b/src/server/p2p/messages.rs @@ -91,24 +91,22 @@ impl ValidateBlockResult { 
#[derive(Serialize, Deserialize, Debug, Clone)] pub struct ShareChainSyncRequest { - pub request_id: String, pub from_height: u64, } impl ShareChainSyncRequest { - pub fn new(request_id: String, from_height: u64) -> Self { - Self { request_id, from_height } + pub fn new(from_height: u64) -> Self { + Self { from_height } } } #[derive(Serialize, Deserialize, Debug, Clone)] pub struct ShareChainSyncResponse { - pub request_id: String, pub blocks: Vec, } impl ShareChainSyncResponse { - pub fn new(request_id: String, blocks: Vec) -> Self { - Self { request_id, blocks } + pub fn new(blocks: Vec) -> Self { + Self { blocks } } } \ No newline at end of file diff --git a/src/server/p2p/mod.rs b/src/server/p2p/mod.rs index 4b08295f..e29f95ce 100644 --- a/src/server/p2p/mod.rs +++ b/src/server/p2p/mod.rs @@ -1,3 +1,6 @@ +//! P2p module contains all the peer-to-peer related implementations and communications. +//! This module uses hardly `libp2p` to communicate between peers efficiently. + pub use client::*; pub use error::*; pub use p2p::*; @@ -5,6 +8,6 @@ pub use p2p::*; mod p2p; mod error; pub mod messages; -mod peer_store; -mod client; +pub mod peer_store; +pub(crate) mod client; diff --git a/src/server/p2p/p2p.rs b/src/server/p2p/p2p.rs index 2c656a8a..11a60418 100644 --- a/src/server/p2p/p2p.rs +++ b/src/server/p2p/p2p.rs @@ -1,7 +1,6 @@ -use std::future::Future; use std::hash::{DefaultHasher, Hash, Hasher}; use std::sync::Arc; -use std::time::{Duration, Instant}; +use std::time::Duration; use libp2p::{gossipsub, mdns, noise, request_response, StreamProtocol, Swarm, tcp, yamux}; use libp2p::futures::StreamExt; @@ -11,11 +10,11 @@ use libp2p::request_response::{cbor, ResponseChannel}; use libp2p::swarm::{NetworkBehaviour, SwarmEvent}; use log::{debug, error, info, warn}; use tokio::{io, select}; -use tokio::sync::{broadcast, mpsc, Mutex}; +use tokio::sync::{broadcast, mpsc}; use tokio::sync::broadcast::error::RecvError; use crate::server::config; -use 
crate::server::p2p::{ClientSyncShareChainRequest, ClientSyncShareChainResponse, Error, LibP2PError, messages, ServiceClient, ServiceClientChannels}; +use crate::server::p2p::{client, Error, LibP2PError, messages, ServiceClient, ServiceClientChannels}; use crate::server::p2p::messages::{PeerInfo, ShareChainSyncRequest, ShareChainSyncResponse, ValidateBlockRequest, ValidateBlockResult}; use crate::server::p2p::peer_store::PeerStore; use crate::sharechain::block::Block; @@ -25,6 +24,23 @@ const PEER_INFO_TOPIC: &str = "peer_info"; const BLOCK_VALIDATION_REQUESTS_TOPIC: &str = "block_validation_requests"; const BLOCK_VALIDATION_RESULTS_TOPIC: &str = "block_validation_results"; const NEW_BLOCK_TOPIC: &str = "new_block"; +const SHARE_CHAIN_SYNC_REQ_RESP_PROTOCOL: &str = "/share_chain_sync/1"; +const LOG_TARGET: &str = "p2p_service"; + +#[derive(Clone, Debug)] +pub struct Config { + pub client: client::ClientConfig, + pub peer_info_publish_interval: Duration, +} + +impl Default for Config { + fn default() -> Self { + Self { + client: client::ClientConfig::default(), + peer_info_publish_interval: Duration::from_secs(5), + } + } +} #[derive(NetworkBehaviour)] pub struct ServerNetworkBehaviour { @@ -33,6 +49,8 @@ pub struct ServerNetworkBehaviour { pub share_chain_sync: cbor::Behaviour, } +/// Service is the implementation that holds every peer-to-peer related logic +/// that makes sure that all the communications, syncing, broadcasting etc... are done. pub struct Service where S: ShareChain + Send + Sync + 'static, { @@ -40,6 +58,7 @@ pub struct Service port: u16, share_chain: Arc, peer_store: Arc, + config: Config, // service client related channels // TODO: consider mpsc channels instead of broadcast to not miss any message (might drop) @@ -55,15 +74,16 @@ pub struct Service impl Service where S: ShareChain + Send + Sync + 'static, { + /// Constructs a new Service from the provided config. + /// It also instantiates libp2p swarm inside. 
pub fn new(config: &config::Config, share_chain: Arc) -> Result { let swarm = Self::new_swarm(config)?; let peer_store = Arc::new( - PeerStore::new(Duration::from_secs(10)), // TODO: get from config + PeerStore::new(&config.peer_store), ); // client related channels let (validate_req_tx, validate_req_rx) = broadcast::channel::(1000); - let (broadcast_block_tx, broadcast_block_rx) = broadcast::channel::(1000); let (peer_changes_tx, peer_changes_rx) = broadcast::channel::<()>(1000); @@ -72,6 +92,7 @@ impl Service port: config.p2p_port, share_chain, peer_store, + config: config.p2p_service.clone(), client_validate_block_req_tx: validate_req_tx, client_validate_block_req_rx: validate_req_rx, client_validate_block_res_txs: vec![], @@ -82,6 +103,7 @@ impl Service }) } + /// Creates a new swarm from the provided config fn new_swarm(config: &config::Config) -> Result, Error> { let swarm = libp2p::SwarmBuilder::with_new_identity() .with_tokio() @@ -106,7 +128,7 @@ impl Service .validation_mode(gossipsub::ValidationMode::Strict) .message_id_fn(message_id_fn) .build() - .map_err(|msg| io::Error::new(io::ErrorKind::Other, msg))?; // Temporary hack because `build` does not return a proper `std::error::Error`. + .map_err(|msg| io::Error::new(io::ErrorKind::Other, msg))?; let gossipsub = gossipsub::Behaviour::new( gossipsub::MessageAuthenticity::Signed(key_pair.clone()), gossipsub_config, @@ -121,7 +143,7 @@ impl Service .map_err(|e| Error::LibP2P(LibP2PError::IO(e)))?, share_chain_sync: cbor::Behaviour::::new( [( - StreamProtocol::new("/share_chain_sync/1"), + StreamProtocol::new(SHARE_CHAIN_SYNC_REQ_RESP_PROTOCOL), request_response::ProtocolSupport::Full, )], request_response::Config::default(), @@ -135,6 +157,8 @@ impl Service Ok(swarm) } + /// Creates a new client for this service, it is thread safe (Send + Sync). + /// Any amount of clients can be created, no need to share the same one across many components. 
pub fn client(&mut self) -> ServiceClient { let (validate_res_tx, validate_res_rx) = mpsc::unbounded_channel::(); self.client_validate_block_res_txs.push(validate_res_tx); @@ -147,60 +171,56 @@ impl Service self.client_peer_changes_rx.resubscribe(), ), self.peer_store.clone(), + self.config.client.clone(), ) } + /// Handles block validation requests coming from Service clients. + /// All the requests from clients are sent to [`BLOCK_VALIDATION_REQUESTS_TOPIC`]. async fn handle_client_validate_block_request(&mut self, result: Result) { - info!("handle_client_validate_block_request - hit!"); match result { Ok(request) => { let request_raw_result: Result, Error> = request.try_into(); match request_raw_result { Ok(request_raw) => { - info!("handle_client_validate_block_request - before publish!"); - match self.swarm.behaviour_mut().gossipsub.publish( + if let Err(error) = self.swarm.behaviour_mut().gossipsub.publish( IdentTopic::new(BLOCK_VALIDATION_REQUESTS_TOPIC), request_raw, ) { - Ok(res) => { - info!("handle_client_validate_block_request - published!"); - } - Err(error) => { - error!("Failed to send block validation request: {error:?}"); - } + error!(target: LOG_TARGET, "Failed to send block validation request: {error:?}"); } } Err(error) => { - error!("Failed to convert block validation request to bytes: {error:?}"); + error!(target: LOG_TARGET, "Failed to convert block validation request to bytes: {error:?}"); } } } Err(error) => { - error!("Block validation request receive error: {error:?}"); + error!(target: LOG_TARGET, "Block validation request receive error: {error:?}"); } } } + /// Sending validation result for a block to [`BLOCK_VALIDATION_RESULTS_TOPIC`] gossipsub topic. 
async fn send_block_validation_result(&mut self, result: ValidateBlockResult) { let result_raw_result: Result, Error> = result.try_into(); match result_raw_result { Ok(result_raw) => { - match self.swarm.behaviour_mut().gossipsub.publish( + if let Err(error) = self.swarm.behaviour_mut().gossipsub.publish( IdentTopic::new(BLOCK_VALIDATION_RESULTS_TOPIC), result_raw, ) { - Ok(_) => {} - Err(error) => { - error!("Failed to publish block validation result: {error:?}"); - } + error!(target: LOG_TARGET, "Failed to publish block validation result: {error:?}"); } } Err(error) => { - error!("Failed to convert block validation result to bytes: {error:?}"); + error!(target: LOG_TARGET, "Failed to convert block validation result to bytes: {error:?}"); } } } + /// Broadcasting current peer's information ([`PeerInfo`]) to other peers in the network + /// by sending this data to [`PEER_INFO_TOPIC`] gossipsub topic. async fn broadcast_peer_info(&mut self) -> Result<(), Error> { // get peer info let share_chain = self.share_chain.clone(); @@ -215,6 +235,7 @@ impl Service Ok(()) } + /// Broadcasting a new mined [`Block`] to the network (assume it is already validated with the network). async fn broadcast_block(&mut self, result: Result) { match result { Ok(block) => { @@ -224,21 +245,23 @@ impl Service match self.swarm.behaviour_mut().gossipsub.publish(IdentTopic::new(NEW_BLOCK_TOPIC), block_raw) .map_err(|error| Error::LibP2P(LibP2PError::Publish(error))) { Ok(_) => {} - Err(error) => error!("Failed to broadcast new block: {error:?}"), + Err(error) => error!(target: LOG_TARGET, "Failed to broadcast new block: {error:?}"), } } - Err(error) => error!("Failed to convert block to bytes: {error:?}"), + Err(error) => error!(target: LOG_TARGET, "Failed to convert block to bytes: {error:?}"), } } - Err(error) => error!("Failed to receive new block: {error:?}"), + Err(error) => error!(target: LOG_TARGET, "Failed to receive new block: {error:?}"), } } + /// Subscribing to a gossipsub topic. 
fn subscribe(&mut self, topic: &str) { self.swarm.behaviour_mut().gossipsub.subscribe(&IdentTopic::new(topic)) .expect("must be subscribed to topic"); } + /// Subscribes to all topics we need. fn subscribe_to_topics(&mut self) { self.subscribe(PEER_INFO_TOPIC); self.subscribe(BLOCK_VALIDATION_REQUESTS_TOPIC); @@ -246,6 +269,7 @@ impl Service self.subscribe(NEW_BLOCK_TOPIC); } + /// Main method to handle any message comes from gossipsub. async fn handle_new_gossipsub_message(&mut self, message: Message) { let peer = message.source; if peer.is_none() { @@ -263,20 +287,20 @@ impl Service if let Some(tip) = self.peer_store.tip_of_block_height().await { if let Ok(curr_height) = self.share_chain.tip_height().await { if curr_height < tip.height { - self.sync_share_chain(ClientSyncShareChainRequest::new(format!("{:p}", self))).await; + self.sync_share_chain().await; } } } } Err(error) => { - error!("Can't deserialize peer info payload: {:?}", error); + error!(target: LOG_TARGET, "Can't deserialize peer info payload: {:?}", error); } } } BLOCK_VALIDATION_REQUESTS_TOPIC => { match messages::ValidateBlockRequest::try_from(message) { Ok(payload) => { - debug!("Block validation request: {payload:?}"); + debug!(target: LOG_TARGET, "Block validation request: {payload:?}"); let validate_result = self.share_chain.validate_block(&payload.block()).await; let mut valid = false; @@ -288,6 +312,7 @@ impl Service // TODO: to be able to verify at other peers. // TODO: Validate whether new block includes all the shares (generate shares until height of new_block.height - 1) // TODO: by generating a new block and check kernels/outputs whether they are the same or not. + // TODO: Validating new blocks version 2 would be to send a proof that was generated from the shares. 
let validate_result = ValidateBlockResult::new( *self.swarm.local_peer_id(), @@ -297,7 +322,7 @@ impl Service self.send_block_validation_result(validate_result).await; } Err(error) => { - error!("Can't deserialize block validation request payload: {:?}", error); + error!(target: LOG_TARGET, "Can't deserialize block validation request payload: {:?}", error); } } } @@ -307,55 +332,60 @@ impl Service let mut senders_to_delete = vec![]; for (i, sender) in self.client_validate_block_res_txs.iter().enumerate() { if let Err(error) = sender.send(payload.clone()) { - error!("Failed to send block validation result to client: {error:?}"); + error!(target: LOG_TARGET, "Failed to send block validation result to client: {error:?}"); senders_to_delete.push(i); } } senders_to_delete.iter().for_each(|i| { self.client_validate_block_res_txs.remove(*i); }); } Err(error) => { - error!("Can't deserialize block validation request payload: {:?}", error); + error!(target: LOG_TARGET, "Can't deserialize block validation request payload: {:?}", error); } } } + // TODO: send a signature that proves that the actual block was coming from this peer NEW_BLOCK_TOPIC => { match Block::try_from(message) { Ok(payload) => { info!("New block from broadcast: {:?}", &payload); if let Err(error) = self.share_chain.submit_block(&payload).await { - error!("Could not add new block to local share chain: {error:?}"); + error!(target: LOG_TARGET, "Could not add new block to local share chain: {error:?}"); } } Err(error) => { - error!("Can't deserialize broadcast block payload: {:?}", error); + error!(target: LOG_TARGET, "Can't deserialize broadcast block payload: {:?}", error); } } } &_ => { - warn!("Unknown topic {topic:?}!"); + warn!(target: LOG_TARGET, "Unknown topic {topic:?}!"); } } } + /// Handles share chain sync request (coming from other peer). 
async fn handle_share_chain_sync_request(&mut self, channel: ResponseChannel, request: ShareChainSyncRequest) { match self.share_chain.blocks(request.from_height).await { Ok(blocks) => { - match self.swarm.behaviour_mut().share_chain_sync.send_response(channel, ShareChainSyncResponse::new(request.request_id, blocks.clone())) { - Ok(_) => {} - Err(_) => error!("Failed to send block sync response") + if self.swarm.behaviour_mut().share_chain_sync.send_response(channel, ShareChainSyncResponse::new(blocks.clone())) + .is_err() { + error!(target: LOG_TARGET, "Failed to send block sync response"); } } - Err(error) => error!("Failed to get blocks from height: {error:?}"), + Err(error) => error!(target: LOG_TARGET, "Failed to get blocks from height: {error:?}"), } } + /// Handle share chain sync response. + /// All the responding blocks will be tried to put into local share chain. async fn handle_share_chain_sync_response(&mut self, response: ShareChainSyncResponse) { if let Err(error) = self.share_chain.submit_blocks(response.blocks).await { - error!("Failed to add synced blocks to share chain: {error:?}"); + error!(target: LOG_TARGET, "Failed to add synced blocks to share chain: {error:?}"); } } - async fn sync_share_chain(&mut self, request: ClientSyncShareChainRequest) { + /// Trigger share chai sync with another peer with the highest known block height. 
+ async fn sync_share_chain(&mut self) { while self.peer_store.tip_of_block_height().await.is_none() {} // waiting for the highest blockchain match self.peer_store.tip_of_block_height().await { Some(result) => { @@ -363,20 +393,21 @@ impl Service Ok(tip) => { self.swarm.behaviour_mut().share_chain_sync.send_request( &result.peer_id, - ShareChainSyncRequest::new(request.request_id, tip), + ShareChainSyncRequest::new(tip), ); } - Err(error) => error!("Failed to get latest height of share chain: {error:?}"), + Err(error) => error!(target: LOG_TARGET, "Failed to get latest height of share chain: {error:?}"), } } - None => error!("Failed to get peer with highest share chain height!") + None => error!(target: LOG_TARGET, "Failed to get peer with highest share chain height!") } } + /// Main method to handle libp2p events. async fn handle_event(&mut self, event: SwarmEvent) { match event { SwarmEvent::NewListenAddr { address, .. } => { - info!("Listening on {address:?}"); + info!(target: LOG_TARGET, "Listening on {address:?}"); } SwarmEvent::Behaviour(event) => match event { ServerNetworkBehaviourEvent::Mdns(mdns_event) => match mdns_event { @@ -387,7 +418,7 @@ impl Service } } mdns::Event::Expired(peers) => { - for (peer, addr) in peers { + for (peer, _addr) in peers { self.swarm.behaviour_mut().gossipsub.remove_explicit_peer(&peer); } } @@ -401,11 +432,11 @@ impl Service gossipsub::Event::GossipsubNotSupported { .. 
} => {} }, ServerNetworkBehaviourEvent::ShareChainSync(event) => match event { - request_response::Event::Message { peer, message } => match message { - request_response::Message::Request { request_id, request, channel } => { + request_response::Event::Message { peer: _peer, message } => match message { + request_response::Message::Request { request_id: _request_id, request, channel } => { self.handle_share_chain_sync_request(channel, request).await; } - request_response::Message::Response { request_id, response } => { + request_response::Message::Response { request_id: _request_id, response } => { self.handle_share_chain_sync_response(response).await; } } @@ -418,47 +449,49 @@ impl Service }; } + /// Main loop of the service that drives the events and libp2p swarm forward. async fn main_loop(&mut self) -> Result<(), Error> { - // TODO: get from config - let mut publish_peer_info_interval = tokio::time::interval(Duration::from_secs(5)); + let mut publish_peer_info_interval = tokio::time::interval(self.config.peer_info_publish_interval); loop { select! 
{ - event = self.swarm.select_next_some() => { - self.handle_event(event).await; - } - result = self.client_validate_block_req_rx.recv() => { - self.handle_client_validate_block_request(result).await; - } - block = self.client_broadcast_block_rx.recv() => { - self.broadcast_block(block).await; - } - _ = publish_peer_info_interval.tick() => { - // handle case when we have some peers removed - let expired_peers = self.peer_store.cleanup().await; - for exp_peer in expired_peers { - self.swarm.behaviour_mut().gossipsub.remove_explicit_peer(&exp_peer); + event = self.swarm.select_next_some() => { + self.handle_event(event).await; } - if let Err(error) = self.client_peer_changes_tx.send(()) { - error!("Failed to send peer changes trigger: {error:?}"); + result = self.client_validate_block_req_rx.recv() => { + self.handle_client_validate_block_request(result).await; } - - if let Err(error) = self.broadcast_peer_info().await { - match error { - Error::LibP2P(LibP2PError::Publish(PublishError::InsufficientPeers)) => { - warn!("No peers to broadcast peer info!"); - } - Error::LibP2P(LibP2PError::Publish(PublishError::Duplicate)) => {} - _ => { - error!("Failed to publish node info: {error:?}"); + block = self.client_broadcast_block_rx.recv() => { + self.broadcast_block(block).await; + } + _ = publish_peer_info_interval.tick() => { + // handle case when we have some peers removed + let expired_peers = self.peer_store.cleanup().await; + for exp_peer in expired_peers { + self.swarm.behaviour_mut().gossipsub.remove_explicit_peer(&exp_peer); + } + if let Err(error) = self.client_peer_changes_tx.send(()) { + error!("Failed to send peer changes trigger: {error:?}"); + } + + if let Err(error) = self.broadcast_peer_info().await { + match error { + Error::LibP2P(LibP2PError::Publish(PublishError::InsufficientPeers)) => { + warn!("No peers to broadcast peer info!"); + } + Error::LibP2P(LibP2PError::Publish(PublishError::Duplicate)) => {} + _ => { + error!("Failed to publish node info: 
{error:?}"); + } } } } } } - } } + /// Starts p2p service. + /// Please note that this is a blocking call! pub async fn start(&mut self) -> Result<(), Error> { self.swarm .listen_on( diff --git a/src/server/p2p/peer_store.rs b/src/server/p2p/peer_store.rs index b839c8d2..7f67313b 100644 --- a/src/server/p2p/peer_store.rs +++ b/src/server/p2p/peer_store.rs @@ -1,13 +1,28 @@ -use std::collections::HashMap; -use std::sync::{Arc, Mutex, RwLock}; +use std::sync::{Mutex, RwLock}; use std::time::{Duration, Instant}; use libp2p::PeerId; -use log::{debug, info}; +use log::debug; use moka::future::{Cache, CacheBuilder}; use crate::server::p2p::messages::PeerInfo; +const LOG_TARGET: &str = "peer_store"; + +#[derive(Copy, Clone, Debug)] +pub struct PeerStoreConfig { + pub peer_record_ttl: Duration, +} + +impl Default for PeerStoreConfig { + fn default() -> Self { + Self { + peer_record_ttl: Duration::from_secs(10), + } + } +} + +/// A record in peer store that holds all needed info of a peer. #[derive(Copy, Clone, Debug)] pub struct PeerStoreRecord { peer_info: PeerInfo, @@ -23,6 +38,7 @@ impl PeerStoreRecord { } } +/// Tip of height from known peers. #[derive(Copy, Clone, Debug)] pub struct PeerStoreBlockHeightTip { pub peer_id: PeerId, @@ -38,37 +54,46 @@ impl PeerStoreBlockHeightTip { } } +/// A peer store, which stores all the known peers (from broadcasted [`PeerInfo`] messages) in-memory. +/// This implementation is thread safe and async, so an [`Arc`] is enough to be used to share. pub struct PeerStore { inner: Cache, // Max time to live for the items to avoid non-existing peers in list. ttl: Duration, + // Peer with the highest share chain height. tip_of_block_height: RwLock>, } impl PeerStore { - pub fn new(ttl: Duration) -> Self { + /// Constructs a new peer store with config. 
+ pub fn new(config: &PeerStoreConfig) -> Self { Self { inner: CacheBuilder::new(100_000) - .time_to_live(ttl) + .time_to_live(config.peer_record_ttl) .build(), - ttl, + ttl: config.peer_record_ttl, tip_of_block_height: RwLock::new(None), } } + /// Add a new peer to store. + /// If a peer already exists, just replaces it. pub async fn add(&self, peer_id: PeerId, peer_info: PeerInfo) { self.inner.insert(peer_id, PeerStoreRecord::new(peer_info)).await; self.set_tip_of_block_height().await; } + /// Returns count of peers. + /// Note: it is needed to calculate number of validations needed to make sure a new block is valid. pub async fn peer_count(&self) -> u64 { self.inner.entry_count() } + /// Sets the actual highest block height with peer. async fn set_tip_of_block_height(&self) { if let Some((k, v)) = self.inner.iter() - .max_by(|(k1, v1), (k2, v2)| { + .max_by(|(_k1, v1), (_k2, v2)| { v1.peer_info.current_height.cmp(&v2.peer_info.current_height) }) { // save result @@ -89,6 +114,7 @@ impl PeerStore { } } + /// Returns peer with the highest share chain height. pub async fn tip_of_block_height(&self) -> Option { if let Ok(result) = self.tip_of_block_height.read() { if result.is_some() { @@ -98,15 +124,15 @@ impl PeerStore { None } + /// Clean up expired peers. 
pub async fn cleanup(&self) -> Vec { - debug!("PEER STORE - cleanup"); let mut expired_peers = vec![]; for (k, v) in self.inner.iter() { - debug!("PEER STORE - {:?} -> {:?}", k, v); + debug!(target: LOG_TARGET, "{:?} -> {:?}", k, v); let elapsed = v.created.elapsed(); let expired = elapsed.gt(&self.ttl); - debug!("{:?} ttl elapsed: {:?} <-> {:?}, Expired: {:?}", k, elapsed, &self.ttl, expired); + debug!(target: LOG_TARGET, "{:?} ttl elapsed: {:?} <-> {:?}, Expired: {:?}", k, elapsed, &self.ttl, expired); if expired { expired_peers.push(*k); self.inner.remove(k.as_ref()).await; diff --git a/src/sharechain/in_memory.rs b/src/sharechain/in_memory.rs index 8822c538..f06bcb53 100644 --- a/src/sharechain/in_memory.rs +++ b/src/sharechain/in_memory.rs @@ -1,21 +1,18 @@ use std::collections::HashMap; -use std::fmt::Debug; -use std::future::Future; use std::sync::Arc; use async_trait::async_trait; use log::{debug, info, warn}; use minotari_app_grpc::tari_rpc::{NewBlockCoinbase, SubmitBlockRequest}; -use prost::Message; use tari_common_types::tari_address::TariAddress; use tari_core::blocks::BlockHeader; use tari_utilities::epoch_time::EpochTime; -use tokio::sync::{Mutex, RwLock, RwLockWriteGuard}; +use tokio::sync::{RwLock, RwLockWriteGuard}; -use crate::sharechain::{Block, ShareChain, ShareChainResult}; +use crate::sharechain::{Block, MAX_BLOCKS_COUNT, SHARE_COUNT, ShareChain, ShareChainResult}; use crate::sharechain::error::{BlockConvertError, Error}; -const DEFAULT_MAX_BLOCKS_COUNT: usize = 5000; +const LOG_TARGET: &str = "in_memory_share_chain"; pub struct InMemoryShareChain { max_blocks_count: usize, @@ -25,7 +22,7 @@ pub struct InMemoryShareChain { impl Default for InMemoryShareChain { fn default() -> Self { Self { - max_blocks_count: DEFAULT_MAX_BLOCKS_COUNT, + max_blocks_count: MAX_BLOCKS_COUNT, blocks: Arc::new( RwLock::new( vec![ @@ -57,8 +54,8 @@ impl InMemoryShareChain { } } - async fn miners_with_hash_rates(&self) -> HashMap { - let mut result: HashMap = 
HashMap::new(); // target wallet address -> hash rate + async fn miners_with_shares(&self) -> HashMap { + let mut result: HashMap = HashMap::new(); // target wallet address -> number of shares let blocks_read_lock = self.blocks.read().await; blocks_read_lock.iter().for_each(|block| { if let Some(miner_wallet_address) = block.miner_wallet_address() { @@ -77,20 +74,19 @@ impl InMemoryShareChain { async fn validate_block(&self, last_block: &Block, block: &Block) -> ShareChainResult { // check if we have this block as last if last_block == block { - warn!("This block already added, skip"); + warn!(target: LOG_TARGET, "This block already added, skip"); return Ok(false); } // validate hash if block.hash() != block.generate_hash() { - warn!("Invalid block, hashes do not match"); + warn!(target: LOG_TARGET, "Invalid block, hashes do not match"); return Ok(false); } // validate height - info!("VALIDATION - Last block: {:?}", last_block); if last_block.height() + 1 != block.height() { - warn!("Invalid block, invalid block height: {:?} != {:?}", last_block.height() + 1, block.height()); + warn!(target: LOG_TARGET, "Invalid block, invalid block height: {:?} != {:?}", last_block.height() + 1, block.height()); return Ok(false); } @@ -112,12 +108,12 @@ impl InMemoryShareChain { blocks.remove(0); } - info!("New block added: {:?}", block.clone()); + info!(target: LOG_TARGET, "New block added: {:?}", block.clone()); blocks.push(block); let last_block = blocks.last().ok_or_else(|| Error::Empty)?; - info!("Current height: {:?}", last_block.height()); + info!(target: LOG_TARGET, "Current share chain height: {:?}", last_block.height()); Ok(()) } @@ -147,16 +143,15 @@ impl ShareChain for InMemoryShareChain { async fn generate_shares(&self, reward: u64) -> Vec { let mut result = vec![]; - let miners = self.miners_with_hash_rates().await; + let miners = self.miners_with_shares().await; // calculate full hash rate and shares - let full_hash_rate: f64 = miners.values().sum(); miners.iter() - 
.map(|(addr, rate)| (addr, rate / full_hash_rate)) + .map(|(addr, rate)| (addr, rate / SHARE_COUNT as f64)) .filter(|(_, share)| *share > 0.0) .for_each(|(addr, share)| { let curr_reward = ((reward as f64) * share) as u64; - debug!("{addr} -> SHARE: {share:?} T, REWARD: {curr_reward:?}"); + debug!(target: LOG_TARGET, "{addr} -> SHARE: {share:?} T, REWARD: {curr_reward:?}"); result.push(NewBlockCoinbase { address: addr.clone(), value: curr_reward, diff --git a/src/sharechain/mod.rs b/src/sharechain/mod.rs index 38c9a40c..54abb321 100644 --- a/src/sharechain/mod.rs +++ b/src/sharechain/mod.rs @@ -4,6 +4,10 @@ use minotari_app_grpc::tari_rpc::{NewBlockCoinbase, SubmitBlockRequest}; use crate::sharechain::block::Block; use crate::sharechain::error::Error; +pub const MAX_BLOCKS_COUNT: usize = 80; + +pub const SHARE_COUNT: u64 = 100; + pub mod in_memory; pub mod block; pub mod error; From 7c4a64c688b49dc703adf6c1dfe7e366d2c214d0 Mon Sep 17 00:00:00 2001 From: richardb Date: Tue, 25 Jun 2024 12:42:48 +0200 Subject: [PATCH 20/43] fixes --- src/main.rs | 46 ++++++++++++++++++++++++++++++++---- src/server/config.rs | 6 +++++ src/server/grpc/base_node.rs | 2 +- src/server/mod.rs | 2 ++ src/server/p2p/client.rs | 23 +++++++++++++----- src/server/p2p/error.rs | 3 +++ src/server/p2p/p2p.rs | 20 +++++++++++++--- src/server/p2p/peer_store.rs | 2 +- src/server/server.rs | 12 ++++++---- src/sharechain/block.rs | 1 + src/sharechain/in_memory.rs | 15 ++++++------ 11 files changed, 104 insertions(+), 28 deletions(-) diff --git a/src/main.rs b/src/main.rs index 85ab425e..005ced26 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,28 +1,64 @@ +use clap::builder::Styles; +use clap::builder::styling::AnsiColor; use clap::Parser; +use env_logger::Builder; +use log::LevelFilter; use crate::sharechain::in_memory::InMemoryShareChain; mod server; mod sharechain; +fn cli_styles() -> Styles { + Styles::styled() + .header(AnsiColor::BrightYellow.on_default()) + 
.usage(AnsiColor::BrightYellow.on_default()) + .literal(AnsiColor::BrightGreen.on_default()) + .placeholder(AnsiColor::BrightCyan.on_default()) + .error(AnsiColor::BrightRed.on_default()) + .invalid(AnsiColor::BrightRed.on_default()) + .valid(AnsiColor::BrightGreen.on_default()) +} + #[derive(Parser)] -#[command(version, about, long_about = None)] +#[command(version)] +#[command(styles = cli_styles())] +#[command(about = "⛏ Decentralized pool mining for Tari network ⛏", long_about = None)] struct Cli { - /// Optional gRPC port to use. + /// Log level + #[arg(short, long, value_name = "log-level", default_value = Some("info"))] + log_level: LevelFilter, + + /// (Optional) gRPC port to use. #[arg(short, long, value_name = "grpc-port")] grpc_port: Option, + + /// (Optional) p2p port to use. It is used to connect p2pool nodes. + #[arg(short, long, value_name = "p2p-port")] + p2p_port: Option, + + /// (Optional) seed peers. + /// Any amount of seed peers can be added to join a p2pool network. + /// Please note that these addresses must be in libp2p multi address format! 
+ /// e.g.: + #[arg(short, long, value_name = "seed-peers")] + seed_peers: Option>, } #[tokio::main] async fn main() -> anyhow::Result<()> { - env_logger::init(); - - // use cli params for constructing server config let cli = Cli::parse(); + Builder::new().filter_level(cli.log_level).init(); let mut config_builder = server::Config::builder(); if let Some(grpc_port) = cli.grpc_port { config_builder.with_grpc_port(grpc_port); } + if let Some(p2p_port) = cli.p2p_port { + config_builder.with_p2p_port(p2p_port); + } + if let Some(seed_peers) = cli.seed_peers { + config_builder.with_seed_peers(seed_peers); + } let config = config_builder.build(); let share_chain = InMemoryShareChain::default(); diff --git a/src/server/config.rs b/src/server/config.rs index e413db47..3ba2274f 100644 --- a/src/server/config.rs +++ b/src/server/config.rs @@ -39,6 +39,7 @@ pub struct ConfigBuilder { config: Config, } +#[allow(dead_code)] impl ConfigBuilder { pub fn with_p2p_port(&mut self, port: u16) -> &mut Self { self.config.p2p_port = port; @@ -65,6 +66,11 @@ impl ConfigBuilder { self } + pub fn with_seed_peers(&mut self, config: Vec) -> &mut Self { + self.config.p2p_service.seed_peers = config; + self + } + pub fn build(&self) -> Config { self.config.clone() } diff --git a/src/server/grpc/base_node.rs b/src/server/grpc/base_node.rs index b0347074..6a98f2a1 100644 --- a/src/server/grpc/base_node.rs +++ b/src/server/grpc/base_node.rs @@ -4,7 +4,7 @@ use libp2p::futures::channel::mpsc; use libp2p::futures::SinkExt; use log::error; use minotari_app_grpc::tari_rpc; -use minotari_app_grpc::tari_rpc::{Block, BlockBlobRequest, BlockGroupRequest, BlockGroupResponse, BlockHeaderResponse, BlockHeight, BlockTimingResponse, ConsensusConstants, Empty, FetchMatchingUtxosRequest, GetActiveValidatorNodesRequest, GetBlocksRequest, GetHeaderByHashRequest, GetMempoolTransactionsRequest, GetNewBlockBlobResult, GetNewBlockResult, GetNewBlockTemplateWithCoinbasesRequest, GetNewBlockWithCoinbasesRequest, 
GetPeersRequest, GetShardKeyRequest, GetShardKeyResponse, GetSideChainUtxosRequest, GetTemplateRegistrationsRequest, HeightRequest, HistoricalBlock, ListConnectedPeersResponse, ListHeadersRequest, MempoolStatsResponse, NetworkStatusResponse, NewBlockCoinbase, NewBlockTemplate, NewBlockTemplateRequest, NewBlockTemplateResponse, NodeIdentity, PowAlgo, SearchKernelsRequest, SearchUtxosRequest, SoftwareUpdate, StringValue, SubmitBlockResponse, SubmitTransactionRequest, SubmitTransactionResponse, SyncInfoResponse, SyncProgressResponse, TipInfoResponse, TransactionStateRequest, TransactionStateResponse, ValueAtHeightResponse}; +use minotari_app_grpc::tari_rpc::{Block, BlockBlobRequest, BlockGroupRequest, BlockGroupResponse, BlockHeaderResponse, BlockHeight, BlockTimingResponse, ConsensusConstants, Empty, FetchMatchingUtxosRequest, GetActiveValidatorNodesRequest, GetBlocksRequest, GetHeaderByHashRequest, GetMempoolTransactionsRequest, GetNewBlockBlobResult, GetNewBlockResult, GetNewBlockTemplateWithCoinbasesRequest, GetNewBlockWithCoinbasesRequest, GetPeersRequest, GetShardKeyRequest, GetShardKeyResponse, GetSideChainUtxosRequest, GetTemplateRegistrationsRequest, HeightRequest, HistoricalBlock, ListConnectedPeersResponse, ListHeadersRequest, MempoolStatsResponse, NetworkStatusResponse, NewBlockTemplate, NewBlockTemplateRequest, NewBlockTemplateResponse, NodeIdentity, SearchKernelsRequest, SearchUtxosRequest, SoftwareUpdate, StringValue, SubmitBlockResponse, SubmitTransactionRequest, SubmitTransactionResponse, SyncInfoResponse, SyncProgressResponse, TipInfoResponse, TransactionStateRequest, TransactionStateResponse, ValueAtHeightResponse}; use minotari_app_grpc::tari_rpc::base_node_client::BaseNodeClient; use tokio::sync::Mutex; use tonic::{Request, Response, Status, Streaming}; diff --git a/src/server/mod.rs b/src/server/mod.rs index 6d9911b8..d08be40d 100644 --- a/src/server/mod.rs +++ b/src/server/mod.rs @@ -2,6 +2,8 @@ pub use config::*; pub use server::*; mod config; 
+ +#[allow(clippy::module_inception)] mod server; pub mod grpc; diff --git a/src/server/p2p/client.rs b/src/server/p2p/client.rs index 286b0245..bd63e3e4 100644 --- a/src/server/p2p/client.rs +++ b/src/server/p2p/client.rs @@ -1,5 +1,5 @@ use std::sync::Arc; -use std::time::{Duration, Instant, UNIX_EPOCH}; +use std::time::{Duration, Instant}; use log::{debug, error, warn}; use thiserror::Error; @@ -33,12 +33,14 @@ pub enum ChannelSendError { #[derive(Clone, Debug)] pub struct ClientConfig { pub block_validation_timeout: Duration, + pub validate_block_max_retries: u64, } impl Default for ClientConfig { fn default() -> Self { Self { block_validation_timeout: Duration::from_secs(30), + validate_block_max_retries: 5, } } } @@ -93,8 +95,12 @@ impl ServiceClient { Ok(()) } - /// Triggers validation of a new block and waits for the result. - pub async fn validate_block(&self, block: &Block) -> Result { + async fn validate_block_with_retries(&self, block: &Block, mut retries: u64) -> Result { + if retries >= self.config.validate_block_max_retries { + warn!(target: LOG_TARGET, "❗Too many validation retries!"); + return Ok(false); + } + let start = Instant::now(); // send request to validate block @@ -117,7 +123,7 @@ impl ServiceClient { while validation_count < min_validation_count { select! { _ = sleep(self.config.block_validation_timeout) => { - warn!("Timing out waiting for validations!"); + warn!(target: LOG_TARGET, "⏰ Timing out waiting for validations!"); break; } _ = peer_changes_receiver.recv() => { @@ -137,9 +143,9 @@ impl ServiceClient { } } - // TODO: add max number of retry times if peers_changed { - return Box::pin(self.validate_block(block)).await; + retries += 1; + return Box::pin(self.validate_block_with_retries(block, retries)).await; } let validation_time = Instant::now().duration_since(start); @@ -147,4 +153,9 @@ impl ServiceClient { Ok(validation_count >= min_validation_count) } + + /// Triggers validation of a new block and waits for the result. 
+ pub async fn validate_block(&self, block: &Block) -> Result { + self.validate_block_with_retries(block, 0).await + } } \ No newline at end of file diff --git a/src/server/p2p/error.rs b/src/server/p2p/error.rs index 5295830c..7ba93ef6 100644 --- a/src/server/p2p/error.rs +++ b/src/server/p2p/error.rs @@ -1,5 +1,6 @@ use libp2p::{multiaddr, noise, TransportError}; use libp2p::gossipsub::PublishError; +use libp2p::swarm::DialError; use thiserror::Error; use crate::server::p2p; @@ -31,4 +32,6 @@ pub enum LibP2PError { Behaviour(String), #[error("Gossip sub publish error: {0}")] Publish(#[from] PublishError), + #[error("Dial error: {0}")] + Dial(#[from] DialError), } \ No newline at end of file diff --git a/src/server/p2p/p2p.rs b/src/server/p2p/p2p.rs index 11a60418..e251bbfe 100644 --- a/src/server/p2p/p2p.rs +++ b/src/server/p2p/p2p.rs @@ -2,9 +2,9 @@ use std::hash::{DefaultHasher, Hash, Hasher}; use std::sync::Arc; use std::time::Duration; -use libp2p::{gossipsub, mdns, noise, request_response, StreamProtocol, Swarm, tcp, yamux}; +use libp2p::{gossipsub, mdns, Multiaddr, noise, request_response, StreamProtocol, Swarm, tcp, yamux}; use libp2p::futures::StreamExt; -use libp2p::gossipsub::{IdentTopic, Message, MessageId, PublishError, Topic}; +use libp2p::gossipsub::{IdentTopic, Message, PublishError}; use libp2p::mdns::tokio::Tokio; use libp2p::request_response::{cbor, ResponseChannel}; use libp2p::swarm::{NetworkBehaviour, SwarmEvent}; @@ -29,6 +29,7 @@ const LOG_TARGET: &str = "p2p_service"; #[derive(Clone, Debug)] pub struct Config { + pub seed_peers: Vec, pub client: client::ClientConfig, pub peer_info_publish_interval: Duration, } @@ -36,6 +37,7 @@ pub struct Config { impl Default for Config { fn default() -> Self { Self { + seed_peers: vec![], client: client::ClientConfig::default(), peer_info_publish_interval: Duration::from_secs(5), } @@ -473,7 +475,7 @@ impl Service if let Err(error) = self.client_peer_changes_tx.send(()) { error!("Failed to send peer 
changes trigger: {error:?}"); } - + if let Err(error) = self.broadcast_peer_info().await { match error { Error::LibP2P(LibP2PError::Publish(PublishError::InsufficientPeers)) => { @@ -490,6 +492,17 @@ impl Service } } + fn join_seed_peers(&mut self) -> Result<(), Error> { + for seed_peer in &self.config.seed_peers { + self.swarm.dial(seed_peer.parse::() + .map_err(|error| Error::LibP2P(LibP2PError::MultiAddrParse(error)))? + ) + .map_err(|error| Error::LibP2P(LibP2PError::Dial(error)))?; + } + + Ok(()) + } + /// Starts p2p service. /// Please note that this is a blocking call! pub async fn start(&mut self) -> Result<(), Error> { @@ -501,6 +514,7 @@ impl Service ) .map_err(|e| Error::LibP2P(LibP2PError::Transport(e)))?; + self.join_seed_peers()?; self.subscribe_to_topics(); self.main_loop().await diff --git a/src/server/p2p/peer_store.rs b/src/server/p2p/peer_store.rs index 7f67313b..bcab063c 100644 --- a/src/server/p2p/peer_store.rs +++ b/src/server/p2p/peer_store.rs @@ -1,4 +1,4 @@ -use std::sync::{Mutex, RwLock}; +use std::sync::RwLock; use std::time::{Duration, Instant}; use libp2p::PeerId; diff --git a/src/server/server.rs b/src/server/server.rs index ea83ed3f..032eaea7 100644 --- a/src/server/server.rs +++ b/src/server/server.rs @@ -13,6 +13,8 @@ use crate::server::grpc::error::TonicError; use crate::server::grpc::p2pool::ShaP2PoolGrpc; use crate::sharechain::ShareChain; +const LOG_TARGET: &str = "server"; + #[derive(Error, Debug)] pub enum Error { #[error("P2P service error: {0}")] @@ -55,7 +57,7 @@ impl Server p2pool_service: ShaP2PoolServer>, grpc_port: u16, ) -> Result<(), Error> { - info!("Starting gRPC server on port {}!", &grpc_port); + info!(target: LOG_TARGET, "Starting gRPC server on port {}!", &grpc_port); tonic::transport::Server::builder() .add_service(base_node_service) @@ -67,17 +69,17 @@ impl Server ) .await .map_err(|err| { - error!("GRPC encountered an error: {:?}", err); + error!(target: LOG_TARGET, "GRPC encountered an error: {:?}", err); 
Error::Grpc(grpc::error::Error::Tonic(TonicError::Transport(err))) })?; - info!("gRPC server stopped!"); + info!(target: LOG_TARGET, "gRPC server stopped!"); Ok(()) } pub async fn start(&mut self) -> Result<(), Error> { - info!("Starting Tari SHA-3 mining P2Pool..."); + info!(target: LOG_TARGET, "⛏ Starting Tari SHA-3 mining P2Pool..."); // local base node and p2pool node grpc services let base_node_grpc_service = self.base_node_grpc_service.clone(); @@ -87,7 +89,7 @@ impl Server match Self::start_grpc(base_node_grpc_service, p2pool_grpc_service, grpc_port).await { Ok(_) => {} Err(error) => { - error!("GRPC Server encountered an error: {:?}", error); + error!(target: LOG_TARGET, "GRPC Server encountered an error: {:?}", error); } } }); diff --git a/src/sharechain/block.rs b/src/sharechain/block.rs index bded433a..7f957b83 100644 --- a/src/sharechain/block.rs +++ b/src/sharechain/block.rs @@ -21,6 +21,7 @@ pub struct Block { } impl_conversions!(Block); +#[allow(dead_code)] impl Block { pub fn builder() -> BlockBuilder { BlockBuilder::new() diff --git a/src/sharechain/in_memory.rs b/src/sharechain/in_memory.rs index f06bcb53..1be2601e 100644 --- a/src/sharechain/in_memory.rs +++ b/src/sharechain/in_memory.rs @@ -37,6 +37,7 @@ impl Default for InMemoryShareChain { } } +#[allow(dead_code)] impl InMemoryShareChain { pub fn new(max_blocks_count: usize) -> Self { Self { @@ -74,19 +75,19 @@ impl InMemoryShareChain { async fn validate_block(&self, last_block: &Block, block: &Block) -> ShareChainResult { // check if we have this block as last if last_block == block { - warn!(target: LOG_TARGET, "This block already added, skip"); + warn!(target: LOG_TARGET, "↩️ This block already added, skip"); return Ok(false); } // validate hash if block.hash() != block.generate_hash() { - warn!(target: LOG_TARGET, "Invalid block, hashes do not match"); + warn!(target: LOG_TARGET, "❌ Invalid block, hashes do not match"); return Ok(false); } // validate height if last_block.height() + 1 != 
block.height() { - warn!(target: LOG_TARGET, "Invalid block, invalid block height: {:?} != {:?}", last_block.height() + 1, block.height()); + warn!(target: LOG_TARGET, "❌ Invalid block, invalid block height: {:?} != {:?}", last_block.height() + 1, block.height()); return Ok(false); } @@ -104,16 +105,16 @@ impl InMemoryShareChain { } if blocks.len() >= self.max_blocks_count { - // remove first element to keep the maximum vector size - blocks.remove(0); + let diff = blocks.len() - self.max_blocks_count; + blocks.drain(0..diff); } - info!(target: LOG_TARGET, "New block added: {:?}", block.clone()); + info!(target: LOG_TARGET, "🆕 New block added: {:?}", block.hash()); blocks.push(block); let last_block = blocks.last().ok_or_else(|| Error::Empty)?; - info!(target: LOG_TARGET, "Current share chain height: {:?}", last_block.height()); + info!(target: LOG_TARGET, "⬆️ Current height: {:?}", last_block.height()); Ok(()) } From bb527a9d5c4e4e5715072c0cdc30ecde450eeaf0 Mon Sep 17 00:00:00 2001 From: richardb Date: Tue, 25 Jun 2024 12:44:31 +0200 Subject: [PATCH 21/43] small update --- src/main.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main.rs b/src/main.rs index 005ced26..84f1e814 100644 --- a/src/main.rs +++ b/src/main.rs @@ -40,7 +40,7 @@ struct Cli { /// (Optional) seed peers. /// Any amount of seed peers can be added to join a p2pool network. /// Please note that these addresses must be in libp2p multi address format! 
- /// e.g.: + /// e.g.: /dnsaddr/libp2p.io #[arg(short, long, value_name = "seed-peers")] seed_peers: Option>, } From 5ab0866a49699db1d6b70b108b259ee7cae5c874 Mon Sep 17 00:00:00 2001 From: richardb Date: Tue, 25 Jun 2024 14:48:39 +0200 Subject: [PATCH 22/43] small changes --- Cargo.toml | 2 +- src/server/p2p/messages.rs | 3 +++ src/server/p2p/p2p.rs | 31 ++++++++++++++++++++++++++----- src/sharechain/in_memory.rs | 5 +++-- 4 files changed, 33 insertions(+), 8 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 2452bac7..fffcf1ef 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,7 +28,7 @@ libp2p = { version = "0.53.2", features = [ "gossipsub", "request-response", "json", - "cbor" + "cbor", ] } tokio = { version = "1.38.0", features = ["full"] } thiserror = "1.0" diff --git a/src/server/p2p/messages.rs b/src/server/p2p/messages.rs index b3529ca4..a1d9013c 100644 --- a/src/server/p2p/messages.rs +++ b/src/server/p2p/messages.rs @@ -73,6 +73,7 @@ pub struct ValidateBlockResult { pub peer_id: PeerId, pub block: Block, pub valid: bool, + pub timestamp: u64, } impl_conversions!(ValidateBlockResult); impl ValidateBlockResult { @@ -81,10 +82,12 @@ impl ValidateBlockResult { block: Block, valid: bool, ) -> Self { + let timestamp = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs(); Self { peer_id, block, valid, + timestamp, } } } diff --git a/src/server/p2p/p2p.rs b/src/server/p2p/p2p.rs index e251bbfe..229ae82f 100644 --- a/src/server/p2p/p2p.rs +++ b/src/server/p2p/p2p.rs @@ -9,6 +9,7 @@ use libp2p::mdns::tokio::Tokio; use libp2p::request_response::{cbor, ResponseChannel}; use libp2p::swarm::{NetworkBehaviour, SwarmEvent}; use log::{debug, error, info, warn}; +use tari_utilities::hex::Hex; use tokio::{io, select}; use tokio::sync::{broadcast, mpsc}; use tokio::sync::broadcast::error::RecvError; @@ -150,6 +151,8 @@ impl Service )], request_response::Config::default(), ), + // rendezvous_server: 
rendezvous::server::Behaviour::new(rendezvous::server::Config::default()), + // rendezvous_client: rendezvous::client::Behaviour::new(key_pair.clone()), }) }) .map_err(|e| Error::LibP2P(LibP2PError::Behaviour(e.to_string())))? @@ -349,7 +352,7 @@ impl Service NEW_BLOCK_TOPIC => { match Block::try_from(message) { Ok(payload) => { - info!("New block from broadcast: {:?}", &payload); + info!(target: LOG_TARGET,"🆕 New block from broadcast: {:?}", &payload.hash().to_hex()); if let Err(error) = self.share_chain.submit_block(&payload).await { error!(target: LOG_TARGET, "Could not add new block to local share chain: {error:?}"); } @@ -445,8 +448,26 @@ impl Service request_response::Event::OutboundFailure { .. } => {} request_response::Event::InboundFailure { .. } => {} request_response::Event::ResponseSent { .. } => {} - } + }, }, + SwarmEvent::ConnectionEstablished { peer_id, .. } => { + // TODO: do some discovery somehow, possibly use rendezvous + self.swarm.behaviour_mut().gossipsub.add_explicit_peer(&peer_id); + } + SwarmEvent::ConnectionClosed { peer_id, .. } => { + self.swarm.behaviour_mut().gossipsub.remove_explicit_peer(&peer_id); + } + SwarmEvent::IncomingConnection { .. } => {} + SwarmEvent::IncomingConnectionError { .. } => {} + SwarmEvent::OutgoingConnectionError { .. } => {} + SwarmEvent::ExpiredListenAddr { .. } => {} + SwarmEvent::ListenerClosed { .. } => {} + SwarmEvent::ListenerError { .. } => {} + SwarmEvent::Dialing { .. } => {} + SwarmEvent::NewExternalAddrCandidate { .. } => {} + SwarmEvent::ExternalAddrConfirmed { .. } => {} + SwarmEvent::ExternalAddrExpired { .. } => {} + SwarmEvent::NewExternalAddrOfPeer { .. 
} => {} _ => {} }; } @@ -473,17 +494,17 @@ impl Service self.swarm.behaviour_mut().gossipsub.remove_explicit_peer(&exp_peer); } if let Err(error) = self.client_peer_changes_tx.send(()) { - error!("Failed to send peer changes trigger: {error:?}"); + error!(target: LOG_TARGET, "Failed to send peer changes trigger: {error:?}"); } if let Err(error) = self.broadcast_peer_info().await { match error { Error::LibP2P(LibP2PError::Publish(PublishError::InsufficientPeers)) => { - warn!("No peers to broadcast peer info!"); + warn!(target: LOG_TARGET, "No peers to broadcast peer info!"); } Error::LibP2P(LibP2PError::Publish(PublishError::Duplicate)) => {} _ => { - error!("Failed to publish node info: {error:?}"); + error!(target: LOG_TARGET, "Failed to publish node info: {error:?}"); } } } diff --git a/src/sharechain/in_memory.rs b/src/sharechain/in_memory.rs index 1be2601e..c8573f76 100644 --- a/src/sharechain/in_memory.rs +++ b/src/sharechain/in_memory.rs @@ -7,6 +7,7 @@ use minotari_app_grpc::tari_rpc::{NewBlockCoinbase, SubmitBlockRequest}; use tari_common_types::tari_address::TariAddress; use tari_core::blocks::BlockHeader; use tari_utilities::epoch_time::EpochTime; +use tari_utilities::hex::Hex; use tokio::sync::{RwLock, RwLockWriteGuard}; use crate::sharechain::{Block, MAX_BLOCKS_COUNT, SHARE_COUNT, ShareChain, ShareChainResult}; @@ -109,12 +110,12 @@ impl InMemoryShareChain { blocks.drain(0..diff); } - info!(target: LOG_TARGET, "🆕 New block added: {:?}", block.hash()); + info!(target: LOG_TARGET, "🆕 New block added: {:?}", block.hash().to_hex()); blocks.push(block); let last_block = blocks.last().ok_or_else(|| Error::Empty)?; - info!(target: LOG_TARGET, "⬆️ Current height: {:?}", last_block.height()); + info!(target: LOG_TARGET, "⬆️ Current height: {:?}", last_block.height()); Ok(()) } From 88e365a99921500e9c1efcb7bd86ea6c663b74b4 Mon Sep 17 00:00:00 2001 From: "C.Lee Taylor" <47312074+leet4tari@users.noreply.github.com> Date: Tue, 25 Jun 2024 14:54:39 +0200 Subject: 
[PATCH 23/43] ci: github base setup (#1) --- .github/ISSUE_TEMPLATE/bug_report.md | 36 + .github/PULL_REQUEST_TEMPLATE.md | 27 + .github/dependabot.yml | 8 + .github/workflows/audit.yml | 27 + .github/workflows/build_binaries.json | 63 ++ .github/workflows/build_binaries.yml | 866 ++++++++++++++++++++++++ .github/workflows/ci.yml | 250 +++++++ .github/workflows/coverage.yml | 48 ++ .github/workflows/integration_tests.yml | 209 ++++++ .github/workflows/pr_title.yml | 30 + .gitignore | 3 + CODEOWNERS | 3 + clippy.toml | 4 + lints.toml | 73 ++ 14 files changed, 1647 insertions(+) create mode 100644 .github/ISSUE_TEMPLATE/bug_report.md create mode 100644 .github/PULL_REQUEST_TEMPLATE.md create mode 100644 .github/dependabot.yml create mode 100644 .github/workflows/audit.yml create mode 100644 .github/workflows/build_binaries.json create mode 100644 .github/workflows/build_binaries.yml create mode 100644 .github/workflows/ci.yml create mode 100644 .github/workflows/coverage.yml create mode 100644 .github/workflows/integration_tests.yml create mode 100644 .github/workflows/pr_title.yml create mode 100644 CODEOWNERS create mode 100644 clippy.toml create mode 100644 lints.toml diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 00000000..c403e78a --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,36 @@ +--- +name: Bug report +about: Create a report to help us improve +title: "[TITLE]" +labels: 'bug-report' +assignees: '' + +--- + +**Describe the bug** +A clear and concise description of what the bug is. + +**To Reproduce** +Steps to reproduce the behavior: +1. Go to '...' +2. Click on '....' +3. Scroll down to '....' +4. See error + +**Expected behavior** +A clear and concise description of what you expected to happen. + +**Screenshots** +If applicable, add screenshots to help explain your problem. + +**Desktop (please complete the following information):** + - OS & Version: [e.g. 
iOS 10.2.1] + - Browser & Version [e.g. chrome v71.0.12345] + +**Smartphone (please complete the following information):** + - Device: [e.g. iPhone6] + - OS: [e.g. iOS8.1] + - Browser & Version [e.g. stock browser v0.1.2] + +**Additional context** +Add any other context about the problem here. diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 00000000..ac0792f1 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,27 @@ +Description +--- + +Motivation and Context +--- + +How Has This Been Tested? +--- + +What process can a PR reviewer use to test or verify this change? +--- + + + + + +Breaking Changes +--- + +- [x] None +- [ ] Requires data directory on base node to be deleted +- [ ] Requires hard fork +- [ ] Other - Please specify + + + diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000..567c3fdb --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,8 @@ +--- +version: 2 +updates: + # Maintain dependencies for GitHub Actions + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" diff --git a/.github/workflows/audit.yml b/.github/workflows/audit.yml new file mode 100644 index 00000000..1fffef08 --- /dev/null +++ b/.github/workflows/audit.yml @@ -0,0 +1,27 @@ +--- +name: Security audit - daily + +'on': + push: + paths: + # Run if workflow changes + - '.github/workflows/audit.yml' + # Run on changed dependencies + - '**/Cargo.toml' + - '**/Cargo.lock' + # Run if the configuration file changes + - '**/audit.toml' + # Rerun periodicly to pick up new advisories + schedule: + - cron: '43 05 * * *' + # Run manually + workflow_dispatch: + +jobs: + security_audit: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: rustsec/audit-check@v1.4.1 + with: + token: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file diff --git a/.github/workflows/build_binaries.json b/.github/workflows/build_binaries.json new 
file mode 100644 index 00000000..943e5110 --- /dev/null +++ b/.github/workflows/build_binaries.json @@ -0,0 +1,63 @@ +[ + { + "name": "linux-x86_64", + "runs-on": "ubuntu-20.04", + "rust": "nightly-2024-03-01", + "target": "x86_64-unknown-linux-gnu", + "cross": false, + "build_metric": true + }, + { + "name": "linux-arm64", + "runs-on": "ubuntu-latest", + "rust": "stable", + "target": "aarch64-unknown-linux-gnu", + "cross": true, + "flags": "--features libtor --workspace --exclude minotari_mining_helper_ffi --exclude tari_integration_tests", + "build_metric": true + }, + { + "name": "linux-riscv64", + "runs-on": "ubuntu-latest", + "rust": "stable", + "target": "riscv64gc-unknown-linux-gnu", + "cross": true, + "flags": "--workspace --exclude minotari_mining_helper_ffi --exclude tari_integration_tests", + "build_enabled": true, + "best_effort": true + }, + { + "name": "macos-x86_64", + "runs-on": "macos-12", + "rust": "stable", + "target": "x86_64-apple-darwin", + "cross": false + }, + { + "name": "macos-arm64", + "runs-on": "macos-14", + "rust": "stable", + "target": "aarch64-apple-darwin", + "cross": false + }, + { + "name": "windows-x64", + "runs-on": "windows-2019", + "rust": "stable", + "target": "x86_64-pc-windows-msvc", + "cross": false, + "features": "safe", + "flags": "--workspace --exclude tari_libtor" + }, + { + "name": "windows-arm64", + "runs-on": "windows-latest", + "rust": "stable", + "target": "aarch64-pc-windows-msvc", + "cross": false, + "features": "safe", + "target_bins": "minotari_node, minotari_console_wallet, minotari_merge_mining_proxy, minotari_miner", + "flags": "--workspace --exclude tari_libtor", + "build_enabled": false + } +] diff --git a/.github/workflows/build_binaries.yml b/.github/workflows/build_binaries.yml new file mode 100644 index 00000000..6bd01edb --- /dev/null +++ b/.github/workflows/build_binaries.yml @@ -0,0 +1,866 @@ +--- +name: Build Matrix of Binaries + +'on': + push: + tags: + - "v[0-9]+.[0-9]+.[0-9]*" + branches: + - 
"build-all-*" + - "build-bins-*" + schedule: + - cron: "05 00 * * *" + workflow_dispatch: + inputs: + customTag: + description: "Development Tag" + required: true + default: "development-tag" + +env: + TS_FILENAME: "tari_suite" + TS_BUNDLE_ID_BASE: "com.tarilabs" + TS_SIG_FN: "sha256-unsigned.txt" + ## Must be a JSon string + TS_FILES: '["minotari_node","minotari_console_wallet","minotari_miner","minotari_merge_mining_proxy"]' + TS_FEATURES: "default, safe" + TS_LIBRARIES: "minotari_mining_helper_ffi" + TARI_NETWORK_DIR: testnet + toolchain: nightly-2024-03-01 + matrix-json-file: ".github/workflows/build_binaries.json" + CARGO_HTTP_MULTIPLEXING: false + CARGO_UNSTABLE_SPARSE_REGISTRY: true + CARGO: cargo + CARGO_OPTIONS: "--release" + CARGO_CACHE: true + +concurrency: + # https://docs.github.com/en/actions/examples/using-concurrency-expressions-and-a-test-matrix + group: '${{ github.workflow }} @ ${{ github.event.pull_request.head.label || github.head_ref || github.ref }}' + cancel-in-progress: ${{ !startsWith(github.ref, 'refs/tags/v') || github.ref != 'refs/heads/development' || github.ref != 'refs/heads/nextnet' || github.ref != 'refs/heads/stagenet' }} + +permissions: {} + +jobs: + matrix-prep: + runs-on: ubuntu-latest + outputs: + matrix: ${{ steps.set-matrix.outputs.matrix }} + steps: + - uses: actions/checkout@v4 + with: + submodules: false + + - name: Set Matrix + id: set-matrix + run: | + # + # build all targets images + # matrix=$( jq -s -c .[] .github/workflows/build_binaries.json ) + # + # build only single target image + # matrix_selection=$( jq -c '.[] | select( ."name" == "windows-x64" )' ${{ env.matrix-json-file }} ) + # matrix_selection=$( jq -c '.[] | select( ."name" | contains("macos") )' ${{ env.matrix-json-file }} ) + # + # build select target images - build_enabled + matrix_selection=$( jq -c '.[] | select( ."build_enabled" != false )' ${{ env.matrix-json-file }} ) + # + # Setup the json build matrix + matrix=$(echo ${matrix_selection} | jq -s 
-c '{"builds": .}') + echo $matrix + echo $matrix | jq . + echo "matrix=${matrix}" >> $GITHUB_OUTPUT + + matrix-check: + # Debug matrix + if: ${{ false }} + runs-on: ubuntu-latest + needs: matrix-prep + steps: + - name: Install json2yaml + run: | + sudo npm install -g json2yaml + + - name: Check matrix definition + run: | + matrix='${{ needs.matrix-prep.outputs.matrix }}' + echo $matrix + echo $matrix | jq . + echo $matrix | json2yaml + + builds: + name: Building ${{ matrix.builds.name }} on ${{ matrix.builds.runs-on }} + needs: matrix-prep + continue-on-error: ${{ matrix.builds.best_effort || false }} + outputs: + TARI_NETWORK_DIR: ${{ steps.set-tari-network.outputs.TARI_NETWORK_DIR }} + TARI_VERSION: ${{ steps.set-tari-vars.outputs.TARI_VERSION }} + VSHA_SHORT: ${{ steps.set-tari-vars.outputs.VSHA_SHORT }} + strategy: + fail-fast: false + matrix: ${{ fromJson(needs.matrix-prep.outputs.matrix) }} + + runs-on: ${{ matrix.builds.runs-on }} + + steps: + - name: Checkout source code + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Declare TestNet for tags + id: set-tari-network + # Don't forget to comment out the below if, when force testing with GHA_NETWORK + if: ${{ startsWith(github.ref, 'refs/tags/v') }} + env: + GHA_NETWORK: ${{ github.ref_name }} + # GHA_NETWORK: "v1.0.0-rc.4" + shell: bash + run: | + source buildtools/multinet_envs.sh ${{ env.GHA_NETWORK }} + echo ${TARI_NETWORK} + echo ${TARI_TARGET_NETWORK} + echo ${TARI_NETWORK_DIR} + echo "TARI_NETWORK=${TARI_NETWORK}" >> $GITHUB_ENV + echo "TARI_TARGET_NETWORK=${TARI_TARGET_NETWORK}" >> $GITHUB_ENV + echo "TARI_NETWORK_DIR=${TARI_NETWORK_DIR}" >> $GITHUB_ENV + echo "TARI_NETWORK_DIR=${TARI_NETWORK_DIR}" >> $GITHUB_OUTPUT + + - name: Declare Global Variables 4 GHA ${{ github.event_name }} + id: set-tari-vars + shell: bash + run: | + echo "VBRANCH=${{ github.ref_name }}" >> $GITHUB_ENV + VSHA_SHORT=$(git rev-parse --short HEAD) + echo "VSHA_SHORT=${VSHA_SHORT}" >> $GITHUB_ENV + echo 
"VSHA_SHORT=${VSHA_SHORT}" >> $GITHUB_OUTPUT + TARI_VERSION=$(awk -F ' = ' '$1 ~ /^version/ \ + { gsub(/["]/, "", $2); printf("%s",$2) }' \ + "$GITHUB_WORKSPACE/applications/minotari_node/Cargo.toml") + echo "TARI_VERSION=${TARI_VERSION}" >> $GITHUB_ENV + echo "TARI_VERSION=${TARI_VERSION}" >> $GITHUB_OUTPUT + if [[ "${{ matrix.builds.features }}" == "" ]]; then + echo "BUILD_FEATURES=${{ env.TS_FEATURES }}" >> $GITHUB_ENV + else + echo "BUILD_FEATURES=${{ matrix.builds.features }}" >> $GITHUB_ENV + fi + TARGET_BINS="" + if [[ "${{ matrix.builds.target_bins }}" == "" ]]; then + ARRAY_BINS=( $(echo ${TS_FILES} | jq --raw-output '.[]' | awk '{ print $1 }') ) + else + ARRAY_BINS=( $(echo "${{ matrix.builds.target_bins }}" | tr ', ' '\n') ) + fi + for BIN_FILE in "${ARRAY_BINS[@]}"; do + echo "Adding ${BIN_FILE} to Builds" + TARGET_BINS+="--bin ${BIN_FILE} " + done + echo "TARGET_BINS=${TARGET_BINS}" >> $GITHUB_ENV + TARGET_LIBS="" + if [[ "${{ matrix.builds.target_libs }}" == "" ]]; then + ARRAY_LIBS=( $(echo ${TS_LIBRARIES} | tr ', ' '\n') ) + else + ARRAY_LIBS=( $(echo "${{ matrix.builds.target_libs }}" | tr ', ' '\n') ) + fi + for LIB_FILE in "${ARRAY_LIBS[@]}"; do + echo "Adding ${LIB_FILE} to library Builds" + TARGET_LIBS+="--package ${LIB_FILE} " + done + echo "TARGET_LIBS=${TARGET_LIBS}" >> $GITHUB_ENV + TARI_BUILD_ISA_CPU=${{ matrix.builds.target }} + # Strip unknown part + TARI_BUILD_ISA_CPU=${TARI_BUILD_ISA_CPU//-unknown-linux-gnu} + # Strip gc used by rust + TARI_BUILD_ISA_CPU=${TARI_BUILD_ISA_CPU//gc} + echo "TARI_BUILD_ISA_CPU=${TARI_BUILD_ISA_CPU}" >> $GITHUB_ENV + + - name: Scheduled Destination Folder Override + if: ${{ github.event_name == 'schedule' && github.event.schedule == '05 00 * * *' }} + shell: bash + run: | + echo "S3_DEST_OVERRIDE=daily/" >> $GITHUB_ENV + + - name: Setup Rust toolchain + uses: dtolnay/rust-toolchain@master + with: + components: rustfmt, clippy + toolchain: ${{ matrix.builds.rust }} + targets: ${{ matrix.builds.target }} + + 
- name: Install Linux dependencies - Ubuntu + if: ${{ startsWith(runner.os,'Linux') && ( ! matrix.builds.cross ) }} + run: | + sudo apt-get update + sudo bash scripts/install_ubuntu_dependencies.sh + + - name: Install Linux dependencies - Ubuntu - cross-compiled ${{ env.TARI_BUILD_ISA_CPU }} on x86-64 + if: ${{ startsWith(runner.os,'Linux') && ( ! matrix.builds.cross ) && matrix.builds.name != 'linux-x86_64' }} + run: | + sudo apt-get update + sudo bash scripts/install_ubuntu_dependencies-cross_compile.sh ${{ env.TARI_BUILD_ISA_CPU }} + rustup target add ${{ matrix.builds.target }} + echo "PKG_CONFIG_SYSROOT_DIR=/usr/${{ env.TARI_BUILD_ISA_CPU }}-linux-gnu/" >> $GITHUB_ENV + + - name: Install macOS dependencies + if: startsWith(runner.os,'macOS') + run: | + # openssl, cmake and autoconf already installed + brew install zip coreutils automake protobuf + rustup target add ${{ matrix.builds.target }} + + - name: Install Windows dependencies + if: startsWith(runner.os,'Windows') + run: | + vcpkg.exe install sqlite3:x64-windows zlib:x64-windows + # Bug in choco - need to install each package individually + choco upgrade llvm -y + # psutils is out of date + # choco upgrade psutils -y + choco upgrade openssl -y + # Should already be installed + # choco upgrade strawberryperl -y + choco upgrade protoc -y + + - name: Set environment variables - Nix + if: ${{ ! 
startsWith(runner.os,'Windows') }} + shell: bash + run: | + echo "SHARUN=shasum --algorithm 256" >> $GITHUB_ENV + echo "CC=gcc" >> $GITHUB_ENV + echo "TS_EXT=" >> $GITHUB_ENV + echo "SHELL_EXT=.sh" >> $GITHUB_ENV + echo "PLATFORM_SPECIFIC_DIR=linux" >> $GITHUB_ENV + echo "TS_DIST=/dist" >> $GITHUB_ENV + + - name: Set environment variables - macOS + if: startsWith(runner.os,'macOS') + shell: bash + run: | + echo "PLATFORM_SPECIFIC_DIR=osx" >> $GITHUB_ENV + echo "LIB_EXT=.dylib" >> $GITHUB_ENV + + # Hardcoded sdk for MacOSX on ARM64 + - name: Set environment variables - macOS - ARM64 (pin/sdk) + # Debug + if: ${{ false }} + # if: ${{ startsWith(runner.os,'macOS') && matrix.builds.name == 'macos-arm64' }} + run: | + xcrun --show-sdk-path + ls -alhtR "/Library/Developer/CommandLineTools/SDKs/" + echo "RANDOMX_RS_CMAKE_OSX_SYSROOT=/Library/Developer/CommandLineTools/SDKs/MacOSX12.1.sdk" >> $GITHUB_ENV + + - name: Set environment variables - Ubuntu + if: startsWith(runner.os,'Linux') + shell: bash + run: | + echo "LIB_EXT=.so" >> $GITHUB_ENV + + - name: Set environment variables - Windows + if: startsWith(runner.os,'Windows') + shell: bash + run: | + # echo "SHARUN=pwsh C:\ProgramData\chocolatey\lib\psutils\tools\psutils-master\shasum.ps1 --algorithm 256" >> $GITHUB_ENV + mkdir -p "$GITHUB_WORKSPACE\psutils" + curl -v -o "$GITHUB_WORKSPACE\psutils\getopt.ps1" "https://raw.githubusercontent.com/lukesampson/psutils/master/getopt.ps1" + curl -v -o "$GITHUB_WORKSPACE\psutils\shasum.ps1" "https://raw.githubusercontent.com/lukesampson/psutils/master/shasum.ps1" + echo "SHARUN=pwsh $GITHUB_WORKSPACE\psutils\shasum.ps1 --algorithm 256" >> $GITHUB_ENV + echo "TS_EXT=.exe" >> $GITHUB_ENV + echo "LIB_EXT=.dll" >> $GITHUB_ENV + echo "SHELL_EXT=.bat" >> $GITHUB_ENV + echo "TS_DIST=\dist" >> $GITHUB_ENV + echo "PLATFORM_SPECIFIC_DIR=windows" >> $GITHUB_ENV + echo "SQLITE3_LIB_DIR=C:\vcpkg\installed\x64-windows\lib" >> $GITHUB_ENV + echo "OPENSSL_DIR=C:\Program Files\OpenSSL-Win64" >> 
$GITHUB_ENV + echo "LIBCLANG_PATH=C:\Program Files\LLVM\bin" >> $GITHUB_ENV + echo "C:\Strawberry\perl\bin" >> $GITHUB_PATH + + - name: Cache cargo files and outputs + if: ${{ ( ! startsWith(github.ref, 'refs/tags/v') ) && ( ! matrix.builds.cross ) && ( env.CARGO_CACHE ) }} + uses: Swatinem/rust-cache@v2 + with: + key: ${{ matrix.builds.target }} + + - name: Install and setup cargo cross + if: ${{ matrix.builds.cross }} + shell: bash + run: | + #cargo install cross + cargo install cross --git https://github.com/cross-rs/cross + echo "CARGO=cross" >> $GITHUB_ENV + + - name: Install and setup cargo-auditable + if: ${{ false }} + # if: ${{ startsWith(github.ref, 'refs/tags/v') }} + shell: bash + run: | + cargo install cargo-auditable + echo "CARGO=${{ env.CARGO }} auditable" >> $GITHUB_ENV + echo "CARGO_OPTIONS=${{ env.CARGO_OPTIONS }} --release" >> $GITHUB_ENV + + - name: Show command used for Cargo + shell: bash + run: | + echo "cargo command is: ${{ env.CARGO }}" + echo "cargo options is: ${{ env.CARGO_OPTIONS }}" + echo "cross flag: ${{ matrix.builds.cross }}" + + - name: Build release binaries + shell: bash + run: | + ${{ env.CARGO }} build ${{ env.CARGO_OPTIONS }} \ + --target ${{ matrix.builds.target }} \ + --features "${{ env.BUILD_FEATURES }}" \ + ${{ env.TARGET_BINS }} \ + ${{ matrix.builds.flags }} --locked + + - name: Build release libraries + shell: bash + run: | + ${{ env.CARGO }} build ${{ env.CARGO_OPTIONS }} \ + --target ${{ matrix.builds.target }} \ + --lib ${{ env.TARGET_LIBS }} \ + ${{ matrix.builds.flags }} --locked + + - name: Copy binaries to folder for archiving + shell: bash + run: | + # set -xo pipefail + mkdir -p "$GITHUB_WORKSPACE${TS_DIST}" + cd "$GITHUB_WORKSPACE${TS_DIST}" + BINFILE="${TS_FILENAME}-${TARI_VERSION}-${VSHA_SHORT}-${{ matrix.builds.name }}${TS_EXT}" + echo "BINFILE=${BINFILE}" >> $GITHUB_ENV + echo "Copying files for ${BINFILE} to $(pwd)" + echo "MTS_SOURCE=$(pwd)" >> $GITHUB_ENV + ls -alht "$GITHUB_WORKSPACE/target/${{ 
matrix.builds.target }}/release/" + ARRAY_FILES=( $(echo ${TS_FILES} | jq --raw-output '.[]' | awk '{ print $1 }') ) + for FILE in "${ARRAY_FILES[@]}"; do + echo "checking for file - ${FILE}${TS_EXT}" + if [ -f "${GITHUB_WORKSPACE}/target/${{ matrix.builds.target }}/release/${FILE}${TS_EXT}" ]; then + cp -vf "${GITHUB_WORKSPACE}/target/${{ matrix.builds.target }}/release/${FILE}${TS_EXT}" . + fi + done + if [[ "${{ matrix.builds.target_libs }}" == "" ]]; then + ARRAY_LIBS=( $(echo ${TS_LIBRARIES} | tr ', ' '\n') ) + else + ARRAY_LIBS=( $(echo "${{ matrix.builds.target_libs }}" | tr ', ' '\n') ) + fi + for FILE in "${ARRAY_LIBS[@]}"; do + echo "checking for file - ${FILE}${TS_EXT}" + # Check on Nix for libs + if [ -f "${GITHUB_WORKSPACE}/target/${{ matrix.builds.target }}/release/lib${FILE}${LIB_EXT}" ]; then + cp -vf "${GITHUB_WORKSPACE}/target/${{ matrix.builds.target }}/release/lib${FILE}${LIB_EXT}" . + fi + # Check on Windows libs + if [ -f "${GITHUB_WORKSPACE}/target/${{ matrix.builds.target }}/release/${FILE}${LIB_EXT}" ]; then + cp -vf "${GITHUB_WORKSPACE}/target/${{ matrix.builds.target }}/release/${FILE}${LIB_EXT}" . + fi + done + if [ -f "${GITHUB_WORKSPACE}/applications/minotari_node/${PLATFORM_SPECIFIC_DIR}/runtime/start_tor${SHELL_EXT}" ]; then + cp -vf "${GITHUB_WORKSPACE}/applications/minotari_node/${PLATFORM_SPECIFIC_DIR}/runtime/start_tor${SHELL_EXT}" . 
+ fi + ls -alhtR ${{ env.MTS_SOURCE }} + + - name: Build minotari_node with metrics too + if: ${{ matrix.builds.build_metric }} + shell: bash + run: | + ${{ env.CARGO }} build ${{ env.CARGO_OPTIONS }} \ + --target ${{ matrix.builds.target }} \ + --features "${{ env.BUILD_FEATURES }}, metrics" \ + --bin minotari_node \ + ${{ matrix.builds.flags }} --locked + cp -vf "$GITHUB_WORKSPACE/target/${{ matrix.builds.target }}/release/minotari_node${TS_EXT}" \ + "${{ env.MTS_SOURCE }}/minotari_node-metrics${TS_EXT}" + + - name: Build targeted miners + # if: ${{ ( startsWith(github.ref, 'refs/tags/v') ) && ( matrix.builds.miner_cpu_targets != '' ) }} + if: ${{ matrix.builds.miner_cpu_targets != '' }} + shell: bash + run: | + ARRAY_TARGETS=( $(echo "${{ matrix.builds.miner_cpu_targets }}" | tr ', ' '\n') ) + for CPU_TARGET in "${ARRAY_TARGETS[@]}"; do + echo "Target CPU ${CPU_TARGET} for miner" + export RUSTFLAGS="-C target-cpu=${CPU_TARGET}" + ${{ env.CARGO }} build ${{ env.CARGO_OPTIONS }} \ + --target ${{ matrix.builds.target }} \ + --features "${{ env.BUILD_FEATURES }}" \ + --bin minotari_miner \ + ${{ matrix.builds.flags }} --locked + cp -vf "$GITHUB_WORKSPACE/target/${{ matrix.builds.target }}/release/minotari_miner" \ + "${{ env.MTS_SOURCE }}/minotari_miner-${CPU_TARGET}" + done + + - name: Pre/unsigned OSX Artifact upload for Archive + # Debug + if: ${{ false }} + # if: startsWith(runner.os,'macOS') + continue-on-error: true + uses: actions/upload-artifact@v4 + with: + name: ${{ env.TS_FILENAME }}_unsigned-archive-${{ matrix.builds.name }} + path: "${{ env.MTS_SOURCE }}/*" + + - name: Build the macOS pkg + if: startsWith(runner.os,'macOS') + continue-on-error: true + env: + MACOS_KEYCHAIN_PASS: ${{ secrets.MACOS_KEYCHAIN_PASS }} + MACOS_APPLICATION_ID: ${{ secrets.MACOS_APPLICATION_ID }} + MACOS_APPLICATION_CERT: ${{ secrets.MACOS_APPLICATION_CERT }} + MACOS_APPLICATION_PASS: ${{ secrets.MACOS_APPLICATION_PASS }} + MACOS_INSTALLER_ID: ${{ secrets.MACOS_INSTALLER_ID }} 
+ MACOS_INSTALLER_CERT: ${{ secrets.MACOS_INSTALLER_CERT }} + MACOS_INSTALLER_PASS: ${{ secrets.MACOS_INSTALLER_PASS }} + MACOS_NOTARIZE_USERNAME: ${{ secrets.MACOS_NOTARIZE_USERNAME }} + MACOS_NOTARIZE_PASSWORD: ${{ secrets.MACOS_NOTARIZE_PASSWORD }} + MACOS_ASC_PROVIDER: ${{ secrets.MACOS_ASC_PROVIDER }} + run: | + echo $MACOS_APPLICATION_CERT | base64 --decode > application.p12 + echo $MACOS_INSTALLER_CERT | base64 --decode > installer.p12 + security create-keychain -p $MACOS_KEYCHAIN_PASS build.keychain + security default-keychain -s build.keychain + security unlock-keychain -p $MACOS_KEYCHAIN_PASS build.keychain + security import application.p12 -k build.keychain -P $MACOS_APPLICATION_PASS -T /usr/bin/codesign + security import installer.p12 -k build.keychain -P $MACOS_INSTALLER_PASS -T /usr/bin/pkgbuild + security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k $MACOS_KEYCHAIN_PASS build.keychain + if [[ "${{ matrix.builds.name }}" == "macos-arm64" ]]; then + echo "Add codesign extra args for ${{ matrix.builds.name }}" + OSX_CODESIGN_EXTRAS="--entitlements ${GITHUB_WORKSPACE}/applications/minotari_node/osx-pkg/entitlements.xml" + fi + cd buildtools + export target_release="target/${{ matrix.builds.target }}/release" + mkdir -p "${{ runner.temp }}/osxpkg" + export tarball_parent="${{ runner.temp }}/osxpkg" + export tarball_source="${{ env.TARI_NETWORK_DIR }}" + ./create_osx_install_zip.sh unused nozip + ARRAY_FILES=( $(echo ${TS_FILES} | jq --raw-output '.[]' | awk '{ print $1 }') ) + find "${GITHUB_WORKSPACE}/${target_release}" \ + -name "randomx-*" -type f -perm -+x \ + -exec cp -vf {} "${{ runner.temp }}/osxpkg/${{ env.TARI_NETWORK_DIR }}/runtime/" \; + FILES_DIAG_UTILS=( \ + $(find "${GITHUB_WORKSPACE}/${target_release}" \ + -name "randomx-*" -type f -perm -+x \ + -exec sh -c 'echo "$(basename "{}")"' \; \ + ) \ + ) + ARRAY_FILES+=(${FILES_DIAG_UTILS[@]}) + for FILE in "${ARRAY_FILES[@]}"; do + codesign --options runtime --force --verify 
--verbose --timestamp ${OSX_CODESIGN_EXTRAS} \ + --prefix "${{ env.TS_BUNDLE_ID_BASE }}.${{ env.TS_FILENAME }}." \ + --sign "Developer ID Application: $MACOS_APPLICATION_ID" \ + "${{ runner.temp }}/osxpkg/${{ env.TARI_NETWORK_DIR }}/runtime/$FILE" + codesign --verify --deep --display --verbose=4 \ + "${{ runner.temp }}/osxpkg/${{ env.TARI_NETWORK_DIR }}/runtime/$FILE" + cp -vf "${{ runner.temp }}/osxpkg/${{ env.TARI_NETWORK_DIR }}/runtime/$FILE" \ + "${{ env.MTS_SOURCE }}" + done + distDirPKG=$(mktemp -d -t ${{ env.TS_FILENAME }}) + echo "${distDirPKG}" + echo "distDirPKG=${distDirPKG}" >> $GITHUB_ENV + TS_Temp=${{ env.TS_FILENAME }} + TS_BUNDLE_ID_VALID_NAME=$(echo "${TS_Temp//_/-}") + # Strip apple-darwin + TS_ARCH=$(echo "${${{ matrix.builds.target }}//-apple-darwin/}") + pkgbuild --root "${{ runner.temp }}/osxpkg/${{ env.TARI_NETWORK_DIR }}" \ + --identifier "${{ env.TS_BUNDLE_ID_BASE }}.pkg.${TS_BUNDLE_ID_VALID_NAME}" \ + --version "${TARI_VERSION}" \ + --install-location "/tmp/tari" \ + --scripts "${{ runner.temp }}/osxpkg/${{ env.TARI_NETWORK_DIR }}/scripts" \ + --sign "Developer ID Installer: ${MACOS_INSTALLER_ID}" \ + "${distDirPKG}/${{ env.TS_FILENAME }}-${{ matrix.builds.name }}-${{ env.TARI_VERSION }}.pkg" + echo -e "Submitting to Apple...\n\n" + xcrun notarytool submit \ + "${distDirPKG}/${{ env.TS_FILENAME }}-${{ matrix.builds.name }}-${{ env.TARI_VERSION }}.pkg" \ + --apple-id "${MACOS_NOTARIZE_USERNAME}" \ + --password ${MACOS_NOTARIZE_PASSWORD} \ + --team-id ${MACOS_ASC_PROVIDER} \ + --verbose --wait 2>&1 | tee -a notarisation.result + # Maybe use line from with "Processing complete"? 
+ requestUUID=$(tail -n5 notarisation.result | grep "id:" | cut -d" " -f 4) + requestSTATUS=$(tail -n5 notarisation.result | grep "\ \ status:" | cut -d" " -f 4) + if [[ ${requestUUID} == "" ]] || [[ ${requestSTATUS} != "Accepted" ]]; then + echo "## status: ${requestSTATUS} - could not notarize - ${requestUUID} - ${{ env.TS_FILENAME }}-${{ matrix.builds.name }}-${{ env.TARI_VERSION }}.pkg" + exit 1 + else + echo "Notarization RequestUUID: ${requestUUID}" + echo -e "\nStapling package...\ + ${{ env.TS_FILENAME }}-${{ matrix.builds.name }}-${{ env.TARI_VERSION }}.pkg\n" + xcrun stapler staple -v \ + "${distDirPKG}/${{ env.TS_FILENAME }}-${{ matrix.builds.name }}-${{ env.TARI_VERSION }}.pkg" + fi + cd ${distDirPKG} + echo "Compute pkg shasum" + ${SHARUN} "${{ env.TS_FILENAME }}-${{ matrix.builds.name }}-${{ env.TARI_VERSION }}.pkg" \ + >> "${{ env.TS_FILENAME }}-${{ matrix.builds.name }}-${{ env.TARI_VERSION }}.pkg.sha256" + cat "${{ env.TS_FILENAME }}-${{ matrix.builds.name }}-${{ env.TARI_VERSION }}.pkg.sha256" + echo "Checksum verification for pkg is " + ${SHARUN} --check "${{ env.TS_FILENAME }}-${{ matrix.builds.name }}-${{ env.TARI_VERSION }}.pkg.sha256" + + - name: Artifact upload for macOS pkg + if: startsWith(runner.os,'macOS') + continue-on-error: true + uses: actions/upload-artifact@v4 + with: + name: ${{ env.TS_FILENAME }}-${{ matrix.builds.name }}-${{ env.TARI_VERSION }}.pkg + path: "${{ env.distDirPKG }}/${{ env.TS_FILENAME }}-${{ matrix.builds.name }}-${{ env.TARI_VERSION }}*.pkg*" + + - name: Build the Windows installer + if: startsWith(runner.os,'Windows') + shell: cmd + run: | + cd buildtools + "%programfiles(x86)%\Inno Setup 6\iscc.exe" "/DMyAppVersion=${{ env.TARI_VERSION }}-${{ env.VSHA_SHORT }}-${{ matrix.builds.name }}-installer" "/DMinotariSuite=${{ env.TS_FILENAME }}" "/DTariSuitePath=${{ github.workspace }}${{ env.TS_DIST }}" "windows_inno_installer.iss" + cd Output + echo "Compute archive shasum" + ${{ env.SHARUN }} "${{ env.TS_FILENAME 
}}-${{ env.TARI_VERSION }}-${{ env.VSHA_SHORT }}-${{ matrix.builds.name }}-installer.exe" >> "${{ env.TS_FILENAME }}-${{ env.TARI_VERSION }}-${{ env.VSHA_SHORT }}-${{ matrix.builds.name }}-installer.exe.sha256" + echo "Show the shasum" + cat "${{ env.TS_FILENAME }}-${{ env.TARI_VERSION }}-${{ env.VSHA_SHORT }}-${{ matrix.builds.name }}-installer.exe.sha256" + echo "Checksum verification archive is " + ${{ env.SHARUN }} --check "${{ env.TS_FILENAME }}-${{ env.TARI_VERSION }}-${{ env.VSHA_SHORT }}-${{ matrix.builds.name }}-installer.exe.sha256" + + - name: Artifact upload for Windows installer + if: startsWith(runner.os,'Windows') + uses: actions/upload-artifact@v4 + with: + name: "${{ env.TS_FILENAME }}_windows_installer" + path: "${{ github.workspace }}/buildtools/Output/*" + + - name: Archive and Checksum Binaries + shell: bash + run: | + echo "Archive ${{ env.BINFILE }} too ${{ env.BINFILE }}.zip" + cd "${{ env.MTS_SOURCE }}" + echo "Compute files shasum" + ${SHARUN} * >> "${{ env.BINFILE }}.sha256" + echo "Show the shasum" + cat "${{ env.BINFILE }}.sha256" + echo "Checksum verification for files is " + ${SHARUN} --check "${{ env.BINFILE }}.sha256" + 7z a "${{ env.BINFILE }}.zip" * + echo "Compute archive shasum" + ${SHARUN} "${{ env.BINFILE }}.zip" >> "${{ env.BINFILE }}.zip.sha256" + echo "Show the shasum" + cat "${{ env.BINFILE }}.zip.sha256" + echo "Checksum verification archive is " + ${SHARUN} --check "${{ env.BINFILE }}.zip.sha256" + + - name: Artifact upload for Archive + uses: actions/upload-artifact@v4 + with: + name: ${{ env.TS_FILENAME }}_archive-${{ matrix.builds.name }} + path: "${{ github.workspace }}${{ env.TS_DIST }}/${{ env.BINFILE }}.zip*" + + - name: Prep diag-utils archive for upload + continue-on-error: true + shell: bash + run: | + mkdir -p "${{ env.MTS_SOURCE }}-diag-utils" + cd "${{ env.MTS_SOURCE }}-diag-utils" + # Find RandomX built tools for testing + find "$GITHUB_WORKSPACE/target/${{ matrix.builds.target }}/release/" \ + -name 
"randomx-*${{ env.TS_EXT}}" -type f -perm -+x -exec cp -vf {} . \; + echo "Compute diag utils shasum" + ${SHARUN} * \ + >> "${{ env.TS_FILENAME }}_archive-diag-utils-${{ matrix.builds.name }}.sha256" + cat "${{ env.TS_FILENAME }}_archive-diag-utils-${{ matrix.builds.name }}.sha256" + echo "Checksum verification for diag utils is " + ${SHARUN} --check "${{ env.TS_FILENAME }}_archive-diag-utils-${{ matrix.builds.name }}.sha256" + 7z a "${{ env.TS_FILENAME }}_archive-diag-utils-${{ matrix.builds.name }}.zip" * + echo "Compute diag utils archive shasum" + ${SHARUN} "${{ env.TS_FILENAME }}_archive-diag-utils-${{ matrix.builds.name }}.zip" \ + >> "${{ env.TS_FILENAME }}_archive-diag-utils-${{ matrix.builds.name }}.zip.sha256" + cat "${{ env.TS_FILENAME }}_archive-diag-utils-${{ matrix.builds.name }}.zip.sha256" + echo "Checksum verification for diag utils archive is " + ${SHARUN} --check "${{ env.TS_FILENAME }}_archive-diag-utils-${{ matrix.builds.name }}.zip.sha256" + + - name: Artifact upload for diag-utils + continue-on-error: true + uses: actions/upload-artifact@v4 + with: + name: ${{ env.TS_FILENAME }}_archive-diag-utils-${{ matrix.builds.name }} + path: "${{ github.workspace }}${{ env.TS_DIST }}-diag-utils/*.zip*" + + macOS-universal-assemble: + name: macOS universal assemble + needs: builds + + env: + TARI_VERSION: ${{ needs.builds.outputs.TARI_VERSION }} + VSHA_SHORT: ${{ needs.builds.outputs.VSHA_SHORT }} + SHARUN: "shasum --algorithm 256" + + continue-on-error: true + + runs-on: macos-14 + + steps: + - name: Checkout source code + uses: actions/checkout@v4 + + - name: Download macOS binaries + uses: actions/download-artifact@v4 + with: + path: osxuni + # macos - x86_64 / arm64 + pattern: ${{ env.TS_FILENAME }}_archive-macos-* + merge-multiple: true + + - name: Set environment variables for macOS universal + shell: bash + run: | + BINFN="${TS_FILENAME}-${TARI_VERSION}-${VSHA_SHORT}" + echo "BINFN=${BINFN}" >> $GITHUB_ENV + + - name: Install macOS dependencies + 
shell: bash + run: | + brew install coreutils + + - name: Verify checksums and extract + shell: bash + working-directory: osxuni + run: | + ls -alhtR + ${SHARUN} --ignore-missing --check \ + "${{ env.BINFN }}-macos-x86_64.zip.sha256" + ${SHARUN} --ignore-missing --check \ + "${{ env.BINFN }}-macos-arm64.zip.sha256" + ls -alhtR + mkdir macos-universal macos-x86_64 macos-arm64 + cd macos-x86_64 + 7z e "../${{ env.BINFN }}-macos-x86_64.zip" + cd ../macos-arm64 + 7z e "../${{ env.BINFN }}-macos-arm64.zip" + + - name: Assemble macOS universal binaries + shell: bash + working-directory: osxuni + run: | + ls -alhtR + ARRAY_FILES=( $(echo ${TS_FILES} | jq --raw-output '.[]' | awk '{ print $1 }') ) + for FILE in "${ARRAY_FILES[@]}"; do + echo "processing binary file - ${FILE}" + lipo -create -output macos-universal/${FILE} \ + macos-x86_64/${FILE} \ + macos-arm64/${FILE} + done + ARRAY_LIBS=( $(echo ${TS_LIBRARIES} | tr ', ' '\n') ) + for FILE in "${ARRAY_LIBS[@]}"; do + echo "processing library file - lib${FILE}.dylib" + lipo -create -output macos-universal/lib${FILE}.dylib \ + macos-x86_64/lib${FILE}.dylib \ + macos-arm64/lib${FILE}.dylib + done + ls -alhtR macos-universal + + - name: Build the macOS universal pkg + continue-on-error: true + env: + MACOS_KEYCHAIN_PASS: ${{ secrets.MACOS_KEYCHAIN_PASS }} + MACOS_APPLICATION_ID: ${{ secrets.MACOS_APPLICATION_ID }} + MACOS_APPLICATION_CERT: ${{ secrets.MACOS_APPLICATION_CERT }} + MACOS_APPLICATION_PASS: ${{ secrets.MACOS_APPLICATION_PASS }} + MACOS_INSTALLER_ID: ${{ secrets.MACOS_INSTALLER_ID }} + MACOS_INSTALLER_CERT: ${{ secrets.MACOS_INSTALLER_CERT }} + MACOS_INSTALLER_PASS: ${{ secrets.MACOS_INSTALLER_PASS }} + MACOS_NOTARIZE_USERNAME: ${{ secrets.MACOS_NOTARIZE_USERNAME }} + MACOS_NOTARIZE_PASSWORD: ${{ secrets.MACOS_NOTARIZE_PASSWORD }} + MACOS_ASC_PROVIDER: ${{ secrets.MACOS_ASC_PROVIDER }} + run: | + echo $MACOS_APPLICATION_CERT | base64 --decode > application.p12 + echo $MACOS_INSTALLER_CERT | base64 --decode > 
installer.p12 + security create-keychain -p $MACOS_KEYCHAIN_PASS build.keychain + security default-keychain -s build.keychain + security unlock-keychain -p $MACOS_KEYCHAIN_PASS build.keychain + security import application.p12 -k build.keychain -P $MACOS_APPLICATION_PASS -T /usr/bin/codesign + security import installer.p12 -k build.keychain -P $MACOS_INSTALLER_PASS -T /usr/bin/pkgbuild + security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k $MACOS_KEYCHAIN_PASS build.keychain + OSX_CODESIGN_EXTRAS="--entitlements ${GITHUB_WORKSPACE}/applications/minotari_node/osx-pkg/entitlements.xml" + cd buildtools + # export target_release="target/${{ matrix.builds.target }}/release" + # matrix.builds.target=macos-universal + # matrix.builds.name=macos-universal + export target_release="osxuni/macos-universal" + mkdir -p "${{ runner.temp }}/osxpkg" + export tarball_parent="${{ runner.temp }}/osxpkg" + export tarball_source="${{ env.TARI_NETWORK_DIR }}" + ./create_osx_install_zip.sh unused nozip + ARRAY_FILES=( $(echo ${TS_FILES} | jq --raw-output '.[]' | awk '{ print $1 }') ) + for FILE in "${ARRAY_FILES[@]}"; do + codesign --options runtime --force --verify --verbose --timestamp ${OSX_CODESIGN_EXTRAS} \ + --prefix "${{ env.TS_BUNDLE_ID_BASE }}.${{ env.TS_FILENAME }}." 
\ + --sign "Developer ID Application: $MACOS_APPLICATION_ID" \ + "${{ runner.temp }}/osxpkg/${{ env.TARI_NETWORK_DIR }}/runtime/$FILE" + codesign --verify --deep --display --verbose=4 \ + "${{ runner.temp }}/osxpkg/${{ env.TARI_NETWORK_DIR }}/runtime/$FILE" + cp -vf "${{ runner.temp }}/osxpkg/${{ env.TARI_NETWORK_DIR }}/runtime/$FILE" \ + "${{ github.workspace }}/osxuni/macos-universal/" + done + distDirPKG=$(mktemp -d -t ${{ env.TS_FILENAME }}) + echo "${distDirPKG}" + echo "distDirPKG=${distDirPKG}" >> $GITHUB_ENV + TS_Temp=${{ env.TS_FILENAME }} + TS_BUNDLE_ID_VALID_NAME=$(echo "${TS_Temp//_/-}") + TS_ARCH=universal + pkgbuild --root "${{ runner.temp }}/osxpkg/${{ env.TARI_NETWORK_DIR }}" \ + --identifier "${{ env.TS_BUNDLE_ID_BASE }}.pkg.${TS_BUNDLE_ID_VALID_NAME}" \ + --version "${TARI_VERSION}" \ + --install-location "/tmp/tari" \ + --scripts "${{ runner.temp }}/osxpkg/${{ env.TARI_NETWORK_DIR }}/scripts" \ + --sign "Developer ID Installer: ${MACOS_INSTALLER_ID}" \ + "${distDirPKG}/${{ env.TS_FILENAME }}-macos-universal-${{ env.TARI_VERSION }}.pkg" + echo -e "Submitting to Apple...\n\n" + xcrun notarytool submit \ + "${distDirPKG}/${{ env.TS_FILENAME }}-macos-universal-${{ env.TARI_VERSION }}.pkg" \ + --apple-id "${MACOS_NOTARIZE_USERNAME}" \ + --password ${MACOS_NOTARIZE_PASSWORD} \ + --team-id ${MACOS_ASC_PROVIDER} \ + --verbose --wait 2>&1 | tee -a notarisation.result + # Maybe use line from with "Processing complete"? 
+ requestUUID=$(tail -n5 notarisation.result | grep "id:" | cut -d" " -f 4) + requestSTATUS=$(tail -n5 notarisation.result | grep "\ \ status:" | cut -d" " -f 4) + if [[ ${requestUUID} == "" ]] || [[ ${requestSTATUS} != "Accepted" ]]; then + echo "## status: ${requestSTATUS} - could not notarize - ${requestUUID} - ${{ env.TS_FILENAME }}-macos-universal-${{ env.TARI_VERSION }}.pkg" + exit 1 + else + echo "Notarization RequestUUID: ${requestUUID}" + echo -e "\nStapling package...\ + ${{ env.TS_FILENAME }}-macos-universal-${{ env.TARI_VERSION }}.pkg\n" + xcrun stapler staple -v \ + "${distDirPKG}/${{ env.TS_FILENAME }}-macos-universal-${{ env.TARI_VERSION }}.pkg" + fi + cd ${distDirPKG} + echo "Compute pkg shasum" + ${SHARUN} "${{ env.TS_FILENAME }}-macos-universal-${{ env.TARI_VERSION }}.pkg" \ + >> "${{ env.TS_FILENAME }}-macos-universal-${{ env.TARI_VERSION }}.pkg.sha256" + cat "${{ env.TS_FILENAME }}-macos-universal-${{ env.TARI_VERSION }}.pkg.sha256" + echo "Checksum verification for pkg is " + ${SHARUN} --check "${{ env.TS_FILENAME }}-macos-universal-${{ env.TARI_VERSION }}.pkg.sha256" + + - name: Artifact upload for macOS universal pkg + if: startsWith(runner.os,'macOS') + continue-on-error: true + uses: actions/upload-artifact@v4 + with: + name: ${{ env.TS_FILENAME }}-macos-universal-${{ env.TARI_VERSION }}.pkg + path: "${{ env.distDirPKG }}/${{ env.TS_FILENAME }}-macos-universal-${{ env.TARI_VERSION }}*.pkg*" + + - name: Archive and Checksum macOS universal Binaries + shell: bash + working-directory: osxuni/macos-universal + run: | + # set -xo pipefail + BINFILE="${BINFN}-macos-universal" + echo "BINFILE=${BINFILE}" >> $GITHUB_ENV + echo "Archive ${BINFILE} into ${BINFILE}.zip" + echo "Compute files shasum into ${BINFILE}.sha256" + ${SHARUN} * >> "${BINFILE}.sha256" + echo "Show the shasum" + cat "${BINFILE}.sha256" + echo "Checksum verification for files is " + ${SHARUN} --check "${BINFILE}.sha256" + 7z a "${BINFILE}.zip" * + echo "Compute archive shasum 
into ${BINFILE}.zip.sha256" + ${SHARUN} "${BINFILE}.zip" >> "${BINFILE}.zip.sha256" + echo "Show the shasum from ${BINFILE}.zip.sha256" + cat "${BINFILE}.zip.sha256" + echo "Checksum verification archive is " + ${SHARUN} --check "${BINFILE}.zip.sha256" + + - name: Artifact upload for Archive + uses: actions/upload-artifact@v4 + with: + name: ${{ env.TS_FILENAME }}_archive-macos-universal + path: "${{ github.workspace }}/osxuni/macos-universal/${{ env.BINFILE }}.zip*" + + create-release: + if: ${{ startsWith(github.ref, 'refs/tags/v') }} + + runs-on: ubuntu-latest + needs: builds + + env: + TARI_NETWORK_DIR: ${{ needs.builds.outputs.TARI_NETWORK_DIR }} + TARI_VERSION: ${{ needs.builds.outputs.TARI_VERSION }} + + permissions: + contents: write + + steps: + - name: Download binaries + uses: actions/download-artifact@v4 + with: + path: ${{ env.TS_FILENAME }} + pattern: "${{ env.TS_FILENAME }}*" + merge-multiple: true + + - name: Verify checksums and Prep Uploads + shell: bash + working-directory: ${{ env.TS_FILENAME }} + run: | + # set -xo pipefail + sudo apt-get update + sudo apt-get --no-install-recommends --assume-yes install dos2unix + ls -alhtR + if [ -f "${{ env.TS_FILENAME }}-${{ env.TARI_VERSION }}.${{ env.TS_SIG_FN }}" ] ; then + rm -fv "${{ env.TS_FILENAME }}-${{ env.TARI_VERSION }}.${{ env.TS_SIG_FN }}" + fi + # Merge all sha256 files into one + find . 
-name "*.sha256" -type f -print | xargs cat >> "${{ env.TS_FILENAME }}-${{ env.TARI_VERSION }}.${{ env.TS_SIG_FN }}" + dos2unix --quiet "${{ env.TS_FILENAME }}-${{ env.TARI_VERSION }}.${{ env.TS_SIG_FN }}" + cat "${{ env.TS_FILENAME }}-${{ env.TARI_VERSION }}.${{ env.TS_SIG_FN }}" + sha256sum --ignore-missing --check "${{ env.TS_FILENAME }}-${{ env.TARI_VERSION }}.${{ env.TS_SIG_FN }}" + ls -alhtR + + - name: Create release + uses: ncipollo/release-action@v1 + with: + artifacts: "${{ env.TS_FILENAME }}*/**/*" + token: ${{ secrets.GITHUB_TOKEN }} + prerelease: true + draft: true + allowUpdates: true + updateOnlyUnreleased: true + replacesArtifacts: true + + - name: Sync assets to S3 + continue-on-error: true + if: ${{ env.AWS_SECRET_ACCESS_KEY != '' && matrix.builds.runs-on != 'self-hosted' }} + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + S3CMD: "cp" + S3OPTIONS: '--recursive --exclude "*" --include "*.sha256*" --include "*.zip*" --include "*.pkg*" --include "*installer.exe*"' + shell: bash + working-directory: ${{ env.TS_FILENAME }} + run: | + echo "Upload processing ..." 
          # A bash file-existence test with wildcards does not work as expected
\ + s3://${{ secrets.AWS_S3_BUCKET }}/current/ \ + ${{ env.S3OPTIONS }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 00000000..70b3fc86 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,250 @@ +--- +name: CI + +'on': + workflow_dispatch: + pull_request: + types: + - opened + - reopened + - synchronize + merge_group: + +env: + toolchain: nightly-2024-03-01 + CARGO_HTTP_MULTIPLEXING: false + CARGO_TERM_COLOR: always + CARGO_UNSTABLE_SPARSE_REGISTRY: true + CARGO_INCREMENTAL: 0 + PROTOC: protoc + TERM: unknown + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + clippy: + name: clippy + runs-on: [ubuntu-20.04] + steps: + - name: checkout + uses: actions/checkout@v4 + - name: toolchain + uses: dtolnay/rust-toolchain@master + with: + toolchain: ${{ env.toolchain }} + components: clippy, rustfmt + - name: ubuntu dependencies + run: | + sudo apt-get update + sudo bash scripts/install_ubuntu_dependencies.sh + - name: caching (nightly) + # Don't use rust-cache. + # Rust-cache disables a key feature of actions/cache: restoreKeys. + # Without restore keys, we lose the ability to get partial matches on caches, and end + # up with too many cache misses. 
+ # Use a "small" suffix to use the build caches where possible, but build caches won't use this + uses: actions/cache@v4 + with: + path: | + ~/.cargo/registry/index + ~/.cargo/registry/cache + ~/.cargo/registry/CACHEDIR.TAG + ~/.cargo/git + target + key: tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly-${{ hashFiles('**/Cargo.lock') }}-small + restore-keys: | + tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly-${{ hashFiles('**/Cargo.lock') }}-small + tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly-${{ hashFiles('**/Cargo.lock') }} + tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly + - name: cargo format + run: cargo fmt --all -- --check + - name: Install cargo-lints + run: cargo install cargo-lints + - name: Clippy check (with lints) + run: cargo lints clippy --all-targets --all-features + machete: + # Checks for unused dependencies. + name: machete + runs-on: [ubuntu-20.04] + steps: + - name: checkout + uses: actions/checkout@v4 + - name: toolchain + uses: dtolnay/rust-toolchain@master + with: + toolchain: ${{ env.toolchain }} + components: clippy, rustfmt + - name: ubuntu dependencies + run: | + sudo apt-get update + sudo bash scripts/install_ubuntu_dependencies.sh + - name: caching (machete) + # Don't use rust-cache. + # Rust-cache disables a key feature of actions/cache: restoreKeys. + # Without restore keys, we lose the ability to get partial matches on caches, and end + # up with too many cache misses. 
+ uses: actions/cache@v4 + with: + path: | + ~/.cargo/registry/index + ~/.cargo/registry/cache + ~/.cargo/registry/CACHEDIR.TAG + ~/.cargo/git + target + key: tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly-${{ hashFiles('**/Cargo.lock') }}-small + restore-keys: | + tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly-${{ hashFiles('**/Cargo.lock') }}-small + tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly-${{ hashFiles('**/Cargo.lock') }} + tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly + - name: cargo machete + run: | + cargo install cargo-machete + cargo machete + build-stable: + # Runs cargo check with stable toolchain to determine whether the codebase is likely to build + # on stable Rust. + name: cargo check with stable + runs-on: [self-hosted, ubuntu-high-cpu] + steps: + - name: checkout + uses: actions/checkout@v4 + - name: caching (stable) + # Don't use rust-cache. + # Rust-cache disables a key feature of actions/cache: restoreKeys. + # Without restore keys, we lose the ability to get partial matches on caches, and end + # up with too many cache misses. + # This job runs on self-hosted, so use local-cache instead. 
+ uses: maxnowack/local-cache@v2 + with: + path: | + ~/.cargo/registry/index + ~/.cargo/registry/cache + ~/.cargo/registry/CACHEDIR.TAG + ~/.cargo/git + target + key: tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-stable-${{ hashFiles('**/Cargo.lock') }}-small + restore-keys: | + tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-stable-${{ hashFiles('**/Cargo.lock') }}-small + tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-stable-${{ hashFiles('**/Cargo.lock') }} + tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-stable + tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }} + - name: rust-toolchain.toml override by removing + run: rm -f rust-toolchain.toml + - name: toolchain + uses: dtolnay/rust-toolchain@stable + with: + toolchain: stable + - name: ubuntu dependencies + run: | + sudo apt-get update + sudo bash scripts/install_ubuntu_dependencies.sh + - name: rustup show + run: rustup show + - name: cargo check + run: cargo check --release --all-targets --workspace --exclude tari_integration_tests --locked + - name: cargo check wallet ffi separately + run: cargo check --release --package minotari_wallet_ffi --locked + - name: cargo check chat ffi separately + run: cargo check --release --package minotari_chat_ffi --locked + licenses: + name: file licenses + runs-on: [ubuntu-20.04] + steps: + - name: checkout + uses: actions/checkout@v4 + - name: install ripgrep + run: | + wget https://github.com/BurntSushi/ripgrep/releases/download/13.0.0/ripgrep_13.0.0_amd64.deb + sudo dpkg -i ripgrep_13.0.0_amd64.deb + rg --version || exit 1 + - name: run the license check + run: ./scripts/file_license_check.sh + test: + name: test + runs-on: [self-hosted, ubuntu-high-cpu] + permissions: + checks: write + pull-requests: write + strategy: + matrix: + tari_target_network: [ + { target: "testnet", network: "esmeralda" }, + { target: "nextnet", network: "nextnet" }, + { target: "mainnet", 
network: "stagenet" }, + ] + env: + TARI_TARGET_NETWORK: ${{ matrix.tari_target_network.target }} + TARI_NETWORK: ${{ matrix.tari_target_network.network }} + RUST_LOG: debug + steps: + - name: checkout + uses: actions/checkout@v4 + - name: toolchain + uses: dtolnay/rust-toolchain@master + with: + toolchain: ${{ env.toolchain }} + - name: ubuntu dependencies + run: | + sudo apt-get update + sudo bash scripts/install_ubuntu_dependencies.sh + - name: caching (nightly) + # Don't use rust-cache. + # Rust-cache disables a key feature of actions/cache: restoreKeys. + # Without restore keys, we lose the ability to get partial matches on caches, and end + # up with too many cache misses. + # This job runs on self-hosted, so use local-cache instead. + uses: maxnowack/local-cache@v2 + with: + path: | + ~/.cargo/registry/index + ~/.cargo/registry/cache + ~/.cargo/registry/CACHEDIR.TAG + ~/.cargo/git + target + key: tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly-${{ hashFiles('**/Cargo.lock') }}-${{ matrix.tari_target_network.target }} + restore-keys: | + tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly-${{ hashFiles('**/Cargo.lock') }}-${{ matrix.tari_target_network.network }} + tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly-${{ hashFiles('**/Cargo.lock') }} + tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly + - name: Install cargo-nextest + run: cargo install cargo-nextest --locked --force + - name: cargo test compile + run: cargo test -vv --no-run --locked --all-features --release + - name: cargo test + run: cargo nextest run --all-features --release -E "not package(tari_integration_tests)" --profile ci + - name: upload artifact + uses: actions/upload-artifact@v4 # upload test results as artifact + if: always() + with: + name: test-results-${{ matrix.tari_target_network.target }}.${{ matrix.tari_target_network.network }} + path: ${{ github.workspace 
}}/target/nextest/ci/junit.xml + + # Allows other workflows to know the PR number + artifacts: + name: pr_2_artifact + runs-on: [ubuntu-20.04] + steps: + - name: Save the PR number in an artifact + shell: bash + env: + PR_NUM: ${{ github.event.number }} + run: echo $PR_NUM > pr_num.txt + + - name: Upload the PR number + uses: actions/upload-artifact@v4 + with: + name: pr_num + path: ./pr_num.txt + + # needed for test results + event_file: + name: "Upload Event File for Test Results" + runs-on: ubuntu-latest + steps: + - name: Upload + uses: actions/upload-artifact@v4 + with: + name: Event File + path: ${{ github.event_path }} diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml new file mode 100644 index 00000000..78e4b2fa --- /dev/null +++ b/.github/workflows/coverage.yml @@ -0,0 +1,48 @@ +--- +# Notes for this action: +# Restoring caches is largely useless, since the compiler flags are only useful for code coverage runs. +# This GA is self-hosted, and has local caching solutions. 
+name: Source Coverage + +'on': + push: + branches: + - development + - ci-coverage-* + +env: + toolchain: nightly-2024-03-01 + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + coverage: + name: test and generate coverage + runs-on: [ self-hosted, ubuntu-high-mem ] + steps: + - name: checkout source code + uses: actions/checkout@v4 + + - name: ubuntu dependencies + run: | + sudo apt-get update + sudo bash scripts/install_ubuntu_dependencies.sh + + - uses: dtolnay/rust-toolchain@master + with: + toolchain: nightly + + - name: run tests with coverage + # Prepare the coverage data, even if some tests fail + continue-on-error: true + run: bash -c ./scripts/source_coverage.sh + + - name: Coveralls upload + uses: coverallsapp/github-action@v2 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + format: lcov + file: lcov.info + diff --git a/.github/workflows/integration_tests.yml b/.github/workflows/integration_tests.yml new file mode 100644 index 00000000..921ab950 --- /dev/null +++ b/.github/workflows/integration_tests.yml @@ -0,0 +1,209 @@ +--- +name: Integration tests + +"on": + pull_request: + types: + - opened + - reopened + - synchronize + merge_group: + schedule: + - cron: '0 2 * * *' # daily @ 02h00 (non-critical) + - cron: '0 12 * * 6' # weekly - Saturday @ noon (long-running) + workflow_dispatch: + inputs: + ci_bins: + type: boolean + default: true + description: 'run ci on binaries' + ci_ffi: + type: boolean + default: true + description: 'run ci on ffi' + ci_profile: + default: ci + description: 'ci profile to run' + type: string + +env: + toolchain: nightly-2024-03-01 + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + # cancel-in-progress: true + +jobs: + base_layer: + name: Cucumber tests / Base Layer + runs-on: [self-hosted, ubuntu-high-cpu] + steps: + - name: checkout + uses: actions/checkout@v4 + + - name: Envs setup + id: envs_setup + shell: bash + run: | + if [ "${{ 
github.event_name }}" == "schedule" ] ; then + echo "CI_FFI=false" >> $GITHUB_ENV + if [ "${{ github.event.schedule }}" == "0 2 * * *" ] ; then + echo "CI_PROFILE=(not @long-running)" >> $GITHUB_ENV + elif [ "${{ github.event.schedule }}" == "0 12 * * 6" ] ; then + echo "CI_PROFILE=@long-running" >> $GITHUB_ENV + fi + else + echo "CI ..." + echo "CI_PROFILE=@critical and (not @long-running)" >> $GITHUB_ENV + CI_BINS=${{ inputs.ci_bins }} + echo "Run binary - ${CI_BINS}" + echo "CI_BINS=${CI_BINS:-true}" >> $GITHUB_ENV + fi + + - name: Setup rust toolchain + uses: dtolnay/rust-toolchain@master + with: + components: rustfmt, clippy + toolchain: ${{ env.toolchain }} + + - name: Install ubuntu dependencies + shell: bash + run: | + sudo apt-get update + sudo bash scripts/install_ubuntu_dependencies.sh + + - name: caching (nightly) + # Don't use rust-cache. + # Rust-cache disables a key feature of actions/cache: restoreKeys. + # Without restore keys, we lose the ability to get partial matches on caches, and end + # up with too many cache misses. + # This job runs on self-hosted, so use local-cache instead. 
+ uses: maxnowack/local-cache@v2 + with: + path: | + ~/.cargo/registry/index + ~/.cargo/registry/cache + ~/.cargo/registry/CACHEDIR.TAG + ~/.cargo/git + target + key: tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly-${{ hashFiles('**/Cargo.lock') }} + restore-keys: | + tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly-${{ hashFiles('**/Cargo.lock') }} + tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly + + - name: cargo test compile + run: cargo test --no-run --locked --all-features --release ${{ env.TARGET_BINS }} + + - name: Run ${{ env.CI_PROFILE }} integration tests for binaries + if: ${{ env.CI_BINS == 'true' }} + timeout-minutes: 90 + run: | + cargo test \ + --test cucumber \ + -v \ + --all-features \ + --release \ + --package tari_integration_tests \ + -- -t "${{ env.CI_PROFILE }} and (not @wallet-ffi) and (not @chat-ffi) and (not @broken)" \ + -c 5 \ + --retry 2 + + - name: upload artifact + uses: actions/upload-artifact@v4 # upload test results as artifact + if: always() + with: + name: junit-cucumber + path: ${{ github.workspace }}/integration_tests/cucumber-output-junit.xml + + ffi: + name: Cucumber tests / FFI + runs-on: [self-hosted, ubuntu-high-cpu] + steps: + - name: checkout + uses: actions/checkout@v4 + + - name: Envs setup + id: envs_setup + shell: bash + run: | + if [ "${{ github.event_name }}" == "schedule" ] ; then + echo "CI_FFI=false" >> $GITHUB_ENV + if [ "${{ github.event.schedule }}" == "0 2 * * *" ] ; then + echo "CI_PROFILE=(not @long-running)" >> $GITHUB_ENV + elif [ "${{ github.event.schedule }}" == "0 12 * * 6" ] ; then + echo "CI_PROFILE=@long-running" >> $GITHUB_ENV + fi + else + echo "CI ..." 
+ echo "CI_PROFILE=@critical and (not @long-running)" >> $GITHUB_ENV + CI_FFI=${{ inputs.ci_ffi }} + echo "Run FFI - ${CI_FFI}" + echo "CI_FFI=${CI_FFI:-true}" >> $GITHUB_ENV + fi + + - name: Setup rust toolchain + if: ${{ env.CI_FFI == 'true' }} + uses: dtolnay/rust-toolchain@master + with: + components: rustfmt, clippy + toolchain: ${{ env.toolchain }} + + - name: Install ubuntu dependencies + if: ${{ env.CI_FFI == 'true' }} + shell: bash + run: | + sudo apt-get update + sudo bash scripts/install_ubuntu_dependencies.sh + + - name: Cache cargo files and outputs + if: ${{ env.CI_FFI == 'true' }} + # Don't use rust-cache. + # This job runs on self-hosted, so use local-cache instead. + uses: maxnowack/local-cache@v2 + with: + path: | + ~/.cargo/registry/index + ~/.cargo/registry/cache + ~/.cargo/registry/CACHEDIR.TAG + ~/.cargo/git + target + key: tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly-${{ hashFiles('**/Cargo.lock') }} + restore-keys: | + tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly-${{ hashFiles('**/Cargo.lock') }} + tari-${{ runner.os }}-${{ runner.cpu-model }}-${{ env.toolchain }}-nightly + + - name: cargo test compile + if: ${{ env.CI_FFI == 'true' }} + run: cargo test --no-run --locked --all-features --release ${{ env.TARGET_BINS }} + + - name: Run ${{ env.CI_PROFILE }} integration tests for ffi + if: ${{ env.CI_FFI == 'true' }} + timeout-minutes: 90 + run: | + cargo test \ + --test cucumber \ + -v \ + --all-features \ + --release \ + --package tari_integration_tests \ + -- -t "(@wallet-ffi or @chat-ffi) and ${{ env.CI_PROFILE }} and (not @broken)" \ + -c 1 \ + --retry 2 + + - name: upload artifact + uses: actions/upload-artifact@v4 # upload test results as artifact + if: always() + with: + name: junit-ffi-cucumber + path: ${{ github.workspace }}/integration_tests/cucumber-output-junit.xml + + # needed for test results + event_file: + name: "Upload Event File for Test Results" + runs-on: 
ubuntu-latest + steps: + - name: Upload + uses: actions/upload-artifact@v4 + with: + name: Event File + path: ${{ github.event_path }} diff --git a/.github/workflows/pr_title.yml b/.github/workflows/pr_title.yml new file mode 100644 index 00000000..ae85f6b7 --- /dev/null +++ b/.github/workflows/pr_title.yml @@ -0,0 +1,30 @@ +--- +# Checks that PR titles conform to Conventional Commits +# See https://www.conventionalcommits.org/en/v1.0.0/ for more information +name: PR + +'on': + pull_request: + types: + - opened + - reopened + - edited + - synchronize + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + check-title: + runs-on: ubuntu-latest + steps: + - name: install + run: | + npm install -g @commitlint/cli @commitlint/config-conventional + echo "module.exports = {extends: ['@commitlint/config-conventional']}" > commitlint.config.js + - name: lint + run: | + echo "$PR_TITLE" | commitlint + env: + PR_TITLE: ${{github.event.pull_request.title}} diff --git a/.gitignore b/.gitignore index 6985cf1b..75ae65bb 100644 --- a/.gitignore +++ b/.gitignore @@ -12,3 +12,6 @@ Cargo.lock # MSVC Windows builds of rustc generate these, which store debugging information *.pdb + +# Ignore OS files +.DS_Store diff --git a/CODEOWNERS b/CODEOWNERS new file mode 100644 index 00000000..da1acdfa --- /dev/null +++ b/CODEOWNERS @@ -0,0 +1,3 @@ +# CI/CD-related files require a review by the devops team +.github/**/* @tari-project/devops +CODEOWNERS @tari-project/devops diff --git a/clippy.toml b/clippy.toml new file mode 100644 index 00000000..8fcbb433 --- /dev/null +++ b/clippy.toml @@ -0,0 +1,4 @@ +cognitive-complexity-threshold = 15 +too-many-arguments-threshold = 12 +# Set from 200 to size of a RistrettoPublicKey +enum-variant-size-threshold = 216 diff --git a/lints.toml b/lints.toml new file mode 100644 index 00000000..cb44026f --- /dev/null +++ b/lints.toml @@ -0,0 +1,73 @@ +deny = [ + # Prevent spelling mistakes in lints + 
    # Precision loss is almost always completely fine and is only useful as a sanity check.
It should not be in version control. + 'clippy::dbg_macro' +] + +allow = [ + # allow Default::default calls + 'clippy::default_trait_access', + # Generally when developers fix this, it can lead to leaky abstractions or worse, so + # too many arguments is generally the lesser of two evils + 'clippy::too_many_arguments', + # `assert!(!foo(bar))` is misread the majority of the time, while `assert_eq!(foo(bar), false)` is crystal clear + 'clippy::bool-assert-comparison', + 'clippy::blocks_in_conditions', +] From aa1a6b94fbed225e76ead553422c9883dab233d3 Mon Sep 17 00:00:00 2001 From: richardb Date: Wed, 26 Jun 2024 14:25:45 +0200 Subject: [PATCH 24/43] small changes --- Cargo.toml | 1 + src/main.rs | 14 +++- src/server/config.rs | 5 ++ src/server/p2p/client.rs | 11 ++- src/server/p2p/error.rs | 8 ++ src/server/p2p/messages.rs | 12 +-- src/server/p2p/p2p.rs | 148 +++++++++++++++++++++++------------ src/server/p2p/peer_store.rs | 2 +- 8 files changed, 135 insertions(+), 66 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index fffcf1ef..2476e17e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -29,6 +29,7 @@ libp2p = { version = "0.53.2", features = [ "request-response", "json", "cbor", + "kad" ] } tokio = { version = "1.38.0", features = ["full"] } thiserror = "1.0" diff --git a/src/main.rs b/src/main.rs index 84f1e814..27ffe709 100644 --- a/src/main.rs +++ b/src/main.rs @@ -23,7 +23,7 @@ fn cli_styles() -> Styles { #[derive(Parser)] #[command(version)] #[command(styles = cli_styles())] -#[command(about = "⛏ Decentralized pool mining for Tari network ⛏", long_about = None)] +#[command(about = "⛏ Decentralized mining pool for Tari network ⛏", long_about = None)] struct Cli { /// Log level #[arg(short, long, value_name = "log-level", default_value = Some("info"))] @@ -39,10 +39,15 @@ struct Cli { /// (Optional) seed peers. /// Any amount of seed peers can be added to join a p2pool network. - /// Please note that these addresses must be in libp2p multi address format! 
- /// e.g.: /dnsaddr/libp2p.io - #[arg(short, long, value_name = "seed-peers")] + /// Please note that these addresses must be in libp2p multi address format and must contain peer ID! + /// e.g.: /ip4/127.0.0.1/tcp/52313/p2p/12D3KooWCUNCvi7PBPymgsHx39JWErYdSoT3EFPrn3xoVff4CHFu + #[arg(short = 'e', long, value_name = "seed-peers")] seed_peers: Option>, + + // /// Stable peer. + // /// If set to true, then this peer will have always the same identity (key pair and peer ID). + // #[arg(short = 's', long, value_name = "stable-peer", default_value_t = false)] + // stable_peer: bool, } #[tokio::main] @@ -59,6 +64,7 @@ async fn main() -> anyhow::Result<()> { if let Some(seed_peers) = cli.seed_peers { config_builder.with_seed_peers(seed_peers); } + // config_builder.with_stable_peer(cli.stable_peer); let config = config_builder.build(); let share_chain = InMemoryShareChain::default(); diff --git a/src/server/config.rs b/src/server/config.rs index 3ba2274f..f4e4e74f 100644 --- a/src/server/config.rs +++ b/src/server/config.rs @@ -71,6 +71,11 @@ impl ConfigBuilder { self } + pub fn with_stable_peer(&mut self, config: bool) -> &mut Self { + self.config.p2p_service.stable_peer = config; + self + } + pub fn build(&self) -> Config { self.config.clone() } diff --git a/src/server/p2p/client.rs b/src/server/p2p/client.rs index bd63e3e4..c9a7913b 100644 --- a/src/server/p2p/client.rs +++ b/src/server/p2p/client.rs @@ -1,7 +1,7 @@ use std::sync::Arc; use std::time::{Duration, Instant}; -use log::{debug, error, warn}; +use log::{debug, error, info, warn}; use thiserror::Error; use tokio::select; use tokio::sync::{broadcast, mpsc, Mutex}; @@ -113,7 +113,7 @@ impl ServiceClient { let peer_count = self.peer_store.peer_count().await as f64 + 1.0; let min_validation_count = (peer_count / 3.0) * 2.0; let min_validation_count = min_validation_count.round() as u64; - debug!(target: LOG_TARGET, "Minimum validation count: {min_validation_count:?}"); + info!(target: LOG_TARGET, "Minimum 
validation count: {min_validation_count:?}"); // wait for the validations to come let mut validate_block_receiver = self.channels.validate_block_receiver.lock().await; @@ -127,16 +127,18 @@ impl ServiceClient { break; } _ = peer_changes_receiver.recv() => { + warn!(target: LOG_TARGET, "Peers list has been changed, retry!"); peers_changed = true; break; } result = validate_block_receiver.recv() => { if let Some(validate_result) = result { if validate_result.valid && validate_result.block == *block { - debug!(target: LOG_TARGET, "New validation result: {validate_result:?}"); + info!(target: LOG_TARGET, "New validation result: {validate_result:?}"); validation_count+=1; } } else { + error!(target: LOG_TARGET, "Validate block receiver got None!"); break; } } @@ -144,12 +146,13 @@ impl ServiceClient { } if peers_changed { + drop(validate_block_receiver); retries += 1; return Box::pin(self.validate_block_with_retries(block, retries)).await; } let validation_time = Instant::now().duration_since(start); - debug!(target: LOG_TARGET, "Validation took {:?}", validation_time); + info!(target: LOG_TARGET, "Validation took {:?}", validation_time); Ok(validation_count >= min_validation_count) } diff --git a/src/server/p2p/error.rs b/src/server/p2p/error.rs index 7ba93ef6..01041e0c 100644 --- a/src/server/p2p/error.rs +++ b/src/server/p2p/error.rs @@ -1,5 +1,7 @@ use libp2p::{multiaddr, noise, TransportError}; use libp2p::gossipsub::PublishError; +use libp2p::identity::DecodingError; +use libp2p::kad::NoKnownPeers; use libp2p::swarm::DialError; use thiserror::Error; @@ -34,4 +36,10 @@ pub enum LibP2PError { Publish(#[from] PublishError), #[error("Dial error: {0}")] Dial(#[from] DialError), + #[error("Kademlia: No known peers error: {0}")] + KademliaNoKnownPeers(#[from] NoKnownPeers), + #[error("Missing peer ID from address: {0}")] + MissingPeerId(String), + #[error("Key decode error: {0}")] + KeyDecoding(#[from] DecodingError), } \ No newline at end of file diff --git 
a/src/server/p2p/messages.rs b/src/server/p2p/messages.rs index a1d9013c..05cd2488 100644 --- a/src/server/p2p/messages.rs +++ b/src/server/p2p/messages.rs @@ -41,12 +41,12 @@ pub fn serialize_message(input: &T) -> Result, Error> #[derive(Serialize, Deserialize, Debug, Copy, Clone)] pub struct PeerInfo { pub current_height: u64, - timestamp: u64, + timestamp: u128, } impl_conversions!(PeerInfo); impl PeerInfo { pub fn new(current_height: u64) -> Self { - let timestamp = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs(); + let timestamp = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_micros(); Self { current_height, timestamp } } } @@ -54,12 +54,12 @@ impl PeerInfo { #[derive(Serialize, Deserialize, Debug, Clone)] pub struct ValidateBlockRequest { block: Block, - timestamp: u64, + timestamp: u128, } impl_conversions!(ValidateBlockRequest); impl ValidateBlockRequest { pub fn new(block: Block) -> Self { - let timestamp = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs(); + let timestamp = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_micros(); Self { block, timestamp } } @@ -73,7 +73,7 @@ pub struct ValidateBlockResult { pub peer_id: PeerId, pub block: Block, pub valid: bool, - pub timestamp: u64, + pub timestamp: u128, } impl_conversions!(ValidateBlockResult); impl ValidateBlockResult { @@ -82,7 +82,7 @@ impl ValidateBlockResult { block: Block, valid: bool, ) -> Self { - let timestamp = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs(); + let timestamp = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_micros(); Self { peer_id, block, diff --git a/src/server/p2p/p2p.rs b/src/server/p2p/p2p.rs index 229ae82f..bf06f225 100644 --- a/src/server/p2p/p2p.rs +++ b/src/server/p2p/p2p.rs @@ -2,10 +2,14 @@ use std::hash::{DefaultHasher, Hash, Hasher}; use std::sync::Arc; use std::time::Duration; -use libp2p::{gossipsub, mdns, Multiaddr, noise, request_response, StreamProtocol, Swarm, tcp, yamux}; +use 
libp2p::{gossipsub, kad, mdns, Multiaddr, noise, request_response, StreamProtocol, Swarm, SwarmBuilder, tcp, yamux}; use libp2p::futures::StreamExt; use libp2p::gossipsub::{IdentTopic, Message, PublishError}; +use libp2p::identity::Keypair; +use libp2p::kad::{Event, Mode, NoKnownPeers, QueryId}; +use libp2p::kad::store::MemoryStore; use libp2p::mdns::tokio::Tokio; +use libp2p::multiaddr::Protocol; use libp2p::request_response::{cbor, ResponseChannel}; use libp2p::swarm::{NetworkBehaviour, SwarmEvent}; use log::{debug, error, info, warn}; @@ -33,6 +37,7 @@ pub struct Config { pub seed_peers: Vec, pub client: client::ClientConfig, pub peer_info_publish_interval: Duration, + pub stable_peer: bool, } impl Default for Config { @@ -41,15 +46,17 @@ impl Default for Config { seed_peers: vec![], client: client::ClientConfig::default(), peer_info_publish_interval: Duration::from_secs(5), + stable_peer: false, } } } #[derive(NetworkBehaviour)] pub struct ServerNetworkBehaviour { - pub mdns: mdns::Behaviour, + // pub mdns: mdns::Behaviour, pub gossipsub: gossipsub::Behaviour, pub share_chain_sync: cbor::Behaviour, + pub kademlia: kad::Behaviour, } /// Service is the implementation that holds every peer-to-peer related logic @@ -108,7 +115,15 @@ impl Service /// Creates a new swarm from the provided config fn new_swarm(config: &config::Config) -> Result, Error> { - let swarm = libp2p::SwarmBuilder::with_new_identity() + let mut swarm_builder = libp2p::SwarmBuilder::with_new_identity(); + + // if config.p2p_service.stable_peer { + // let key_pair = Keypair::ed25519_from_bytes(vec![1, 2, 3]) + // .map_err(|error| Error::LibP2P(LibP2PError::KeyDecoding(error)))?; + // swarm_builder = libp2p::SwarmBuilder::with_existing_identity(key_pair); + // } + + let mut swarm = swarm_builder .with_tokio() .with_tcp( tcp::Config::default(), @@ -139,11 +154,11 @@ impl Service Ok(ServerNetworkBehaviour { gossipsub, - mdns: mdns::Behaviour::new( - mdns::Config::default(), - 
key_pair.public().to_peer_id(), - ) - .map_err(|e| Error::LibP2P(LibP2PError::IO(e)))?, + // mdns: mdns::Behaviour::new( + // mdns::Config::default(), + // key_pair.public().to_peer_id(), + // ) + // .map_err(|e| Error::LibP2P(LibP2PError::IO(e)))?, share_chain_sync: cbor::Behaviour::::new( [( StreamProtocol::new(SHARE_CHAIN_SYNC_REQ_RESP_PROTOCOL), @@ -151,14 +166,18 @@ impl Service )], request_response::Config::default(), ), - // rendezvous_server: rendezvous::server::Behaviour::new(rendezvous::server::Config::default()), - // rendezvous_client: rendezvous::client::Behaviour::new(key_pair.clone()), + kademlia: kad::Behaviour::new( + key_pair.public().to_peer_id(), + MemoryStore::new(key_pair.public().to_peer_id()), + ), }) }) .map_err(|e| Error::LibP2P(LibP2PError::Behaviour(e.to_string())))? .with_swarm_config(|c| c.with_idle_connection_timeout(config.idle_connection_timeout)) .build(); + swarm.behaviour_mut().kademlia.set_mode(Some(Mode::Server)); + Ok(swarm) } @@ -288,6 +307,7 @@ impl Service PEER_INFO_TOPIC => { match messages::PeerInfo::try_from(message) { Ok(payload) => { + info!(target: LOG_TARGET, "New peer info: {peer:?} -> {payload:?}"); self.peer_store.add(peer, payload).await; if let Some(tip) = self.peer_store.tip_of_block_height().await { if let Ok(curr_height) = self.share_chain.tip_height().await { @@ -305,7 +325,7 @@ impl Service BLOCK_VALIDATION_REQUESTS_TOPIC => { match messages::ValidateBlockRequest::try_from(message) { Ok(payload) => { - debug!(target: LOG_TARGET, "Block validation request: {payload:?}"); + info!(target: LOG_TARGET, "Block validation request: {payload:?}"); let validate_result = self.share_chain.validate_block(&payload.block()).await; let mut valid = false; @@ -370,6 +390,7 @@ impl Service /// Handles share chain sync request (coming from other peer). 
async fn handle_share_chain_sync_request(&mut self, channel: ResponseChannel, request: ShareChainSyncRequest) { + info!(target: LOG_TARGET, "Incoming Share chain sync request: {request:?}"); match self.share_chain.blocks(request.from_height).await { Ok(blocks) => { if self.swarm.behaviour_mut().share_chain_sync.send_response(channel, ShareChainSyncResponse::new(blocks.clone())) @@ -384,6 +405,7 @@ impl Service /// Handle share chain sync response. /// All the responding blocks will be tried to put into local share chain. async fn handle_share_chain_sync_response(&mut self, response: ShareChainSyncResponse) { + info!(target: LOG_TARGET, "Share chain sync response: {response:?}"); if let Err(error) = self.share_chain.submit_blocks(response.blocks).await { error!(target: LOG_TARGET, "Failed to add synced blocks to share chain: {error:?}"); } @@ -391,11 +413,16 @@ impl Service /// Trigger share chai sync with another peer with the highest known block height. async fn sync_share_chain(&mut self) { - while self.peer_store.tip_of_block_height().await.is_none() {} // waiting for the highest blockchain + info!(target: LOG_TARGET, "Syncing share chain..."); + while self.peer_store.tip_of_block_height().await.is_none() { + info!(target: LOG_TARGET, "Waiting for highest block height..."); + } // waiting for the highest blockchain match self.peer_store.tip_of_block_height().await { Some(result) => { + info!(target: LOG_TARGET, "Found highet block height: {result:?}"); match self.share_chain.tip_height().await { Ok(tip) => { + info!(target: LOG_TARGET, "Send share chain sync request: {result:?}"); self.swarm.behaviour_mut().share_chain_sync.send_request( &result.peer_id, ShareChainSyncRequest::new(tip), @@ -415,19 +442,19 @@ impl Service info!(target: LOG_TARGET, "Listening on {address:?}"); } SwarmEvent::Behaviour(event) => match event { - ServerNetworkBehaviourEvent::Mdns(mdns_event) => match mdns_event { - mdns::Event::Discovered(peers) => { - for (peer, addr) in peers { - 
self.swarm.add_peer_address(peer, addr); - self.swarm.behaviour_mut().gossipsub.add_explicit_peer(&peer); - } - } - mdns::Event::Expired(peers) => { - for (peer, _addr) in peers { - self.swarm.behaviour_mut().gossipsub.remove_explicit_peer(&peer); - } - } - }, + // ServerNetworkBehaviourEvent::Mdns(mdns_event) => match mdns_event { + // mdns::Event::Discovered(peers) => { + // for (peer, addr) in peers { + // self.swarm.add_peer_address(peer, addr); + // self.swarm.behaviour_mut().gossipsub.add_explicit_peer(&peer); + // } + // } + // mdns::Event::Expired(peers) => { + // for (peer, _addr) in peers { + // self.swarm.behaviour_mut().gossipsub.remove_explicit_peer(&peer); + // } + // } + // }, ServerNetworkBehaviourEvent::Gossipsub(event) => match event { gossipsub::Event::Message { message, message_id: _message_id, propagation_source: _propagation_source } => { self.handle_new_gossipsub_message(message).await; @@ -445,29 +472,32 @@ impl Service self.handle_share_chain_sync_response(response).await; } } - request_response::Event::OutboundFailure { .. } => {} - request_response::Event::InboundFailure { .. } => {} + request_response::Event::OutboundFailure { peer, error, .. } => { + error!(target: LOG_TARGET, "REQ-RES outbound failure: {peer:?} -> {error:?}"); + } + request_response::Event::InboundFailure { peer, error, .. } => { + error!(target: LOG_TARGET, "REQ-RES inbound failure: {peer:?} -> {error:?}"); + } request_response::Event::ResponseSent { .. } => {} }, + ServerNetworkBehaviourEvent::Kademlia(event) => { + match event { + Event::RoutingUpdated { peer, old_peer, addresses, .. 
} => { + addresses.iter().for_each(|addr| { + self.swarm.add_peer_address(peer, addr.clone()); + }); + self.swarm.behaviour_mut().gossipsub.add_explicit_peer(&peer); + if let Some(old_peer) = old_peer { + self.swarm.behaviour_mut().gossipsub.remove_explicit_peer(&old_peer); + if let Err(error) = self.client_peer_changes_tx.send(()) { + error!(target: LOG_TARGET, "Failed to send peer changes trigger: {error:?}"); + } + } + } + _ => info!(target: LOG_TARGET, "[KADEMLIA] {event:?}"), + } + } }, - SwarmEvent::ConnectionEstablished { peer_id, .. } => { - // TODO: do some discovery somehow, possibly use rendezvous - self.swarm.behaviour_mut().gossipsub.add_explicit_peer(&peer_id); - } - SwarmEvent::ConnectionClosed { peer_id, .. } => { - self.swarm.behaviour_mut().gossipsub.remove_explicit_peer(&peer_id); - } - SwarmEvent::IncomingConnection { .. } => {} - SwarmEvent::IncomingConnectionError { .. } => {} - SwarmEvent::OutgoingConnectionError { .. } => {} - SwarmEvent::ExpiredListenAddr { .. } => {} - SwarmEvent::ListenerClosed { .. } => {} - SwarmEvent::ListenerError { .. } => {} - SwarmEvent::Dialing { .. } => {} - SwarmEvent::NewExternalAddrCandidate { .. } => {} - SwarmEvent::ExternalAddrConfirmed { .. } => {} - SwarmEvent::ExternalAddrExpired { .. } => {} - SwarmEvent::NewExternalAddrOfPeer { .. 
} => {} _ => {} }; } @@ -491,12 +521,15 @@ impl Service // handle case when we have some peers removed let expired_peers = self.peer_store.cleanup().await; for exp_peer in expired_peers { + self.swarm.behaviour_mut().kademlia.remove_peer(&exp_peer); self.swarm.behaviour_mut().gossipsub.remove_explicit_peer(&exp_peer); - } - if let Err(error) = self.client_peer_changes_tx.send(()) { + if let Err(error) = self.client_peer_changes_tx.send(()) { error!(target: LOG_TARGET, "Failed to send peer changes trigger: {error:?}"); + } } + // broadcast peer info + info!(target: LOG_TARGET, "Peer count: {:?}", self.peer_store.peer_count().await); if let Err(error) = self.broadcast_peer_info().await { match error { Error::LibP2P(LibP2PError::Publish(PublishError::InsufficientPeers)) => { @@ -514,13 +547,26 @@ impl Service } fn join_seed_peers(&mut self) -> Result<(), Error> { + if self.config.seed_peers.is_empty() { + return Ok(()); + } + for seed_peer in &self.config.seed_peers { - self.swarm.dial(seed_peer.parse::() - .map_err(|error| Error::LibP2P(LibP2PError::MultiAddrParse(error)))? 
- ) - .map_err(|error| Error::LibP2P(LibP2PError::Dial(error)))?; + let addr = seed_peer.parse::() + .map_err(|error| Error::LibP2P(LibP2PError::MultiAddrParse(error)))?; + let peer_id = match addr.iter().last() { + Some(Protocol::P2p(peer_id)) => Some(peer_id), + _ => None, + }; + if peer_id.is_none() { + return Err(Error::LibP2P(LibP2PError::MissingPeerId(seed_peer.clone()))); + } + self.swarm.behaviour_mut().kademlia.add_address(&peer_id.unwrap(), addr); } + self.swarm.behaviour_mut().kademlia.bootstrap() + .map_err(|error| Error::LibP2P(LibP2PError::KademliaNoKnownPeers(error)))?; + Ok(()) } diff --git a/src/server/p2p/peer_store.rs b/src/server/p2p/peer_store.rs index bcab063c..45788207 100644 --- a/src/server/p2p/peer_store.rs +++ b/src/server/p2p/peer_store.rs @@ -69,7 +69,7 @@ impl PeerStore { pub fn new(config: &PeerStoreConfig) -> Self { Self { inner: CacheBuilder::new(100_000) - .time_to_live(config.peer_record_ttl) + .time_to_live(config.peer_record_ttl * 2) .build(), ttl: config.peer_record_ttl, tip_of_block_height: RwLock::new(None), From 9fb89aef8c4868c30113d5f1466c7c63331b8369 Mon Sep 17 00:00:00 2001 From: richardb Date: Wed, 26 Jun 2024 14:58:14 +0200 Subject: [PATCH 25/43] reenabled mdns --- README.md | 5 ++++- src/server/p2p/p2p.rs | 40 ++++++++++++++++++++-------------------- 2 files changed, 24 insertions(+), 21 deletions(-) diff --git a/README.md b/README.md index 52a5bb8d..1184e114 100644 --- a/README.md +++ b/README.md @@ -1 +1,4 @@ -# sha-p2pool \ No newline at end of file +# sha-p2pool + +This is a decentralized pool mining software for Tari network using SHA-3 algorithm. 
+ diff --git a/src/server/p2p/p2p.rs b/src/server/p2p/p2p.rs index bf06f225..8b2560eb 100644 --- a/src/server/p2p/p2p.rs +++ b/src/server/p2p/p2p.rs @@ -53,7 +53,7 @@ impl Default for Config { #[derive(NetworkBehaviour)] pub struct ServerNetworkBehaviour { - // pub mdns: mdns::Behaviour, + pub mdns: mdns::Behaviour, pub gossipsub: gossipsub::Behaviour, pub share_chain_sync: cbor::Behaviour, pub kademlia: kad::Behaviour, @@ -116,7 +116,7 @@ impl Service /// Creates a new swarm from the provided config fn new_swarm(config: &config::Config) -> Result, Error> { let mut swarm_builder = libp2p::SwarmBuilder::with_new_identity(); - + // if config.p2p_service.stable_peer { // let key_pair = Keypair::ed25519_from_bytes(vec![1, 2, 3]) // .map_err(|error| Error::LibP2P(LibP2PError::KeyDecoding(error)))?; @@ -154,11 +154,11 @@ impl Service Ok(ServerNetworkBehaviour { gossipsub, - // mdns: mdns::Behaviour::new( - // mdns::Config::default(), - // key_pair.public().to_peer_id(), - // ) - // .map_err(|e| Error::LibP2P(LibP2PError::IO(e)))?, + mdns: mdns::Behaviour::new( + mdns::Config::default(), + key_pair.public().to_peer_id(), + ) + .map_err(|e| Error::LibP2P(LibP2PError::IO(e)))?, share_chain_sync: cbor::Behaviour::::new( [( StreamProtocol::new(SHARE_CHAIN_SYNC_REQ_RESP_PROTOCOL), @@ -442,19 +442,19 @@ impl Service info!(target: LOG_TARGET, "Listening on {address:?}"); } SwarmEvent::Behaviour(event) => match event { - // ServerNetworkBehaviourEvent::Mdns(mdns_event) => match mdns_event { - // mdns::Event::Discovered(peers) => { - // for (peer, addr) in peers { - // self.swarm.add_peer_address(peer, addr); - // self.swarm.behaviour_mut().gossipsub.add_explicit_peer(&peer); - // } - // } - // mdns::Event::Expired(peers) => { - // for (peer, _addr) in peers { - // self.swarm.behaviour_mut().gossipsub.remove_explicit_peer(&peer); - // } - // } - // }, + ServerNetworkBehaviourEvent::Mdns(mdns_event) => match mdns_event { + mdns::Event::Discovered(peers) => { + for (peer, addr) in 
peers { + self.swarm.add_peer_address(peer, addr); + self.swarm.behaviour_mut().gossipsub.add_explicit_peer(&peer); + } + } + mdns::Event::Expired(peers) => { + for (peer, _addr) in peers { + self.swarm.behaviour_mut().gossipsub.remove_explicit_peer(&peer); + } + } + }, ServerNetworkBehaviourEvent::Gossipsub(event) => match event { gossipsub::Event::Message { message, message_id: _message_id, propagation_source: _propagation_source } => { self.handle_new_gossipsub_message(message).await; From 8893cda1920dcd760421d0a8bbaa682ca7e5d44a Mon Sep 17 00:00:00 2001 From: richardb Date: Wed, 26 Jun 2024 16:52:56 +0200 Subject: [PATCH 26/43] finalize code + added todo --- src/main.rs | 8 +------- src/server/config.rs | 5 ----- src/server/grpc/mod.rs | 2 +- src/server/p2p/client.rs | 9 ++++----- src/server/p2p/p2p.rs | 37 ++++++++++++------------------------- src/server/server.rs | 4 ++++ 6 files changed, 22 insertions(+), 43 deletions(-) diff --git a/src/main.rs b/src/main.rs index 27ffe709..b795fb98 100644 --- a/src/main.rs +++ b/src/main.rs @@ -41,13 +41,8 @@ struct Cli { /// Any amount of seed peers can be added to join a p2pool network. /// Please note that these addresses must be in libp2p multi address format and must contain peer ID! /// e.g.: /ip4/127.0.0.1/tcp/52313/p2p/12D3KooWCUNCvi7PBPymgsHx39JWErYdSoT3EFPrn3xoVff4CHFu - #[arg(short = 'e', long, value_name = "seed-peers")] + #[arg(short, long, value_name = "seed-peers")] seed_peers: Option>, - - // /// Stable peer. - // /// If set to true, then this peer will have always the same identity (key pair and peer ID). 
- // #[arg(short = 's', long, value_name = "stable-peer", default_value_t = false)] - // stable_peer: bool, } #[tokio::main] @@ -64,7 +59,6 @@ async fn main() -> anyhow::Result<()> { if let Some(seed_peers) = cli.seed_peers { config_builder.with_seed_peers(seed_peers); } - // config_builder.with_stable_peer(cli.stable_peer); let config = config_builder.build(); let share_chain = InMemoryShareChain::default(); diff --git a/src/server/config.rs b/src/server/config.rs index f4e4e74f..3ba2274f 100644 --- a/src/server/config.rs +++ b/src/server/config.rs @@ -71,11 +71,6 @@ impl ConfigBuilder { self } - pub fn with_stable_peer(&mut self, config: bool) -> &mut Self { - self.config.p2p_service.stable_peer = config; - self - } - pub fn build(&self) -> Config { self.config.clone() } diff --git a/src/server/grpc/mod.rs b/src/server/grpc/mod.rs index 58970f8a..8edbe380 100644 --- a/src/server/grpc/mod.rs +++ b/src/server/grpc/mod.rs @@ -3,4 +3,4 @@ pub mod base_node; pub mod error; pub mod p2pool; -mod util; \ No newline at end of file +pub mod util; \ No newline at end of file diff --git a/src/server/p2p/client.rs b/src/server/p2p/client.rs index c9a7913b..2b8f9f92 100644 --- a/src/server/p2p/client.rs +++ b/src/server/p2p/client.rs @@ -1,7 +1,7 @@ use std::sync::Arc; use std::time::{Duration, Instant}; -use log::{debug, error, info, warn}; +use log::{debug, error, warn}; use thiserror::Error; use tokio::select; use tokio::sync::{broadcast, mpsc, Mutex}; @@ -113,7 +113,7 @@ impl ServiceClient { let peer_count = self.peer_store.peer_count().await as f64 + 1.0; let min_validation_count = (peer_count / 3.0) * 2.0; let min_validation_count = min_validation_count.round() as u64; - info!(target: LOG_TARGET, "Minimum validation count: {min_validation_count:?}"); + debug!(target: LOG_TARGET, "Minimum validation count: {min_validation_count:?}"); // wait for the validations to come let mut validate_block_receiver = self.channels.validate_block_receiver.lock().await; @@ -134,11 +134,10 
@@ impl ServiceClient { result = validate_block_receiver.recv() => { if let Some(validate_result) = result { if validate_result.valid && validate_result.block == *block { - info!(target: LOG_TARGET, "New validation result: {validate_result:?}"); + debug!(target: LOG_TARGET, "New validation result: {validate_result:?}"); validation_count+=1; } } else { - error!(target: LOG_TARGET, "Validate block receiver got None!"); break; } } @@ -152,7 +151,7 @@ impl ServiceClient { } let validation_time = Instant::now().duration_since(start); - info!(target: LOG_TARGET, "Validation took {:?}", validation_time); + debug!(target: LOG_TARGET, "Validation took {:?}", validation_time); Ok(validation_count >= min_validation_count) } diff --git a/src/server/p2p/p2p.rs b/src/server/p2p/p2p.rs index 8b2560eb..70f95333 100644 --- a/src/server/p2p/p2p.rs +++ b/src/server/p2p/p2p.rs @@ -2,11 +2,10 @@ use std::hash::{DefaultHasher, Hash, Hasher}; use std::sync::Arc; use std::time::Duration; -use libp2p::{gossipsub, kad, mdns, Multiaddr, noise, request_response, StreamProtocol, Swarm, SwarmBuilder, tcp, yamux}; +use libp2p::{gossipsub, kad, mdns, Multiaddr, noise, request_response, StreamProtocol, Swarm, tcp, yamux}; use libp2p::futures::StreamExt; use libp2p::gossipsub::{IdentTopic, Message, PublishError}; -use libp2p::identity::Keypair; -use libp2p::kad::{Event, Mode, NoKnownPeers, QueryId}; +use libp2p::kad::{Event, Mode}; use libp2p::kad::store::MemoryStore; use libp2p::mdns::tokio::Tokio; use libp2p::multiaddr::Protocol; @@ -37,7 +36,6 @@ pub struct Config { pub seed_peers: Vec, pub client: client::ClientConfig, pub peer_info_publish_interval: Duration, - pub stable_peer: bool, } impl Default for Config { @@ -46,7 +44,6 @@ impl Default for Config { seed_peers: vec![], client: client::ClientConfig::default(), peer_info_publish_interval: Duration::from_secs(5), - stable_peer: false, } } } @@ -115,15 +112,7 @@ impl Service /// Creates a new swarm from the provided config fn 
new_swarm(config: &config::Config) -> Result, Error> { - let mut swarm_builder = libp2p::SwarmBuilder::with_new_identity(); - - // if config.p2p_service.stable_peer { - // let key_pair = Keypair::ed25519_from_bytes(vec![1, 2, 3]) - // .map_err(|error| Error::LibP2P(LibP2PError::KeyDecoding(error)))?; - // swarm_builder = libp2p::SwarmBuilder::with_existing_identity(key_pair); - // } - - let mut swarm = swarm_builder + let mut swarm = libp2p::SwarmBuilder::with_new_identity() .with_tokio() .with_tcp( tcp::Config::default(), @@ -307,7 +296,7 @@ impl Service PEER_INFO_TOPIC => { match messages::PeerInfo::try_from(message) { Ok(payload) => { - info!(target: LOG_TARGET, "New peer info: {peer:?} -> {payload:?}"); + debug!(target: LOG_TARGET, "New peer info: {peer:?} -> {payload:?}"); self.peer_store.add(peer, payload).await; if let Some(tip) = self.peer_store.tip_of_block_height().await { if let Ok(curr_height) = self.share_chain.tip_height().await { @@ -325,7 +314,7 @@ impl Service BLOCK_VALIDATION_REQUESTS_TOPIC => { match messages::ValidateBlockRequest::try_from(message) { Ok(payload) => { - info!(target: LOG_TARGET, "Block validation request: {payload:?}"); + debug!(target: LOG_TARGET, "Block validation request: {payload:?}"); let validate_result = self.share_chain.validate_block(&payload.block()).await; let mut valid = false; @@ -390,7 +379,7 @@ impl Service /// Handles share chain sync request (coming from other peer). async fn handle_share_chain_sync_request(&mut self, channel: ResponseChannel, request: ShareChainSyncRequest) { - info!(target: LOG_TARGET, "Incoming Share chain sync request: {request:?}"); + debug!(target: LOG_TARGET, "Incoming Share chain sync request: {request:?}"); match self.share_chain.blocks(request.from_height).await { Ok(blocks) => { if self.swarm.behaviour_mut().share_chain_sync.send_response(channel, ShareChainSyncResponse::new(blocks.clone())) @@ -405,7 +394,7 @@ impl Service /// Handle share chain sync response. 
/// All the responding blocks will be tried to put into local share chain. async fn handle_share_chain_sync_response(&mut self, response: ShareChainSyncResponse) { - info!(target: LOG_TARGET, "Share chain sync response: {response:?}"); + debug!(target: LOG_TARGET, "Share chain sync response: {response:?}"); if let Err(error) = self.share_chain.submit_blocks(response.blocks).await { error!(target: LOG_TARGET, "Failed to add synced blocks to share chain: {error:?}"); } @@ -413,16 +402,14 @@ impl Service /// Trigger share chai sync with another peer with the highest known block height. async fn sync_share_chain(&mut self) { - info!(target: LOG_TARGET, "Syncing share chain..."); - while self.peer_store.tip_of_block_height().await.is_none() { - info!(target: LOG_TARGET, "Waiting for highest block height..."); - } // waiting for the highest blockchain + debug!(target: LOG_TARGET, "Syncing share chain..."); + while self.peer_store.tip_of_block_height().await.is_none() {} // waiting for the highest blockchain match self.peer_store.tip_of_block_height().await { Some(result) => { - info!(target: LOG_TARGET, "Found highet block height: {result:?}"); + debug!(target: LOG_TARGET, "Found highet block height: {result:?}"); match self.share_chain.tip_height().await { Ok(tip) => { - info!(target: LOG_TARGET, "Send share chain sync request: {result:?}"); + debug!(target: LOG_TARGET, "Send share chain sync request: {result:?}"); self.swarm.behaviour_mut().share_chain_sync.send_request( &result.peer_id, ShareChainSyncRequest::new(tip), @@ -494,7 +481,7 @@ impl Service } } } - _ => info!(target: LOG_TARGET, "[KADEMLIA] {event:?}"), + _ => debug!(target: LOG_TARGET, "[KADEMLIA] {event:?}"), } } }, diff --git a/src/server/server.rs b/src/server/server.rs index 032eaea7..1cf2e44c 100644 --- a/src/server/server.rs +++ b/src/server/server.rs @@ -41,6 +41,10 @@ impl Server { pub async fn new(config: config::Config, share_chain: S) -> Result { let share_chain = Arc::new(share_chain); + + // 
TODO: have base node's network here and pass to p2p_service to be able to subscribe to the right gossipsub topics + // TODO: se we are not mixing main net and test net blocks. + let mut p2p_service: p2p::Service = p2p::Service::new(&config, share_chain.clone()).map_err(Error::P2PService)?; let base_node_grpc_service = TariBaseNodeGrpc::new(config.base_node_address.clone()).await.map_err(Error::Grpc)?; From 02cf2f4f0a87840e4b1b8af3eb66ca75327b0e2b Mon Sep 17 00:00:00 2001 From: richardb Date: Thu, 27 Jun 2024 08:05:46 +0200 Subject: [PATCH 27/43] Added stable peer functionality --- src/main.rs | 21 +++++++++++++++++ src/server/config.rs | 11 +++++++++ src/server/p2p/p2p.rs | 52 +++++++++++++++++++++++++++++++++++++++---- src/server/server.rs | 2 +- 4 files changed, 81 insertions(+), 5 deletions(-) diff --git a/src/main.rs b/src/main.rs index b795fb98..3247209c 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,3 +1,5 @@ +use std::path::PathBuf; + use clap::builder::Styles; use clap::builder::styling::AnsiColor; use clap::Parser; @@ -39,14 +41,30 @@ struct Cli { /// (Optional) seed peers. /// Any amount of seed peers can be added to join a p2pool network. + /// /// Please note that these addresses must be in libp2p multi address format and must contain peer ID! + /// /// e.g.: /ip4/127.0.0.1/tcp/52313/p2p/12D3KooWCUNCvi7PBPymgsHx39JWErYdSoT3EFPrn3xoVff4CHFu #[arg(short, long, value_name = "seed-peers")] seed_peers: Option>, + + /// Starts the node as a stable peer. + /// + /// Identity of the peer will be saved locally (to --private-key-location) + /// and ID of the Peer remains the same. + #[arg(long, value_name = "stable-peer", default_value_t = false)] + stable_peer: bool, + + /// Private key folder. + /// + /// Needs --stable-peer to be used. 
+ #[arg(long, value_name = "private-key-location", requires = "stable_peer", default_value = ".")] + private_key_folder: PathBuf, } #[tokio::main] async fn main() -> anyhow::Result<()> { + // cli let cli = Cli::parse(); Builder::new().filter_level(cli.log_level).init(); let mut config_builder = server::Config::builder(); @@ -59,7 +77,10 @@ async fn main() -> anyhow::Result<()> { if let Some(seed_peers) = cli.seed_peers { config_builder.with_seed_peers(seed_peers); } + config_builder.with_stable_peer(cli.stable_peer); + config_builder.with_private_key_folder(cli.private_key_folder); + // server start let config = config_builder.build(); let share_chain = InMemoryShareChain::default(); let mut server = server::Server::new(config, share_chain).await?; diff --git a/src/server/config.rs b/src/server/config.rs index 3ba2274f..49b7bca7 100644 --- a/src/server/config.rs +++ b/src/server/config.rs @@ -1,3 +1,4 @@ +use std::path::PathBuf; use std::time::Duration; use crate::server::p2p; @@ -71,6 +72,16 @@ impl ConfigBuilder { self } + pub fn with_stable_peer(&mut self, config: bool) -> &mut Self { + self.config.p2p_service.stable_peer = config; + self + } + + pub fn with_private_key_folder(&mut self, config: PathBuf) -> &mut Self { + self.config.p2p_service.private_key_folder = config; + self + } + pub fn build(&self) -> Config { self.config.clone() } diff --git a/src/server/p2p/p2p.rs b/src/server/p2p/p2p.rs index 70f95333..98235d44 100644 --- a/src/server/p2p/p2p.rs +++ b/src/server/p2p/p2p.rs @@ -1,10 +1,12 @@ use std::hash::{DefaultHasher, Hash, Hasher}; +use std::path::PathBuf; use std::sync::Arc; use std::time::Duration; use libp2p::{gossipsub, kad, mdns, Multiaddr, noise, request_response, StreamProtocol, Swarm, tcp, yamux}; use libp2p::futures::StreamExt; use libp2p::gossipsub::{IdentTopic, Message, PublishError}; +use libp2p::identity::Keypair; use libp2p::kad::{Event, Mode}; use libp2p::kad::store::MemoryStore; use libp2p::mdns::tokio::Tokio; @@ -14,6 +16,8 @@ use 
libp2p::swarm::{NetworkBehaviour, SwarmEvent}; use log::{debug, error, info, warn}; use tari_utilities::hex::Hex; use tokio::{io, select}; +use tokio::fs::File; +use tokio::io::{AsyncReadExt, AsyncWriteExt}; use tokio::sync::{broadcast, mpsc}; use tokio::sync::broadcast::error::RecvError; @@ -30,12 +34,15 @@ const BLOCK_VALIDATION_RESULTS_TOPIC: &str = "block_validation_results"; const NEW_BLOCK_TOPIC: &str = "new_block"; const SHARE_CHAIN_SYNC_REQ_RESP_PROTOCOL: &str = "/share_chain_sync/1"; const LOG_TARGET: &str = "p2p_service"; +const STABLE_PRIVATE_KEY_FILE: &str = "p2pool_private.key"; #[derive(Clone, Debug)] pub struct Config { pub seed_peers: Vec, pub client: client::ClientConfig, pub peer_info_publish_interval: Duration, + pub stable_peer: bool, + pub private_key_folder: PathBuf, } impl Default for Config { @@ -44,6 +51,8 @@ impl Default for Config { seed_peers: vec![], client: client::ClientConfig::default(), peer_info_publish_interval: Duration::from_secs(5), + stable_peer: false, + private_key_folder: PathBuf::from("."), } } } @@ -83,8 +92,8 @@ impl Service { /// Constructs a new Service from the provided config. /// It also instantiates libp2p swarm inside. - pub fn new(config: &config::Config, share_chain: Arc) -> Result { - let swarm = Self::new_swarm(config)?; + pub async fn new(config: &config::Config, share_chain: Arc) -> Result { + let swarm = Self::new_swarm(config).await?; let peer_store = Arc::new( PeerStore::new(&config.peer_store), ); @@ -110,9 +119,44 @@ impl Service }) } + /// Generates or reads libp2p private key if stable_peer is set to true otherwise returns a random key. + /// Using this method we can be sure that our Peer ID remains the same across restarts in case of + /// stable_peer is set to true. 
+ async fn keypair(config: &Config) -> Result { + if !config.stable_peer { + return Ok(Keypair::generate_ed25519()); + } + + // if we have a saved private key, just use it + let mut content = vec![]; + let mut key_path = config.private_key_folder.clone(); + key_path.push(STABLE_PRIVATE_KEY_FILE); + + if let Ok(mut file) = File::open(key_path.clone()).await { + if file.read_to_end(&mut content).await.is_ok() { + return Keypair::from_protobuf_encoding(content.as_slice()) + .map_err(|error| Error::LibP2P(LibP2PError::KeyDecoding(error))); + } + } + + // otherwise create a new one + let key_pair = Keypair::generate_ed25519(); + let mut new_private_key_file = File::create_new(key_path).await + .map_err(|error| Error::LibP2P(LibP2PError::IO(error)))?; + new_private_key_file.write_all( + key_pair.to_protobuf_encoding() + .map_err(|error| Error::LibP2P(LibP2PError::KeyDecoding(error)))?.as_slice() + ).await + .map_err(|error| Error::LibP2P(LibP2PError::IO(error)))?; + + Ok(key_pair) + } + /// Creates a new swarm from the provided config - fn new_swarm(config: &config::Config) -> Result, Error> { - let mut swarm = libp2p::SwarmBuilder::with_new_identity() + async fn new_swarm(config: &config::Config) -> Result, Error> { + let mut swarm = libp2p::SwarmBuilder::with_existing_identity( + Self::keypair(&config.p2p_service).await? + ) .with_tokio() .with_tcp( tcp::Config::default(), diff --git a/src/server/server.rs b/src/server/server.rs index 1cf2e44c..4d9b52c1 100644 --- a/src/server/server.rs +++ b/src/server/server.rs @@ -45,7 +45,7 @@ impl Server // TODO: have base node's network here and pass to p2p_service to be able to subscribe to the right gossipsub topics // TODO: se we are not mixing main net and test net blocks. 
- let mut p2p_service: p2p::Service = p2p::Service::new(&config, share_chain.clone()).map_err(Error::P2PService)?; + let mut p2p_service: p2p::Service = p2p::Service::new(&config, share_chain.clone()).await.map_err(Error::P2PService)?; let base_node_grpc_service = TariBaseNodeGrpc::new(config.base_node_address.clone()).await.map_err(Error::Grpc)?; let base_node_grpc_server = BaseNodeServer::new(base_node_grpc_service); From f40e91715f93cf32122d49de2a728e4bdc9273b4 Mon Sep 17 00:00:00 2001 From: richardb Date: Thu, 27 Jun 2024 11:02:40 +0200 Subject: [PATCH 28/43] small updates --- .gitmodules | 3 --- src/main.rs | 4 ++-- 2 files changed, 2 insertions(+), 5 deletions(-) delete mode 100644 .gitmodules diff --git a/.gitmodules b/.gitmodules deleted file mode 100644 index d422d81e..00000000 --- a/.gitmodules +++ /dev/null @@ -1,3 +0,0 @@ -[submodule "crates/grpc/submodules/tari"] - path = crates/grpc/submodules/tari - url = https://github.com/tari-project/tari diff --git a/src/main.rs b/src/main.rs index 3247209c..2eaaa22e 100644 --- a/src/main.rs +++ b/src/main.rs @@ -57,8 +57,8 @@ struct Cli { /// Private key folder. /// - /// Needs --stable-peer to be used. - #[arg(long, value_name = "private-key-location", requires = "stable_peer", default_value = ".")] + /// Needs --stable-peer to be set. 
+ #[arg(long, value_name = "private-key-folder", requires = "stable_peer", default_value = ".")] private_key_folder: PathBuf, } From e399b3f94c6e59fc99dbec964dcda420d804c0e3 Mon Sep 17 00:00:00 2001 From: richardb Date: Thu, 27 Jun 2024 13:44:13 +0200 Subject: [PATCH 29/43] fixed miner wallet address format --- src/sharechain/in_memory.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/sharechain/in_memory.rs b/src/sharechain/in_memory.rs index c8573f76..925cce28 100644 --- a/src/sharechain/in_memory.rs +++ b/src/sharechain/in_memory.rs @@ -61,7 +61,7 @@ impl InMemoryShareChain { let blocks_read_lock = self.blocks.read().await; blocks_read_lock.iter().for_each(|block| { if let Some(miner_wallet_address) = block.miner_wallet_address() { - let addr = miner_wallet_address.to_hex(); + let addr = miner_wallet_address.to_base58(); if let Some(curr_hash_rate) = result.get(&addr) { result.insert(addr, curr_hash_rate + 1.0); } else { @@ -157,7 +157,7 @@ impl ShareChain for InMemoryShareChain { result.push(NewBlockCoinbase { address: addr.clone(), value: curr_reward, - stealth_payment: false, + stealth_payment: true, revealed_value_proof: true, coinbase_extra: vec![], }); From 9277872b4b12c559dec0528c0035722eeedee839 Mon Sep 17 00:00:00 2001 From: richardb Date: Thu, 27 Jun 2024 13:47:17 +0200 Subject: [PATCH 30/43] readme in progress --- README.md | 38 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/README.md b/README.md index 1184e114..8bd934aa 100644 --- a/README.md +++ b/README.md @@ -2,3 +2,41 @@ This is a decentralized pool mining software for Tari network using SHA-3 algorithm. +How it works +--- +This is an implementation of [Bitcoin's original p2pool](https://en.bitcoin.it/wiki/P2Pool) with some modifications. 
+ +Sha P2Pool uses an in-memory **side chain** (it is called **share chain**) to keep track of the miners +who contributed in mining +(only the last **80 blocks** are stored, so whoever get a share recently (mined a share chain block) will be included in +payouts!). + +All the payouts are instant and decentralized without any authority to control them. + +Prerequisites +--- + +- Running Tor proxy ([Instructions](https://github.com/tari-project/tari?tab=readme-ov-file#perform-sha3-mining)) +- **Running** and **Synced** **Tari Base Node** + ([Setup instructions (run minotari_node)](https://github.com/tari-project/tari)) +- A Tari wallet address ([Setup instructions (run minotari_console_wallet)](https://github.com/tari-project/tari)) + +How to use +--- + +- First let's assume that we have all the [prerequisites](#Prerequisites) are set up and running +- Create a new configuration for the miner in `~/.tari//miner.toml` + + (where you should replace `` with the current network the base node runs on, `esmeralda` is the + default): + ```toml + [miner] + base_node_grpc_address = "/ip4/127.0.0.1/tcp/18145" + sha_p2pool_enabled = true + ``` + + Please note that `18145` is the port where `base node's gRPC` is running, so if it is different from the default + one (`18145`) + just use the right port. +- + From 9426a2d46b18a8c3efe2e369e0136133ef079585 Mon Sep 17 00:00:00 2001 From: richardb Date: Fri, 28 Jun 2024 07:42:47 +0200 Subject: [PATCH 31/43] updated readme --- README.md | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 8bd934aa..9fabc1cf 100644 --- a/README.md +++ b/README.md @@ -38,5 +38,9 @@ How to use Please note that `18145` is the port where `base node's gRPC` is running, so if it is different from the default one (`18145`) just use the right port. 
-- +- Start sha p2pool by running the binary simple `$ ./sha_p2pool` or using `Cargo` (if installed and want to build from + source): + `$ cargo build --release && ./target/release/sha_p2pool` + +**Note:** For more information about usage of p2pool, just run `$ sha_p2pool --help`! From 6f355fbcc20e0c2d59a9a7ae69e9acba761155de Mon Sep 17 00:00:00 2001 From: richardb Date: Fri, 28 Jun 2024 11:35:35 +0200 Subject: [PATCH 32/43] fixing block sync logic and format all code --- src/main.rs | 9 +- src/server/config.rs | 2 +- src/server/grpc/base_node.rs | 292 ++++++++++++++++++++------- src/server/grpc/error.rs | 2 +- src/server/grpc/mod.rs | 2 +- src/server/grpc/p2pool.rs | 111 ++++++++--- src/server/grpc/util.rs | 19 +- src/server/p2p/client.rs | 32 ++- src/server/p2p/error.rs | 4 +- src/server/p2p/messages.rs | 34 ++-- src/server/p2p/mod.rs | 5 +- src/server/p2p/p2p.rs | 373 +++++++++++++++++++++-------------- src/server/p2p/peer_store.rs | 29 ++- src/server/server.rs | 36 +++- src/sharechain/block.rs | 9 +- src/sharechain/error.rs | 2 +- src/sharechain/in_memory.rs | 104 +++++----- src/sharechain/mod.rs | 6 +- 18 files changed, 702 insertions(+), 369 deletions(-) diff --git a/src/main.rs b/src/main.rs index 2eaaa22e..619026a7 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,7 +1,7 @@ use std::path::PathBuf; -use clap::builder::Styles; use clap::builder::styling::AnsiColor; +use clap::builder::Styles; use clap::Parser; use env_logger::Builder; use log::LevelFilter; @@ -58,7 +58,12 @@ struct Cli { /// Private key folder. /// /// Needs --stable-peer to be set. - #[arg(long, value_name = "private-key-folder", requires = "stable_peer", default_value = ".")] + #[arg( + long, + value_name = "private-key-folder", + requires = "stable_peer", + default_value = "." 
+ )] private_key_folder: PathBuf, } diff --git a/src/server/config.rs b/src/server/config.rs index 49b7bca7..eb7a61d9 100644 --- a/src/server/config.rs +++ b/src/server/config.rs @@ -19,7 +19,7 @@ impl Default for Config { fn default() -> Self { Self { base_node_address: String::from("http://127.0.0.1:18142"), - p2p_port: 0, // bind to any free port + p2p_port: 0, // bind to any free port grpc_port: 18145, // to possibly not collide with any other ports idle_connection_timeout: Duration::from_secs(30), peer_store: PeerStoreConfig::default(), diff --git a/src/server/grpc/base_node.rs b/src/server/grpc/base_node.rs index 6a98f2a1..8bcee7fc 100644 --- a/src/server/grpc/base_node.rs +++ b/src/server/grpc/base_node.rs @@ -4,11 +4,24 @@ use libp2p::futures::channel::mpsc; use libp2p::futures::SinkExt; use log::error; use minotari_app_grpc::tari_rpc; -use minotari_app_grpc::tari_rpc::{Block, BlockBlobRequest, BlockGroupRequest, BlockGroupResponse, BlockHeaderResponse, BlockHeight, BlockTimingResponse, ConsensusConstants, Empty, FetchMatchingUtxosRequest, GetActiveValidatorNodesRequest, GetBlocksRequest, GetHeaderByHashRequest, GetMempoolTransactionsRequest, GetNewBlockBlobResult, GetNewBlockResult, GetNewBlockTemplateWithCoinbasesRequest, GetNewBlockWithCoinbasesRequest, GetPeersRequest, GetShardKeyRequest, GetShardKeyResponse, GetSideChainUtxosRequest, GetTemplateRegistrationsRequest, HeightRequest, HistoricalBlock, ListConnectedPeersResponse, ListHeadersRequest, MempoolStatsResponse, NetworkStatusResponse, NewBlockTemplate, NewBlockTemplateRequest, NewBlockTemplateResponse, NodeIdentity, SearchKernelsRequest, SearchUtxosRequest, SoftwareUpdate, StringValue, SubmitBlockResponse, SubmitTransactionRequest, SubmitTransactionResponse, SyncInfoResponse, SyncProgressResponse, TipInfoResponse, TransactionStateRequest, TransactionStateResponse, ValueAtHeightResponse}; use minotari_app_grpc::tari_rpc::base_node_client::BaseNodeClient; +use minotari_app_grpc::tari_rpc::{ + Block, 
BlockBlobRequest, BlockGroupRequest, BlockGroupResponse, BlockHeaderResponse, + BlockHeight, BlockTimingResponse, ConsensusConstants, Empty, FetchMatchingUtxosRequest, + GetActiveValidatorNodesRequest, GetBlocksRequest, GetHeaderByHashRequest, + GetMempoolTransactionsRequest, GetNewBlockBlobResult, GetNewBlockResult, + GetNewBlockTemplateWithCoinbasesRequest, GetNewBlockWithCoinbasesRequest, GetPeersRequest, + GetShardKeyRequest, GetShardKeyResponse, GetSideChainUtxosRequest, + GetTemplateRegistrationsRequest, HeightRequest, HistoricalBlock, ListConnectedPeersResponse, + ListHeadersRequest, MempoolStatsResponse, NetworkStatusResponse, NewBlockTemplate, + NewBlockTemplateRequest, NewBlockTemplateResponse, NodeIdentity, SearchKernelsRequest, + SearchUtxosRequest, SoftwareUpdate, StringValue, SubmitBlockResponse, SubmitTransactionRequest, + SubmitTransactionResponse, SyncInfoResponse, SyncProgressResponse, TipInfoResponse, + TransactionStateRequest, TransactionStateResponse, ValueAtHeightResponse, +}; use tokio::sync::Mutex; -use tonic::{Request, Response, Status, Streaming}; use tonic::transport::Channel; +use tonic::{Request, Response, Status, Streaming}; use crate::server::grpc::error::Error; use crate::server::grpc::util; @@ -25,7 +38,11 @@ macro_rules! proxy_simple_result { match $self.client.lock().await.$call($request.into_inner()).await { Ok(resp) => Ok(resp), Err(error) => { - error!("Error while calling {:?} on base node: {:?}", stringify!($call), error); + error!( + "Error while calling {:?} on base node: {:?}", + stringify!($call), + error + ); Err(error) } } @@ -34,17 +51,21 @@ macro_rules! proxy_simple_result { macro_rules! 
proxy_stream_result { ($self:ident, $call:ident, $request:ident, $page_size:ident) => { - streaming_response(String::from(stringify!($call)), - $self.client.lock().await.$call($request.into_inner()).await, - $page_size, - ).await + streaming_response( + String::from(stringify!($call)), + $self.client.lock().await.$call($request.into_inner()).await, + $page_size, + ) + .await }; ($self:ident, $call:ident, $request:ident, $page_size:expr) => { - streaming_response(String::from(stringify!($call)), - $self.client.lock().await.$call($request.into_inner()).await, - $page_size, - ).await + streaming_response( + String::from(stringify!($call)), + $self.client.lock().await.$call($request.into_inner()).await, + $page_size, + ) + .await }; } @@ -52,9 +73,10 @@ macro_rules! proxy_stream_result { async fn streaming_response( call: String, result: Result>, Status>, - page_size: usize) - -> Result>>, Status> - where R: Send + Sync + 'static, + page_size: usize, +) -> Result>>, Status> +where + R: Send + Sync + 'static, { match result { Ok(response) => { @@ -71,145 +93,243 @@ async fn streaming_response( }); Ok(Response::new(rx)) } - Err(status) => Err(status) + Err(status) => Err(status), } } /// Base node gRPC service that proxies all the requests to base node when miner calls them. /// This makes sure that any extra call towards base node is served. 
-pub struct TariBaseNodeGrpc -{ +pub struct TariBaseNodeGrpc { client: Arc>>, } -impl TariBaseNodeGrpc -{ - pub async fn new( - base_node_address: String, - ) -> Result { - Ok(Self { client: Arc::new(Mutex::new(util::connect_base_node(base_node_address).await?)) }) +impl TariBaseNodeGrpc { + pub async fn new(base_node_address: String) -> Result { + Ok(Self { + client: Arc::new(Mutex::new( + util::connect_base_node(base_node_address).await?, + )), + }) } } #[tonic::async_trait] -impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc -{ +impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc { type ListHeadersStream = mpsc::Receiver>; - async fn list_headers(&self, request: Request) -> Result, Status> { + async fn list_headers( + &self, + request: Request, + ) -> Result, Status> { proxy_stream_result!(self, list_headers, request, LIST_HEADERS_PAGE_SIZE) } - async fn get_header_by_hash(&self, request: Request) -> Result, Status> { + async fn get_header_by_hash( + &self, + request: Request, + ) -> Result, Status> { proxy_simple_result!(self, get_header_by_hash, request) } type GetBlocksStream = mpsc::Receiver>; - async fn get_blocks(&self, request: Request) -> Result, Status> { + async fn get_blocks( + &self, + request: Request, + ) -> Result, Status> { proxy_stream_result!(self, get_blocks, request, GET_BLOCKS_PAGE_SIZE) } - async fn get_block_timing(&self, request: Request) -> Result, Status> { + async fn get_block_timing( + &self, + request: Request, + ) -> Result, Status> { proxy_simple_result!(self, get_block_timing, request) } - async fn get_constants(&self, request: Request) -> Result, Status> { + async fn get_constants( + &self, + request: Request, + ) -> Result, Status> { proxy_simple_result!(self, get_constants, request) } - async fn get_block_size(&self, request: Request) -> Result, Status> { + async fn get_block_size( + &self, + request: Request, + ) -> Result, Status> { proxy_simple_result!(self, get_block_size, request) } - async fn 
get_block_fees(&self, request: Request) -> Result, Status> { + async fn get_block_fees( + &self, + request: Request, + ) -> Result, Status> { proxy_simple_result!(self, get_block_fees, request) } async fn get_version(&self, request: Request) -> Result, Status> { proxy_simple_result!(self, get_version, request) } - async fn check_for_updates(&self, request: Request) -> Result, Status> { + async fn check_for_updates( + &self, + request: Request, + ) -> Result, Status> { proxy_simple_result!(self, check_for_updates, request) } type GetTokensInCirculationStream = mpsc::Receiver>; - async fn get_tokens_in_circulation(&self, request: Request) -> Result, Status> { - proxy_stream_result!(self, get_tokens_in_circulation, request, GET_TOKENS_IN_CIRCULATION_PAGE_SIZE) - } - - type GetNetworkDifficultyStream = mpsc::Receiver>; - - async fn get_network_difficulty(&self, request: Request) -> Result, Status> { - proxy_stream_result!(self, get_network_difficulty, request, GET_DIFFICULTY_PAGE_SIZE) - } - - async fn get_new_block_template(&self, request: Request) -> Result, Status> { + async fn get_tokens_in_circulation( + &self, + request: Request, + ) -> Result, Status> { + proxy_stream_result!( + self, + get_tokens_in_circulation, + request, + GET_TOKENS_IN_CIRCULATION_PAGE_SIZE + ) + } + + type GetNetworkDifficultyStream = + mpsc::Receiver>; + + async fn get_network_difficulty( + &self, + request: Request, + ) -> Result, Status> { + proxy_stream_result!( + self, + get_network_difficulty, + request, + GET_DIFFICULTY_PAGE_SIZE + ) + } + + async fn get_new_block_template( + &self, + request: Request, + ) -> Result, Status> { proxy_simple_result!(self, get_new_block_template, request) } - async fn get_new_block(&self, request: Request) -> Result, Status> { + async fn get_new_block( + &self, + request: Request, + ) -> Result, Status> { proxy_simple_result!(self, get_new_block, request) } - async fn get_new_block_with_coinbases(&self, request: Request) -> Result, Status> { + async fn 
get_new_block_with_coinbases( + &self, + request: Request, + ) -> Result, Status> { proxy_simple_result!(self, get_new_block_with_coinbases, request) } - async fn get_new_block_template_with_coinbases(&self, request: Request) -> Result, Status> { + async fn get_new_block_template_with_coinbases( + &self, + request: Request, + ) -> Result, Status> { proxy_simple_result!(self, get_new_block_template_with_coinbases, request) } - async fn get_new_block_blob(&self, request: Request) -> Result, Status> { + async fn get_new_block_blob( + &self, + request: Request, + ) -> Result, Status> { proxy_simple_result!(self, get_new_block_blob, request) } - async fn submit_block(&self, request: Request) -> Result, Status> { + async fn submit_block( + &self, + request: Request, + ) -> Result, Status> { proxy_simple_result!(self, submit_block, request) } - async fn submit_block_blob(&self, request: Request) -> Result, Status> { + async fn submit_block_blob( + &self, + request: Request, + ) -> Result, Status> { proxy_simple_result!(self, submit_block_blob, request) } - async fn submit_transaction(&self, request: Request) -> Result, Status> { + async fn submit_transaction( + &self, + request: Request, + ) -> Result, Status> { proxy_simple_result!(self, submit_transaction, request) } - async fn get_sync_info(&self, request: Request) -> Result, Status> { + async fn get_sync_info( + &self, + request: Request, + ) -> Result, Status> { proxy_simple_result!(self, get_sync_info, request) } - async fn get_sync_progress(&self, request: Request) -> Result, Status> { + async fn get_sync_progress( + &self, + request: Request, + ) -> Result, Status> { proxy_simple_result!(self, get_sync_progress, request) } - async fn get_tip_info(&self, request: Request) -> Result, Status> { + async fn get_tip_info( + &self, + request: Request, + ) -> Result, Status> { proxy_simple_result!(self, get_tip_info, request) } type SearchKernelsStream = mpsc::Receiver>; - async fn search_kernels(&self, request: Request) 
-> Result, Status> { + async fn search_kernels( + &self, + request: Request, + ) -> Result, Status> { proxy_stream_result!(self, search_kernels, request, GET_BLOCKS_PAGE_SIZE) } type SearchUtxosStream = mpsc::Receiver>; - async fn search_utxos(&self, request: Request) -> Result, Status> { + async fn search_utxos( + &self, + request: Request, + ) -> Result, Status> { proxy_stream_result!(self, search_utxos, request, GET_BLOCKS_PAGE_SIZE) } - type FetchMatchingUtxosStream = mpsc::Receiver>; + type FetchMatchingUtxosStream = + mpsc::Receiver>; - async fn fetch_matching_utxos(&self, request: Request) -> Result, Status> { + async fn fetch_matching_utxos( + &self, + request: Request, + ) -> Result, Status> { proxy_stream_result!(self, fetch_matching_utxos, request, GET_BLOCKS_PAGE_SIZE) } type GetPeersStream = mpsc::Receiver>; - async fn get_peers(&self, request: Request) -> Result, Status> { + async fn get_peers( + &self, + request: Request, + ) -> Result, Status> { proxy_stream_result!(self, get_peers, request, GET_BLOCKS_PAGE_SIZE) } - type GetMempoolTransactionsStream = mpsc::Receiver>; + type GetMempoolTransactionsStream = + mpsc::Receiver>; - async fn get_mempool_transactions(&self, request: Request) -> Result, Status> { - proxy_stream_result!(self, get_mempool_transactions, request, GET_BLOCKS_PAGE_SIZE) + async fn get_mempool_transactions( + &self, + request: Request, + ) -> Result, Status> { + proxy_stream_result!( + self, + get_mempool_transactions, + request, + GET_BLOCKS_PAGE_SIZE + ) } - async fn transaction_state(&self, request: Request) -> Result, Status> { + async fn transaction_state( + &self, + request: Request, + ) -> Result, Status> { proxy_simple_result!(self, transaction_state, request) } @@ -217,37 +337,61 @@ impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc proxy_simple_result!(self, identify, request) } - async fn get_network_status(&self, request: Request) -> Result, Status> { + async fn get_network_status( + &self, + request: 
Request, + ) -> Result, Status> { proxy_simple_result!(self, get_network_status, request) } - async fn list_connected_peers(&self, request: Request) -> Result, Status> { + async fn list_connected_peers( + &self, + request: Request, + ) -> Result, Status> { proxy_simple_result!(self, list_connected_peers, request) } - async fn get_mempool_stats(&self, request: Request) -> Result, Status> { + async fn get_mempool_stats( + &self, + request: Request, + ) -> Result, Status> { proxy_simple_result!(self, get_mempool_stats, request) } - type GetActiveValidatorNodesStream = mpsc::Receiver>; + type GetActiveValidatorNodesStream = + mpsc::Receiver>; - async fn get_active_validator_nodes(&self, request: Request) -> Result, Status> { + async fn get_active_validator_nodes( + &self, + request: Request, + ) -> Result, Status> { proxy_stream_result!(self, get_active_validator_nodes, request, 1000) } - async fn get_shard_key(&self, request: Request) -> Result, Status> { + async fn get_shard_key( + &self, + request: Request, + ) -> Result, Status> { proxy_simple_result!(self, get_shard_key, request) } - type GetTemplateRegistrationsStream = mpsc::Receiver>; + type GetTemplateRegistrationsStream = + mpsc::Receiver>; - async fn get_template_registrations(&self, request: Request) -> Result, Status> { + async fn get_template_registrations( + &self, + request: Request, + ) -> Result, Status> { proxy_stream_result!(self, get_template_registrations, request, 10) } - type GetSideChainUtxosStream = mpsc::Receiver>; + type GetSideChainUtxosStream = + mpsc::Receiver>; - async fn get_side_chain_utxos(&self, request: Request) -> Result, Status> { + async fn get_side_chain_utxos( + &self, + request: Request, + ) -> Result, Status> { proxy_stream_result!(self, get_side_chain_utxos, request, 10) } -} \ No newline at end of file +} diff --git a/src/server/grpc/error.rs b/src/server/grpc/error.rs index 0f17335d..e32985bb 100644 --- a/src/server/grpc/error.rs +++ b/src/server/grpc/error.rs @@ -10,4 
+10,4 @@ pub enum Error { pub enum TonicError { #[error("Transport error: {0}")] Transport(#[from] tonic::transport::Error), -} \ No newline at end of file +} diff --git a/src/server/grpc/mod.rs b/src/server/grpc/mod.rs index 8edbe380..0cd49153 100644 --- a/src/server/grpc/mod.rs +++ b/src/server/grpc/mod.rs @@ -3,4 +3,4 @@ pub mod base_node; pub mod error; pub mod p2pool; -pub mod util; \ No newline at end of file +pub mod util; diff --git a/src/server/grpc/p2pool.rs b/src/server/grpc/p2pool.rs index 39efb32d..a6fffce4 100644 --- a/src/server/grpc/p2pool.rs +++ b/src/server/grpc/p2pool.rs @@ -1,10 +1,13 @@ use std::sync::Arc; use log::{debug, error, info}; -use minotari_app_grpc::tari_rpc::{GetNewBlockRequest, GetNewBlockResponse, GetNewBlockTemplateWithCoinbasesRequest, HeightRequest, NewBlockTemplateRequest, PowAlgo, SubmitBlockRequest, SubmitBlockResponse}; use minotari_app_grpc::tari_rpc::base_node_client::BaseNodeClient; use minotari_app_grpc::tari_rpc::pow_algo::PowAlgos; use minotari_app_grpc::tari_rpc::sha_p2_pool_server::ShaP2Pool; +use minotari_app_grpc::tari_rpc::{ + GetNewBlockRequest, GetNewBlockResponse, GetNewBlockTemplateWithCoinbasesRequest, + HeightRequest, NewBlockTemplateRequest, PowAlgo, SubmitBlockRequest, SubmitBlockResponse, +}; use tari_core::proof_of_work::sha3x_difficulty; use tokio::sync::Mutex; use tonic::{Request, Response, Status}; @@ -13,12 +16,13 @@ use crate::server::grpc::error::Error; use crate::server::grpc::util; use crate::server::p2p; use crate::sharechain::block::Block; -use crate::sharechain::SHARE_COUNT; use crate::sharechain::ShareChain; +use crate::sharechain::SHARE_COUNT; /// P2Pool specific gRPC service to provide `get_new_block` and `submit_block` functionalities. 
pub struct ShaP2PoolGrpc - where S: ShareChain + Send + Sync + 'static +where + S: ShareChain + Send + Sync + 'static, { /// Base node client client: Arc>>, @@ -29,10 +33,21 @@ pub struct ShaP2PoolGrpc } impl ShaP2PoolGrpc - where S: ShareChain + Send + Sync + 'static +where + S: ShareChain + Send + Sync + 'static, { - pub async fn new(base_node_address: String, p2p_client: p2p::ServiceClient, share_chain: Arc) -> Result { - Ok(Self { client: Arc::new(Mutex::new(util::connect_base_node(base_node_address).await?)), p2p_client, share_chain }) + pub async fn new( + base_node_address: String, + p2p_client: p2p::ServiceClient, + share_chain: Arc, + ) -> Result { + Ok(Self { + client: Arc::new(Mutex::new( + util::connect_base_node(base_node_address).await?, + )), + p2p_client, + share_chain, + }) } /// Submits a new block to share chain and broadcasts to the p2p network. @@ -41,18 +56,24 @@ impl ShaP2PoolGrpc error!("Failed to add new block: {error:?}"); } debug!("Broadcast new block with height: {:?}", block.height()); - self.p2p_client.broadcast_block(block).await + self.p2p_client + .broadcast_block(block) + .await .map_err(|error| Status::internal(error.to_string())) } } #[tonic::async_trait] impl ShaP2Pool for ShaP2PoolGrpc - where S: ShareChain + Send + Sync + 'static +where + S: ShareChain + Send + Sync + 'static, { /// Returns a new block (that can be mined) which contains all the shares generated /// from the current share chain as coinbase transactions. - async fn get_new_block(&self, _request: Request) -> Result, Status> { + async fn get_new_block( + &self, + _request: Request, + ) -> Result, Status> { let mut pow_algo = PowAlgo::default(); pow_algo.set_pow_algo(PowAlgos::Sha3x); @@ -61,25 +82,38 @@ impl ShaP2Pool for ShaP2PoolGrpc algo: Some(pow_algo.clone()), max_weight: 0, }; - let template_response = self.client.lock().await + let template_response = self + .client + .lock() + .await .get_new_block_template(req) .await? 
.into_inner(); - let miner_data = template_response.miner_data.ok_or_else(|| Status::internal("missing miner data"))?; + let miner_data = template_response + .miner_data + .ok_or_else(|| Status::internal("missing miner data"))?; let reward = miner_data.reward; // request new block template with shares as coinbases let shares = self.share_chain.generate_shares(reward).await; - let response = self.client.lock().await + let response = self + .client + .lock() + .await .get_new_block_template_with_coinbases(GetNewBlockTemplateWithCoinbasesRequest { algo: Some(pow_algo), max_weight: 0, coinbases: shares, - }).await?.into_inner(); + }) + .await? + .into_inner(); // set target difficulty - let miner_data = response.clone().miner_data.ok_or_else(|| Status::internal("missing miner data"))?; + let miner_data = response + .clone() + .miner_data + .ok_or_else(|| Status::internal("missing miner data"))?; let target_difficulty = miner_data.target_difficulty / SHARE_COUNT; Ok(Response::new(GetNewBlockResponse { @@ -91,35 +125,56 @@ impl ShaP2Pool for ShaP2PoolGrpc /// Validates the submitted block with the p2pool network, checks for difficulty matching /// with network (using base node), submits mined block to base node and submits new p2pool block /// to p2pool network. 
- async fn submit_block(&self, request: Request) -> Result, Status> { + async fn submit_block( + &self, + request: Request, + ) -> Result, Status> { let grpc_block = request.get_ref(); - let grpc_request_payload = grpc_block.block.clone() + let grpc_request_payload = grpc_block + .block + .clone() .ok_or_else(|| Status::internal("missing block in request"))?; - let mut block = self.share_chain.new_block(grpc_block).await.map_err(|error| Status::internal(error.to_string()))?; + let mut block = self + .share_chain + .new_block(grpc_block) + .await + .map_err(|error| Status::internal(error.to_string()))?; // validate block with other peers - let validation_result = self.p2p_client.validate_block(&block).await + let validation_result = self + .p2p_client + .validate_block(&block) + .await .map_err(|error| Status::internal(error.to_string()))?; if !validation_result { return Err(Status::invalid_argument("invalid block")); } - let origin_block_header = block.original_block_header().as_ref() - .ok_or_else(|| { Status::internal("missing original block header") })?; + let origin_block_header = block + .original_block_header() + .as_ref() + .ok_or_else(|| Status::internal("missing original block header"))?; // Check block's difficulty compared to the latest network one to increase the probability // to get the block accepted (and also a block with lower difficulty than latest one is invalid anyway). 
let request_block_difficulty = sha3x_difficulty(origin_block_header) - .map_err(|error| { Status::internal(error.to_string()) })?; - let mut network_difficulty_stream = self.client.lock().await.get_network_difficulty(HeightRequest { - from_tip: 0, - start_height: origin_block_header.height - 1, - end_height: origin_block_header.height, - }).await?.into_inner(); + .map_err(|error| Status::internal(error.to_string()))?; + let mut network_difficulty_stream = self + .client + .lock() + .await + .get_network_difficulty(HeightRequest { + from_tip: 0, + start_height: origin_block_header.height - 1, + end_height: origin_block_header.height, + }) + .await? + .into_inner(); let mut network_difficulty_matches = false; while let Ok(Some(diff_resp)) = network_difficulty_stream.message().await { if origin_block_header.height == diff_resp.height + 1 - && request_block_difficulty.as_u64() > diff_resp.difficulty { + && request_block_difficulty.as_u64() > diff_resp.difficulty + { network_difficulty_matches = true; } } @@ -151,4 +206,4 @@ impl ShaP2Pool for ShaP2PoolGrpc } } } -} \ No newline at end of file +} diff --git a/src/server/grpc/util.rs b/src/server/grpc/util.rs index 4096c81d..445c3426 100644 --- a/src/server/grpc/util.rs +++ b/src/server/grpc/util.rs @@ -9,22 +9,31 @@ use tonic::transport::Channel; use crate::server::grpc::error::{Error, TonicError}; /// Utility function to connect to a Base node and try infinitely when it fails until gets connected. 
-pub async fn connect_base_node(base_node_address: String) -> Result, Error> { +pub async fn connect_base_node( + base_node_address: String, +) -> Result, Error> { let client_result = BaseNodeGrpcClient::connect(base_node_address.clone()) .await .map_err(|e| Error::Tonic(TonicError::Transport(e))); let client = match client_result { Ok(client) => client, Err(error) => { - error!("[Retry] Failed to connect to Tari base node: {:?}", error.to_string()); + error!( + "[Retry] Failed to connect to Tari base node: {:?}", + error.to_string() + ); let mut client = None; while client.is_none() { sleep(Duration::from_secs(5)).await; match BaseNodeGrpcClient::connect(base_node_address.clone()) .await - .map_err(|e| Error::Tonic(TonicError::Transport(e))) { + .map_err(|e| Error::Tonic(TonicError::Transport(e))) + { Ok(curr_client) => client = Some(curr_client), - Err(error) => error!("[Retry] Failed to connect to Tari base node: {:?}", error.to_string()), + Err(error) => error!( + "[Retry] Failed to connect to Tari base node: {:?}", + error.to_string() + ), } } client.unwrap() @@ -32,4 +41,4 @@ pub async fn connect_base_node(base_node_address: String) -> Result, config: ClientConfig, ) -> Self { - Self { channels, peer_store, config } + Self { + channels, + peer_store, + config, + } } /// Triggering broadcasting of a new block to p2pool network. 
pub async fn broadcast_block(&self, block: &Block) -> Result<(), ClientError> { - self.channels.broadcast_block_sender.send(block.clone()) - .map_err(|error| + self.channels + .broadcast_block_sender + .send(block.clone()) + .map_err(|error| { ClientError::ChannelSend(Box::new(ChannelSendError::BroadcastBlock(error))) - )?; + })?; Ok(()) } - async fn validate_block_with_retries(&self, block: &Block, mut retries: u64) -> Result { + async fn validate_block_with_retries( + &self, + block: &Block, + mut retries: u64, + ) -> Result { if retries >= self.config.validate_block_max_retries { warn!(target: LOG_TARGET, "❗Too many validation retries!"); return Ok(false); @@ -104,10 +114,12 @@ impl ServiceClient { let start = Instant::now(); // send request to validate block - self.channels.validate_block_sender.send(ValidateBlockRequest::new(block.clone())) - .map_err(|error| + self.channels + .validate_block_sender + .send(ValidateBlockRequest::new(block.clone())) + .map_err(|error| { ClientError::ChannelSend(Box::new(ChannelSendError::ValidateBlockRequest(error))) - )?; + })?; // calculate how many validations we need (more than 2/3 of peers should validate) let peer_count = self.peer_store.peer_count().await as f64 + 1.0; @@ -160,4 +172,4 @@ impl ServiceClient { pub async fn validate_block(&self, block: &Block) -> Result { self.validate_block_with_retries(block, 0).await } -} \ No newline at end of file +} diff --git a/src/server/p2p/error.rs b/src/server/p2p/error.rs index 01041e0c..0cf2a0fe 100644 --- a/src/server/p2p/error.rs +++ b/src/server/p2p/error.rs @@ -1,8 +1,8 @@ -use libp2p::{multiaddr, noise, TransportError}; use libp2p::gossipsub::PublishError; use libp2p::identity::DecodingError; use libp2p::kad::NoKnownPeers; use libp2p::swarm::DialError; +use libp2p::{multiaddr, noise, TransportError}; use thiserror::Error; use crate::server::p2p; @@ -42,4 +42,4 @@ pub enum LibP2PError { MissingPeerId(String), #[error("Key decode error: {0}")] KeyDecoding(#[from] 
DecodingError), -} \ No newline at end of file +} diff --git a/src/server/p2p/messages.rs b/src/server/p2p/messages.rs index 05cd2488..32c092a6 100644 --- a/src/server/p2p/messages.rs +++ b/src/server/p2p/messages.rs @@ -27,13 +27,15 @@ macro_rules! impl_conversions { }; } pub fn deserialize_message<'a, T>(raw_message: &'a [u8]) -> Result - where T: Deserialize<'a>, +where + T: Deserialize<'a>, { serde_cbor::from_slice(raw_message).map_err(Error::SerializeDeserialize) } pub fn serialize_message(input: &T) -> Result, Error> - where T: Serialize, +where + T: Serialize, { serde_cbor::to_vec(input).map_err(Error::SerializeDeserialize) } @@ -46,8 +48,14 @@ pub struct PeerInfo { impl_conversions!(PeerInfo); impl PeerInfo { pub fn new(current_height: u64) -> Self { - let timestamp = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_micros(); - Self { current_height, timestamp } + let timestamp = SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap() + .as_micros(); + Self { + current_height, + timestamp, + } } } @@ -59,7 +67,10 @@ pub struct ValidateBlockRequest { impl_conversions!(ValidateBlockRequest); impl ValidateBlockRequest { pub fn new(block: Block) -> Self { - let timestamp = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_micros(); + let timestamp = SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap() + .as_micros(); Self { block, timestamp } } @@ -77,12 +88,11 @@ pub struct ValidateBlockResult { } impl_conversions!(ValidateBlockResult); impl ValidateBlockResult { - pub fn new( - peer_id: PeerId, - block: Block, - valid: bool, - ) -> Self { - let timestamp = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_micros(); + pub fn new(peer_id: PeerId, block: Block, valid: bool) -> Self { + let timestamp = SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap() + .as_micros(); Self { peer_id, block, @@ -112,4 +122,4 @@ impl ShareChainSyncResponse { pub fn new(blocks: Vec) -> Self { Self { blocks } } -} \ No newline at end of file 
+} diff --git a/src/server/p2p/mod.rs b/src/server/p2p/mod.rs index e29f95ce..e32658e9 100644 --- a/src/server/p2p/mod.rs +++ b/src/server/p2p/mod.rs @@ -5,9 +5,8 @@ pub use client::*; pub use error::*; pub use p2p::*; -mod p2p; +pub(crate) mod client; mod error; pub mod messages; +mod p2p; pub mod peer_store; -pub(crate) mod client; - diff --git a/src/server/p2p/p2p.rs b/src/server/p2p/p2p.rs index 98235d44..7e5d4d22 100644 --- a/src/server/p2p/p2p.rs +++ b/src/server/p2p/p2p.rs @@ -3,28 +3,35 @@ use std::path::PathBuf; use std::sync::Arc; use std::time::Duration; -use libp2p::{gossipsub, kad, mdns, Multiaddr, noise, request_response, StreamProtocol, Swarm, tcp, yamux}; use libp2p::futures::StreamExt; use libp2p::gossipsub::{IdentTopic, Message, PublishError}; use libp2p::identity::Keypair; -use libp2p::kad::{Event, Mode}; use libp2p::kad::store::MemoryStore; +use libp2p::kad::{Event, Mode}; use libp2p::mdns::tokio::Tokio; use libp2p::multiaddr::Protocol; use libp2p::request_response::{cbor, ResponseChannel}; use libp2p::swarm::{NetworkBehaviour, SwarmEvent}; +use libp2p::{ + gossipsub, kad, mdns, noise, request_response, tcp, yamux, Multiaddr, StreamProtocol, Swarm, +}; use log::{debug, error, info, warn}; use tari_utilities::hex::Hex; -use tokio::{io, select}; use tokio::fs::File; use tokio::io::{AsyncReadExt, AsyncWriteExt}; -use tokio::sync::{broadcast, mpsc}; use tokio::sync::broadcast::error::RecvError; +use tokio::sync::{broadcast, mpsc}; +use tokio::{io, select}; use crate::server::config; -use crate::server::p2p::{client, Error, LibP2PError, messages, ServiceClient, ServiceClientChannels}; -use crate::server::p2p::messages::{PeerInfo, ShareChainSyncRequest, ShareChainSyncResponse, ValidateBlockRequest, ValidateBlockResult}; +use crate::server::p2p::messages::{ + PeerInfo, ShareChainSyncRequest, ShareChainSyncResponse, ValidateBlockRequest, + ValidateBlockResult, +}; use crate::server::p2p::peer_store::PeerStore; +use crate::server::p2p::{ + client, 
messages, Error, LibP2PError, ServiceClient, ServiceClientChannels, +}; use crate::sharechain::block::Block; use crate::sharechain::ShareChain; @@ -68,7 +75,8 @@ pub struct ServerNetworkBehaviour { /// Service is the implementation that holds every peer-to-peer related logic /// that makes sure that all the communications, syncing, broadcasting etc... are done. pub struct Service - where S: ShareChain + Send + Sync + 'static, +where + S: ShareChain + Send + Sync + 'static, { swarm: Swarm, port: u16, @@ -88,15 +96,14 @@ pub struct Service } impl Service - where S: ShareChain + Send + Sync + 'static, +where + S: ShareChain + Send + Sync + 'static, { /// Constructs a new Service from the provided config. /// It also instantiates libp2p swarm inside. pub async fn new(config: &config::Config, share_chain: Arc) -> Result { let swarm = Self::new_swarm(config).await?; - let peer_store = Arc::new( - PeerStore::new(&config.peer_store), - ); + let peer_store = Arc::new(PeerStore::new(&config.peer_store)); // client related channels let (validate_req_tx, validate_req_rx) = broadcast::channel::(1000); @@ -141,12 +148,17 @@ impl Service // otherwise create a new one let key_pair = Keypair::generate_ed25519(); - let mut new_private_key_file = File::create_new(key_path).await + let mut new_private_key_file = File::create_new(key_path) + .await .map_err(|error| Error::LibP2P(LibP2PError::IO(error)))?; - new_private_key_file.write_all( - key_pair.to_protobuf_encoding() - .map_err(|error| Error::LibP2P(LibP2PError::KeyDecoding(error)))?.as_slice() - ).await + new_private_key_file + .write_all( + key_pair + .to_protobuf_encoding() + .map_err(|error| Error::LibP2P(LibP2PError::KeyDecoding(error)))? 
+ .as_slice(), + ) + .await .map_err(|error| Error::LibP2P(LibP2PError::IO(error)))?; Ok(key_pair) @@ -154,60 +166,64 @@ impl Service /// Creates a new swarm from the provided config async fn new_swarm(config: &config::Config) -> Result, Error> { - let mut swarm = libp2p::SwarmBuilder::with_existing_identity( - Self::keypair(&config.p2p_service).await? - ) - .with_tokio() - .with_tcp( - tcp::Config::default(), - noise::Config::new, - yamux::Config::default, - ) - .map_err(|e| Error::LibP2P(LibP2PError::Noise(e)))? - .with_behaviour(move |key_pair| { - // gossipsub - let message_id_fn = |message: &gossipsub::Message| { - let mut s = DefaultHasher::new(); - if let Some(soure_peer) = message.source { - soure_peer.to_bytes().hash(&mut s); - } - message.data.hash(&mut s); - gossipsub::MessageId::from(s.finish().to_string()) - }; - let gossipsub_config = gossipsub::ConfigBuilder::default() - .heartbeat_interval(Duration::from_secs(10)) - .validation_mode(gossipsub::ValidationMode::Strict) - .message_id_fn(message_id_fn) - .build() - .map_err(|msg| io::Error::new(io::ErrorKind::Other, msg))?; - let gossipsub = gossipsub::Behaviour::new( - gossipsub::MessageAuthenticity::Signed(key_pair.clone()), - gossipsub_config, - )?; - - Ok(ServerNetworkBehaviour { - gossipsub, - mdns: mdns::Behaviour::new( - mdns::Config::default(), - key_pair.public().to_peer_id(), - ) + let mut swarm = + libp2p::SwarmBuilder::with_existing_identity(Self::keypair(&config.p2p_service).await?) + .with_tokio() + .with_tcp( + tcp::Config::default(), + noise::Config::new, + yamux::Config::default, + ) + .map_err(|e| Error::LibP2P(LibP2PError::Noise(e)))? 
+ .with_behaviour(move |key_pair| { + // gossipsub + let message_id_fn = |message: &gossipsub::Message| { + let mut s = DefaultHasher::new(); + if let Some(soure_peer) = message.source { + soure_peer.to_bytes().hash(&mut s); + } + message.data.hash(&mut s); + gossipsub::MessageId::from(s.finish().to_string()) + }; + let gossipsub_config = gossipsub::ConfigBuilder::default() + .heartbeat_interval(Duration::from_secs(10)) + .validation_mode(gossipsub::ValidationMode::Strict) + .message_id_fn(message_id_fn) + .build() + .map_err(|msg| io::Error::new(io::ErrorKind::Other, msg))?; + let gossipsub = gossipsub::Behaviour::new( + gossipsub::MessageAuthenticity::Signed(key_pair.clone()), + gossipsub_config, + )?; + + Ok(ServerNetworkBehaviour { + gossipsub, + mdns: mdns::Behaviour::new( + mdns::Config::default(), + key_pair.public().to_peer_id(), + ) .map_err(|e| Error::LibP2P(LibP2PError::IO(e)))?, - share_chain_sync: cbor::Behaviour::::new( - [( - StreamProtocol::new(SHARE_CHAIN_SYNC_REQ_RESP_PROTOCOL), - request_response::ProtocolSupport::Full, - )], - request_response::Config::default(), - ), - kademlia: kad::Behaviour::new( - key_pair.public().to_peer_id(), - MemoryStore::new(key_pair.public().to_peer_id()), - ), + share_chain_sync: cbor::Behaviour::< + ShareChainSyncRequest, + ShareChainSyncResponse, + >::new( + [( + StreamProtocol::new(SHARE_CHAIN_SYNC_REQ_RESP_PROTOCOL), + request_response::ProtocolSupport::Full, + )], + request_response::Config::default(), + ), + kademlia: kad::Behaviour::new( + key_pair.public().to_peer_id(), + MemoryStore::new(key_pair.public().to_peer_id()), + ), + }) + }) + .map_err(|e| Error::LibP2P(LibP2PError::Behaviour(e.to_string())))? + .with_swarm_config(|c| { + c.with_idle_connection_timeout(config.idle_connection_timeout) }) - }) - .map_err(|e| Error::LibP2P(LibP2PError::Behaviour(e.to_string())))? 
- .with_swarm_config(|c| c.with_idle_connection_timeout(config.idle_connection_timeout)) - .build(); + .build(); swarm.behaviour_mut().kademlia.set_mode(Some(Mode::Server)); @@ -234,7 +250,10 @@ impl Service /// Handles block validation requests coming from Service clients. /// All the requests from clients are sent to [`BLOCK_VALIDATION_REQUESTS_TOPIC`]. - async fn handle_client_validate_block_request(&mut self, result: Result) { + async fn handle_client_validate_block_request( + &mut self, + result: Result, + ) { match result { Ok(request) => { let request_raw_result: Result, Error> = request.try_into(); @@ -263,10 +282,12 @@ impl Service let result_raw_result: Result, Error> = result.try_into(); match result_raw_result { Ok(result_raw) => { - if let Err(error) = self.swarm.behaviour_mut().gossipsub.publish( - IdentTopic::new(BLOCK_VALIDATION_RESULTS_TOPIC), - result_raw, - ) { + if let Err(error) = self + .swarm + .behaviour_mut() + .gossipsub + .publish(IdentTopic::new(BLOCK_VALIDATION_RESULTS_TOPIC), result_raw) + { error!(target: LOG_TARGET, "Failed to publish block validation result: {error:?}"); } } @@ -281,12 +302,14 @@ impl Service async fn broadcast_peer_info(&mut self) -> Result<(), Error> { // get peer info let share_chain = self.share_chain.clone(); - let current_height = share_chain.tip_height().await - .map_err(Error::ShareChain)?; + let current_height = share_chain.tip_height().await.map_err(Error::ShareChain)?; let peer_info_raw: Vec = PeerInfo::new(current_height).try_into()?; // broadcast peer info - self.swarm.behaviour_mut().gossipsub.publish(IdentTopic::new(PEER_INFO_TOPIC), peer_info_raw) + self.swarm + .behaviour_mut() + .gossipsub + .publish(IdentTopic::new(PEER_INFO_TOPIC), peer_info_raw) .map_err(|error| Error::LibP2P(LibP2PError::Publish(error)))?; Ok(()) @@ -299,13 +322,22 @@ impl Service let block_raw_result: Result, Error> = block.try_into(); match block_raw_result { Ok(block_raw) => { - match 
self.swarm.behaviour_mut().gossipsub.publish(IdentTopic::new(NEW_BLOCK_TOPIC), block_raw) - .map_err(|error| Error::LibP2P(LibP2PError::Publish(error))) { + match self + .swarm + .behaviour_mut() + .gossipsub + .publish(IdentTopic::new(NEW_BLOCK_TOPIC), block_raw) + .map_err(|error| Error::LibP2P(LibP2PError::Publish(error))) + { Ok(_) => {} - Err(error) => error!(target: LOG_TARGET, "Failed to broadcast new block: {error:?}"), + Err(error) => { + error!(target: LOG_TARGET, "Failed to broadcast new block: {error:?}") + } } } - Err(error) => error!(target: LOG_TARGET, "Failed to convert block to bytes: {error:?}"), + Err(error) => { + error!(target: LOG_TARGET, "Failed to convert block to bytes: {error:?}") + } } } Err(error) => error!(target: LOG_TARGET, "Failed to receive new block: {error:?}"), @@ -314,7 +346,10 @@ impl Service /// Subscribing to a gossipsub topic. fn subscribe(&mut self, topic: &str) { - self.swarm.behaviour_mut().gossipsub.subscribe(&IdentTopic::new(topic)) + self.swarm + .behaviour_mut() + .gossipsub + .subscribe(&IdentTopic::new(topic)) .expect("must be subscribed to topic"); } @@ -337,30 +372,29 @@ impl Service let topic = message.topic.as_str(); match topic { - PEER_INFO_TOPIC => { - match messages::PeerInfo::try_from(message) { - Ok(payload) => { - debug!(target: LOG_TARGET, "New peer info: {peer:?} -> {payload:?}"); - self.peer_store.add(peer, payload).await; - if let Some(tip) = self.peer_store.tip_of_block_height().await { - if let Ok(curr_height) = self.share_chain.tip_height().await { - if curr_height < tip.height { - self.sync_share_chain().await; - } + PEER_INFO_TOPIC => match messages::PeerInfo::try_from(message) { + Ok(payload) => { + debug!(target: LOG_TARGET, "New peer info: {peer:?} -> {payload:?}"); + self.peer_store.add(peer, payload).await; + if let Some(tip) = self.peer_store.tip_of_block_height().await { + if let Ok(curr_height) = self.share_chain.tip_height().await { + if curr_height < tip.height { + 
self.sync_share_chain().await; } } } - Err(error) => { - error!(target: LOG_TARGET, "Can't deserialize peer info payload: {:?}", error); - } } - } + Err(error) => { + error!(target: LOG_TARGET, "Can't deserialize peer info payload: {:?}", error); + } + }, BLOCK_VALIDATION_REQUESTS_TOPIC => { match messages::ValidateBlockRequest::try_from(message) { Ok(payload) => { debug!(target: LOG_TARGET, "Block validation request: {payload:?}"); - let validate_result = self.share_chain.validate_block(&payload.block()).await; + let validate_result = + self.share_chain.validate_block(&payload.block()).await; let mut valid = false; if let Ok(is_valid) = validate_result { valid = is_valid; @@ -368,7 +402,7 @@ impl Service // TODO: Generate partial schnorr signature to prove that current peer validated the block (using peer's private key and broadcast public key vie PeerInfo) // TODO: to be able to verify at other peers. - // TODO: Validate whether new block includes all the shares (generate shares until height of new_block.height - 1) + // TODO: Validate whether new block includes all the shares (generate shares until height of new_block.height - 1) // TODO: by generating a new block and check kernels/outputs whether they are the same or not. // TODO: Validating new blocks version 2 would be to send a proof that was generated from the shares. 
@@ -394,7 +428,9 @@ impl Service senders_to_delete.push(i); } } - senders_to_delete.iter().for_each(|i| { self.client_validate_block_res_txs.remove(*i); }); + senders_to_delete.iter().for_each(|i| { + self.client_validate_block_res_txs.remove(*i); + }); } Err(error) => { error!(target: LOG_TARGET, "Can't deserialize block validation request payload: {:?}", error); @@ -402,19 +438,17 @@ impl Service } } // TODO: send a signature that proves that the actual block was coming from this peer - NEW_BLOCK_TOPIC => { - match Block::try_from(message) { - Ok(payload) => { - info!(target: LOG_TARGET,"🆕 New block from broadcast: {:?}", &payload.hash().to_hex()); - if let Err(error) = self.share_chain.submit_block(&payload).await { - error!(target: LOG_TARGET, "Could not add new block to local share chain: {error:?}"); - } - } - Err(error) => { - error!(target: LOG_TARGET, "Can't deserialize broadcast block payload: {:?}", error); + NEW_BLOCK_TOPIC => match Block::try_from(message) { + Ok(payload) => { + info!(target: LOG_TARGET,"🆕 New block from broadcast: {:?}", &payload.hash().to_hex()); + if let Err(error) = self.share_chain.submit_block(&payload).await { + error!(target: LOG_TARGET, "Could not add new block to local share chain: {error:?}"); } } - } + Err(error) => { + error!(target: LOG_TARGET, "Can't deserialize broadcast block payload: {:?}", error); + } + }, &_ => { warn!(target: LOG_TARGET, "Unknown topic {topic:?}!"); } @@ -422,12 +456,21 @@ impl Service } /// Handles share chain sync request (coming from other peer). 
- async fn handle_share_chain_sync_request(&mut self, channel: ResponseChannel, request: ShareChainSyncRequest) { + async fn handle_share_chain_sync_request( + &mut self, + channel: ResponseChannel, + request: ShareChainSyncRequest, + ) { debug!(target: LOG_TARGET, "Incoming Share chain sync request: {request:?}"); match self.share_chain.blocks(request.from_height).await { Ok(blocks) => { - if self.swarm.behaviour_mut().share_chain_sync.send_response(channel, ShareChainSyncResponse::new(blocks.clone())) - .is_err() { + if self + .swarm + .behaviour_mut() + .share_chain_sync + .send_response(channel, ShareChainSyncResponse::new(blocks.clone())) + .is_err() + { error!(target: LOG_TARGET, "Failed to send block sync response"); } } @@ -439,7 +482,7 @@ impl Service /// All the responding blocks will be tried to put into local share chain. async fn handle_share_chain_sync_response(&mut self, response: ShareChainSyncResponse) { debug!(target: LOG_TARGET, "Share chain sync response: {response:?}"); - if let Err(error) = self.share_chain.submit_blocks(response.blocks).await { + if let Err(error) = self.share_chain.submit_blocks(response.blocks, true).await { error!(target: LOG_TARGET, "Failed to add synced blocks to share chain: {error:?}"); } } @@ -454,15 +497,19 @@ impl Service match self.share_chain.tip_height().await { Ok(tip) => { debug!(target: LOG_TARGET, "Send share chain sync request: {result:?}"); - self.swarm.behaviour_mut().share_chain_sync.send_request( - &result.peer_id, - ShareChainSyncRequest::new(tip), - ); + self.swarm + .behaviour_mut() + .share_chain_sync + .send_request(&result.peer_id, ShareChainSyncRequest::new(tip)); + } + Err(error) => { + error!(target: LOG_TARGET, "Failed to get latest height of share chain: {error:?}") } - Err(error) => error!(target: LOG_TARGET, "Failed to get latest height of share chain: {error:?}"), } } - None => error!(target: LOG_TARGET, "Failed to get peer with highest share chain height!") + None => { + error!(target: 
LOG_TARGET, "Failed to get peer with highest share chain height!") + } } } @@ -477,17 +524,27 @@ impl Service mdns::Event::Discovered(peers) => { for (peer, addr) in peers { self.swarm.add_peer_address(peer, addr); - self.swarm.behaviour_mut().gossipsub.add_explicit_peer(&peer); + self.swarm + .behaviour_mut() + .gossipsub + .add_explicit_peer(&peer); } } mdns::Event::Expired(peers) => { for (peer, _addr) in peers { - self.swarm.behaviour_mut().gossipsub.remove_explicit_peer(&peer); + self.swarm + .behaviour_mut() + .gossipsub + .remove_explicit_peer(&peer); } } }, ServerNetworkBehaviourEvent::Gossipsub(event) => match event { - gossipsub::Event::Message { message, message_id: _message_id, propagation_source: _propagation_source } => { + gossipsub::Event::Message { + message, + message_id: _message_id, + propagation_source: _propagation_source, + } => { self.handle_new_gossipsub_message(message).await; } gossipsub::Event::Subscribed { .. } => {} @@ -495,14 +552,24 @@ impl Service gossipsub::Event::GossipsubNotSupported { .. } => {} }, ServerNetworkBehaviourEvent::ShareChainSync(event) => match event { - request_response::Event::Message { peer: _peer, message } => match message { - request_response::Message::Request { request_id: _request_id, request, channel } => { + request_response::Event::Message { + peer: _peer, + message, + } => match message { + request_response::Message::Request { + request_id: _request_id, + request, + channel, + } => { self.handle_share_chain_sync_request(channel, request).await; } - request_response::Message::Response { request_id: _request_id, response } => { + request_response::Message::Response { + request_id: _request_id, + response, + } => { self.handle_share_chain_sync_response(response).await; } - } + }, request_response::Event::OutboundFailure { peer, error, .. } => { error!(target: LOG_TARGET, "REQ-RES outbound failure: {peer:?} -> {error:?}"); } @@ -511,23 +578,32 @@ impl Service } request_response::Event::ResponseSent { .. 
} => {} }, - ServerNetworkBehaviourEvent::Kademlia(event) => { - match event { - Event::RoutingUpdated { peer, old_peer, addresses, .. } => { - addresses.iter().for_each(|addr| { - self.swarm.add_peer_address(peer, addr.clone()); - }); - self.swarm.behaviour_mut().gossipsub.add_explicit_peer(&peer); - if let Some(old_peer) = old_peer { - self.swarm.behaviour_mut().gossipsub.remove_explicit_peer(&old_peer); - if let Err(error) = self.client_peer_changes_tx.send(()) { - error!(target: LOG_TARGET, "Failed to send peer changes trigger: {error:?}"); - } + ServerNetworkBehaviourEvent::Kademlia(event) => match event { + Event::RoutingUpdated { + peer, + old_peer, + addresses, + .. + } => { + addresses.iter().for_each(|addr| { + self.swarm.add_peer_address(peer, addr.clone()); + }); + self.swarm + .behaviour_mut() + .gossipsub + .add_explicit_peer(&peer); + if let Some(old_peer) = old_peer { + self.swarm + .behaviour_mut() + .gossipsub + .remove_explicit_peer(&old_peer); + if let Err(error) = self.client_peer_changes_tx.send(()) { + error!(target: LOG_TARGET, "Failed to send peer changes trigger: {error:?}"); } } - _ => debug!(target: LOG_TARGET, "[KADEMLIA] {event:?}"), } - } + _ => debug!(target: LOG_TARGET, "[KADEMLIA] {event:?}"), + }, }, _ => {} }; @@ -535,7 +611,8 @@ impl Service /// Main loop of the service that drives the events and libp2p swarm forward. async fn main_loop(&mut self) -> Result<(), Error> { - let mut publish_peer_info_interval = tokio::time::interval(self.config.peer_info_publish_interval); + let mut publish_peer_info_interval = + tokio::time::interval(self.config.peer_info_publish_interval); loop { select! 
{ @@ -583,7 +660,8 @@ impl Service } for seed_peer in &self.config.seed_peers { - let addr = seed_peer.parse::() + let addr = seed_peer + .parse::() .map_err(|error| Error::LibP2P(LibP2PError::MultiAddrParse(error)))?; let peer_id = match addr.iter().last() { Some(Protocol::P2p(peer_id)) => Some(peer_id), @@ -592,10 +670,16 @@ impl Service if peer_id.is_none() { return Err(Error::LibP2P(LibP2PError::MissingPeerId(seed_peer.clone()))); } - self.swarm.behaviour_mut().kademlia.add_address(&peer_id.unwrap(), addr); + self.swarm + .behaviour_mut() + .kademlia + .add_address(&peer_id.unwrap(), addr); } - self.swarm.behaviour_mut().kademlia.bootstrap() + self.swarm + .behaviour_mut() + .kademlia + .bootstrap() .map_err(|error| Error::LibP2P(LibP2PError::KademliaNoKnownPeers(error)))?; Ok(()) @@ -618,4 +702,3 @@ impl Service self.main_loop().await } } - diff --git a/src/server/p2p/peer_store.rs b/src/server/p2p/peer_store.rs index 45788207..4640115e 100644 --- a/src/server/p2p/peer_store.rs +++ b/src/server/p2p/peer_store.rs @@ -47,10 +47,7 @@ pub struct PeerStoreBlockHeightTip { impl PeerStoreBlockHeightTip { pub fn new(peer_id: PeerId, height: u64) -> Self { - Self { - peer_id, - height, - } + Self { peer_id, height } } } @@ -79,7 +76,9 @@ impl PeerStore { /// Add a new peer to store. /// If a peer already exists, just replaces it. pub async fn add(&self, peer_id: PeerId, peer_info: PeerInfo) { - self.inner.insert(peer_id, PeerStoreRecord::new(peer_info)).await; + self.inner + .insert(peer_id, PeerStoreRecord::new(peer_info)) + .await; self.set_tip_of_block_height().await; } @@ -91,20 +90,16 @@ impl PeerStore { /// Sets the actual highest block height with peer. 
async fn set_tip_of_block_height(&self) { - if let Some((k, v)) = - self.inner.iter() - .max_by(|(_k1, v1), (_k2, v2)| { - v1.peer_info.current_height.cmp(&v2.peer_info.current_height) - }) { + if let Some((k, v)) = self.inner.iter().max_by(|(_k1, v1), (_k2, v2)| { + v1.peer_info + .current_height + .cmp(&v2.peer_info.current_height) + }) { // save result if let Ok(mut tip_height_opt) = self.tip_of_block_height.write() { if tip_height_opt.is_none() { - let _ = tip_height_opt.insert( - PeerStoreBlockHeightTip::new( - *k, - v.peer_info.current_height, - ) - ); + let _ = tip_height_opt + .insert(PeerStoreBlockHeightTip::new(*k, v.peer_info.current_height)); } else { let mut tip_height = tip_height_opt.unwrap(); tip_height.peer_id = *k; @@ -143,4 +138,4 @@ impl PeerStore { expired_peers } -} \ No newline at end of file +} diff --git a/src/server/server.rs b/src/server/server.rs index 4d9b52c1..e0d5f3f5 100644 --- a/src/server/server.rs +++ b/src/server/server.rs @@ -7,10 +7,10 @@ use minotari_app_grpc::tari_rpc::base_node_server::BaseNodeServer; use minotari_app_grpc::tari_rpc::sha_p2_pool_server::ShaP2PoolServer; use thiserror::Error; -use crate::server::{config, grpc, p2p}; use crate::server::grpc::base_node::TariBaseNodeGrpc; use crate::server::grpc::error::TonicError; use crate::server::grpc::p2pool::ShaP2PoolGrpc; +use crate::server::{config, grpc, p2p}; use crate::sharechain::ShareChain; const LOG_TARGET: &str = "server"; @@ -27,7 +27,8 @@ pub enum Error { /// Server represents the server running all the necessary components for sha-p2pool. 
pub struct Server - where S: ShareChain + Send + Sync + 'static +where + S: ShareChain + Send + Sync + 'static, { config: config::Config, p2p_service: p2p::Service, @@ -37,7 +38,8 @@ pub struct Server // TODO: add graceful shutdown impl Server - where S: ShareChain + Send + Sync + 'static +where + S: ShareChain + Send + Sync + 'static, { pub async fn new(config: config::Config, share_chain: S) -> Result { let share_chain = Arc::new(share_chain); @@ -45,15 +47,30 @@ impl Server // TODO: have base node's network here and pass to p2p_service to be able to subscribe to the right gossipsub topics // TODO: se we are not mixing main net and test net blocks. - let mut p2p_service: p2p::Service = p2p::Service::new(&config, share_chain.clone()).await.map_err(Error::P2PService)?; + let mut p2p_service: p2p::Service = p2p::Service::new(&config, share_chain.clone()) + .await + .map_err(Error::P2PService)?; - let base_node_grpc_service = TariBaseNodeGrpc::new(config.base_node_address.clone()).await.map_err(Error::Grpc)?; + let base_node_grpc_service = TariBaseNodeGrpc::new(config.base_node_address.clone()) + .await + .map_err(Error::Grpc)?; let base_node_grpc_server = BaseNodeServer::new(base_node_grpc_service); - let p2pool_grpc_service = ShaP2PoolGrpc::new(config.base_node_address.clone(), p2p_service.client(), share_chain.clone()).await.map_err(Error::Grpc)?; + let p2pool_grpc_service = ShaP2PoolGrpc::new( + config.base_node_address.clone(), + p2p_service.client(), + share_chain.clone(), + ) + .await + .map_err(Error::Grpc)?; let p2pool_server = ShaP2PoolServer::new(p2pool_grpc_service); - Ok(Self { config, p2p_service, base_node_grpc_service: base_node_grpc_server, p2pool_grpc_service: p2pool_server }) + Ok(Self { + config, + p2p_service, + base_node_grpc_service: base_node_grpc_server, + p2pool_grpc_service: p2pool_server, + }) } pub async fn start_grpc( @@ -67,9 +84,8 @@ impl Server .add_service(base_node_service) .add_service(p2pool_service) .serve( - 
SocketAddr::from_str( - format!("0.0.0.0:{}", grpc_port).as_str() - ).map_err(Error::AddrParse)? + SocketAddr::from_str(format!("0.0.0.0:{}", grpc_port).as_str()) + .map_err(Error::AddrParse)?, ) .await .map_err(|err| { diff --git a/src/sharechain/block.rs b/src/sharechain/block.rs index 7f957b83..0ebc3cb0 100644 --- a/src/sharechain/block.rs +++ b/src/sharechain/block.rs @@ -27,9 +27,10 @@ impl Block { BlockBuilder::new() } pub fn generate_hash(&self) -> BlockHash { - let mut hash = DomainSeparatedConsensusHasher::>::new("block") - .chain(&self.prev_hash) - .chain(&self.height); + let mut hash = + DomainSeparatedConsensusHasher::>::new("block") + .chain(&self.prev_hash) + .chain(&self.height); if let Some(miner_wallet_address) = &self.miner_wallet_address { hash = hash.chain(&miner_wallet_address.to_hex()); @@ -116,5 +117,3 @@ impl BlockBuilder { self.block.clone() } } - - diff --git a/src/sharechain/error.rs b/src/sharechain/error.rs index 96d68219..65d2f780 100644 --- a/src/sharechain/error.rs +++ b/src/sharechain/error.rs @@ -21,4 +21,4 @@ pub enum BlockConvertError { MissingField(String), #[error("Converting gRPC block header error: {0}")] GrpcBlockHeaderConvert(String), -} \ No newline at end of file +} diff --git a/src/sharechain/in_memory.rs b/src/sharechain/in_memory.rs index 925cce28..bd39e9e5 100644 --- a/src/sharechain/in_memory.rs +++ b/src/sharechain/in_memory.rs @@ -10,8 +10,8 @@ use tari_utilities::epoch_time::EpochTime; use tari_utilities::hex::Hex; use tokio::sync::{RwLock, RwLockWriteGuard}; -use crate::sharechain::{Block, MAX_BLOCKS_COUNT, SHARE_COUNT, ShareChain, ShareChainResult}; use crate::sharechain::error::{BlockConvertError, Error}; +use crate::sharechain::{Block, ShareChain, ShareChainResult, MAX_BLOCKS_COUNT, SHARE_COUNT}; const LOG_TARGET: &str = "in_memory_share_chain"; @@ -24,16 +24,10 @@ impl Default for InMemoryShareChain { fn default() -> Self { Self { max_blocks_count: MAX_BLOCKS_COUNT, - blocks: Arc::new( - RwLock::new( - vec![ 
- // genesis block - Block::builder() - .with_height(0) - .build() - ], - ), - ), + blocks: Arc::new(RwLock::new(vec![ + // genesis block + Block::builder().with_height(0).build(), + ])), } } } @@ -43,16 +37,10 @@ impl InMemoryShareChain { pub fn new(max_blocks_count: usize) -> Self { Self { max_blocks_count, - blocks: Arc::new( - RwLock::new( - vec![ - // genesis block - Block::builder() - .with_height(0) - .build() - ], - ), - ), + blocks: Arc::new(RwLock::new(vec![ + // genesis block + Block::builder().with_height(0).build(), + ])), } } @@ -95,14 +83,26 @@ impl InMemoryShareChain { Ok(true) } - async fn submit_block_with_lock(&self, blocks: &mut RwLockWriteGuard<'_, Vec>, block: &Block) -> ShareChainResult<()> { + async fn submit_block_with_lock( + &self, + blocks: &mut RwLockWriteGuard<'_, Vec>, + block: &Block, + clear_before_add: bool, + ) -> ShareChainResult<()> { let block = block.clone(); - let last_block = blocks.last().ok_or_else(|| Error::Empty)?; + let last_block = blocks.last(); // validate - if !self.validate_block(last_block, &block).await? { - return Err(Error::InvalidBlock(block)); + if !clear_before_add && last_block.is_some() { + if !self.validate_block(last_block.unwrap(), &block).await? 
{ + return Err(Error::InvalidBlock(block)); + } + } else if !clear_before_add && last_block.is_none() { + return Err(Error::Empty); + } else if clear_before_add { + // if we are synchronizing blocks, we trust we receive all the valid blocks + blocks.clear(); } if blocks.len() >= self.max_blocks_count { @@ -125,13 +125,16 @@ impl InMemoryShareChain { impl ShareChain for InMemoryShareChain { async fn submit_block(&self, block: &Block) -> ShareChainResult<()> { let mut blocks_write_lock = self.blocks.write().await; - self.submit_block_with_lock(&mut blocks_write_lock, block).await + self.submit_block_with_lock(&mut blocks_write_lock, block, false) + .await } - async fn submit_blocks(&self, blocks: Vec) -> ShareChainResult<()> { + async fn submit_blocks(&self, blocks: Vec, mut sync: bool) -> ShareChainResult<()> { let mut blocks_write_lock = self.blocks.write().await; for block in blocks { - self.submit_block_with_lock(&mut blocks_write_lock, &block).await?; + self.submit_block_with_lock(&mut blocks_write_lock, &block, sync) + .await?; + sync = false; } Ok(()) @@ -148,7 +151,8 @@ impl ShareChain for InMemoryShareChain { let miners = self.miners_with_shares().await; // calculate full hash rate and shares - miners.iter() + miners + .iter() .map(|(addr, rate)| (addr, rate / SHARE_COUNT as f64)) .filter(|(_, share)| *share > 0.0) .for_each(|(addr, share)| { @@ -167,9 +171,13 @@ impl ShareChain for InMemoryShareChain { } async fn new_block(&self, request: &SubmitBlockRequest) -> ShareChainResult { - let origin_block_grpc = request.block.as_ref() + let origin_block_grpc = request + .block + .as_ref() .ok_or_else(|| BlockConvertError::MissingField("block".to_string()))?; - let origin_block_header_grpc = origin_block_grpc.header.as_ref() + let origin_block_header_grpc = origin_block_grpc + .header + .as_ref() .ok_or_else(|| BlockConvertError::MissingField("header".to_string()))?; let origin_block_header = BlockHeader::try_from(origin_block_header_grpc.clone()) 
.map_err(BlockConvertError::GrpcBlockHeaderConvert)?; @@ -177,27 +185,25 @@ impl ShareChain for InMemoryShareChain { let blocks_read_lock = self.blocks.read().await; let last_block = blocks_read_lock.last().ok_or_else(|| Error::Empty)?; - Ok( - Block::builder() - .with_timestamp(EpochTime::now()) - .with_prev_hash(last_block.generate_hash()) - .with_height(last_block.height() + 1) - .with_original_block_header(origin_block_header) - .with_miner_wallet_address( - TariAddress::from_hex(request.wallet_payment_address.as_str()) - .map_err(Error::TariAddress)? - ) - .build() - ) + Ok(Block::builder() + .with_timestamp(EpochTime::now()) + .with_prev_hash(last_block.generate_hash()) + .with_height(last_block.height() + 1) + .with_original_block_header(origin_block_header) + .with_miner_wallet_address( + TariAddress::from_hex(request.wallet_payment_address.as_str()) + .map_err(Error::TariAddress)?, + ) + .build()) } async fn blocks(&self, from_height: u64) -> ShareChainResult> { let blocks_read_lock = self.blocks.read().await; - Ok( - blocks_read_lock.iter() - .filter(|block| block.height() > from_height).cloned() - .collect() - ) + Ok(blocks_read_lock + .iter() + .filter(|block| block.height() > from_height) + .cloned() + .collect()) } async fn validate_block(&self, block: &Block) -> ShareChainResult { @@ -205,4 +211,4 @@ impl ShareChain for InMemoryShareChain { let last_block = blocks_read_lock.last().ok_or_else(|| Error::Empty)?; self.validate_block(last_block, block).await } -} \ No newline at end of file +} diff --git a/src/sharechain/mod.rs b/src/sharechain/mod.rs index 54abb321..91832d1d 100644 --- a/src/sharechain/mod.rs +++ b/src/sharechain/mod.rs @@ -8,9 +8,9 @@ pub const MAX_BLOCKS_COUNT: usize = 80; pub const SHARE_COUNT: u64 = 100; -pub mod in_memory; pub mod block; pub mod error; +pub mod in_memory; pub type ShareChainResult = Result; @@ -21,7 +21,7 @@ pub trait ShareChain { /// Add multiple blocks at once. 
/// While this operation runs, no other blocks can be added until it's done. - async fn submit_blocks(&self, blocks: Vec) -> ShareChainResult<()>; + async fn submit_blocks(&self, blocks: Vec, sync: bool) -> ShareChainResult<()>; /// Returns the tip of height in chain. async fn tip_height(&self) -> ShareChainResult; @@ -37,4 +37,4 @@ pub trait ShareChain { /// Validates a block. async fn validate_block(&self, block: &Block) -> ShareChainResult; -} \ No newline at end of file +} From 9f1268bb17b2de08c4eefbc42acdf9429d5b0deb Mon Sep 17 00:00:00 2001 From: richardb Date: Fri, 28 Jun 2024 11:45:27 +0200 Subject: [PATCH 33/43] added mining_enabled flag --- src/main.rs | 9 ++++++++ src/server/config.rs | 7 ++++++ src/server/server.rs | 55 +++++++++++++++++++++++++------------------- 3 files changed, 47 insertions(+), 24 deletions(-) diff --git a/src/main.rs b/src/main.rs index 619026a7..66c0937c 100644 --- a/src/main.rs +++ b/src/main.rs @@ -65,6 +65,14 @@ struct Cli { default_value = "." )] private_key_folder: PathBuf, + + /// Mining enabled + /// + /// In case it is set to false, the node will only handle p2p operations, + /// will be syncing with share chain, but not starting gRPC services and no Tari base node needed. + /// By setting this to, false it can be used as a stable node for routing only. 
+ #[arg(long, value_name = "mining-enabled", default_value_t = true)] + mining_enabled: bool, } #[tokio::main] @@ -84,6 +92,7 @@ async fn main() -> anyhow::Result<()> { } config_builder.with_stable_peer(cli.stable_peer); config_builder.with_private_key_folder(cli.private_key_folder); + config_builder.with_mining_enabled(cli.mining_enabled); // server start let config = config_builder.build(); diff --git a/src/server/config.rs b/src/server/config.rs index eb7a61d9..f98cf29b 100644 --- a/src/server/config.rs +++ b/src/server/config.rs @@ -13,6 +13,7 @@ pub struct Config { pub idle_connection_timeout: Duration, pub peer_store: PeerStoreConfig, pub p2p_service: p2p::Config, + pub mining_enabled: bool, } impl Default for Config { @@ -24,6 +25,7 @@ impl Default for Config { idle_connection_timeout: Duration::from_secs(30), peer_store: PeerStoreConfig::default(), p2p_service: p2p::Config::default(), + mining_enabled: true, } } } @@ -82,6 +84,11 @@ impl ConfigBuilder { self } + pub fn with_mining_enabled(&mut self, config: bool) -> &mut Self { + self.config.mining_enabled = config; + self + } + pub fn build(&self) -> Config { self.config.clone() } diff --git a/src/server/server.rs b/src/server/server.rs index e0d5f3f5..a6328d55 100644 --- a/src/server/server.rs +++ b/src/server/server.rs @@ -32,8 +32,8 @@ where { config: config::Config, p2p_service: p2p::Service, - base_node_grpc_service: BaseNodeServer, - p2pool_grpc_service: ShaP2PoolServer>, + base_node_grpc_service: Option>, + p2pool_grpc_service: Option>>, } // TODO: add graceful shutdown @@ -51,19 +51,23 @@ where .await .map_err(Error::P2PService)?; - let base_node_grpc_service = TariBaseNodeGrpc::new(config.base_node_address.clone()) + let mut base_node_grpc_server = None; + let mut p2pool_server = None; + if config.mining_enabled { + let base_node_grpc_service = TariBaseNodeGrpc::new(config.base_node_address.clone()) + .await + .map_err(Error::Grpc)?; + base_node_grpc_server = 
Some(BaseNodeServer::new(base_node_grpc_service)); + + let p2pool_grpc_service = ShaP2PoolGrpc::new( + config.base_node_address.clone(), + p2p_service.client(), + share_chain.clone(), + ) .await .map_err(Error::Grpc)?; - let base_node_grpc_server = BaseNodeServer::new(base_node_grpc_service); - - let p2pool_grpc_service = ShaP2PoolGrpc::new( - config.base_node_address.clone(), - p2p_service.client(), - share_chain.clone(), - ) - .await - .map_err(Error::Grpc)?; - let p2pool_server = ShaP2PoolServer::new(p2pool_grpc_service); + p2pool_server = Some(ShaP2PoolServer::new(p2pool_grpc_service)); + } Ok(Self { config, @@ -101,18 +105,21 @@ where pub async fn start(&mut self) -> Result<(), Error> { info!(target: LOG_TARGET, "⛏ Starting Tari SHA-3 mining P2Pool..."); - // local base node and p2pool node grpc services - let base_node_grpc_service = self.base_node_grpc_service.clone(); - let p2pool_grpc_service = self.p2pool_grpc_service.clone(); - let grpc_port = self.config.grpc_port; - tokio::spawn(async move { - match Self::start_grpc(base_node_grpc_service, p2pool_grpc_service, grpc_port).await { - Ok(_) => {} - Err(error) => { - error!(target: LOG_TARGET, "GRPC Server encountered an error: {:?}", error); + if self.config.mining_enabled { + // local base node and p2pool node grpc services + let base_node_grpc_service = self.base_node_grpc_service.clone().unwrap(); + let p2pool_grpc_service = self.p2pool_grpc_service.clone().unwrap(); + let grpc_port = self.config.grpc_port; + tokio::spawn(async move { + match Self::start_grpc(base_node_grpc_service, p2pool_grpc_service, grpc_port).await + { + Ok(_) => {} + Err(error) => { + error!(target: LOG_TARGET, "GRPC Server encountered an error: {:?}", error); + } } - } - }); + }); + } self.p2p_service.start().await.map_err(Error::P2PService) } From 4a6d9c985db808949ba3ffb02855cbe2e744ebd8 Mon Sep 17 00:00:00 2001 From: richardb Date: Tue, 2 Jul 2024 07:29:37 +0200 Subject: [PATCH 34/43] Added ubuntu dependencies bash --- 
scripts/install_ubuntu_dependencies.sh | 27 ++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) create mode 100755 scripts/install_ubuntu_dependencies.sh diff --git a/scripts/install_ubuntu_dependencies.sh b/scripts/install_ubuntu_dependencies.sh new file mode 100755 index 00000000..b326d220 --- /dev/null +++ b/scripts/install_ubuntu_dependencies.sh @@ -0,0 +1,27 @@ +apt-get install --no-install-recommends --assume-yes \ + apt-transport-https \ + ca-certificates \ + curl \ + gpg \ + bash \ + less \ + openssl \ + libssl-dev \ + pkg-config \ + libsqlite3-dev \ + libsqlite3-0 \ + libreadline-dev \ + git \ + cmake \ + dh-autoreconf \ + clang \ + g++ \ + libc++-dev \ + libc++abi-dev \ + libprotobuf-dev \ + protobuf-compiler \ + libncurses5-dev \ + libncursesw5-dev \ + libudev-dev \ + libhidapi-dev \ + zip From ac76e6f2d2337b88a8ef6f4592037e048a11872b Mon Sep 17 00:00:00 2001 From: richardb Date: Tue, 2 Jul 2024 07:42:21 +0200 Subject: [PATCH 35/43] Remove unneeded deps + added Cargo.lock --- .gitignore | 4 - Cargo.lock | 5868 ++++++++++++++++++++++++++++++++++++++++++++++++++++ Cargo.toml | 3 - 3 files changed, 5868 insertions(+), 7 deletions(-) create mode 100644 Cargo.lock diff --git a/.gitignore b/.gitignore index 75ae65bb..1560a6fd 100644 --- a/.gitignore +++ b/.gitignore @@ -3,10 +3,6 @@ debug/ target/ -# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries -# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html -Cargo.lock - # These are backup files generated by rustfmt **/*.rs.bk diff --git a/Cargo.lock b/Cargo.lock new file mode 100644 index 00000000..dd8cfb7b --- /dev/null +++ b/Cargo.lock @@ -0,0 +1,5868 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "addr2line" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + +[[package]] +name = "aead" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d122413f284cf2d62fb1b7db97e02edb8cda96d769b16e443a4f6195e35662b0" +dependencies = [ + "crypto-common", + "generic-array", +] + +[[package]] +name = "aes" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" +dependencies = [ + "cfg-if", + "cipher 0.4.4", + "cpufeatures", +] + +[[package]] +name = "aes-gcm" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "831010a0f742e1209b3bcea8fab6a8e149051ba6099432c8cb2cc117dec3ead1" +dependencies = [ + "aead", + "aes", + "cipher 0.4.4", + "ctr", + "ghash", + "subtle", +] + +[[package]] +name = "ahash" +version = "0.8.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" +dependencies = [ + "cfg-if", + "once_cell", + "version_check", + "zerocopy", +] + +[[package]] +name = "aho-corasick" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +dependencies = [ + "memchr", +] + +[[package]] +name = "allocator-api2" +version = "0.2.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" + +[[package]] +name = "android-tzdata" 
+version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "anstream" +version = "0.6.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "418c75fa768af9c03be99d17643f93f79bbba589895012a80e3452a19ddda15b" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is_terminal_polyfill", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "038dfcf04a5feb68e9c60b21c9625a54c2c0616e79b72b0fd87075a056ae1d1b" + +[[package]] +name = "anstyle-parse" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c03a11a9034d92058ceb6ee011ce58af4a9bf61491aa7e1e59ecd24bd40d22d4" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad186efb764318d35165f1758e7dcef3b10628e26d41a44bc5550652e6804391" +dependencies = [ + "windows-sys 0.52.0", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61a38449feb7068f52bb06c12759005cf459ee52bb4adc1d5a7c4322d716fb19" +dependencies = [ + "anstyle", + "windows-sys 0.52.0", +] + +[[package]] +name = "anyhow" +version = "1.0.86" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" + +[[package]] +name = "arc-swap" +version = "1.7.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457" + +[[package]] +name = "argon2" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db4ce4441f99dbd377ca8a8f57b698c44d0d6e712d8329b5040da5a64aa1ce73" +dependencies = [ + "base64ct", + "blake2", + "password-hash", +] + +[[package]] +name = "arrayref" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b4930d2cb77ce62f89ee5d5289b4ac049559b1c45539271f5ed4fdc7db34545" + +[[package]] +name = "arrayvec" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" + +[[package]] +name = "asn1-rs" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22ad1373757efa0f70ec53939aabc7152e1591cb485208052993070ac8d2429d" +dependencies = [ + "asn1-rs-derive", + "asn1-rs-impl", + "displaydoc", + "nom", + "num-traits", + "rusticata-macros", + "thiserror", + "time", +] + +[[package]] +name = "asn1-rs-derive" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7378575ff571966e99a744addeff0bff98b8ada0dedf1956d59e634db95eaac1" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.68", + "synstructure 0.13.1", +] + +[[package]] +name = "asn1-rs-impl" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b18050c2cd6fe86c3a76584ef5e0baf286d038cda203eb6223df2cc413565f7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.68", +] + +[[package]] +name = "async-io" +version = "2.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d6baa8f0178795da0e71bc42c9e5d13261aac7ee549853162e66a241ba17964" +dependencies = [ + "async-lock", + "cfg-if", + "concurrent-queue", + "futures-io", + "futures-lite", + 
"parking", + "polling", + "rustix", + "slab", + "tracing", + "windows-sys 0.52.0", +] + +[[package]] +name = "async-lock" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff6e472cdea888a4bd64f342f09b3f50e1886d32afe8df3d663c01140b811b18" +dependencies = [ + "event-listener", + "event-listener-strategy", + "pin-project-lite", +] + +[[package]] +name = "async-stream" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd56dd203fef61ac097dd65721a419ddccb106b2d2b70ba60a6b529f03961a51" +dependencies = [ + "async-stream-impl", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-stream-impl" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.68", +] + +[[package]] +name = "async-trait" +version = "0.1.80" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6fa2087f2753a7da8cc1c0dbfcf89579dd57458e36769de5ac750b4671737ca" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.68", +] + +[[package]] +name = "asynchronous-codec" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a860072022177f903e59730004fb5dc13db9275b79bb2aef7ba8ce831956c233" +dependencies = [ + "bytes 1.6.0", + "futures-sink", + "futures-util", + "memchr", + "pin-project-lite", +] + +[[package]] +name = "attohttpc" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d9a9bf8b79a749ee0b911b91b671cc2b6c670bdbc7e3dfd537576ddc94bb2a2" +dependencies = [ + "http", + "log", + "url", +] + +[[package]] +name = "autocfg" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" + +[[package]] +name = "axum" 
+version = "0.6.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf" +dependencies = [ + "async-trait", + "axum-core", + "bitflags 1.3.2", + "bytes 1.6.0", + "futures-util", + "http", + "http-body", + "hyper", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "sync_wrapper", + "tower", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum-core" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" +dependencies = [ + "async-trait", + "bytes 1.6.0", + "futures-util", + "http", + "http-body", + "mime", + "rustversion", + "tower-layer", + "tower-service", +] + +[[package]] +name = "backtrace" +version = "0.3.73" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5cc23269a4f8976d0a4d2e7109211a419fe30e8d88d677cd60b6bc79c5732e0a" +dependencies = [ + "addr2line", + "cc", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", +] + +[[package]] +name = "base-x" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cbbc9d0964165b47557570cce6c952866c2678457aca742aafc9fb771d30270" + +[[package]] +name = "base58-monero" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "935c90240f9b7749c80746bf88ad9cb346f34b01ee30ad4d566dfdecd6e3cc6a" +dependencies = [ + "thiserror", +] + +[[package]] +name = "base58-monero" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "978e81a45367d2409ecd33369a45dda2e9a3ca516153ec194de1fbda4b9fb79d" +dependencies = [ + "thiserror", +] + +[[package]] +name = "base64" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + +[[package]] +name = "base64" +version = "0.21.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "base64ct" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" + +[[package]] +name = "bigdecimal" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d712318a27c7150326677b321a5fa91b55f6d9034ffd67f20319e147d40cee" +dependencies = [ + "autocfg", + "libm", + "num-bigint", + "num-integer", + "num-traits", +] + +[[package]] +name = "bincode" +version = "1.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" +dependencies = [ + "serde", +] + +[[package]] +name = "bitflags" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4efd02e230a02e18f92fc2735f44597385ed02ad8f831e7c1c1156ee5e1ab3a5" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" +dependencies = [ + "serde", +] + +[[package]] +name = "bitstring" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"22a39c0db600cfe77ef1b6d9ea71173bdccf190722877969d526d380519b6ecc" + +[[package]] +name = "bitvec" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" +dependencies = [ + "funty", + "radium", + "tap", + "wyz", +] + +[[package]] +name = "blake2" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe" +dependencies = [ + "digest", +] + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "borsh" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6362ed55def622cddc70a4746a68554d7b687713770de539e59a739b249f8ed" +dependencies = [ + "borsh-derive", + "cfg_aliases", +] + +[[package]] +name = "borsh-derive" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3ef8005764f53cd4dca619f5bf64cafd4664dada50ece25e4d81de54c80cc0b" +dependencies = [ + "once_cell", + "proc-macro-crate 3.1.0", + "proc-macro2", + "quote", + "syn 2.0.68", + "syn_derive", +] + +[[package]] +name = "bs58" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "771fe0050b883fcc3ea2359b1a96bcfbc090b7116eae7c3c512c7a083fdf23d3" + +[[package]] +name = "bs58" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf88ba1141d185c399bee5288d850d63b8369520c1eafc32a0430b5b6c287bf4" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "bumpalo" +version = "3.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" + 
+[[package]] +name = "byte-slice-cast" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3ac9f8b63eca6fd385229b3675f6cc0dc5c8a5c8a54a59d4f52ffd670d87b0c" + +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + +[[package]] +name = "bytes" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e4cec68f03f32e44924783795810fa50a7035d8c8ebe78580ad7e6c703fba38" + +[[package]] +name = "bytes" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "514de17de45fdb8dc022b1a7975556c53c86f9f0aa5f534b98977b171857c2c9" +dependencies = [ + "serde", +] + +[[package]] +name = "cbor4ii" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59b4c883b9cc4757b061600d39001d4d0232bece4a3174696cf8f58a14db107d" +dependencies = [ + "serde", +] + +[[package]] +name = "cc" +version = "1.0.104" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74b6a57f98764a267ff415d50a25e6e166f3831a5071af4995296ea97d210490" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "cfg_aliases" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + +[[package]] +name = "chacha20" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f08493fa7707effc63254c66c6ea908675912493cd67952eda23c09fae2610b1" +dependencies = [ + "cfg-if", + "cipher 0.3.0", + "cpufeatures", +] + +[[package]] +name = "chacha20" +version = "0.9.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3613f74bd2eac03dad61bd53dbe620703d4371614fe0bc3b9f04dd36fe4e818" +dependencies = [ + "cfg-if", + "cipher 0.4.4", + "cpufeatures", +] + +[[package]] +name = "chacha20poly1305" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10cd79432192d1c0f4e1a0fef9527696cc039165d729fb41b3f4f4f354c2dc35" +dependencies = [ + "aead", + "chacha20 0.9.1", + "cipher 0.4.4", + "poly1305", + "zeroize", +] + +[[package]] +name = "chrono" +version = "0.4.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" +dependencies = [ + "android-tzdata", + "iana-time-zone", + "num-traits", + "serde", + "windows-targets 0.52.5", +] + +[[package]] +name = "cidr" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6316c62053228eddd526a5e6deb6344c80bf2bc1e9786e7f90b3083e73197c1" +dependencies = [ + "bitstring", + "serde", +] + +[[package]] +name = "cipher" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ee52072ec15386f770805afd189a01c8841be8696bed250fa2f13c4c0d6dfb7" +dependencies = [ + "generic-array", +] + +[[package]] +name = "cipher" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" +dependencies = [ + "crypto-common", + "inout", + "zeroize", +] + +[[package]] +name = "clap" +version = "2.34.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c" +dependencies = [ + "bitflags 1.3.2", + "textwrap", + "unicode-width", +] + +[[package]] +name = "clap" +version = "4.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"84b3edb18336f4df585bc9aa31dd99c036dfa5dc5e9a2939a722a188f3a8970d" +dependencies = [ + "clap_builder", + "clap_derive", +] + +[[package]] +name = "clap_builder" +version = "4.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1c09dd5ada6c6c78075d6fd0da3f90d8080651e2d6cc8eb2f1aaa4034ced708" +dependencies = [ + "anstream", + "anstyle", + "clap_lex", + "strsim", +] + +[[package]] +name = "clap_derive" +version = "4.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bac35c6dafb060fd4d275d9a4ffae97917c13a6327903a8be2153cd964f7085" +dependencies = [ + "heck 0.5.0", + "proc-macro2", + "quote", + "syn 2.0.68", +] + +[[package]] +name = "clap_lex" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b82cf0babdbd58558212896d1a4272303a57bdb245c2bf1147185fb45640e70" + +[[package]] +name = "colorchoice" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b6a852b24ab71dffc585bcb46eaf7959d175cb865a7152e35b348d1b2960422" + +[[package]] +name = "concurrent-queue" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "config" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7328b20597b53c2454f0b1919720c25c7339051c02b72b7e05409e00b14132be" +dependencies = [ + "lazy_static", + "nom", + "pathdiff", + "serde", + "toml 0.8.14", +] + +[[package]] +name = "const-oid" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" + +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" + +[[package]] +name = "core2" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b49ba7ef1ad6107f8824dbe97de947cbaac53c44e7f9756a1fba0d37c1eec505" +dependencies = [ + "memchr", +] + +[[package]] +name = "cpufeatures" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504" +dependencies = [ + "libc", +] + +[[package]] +name = "crc32fast" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "critical-section" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7059fff8937831a9ae6f0fe4d658ffabf58f2ca96aa9dec1c889f936f705f216" + +[[package]] +name = "crossbeam-channel" +version = "0.5.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" + +[[package]] +name = "crunchy" +version = "0.2.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "rand_core", + "typenum", +] + +[[package]] +name = "ctr" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0369ee1ad671834580515889b80f2ea915f23b8be8d0daa4bbaf2ac5c7590835" +dependencies = [ + "cipher 0.4.4", +] + +[[package]] +name = "curve25519-dalek" +version = "4.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97fb8b7c4503de7d6ae7b42ab72a5a59857b4c937ec27a3d4539dba95b5ab2be" +dependencies = [ + "cfg-if", + "cpufeatures", + "curve25519-dalek-derive", + "digest", + "fiat-crypto", + "group", + "rand_core", + "rustc_version", + "serde", + "subtle", + "zeroize", +] + +[[package]] +name = "curve25519-dalek-derive" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.68", +] + +[[package]] +name = "darling" +version = "0.20.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83b2eb4d90d12bdda5ed17de686c2acb4c57914f8f921b8da7e112b5a36f3fe1" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.20.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "622687fe0bac72a04e5599029151f5796111b90f1baaa9b544d807a5e31cd120" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn 2.0.68", +] + +[[package]] +name = "darling_macro" +version = "0.20.9" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "733cabb43482b1a1b53eee8583c2b9e8684d592215ea83efd305dd31bc2f0178" +dependencies = [ + "darling_core", + "quote", + "syn 2.0.68", +] + +[[package]] +name = "data-encoding" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8566979429cf69b49a5c740c60791108e86440e8be149bbea4fe54d2c32d6e2" + +[[package]] +name = "data-encoding-macro" +version = "0.1.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1559b6cba622276d6d63706db152618eeb15b89b3e4041446b05876e352e639" +dependencies = [ + "data-encoding", + "data-encoding-macro-internal", +] + +[[package]] +name = "data-encoding-macro-internal" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "332d754c0af53bc87c108fed664d121ecf59207ec4196041f04d6ab9002ad33f" +dependencies = [ + "data-encoding", + "syn 1.0.109", +] + +[[package]] +name = "decimal-rs" +version = "0.1.43" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da0ad9d041ab836f528b91b4f4039feda1091adbef4d85850eac6b3d2f9cd6f3" +dependencies = [ + "stack-buf", +] + +[[package]] +name = "der" +version = "0.7.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f55bf8e7b65898637379c1b74eb1551107c8294ed26d855ceb9fd1a09cfc9bc0" +dependencies = [ + "const-oid", + "zeroize", +] + +[[package]] +name = "der-parser" +version = "9.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5cd0a5c643689626bec213c4d8bd4d96acc8ffdb4ad4bb6bc16abf27d5f4b553" +dependencies = [ + "asn1-rs", + "displaydoc", + "nom", + "num-bigint", + "num-traits", + "rusticata-macros", +] + +[[package]] +name = "deranged" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" +dependencies = [ + "powerfmt", +] + +[[package]] +name = "derivative" +version = "2.2.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "destructure_traitobject" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c877555693c14d2f84191cfd3ad8582790fc52b5e2274b40b59cf5f5cea25c7" + +[[package]] +name = "diesel" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62d6dcd069e7b5fe49a302411f759d4cf1cf2c27fe798ef46fb8baefc053dd2b" +dependencies = [ + "bigdecimal", + "chrono", + "diesel_derives", + "libsqlite3-sys", + "num-bigint", + "num-integer", + "num-traits", + "r2d2", + "serde_json", + "time", +] + +[[package]] +name = "diesel_derives" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59de76a222c2b8059f789cbe07afbfd8deb8c31dd0bc2a21f85e256c1def8259" +dependencies = [ + "diesel_table_macro_syntax", + "dsl_auto_type", + "proc-macro2", + "quote", + "syn 2.0.68", +] + +[[package]] +name = "diesel_migrations" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a73ce704bad4231f001bff3314d91dce4aba0770cee8b233991859abc15c1f6" +dependencies = [ + "diesel", + "migrations_internals", + "migrations_macros", +] + +[[package]] +name = "diesel_table_macro_syntax" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "209c735641a413bc68c4923a9d6ad4bcb3ca306b794edaa7eb0b3228a99ffb25" +dependencies = [ + "syn 2.0.68", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "crypto-common", + "subtle", +] + +[[package]] +name = "dirs-next" +version = "1.0.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf36e65a80337bea855cd4ef9b8401ffce06a7baedf2e85ec467b1ac3f6e82b6" +dependencies = [ + "cfg-if", + "dirs-sys-next", +] + +[[package]] +name = "dirs-sys-next" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" +dependencies = [ + "libc", + "redox_users", + "winapi", +] + +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.68", +] + +[[package]] +name = "doc-comment" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" + +[[package]] +name = "dsl_auto_type" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0892a17df262a24294c382f0d5997571006e7a4348b4327557c4ff1cd4a8bccc" +dependencies = [ + "darling", + "either", + "heck 0.5.0", + "proc-macro2", + "quote", + "syn 2.0.68", +] + +[[package]] +name = "dtoa" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcbb2bf8e87535c23f7a8a321e364ce21462d0ff10cb6407820e8e96dfff6653" + +[[package]] +name = "ed25519" +version = "2.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53" +dependencies = [ + "pkcs8", + "signature", +] + +[[package]] +name = "ed25519-dalek" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a3daa8e81a3963a60642bcc1f90a670680bd4a77535faa384e9d1c79d620871" +dependencies = [ + "curve25519-dalek", + "ed25519", + "rand_core", + "serde", + "sha2", + "subtle", + "zeroize", +] + +[[package]] 
+name = "either" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" + +[[package]] +name = "endian-type" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c34f04666d835ff5d62e058c3995147c06f42fe86ff053337632bca83e42702d" + +[[package]] +name = "enum-as-inner" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21cdad81446a7f7dc43f6a77409efeb9733d2fa65553efef6018ef257c959b73" +dependencies = [ + "heck 0.4.1", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "enum-as-inner" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ffccbb6966c05b32ef8fbac435df276c4ae4d3dc55a8cd0eb9745e6c12f546a" +dependencies = [ + "heck 0.4.1", + "proc-macro2", + "quote", + "syn 2.0.68", +] + +[[package]] +name = "env_filter" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a009aa4810eb158359dda09d0c87378e4bbb89b5a801f016885a4707ba24f7ea" +dependencies = [ + "log", + "regex", +] + +[[package]] +name = "env_logger" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38b35839ba51819680ba087cd351788c9a3c476841207e0b8cee0b04722343b9" +dependencies = [ + "anstream", + "anstyle", + "env_filter", + "humantime", + "log", +] + +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + +[[package]] +name = "errno" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "event-listener" +version = "5.3.1" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6032be9bd27023a771701cc49f9f053c751055f71efb2e0ae5c15809093675ba" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite", +] + +[[package]] +name = "event-listener-strategy" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f214dc438f977e6d4e3500aaa277f5ad94ca83fbbd9b1a15713ce2344ccc5a1" +dependencies = [ + "event-listener", + "pin-project-lite", +] + +[[package]] +name = "fastrand" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fc0510504f03c51ada170672ac806f1f105a88aa97a5281117e1ddc3368e51a" + +[[package]] +name = "ff" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ded41244b729663b1e574f1b4fb731469f69f79c17667b5d776b16cda0479449" +dependencies = [ + "rand_core", + "subtle", +] + +[[package]] +name = "fiat-crypto" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" + +[[package]] +name = "fixed-hash" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "835c052cb0c08c1acf6ffd71c022172e18723949c8282f2b9f27efbc51e64534" +dependencies = [ + "byteorder", + "rand", + "rustc-hex", + "static_assertions", +] + +[[package]] +name = "fixedbitset" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "form_urlencoded" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "fs2" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "funty" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" + +[[package]] +name = "futures" +version = "0.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a471a38ef8ed83cd6e40aa59c1ffe17db6855c18e3604d9c4ed8c08ebc28678" + +[[package]] +name = "futures" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-bounded" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91f328e7fb845fc832912fb6a34f40cf6d1888c92f974d1893a54e97b5ff542e" +dependencies = [ + "futures-timer", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" + +[[package]] +name = "futures-executor" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", + "num_cpus", +] + +[[package]] +name = "futures-io" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" + +[[package]] +name = "futures-lite" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52527eb5074e35e9339c6b4e8d12600c7128b68fb25dcb9fa9dec18f7c25f3a5" +dependencies = [ + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "futures-macro" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.68", +] + +[[package]] +name = "futures-rustls" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f2f12607f92c69b12ed746fabf9ca4f5c482cba46679c1a75b874ed7c26adb" +dependencies = [ + "futures-io", + "rustls 0.23.10", + "rustls-pki-types", +] + +[[package]] +name = "futures-sink" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" + +[[package]] +name = "futures-task" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" + +[[package]] +name = "futures-ticker" +version = "0.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9763058047f713632a52e916cc7f6a4b3fc6e9fc1ff8c5b1dc49e5a89041682e" +dependencies = [ + "futures 0.3.30", + "futures-timer", + "instant", +] + +[[package]] +name = "futures-timer" +version = "3.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" + +[[package]] +name = "futures-util" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" +dependencies = [ + "futures 0.1.31", + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "gcc" +version = "0.3.55" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f5f3913fa0bfe7ee1fd8248b6b9f42a5af4b9d65ec2dd2c3c26132b950ecfc2" + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "ghash" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0d8a4362ccb29cb0b265253fb0a2728f592895ee6854fd9bc13f2ffda266ff1" +dependencies = [ + "opaque-debug", + "polyval", +] + +[[package]] +name = "gimli" +version = "0.29.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" + +[[package]] +name = "group" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" +dependencies = [ + "ff", + "rand_core", + "subtle", +] + +[[package]] +name = "h2" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" +dependencies = [ + "bytes 1.6.0", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http", + "indexmap 2.2.6", + "slab", + "tokio", + "tokio-util 0.7.11", + "tracing", +] + +[[package]] +name = "half" +version = "1.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b43ede17f21864e81be2fa654110bf1e793774238d86ef8555c37e6519c0403" + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + +[[package]] +name = "hashbrown" +version = "0.14.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" +dependencies = [ + "ahash", + "allocator-api2", +] + +[[package]] +name = "hdrhistogram" +version = "7.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "765c9198f173dd59ce26ff9f95ef0aafd0a0fe01fb9d72841bc5066a4c06511d" +dependencies = [ + "byteorder", + "num-traits", +] + +[[package]] +name = "heck" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "hermit-abi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" + 
+[[package]] +name = "hermit-abi" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "hex-literal" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fe2267d4ed49bc07b63801559be28c718ea06c4738b7a03c94df7386d2cde46" + +[[package]] +name = "hex_fmt" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b07f60793ff0a4d9cef0f18e63b5357e06209987153a64648c972c1e5aff336f" + +[[package]] +name = "hickory-proto" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07698b8420e2f0d6447a436ba999ec85d8fbf2a398bbd737b82cac4a2e96e512" +dependencies = [ + "async-trait", + "cfg-if", + "data-encoding", + "enum-as-inner 0.6.0", + "futures-channel", + "futures-io", + "futures-util", + "idna 0.4.0", + "ipnet", + "once_cell", + "rand", + "socket2", + "thiserror", + "tinyvec", + "tokio", + "tracing", + "url", +] + +[[package]] +name = "hickory-resolver" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28757f23aa75c98f254cf0405e6d8c25b831b32921b050a66692427679b1f243" +dependencies = [ + "cfg-if", + "futures-util", + "hickory-proto", + "ipconfig", + "lru-cache", + "once_cell", + "parking_lot", + "rand", + "resolv-conf", + "smallvec", + "thiserror", + "tokio", + "tracing", +] + +[[package]] +name = "hkdf" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" +dependencies = [ + "hmac", +] + +[[package]] +name = "hmac" +version = "0.12.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest", +] + +[[package]] +name = "home" +version = "0.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" +dependencies = [ + "windows-sys 0.52.0", +] + +[[package]] +name = "hostname" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c731c3e10504cc8ed35cfe2f1db4c9274c3d35fa486e3b31df46f068ef3e867" +dependencies = [ + "libc", + "match_cfg", + "winapi", +] + +[[package]] +name = "http" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" +dependencies = [ + "bytes 1.6.0", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" +dependencies = [ + "bytes 1.6.0", + "http", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fcc0b4a115bf80b728eb8ea024ad5bd707b615bfed49e0665b6e0f86fd082d9" + +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "humantime" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" + +[[package]] +name = "hyper" +version = "0.14.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f361cde2f109281a220d4307746cdfd5ee3f410da58a70377762396775634b33" +dependencies = [ + 
"bytes 1.6.0", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "socket2", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper-timeout" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" +dependencies = [ + "hyper", + "pin-project-lite", + "tokio", + "tokio-io-timeout", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.60" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "windows-core 0.52.0", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "idna" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8" +dependencies = [ + "matches", + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "idna" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "idna" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "if-addrs" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cabb0019d51a643781ff15c9c8a3e5dedc365c47211270f4e8f82812fedd8f0a" +dependencies = [ + "libc", + "windows-sys 0.48.0", +] + +[[package]] +name = "if-watch" +version = "3.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6b0422c86d7ce0e97169cc42e04ae643caf278874a7a3c87b8150a220dc7e1e" +dependencies = [ + "async-io", + "core-foundation", + "fnv", + "futures 0.3.30", + "if-addrs", + "ipnet", + "log", + "rtnetlink", + "system-configuration", + "tokio", + "windows", +] + +[[package]] +name = "igd-next" +version = "0.14.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "064d90fec10d541084e7b39ead8875a5a80d9114a2b18791565253bae25f49e4" +dependencies = [ + "async-trait", + "attohttpc", + "bytes 1.6.0", + "futures 0.3.30", + "http", + "hyper", + "log", + "rand", + "tokio", + "url", + "xmltree", +] + +[[package]] +name = "impl-codec" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba6a270039626615617f3f36d15fc827041df3b78c439da2cadfa47455a77f2f" +dependencies = [ + "parity-scale-codec", +] + +[[package]] +name = "impl-serde" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc88fc67028ae3db0c853baa36269d398d5f45b6982f95549ff5def78c935cd" +dependencies = [ + "serde", +] + +[[package]] +name = "impl-trait-for-tuples" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "indexmap" +version = "1.9.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", +] + +[[package]] +name = "indexmap" +version = "2.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" +dependencies = [ + "equivalent", + "hashbrown 0.14.5", +] + +[[package]] +name = "inout" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" +dependencies = [ + "generic-array", +] + +[[package]] +name = "instant" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "integer-encoding" +version = "3.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8bb03732005da905c88227371639bf1ad885cc712789c011c31c5fb3ab3ccf02" + +[[package]] +name = "ipconfig" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b58db92f96b720de98181bbbe63c831e87005ab460c1bf306eb2622b4707997f" +dependencies = [ + "socket2", + "widestring", + "windows-sys 0.48.0", + "winreg", +] + +[[package]] +name = "ipnet" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" + +[[package]] +name = "is_terminal_polyfill" +version = "1.70.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8478577c03552c21db0e2724ffb8986a5ce7af88107e6be5d2ee6e158c12800" + +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + +[[package]] +name = "itertools" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" + +[[package]] +name = "js-sys" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "keccak" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654" +dependencies = [ + "cpufeatures", +] + +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" + +[[package]] +name = "libc" +version = "0.2.155" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" + +[[package]] +name = "liblmdb-sys" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "feed38a3a580f60bf61aaa067b0ff4123395966839adeaf67258a9e50c4d2e49" +dependencies = [ + "gcc", + "libc", +] + +[[package]] +name = "libm" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" + +[[package]] +name = "libp2p" +version = "0.53.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"681fb3f183edfbedd7a57d32ebe5dcdc0b9f94061185acf3c30249349cc6fc99" +dependencies = [ + "bytes 1.6.0", + "either", + "futures 0.3.30", + "futures-timer", + "getrandom", + "instant", + "libp2p-allow-block-list", + "libp2p-connection-limits", + "libp2p-core", + "libp2p-dns", + "libp2p-gossipsub", + "libp2p-identify", + "libp2p-identity", + "libp2p-kad", + "libp2p-mdns", + "libp2p-metrics", + "libp2p-noise", + "libp2p-quic", + "libp2p-request-response", + "libp2p-swarm", + "libp2p-tcp", + "libp2p-upnp", + "libp2p-yamux", + "multiaddr 0.18.1", + "pin-project 1.1.5", + "rw-stream-sink", + "thiserror", +] + +[[package]] +name = "libp2p-allow-block-list" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "107b238b794cb83ab53b74ad5dcf7cca3200899b72fe662840cfb52f5b0a32e6" +dependencies = [ + "libp2p-core", + "libp2p-identity", + "libp2p-swarm", + "void", +] + +[[package]] +name = "libp2p-connection-limits" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7cd50a78ccfada14de94cbacd3ce4b0138157f376870f13d3a8422cd075b4fd" +dependencies = [ + "libp2p-core", + "libp2p-identity", + "libp2p-swarm", + "void", +] + +[[package]] +name = "libp2p-core" +version = "0.41.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8130a8269e65a2554d55131c770bdf4bcd94d2b8d4efb24ca23699be65066c05" +dependencies = [ + "either", + "fnv", + "futures 0.3.30", + "futures-timer", + "instant", + "libp2p-identity", + "multiaddr 0.18.1", + "multihash 0.19.1", + "multistream-select", + "once_cell", + "parking_lot", + "pin-project 1.1.5", + "quick-protobuf", + "rand", + "rw-stream-sink", + "serde", + "smallvec", + "thiserror", + "tracing", + "unsigned-varint 0.8.0", + "void", +] + +[[package]] +name = "libp2p-dns" +version = "0.41.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d17cbcf7160ff35c3e8e560de4a068fe9d6cb777ea72840e48eb76ff9576c4b6" 
+dependencies = [ + "async-trait", + "futures 0.3.30", + "hickory-resolver", + "libp2p-core", + "libp2p-identity", + "parking_lot", + "smallvec", + "tracing", +] + +[[package]] +name = "libp2p-gossipsub" +version = "0.46.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d665144a616dadebdc5fff186b1233488cdcd8bfb1223218ff084b6d052c94f7" +dependencies = [ + "asynchronous-codec", + "base64 0.21.7", + "byteorder", + "bytes 1.6.0", + "either", + "fnv", + "futures 0.3.30", + "futures-ticker", + "getrandom", + "hex_fmt", + "instant", + "libp2p-core", + "libp2p-identity", + "libp2p-swarm", + "prometheus-client", + "quick-protobuf", + "quick-protobuf-codec", + "rand", + "regex", + "serde", + "sha2", + "smallvec", + "tracing", + "void", +] + +[[package]] +name = "libp2p-identify" +version = "0.44.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5d635ebea5ca0c3c3e77d414ae9b67eccf2a822be06091b9c1a0d13029a1e2f" +dependencies = [ + "asynchronous-codec", + "either", + "futures 0.3.30", + "futures-bounded", + "futures-timer", + "libp2p-core", + "libp2p-identity", + "libp2p-swarm", + "lru", + "quick-protobuf", + "quick-protobuf-codec", + "smallvec", + "thiserror", + "tracing", + "void", +] + +[[package]] +name = "libp2p-identity" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55cca1eb2bc1fd29f099f3daaab7effd01e1a54b7c577d0ed082521034d912e8" +dependencies = [ + "bs58 0.5.1", + "ed25519-dalek", + "hkdf", + "multihash 0.19.1", + "quick-protobuf", + "rand", + "serde", + "sha2", + "thiserror", + "tracing", + "zeroize", +] + +[[package]] +name = "libp2p-kad" +version = "0.45.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5cc5767727d062c4eac74dd812c998f0e488008e82cce9c33b463d38423f9ad2" +dependencies = [ + "arrayvec", + "asynchronous-codec", + "bytes 1.6.0", + "either", + "fnv", + "futures 0.3.30", + "futures-bounded", + "futures-timer", + 
"instant", + "libp2p-core", + "libp2p-identity", + "libp2p-swarm", + "quick-protobuf", + "quick-protobuf-codec", + "rand", + "serde", + "sha2", + "smallvec", + "thiserror", + "tracing", + "uint", + "void", +] + +[[package]] +name = "libp2p-mdns" +version = "0.45.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49007d9a339b3e1d7eeebc4d67c05dbf23d300b7d091193ec2d3f26802d7faf2" +dependencies = [ + "data-encoding", + "futures 0.3.30", + "hickory-proto", + "if-watch", + "libp2p-core", + "libp2p-identity", + "libp2p-swarm", + "rand", + "smallvec", + "socket2", + "tokio", + "tracing", + "void", +] + +[[package]] +name = "libp2p-metrics" +version = "0.14.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdac91ae4f291046a3b2660c039a2830c931f84df2ee227989af92f7692d3357" +dependencies = [ + "futures 0.3.30", + "instant", + "libp2p-core", + "libp2p-gossipsub", + "libp2p-identify", + "libp2p-identity", + "libp2p-kad", + "libp2p-swarm", + "pin-project 1.1.5", + "prometheus-client", +] + +[[package]] +name = "libp2p-noise" +version = "0.44.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecd0545ce077f6ea5434bcb76e8d0fe942693b4380aaad0d34a358c2bd05793" +dependencies = [ + "asynchronous-codec", + "bytes 1.6.0", + "curve25519-dalek", + "futures 0.3.30", + "libp2p-core", + "libp2p-identity", + "multiaddr 0.18.1", + "multihash 0.19.1", + "once_cell", + "quick-protobuf", + "rand", + "sha2", + "snow", + "static_assertions", + "thiserror", + "tracing", + "x25519-dalek", + "zeroize", +] + +[[package]] +name = "libp2p-quic" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c67296ad4e092e23f92aea3d2bdb6f24eab79c0929ed816dfb460ea2f4567d2b" +dependencies = [ + "bytes 1.6.0", + "futures 0.3.30", + "futures-timer", + "if-watch", + "libp2p-core", + "libp2p-identity", + "libp2p-tls", + "parking_lot", + "quinn", + "rand", + "ring 0.17.8", + "rustls 
0.23.10", + "socket2", + "thiserror", + "tokio", + "tracing", +] + +[[package]] +name = "libp2p-request-response" +version = "0.26.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c314fe28368da5e3a262553fb0ad575c1c8934c461e10de10265551478163836" +dependencies = [ + "async-trait", + "cbor4ii", + "futures 0.3.30", + "futures-bounded", + "futures-timer", + "instant", + "libp2p-core", + "libp2p-identity", + "libp2p-swarm", + "rand", + "serde", + "serde_json", + "smallvec", + "tracing", + "void", +] + +[[package]] +name = "libp2p-swarm" +version = "0.44.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80cae6cb75f89dbca53862f9ebe0b9f463aa7b302762fcfaafb9e51dcc9b0f7e" +dependencies = [ + "either", + "fnv", + "futures 0.3.30", + "futures-timer", + "instant", + "libp2p-core", + "libp2p-identity", + "libp2p-swarm-derive", + "lru", + "multistream-select", + "once_cell", + "rand", + "smallvec", + "tokio", + "tracing", + "void", +] + +[[package]] +name = "libp2p-swarm-derive" +version = "0.34.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5daceb9dd908417b6dfcfe8e94098bc4aac54500c282e78120b885dadc09b999" +dependencies = [ + "heck 0.5.0", + "proc-macro2", + "quote", + "syn 2.0.68", +] + +[[package]] +name = "libp2p-tcp" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b2460fc2748919adff99ecbc1aab296e4579e41f374fb164149bd2c9e529d4c" +dependencies = [ + "futures 0.3.30", + "futures-timer", + "if-watch", + "libc", + "libp2p-core", + "libp2p-identity", + "socket2", + "tokio", + "tracing", +] + +[[package]] +name = "libp2p-tls" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "251b17aebdd29df7e8f80e4d94b782fae42e934c49086e1a81ba23b60a8314f2" +dependencies = [ + "futures 0.3.30", + "futures-rustls", + "libp2p-core", + "libp2p-identity", + "rcgen", + "ring 0.17.8", + "rustls 0.23.10", + 
"rustls-webpki 0.101.7", + "thiserror", + "x509-parser", + "yasna", +] + +[[package]] +name = "libp2p-upnp" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cccf04b0e3ff3de52d07d5fd6c3b061d0e7f908ffc683c32d9638caedce86fc8" +dependencies = [ + "futures 0.3.30", + "futures-timer", + "igd-next", + "libp2p-core", + "libp2p-swarm", + "tokio", + "tracing", + "void", +] + +[[package]] +name = "libp2p-yamux" +version = "0.45.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "200cbe50349a44760927d50b431d77bed79b9c0a3959de1af8d24a63434b71e5" +dependencies = [ + "either", + "futures 0.3.30", + "libp2p-core", + "thiserror", + "tracing", + "yamux 0.12.1", + "yamux 0.13.3", +] + +[[package]] +name = "libredox" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" +dependencies = [ + "bitflags 2.6.0", + "libc", +] + +[[package]] +name = "libsqlite3-sys" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c10584274047cb335c23d3e61bcef8e323adae7c5c8c760540f73610177fc3f" +dependencies = [ + "pkg-config", + "vcpkg", +] + +[[package]] +name = "linked-hash-map" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" + +[[package]] +name = "linux-raw-sys" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" + +[[package]] +name = "lmdb-zero" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13416eee745b087c22934f35f1f24da22da41ba2a5ce197143d168ce055cc58d" +dependencies = [ + "bitflags 0.9.1", + "libc", + "liblmdb-sys", + "supercow", +] + +[[package]] +name = "lock_api" +version = "0.4.12" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" +dependencies = [ + "autocfg", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" +dependencies = [ + "serde", +] + +[[package]] +name = "log-mdc" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a94d21414c1f4a51209ad204c1776a3d0765002c76c6abcb602a6f09f1e881c7" + +[[package]] +name = "log4rs" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0816135ae15bd0391cf284eab37e6e3ee0a6ee63d2ceeb659862bd8d0a984ca6" +dependencies = [ + "anyhow", + "arc-swap", + "derivative", + "fnv", + "humantime", + "log", + "once_cell", + "serde", + "serde-value", + "serde_yaml", + "thiserror", + "typemap-ors", +] + +[[package]] +name = "lru" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3262e75e648fce39813cb56ac41f3c3e3f65217ebf3844d818d1f9398cfb0dc" +dependencies = [ + "hashbrown 0.14.5", +] + +[[package]] +name = "lru-cache" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31e24f1ad8321ca0e8a1e0ac13f23cb668e6f5466c2c57319f6a5cf1cc8e3b1c" +dependencies = [ + "linked-hash-map", +] + +[[package]] +name = "match_cfg" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffbee8634e0d45d258acb448e7eaab3fce7a0a467395d4d9f228e3c1f01fb2e4" + +[[package]] +name = "matches" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" + +[[package]] +name = "matchit" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" + +[[package]] +name = "memchr" +version = "2.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" + +[[package]] +name = "merlin" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "58c38e2799fc0978b65dfff8023ec7843e2330bb462f19198840b34b6582397d" +dependencies = [ + "byteorder", + "keccak", + "rand_core", + "zeroize", +] + +[[package]] +name = "migrations_internals" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd01039851e82f8799046eabbb354056283fb265c8ec0996af940f4e85a380ff" +dependencies = [ + "serde", + "toml 0.8.14", +] + +[[package]] +name = "migrations_macros" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffb161cc72176cb37aa47f1fc520d3ef02263d67d661f44f05d05a079e1237fd" +dependencies = [ + "migrations_internals", + "proc-macro2", + "quote", +] + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + +[[package]] +name = "miniz_oxide" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8a240ddb74feaf34a79a7add65a741f3167852fba007066dcac1ca548d89c08" +dependencies = [ + "adler", +] + +[[package]] +name = "minotari_app_grpc" +version = "1.0.0-pre.14" +source = "git+https://github.com/ksrichard/tari.git?branch=p2pool#56d65428c1257cd9c750c4d1c4100e270202a6d5" +dependencies = [ + "argon2", + "base64 0.13.1", + "borsh", + "chrono", + "log", + "prost", + 
"prost-types", + "rand", + "rcgen", + "subtle", + "tari_common_types", + "tari_comms", + "tari_core", + "tari_crypto", + "tari_features", + "tari_script", + "tari_utilities", + "thiserror", + "tokio", + "tonic", + "tonic-build", + "zeroize", +] + +[[package]] +name = "minotari_node_grpc_client" +version = "0.1.0" +source = "git+https://github.com/ksrichard/tari.git?branch=p2pool#56d65428c1257cd9c750c4d1c4100e270202a6d5" +dependencies = [ + "minotari_app_grpc", +] + +[[package]] +name = "mio" +version = "0.8.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" +dependencies = [ + "libc", + "wasi", + "windows-sys 0.48.0", +] + +[[package]] +name = "moka" +version = "0.12.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e0d88686dc561d743b40de8269b26eaf0dc58781bde087b0984646602021d08" +dependencies = [ + "async-lock", + "async-trait", + "crossbeam-channel", + "crossbeam-epoch", + "crossbeam-utils", + "event-listener", + "futures-util", + "once_cell", + "parking_lot", + "quanta", + "rustc_version", + "smallvec", + "tagptr", + "thiserror", + "triomphe", + "uuid", +] + +[[package]] +name = "monero" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f25218523ad4a171ddda05251669afb788cdc2f0df94082aab856a2b09541c3f" +dependencies = [ + "base58-monero 2.0.0", + "curve25519-dalek", + "fixed-hash", + "hex", + "hex-literal", + "sealed", + "serde", + "thiserror", + "tiny-keccak", +] + +[[package]] +name = "multiaddr" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c580bfdd8803cce319b047d239559a22f809094aaea4ac13902a1fdcfcd4261" +dependencies = [ + "arrayref", + "bs58 0.4.0", + "byteorder", + "data-encoding", + "multihash 0.16.3", + "percent-encoding", + "serde", + "static_assertions", + "unsigned-varint 0.7.2", + "url", +] + +[[package]] +name = 
"multiaddr" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b852bc02a2da5feed68cd14fa50d0774b92790a5bdbfa932a813926c8472070" +dependencies = [ + "arrayref", + "byteorder", + "data-encoding", + "libp2p-identity", + "multibase", + "multihash 0.19.1", + "percent-encoding", + "serde", + "static_assertions", + "unsigned-varint 0.7.2", + "url", +] + +[[package]] +name = "multibase" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b3539ec3c1f04ac9748a260728e855f261b4977f5c3406612c884564f329404" +dependencies = [ + "base-x", + "data-encoding", + "data-encoding-macro", +] + +[[package]] +name = "multihash" +version = "0.16.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c346cf9999c631f002d8f977c4eaeaa0e6386f16007202308d0b3757522c2cc" +dependencies = [ + "core2", + "multihash-derive", + "unsigned-varint 0.7.2", +] + +[[package]] +name = "multihash" +version = "0.19.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "076d548d76a0e2a0d4ab471d0b1c36c577786dfc4471242035d97a12a735c492" +dependencies = [ + "core2", + "serde", + "unsigned-varint 0.7.2", +] + +[[package]] +name = "multihash-derive" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d6d4752e6230d8ef7adf7bd5d8c4b1f6561c1014c5ba9a37445ccefe18aa1db" +dependencies = [ + "proc-macro-crate 1.1.3", + "proc-macro-error", + "proc-macro2", + "quote", + "syn 1.0.109", + "synstructure 0.12.6", +] + +[[package]] +name = "multimap" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" + +[[package]] +name = "multistream-select" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea0df8e5eec2298a62b326ee4f0d7fe1a6b90a09dfcf9df37b38f947a8c42f19" +dependencies = [ + 
"bytes 1.6.0", + "futures 0.3.30", + "log", + "pin-project 1.1.5", + "smallvec", + "unsigned-varint 0.7.2", +] + +[[package]] +name = "netlink-packet-core" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "345b8ab5bd4e71a2986663e88c56856699d060e78e152e6e9d7966fcd5491297" +dependencies = [ + "anyhow", + "byteorder", + "libc", + "netlink-packet-utils", +] + +[[package]] +name = "netlink-packet-route" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9ea4302b9759a7a88242299225ea3688e63c85ea136371bb6cf94fd674efaab" +dependencies = [ + "anyhow", + "bitflags 1.3.2", + "byteorder", + "libc", + "netlink-packet-core", + "netlink-packet-utils", +] + +[[package]] +name = "netlink-packet-utils" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ede8a08c71ad5a95cdd0e4e52facd37190977039a4704eb82a283f713747d34" +dependencies = [ + "anyhow", + "byteorder", + "paste", + "thiserror", +] + +[[package]] +name = "netlink-proto" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65b4b14489ab424703c092062176d52ba55485a89c076b4f9db05092b7223aa6" +dependencies = [ + "bytes 1.6.0", + "futures 0.3.30", + "log", + "netlink-packet-core", + "netlink-sys", + "thiserror", + "tokio", +] + +[[package]] +name = "netlink-sys" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "416060d346fbaf1f23f9512963e3e878f1a78e707cb699ba9215761754244307" +dependencies = [ + "bytes 1.6.0", + "futures 0.3.30", + "libc", + "log", + "tokio", +] + +[[package]] +name = "newtype-ops" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d36047f46c69ef97b60e7b069a26ce9a15cd8a7852eddb6991ea94a83ba36a78" + +[[package]] +name = "nibble_vec" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"77a5d83df9f36fe23f0c3648c6bbb8b0298bb5f1939c8f2704431371f4b84d43" +dependencies = [ + "smallvec", +] + +[[package]] +name = "nix" +version = "0.24.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa52e972a9a719cecb6864fb88568781eb706bac2cd1d4f04a648542dbf78069" +dependencies = [ + "bitflags 1.3.2", + "cfg-if", + "libc", +] + +[[package]] +name = "nohash-hasher" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bf50223579dc7cdcfb3bfcacf7069ff68243f8c363f62ffa99cf000a6b9c451" + +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + +[[package]] +name = "num-bigint" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" +dependencies = [ + "num-integer", + "num-traits", +] + +[[package]] +name = "num-conv" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + +[[package]] +name = "num-derive" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.68", +] + +[[package]] +name = "num-format" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a652d9771a63711fd3c3deb670acfbe5c30a4072e664d7a3bf5a9e1056ac72c3" +dependencies = [ + "arrayvec", + "itoa", +] + +[[package]] +name = "num-integer" +version = "0.1.46" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" +dependencies = [ + 
"num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", +] + +[[package]] +name = "num_cpus" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" +dependencies = [ + "hermit-abi 0.3.9", + "libc", +] + +[[package]] +name = "object" +version = "0.36.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "081b846d1d56ddfc18fdf1a922e4f6e07a11768ea1b92dec44e42b72712ccfce" +dependencies = [ + "memchr", +] + +[[package]] +name = "oid-registry" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c958dd45046245b9c3c2547369bb634eb461670b2e7e0de552905801a648d1d" +dependencies = [ + "asn1-rs", +] + +[[package]] +name = "once_cell" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" +dependencies = [ + "critical-section", + "portable-atomic", +] + +[[package]] +name = "opaque-debug" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" + +[[package]] +name = "ordered-float" +version = "2.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c" +dependencies = [ + "num-traits", +] + +[[package]] +name = "parity-scale-codec" +version = "3.6.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "306800abfa29c7f16596b5970a588435e3d5b3149683d00c12b699cc19f895ee" +dependencies = [ + "arrayvec", + "bitvec", + "byte-slice-cast", + "impl-trait-for-tuples", + 
"parity-scale-codec-derive", + "serde", +] + +[[package]] +name = "parity-scale-codec-derive" +version = "3.6.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d830939c76d294956402033aee57a6da7b438f2294eb94864c37b0569053a42c" +dependencies = [ + "proc-macro-crate 3.1.0", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "parking" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae" + +[[package]] +name = "parking_lot" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-targets 0.52.5", +] + +[[package]] +name = "password-hash" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7676374caaee8a325c9e7a2ae557f216c5563a171d6997b0ef8a65af35147700" +dependencies = [ + "base64ct", + "rand_core", + "subtle", +] + +[[package]] +name = "paste" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" + +[[package]] +name = "path-clean" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecba01bf2678719532c5e3059e0b5f0811273d94b397088b82e3bd0a78c78fdd" + +[[package]] +name = "pathdiff" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8835116a5c179084a830efb3adc117ab007512b535bc1a21c991d3b32a6b44dd" + +[[package]] +name 
= "pem" +version = "3.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e459365e590736a54c3fa561947c84837534b8e9af6fc5bf781307e82658fae" +dependencies = [ + "base64 0.22.1", + "serde", +] + +[[package]] +name = "percent-encoding" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] +name = "petgraph" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" +dependencies = [ + "fixedbitset", + "indexmap 2.2.6", +] + +[[package]] +name = "pin-project" +version = "0.4.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ef0f924a5ee7ea9cbcea77529dba45f8a9ba9f622419fe3386ca581a3ae9d5a" +dependencies = [ + "pin-project-internal 0.4.30", +] + +[[package]] +name = "pin-project" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" +dependencies = [ + "pin-project-internal 1.1.5", +] + +[[package]] +name = "pin-project-internal" +version = "0.4.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "851c8d0ce9bebe43790dedfc86614c23494ac9f423dd618d3a61fc693eafe61e" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.68", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" + +[[package]] +name = "pin-utils" +version = 
"0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pkcs8" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" +dependencies = [ + "der", + "spki", +] + +[[package]] +name = "pkg-config" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" + +[[package]] +name = "polling" +version = "3.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3ed00ed3fbf728b5816498ecd316d1716eecaced9c0c8d2c5a6740ca214985b" +dependencies = [ + "cfg-if", + "concurrent-queue", + "hermit-abi 0.4.0", + "pin-project-lite", + "rustix", + "tracing", + "windows-sys 0.52.0", +] + +[[package]] +name = "poly1305" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8159bd90725d2df49889a078b54f4f79e87f1f8a8444194cdca81d38f5393abf" +dependencies = [ + "cpufeatures", + "opaque-debug", + "universal-hash", +] + +[[package]] +name = "polyval" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d1fe60d06143b2430aa532c94cfe9e29783047f06c0d7fd359a9a51b729fa25" +dependencies = [ + "cfg-if", + "cpufeatures", + "opaque-debug", + "universal-hash", +] + +[[package]] +name = "portable-atomic" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0" + +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + +[[package]] +name = "ppv-lite86" +version = "0.2.17" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" + +[[package]] +name = "prettyplease" +version = "0.1.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c8646e95016a7a6c4adea95bafa8a16baab64b583356217f2c85db4a39d9a86" +dependencies = [ + "proc-macro2", + "syn 1.0.109", +] + +[[package]] +name = "primitive-types" +version = "0.12.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b34d9fd68ae0b74a41b21c03c2f62847aa0ffea044eee893b4c140b37e244e2" +dependencies = [ + "fixed-hash", + "impl-codec", + "impl-serde", + "uint", +] + +[[package]] +name = "proc-macro-crate" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e17d47ce914bf4de440332250b0edd23ce48c005f59fab39d3335866b114f11a" +dependencies = [ + "thiserror", + "toml 0.5.11", +] + +[[package]] +name = "proc-macro-crate" +version = "3.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d37c51ca738a55da99dc0c4a34860fd675453b8b36209178c2249bb13651284" +dependencies = [ + "toml_edit 0.21.1", +] + +[[package]] +name = "proc-macro-error" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" +dependencies = [ + "proc-macro-error-attr", + "proc-macro2", + "quote", + "syn 1.0.109", + "version_check", +] + +[[package]] +name = "proc-macro-error-attr" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" +dependencies = [ + "proc-macro2", + "quote", + "version_check", +] + +[[package]] +name = "proc-macro2" +version = "1.0.86" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" +dependencies 
= [ + "unicode-ident", +] + +[[package]] +name = "prometheus-client" +version = "0.22.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1ca959da22a332509f2a73ae9e5f23f9dcfc31fd3a54d71f159495bd5909baa" +dependencies = [ + "dtoa", + "itoa", + "parking_lot", + "prometheus-client-derive-encode", +] + +[[package]] +name = "prometheus-client-derive-encode" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "440f724eba9f6996b75d63681b0a92b06947f1457076d503a4d2e2c8f56442b8" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.68", +] + +[[package]] +name = "prost" +version = "0.11.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b82eaa1d779e9a4bc1c3217db8ffbeabaae1dca241bf70183242128d48681cd" +dependencies = [ + "bytes 1.6.0", + "prost-derive", +] + +[[package]] +name = "prost-build" +version = "0.11.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "119533552c9a7ffacc21e099c24a0ac8bb19c2a2a3f363de84cd9b844feab270" +dependencies = [ + "bytes 1.6.0", + "heck 0.4.1", + "itertools 0.10.5", + "lazy_static", + "log", + "multimap", + "petgraph", + "prettyplease", + "prost", + "prost-types", + "regex", + "syn 1.0.109", + "tempfile", + "which", +] + +[[package]] +name = "prost-derive" +version = "0.11.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5d2d8d10f3c6ded6da8b05b5fb3b8a5082514344d56c9f871412d29b4e075b4" +dependencies = [ + "anyhow", + "itertools 0.10.5", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "prost-types" +version = "0.11.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "213622a1460818959ac1181aaeb2dc9c7f63df720db7d788b3e24eacd1983e13" +dependencies = [ + "prost", +] + +[[package]] +name = "quanta" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8e5167a477619228a0b284fac2674e3c388cba90631d7b7de620e6f1fcd08da5" +dependencies = [ + "crossbeam-utils", + "libc", + "once_cell", + "raw-cpuid", + "wasi", + "web-sys", + "winapi", +] + +[[package]] +name = "quick-error" +version = "1.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" + +[[package]] +name = "quick-protobuf" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d6da84cc204722a989e01ba2f6e1e276e190f22263d0cb6ce8526fcdb0d2e1f" +dependencies = [ + "byteorder", +] + +[[package]] +name = "quick-protobuf-codec" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15a0580ab32b169745d7a39db2ba969226ca16738931be152a3209b409de2474" +dependencies = [ + "asynchronous-codec", + "bytes 1.6.0", + "quick-protobuf", + "thiserror", + "unsigned-varint 0.8.0", +] + +[[package]] +name = "quinn" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4ceeeeabace7857413798eb1ffa1e9c905a9946a57d81fb69b4b71c4d8eb3ad" +dependencies = [ + "bytes 1.6.0", + "futures-io", + "pin-project-lite", + "quinn-proto", + "quinn-udp", + "rustc-hash", + "rustls 0.23.10", + "thiserror", + "tokio", + "tracing", +] + +[[package]] +name = "quinn-proto" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddf517c03a109db8100448a4be38d498df8a210a99fe0e1b9eaf39e78c640efe" +dependencies = [ + "bytes 1.6.0", + "rand", + "ring 0.17.8", + "rustc-hash", + "rustls 0.23.10", + "slab", + "thiserror", + "tinyvec", + "tracing", +] + +[[package]] +name = "quinn-udp" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9096629c45860fc7fb143e125eb826b5e721e10be3263160c7d60ca832cf8c46" +dependencies = [ + "libc", + "once_cell", + "socket2", + "tracing", + "windows-sys 0.52.0", +] + 
+[[package]] +name = "quote" +version = "1.0.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "r2d2" +version = "0.8.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51de85fb3fb6524929c8a2eb85e6b6d363de4e8c48f9e2c2eac4944abc181c93" +dependencies = [ + "log", + "parking_lot", + "scheduled-thread-pool", +] + +[[package]] +name = "radium" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" + +[[package]] +name = "radix_trie" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c069c179fcdc6a2fe24d8d18305cf085fdbd4f922c041943e203685d6a1c58fd" +dependencies = [ + "endian-type", + "nibble_vec", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + +[[package]] +name = "randomx-rs" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14fb999f322669968fd0e80aeca5cb91e7a817a94ebf2b0fcd345a4a7c695203" +dependencies = [ + "bitflags 1.3.2", + "libc", + "thiserror", +] + +[[package]] +name = "raw-cpuid" 
+version = "11.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e29830cbb1290e404f24c73af91c5d8d631ce7e128691e9477556b540cd01ecd" +dependencies = [ + "bitflags 2.6.0", +] + +[[package]] +name = "rcgen" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52c4f3084aa3bc7dfbba4eff4fab2a54db4324965d8872ab933565e6fbd83bc6" +dependencies = [ + "pem", + "ring 0.16.20", + "time", + "yasna", +] + +[[package]] +name = "redox_syscall" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c82cf8cff14456045f55ec4241383baeff27af886adb72ffb2162f99911de0fd" +dependencies = [ + "bitflags 2.6.0", +] + +[[package]] +name = "redox_users" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd283d9651eeda4b2a83a43c1c91b266c40fd76ecd39a50a8c630ae69dc72891" +dependencies = [ + "getrandom", + "libredox", + "thiserror", +] + +[[package]] +name = "regex" +version = "1.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b91213439dad192326a0d7c6ee3955910425f441d7038e0d6933b0aec5c4517f" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b" + +[[package]] +name = "resolv-conf" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52e44394d2086d010551b14b53b1f24e31647570cd1deb0379e2c21b329aba00" +dependencies = [ + "hostname", + "quick-error", +] + 
+[[package]] +name = "ring" +version = "0.16.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +dependencies = [ + "cc", + "libc", + "once_cell", + "spin 0.5.2", + "untrusted 0.7.1", + "web-sys", + "winapi", +] + +[[package]] +name = "ring" +version = "0.17.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" +dependencies = [ + "cc", + "cfg-if", + "getrandom", + "libc", + "spin 0.9.8", + "untrusted 0.9.0", + "windows-sys 0.52.0", +] + +[[package]] +name = "rtnetlink" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "322c53fd76a18698f1c27381d58091de3a043d356aa5bd0d510608b565f469a0" +dependencies = [ + "futures 0.3.30", + "log", + "netlink-packet-route", + "netlink-proto", + "nix", + "thiserror", + "tokio", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" + +[[package]] +name = "rustc-hash" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" + +[[package]] +name = "rustc-hex" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6" + +[[package]] +name = "rustc_version" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +dependencies = [ + "semver", +] + +[[package]] +name = "rusticata-macros" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"faf0c4a6ece9950b9abdb62b1cfcf2a68b3b67a10ba445b3bb85be2a293d0632" +dependencies = [ + "nom", +] + +[[package]] +name = "rustix" +version = "0.38.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" +dependencies = [ + "bitflags 2.6.0", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustls" +version = "0.20.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b80e3dec595989ea8510028f30c408a4630db12c9cbb8de34203b89d6577e99" +dependencies = [ + "log", + "ring 0.16.20", + "sct", + "webpki", +] + +[[package]] +name = "rustls" +version = "0.23.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05cff451f60db80f490f3c182b77c35260baace73209e9cdbbe526bfe3a4d402" +dependencies = [ + "once_cell", + "ring 0.17.8", + "rustls-pki-types", + "rustls-webpki 0.102.4", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-pemfile" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ee86d63972a7c661d1536fefe8c3c8407321c3df668891286de28abcd087360" +dependencies = [ + "base64 0.13.1", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" +dependencies = [ + "base64 0.21.7", +] + +[[package]] +name = "rustls-pki-types" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "976295e77ce332211c0d24d92c0e83e50f5c5f046d11082cea19f3df13a3562d" + +[[package]] +name = "rustls-webpki" +version = "0.101.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" +dependencies = [ + "ring 0.17.8", + "untrusted 0.9.0", +] + +[[package]] +name = "rustls-webpki" +version = 
"0.102.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff448f7e92e913c4b7d4c6d8e4540a1724b319b4152b8aef6d4cf8339712b33e" +dependencies = [ + "ring 0.17.8", + "rustls-pki-types", + "untrusted 0.9.0", +] + +[[package]] +name = "rustversion" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" + +[[package]] +name = "rw-stream-sink" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8c9026ff5d2f23da5e45bbc283f156383001bfb09c4e44256d02c1a685fe9a1" +dependencies = [ + "futures 0.3.30", + "pin-project 1.1.5", + "static_assertions", +] + +[[package]] +name = "ryu" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" + +[[package]] +name = "scheduled-thread-pool" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3cbc66816425a074528352f5789333ecff06ca41b36b0b0efdfbb29edc391a19" +dependencies = [ + "parking_lot", +] + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "sct" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" +dependencies = [ + "ring 0.17.8", + "untrusted 0.9.0", +] + +[[package]] +name = "sealed" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4a8caec23b7800fb97971a1c6ae365b6239aaeddfb934d6265f8505e795699d" +dependencies = [ + "heck 0.4.1", + "proc-macro2", + "quote", + "syn 2.0.68", +] + +[[package]] +name = "semver" +version = "1.0.23" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" + +[[package]] +name = "serde" +version = "1.0.203" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7253ab4de971e72fb7be983802300c30b5a7f0c2e56fab8abfc6a214307c0094" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde-value" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c" +dependencies = [ + "ordered-float", + "serde", +] + +[[package]] +name = "serde_cbor" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bef2ebfde456fb76bbcf9f59315333decc4fda0b2b44b420243c11e0f5ec1f5" +dependencies = [ + "half", + "serde", +] + +[[package]] +name = "serde_derive" +version = "1.0.203" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "500cbc0ebeb6f46627f50f3f5811ccf6bf00643be300b4c3eabc0ef55dc5b5ba" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.68", +] + +[[package]] +name = "serde_json" +version = "1.0.120" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e0d21c9a8cae1235ad58a00c11cb40d4b1e5c784f1ef2c537876ed6ffd8b7c5" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_repr" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.68", +] + +[[package]] +name = "serde_spanned" +version = "0.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79e674e01f999af37c49f70a6ede167a8a60b2503e56c5599532a65baa5969a0" +dependencies = [ + "serde", +] + +[[package]] +name = "serde_yaml" +version = "0.9.34+deprecated" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" +dependencies = [ + "indexmap 2.2.6", + "itoa", + "ryu", + "serde", + "unsafe-libyaml", +] + +[[package]] +name = "sha2" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sha3" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75872d278a8f37ef87fa0ddbda7802605cb18344497949862c0d4dcb291eba60" +dependencies = [ + "digest", + "keccak", +] + +[[package]] +name = "sha_p2pool" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "blake2", + "clap 4.5.8", + "digest", + "env_logger", + "libp2p", + "log", + "minotari_app_grpc", + "minotari_node_grpc_client", + "moka", + "serde", + "serde_cbor", + "tari_common_types", + "tari_core", + "tari_utilities", + "thiserror", + "tokio", + "tonic", +] + +[[package]] +name = "signal-hook-registry" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" +dependencies = [ + "libc", +] + +[[package]] +name = "signature" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" +dependencies = [ + "rand_core", +] + +[[package]] +name = "slab" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" +dependencies = [ + "autocfg", +] + +[[package]] +name = "smallvec" +version = "1.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" + 
+[[package]] +name = "snafu" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4de37ad025c587a29e8f3f5605c00f70b98715ef90b9061a815b9e59e9042d6" +dependencies = [ + "doc-comment", + "snafu-derive", +] + +[[package]] +name = "snafu-derive" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "990079665f075b699031e9c08fd3ab99be5029b96f3b78dc0709e8f77e4efebf" +dependencies = [ + "heck 0.4.1", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "snow" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "850948bee068e713b8ab860fe1adc4d109676ab4c3b621fd8147f06b261f2f85" +dependencies = [ + "aes-gcm", + "blake2", + "chacha20poly1305", + "curve25519-dalek", + "rand_core", + "ring 0.17.8", + "rustc_version", + "sha2", + "subtle", +] + +[[package]] +name = "socket2" +version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "spin" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" + +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" + +[[package]] +name = "spki" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" +dependencies = [ + "base64ct", + "der", +] + +[[package]] +name = "stack-buf" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7386b49cb287f6fafbfd3bd604914bccb99fb8d53483f40e1ecfda5d45f3370" + +[[package]] +name = 
"static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "structopt" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c6b5c64445ba8094a6ab0c3cd2ad323e07171012d9c98b0b15651daf1787a10" +dependencies = [ + "clap 2.34.0", + "lazy_static", + "structopt-derive", +] + +[[package]] +name = "structopt-derive" +version = "0.4.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcb5ae327f9cc13b68763b5749770cb9e048a99bd9dfdfa58d0cf05d5f64afe0" +dependencies = [ + "heck 0.3.3", + "proc-macro-error", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "strum" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7ac893c7d471c8a21f31cfe213ec4f6d9afeed25537c772e08ef3f005f8729e" +dependencies = [ + "strum_macros", +] + +[[package]] +name = "strum_macros" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "339f799d8b549e3744c7ac7feb216383e4005d94bdb22561b3ab8f3b808ae9fb" +dependencies = [ + "heck 0.3.3", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + +[[package]] +name = "supercow" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "171758edb47aa306a78dfa4ab9aeb5167405bd4e3dc2b64e88f6a84bbe98bd63" + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.68" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "901fa70d88b9d6c98022e23b4136f9f3e54e4662c3bc1bd1d84a42a9a0f0c1e9" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn_derive" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1329189c02ff984e9736652b1631330da25eaa6bc639089ed4915d25446cbe7b" +dependencies = [ + "proc-macro-error", + "proc-macro2", + "quote", + "syn 2.0.68", +] + +[[package]] +name = "sync_wrapper" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + +[[package]] +name = "synstructure" +version = "0.12.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", + "unicode-xid", +] + +[[package]] +name = "synstructure" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.68", +] + +[[package]] +name = "system-configuration" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" +dependencies = [ + 
"core-foundation-sys", + "libc", +] + +[[package]] +name = "tagptr" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b2093cf4c8eb1e67749a6762251bc9cd836b6fc171623bd0a9d324d37af2417" + +[[package]] +name = "tap" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" + +[[package]] +name = "tari-tiny-keccak" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baa5d0f04bac67c31c1e0c2ffbc7f0e3aee2707405804ec5e022b4d550be259c" +dependencies = [ + "borsh", + "crunchy", +] + +[[package]] +name = "tari_bulletproofs_plus" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eac57729e3b003c18e822c64c8c4977770102b2eaea920a7c40bca5caf12c54" +dependencies = [ + "blake2", + "byteorder", + "curve25519-dalek", + "digest", + "ff", + "itertools 0.12.1", + "merlin", + "once_cell", + "rand_core", + "serde", + "sha3", + "thiserror-no-std", + "zeroize", +] + +[[package]] +name = "tari_common" +version = "1.0.0-pre.14" +source = "git+https://github.com/ksrichard/tari.git?branch=p2pool#56d65428c1257cd9c750c4d1c4100e270202a6d5" +dependencies = [ + "anyhow", + "blake2", + "config", + "dirs-next", + "log", + "log4rs", + "multiaddr 0.14.0", + "path-clean", + "prost-build", + "serde", + "serde_json", + "serde_yaml", + "sha2", + "structopt", + "tari_crypto", + "tari_features", + "tempfile", + "thiserror", + "toml 0.5.11", +] + +[[package]] +name = "tari_common_sqlite" +version = "1.0.0-pre.14" +source = "git+https://github.com/ksrichard/tari.git?branch=p2pool#56d65428c1257cd9c750c4d1c4100e270202a6d5" +dependencies = [ + "diesel", + "diesel_migrations", + "log", + "serde", + "tari_utilities", + "thiserror", + "tokio", +] + +[[package]] +name = "tari_common_types" +version = "1.0.0-pre.14" +source = 
"git+https://github.com/ksrichard/tari.git?branch=p2pool#56d65428c1257cd9c750c4d1c4100e270202a6d5" +dependencies = [ + "base64 0.21.7", + "bitflags 2.6.0", + "blake2", + "borsh", + "bs58 0.5.1", + "chacha20poly1305", + "digest", + "newtype-ops", + "once_cell", + "primitive-types", + "rand", + "serde", + "strum", + "strum_macros", + "tari_common", + "tari_crypto", + "tari_utilities", + "thiserror", +] + +[[package]] +name = "tari_comms" +version = "1.0.0-pre.14" +source = "git+https://github.com/ksrichard/tari.git?branch=p2pool#56d65428c1257cd9c750c4d1c4100e270202a6d5" +dependencies = [ + "anyhow", + "async-trait", + "bitflags 2.6.0", + "blake2", + "bytes 1.6.0", + "chrono", + "cidr", + "data-encoding", + "derivative", + "digest", + "futures 0.3.30", + "lmdb-zero", + "log", + "log-mdc", + "multiaddr 0.14.0", + "nom", + "once_cell", + "pin-project 1.1.5", + "prost", + "rand", + "serde", + "serde_derive", + "sha3", + "snow", + "tari_common", + "tari_crypto", + "tari_shutdown", + "tari_storage", + "tari_utilities", + "thiserror", + "tokio", + "tokio-stream", + "tokio-util 0.6.10", + "tower", + "tracing", + "yamux 0.13.3", + "zeroize", +] + +[[package]] +name = "tari_comms_dht" +version = "1.0.0-pre.14" +source = "git+https://github.com/ksrichard/tari.git?branch=p2pool#56d65428c1257cd9c750c4d1c4100e270202a6d5" +dependencies = [ + "anyhow", + "bitflags 2.6.0", + "blake2", + "chacha20 0.7.3", + "chacha20poly1305", + "chrono", + "diesel", + "diesel_migrations", + "digest", + "futures 0.3.30", + "log", + "log-mdc", + "pin-project 0.4.30", + "prost", + "rand", + "serde", + "tari_common", + "tari_common_sqlite", + "tari_comms", + "tari_comms_rpc_macros", + "tari_crypto", + "tari_shutdown", + "tari_storage", + "tari_utilities", + "thiserror", + "tokio", + "tower", + "zeroize", +] + +[[package]] +name = "tari_comms_rpc_macros" +version = "1.0.0-pre.14" +source = "git+https://github.com/ksrichard/tari.git?branch=p2pool#56d65428c1257cd9c750c4d1c4100e270202a6d5" +dependencies = [ 
+ "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "tari_core" +version = "1.0.0-pre.14" +source = "git+https://github.com/ksrichard/tari.git?branch=p2pool#56d65428c1257cd9c750c4d1c4100e270202a6d5" +dependencies = [ + "async-trait", + "bincode", + "bitflags 2.6.0", + "blake2", + "borsh", + "bytes 0.5.6", + "chacha20poly1305", + "chrono", + "decimal-rs", + "derivative", + "digest", + "fs2", + "futures 0.3.30", + "hex", + "integer-encoding", + "lmdb-zero", + "log", + "log-mdc", + "monero", + "newtype-ops", + "num-derive", + "num-format", + "num-traits", + "once_cell", + "primitive-types", + "prost", + "rand", + "randomx-rs", + "serde", + "serde_json", + "serde_repr", + "sha2", + "sha3", + "strum", + "strum_macros", + "tari-tiny-keccak", + "tari_common", + "tari_common_sqlite", + "tari_common_types", + "tari_comms", + "tari_comms_dht", + "tari_comms_rpc_macros", + "tari_crypto", + "tari_features", + "tari_hashing", + "tari_key_manager", + "tari_mmr", + "tari_p2p", + "tari_script", + "tari_service_framework", + "tari_shutdown", + "tari_storage", + "tari_test_utils", + "tari_utilities", + "thiserror", + "tokio", + "tracing", + "zeroize", +] + +[[package]] +name = "tari_crypto" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd8b549b292deddde212f389f345ef58c0bffb552b7e0f561f5504859ca82466" +dependencies = [ + "blake2", + "borsh", + "curve25519-dalek", + "digest", + "log", + "merlin", + "once_cell", + "rand_chacha", + "rand_core", + "serde", + "sha3", + "snafu", + "subtle", + "tari_bulletproofs_plus", + "tari_utilities", + "zeroize", +] + +[[package]] +name = "tari_features" +version = "1.0.0-pre.14" +source = "git+https://github.com/ksrichard/tari.git?branch=p2pool#56d65428c1257cd9c750c4d1c4100e270202a6d5" + +[[package]] +name = "tari_hashing" +version = "1.0.0-pre.14" +source = "git+https://github.com/ksrichard/tari.git?branch=p2pool#56d65428c1257cd9c750c4d1c4100e270202a6d5" +dependencies = [ + 
"borsh", + "digest", + "tari_crypto", +] + +[[package]] +name = "tari_key_manager" +version = "1.0.0-pre.14" +source = "git+https://github.com/ksrichard/tari.git?branch=p2pool#56d65428c1257cd9c750c4d1c4100e270202a6d5" +dependencies = [ + "argon2", + "async-trait", + "blake2", + "chacha20 0.7.3", + "chacha20poly1305", + "chrono", + "crc32fast", + "derivative", + "diesel", + "diesel_migrations", + "digest", + "futures 0.3.30", + "log", + "rand", + "serde", + "strum", + "strum_macros", + "subtle", + "tari_common_sqlite", + "tari_common_types", + "tari_crypto", + "tari_service_framework", + "tari_utilities", + "thiserror", + "tokio", + "zeroize", +] + +[[package]] +name = "tari_mmr" +version = "1.0.0-pre.14" +source = "git+https://github.com/ksrichard/tari.git?branch=p2pool#56d65428c1257cd9c750c4d1c4100e270202a6d5" +dependencies = [ + "borsh", + "digest", + "log", + "serde", + "tari_common", + "tari_crypto", + "tari_utilities", + "thiserror", +] + +[[package]] +name = "tari_p2p" +version = "1.0.0-pre.14" +source = "git+https://github.com/ksrichard/tari.git?branch=p2pool#56d65428c1257cd9c750c4d1c4100e270202a6d5" +dependencies = [ + "anyhow", + "fs2", + "futures 0.3.30", + "lmdb-zero", + "log", + "prost", + "rand", + "rustls 0.20.9", + "serde", + "tari_common", + "tari_comms", + "tari_comms_dht", + "tari_crypto", + "tari_service_framework", + "tari_shutdown", + "tari_storage", + "tari_utilities", + "thiserror", + "tokio", + "tokio-stream", + "tower", + "trust-dns-client", + "webpki", +] + +[[package]] +name = "tari_script" +version = "1.0.0-pre.14" +source = "git+https://github.com/ksrichard/tari.git?branch=p2pool#56d65428c1257cd9c750c4d1c4100e270202a6d5" +dependencies = [ + "blake2", + "borsh", + "digest", + "integer-encoding", + "serde", + "sha2", + "sha3", + "tari_crypto", + "tari_utilities", + "thiserror", +] + +[[package]] +name = "tari_service_framework" +version = "1.0.0-pre.14" +source = 
"git+https://github.com/ksrichard/tari.git?branch=p2pool#56d65428c1257cd9c750c4d1c4100e270202a6d5" +dependencies = [ + "anyhow", + "async-trait", + "futures 0.3.30", + "log", + "tari_shutdown", + "thiserror", + "tokio", + "tower-service", +] + +[[package]] +name = "tari_shutdown" +version = "1.0.0-pre.14" +source = "git+https://github.com/ksrichard/tari.git?branch=p2pool#56d65428c1257cd9c750c4d1c4100e270202a6d5" +dependencies = [ + "futures 0.3.30", +] + +[[package]] +name = "tari_storage" +version = "1.0.0-pre.14" +source = "git+https://github.com/ksrichard/tari.git?branch=p2pool#56d65428c1257cd9c750c4d1c4100e270202a6d5" +dependencies = [ + "bincode", + "lmdb-zero", + "log", + "serde", + "thiserror", +] + +[[package]] +name = "tari_test_utils" +version = "1.0.0-pre.14" +source = "git+https://github.com/ksrichard/tari.git?branch=p2pool#56d65428c1257cd9c750c4d1c4100e270202a6d5" +dependencies = [ + "futures 0.3.30", + "rand", + "tari_comms", + "tari_shutdown", + "tempfile", + "tokio", +] + +[[package]] +name = "tari_utilities" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c1bb0e5d1d812f2be2d6ad861caad68f75adb5b2e8376264850300deb16ddc7" +dependencies = [ + "base58-monero 0.3.2", + "base64 0.13.1", + "bincode", + "borsh", + "generic-array", + "newtype-ops", + "serde", + "serde_json", + "snafu", + "subtle", + "zeroize", +] + +[[package]] +name = "tempfile" +version = "3.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" +dependencies = [ + "cfg-if", + "fastrand", + "rustix", + "windows-sys 0.52.0", +] + +[[package]] +name = "textwrap" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" +dependencies = [ + "unicode-width", +] + +[[package]] +name = "thiserror" +version = "1.0.61" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "c546c80d6be4bc6a00c0f01730c08df82eaa7a7a61f11d656526506112cc1709" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.68", +] + +[[package]] +name = "thiserror-impl-no-std" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "58e6318948b519ba6dc2b442a6d0b904ebfb8d411a3ad3e07843615a72249758" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "thiserror-no-std" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3ad459d94dd517257cc96add8a43190ee620011bb6e6cdc82dafd97dfafafea" +dependencies = [ + "thiserror-impl-no-std", +] + +[[package]] +name = "time" +version = "0.3.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" +dependencies = [ + "deranged", + "itoa", + "num-conv", + "powerfmt", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" + +[[package]] +name = "time-macros" +version = "0.2.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" +dependencies = [ + "num-conv", + "time-core", +] + +[[package]] +name = "tiny-keccak" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" +dependencies = [ + "crunchy", +] + +[[package]] +name = 
"tinyvec" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c55115c6fbe2d2bef26eb09ad74bde02d8255476fc0c7b515ef09fbb35742d82" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.38.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba4f4a02a7a80d6f274636f0aa95c7e383b912d41fe721a31f29e29698585a4a" +dependencies = [ + "backtrace", + "bytes 1.6.0", + "libc", + "mio", + "num_cpus", + "parking_lot", + "pin-project-lite", + "signal-hook-registry", + "socket2", + "tokio-macros", + "windows-sys 0.48.0", +] + +[[package]] +name = "tokio-io-timeout" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf" +dependencies = [ + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-macros" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f5ae998a069d4b5aba8ee9dad856af7d520c3699e6159b185c2acd48155d39a" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.68", +] + +[[package]] +name = "tokio-rustls" +version = "0.23.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" +dependencies = [ + "rustls 0.20.9", + "tokio", + "webpki", +] + +[[package]] +name = "tokio-stream" +version = "0.1.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", + "tokio-util 0.7.11", +] + +[[package]] +name = "tokio-util" +version = "0.6.10" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "36943ee01a6d67977dd3f84a5a1d2efeb4ada3a1ae771cadfaa535d9d9fc6507" +dependencies = [ + "bytes 1.6.0", + "futures-core", + "futures-io", + "futures-sink", + "log", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9cf6b47b3771c49ac75ad09a6162f53ad4b8088b76ac60e8ec1455b31a189fe1" +dependencies = [ + "bytes 1.6.0", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "toml" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" +dependencies = [ + "serde", +] + +[[package]] +name = "toml" +version = "0.8.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f49eb2ab21d2f26bd6db7bf383edc527a7ebaee412d17af4d40fdccd442f335" +dependencies = [ + "serde", + "serde_spanned", + "toml_datetime", + "toml_edit 0.22.14", +] + +[[package]] +name = "toml_datetime" +version = "0.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4badfd56924ae69bcc9039335b2e017639ce3f9b001c393c1b2d1ef846ce2cbf" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_edit" +version = "0.21.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a8534fd7f78b5405e860340ad6575217ce99f38d4d5c8f2442cb5ecb50090e1" +dependencies = [ + "indexmap 2.2.6", + "toml_datetime", + "winnow 0.5.40", +] + +[[package]] +name = "toml_edit" +version = "0.22.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f21c7aaf97f1bd9ca9d4f9e73b0a6c74bd5afef56f2bc931943a6e1c37e04e38" +dependencies = [ + "indexmap 2.2.6", + "serde", + "serde_spanned", + "toml_datetime", + "winnow 0.6.13", +] + +[[package]] +name = "tonic" +version = "0.8.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f219fad3b929bef19b1f86fbc0358d35daed8f2cac972037ac0dc10bbb8d5fb" +dependencies = [ + "async-stream", + "async-trait", + "axum", + "base64 0.13.1", + "bytes 1.6.0", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "hyper", + "hyper-timeout", + "percent-encoding", + "pin-project 1.1.5", + "prost", + "prost-derive", + "rustls-pemfile 1.0.4", + "tokio", + "tokio-rustls", + "tokio-stream", + "tokio-util 0.7.11", + "tower", + "tower-layer", + "tower-service", + "tracing", + "tracing-futures", +] + +[[package]] +name = "tonic-build" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5bf5e9b9c0f7e0a7c027dcfaba7b2c60816c7049171f679d99ee2ff65d0de8c4" +dependencies = [ + "prettyplease", + "proc-macro2", + "prost-build", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "hdrhistogram", + "indexmap 1.9.3", + "pin-project 1.1.5", + "pin-project-lite", + "rand", + "slab", + "tokio", + "tokio-util 0.7.11", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-layer" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" + +[[package]] +name = "tower-service" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" + +[[package]] +name = "tracing" +version = "0.1.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +dependencies = [ + "log", + "pin-project-lite", + 
"tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.68", +] + +[[package]] +name = "tracing-core" +version = "0.1.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +dependencies = [ + "once_cell", +] + +[[package]] +name = "tracing-futures" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" +dependencies = [ + "pin-project 1.1.5", + "tracing", +] + +[[package]] +name = "triomphe" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6631e42e10b40c0690bf92f404ebcfe6e1fdb480391d15f17cc8e96eeed5369" + +[[package]] +name = "trust-dns-client" +version = "0.21.0-alpha.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b62dfcea87b25f0810e2a527458dd621e252fd8a5827153329308d6e1f252d68" +dependencies = [ + "cfg-if", + "data-encoding", + "futures-channel", + "futures-util", + "lazy_static", + "log", + "radix_trie", + "rand", + "ring 0.16.20", + "rustls 0.20.9", + "thiserror", + "time", + "tokio", + "trust-dns-proto", + "webpki", +] + +[[package]] +name = "trust-dns-proto" +version = "0.21.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c31f240f59877c3d4bb3b3ea0ec5a6a0cff07323580ff8c7a605cd7d08b255d" +dependencies = [ + "async-trait", + "cfg-if", + "data-encoding", + "enum-as-inner 0.4.0", + "futures-channel", + "futures-io", + "futures-util", + "idna 0.2.3", + "ipnet", + "lazy_static", + "log", + "rand", + "ring 0.16.20", + "rustls 0.20.9", + "rustls-pemfile 0.3.0", + "smallvec", + "thiserror", + 
"tinyvec", + "tokio", + "tokio-rustls", + "url", + "webpki", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "typemap-ors" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a68c24b707f02dd18f1e4ccceb9d49f2058c2fb86384ef9972592904d7a28867" +dependencies = [ + "unsafe-any-ors", +] + +[[package]] +name = "typenum" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" + +[[package]] +name = "uint" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76f64bba2c53b04fcab63c01a7d7427eadc821e3bc48c34dc9ba29c501164b52" +dependencies = [ + "byteorder", + "crunchy", + "hex", + "static_assertions", +] + +[[package]] +name = "unicode-bidi" +version = "0.3.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" + +[[package]] +name = "unicode-ident" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" + +[[package]] +name = "unicode-normalization" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-segmentation" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202" + +[[package]] +name = "unicode-width" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"0336d538f7abc86d282a4189614dfaa90810dfc2c6f6427eaf88e16311dd225d" + +[[package]] +name = "unicode-xid" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" + +[[package]] +name = "universal-hash" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc1de2c688dc15305988b563c3854064043356019f97a4b46276fe734c4f07ea" +dependencies = [ + "crypto-common", + "subtle", +] + +[[package]] +name = "unsafe-any-ors" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0a303d30665362d9680d7d91d78b23f5f899504d4f08b3c4cf08d055d87c0ad" +dependencies = [ + "destructure_traitobject", +] + +[[package]] +name = "unsafe-libyaml" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" + +[[package]] +name = "unsigned-varint" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6889a77d49f1f013504cec6bf97a2c730394adedaeb1deb5ea08949a50541105" + +[[package]] +name = "unsigned-varint" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb066959b24b5196ae73cb057f45598450d2c5f71460e98c49b738086eff9c06" + +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "url" +version = "2.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c" +dependencies = 
[ + "form_urlencoded", + "idna 0.5.0", + "percent-encoding", +] + +[[package]] +name = "utf8parse" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" + +[[package]] +name = "uuid" +version = "1.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5de17fd2f7da591098415cff336e12965a28061ddace43b59cb3c430179c9439" +dependencies = [ + "getrandom", +] + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "version_check" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + +[[package]] +name = "void" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn 2.0.68", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.68", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" + +[[package]] +name = "web-sys" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77afa9a11836342370f4817622a2f0f418b134426d91a82dfb48f532d2ec13ef" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webpki" +version = "0.22.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed63aea5ce73d0ff405984102c42de94fc55a6b75765d621c65262469b3c9b53" +dependencies = [ + "ring 0.17.8", + "untrusted 0.9.0", +] + +[[package]] +name = "which" +version = "4.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" +dependencies = [ + "either", + "home", 
+ "once_cell", + "rustix", +] + +[[package]] +name = "widestring" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7219d36b6eac893fa81e84ebe06485e7dcbb616177469b142df14f1f4deb1311" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows" +version = "0.51.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca229916c5ee38c2f2bc1e9d8f04df975b4bd93f9955dc69fabb5d91270045c9" +dependencies = [ + "windows-core 0.51.1", + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-core" +version = "0.51.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1f8cf84f35d2db49a46868f947758c7a1138116f7fac3bc844f43ade1292e64" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-core" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" +dependencies = [ + "windows-targets 0.52.5", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = 
"windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.5", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb" +dependencies = [ + "windows_aarch64_gnullvm 0.52.5", + "windows_aarch64_msvc 0.52.5", + "windows_i686_gnu 0.52.5", + "windows_i686_gnullvm", + "windows_i686_msvc 0.52.5", + "windows_x86_64_gnu 0.52.5", + "windows_x86_64_gnullvm 0.52.5", + "windows_x86_64_msvc 0.52.5", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596" + +[[package]] +name = "windows_x86_64_msvc" 
+version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" + +[[package]] +name = "winnow" +version = "0.5.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876" +dependencies = [ + "memchr", +] + +[[package]] +name = "winnow" +version = "0.6.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59b5e5f6c299a3c7890b876a2a587f3115162487e704907d9b6cd29473052ba1" +dependencies = [ + "memchr", +] + +[[package]] +name = "winreg" +version = "0.50.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" +dependencies = [ + "cfg-if", + "windows-sys 0.48.0", +] + +[[package]] +name = "wyz" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" +dependencies = [ + "tap", +] + +[[package]] +name = "x25519-dalek" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7e468321c81fb07fa7f4c636c3972b9100f0346e5b6a9f2bd0603a52f7ed277" +dependencies = [ + "curve25519-dalek", + "rand_core", + "serde", + "zeroize", +] + +[[package]] +name = "x509-parser" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcbc162f30700d6f3f82a24bf7cc62ffe7caea42c0b2cba8bf7f3ae50cf51f69" +dependencies = [ + "asn1-rs", + "data-encoding", + "der-parser", + "lazy_static", + "nom", + "oid-registry", + "rusticata-macros", + "thiserror", + "time", +] + +[[package]] +name = "xml-rs" +version 
= "0.8.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "791978798f0597cfc70478424c2b4fdc2b7a8024aaff78497ef00f24ef674193" + +[[package]] +name = "xmltree" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7d8a75eaf6557bb84a65ace8609883db44a29951042ada9b393151532e41fcb" +dependencies = [ + "xml-rs", +] + +[[package]] +name = "yamux" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed0164ae619f2dc144909a9f082187ebb5893693d8c0196e8085283ccd4b776" +dependencies = [ + "futures 0.3.30", + "log", + "nohash-hasher", + "parking_lot", + "pin-project 1.1.5", + "rand", + "static_assertions", +] + +[[package]] +name = "yamux" +version = "0.13.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a31b5e376a8b012bee9c423acdbb835fc34d45001cfa3106236a624e4b738028" +dependencies = [ + "futures 0.3.30", + "log", + "nohash-hasher", + "parking_lot", + "pin-project 1.1.5", + "rand", + "static_assertions", + "web-time", +] + +[[package]] +name = "yasna" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e17bb3549cc1321ae1296b9cdc2698e2b6cb1992adfa19a8c72e5b7a738f44cd" +dependencies = [ + "time", +] + +[[package]] +name = "zerocopy" +version = "0.7.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae87e3fcd617500e5d106f0380cf7b77f3c6092aae37191433159dda23cfb087" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15e934569e47891f7d9411f1a451d947a60e000ab3bd24fbb970f000387d1b3b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.68", +] + +[[package]] +name = "zeroize" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" +dependencies = [ + "zeroize_derive", +] + +[[package]] +name = "zeroize_derive" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.68", +] diff --git a/Cargo.toml b/Cargo.toml index 2476e17e..5411890f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -36,13 +36,10 @@ thiserror = "1.0" serde = "1.0.203" anyhow = "1.0" log = "0.4.21" -prost = "0.11.9" -prost-types = "0.11.9" env_logger = "0.11.3" tonic = "0.8.3" async-trait = "0.1.80" serde_cbor = "0.11.2" -rand = "0.8.5" blake2 = "0.10.6" digest = "0.10.7" clap = { version = "4.5.7", features = ["derive"] } From dbf1cf6f2e691b6c02556fb6301254d72421e764 Mon Sep 17 00:00:00 2001 From: richardb Date: Tue, 2 Jul 2024 08:01:53 +0200 Subject: [PATCH 36/43] removed unneeded gh action steps --- .github/workflows/ci.yml | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 70b3fc86..5da496e3 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -26,7 +26,7 @@ concurrency: jobs: clippy: name: clippy - runs-on: [ubuntu-20.04] + runs-on: [ ubuntu-20.04 ] steps: - name: checkout uses: actions/checkout@v4 @@ -67,7 +67,7 @@ jobs: machete: # Checks for unused dependencies. name: machete - runs-on: [ubuntu-20.04] + runs-on: [ ubuntu-20.04 ] steps: - name: checkout uses: actions/checkout@v4 @@ -106,7 +106,7 @@ jobs: # Runs cargo check with stable toolchain to determine whether the codebase is likely to build # on stable Rust. 
name: cargo check with stable - runs-on: [self-hosted, ubuntu-high-cpu] + runs-on: [ self-hosted, ubuntu-high-cpu ] steps: - name: checkout uses: actions/checkout@v4 @@ -144,13 +144,9 @@ jobs: run: rustup show - name: cargo check run: cargo check --release --all-targets --workspace --exclude tari_integration_tests --locked - - name: cargo check wallet ffi separately - run: cargo check --release --package minotari_wallet_ffi --locked - - name: cargo check chat ffi separately - run: cargo check --release --package minotari_chat_ffi --locked licenses: name: file licenses - runs-on: [ubuntu-20.04] + runs-on: [ ubuntu-20.04 ] steps: - name: checkout uses: actions/checkout@v4 @@ -163,7 +159,7 @@ jobs: run: ./scripts/file_license_check.sh test: name: test - runs-on: [self-hosted, ubuntu-high-cpu] + runs-on: [ self-hosted, ubuntu-high-cpu ] permissions: checks: write pull-requests: write @@ -224,7 +220,7 @@ jobs: # Allows other workflows to know the PR number artifacts: name: pr_2_artifact - runs-on: [ubuntu-20.04] + runs-on: [ ubuntu-20.04 ] steps: - name: Save the PR number in an artifact shell: bash From 9435066e6e94329c5e3b729803dad50b82148567 Mon Sep 17 00:00:00 2001 From: "C.Lee Taylor" <47312074+leet4tari@users.noreply.github.com> Date: Tue, 2 Jul 2024 11:08:20 +0200 Subject: [PATCH 37/43] ci(fix): update scripts for ci workflows (#3) Description Updated some of the workflow use dscripts Motivation and Context Improve CI How Has This Been Tested? Not What process can a PR reviewer use to test or verify this change? 
Not yet Breaking Changes --- - [x] None - [ ] Requires data directory on base node to be deleted - [ ] Requires hard fork - [ ] Other - Please specify --- .github/workflows/build_binaries.json | 14 +- .github/workflows/build_binaries.yml | 125 +------------ .license.ignore | 0 scripts/cross_compile_tooling.sh | 59 +++++++ scripts/cross_compile_ubuntu_18-pre-build.sh | 166 ++++++++++++++++++ scripts/file_license_check.sh | 40 +++++ ...stall_ubuntu_dependencies-cross_compile.sh | 21 +++ .../install_ubuntu_dependencies-rust-arm64.sh | 10 ++ scripts/install_ubuntu_dependencies-rust.sh | 10 ++ scripts/install_ubuntu_dependencies.sh | 27 +++ 10 files changed, 347 insertions(+), 125 deletions(-) create mode 100644 .license.ignore create mode 100755 scripts/cross_compile_tooling.sh create mode 100755 scripts/cross_compile_ubuntu_18-pre-build.sh create mode 100755 scripts/file_license_check.sh create mode 100755 scripts/install_ubuntu_dependencies-cross_compile.sh create mode 100755 scripts/install_ubuntu_dependencies-rust-arm64.sh create mode 100755 scripts/install_ubuntu_dependencies-rust.sh create mode 100755 scripts/install_ubuntu_dependencies.sh diff --git a/.github/workflows/build_binaries.json b/.github/workflows/build_binaries.json index 943e5110..9bf9a4a1 100644 --- a/.github/workflows/build_binaries.json +++ b/.github/workflows/build_binaries.json @@ -13,8 +13,8 @@ "rust": "stable", "target": "aarch64-unknown-linux-gnu", "cross": true, - "flags": "--features libtor --workspace --exclude minotari_mining_helper_ffi --exclude tari_integration_tests", - "build_metric": true + "build_enabled": true, + "best_effort": true }, { "name": "linux-riscv64", @@ -22,7 +22,6 @@ "rust": "stable", "target": "riscv64gc-unknown-linux-gnu", "cross": true, - "flags": "--workspace --exclude minotari_mining_helper_ffi --exclude tari_integration_tests", "build_enabled": true, "best_effort": true }, @@ -46,8 +45,7 @@ "rust": "stable", "target": "x86_64-pc-windows-msvc", "cross": false, - 
"features": "safe", - "flags": "--workspace --exclude tari_libtor" + "features": "safe" }, { "name": "windows-arm64", @@ -55,9 +53,7 @@ "rust": "stable", "target": "aarch64-pc-windows-msvc", "cross": false, - "features": "safe", - "target_bins": "minotari_node, minotari_console_wallet, minotari_merge_mining_proxy, minotari_miner", - "flags": "--workspace --exclude tari_libtor", - "build_enabled": false + "build_enabled": true, + "best_effort": true } ] diff --git a/.github/workflows/build_binaries.yml b/.github/workflows/build_binaries.yml index 6bd01edb..1cff433d 100644 --- a/.github/workflows/build_binaries.yml +++ b/.github/workflows/build_binaries.yml @@ -18,14 +18,12 @@ name: Build Matrix of Binaries default: "development-tag" env: - TS_FILENAME: "tari_suite" + TS_FILENAME: "sha_p2pool" TS_BUNDLE_ID_BASE: "com.tarilabs" TS_SIG_FN: "sha256-unsigned.txt" ## Must be a JSon string - TS_FILES: '["minotari_node","minotari_console_wallet","minotari_miner","minotari_merge_mining_proxy"]' - TS_FEATURES: "default, safe" - TS_LIBRARIES: "minotari_mining_helper_ffi" - TARI_NETWORK_DIR: testnet + TS_FILES: '["sha_p2pool"]' + TS_FEATURES: "default" toolchain: nightly-2024-03-01 matrix-json-file: ".github/workflows/build_binaries.json" CARGO_HTTP_MULTIPLEXING: false @@ -93,7 +91,6 @@ jobs: needs: matrix-prep continue-on-error: ${{ matrix.builds.best_effort || false }} outputs: - TARI_NETWORK_DIR: ${{ steps.set-tari-network.outputs.TARI_NETWORK_DIR }} TARI_VERSION: ${{ steps.set-tari-vars.outputs.TARI_VERSION }} VSHA_SHORT: ${{ steps.set-tari-vars.outputs.VSHA_SHORT }} strategy: @@ -105,26 +102,6 @@ jobs: steps: - name: Checkout source code uses: actions/checkout@v4 - with: - submodules: recursive - - - name: Declare TestNet for tags - id: set-tari-network - # Don't forget to comment out the below if, when force testing with GHA_NETWORK - if: ${{ startsWith(github.ref, 'refs/tags/v') }} - env: - GHA_NETWORK: ${{ github.ref_name }} - # GHA_NETWORK: "v1.0.0-rc.4" - shell: bash 
- run: | - source buildtools/multinet_envs.sh ${{ env.GHA_NETWORK }} - echo ${TARI_NETWORK} - echo ${TARI_TARGET_NETWORK} - echo ${TARI_NETWORK_DIR} - echo "TARI_NETWORK=${TARI_NETWORK}" >> $GITHUB_ENV - echo "TARI_TARGET_NETWORK=${TARI_TARGET_NETWORK}" >> $GITHUB_ENV - echo "TARI_NETWORK_DIR=${TARI_NETWORK_DIR}" >> $GITHUB_ENV - echo "TARI_NETWORK_DIR=${TARI_NETWORK_DIR}" >> $GITHUB_OUTPUT - name: Declare Global Variables 4 GHA ${{ github.event_name }} id: set-tari-vars @@ -136,7 +113,7 @@ jobs: echo "VSHA_SHORT=${VSHA_SHORT}" >> $GITHUB_OUTPUT TARI_VERSION=$(awk -F ' = ' '$1 ~ /^version/ \ { gsub(/["]/, "", $2); printf("%s",$2) }' \ - "$GITHUB_WORKSPACE/applications/minotari_node/Cargo.toml") + "$GITHUB_WORKSPACE/Cargo.toml") echo "TARI_VERSION=${TARI_VERSION}" >> $GITHUB_ENV echo "TARI_VERSION=${TARI_VERSION}" >> $GITHUB_OUTPUT if [[ "${{ matrix.builds.features }}" == "" ]]; then @@ -155,17 +132,6 @@ jobs: TARGET_BINS+="--bin ${BIN_FILE} " done echo "TARGET_BINS=${TARGET_BINS}" >> $GITHUB_ENV - TARGET_LIBS="" - if [[ "${{ matrix.builds.target_libs }}" == "" ]]; then - ARRAY_LIBS=( $(echo ${TS_LIBRARIES} | tr ', ' '\n') ) - else - ARRAY_LIBS=( $(echo "${{ matrix.builds.target_libs }}" | tr ', ' '\n') ) - fi - for LIB_FILE in "${ARRAY_LIBS[@]}"; do - echo "Adding ${LIB_FILE} to library Builds" - TARGET_LIBS+="--package ${LIB_FILE} " - done - echo "TARGET_LIBS=${TARGET_LIBS}" >> $GITHUB_ENV TARI_BUILD_ISA_CPU=${{ matrix.builds.target }} # Strip unknown part TARI_BUILD_ISA_CPU=${TARI_BUILD_ISA_CPU//-unknown-linux-gnu} @@ -203,8 +169,9 @@ jobs: - name: Install macOS dependencies if: startsWith(runner.os,'macOS') run: | - # openssl, cmake and autoconf already installed - brew install zip coreutils automake protobuf + # Already installed items + # brew install openssl cmake autoconf zip + brew install coreutils automake protobuf rustup target add ${{ matrix.builds.target }} - name: Install Windows dependencies @@ -312,14 +279,6 @@ jobs: ${{ env.TARGET_BINS }} \ ${{ 
matrix.builds.flags }} --locked - - name: Build release libraries - shell: bash - run: | - ${{ env.CARGO }} build ${{ env.CARGO_OPTIONS }} \ - --target ${{ matrix.builds.target }} \ - --lib ${{ env.TARGET_LIBS }} \ - ${{ matrix.builds.flags }} --locked - - name: Copy binaries to folder for archiving shell: bash run: | @@ -354,41 +313,8 @@ jobs: cp -vf "${GITHUB_WORKSPACE}/target/${{ matrix.builds.target }}/release/${FILE}${LIB_EXT}" . fi done - if [ -f "${GITHUB_WORKSPACE}/applications/minotari_node/${PLATFORM_SPECIFIC_DIR}/runtime/start_tor${SHELL_EXT}" ]; then - cp -vf "${GITHUB_WORKSPACE}/applications/minotari_node/${PLATFORM_SPECIFIC_DIR}/runtime/start_tor${SHELL_EXT}" . - fi ls -alhtR ${{ env.MTS_SOURCE }} - - name: Build minotari_node with metrics too - if: ${{ matrix.builds.build_metric }} - shell: bash - run: | - ${{ env.CARGO }} build ${{ env.CARGO_OPTIONS }} \ - --target ${{ matrix.builds.target }} \ - --features "${{ env.BUILD_FEATURES }}, metrics" \ - --bin minotari_node \ - ${{ matrix.builds.flags }} --locked - cp -vf "$GITHUB_WORKSPACE/target/${{ matrix.builds.target }}/release/minotari_node${TS_EXT}" \ - "${{ env.MTS_SOURCE }}/minotari_node-metrics${TS_EXT}" - - - name: Build targeted miners - # if: ${{ ( startsWith(github.ref, 'refs/tags/v') ) && ( matrix.builds.miner_cpu_targets != '' ) }} - if: ${{ matrix.builds.miner_cpu_targets != '' }} - shell: bash - run: | - ARRAY_TARGETS=( $(echo "${{ matrix.builds.miner_cpu_targets }}" | tr ', ' '\n') ) - for CPU_TARGET in "${ARRAY_TARGETS[@]}"; do - echo "Target CPU ${CPU_TARGET} for miner" - export RUSTFLAGS="-C target-cpu=${CPU_TARGET}" - ${{ env.CARGO }} build ${{ env.CARGO_OPTIONS }} \ - --target ${{ matrix.builds.target }} \ - --features "${{ env.BUILD_FEATURES }}" \ - --bin minotari_miner \ - ${{ matrix.builds.flags }} --locked - cp -vf "$GITHUB_WORKSPACE/target/${{ matrix.builds.target }}/release/minotari_miner" \ - "${{ env.MTS_SOURCE }}/minotari_miner-${CPU_TARGET}" - done - - name: Pre/unsigned 
OSX Artifact upload for Archive # Debug if: ${{ false }} @@ -505,6 +431,7 @@ jobs: - name: Build the Windows installer if: startsWith(runner.os,'Windows') + continue-on-error: true shell: cmd run: | cd buildtools @@ -519,6 +446,7 @@ jobs: - name: Artifact upload for Windows installer if: startsWith(runner.os,'Windows') + continue-on-error: true uses: actions/upload-artifact@v4 with: name: "${{ env.TS_FILENAME }}_windows_installer" @@ -549,36 +477,6 @@ jobs: name: ${{ env.TS_FILENAME }}_archive-${{ matrix.builds.name }} path: "${{ github.workspace }}${{ env.TS_DIST }}/${{ env.BINFILE }}.zip*" - - name: Prep diag-utils archive for upload - continue-on-error: true - shell: bash - run: | - mkdir -p "${{ env.MTS_SOURCE }}-diag-utils" - cd "${{ env.MTS_SOURCE }}-diag-utils" - # Find RandomX built tools for testing - find "$GITHUB_WORKSPACE/target/${{ matrix.builds.target }}/release/" \ - -name "randomx-*${{ env.TS_EXT}}" -type f -perm -+x -exec cp -vf {} . \; - echo "Compute diag utils shasum" - ${SHARUN} * \ - >> "${{ env.TS_FILENAME }}_archive-diag-utils-${{ matrix.builds.name }}.sha256" - cat "${{ env.TS_FILENAME }}_archive-diag-utils-${{ matrix.builds.name }}.sha256" - echo "Checksum verification for diag utils is " - ${SHARUN} --check "${{ env.TS_FILENAME }}_archive-diag-utils-${{ matrix.builds.name }}.sha256" - 7z a "${{ env.TS_FILENAME }}_archive-diag-utils-${{ matrix.builds.name }}.zip" * - echo "Compute diag utils archive shasum" - ${SHARUN} "${{ env.TS_FILENAME }}_archive-diag-utils-${{ matrix.builds.name }}.zip" \ - >> "${{ env.TS_FILENAME }}_archive-diag-utils-${{ matrix.builds.name }}.zip.sha256" - cat "${{ env.TS_FILENAME }}_archive-diag-utils-${{ matrix.builds.name }}.zip.sha256" - echo "Checksum verification for diag utils archive is " - ${SHARUN} --check "${{ env.TS_FILENAME }}_archive-diag-utils-${{ matrix.builds.name }}.zip.sha256" - - - name: Artifact upload for diag-utils - continue-on-error: true - uses: actions/upload-artifact@v4 - with: - name: 
${{ env.TS_FILENAME }}_archive-diag-utils-${{ matrix.builds.name }} - path: "${{ github.workspace }}${{ env.TS_DIST }}-diag-utils/*.zip*" - macOS-universal-assemble: name: macOS universal assemble needs: builds @@ -779,7 +677,6 @@ jobs: needs: builds env: - TARI_NETWORK_DIR: ${{ needs.builds.outputs.TARI_NETWORK_DIR }} TARI_VERSION: ${{ needs.builds.outputs.TARI_VERSION }} permissions: @@ -837,10 +734,6 @@ jobs: ls -alhtR echo "Clean up" # Bash check if file with wildcards, does not work as expected - # if [ -f ${{ env.TS_FILENAME }}*diag-utils* ] ; then - if ls ${{ env.TS_FILENAME }}*diag-utils* > /dev/null 2>&1 ; then - rm -fv ${{ env.TS_FILENAME }}*diag-utils* - fi echo "Folder setup" if ls ${{ env.TS_FILENAME }}*linux* > /dev/null 2>&1 ; then mkdir -p "linux/${{ env.TARI_NETWORK_DIR }}/" diff --git a/.license.ignore b/.license.ignore new file mode 100644 index 00000000..e69de29b diff --git a/scripts/cross_compile_tooling.sh b/scripts/cross_compile_tooling.sh new file mode 100755 index 00000000..625c3403 --- /dev/null +++ b/scripts/cross_compile_tooling.sh @@ -0,0 +1,59 @@ +#!/usr/bin/env bash +# +# Move all cross-compiling steps into a single sourced script +# + +set -e + +printenv + +if [ "${TARGETARCH}" = "arm64" ] ; then + platform_env=aarch64 + export BUILD_TARGET="${platform_env}-unknown-linux-gnu/" + export RUST_TARGET="--target=${platform_env}-unknown-linux-gnu" + #export ARCH=${ARCH:-generic} + export CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER=${platform_env}-linux-gnu-gcc + export CC_aarch64_unknown_linux_gnu=${platform_env}-linux-gnu-gcc + export CXX_aarch64_unknown_linux_gnu=${platform_env}-linux-gnu-g++ + export BINDGEN_EXTRA_CLANG_ARGS="--sysroot /usr/${platform_env}-linux-gnu/include/" + #export RUSTFLAGS="-C target_cpu=$ARCH" + #export ROARING_ARCH=$ARCH + rustup target add ${platform_env}-unknown-linux-gnu + rustup toolchain install stable-${platform_env}-unknown-linux-gnu --force-non-host + + # Check for Debian + if [ -f 
"/etc/debian_version" ] ; then + dpkg --add-architecture ${TARGETARCH} + apt-get update || true + apt-get install -y pkg-config libssl-dev:${TARGETARCH} gcc-${platform_env}-linux-gnu g++-${platform_env}-linux-gnu + export AARCH64_UNKNOWN_LINUX_GNU_OPENSSL_INCLUDE_DIR=/usr/include/${platform_env}-linux-gnu/openssl/ + export PKG_CONFIG_ALLOW_CROSS=1 + fi + +elif [ "${TARGETARCH}" = "amd64" ] ; then + platform_env=x86_64 + platform_env_alt=x86-64 + export BUILD_TARGET="${platform_env}-unknown-linux-gnu/" + export RUST_TARGET="--target=${platform_env}-unknown-linux-gnu" + # https://gcc.gnu.org/onlinedocs/gcc/x86-Options.html + #export ARCH=${ARCH:-x86-64} + export CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_LINKER=${platform_env}-linux-gnu-gcc + export CC_x86_64_unknown_linux_gnu=${platform_env_alt}-linux-gnu-gcc + export CXX_x86_64_unknown_linux_gnu=${platform_env_alt}-linux-gnu-g++ + export BINDGEN_EXTRA_CLANG_ARGS="--sysroot /usr/${platform_env}-linux-gnu/include/" + rustup target add ${platform_env}-unknown-linux-gnu + rustup toolchain install stable-${platform_env}-unknown-linux-gnu --force-non-host + + # Check for Debian + if [ -f "/etc/debian_version" ] ; then + dpkg --add-architecture ${TARGETARCH} + apt-get update + apt-get install -y pkg-config libssl-dev:${TARGETARCH} gcc-${platform_env_alt}-linux-gnu g++-${platform_env_alt}-linux-gnu + export X86_64_UNKNOWN_LINUX_GNU_OPENSSL_INCLUDE_DIR=/usr/include/${platform_env}-linux-gnu/openssl/ + export PKG_CONFIG_ALLOW_CROSS=1 + fi + +else + echo "Need to source [ ${0##*/} ] with env TARGETARCH set to either arm64 or amd64" + exit 1 +fi diff --git a/scripts/cross_compile_ubuntu_18-pre-build.sh b/scripts/cross_compile_ubuntu_18-pre-build.sh new file mode 100755 index 00000000..e93f2a10 --- /dev/null +++ b/scripts/cross_compile_ubuntu_18-pre-build.sh @@ -0,0 +1,166 @@ +#!/usr/bin/env bash +# +# Single script for Ubuntu 18.04 package setup, mostly used for cross-compiling +# + +set -e + +USAGE="Usage: $0 target build ie: 
x86_64-unknown-linux-gnu or aarch64-unknown-linux-gnu" + +if [ "$#" == "0" ]; then + echo "$USAGE" + exit 1 +fi + +if [ -z "${CROSS_DEB_ARCH}" ]; then + echo "Should be run from cross, which sets the env CROSS_DEB_ARCH" + exit 1 +fi + +targetBuild="${1}" +nativeRunTime=$(uname -m) +echo "Native RunTime is ${nativeRunTime}" + +if [ "${nativeRunTime}" == "x86_64" ]; then + nativeArch=amd64 + if [ "${targetBuild}" == "aarch64-unknown-linux-gnu" ]; then + targetArch=arm64 + targetPlatform=aarch64 + else + targetArch=amd64 + targetPlatform=x86-64 + fi +elif [ "${nativeRunTime}" == "aarch64" ]; then + nativeArch=arm64 + if [ "${targetBuild}" == "x86_64-unknown-linux-gnu" ]; then + targetArch=amd64 + targetPlatform=x86-64 + fi +elif [ "${nativeRunTime}" == "riscv64" ]; then + nativeArch=riscv64 + echo "ToDo!" +else + echo "!!Unsupport platform!!" + exit 1 +fi + +crossArch=${CROSS_DEB_ARCH} +apt-get update + +# Base install packages +# scripts/install_ubuntu_dependencies.sh +apt-get install --no-install-recommends --assume-yes \ + apt-transport-https \ + ca-certificates \ + curl \ + gpg \ + bash \ + less \ + openssl \ + libssl-dev \ + pkg-config \ + libsqlite3-dev \ + libsqlite3-0 \ + libreadline-dev \ + git \ + cmake \ + dh-autoreconf \ + clang \ + g++ \ + libc++-dev \ + libc++abi-dev \ + libprotobuf-dev \ + protobuf-compiler \ + libncurses5-dev \ + libncursesw5-dev \ + libudev-dev \ + libhidapi-dev \ + zip + +echo "Installing rust ..." +mkdir -p "$HOME/.cargo/bin/" +curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y +export PATH="$HOME/.cargo/bin:$PATH" +. "$HOME/.cargo/env" + +# Cross-CPU compile setup +if [ "${CROSS_DEB_ARCH}" != "${nativeArch}" ]; then + echo "Setup Cross CPU Compile ..." + sed -i.save -e "s/^deb\ http/deb [arch="${nativeArch}"] http/g" /etc/apt/sources.list + + . 
/etc/lsb-release + ubuntu_tag=${DISTRIB_CODENAME} + + if [ "${crossArch}" == "arm64" ]; then + cat << EoF > /etc/apt/sources.list.d/${ubuntu_tag}-${crossArch}.list +deb [arch=${crossArch}] http://ports.ubuntu.com/ubuntu-ports ${ubuntu_tag} main restricted universe multiverse +# deb-src [arch=${crossArch}] http://ports.ubuntu.com/ubuntu-ports ${ubuntu_tag} main restricted universe multiverse + +deb [arch=${crossArch}] http://ports.ubuntu.com/ubuntu-ports ${ubuntu_tag}-updates main restricted universe multiverse +# deb-src [arch=${crossArch}] http://ports.ubuntu.com/ubuntu-ports ${ubuntu_tag}-updates main restricted universe multiverse + +deb [arch=${crossArch}] http://ports.ubuntu.com/ubuntu-ports ${ubuntu_tag}-backports main restricted universe multiverse +# deb-src [arch=${crossArch}] http://ports.ubuntu.com/ubuntu-ports ${ubuntu_tag}-backports main restricted universe multiverse + +deb [arch=${crossArch}] http://ports.ubuntu.com/ubuntu-ports ${ubuntu_tag}-security main restricted universe multiverse +# deb-src [arch=${crossArch}] http://ports.ubuntu.com/ubuntu-ports ${ubuntu_tag}-security main restricted universe multiverse + +deb [arch=${crossArch}] http://archive.canonical.com/ubuntu ${ubuntu_tag} partner +# deb-src [arch=${crossArch}] http://archive.canonical.com/ubuntu ${ubuntu_tag} partner +EoF + fi + + if [ "${crossArch}" == "amd64" ]; then + cat << EoF > /etc/apt/sources.list.d/${ubuntu_tag}-${crossArch}.list +deb [arch=amd64] http://archive.ubuntu.com/ubuntu/ ${ubuntu_tag} main restricted +# deb-src http://archive.ubuntu.com/ubuntu/ ${ubuntu_tag} main restricted + +deb [arch=amd64] http://archive.ubuntu.com/ubuntu/ ${ubuntu_tag}-updates main restricted +# deb-src http://archive.ubuntu.com/ubuntu/ ${ubuntu_tag}-updates main restricted + +deb [arch=amd64] http://archive.ubuntu.com/ubuntu/ ${ubuntu_tag} universe +# deb-src http://archive.ubuntu.com/ubuntu/ ${ubuntu_tag} universe +deb [arch=amd64] http://archive.ubuntu.com/ubuntu/ ${ubuntu_tag}-updates 
universe +# deb-src http://archive.ubuntu.com/ubuntu/ ${ubuntu_tag}-updates universe + +deb [arch=amd64] http://archive.ubuntu.com/ubuntu/ ${ubuntu_tag} multiverse +# deb-src http://archive.ubuntu.com/ubuntu/ ${ubuntu_tag} multiverse +deb [arch=amd64] http://archive.ubuntu.com/ubuntu/ ${ubuntu_tag}-updates multiverse +# deb-src http://archive.ubuntu.com/ubuntu/ ${ubuntu_tag}-updates multiverse + +deb [arch=amd64] http://archive.ubuntu.com/ubuntu/ ${ubuntu_tag}-backports main restricted universe multiverse +# deb-src http://archive.ubuntu.com/ubuntu/ ${ubuntu_tag}-backports main restricted universe multiverse + +# deb http://archive.canonical.com/ubuntu ${ubuntu_tag} partner +# deb-src http://archive.canonical.com/ubuntu ${ubuntu_tag} partner + +deb [arch=amd64] http://security.ubuntu.com/ubuntu/ ${ubuntu_tag}-security main restricted +# deb-src http://security.ubuntu.com/ubuntu/ ${ubuntu_tag}-security main restricted +deb [arch=amd64] http://security.ubuntu.com/ubuntu/ ${ubuntu_tag}-security universe +# deb-src http://security.ubuntu.com/ubuntu/ ${ubuntu_tag}-security universe +deb [arch=amd64] http://security.ubuntu.com/ubuntu/ ${ubuntu_tag}-security multiverse +# deb-src http://security.ubuntu.com/ubuntu/ ${ubuntu_tag}-security multiverse +EoF + fi + + dpkg --add-architecture ${CROSS_DEB_ARCH} + apt-get update + + # scripts/install_ubuntu_dependencies-cross_compile.sh x86-64 + apt-get --assume-yes install \ + pkg-config-${targetPlatform}-linux-gnu \ + gcc-${targetPlatform}-linux-gnu \ + g++-${targetPlatform}-linux-gnu + + # packages needed for Ledger and hidapi + apt-get --assume-yes install \ + libudev-dev:${CROSS_DEB_ARCH} \ + libhidapi-dev:${CROSS_DEB_ARCH} + +fi + +rustup target add ${targetBuild} +rustup toolchain install stable-${targetBuild} --force-non-host + +rustup target list --installed +rustup toolchain list diff --git a/scripts/file_license_check.sh b/scripts/file_license_check.sh new file mode 100755 index 00000000..8985052d --- /dev/null +++ 
b/scripts/file_license_check.sh @@ -0,0 +1,40 @@ +#!/usr/bin/env bash +# +# Must be run from the repo root +# + +set -e + +# rg -i "Copyright.*The Tari Project" --files-without-match \ +# -g '!*.{Dockerfile,asc,bat,config,config.js,css,csv,drawio,env,gitkeep,hbs,html,ini,iss,json,lock,md,min.js,ps1,py,rc,scss,sh,sql,svg,toml,txt,yml,vue}' . \ +# | sort > /tmp/rgtemp + +# Exclude files without extensions as well as those with extensions that are not in the list +# +rgTemp=$(mktemp) +rg -i "Copyright.*The Tari Project" --files-without-match \ + -g '!*.{Dockerfile,asc,bat,config,config.js,css,csv,drawio,env,gitkeep,hbs,html,ini,iss,json,lock,md,min.js,ps1,py,rc,scss,sh,sql,svg,toml,txt,yml,vue}' . \ + | while IFS= read -r file; do + if [[ -n $(basename "$file" | grep -E '\.') ]]; then + echo "$file" + fi + done | sort > ${rgTemp} + +# Sort the .license.ignore file as sorting seems to behave differently on different platforms +licenseIgnoreTemp=$(mktemp) +cat .license.ignore | sort > ${licenseIgnoreTemp} + +DIFFS=$(diff -u --strip-trailing-cr ${licenseIgnoreTemp} ${rgTemp}) + +# clean up +rm -vf ${rgTemp} +rm -vf ${licenseIgnoreTemp} + +if [ -n "$DIFFS" ]; then + echo "New files detected that either need copyright/license identifiers added, or they need to be added to .license.ignore" + echo "NB: The ignore file must be sorted alphabetically!" 
+ + echo "Diff:" + echo "$DIFFS" + exit 1 +fi diff --git a/scripts/install_ubuntu_dependencies-cross_compile.sh b/scripts/install_ubuntu_dependencies-cross_compile.sh new file mode 100755 index 00000000..79ce3310 --- /dev/null +++ b/scripts/install_ubuntu_dependencies-cross_compile.sh @@ -0,0 +1,21 @@ +#!/usr/bin/env sh +# +# Install Ubuntu aarch64(arm64)/riscv64 deb dev/tool packages on x86_64 +# + +set -e + +USAGE="Usage: $0 ISA_ARCH other packages, ie aarch64" + +if [ "$#" == "0" ]; then + echo "$USAGE" + exit 1 +fi + +isa_arch=${1} +shift + +apt-get --assume-yes install $* \ + pkg-config-${isa_arch}-linux-gnu \ + gcc-${isa_arch}-linux-gnu \ + g++-${isa_arch}-linux-gnu diff --git a/scripts/install_ubuntu_dependencies-rust-arm64.sh b/scripts/install_ubuntu_dependencies-rust-arm64.sh new file mode 100755 index 00000000..a17edcae --- /dev/null +++ b/scripts/install_ubuntu_dependencies-rust-arm64.sh @@ -0,0 +1,10 @@ +#!/usr/bin/env sh +# +# Install rust target/toolchain for aarch64/arm64 +# + +set -e + +export PATH="$HOME/.cargo/bin:$PATH" +rustup target add aarch64-unknown-linux-gnu +rustup toolchain install stable-aarch64-unknown-linux-gnu --force-non-host diff --git a/scripts/install_ubuntu_dependencies-rust.sh b/scripts/install_ubuntu_dependencies-rust.sh new file mode 100755 index 00000000..3ac1a9f1 --- /dev/null +++ b/scripts/install_ubuntu_dependencies-rust.sh @@ -0,0 +1,10 @@ +#!/usr/bin/env sh +# +# Install rust unattended - https://www.rust-lang.org/tools/install +# + +set -e + +curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y +export PATH="$HOME/.cargo/bin:$PATH" +. 
"$HOME/.cargo/env" diff --git a/scripts/install_ubuntu_dependencies.sh b/scripts/install_ubuntu_dependencies.sh new file mode 100755 index 00000000..b326d220 --- /dev/null +++ b/scripts/install_ubuntu_dependencies.sh @@ -0,0 +1,27 @@ +apt-get install --no-install-recommends --assume-yes \ + apt-transport-https \ + ca-certificates \ + curl \ + gpg \ + bash \ + less \ + openssl \ + libssl-dev \ + pkg-config \ + libsqlite3-dev \ + libsqlite3-0 \ + libreadline-dev \ + git \ + cmake \ + dh-autoreconf \ + clang \ + g++ \ + libc++-dev \ + libc++abi-dev \ + libprotobuf-dev \ + protobuf-compiler \ + libncurses5-dev \ + libncursesw5-dev \ + libudev-dev \ + libhidapi-dev \ + zip From 0e0f36362c894fcce7b7da4f359ce6ef113ef507 Mon Sep 17 00:00:00 2001 From: richardb Date: Tue, 2 Jul 2024 11:48:01 +0200 Subject: [PATCH 38/43] small updates --- Cargo.lock | 1 + Cargo.toml | 1 + Cross.toml | 6 ++++ src/main.rs | 14 ++++----- src/server/p2p/p2p.rs | 61 +++++++++++++++++++++---------------- src/server/server.rs | 17 +++++------ src/sharechain/in_memory.rs | 5 +-- 7 files changed, 60 insertions(+), 45 deletions(-) create mode 100644 Cross.toml diff --git a/Cargo.lock b/Cargo.lock index dd8cfb7b..7624ca12 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4059,6 +4059,7 @@ dependencies = [ "moka", "serde", "serde_cbor", + "tari_common", "tari_common_types", "tari_core", "tari_utilities", diff --git a/Cargo.toml b/Cargo.toml index 5411890f..99e076f2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -12,6 +12,7 @@ edition = "2021" minotari_app_grpc = { git = "https://github.com/ksrichard/tari.git", branch = "p2pool" } minotari_node_grpc_client = { git = "https://github.com/ksrichard/tari.git", branch = "p2pool" } tari_common_types = { git = "https://github.com/ksrichard/tari.git", branch = "p2pool" } +tari_common = { git = "https://github.com/ksrichard/tari.git", branch = "p2pool" } tari_core = { git = "https://github.com/ksrichard/tari.git", branch = "p2pool" } tari_utilities = { version = "0.7", 
features = ["borsh"] } diff --git a/Cross.toml b/Cross.toml new file mode 100644 index 00000000..029accd1 --- /dev/null +++ b/Cross.toml @@ -0,0 +1,6 @@ +[target.x86_64-unknown-linux-gnu] +pre-build = [ + "dpkg --add-architecture $CROSS_DEB_ARCH", + "apt-get update", + "apt-get install --no-install-recommends --assume-yes apt-transport-https ca-certificates curl gpg bash less openssl libssl-dev pkg-config libsqlite3-dev:$CROSS_DEB_ARCH libsqlite3-0:$CROSS_DEB_ARCH libreadline-dev git cmake dh-autoreconf clang g++ libc++-dev libc++abi-dev libprotobuf-dev protobuf-compiler:$CROSS_DEB_ARCH libncurses5-dev libncursesw5-dev libudev-dev libhidapi-dev zip" +] \ No newline at end of file diff --git a/src/main.rs b/src/main.rs index 66c0937c..6a4ca3a6 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,7 +1,7 @@ use std::path::PathBuf; -use clap::builder::styling::AnsiColor; use clap::builder::Styles; +use clap::builder::styling::AnsiColor; use clap::Parser; use env_logger::Builder; use log::LevelFilter; @@ -66,13 +66,13 @@ struct Cli { )] private_key_folder: PathBuf, - /// Mining enabled + /// Mining disabled /// - /// In case it is set to false, the node will only handle p2p operations, + /// In case it is set, the node will only handle p2p operations, /// will be syncing with share chain, but not starting gRPC services and no Tari base node needed. - /// By setting this to, false it can be used as a stable node for routing only. - #[arg(long, value_name = "mining-enabled", default_value_t = true)] - mining_enabled: bool, + /// By setting this it can be used as a stable node for routing only. 
+ #[arg(long, value_name = "mining-disabled", default_value_t = false)] + mining_disabled: bool, } #[tokio::main] @@ -92,7 +92,7 @@ async fn main() -> anyhow::Result<()> { } config_builder.with_stable_peer(cli.stable_peer); config_builder.with_private_key_folder(cli.private_key_folder); - config_builder.with_mining_enabled(cli.mining_enabled); + config_builder.with_mining_enabled(!cli.mining_disabled); // server start let config = config_builder.build(); diff --git a/src/server/p2p/p2p.rs b/src/server/p2p/p2p.rs index 7e5d4d22..5c38d3d8 100644 --- a/src/server/p2p/p2p.rs +++ b/src/server/p2p/p2p.rs @@ -3,35 +3,36 @@ use std::path::PathBuf; use std::sync::Arc; use std::time::Duration; +use libp2p::{ + gossipsub, kad, mdns, Multiaddr, noise, request_response, StreamProtocol, Swarm, tcp, yamux, +}; use libp2p::futures::StreamExt; use libp2p::gossipsub::{IdentTopic, Message, PublishError}; use libp2p::identity::Keypair; -use libp2p::kad::store::MemoryStore; use libp2p::kad::{Event, Mode}; +use libp2p::kad::store::MemoryStore; use libp2p::mdns::tokio::Tokio; use libp2p::multiaddr::Protocol; use libp2p::request_response::{cbor, ResponseChannel}; use libp2p::swarm::{NetworkBehaviour, SwarmEvent}; -use libp2p::{ - gossipsub, kad, mdns, noise, request_response, tcp, yamux, Multiaddr, StreamProtocol, Swarm, -}; use log::{debug, error, info, warn}; +use tari_common::configuration::Network; use tari_utilities::hex::Hex; +use tokio::{io, select}; use tokio::fs::File; use tokio::io::{AsyncReadExt, AsyncWriteExt}; -use tokio::sync::broadcast::error::RecvError; use tokio::sync::{broadcast, mpsc}; -use tokio::{io, select}; +use tokio::sync::broadcast::error::RecvError; use crate::server::config; +use crate::server::p2p::{ + client, Error, LibP2PError, messages, ServiceClient, ServiceClientChannels, +}; use crate::server::p2p::messages::{ PeerInfo, ShareChainSyncRequest, ShareChainSyncResponse, ValidateBlockRequest, ValidateBlockResult, }; use 
crate::server::p2p::peer_store::PeerStore; -use crate::server::p2p::{ - client, messages, Error, LibP2PError, ServiceClient, ServiceClientChannels, -}; use crate::sharechain::block::Block; use crate::sharechain::ShareChain; @@ -75,8 +76,8 @@ pub struct ServerNetworkBehaviour { /// Service is the implementation that holds every peer-to-peer related logic /// that makes sure that all the communications, syncing, broadcasting etc... are done. pub struct Service -where - S: ShareChain + Send + Sync + 'static, + where + S: ShareChain + Send + Sync + 'static, { swarm: Swarm, port: u16, @@ -96,8 +97,8 @@ where } impl Service -where - S: ShareChain + Send + Sync + 'static, + where + S: ShareChain + Send + Sync + 'static, { /// Constructs a new Service from the provided config. /// It also instantiates libp2p swarm inside. @@ -202,7 +203,7 @@ where mdns::Config::default(), key_pair.public().to_peer_id(), ) - .map_err(|e| Error::LibP2P(LibP2PError::IO(e)))?, + .map_err(|e| Error::LibP2P(LibP2PError::IO(e)))?, share_chain_sync: cbor::Behaviour::< ShareChainSyncRequest, ShareChainSyncResponse, @@ -260,7 +261,7 @@ where match request_raw_result { Ok(request_raw) => { if let Err(error) = self.swarm.behaviour_mut().gossipsub.publish( - IdentTopic::new(BLOCK_VALIDATION_REQUESTS_TOPIC), + IdentTopic::new(Self::topic_name(BLOCK_VALIDATION_REQUESTS_TOPIC)), request_raw, ) { error!(target: LOG_TARGET, "Failed to send block validation request: {error:?}"); @@ -286,7 +287,7 @@ where .swarm .behaviour_mut() .gossipsub - .publish(IdentTopic::new(BLOCK_VALIDATION_RESULTS_TOPIC), result_raw) + .publish(IdentTopic::new(Self::topic_name(BLOCK_VALIDATION_RESULTS_TOPIC)), result_raw) { error!(target: LOG_TARGET, "Failed to publish block validation result: {error:?}"); } @@ -309,7 +310,7 @@ where self.swarm .behaviour_mut() .gossipsub - .publish(IdentTopic::new(PEER_INFO_TOPIC), peer_info_raw) + .publish(IdentTopic::new(Self::topic_name(PEER_INFO_TOPIC)), peer_info_raw) .map_err(|error| 
Error::LibP2P(LibP2PError::Publish(error)))?; Ok(()) @@ -326,7 +327,7 @@ where .swarm .behaviour_mut() .gossipsub - .publish(IdentTopic::new(NEW_BLOCK_TOPIC), block_raw) + .publish(IdentTopic::new(Self::topic_name(NEW_BLOCK_TOPIC)), block_raw) .map_err(|error| Error::LibP2P(LibP2PError::Publish(error))) { Ok(_) => {} @@ -344,12 +345,19 @@ where } } + /// Generates the gossip sub topic names based on the current Tari network to avoid mixing up + /// blocks and peers with different Tari networks. + fn topic_name(topic: &str) -> String { + let network = Network::get_current_or_user_setting_or_default().to_string(); + format!("{network}_{topic}") + } + /// Subscribing to a gossipsub topic. fn subscribe(&mut self, topic: &str) { self.swarm .behaviour_mut() .gossipsub - .subscribe(&IdentTopic::new(topic)) + .subscribe(&IdentTopic::new(Self::topic_name(topic))) .expect("must be subscribed to topic"); } @@ -370,9 +378,10 @@ where } let peer = peer.unwrap(); - let topic = message.topic.as_str(); + let topic = message.topic.to_string(); + match topic { - PEER_INFO_TOPIC => match messages::PeerInfo::try_from(message) { + topic if topic == Self::topic_name(PEER_INFO_TOPIC) => match messages::PeerInfo::try_from(message) { Ok(payload) => { debug!(target: LOG_TARGET, "New peer info: {peer:?} -> {payload:?}"); self.peer_store.add(peer, payload).await; @@ -388,7 +397,7 @@ where error!(target: LOG_TARGET, "Can't deserialize peer info payload: {:?}", error); } }, - BLOCK_VALIDATION_REQUESTS_TOPIC => { + topic if topic == Self::topic_name(BLOCK_VALIDATION_REQUESTS_TOPIC) => { match messages::ValidateBlockRequest::try_from(message) { Ok(payload) => { debug!(target: LOG_TARGET, "Block validation request: {payload:?}"); @@ -418,7 +427,7 @@ where } } } - BLOCK_VALIDATION_RESULTS_TOPIC => { + topic if topic == Self::topic_name(BLOCK_VALIDATION_RESULTS_TOPIC) => { match messages::ValidateBlockResult::try_from(message) { Ok(payload) => { let mut senders_to_delete = vec![]; @@ -438,7 +447,7 
@@ where } } // TODO: send a signature that proves that the actual block was coming from this peer - NEW_BLOCK_TOPIC => match Block::try_from(message) { + topic if topic == Self::topic_name(NEW_BLOCK_TOPIC) => match Block::try_from(message) { Ok(payload) => { info!(target: LOG_TARGET,"🆕 New block from broadcast: {:?}", &payload.hash().to_hex()); if let Err(error) = self.share_chain.submit_block(&payload).await { @@ -449,7 +458,7 @@ where error!(target: LOG_TARGET, "Can't deserialize broadcast block payload: {:?}", error); } }, - &_ => { + _ => { warn!(target: LOG_TARGET, "Unknown topic {topic:?}!"); } } @@ -493,7 +502,7 @@ where while self.peer_store.tip_of_block_height().await.is_none() {} // waiting for the highest blockchain match self.peer_store.tip_of_block_height().await { Some(result) => { - debug!(target: LOG_TARGET, "Found highet block height: {result:?}"); + debug!(target: LOG_TARGET, "Found highest block height: {result:?}"); match self.share_chain.tip_height().await { Ok(tip) => { debug!(target: LOG_TARGET, "Send share chain sync request: {result:?}"); diff --git a/src/server/server.rs b/src/server/server.rs index a6328d55..dab1e8e9 100644 --- a/src/server/server.rs +++ b/src/server/server.rs @@ -7,10 +7,10 @@ use minotari_app_grpc::tari_rpc::base_node_server::BaseNodeServer; use minotari_app_grpc::tari_rpc::sha_p2_pool_server::ShaP2PoolServer; use thiserror::Error; +use crate::server::{config, grpc, p2p}; use crate::server::grpc::base_node::TariBaseNodeGrpc; use crate::server::grpc::error::TonicError; use crate::server::grpc::p2pool::ShaP2PoolGrpc; -use crate::server::{config, grpc, p2p}; use crate::sharechain::ShareChain; const LOG_TARGET: &str = "server"; @@ -27,8 +27,8 @@ pub enum Error { /// Server represents the server running all the necessary components for sha-p2pool. 
pub struct Server -where - S: ShareChain + Send + Sync + 'static, + where + S: ShareChain + Send + Sync + 'static, { config: config::Config, p2p_service: p2p::Service, @@ -38,15 +38,12 @@ where // TODO: add graceful shutdown impl Server -where - S: ShareChain + Send + Sync + 'static, + where + S: ShareChain + Send + Sync + 'static, { pub async fn new(config: config::Config, share_chain: S) -> Result { let share_chain = Arc::new(share_chain); - // TODO: have base node's network here and pass to p2p_service to be able to subscribe to the right gossipsub topics - // TODO: se we are not mixing main net and test net blocks. - let mut p2p_service: p2p::Service = p2p::Service::new(&config, share_chain.clone()) .await .map_err(Error::P2PService)?; @@ -64,8 +61,8 @@ where p2p_service.client(), share_chain.clone(), ) - .await - .map_err(Error::Grpc)?; + .await + .map_err(Error::Grpc)?; p2pool_server = Some(ShaP2PoolServer::new(p2pool_grpc_service)); } diff --git a/src/sharechain/in_memory.rs b/src/sharechain/in_memory.rs index bd39e9e5..4deddc07 100644 --- a/src/sharechain/in_memory.rs +++ b/src/sharechain/in_memory.rs @@ -10,8 +10,8 @@ use tari_utilities::epoch_time::EpochTime; use tari_utilities::hex::Hex; use tokio::sync::{RwLock, RwLockWriteGuard}; +use crate::sharechain::{Block, MAX_BLOCKS_COUNT, SHARE_COUNT, ShareChain, ShareChainResult}; use crate::sharechain::error::{BlockConvertError, Error}; -use crate::sharechain::{Block, ShareChain, ShareChainResult, MAX_BLOCKS_COUNT, SHARE_COUNT}; const LOG_TARGET: &str = "in_memory_share_chain"; @@ -100,7 +100,8 @@ impl InMemoryShareChain { } } else if !clear_before_add && last_block.is_none() { return Err(Error::Empty); - } else if clear_before_add { + } else if (clear_before_add && last_block.is_none()) || + (clear_before_add && last_block.is_some() && last_block.unwrap().height() < block.height()) { // if we are synchronizing blocks, we trust we receive all the valid blocks blocks.clear(); } From 
18127e3722468f1f00cf025269642d4225444306 Mon Sep 17 00:00:00 2001 From: richardb Date: Tue, 2 Jul 2024 11:58:55 +0200 Subject: [PATCH 39/43] small updates --- src/sharechain/in_memory.rs | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/src/sharechain/in_memory.rs b/src/sharechain/in_memory.rs index 4deddc07..a8065d9f 100644 --- a/src/sharechain/in_memory.rs +++ b/src/sharechain/in_memory.rs @@ -100,10 +100,6 @@ impl InMemoryShareChain { } } else if !clear_before_add && last_block.is_none() { return Err(Error::Empty); - } else if (clear_before_add && last_block.is_none()) || - (clear_before_add && last_block.is_some() && last_block.unwrap().height() < block.height()) { - // if we are synchronizing blocks, we trust we receive all the valid blocks - blocks.clear(); } if blocks.len() >= self.max_blocks_count { @@ -132,6 +128,13 @@ impl ShareChain for InMemoryShareChain { async fn submit_blocks(&self, blocks: Vec, mut sync: bool) -> ShareChainResult<()> { let mut blocks_write_lock = self.blocks.write().await; + + let last_block = blocks_write_lock.last(); + if (sync && last_block.is_none()) || + (sync && last_block.is_some() && last_block.unwrap().height() < blocks[0].height()) { + blocks_write_lock.clear(); + } + for block in blocks { self.submit_block_with_lock(&mut blocks_write_lock, &block, sync) .await?; From ebd88abdfa0a050bdcf27ef4fc9259bc6f60933f Mon Sep 17 00:00:00 2001 From: richardb Date: Tue, 2 Jul 2024 12:22:48 +0200 Subject: [PATCH 40/43] small updates --- src/server/grpc/p2pool.rs | 26 ++++++++++++++------------ src/sharechain/in_memory.rs | 18 +++++++++++------- 2 files changed, 25 insertions(+), 19 deletions(-) diff --git a/src/server/grpc/p2pool.rs b/src/server/grpc/p2pool.rs index a6fffce4..bddf1ddd 100644 --- a/src/server/grpc/p2pool.rs +++ b/src/server/grpc/p2pool.rs @@ -1,13 +1,13 @@ use std::sync::Arc; use log::{debug, error, info}; -use minotari_app_grpc::tari_rpc::base_node_client::BaseNodeClient; -use 
minotari_app_grpc::tari_rpc::pow_algo::PowAlgos; -use minotari_app_grpc::tari_rpc::sha_p2_pool_server::ShaP2Pool; use minotari_app_grpc::tari_rpc::{ GetNewBlockRequest, GetNewBlockResponse, GetNewBlockTemplateWithCoinbasesRequest, HeightRequest, NewBlockTemplateRequest, PowAlgo, SubmitBlockRequest, SubmitBlockResponse, }; +use minotari_app_grpc::tari_rpc::base_node_client::BaseNodeClient; +use minotari_app_grpc::tari_rpc::pow_algo::PowAlgos; +use minotari_app_grpc::tari_rpc::sha_p2_pool_server::ShaP2Pool; use tari_core::proof_of_work::sha3x_difficulty; use tokio::sync::Mutex; use tonic::{Request, Response, Status}; @@ -16,13 +16,15 @@ use crate::server::grpc::error::Error; use crate::server::grpc::util; use crate::server::p2p; use crate::sharechain::block::Block; -use crate::sharechain::ShareChain; use crate::sharechain::SHARE_COUNT; +use crate::sharechain::ShareChain; + +const LOG_TARGET: &str = "p2pool_grpc"; /// P2Pool specific gRPC service to provide `get_new_block` and `submit_block` functionalities. pub struct ShaP2PoolGrpc -where - S: ShareChain + Send + Sync + 'static, + where + S: ShareChain + Send + Sync + 'static, { /// Base node client client: Arc>>, @@ -33,8 +35,8 @@ where } impl ShaP2PoolGrpc -where - S: ShareChain + Send + Sync + 'static, + where + S: ShareChain + Send + Sync + 'static, { pub async fn new( base_node_address: String, @@ -53,9 +55,9 @@ where /// Submits a new block to share chain and broadcasts to the p2p network. 
pub async fn submit_share_chain_block(&self, block: &Block) -> Result<(), Status> { if let Err(error) = self.share_chain.submit_block(block).await { - error!("Failed to add new block: {error:?}"); + error!(target: LOG_TARGET, "Failed to add new block: {error:?}"); } - debug!("Broadcast new block with height: {:?}", block.height()); + debug!(target: LOG_TARGET, "Broadcast new block with height: {:?}", block.height()); self.p2p_client .broadcast_block(block) .await @@ -65,8 +67,8 @@ where #[tonic::async_trait] impl ShaP2Pool for ShaP2PoolGrpc -where - S: ShareChain + Send + Sync + 'static, + where + S: ShareChain + Send + Sync + 'static, { /// Returns a new block (that can be mined) which contains all the shares generated /// from the current share chain as coinbase transactions. diff --git a/src/sharechain/in_memory.rs b/src/sharechain/in_memory.rs index a8065d9f..a76424ae 100644 --- a/src/sharechain/in_memory.rs +++ b/src/sharechain/in_memory.rs @@ -87,21 +87,26 @@ impl InMemoryShareChain { &self, blocks: &mut RwLockWriteGuard<'_, Vec>, block: &Block, - clear_before_add: bool, + in_sync: bool, ) -> ShareChainResult<()> { let block = block.clone(); let last_block = blocks.last(); - - // validate - if !clear_before_add && last_block.is_some() { + if in_sync && last_block.is_some() { + // validate if !self.validate_block(last_block.unwrap(), &block).await? { return Err(Error::InvalidBlock(block)); } - } else if !clear_before_add && last_block.is_none() { + } else if !in_sync && last_block.is_none() { return Err(Error::Empty); + } else if !in_sync && last_block.is_some() { + // validate + if !self.validate_block(last_block.unwrap(), &block).await? 
{ + return Err(Error::InvalidBlock(block)); + } } + if blocks.len() >= self.max_blocks_count { let diff = blocks.len() - self.max_blocks_count; blocks.drain(0..diff); @@ -126,7 +131,7 @@ impl ShareChain for InMemoryShareChain { .await } - async fn submit_blocks(&self, blocks: Vec, mut sync: bool) -> ShareChainResult<()> { + async fn submit_blocks(&self, blocks: Vec, sync: bool) -> ShareChainResult<()> { let mut blocks_write_lock = self.blocks.write().await; let last_block = blocks_write_lock.last(); @@ -138,7 +143,6 @@ impl ShareChain for InMemoryShareChain { for block in blocks { self.submit_block_with_lock(&mut blocks_write_lock, &block, sync) .await?; - sync = false; } Ok(()) From 3fb98b9d1f6fda7dc087d9853a2dfeea7fc00632 Mon Sep 17 00:00:00 2001 From: richardb Date: Tue, 2 Jul 2024 13:37:23 +0200 Subject: [PATCH 41/43] small updates --- src/server/grpc/p2pool.rs | 4 ++-- src/server/p2p/p2p.rs | 14 +++++++------- src/sharechain/in_memory.rs | 6 ++++-- 3 files changed, 13 insertions(+), 11 deletions(-) diff --git a/src/server/grpc/p2pool.rs b/src/server/grpc/p2pool.rs index bddf1ddd..4fc1cc78 100644 --- a/src/server/grpc/p2pool.rs +++ b/src/server/grpc/p2pool.rs @@ -1,6 +1,6 @@ use std::sync::Arc; -use log::{debug, error, info}; +use log::{debug, error, info, warn}; use minotari_app_grpc::tari_rpc::{ GetNewBlockRequest, GetNewBlockResponse, GetNewBlockTemplateWithCoinbasesRequest, HeightRequest, NewBlockTemplateRequest, PowAlgo, SubmitBlockRequest, SubmitBlockResponse, @@ -55,7 +55,7 @@ impl ShaP2PoolGrpc /// Submits a new block to share chain and broadcasts to the p2p network. 
pub async fn submit_share_chain_block(&self, block: &Block) -> Result<(), Status> { if let Err(error) = self.share_chain.submit_block(block).await { - error!(target: LOG_TARGET, "Failed to add new block: {error:?}"); + warn!(target: LOG_TARGET, "Failed to add new block: {error:?}"); } debug!(target: LOG_TARGET, "Broadcast new block with height: {:?}", block.height()); self.p2p_client diff --git a/src/server/p2p/p2p.rs b/src/server/p2p/p2p.rs index 5c38d3d8..cda588a8 100644 --- a/src/server/p2p/p2p.rs +++ b/src/server/p2p/p2p.rs @@ -385,13 +385,13 @@ impl Service Ok(payload) => { debug!(target: LOG_TARGET, "New peer info: {peer:?} -> {payload:?}"); self.peer_store.add(peer, payload).await; - if let Some(tip) = self.peer_store.tip_of_block_height().await { - if let Ok(curr_height) = self.share_chain.tip_height().await { - if curr_height < tip.height { - self.sync_share_chain().await; - } - } - } + // if let Some(tip) = self.peer_store.tip_of_block_height().await { + // if let Ok(curr_height) = self.share_chain.tip_height().await { + // if curr_height < tip.height { + self.sync_share_chain().await; + // } + // } + // } } Err(error) => { error!(target: LOG_TARGET, "Can't deserialize peer info payload: {:?}", error); diff --git a/src/sharechain/in_memory.rs b/src/sharechain/in_memory.rs index a76424ae..4205caf2 100644 --- a/src/sharechain/in_memory.rs +++ b/src/sharechain/in_memory.rs @@ -2,7 +2,7 @@ use std::collections::HashMap; use std::sync::Arc; use async_trait::async_trait; -use log::{debug, info, warn}; +use log::{debug, error, info, warn}; use minotari_app_grpc::tari_rpc::{NewBlockCoinbase, SubmitBlockRequest}; use tari_common_types::tari_address::TariAddress; use tari_core::blocks::BlockHeader; @@ -95,6 +95,7 @@ impl InMemoryShareChain { if in_sync && last_block.is_some() { // validate if !self.validate_block(last_block.unwrap(), &block).await? 
{ + error!(target: LOG_TARGET, "Invalid block!"); return Err(Error::InvalidBlock(block)); } } else if !in_sync && last_block.is_none() { @@ -102,6 +103,7 @@ impl InMemoryShareChain { } else if !in_sync && last_block.is_some() { // validate if !self.validate_block(last_block.unwrap(), &block).await? { + error!(target: LOG_TARGET, "Invalid block!"); return Err(Error::InvalidBlock(block)); } } @@ -136,7 +138,7 @@ impl ShareChain for InMemoryShareChain { let last_block = blocks_write_lock.last(); if (sync && last_block.is_none()) || - (sync && last_block.is_some() && last_block.unwrap().height() < blocks[0].height()) { + (sync && last_block.is_some() && !blocks.is_empty() && last_block.unwrap().height() < blocks[0].height()) { blocks_write_lock.clear(); } From 9d7881492e3899807ecea25cb487847b6bfc5172 Mon Sep 17 00:00:00 2001 From: richardb Date: Tue, 2 Jul 2024 14:35:05 +0200 Subject: [PATCH 42/43] small updates --- src/server/p2p/client.rs | 2 +- src/server/p2p/p2p.rs | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/src/server/p2p/client.rs b/src/server/p2p/client.rs index 7b4b37e5..b262a338 100644 --- a/src/server/p2p/client.rs +++ b/src/server/p2p/client.rs @@ -4,8 +4,8 @@ use std::time::{Duration, Instant}; use log::{debug, error, warn}; use thiserror::Error; use tokio::select; -use tokio::sync::broadcast::error::{RecvError, SendError}; use tokio::sync::{broadcast, mpsc, Mutex}; +use tokio::sync::broadcast::error::{RecvError, SendError}; use tokio::time::sleep; use crate::server::p2p::messages::{ValidateBlockRequest, ValidateBlockResult}; diff --git a/src/server/p2p/p2p.rs b/src/server/p2p/p2p.rs index cda588a8..7cf9db51 100644 --- a/src/server/p2p/p2p.rs +++ b/src/server/p2p/p2p.rs @@ -493,6 +493,7 @@ impl Service debug!(target: LOG_TARGET, "Share chain sync response: {response:?}"); if let Err(error) = self.share_chain.submit_blocks(response.blocks, true).await { error!(target: LOG_TARGET, "Failed to add synced blocks to share chain: 
{error:?}"); + self.sync_share_chain().await; } } From 3f3d50adce957f2210554239778a6122c95256ef Mon Sep 17 00:00:00 2001 From: richardb Date: Wed, 3 Jul 2024 08:16:26 +0200 Subject: [PATCH 43/43] added new configs and copyright --- .cargo/config.toml | 8 + ci_all.sh | 6 + rustfmt.toml | 32 ++++ src/main.rs | 10 +- src/server/config.rs | 9 +- src/server/grpc/base_node.rs | 212 ++++++++++----------- src/server/grpc/error.rs | 3 + src/server/grpc/mod.rs | 3 + src/server/grpc/p2pool.rs | 67 ++++--- src/server/grpc/util.rs | 19 +- src/server/mod.rs | 3 + src/server/p2p/client.rs | 55 +++--- src/server/p2p/error.rs | 20 +- src/server/p2p/messages.rs | 29 +-- src/server/p2p/mod.rs | 3 + src/server/p2p/p2p.rs | 357 +++++++++++++++++------------------ src/server/p2p/peer_store.rs | 26 +-- src/server/server.rs | 50 ++--- src/sharechain/block.rs | 28 ++- src/sharechain/error.rs | 3 + src/sharechain/in_memory.rs | 29 +-- src/sharechain/mod.rs | 6 +- 22 files changed, 511 insertions(+), 467 deletions(-) create mode 100644 .cargo/config.toml create mode 100755 ci_all.sh create mode 100644 rustfmt.toml diff --git a/.cargo/config.toml b/.cargo/config.toml new file mode 100644 index 00000000..4a90b60a --- /dev/null +++ b/.cargo/config.toml @@ -0,0 +1,8 @@ +[alias] +ci-fmt = "fmt --all -- --check" +ci-fmt-fix = "fmt --all" +ci-clippy = "lints clippy --all-targets --all-features" +ci-test-compile = "test --no-run --workspace --all-features --no-default-features" +ci-test = "nextest run --all-features --release --workspace --exclude integration_tests --profile ci" +ci-cucumber = "test --all-features --release --package integration_tests" +ci-check = "check --workspace --release --all-features --all-targets --locked" \ No newline at end of file diff --git a/ci_all.sh b/ci_all.sh new file mode 100755 index 00000000..8af0202e --- /dev/null +++ b/ci_all.sh @@ -0,0 +1,6 @@ +# +# Copyright 2024 The Tari Project +# SPDX-License-Identifier: BSD-3-Clause +# + +cargo +nightly ci-fmt && cargo 
machete && cargo ci-check && cargo ci-test && cargo ci-cucumber \ No newline at end of file diff --git a/rustfmt.toml b/rustfmt.toml new file mode 100644 index 00000000..643ed04d --- /dev/null +++ b/rustfmt.toml @@ -0,0 +1,32 @@ +edition = "2021" + +# Stable features +hard_tabs = false +match_block_trailing_comma = true +max_width = 120 +newline_style = "Native" +reorder_imports = true +reorder_modules = true +use_field_init_shorthand = true +use_small_heuristics = "default" +use_try_shorthand = true +wrap_comments = true + +ignore = [] + +# Nightly features +binop_separator = "Back" +comment_width = 120 +format_code_in_doc_comments = true +format_strings = true +group_imports = "StdExternalCrate" +imports_granularity = "Crate" +imports_layout = "HorizontalVertical" +normalize_comments = true +overflow_delimited_expr = true +reorder_impl_items = true +space_after_colon = true +space_before_colon = false +struct_lit_single_line = true +unstable_features = true +where_single_line = true \ No newline at end of file diff --git a/src/main.rs b/src/main.rs index 6a4ca3a6..0631bd41 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,8 +1,12 @@ +// Copyright 2024 The Tari Project +// SPDX-License-Identifier: BSD-3-Clause + use std::path::PathBuf; -use clap::builder::Styles; -use clap::builder::styling::AnsiColor; -use clap::Parser; +use clap::{ + builder::{styling::AnsiColor, Styles}, + Parser, +}; use env_logger::Builder; use log::LevelFilter; diff --git a/src/server/config.rs b/src/server/config.rs index f98cf29b..954b3c6f 100644 --- a/src/server/config.rs +++ b/src/server/config.rs @@ -1,8 +1,9 @@ -use std::path::PathBuf; -use std::time::Duration; +// Copyright 2024 The Tari Project +// SPDX-License-Identifier: BSD-3-Clause -use crate::server::p2p; -use crate::server::p2p::peer_store::PeerStoreConfig; +use std::{path::PathBuf, time::Duration}; + +use crate::server::{p2p, p2p::peer_store::PeerStoreConfig}; /// Config is the server configuration struct. 
#[derive(Clone)] diff --git a/src/server/grpc/base_node.rs b/src/server/grpc/base_node.rs index 8bcee7fc..23dcf152 100644 --- a/src/server/grpc/base_node.rs +++ b/src/server/grpc/base_node.rs @@ -1,30 +1,66 @@ +// Copyright 2024 The Tari Project +// SPDX-License-Identifier: BSD-3-Clause + use std::sync::Arc; -use libp2p::futures::channel::mpsc; -use libp2p::futures::SinkExt; +use libp2p::futures::{channel::mpsc, SinkExt}; use log::error; -use minotari_app_grpc::tari_rpc; -use minotari_app_grpc::tari_rpc::base_node_client::BaseNodeClient; -use minotari_app_grpc::tari_rpc::{ - Block, BlockBlobRequest, BlockGroupRequest, BlockGroupResponse, BlockHeaderResponse, - BlockHeight, BlockTimingResponse, ConsensusConstants, Empty, FetchMatchingUtxosRequest, - GetActiveValidatorNodesRequest, GetBlocksRequest, GetHeaderByHashRequest, - GetMempoolTransactionsRequest, GetNewBlockBlobResult, GetNewBlockResult, - GetNewBlockTemplateWithCoinbasesRequest, GetNewBlockWithCoinbasesRequest, GetPeersRequest, - GetShardKeyRequest, GetShardKeyResponse, GetSideChainUtxosRequest, - GetTemplateRegistrationsRequest, HeightRequest, HistoricalBlock, ListConnectedPeersResponse, - ListHeadersRequest, MempoolStatsResponse, NetworkStatusResponse, NewBlockTemplate, - NewBlockTemplateRequest, NewBlockTemplateResponse, NodeIdentity, SearchKernelsRequest, - SearchUtxosRequest, SoftwareUpdate, StringValue, SubmitBlockResponse, SubmitTransactionRequest, - SubmitTransactionResponse, SyncInfoResponse, SyncProgressResponse, TipInfoResponse, - TransactionStateRequest, TransactionStateResponse, ValueAtHeightResponse, +use minotari_app_grpc::{ + tari_rpc, + tari_rpc::{ + base_node_client::BaseNodeClient, + Block, + BlockBlobRequest, + BlockGroupRequest, + BlockGroupResponse, + BlockHeaderResponse, + BlockHeight, + BlockTimingResponse, + ConsensusConstants, + Empty, + FetchMatchingUtxosRequest, + GetActiveValidatorNodesRequest, + GetBlocksRequest, + GetHeaderByHashRequest, + GetMempoolTransactionsRequest, + 
GetNewBlockBlobResult, + GetNewBlockResult, + GetNewBlockTemplateWithCoinbasesRequest, + GetNewBlockWithCoinbasesRequest, + GetPeersRequest, + GetShardKeyRequest, + GetShardKeyResponse, + GetSideChainUtxosRequest, + GetTemplateRegistrationsRequest, + HeightRequest, + HistoricalBlock, + ListConnectedPeersResponse, + ListHeadersRequest, + MempoolStatsResponse, + NetworkStatusResponse, + NewBlockTemplate, + NewBlockTemplateRequest, + NewBlockTemplateResponse, + NodeIdentity, + SearchKernelsRequest, + SearchUtxosRequest, + SoftwareUpdate, + StringValue, + SubmitBlockResponse, + SubmitTransactionRequest, + SubmitTransactionResponse, + SyncInfoResponse, + SyncProgressResponse, + TipInfoResponse, + TransactionStateRequest, + TransactionStateResponse, + ValueAtHeightResponse, + }, }; use tokio::sync::Mutex; -use tonic::transport::Channel; -use tonic::{Request, Response, Status, Streaming}; +use tonic::{transport::Channel, Request, Response, Status, Streaming}; -use crate::server::grpc::error::Error; -use crate::server::grpc::util; +use crate::server::grpc::{error::Error, util}; const LIST_HEADERS_PAGE_SIZE: usize = 10; const GET_BLOCKS_PAGE_SIZE: usize = 10; @@ -38,13 +74,9 @@ macro_rules! 
proxy_simple_result { match $self.client.lock().await.$call($request.into_inner()).await { Ok(resp) => Ok(resp), Err(error) => { - error!( - "Error while calling {:?} on base node: {:?}", - stringify!($call), - error - ); + error!("Error while calling {:?} on base node: {:?}", stringify!($call), error); Err(error) - } + }, } }; } @@ -92,7 +124,7 @@ where }); }); Ok(Response::new(rx)) - } + }, Err(status) => Err(status), } } @@ -106,69 +138,73 @@ pub struct TariBaseNodeGrpc { impl TariBaseNodeGrpc { pub async fn new(base_node_address: String) -> Result { Ok(Self { - client: Arc::new(Mutex::new( - util::connect_base_node(base_node_address).await?, - )), + client: Arc::new(Mutex::new(util::connect_base_node(base_node_address).await?)), }) } } #[tonic::async_trait] impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc { + type FetchMatchingUtxosStream = mpsc::Receiver>; + type GetActiveValidatorNodesStream = mpsc::Receiver>; + type GetBlocksStream = mpsc::Receiver>; + type GetMempoolTransactionsStream = mpsc::Receiver>; + type GetNetworkDifficultyStream = mpsc::Receiver>; + type GetPeersStream = mpsc::Receiver>; + type GetSideChainUtxosStream = mpsc::Receiver>; + type GetTemplateRegistrationsStream = mpsc::Receiver>; + type GetTokensInCirculationStream = mpsc::Receiver>; type ListHeadersStream = mpsc::Receiver>; + type SearchKernelsStream = mpsc::Receiver>; + type SearchUtxosStream = mpsc::Receiver>; + async fn list_headers( &self, request: Request, ) -> Result, Status> { proxy_stream_result!(self, list_headers, request, LIST_HEADERS_PAGE_SIZE) } + async fn get_header_by_hash( &self, request: Request, ) -> Result, Status> { proxy_simple_result!(self, get_header_by_hash, request) } - type GetBlocksStream = mpsc::Receiver>; - async fn get_blocks( - &self, - request: Request, - ) -> Result, Status> { + + async fn get_blocks(&self, request: Request) -> Result, Status> { proxy_stream_result!(self, get_blocks, request, GET_BLOCKS_PAGE_SIZE) } - async fn 
get_block_timing( - &self, - request: Request, - ) -> Result, Status> { + + async fn get_block_timing(&self, request: Request) -> Result, Status> { proxy_simple_result!(self, get_block_timing, request) } - async fn get_constants( - &self, - request: Request, - ) -> Result, Status> { + + async fn get_constants(&self, request: Request) -> Result, Status> { proxy_simple_result!(self, get_constants, request) } + async fn get_block_size( &self, request: Request, ) -> Result, Status> { proxy_simple_result!(self, get_block_size, request) } + async fn get_block_fees( &self, request: Request, ) -> Result, Status> { proxy_simple_result!(self, get_block_fees, request) } + async fn get_version(&self, request: Request) -> Result, Status> { proxy_simple_result!(self, get_version, request) } - async fn check_for_updates( - &self, - request: Request, - ) -> Result, Status> { + + async fn check_for_updates(&self, request: Request) -> Result, Status> { proxy_simple_result!(self, check_for_updates, request) } - type GetTokensInCirculationStream = mpsc::Receiver>; async fn get_tokens_in_circulation( &self, @@ -182,19 +218,11 @@ impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc { ) } - type GetNetworkDifficultyStream = - mpsc::Receiver>; - async fn get_network_difficulty( &self, request: Request, ) -> Result, Status> { - proxy_stream_result!( - self, - get_network_difficulty, - request, - GET_DIFFICULTY_PAGE_SIZE - ) + proxy_stream_result!(self, get_network_difficulty, request, GET_DIFFICULTY_PAGE_SIZE) } async fn get_new_block_template( @@ -204,10 +232,7 @@ impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc { proxy_simple_result!(self, get_new_block_template, request) } - async fn get_new_block( - &self, - request: Request, - ) -> Result, Status> { + async fn get_new_block(&self, request: Request) -> Result, Status> { proxy_simple_result!(self, get_new_block, request) } @@ -232,10 +257,7 @@ impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc { 
proxy_simple_result!(self, get_new_block_blob, request) } - async fn submit_block( - &self, - request: Request, - ) -> Result, Status> { + async fn submit_block(&self, request: Request) -> Result, Status> { proxy_simple_result!(self, submit_block, request) } @@ -253,29 +275,18 @@ impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc { proxy_simple_result!(self, submit_transaction, request) } - async fn get_sync_info( - &self, - request: Request, - ) -> Result, Status> { + async fn get_sync_info(&self, request: Request) -> Result, Status> { proxy_simple_result!(self, get_sync_info, request) } - async fn get_sync_progress( - &self, - request: Request, - ) -> Result, Status> { + async fn get_sync_progress(&self, request: Request) -> Result, Status> { proxy_simple_result!(self, get_sync_progress, request) } - async fn get_tip_info( - &self, - request: Request, - ) -> Result, Status> { + async fn get_tip_info(&self, request: Request) -> Result, Status> { proxy_simple_result!(self, get_tip_info, request) } - type SearchKernelsStream = mpsc::Receiver>; - async fn search_kernels( &self, request: Request, @@ -283,8 +294,6 @@ impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc { proxy_stream_result!(self, search_kernels, request, GET_BLOCKS_PAGE_SIZE) } - type SearchUtxosStream = mpsc::Receiver>; - async fn search_utxos( &self, request: Request, @@ -292,9 +301,6 @@ impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc { proxy_stream_result!(self, search_utxos, request, GET_BLOCKS_PAGE_SIZE) } - type FetchMatchingUtxosStream = - mpsc::Receiver>; - async fn fetch_matching_utxos( &self, request: Request, @@ -302,28 +308,15 @@ impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc { proxy_stream_result!(self, fetch_matching_utxos, request, GET_BLOCKS_PAGE_SIZE) } - type GetPeersStream = mpsc::Receiver>; - - async fn get_peers( - &self, - request: Request, - ) -> Result, Status> { + async fn get_peers(&self, request: Request) -> Result, 
Status> { proxy_stream_result!(self, get_peers, request, GET_BLOCKS_PAGE_SIZE) } - type GetMempoolTransactionsStream = - mpsc::Receiver>; - async fn get_mempool_transactions( &self, request: Request, ) -> Result, Status> { - proxy_stream_result!( - self, - get_mempool_transactions, - request, - GET_BLOCKS_PAGE_SIZE - ) + proxy_stream_result!(self, get_mempool_transactions, request, GET_BLOCKS_PAGE_SIZE) } async fn transaction_state( @@ -337,10 +330,7 @@ impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc { proxy_simple_result!(self, identify, request) } - async fn get_network_status( - &self, - request: Request, - ) -> Result, Status> { + async fn get_network_status(&self, request: Request) -> Result, Status> { proxy_simple_result!(self, get_network_status, request) } @@ -351,16 +341,10 @@ impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc { proxy_simple_result!(self, list_connected_peers, request) } - async fn get_mempool_stats( - &self, - request: Request, - ) -> Result, Status> { + async fn get_mempool_stats(&self, request: Request) -> Result, Status> { proxy_simple_result!(self, get_mempool_stats, request) } - type GetActiveValidatorNodesStream = - mpsc::Receiver>; - async fn get_active_validator_nodes( &self, request: Request, @@ -375,9 +359,6 @@ impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc { proxy_simple_result!(self, get_shard_key, request) } - type GetTemplateRegistrationsStream = - mpsc::Receiver>; - async fn get_template_registrations( &self, request: Request, @@ -385,9 +366,6 @@ impl tari_rpc::base_node_server::BaseNode for TariBaseNodeGrpc { proxy_stream_result!(self, get_template_registrations, request, 10) } - type GetSideChainUtxosStream = - mpsc::Receiver>; - async fn get_side_chain_utxos( &self, request: Request, diff --git a/src/server/grpc/error.rs b/src/server/grpc/error.rs index e32985bb..5bb34f99 100644 --- a/src/server/grpc/error.rs +++ b/src/server/grpc/error.rs @@ -1,3 +1,6 @@ +// Copyright 2024 The 
Tari Project +// SPDX-License-Identifier: BSD-3-Clause + use thiserror::Error; #[derive(Error, Debug)] diff --git a/src/server/grpc/mod.rs b/src/server/grpc/mod.rs index 0cd49153..631371ff 100644 --- a/src/server/grpc/mod.rs +++ b/src/server/grpc/mod.rs @@ -1,3 +1,6 @@ +// Copyright 2024 The Tari Project +// SPDX-License-Identifier: BSD-3-Clause + //! This module contains all the gRPC implementations to mimic a real Tari base node interface //! and also expose the custom SHA-3 P2Pool related gRPC interfaces. pub mod base_node; diff --git a/src/server/grpc/p2pool.rs b/src/server/grpc/p2pool.rs index 4fc1cc78..76208639 100644 --- a/src/server/grpc/p2pool.rs +++ b/src/server/grpc/p2pool.rs @@ -1,30 +1,39 @@ +// Copyright 2024 The Tari Project +// SPDX-License-Identifier: BSD-3-Clause + use std::sync::Arc; -use log::{debug, error, info, warn}; +use log::{debug, info, warn}; use minotari_app_grpc::tari_rpc::{ - GetNewBlockRequest, GetNewBlockResponse, GetNewBlockTemplateWithCoinbasesRequest, - HeightRequest, NewBlockTemplateRequest, PowAlgo, SubmitBlockRequest, SubmitBlockResponse, + base_node_client::BaseNodeClient, + pow_algo::PowAlgos, + sha_p2_pool_server::ShaP2Pool, + GetNewBlockRequest, + GetNewBlockResponse, + GetNewBlockTemplateWithCoinbasesRequest, + HeightRequest, + NewBlockTemplateRequest, + PowAlgo, + SubmitBlockRequest, + SubmitBlockResponse, }; -use minotari_app_grpc::tari_rpc::base_node_client::BaseNodeClient; -use minotari_app_grpc::tari_rpc::pow_algo::PowAlgos; -use minotari_app_grpc::tari_rpc::sha_p2_pool_server::ShaP2Pool; use tari_core::proof_of_work::sha3x_difficulty; use tokio::sync::Mutex; use tonic::{Request, Response, Status}; -use crate::server::grpc::error::Error; -use crate::server::grpc::util; -use crate::server::p2p; -use crate::sharechain::block::Block; -use crate::sharechain::SHARE_COUNT; -use crate::sharechain::ShareChain; +use crate::{ + server::{ + grpc::{error::Error, util}, + p2p, + }, + sharechain::{block::Block, ShareChain, 
SHARE_COUNT}, +}; const LOG_TARGET: &str = "p2pool_grpc"; /// P2Pool specific gRPC service to provide `get_new_block` and `submit_block` functionalities. pub struct ShaP2PoolGrpc - where - S: ShareChain + Send + Sync + 'static, +where S: ShareChain + Send + Sync + 'static { /// Base node client client: Arc>>, @@ -35,8 +44,7 @@ pub struct ShaP2PoolGrpc } impl ShaP2PoolGrpc - where - S: ShareChain + Send + Sync + 'static, +where S: ShareChain + Send + Sync + 'static { pub async fn new( base_node_address: String, @@ -44,9 +52,7 @@ impl ShaP2PoolGrpc share_chain: Arc, ) -> Result { Ok(Self { - client: Arc::new(Mutex::new( - util::connect_base_node(base_node_address).await?, - )), + client: Arc::new(Mutex::new(util::connect_base_node(base_node_address).await?)), p2p_client, share_chain, }) @@ -67,8 +73,7 @@ impl ShaP2PoolGrpc #[tonic::async_trait] impl ShaP2Pool for ShaP2PoolGrpc - where - S: ShareChain + Send + Sync + 'static, +where S: ShareChain + Send + Sync + 'static { /// Returns a new block (that can be mined) which contains all the shares generated /// from the current share chain as coinbase transactions. @@ -84,13 +89,7 @@ impl ShaP2Pool for ShaP2PoolGrpc algo: Some(pow_algo.clone()), max_weight: 0, }; - let template_response = self - .client - .lock() - .await - .get_new_block_template(req) - .await? - .into_inner(); + let template_response = self.client.lock().await.get_new_block_template(req).await?.into_inner(); let miner_data = template_response .miner_data .ok_or_else(|| Status::internal("missing miner data"))?; @@ -159,8 +158,8 @@ impl ShaP2Pool for ShaP2PoolGrpc // Check block's difficulty compared to the latest network one to increase the probability // to get the block accepted (and also a block with lower difficulty than latest one is invalid anyway). 
- let request_block_difficulty = sha3x_difficulty(origin_block_header) - .map_err(|error| Status::internal(error.to_string()))?; + let request_block_difficulty = + sha3x_difficulty(origin_block_header).map_err(|error| Status::internal(error.to_string()))?; let mut network_difficulty_stream = self .client .lock() @@ -174,8 +173,8 @@ impl ShaP2Pool for ShaP2PoolGrpc .into_inner(); let mut network_difficulty_matches = false; while let Ok(Some(diff_resp)) = network_difficulty_stream.message().await { - if origin_block_header.height == diff_resp.height + 1 - && request_block_difficulty.as_u64() > diff_resp.difficulty + if origin_block_header.height == diff_resp.height + 1 && + request_block_difficulty.as_u64() > diff_resp.difficulty { network_difficulty_matches = true; } @@ -198,14 +197,14 @@ impl ShaP2Pool for ShaP2PoolGrpc block.set_sent_to_main_chain(true); self.submit_share_chain_block(&block).await?; Ok(resp) - } + }, Err(_) => { block.set_sent_to_main_chain(false); self.submit_share_chain_block(&block).await?; Ok(Response::new(SubmitBlockResponse { block_hash: block.hash().to_vec(), })) - } + }, } } } diff --git a/src/server/grpc/util.rs b/src/server/grpc/util.rs index 445c3426..4bb6c49e 100644 --- a/src/server/grpc/util.rs +++ b/src/server/grpc/util.rs @@ -1,3 +1,6 @@ +// Copyright 2024 The Tari Project +// SPDX-License-Identifier: BSD-3-Clause + use std::time::Duration; use log::error; @@ -9,19 +12,14 @@ use tonic::transport::Channel; use crate::server::grpc::error::{Error, TonicError}; /// Utility function to connect to a Base node and try infinitely when it fails until gets connected. 
-pub async fn connect_base_node( - base_node_address: String, -) -> Result, Error> { +pub async fn connect_base_node(base_node_address: String) -> Result, Error> { let client_result = BaseNodeGrpcClient::connect(base_node_address.clone()) .await .map_err(|e| Error::Tonic(TonicError::Transport(e))); let client = match client_result { Ok(client) => client, Err(error) => { - error!( - "[Retry] Failed to connect to Tari base node: {:?}", - error.to_string() - ); + error!("[Retry] Failed to connect to Tari base node: {:?}", error.to_string()); let mut client = None; while client.is_none() { sleep(Duration::from_secs(5)).await; @@ -30,14 +28,11 @@ pub async fn connect_base_node( .map_err(|e| Error::Tonic(TonicError::Transport(e))) { Ok(curr_client) => client = Some(curr_client), - Err(error) => error!( - "[Retry] Failed to connect to Tari base node: {:?}", - error.to_string() - ), + Err(error) => error!("[Retry] Failed to connect to Tari base node: {:?}", error.to_string()), } } client.unwrap() - } + }, }; Ok(client) diff --git a/src/server/mod.rs b/src/server/mod.rs index d08be40d..cf53508e 100644 --- a/src/server/mod.rs +++ b/src/server/mod.rs @@ -1,3 +1,6 @@ +// Copyright 2024 The Tari Project +// SPDX-License-Identifier: BSD-3-Clause + pub use config::*; pub use server::*; diff --git a/src/server/p2p/client.rs b/src/server/p2p/client.rs index b262a338..dc96dd07 100644 --- a/src/server/p2p/client.rs +++ b/src/server/p2p/client.rs @@ -1,16 +1,31 @@ -use std::sync::Arc; -use std::time::{Duration, Instant}; +// Copyright 2024 The Tari Project +// SPDX-License-Identifier: BSD-3-Clause + +use std::{ + sync::Arc, + time::{Duration, Instant}, +}; use log::{debug, error, warn}; use thiserror::Error; -use tokio::select; -use tokio::sync::{broadcast, mpsc, Mutex}; -use tokio::sync::broadcast::error::{RecvError, SendError}; -use tokio::time::sleep; - -use crate::server::p2p::messages::{ValidateBlockRequest, ValidateBlockResult}; -use crate::server::p2p::peer_store::PeerStore; 
-use crate::sharechain::block::Block; +use tokio::{ + select, + sync::{ + broadcast, + broadcast::error::{RecvError, SendError}, + mpsc, + Mutex, + }, + time::sleep, +}; + +use crate::{ + server::p2p::{ + messages::{ValidateBlockRequest, ValidateBlockResult}, + peer_store::PeerStore, + }, + sharechain::block::Block, +}; const LOG_TARGET: &str = "p2p_service_client"; @@ -77,11 +92,7 @@ pub struct ServiceClient { } impl ServiceClient { - pub fn new( - channels: ServiceClientChannels, - peer_store: Arc, - config: ClientConfig, - ) -> Self { + pub fn new(channels: ServiceClientChannels, peer_store: Arc, config: ClientConfig) -> Self { Self { channels, peer_store, @@ -94,18 +105,12 @@ impl ServiceClient { self.channels .broadcast_block_sender .send(block.clone()) - .map_err(|error| { - ClientError::ChannelSend(Box::new(ChannelSendError::BroadcastBlock(error))) - })?; + .map_err(|error| ClientError::ChannelSend(Box::new(ChannelSendError::BroadcastBlock(error))))?; Ok(()) } - async fn validate_block_with_retries( - &self, - block: &Block, - mut retries: u64, - ) -> Result { + async fn validate_block_with_retries(&self, block: &Block, mut retries: u64) -> Result { if retries >= self.config.validate_block_max_retries { warn!(target: LOG_TARGET, "❗Too many validation retries!"); return Ok(false); @@ -117,9 +122,7 @@ impl ServiceClient { self.channels .validate_block_sender .send(ValidateBlockRequest::new(block.clone())) - .map_err(|error| { - ClientError::ChannelSend(Box::new(ChannelSendError::ValidateBlockRequest(error))) - })?; + .map_err(|error| ClientError::ChannelSend(Box::new(ChannelSendError::ValidateBlockRequest(error))))?; // calculate how many validations we need (more than 2/3 of peers should validate) let peer_count = self.peer_store.peer_count().await as f64 + 1.0; diff --git a/src/server/p2p/error.rs b/src/server/p2p/error.rs index 0cf2a0fe..b73ad386 100644 --- a/src/server/p2p/error.rs +++ b/src/server/p2p/error.rs @@ -1,12 +1,18 @@ -use 
libp2p::gossipsub::PublishError; -use libp2p::identity::DecodingError; -use libp2p::kad::NoKnownPeers; -use libp2p::swarm::DialError; -use libp2p::{multiaddr, noise, TransportError}; +// Copyright 2024 The Tari Project +// SPDX-License-Identifier: BSD-3-Clause + +use libp2p::{ + gossipsub::PublishError, + identity::DecodingError, + kad::NoKnownPeers, + multiaddr, + noise, + swarm::DialError, + TransportError, +}; use thiserror::Error; -use crate::server::p2p; -use crate::sharechain; +use crate::{server::p2p, sharechain}; #[derive(Error, Debug)] pub enum Error { diff --git a/src/server/p2p/messages.rs b/src/server/p2p/messages.rs index 32c092a6..cc9e3cad 100644 --- a/src/server/p2p/messages.rs +++ b/src/server/p2p/messages.rs @@ -1,10 +1,12 @@ +// Copyright 2024 The Tari Project +// SPDX-License-Identifier: BSD-3-Clause + use std::time::{SystemTime, UNIX_EPOCH}; use libp2p::PeerId; use serde::{Deserialize, Serialize}; -use crate::server::p2p::Error; -use crate::sharechain::block::Block; +use crate::{server::p2p::Error, sharechain::block::Block}; #[macro_export] macro_rules! impl_conversions { @@ -27,16 +29,12 @@ macro_rules! 
impl_conversions { }; } pub fn deserialize_message<'a, T>(raw_message: &'a [u8]) -> Result -where - T: Deserialize<'a>, -{ +where T: Deserialize<'a> { serde_cbor::from_slice(raw_message).map_err(Error::SerializeDeserialize) } pub fn serialize_message(input: &T) -> Result, Error> -where - T: Serialize, -{ +where T: Serialize { serde_cbor::to_vec(input).map_err(Error::SerializeDeserialize) } @@ -48,10 +46,7 @@ pub struct PeerInfo { impl_conversions!(PeerInfo); impl PeerInfo { pub fn new(current_height: u64) -> Self { - let timestamp = SystemTime::now() - .duration_since(UNIX_EPOCH) - .unwrap() - .as_micros(); + let timestamp = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_micros(); Self { current_height, timestamp, @@ -67,10 +62,7 @@ pub struct ValidateBlockRequest { impl_conversions!(ValidateBlockRequest); impl ValidateBlockRequest { pub fn new(block: Block) -> Self { - let timestamp = SystemTime::now() - .duration_since(UNIX_EPOCH) - .unwrap() - .as_micros(); + let timestamp = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_micros(); Self { block, timestamp } } @@ -89,10 +81,7 @@ pub struct ValidateBlockResult { impl_conversions!(ValidateBlockResult); impl ValidateBlockResult { pub fn new(peer_id: PeerId, block: Block, valid: bool) -> Self { - let timestamp = SystemTime::now() - .duration_since(UNIX_EPOCH) - .unwrap() - .as_micros(); + let timestamp = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_micros(); Self { peer_id, block, diff --git a/src/server/p2p/mod.rs b/src/server/p2p/mod.rs index e32658e9..62cdba3e 100644 --- a/src/server/p2p/mod.rs +++ b/src/server/p2p/mod.rs @@ -1,3 +1,6 @@ +// Copyright 2024 The Tari Project +// SPDX-License-Identifier: BSD-3-Clause + //! P2p module contains all the peer-to-peer related implementations and communications. //! This module uses hardly `libp2p` to communicate between peers efficiently. 
diff --git a/src/server/p2p/p2p.rs b/src/server/p2p/p2p.rs index 7cf9db51..1ac13704 100644 --- a/src/server/p2p/p2p.rs +++ b/src/server/p2p/p2p.rs @@ -1,40 +1,66 @@ -use std::hash::{DefaultHasher, Hash, Hasher}; -use std::path::PathBuf; -use std::sync::Arc; -use std::time::Duration; +// Copyright 2024 The Tari Project +// SPDX-License-Identifier: BSD-3-Clause + +use std::{ + hash::{DefaultHasher, Hash, Hasher}, + path::PathBuf, + sync::Arc, + time::Duration, +}; use libp2p::{ - gossipsub, kad, mdns, Multiaddr, noise, request_response, StreamProtocol, Swarm, tcp, yamux, + futures::StreamExt, + gossipsub, + gossipsub::{IdentTopic, Message, PublishError}, + identity::Keypair, + kad, + kad::{store::MemoryStore, Event, Mode}, + mdns, + mdns::tokio::Tokio, + multiaddr::Protocol, + noise, + request_response, + request_response::{cbor, ResponseChannel}, + swarm::{NetworkBehaviour, SwarmEvent}, + tcp, + yamux, + Multiaddr, + StreamProtocol, + Swarm, }; -use libp2p::futures::StreamExt; -use libp2p::gossipsub::{IdentTopic, Message, PublishError}; -use libp2p::identity::Keypair; -use libp2p::kad::{Event, Mode}; -use libp2p::kad::store::MemoryStore; -use libp2p::mdns::tokio::Tokio; -use libp2p::multiaddr::Protocol; -use libp2p::request_response::{cbor, ResponseChannel}; -use libp2p::swarm::{NetworkBehaviour, SwarmEvent}; use log::{debug, error, info, warn}; use tari_common::configuration::Network; use tari_utilities::hex::Hex; -use tokio::{io, select}; -use tokio::fs::File; -use tokio::io::{AsyncReadExt, AsyncWriteExt}; -use tokio::sync::{broadcast, mpsc}; -use tokio::sync::broadcast::error::RecvError; - -use crate::server::config; -use crate::server::p2p::{ - client, Error, LibP2PError, messages, ServiceClient, ServiceClientChannels, +use tokio::{ + fs::File, + io, + io::{AsyncReadExt, AsyncWriteExt}, + select, + sync::{broadcast, broadcast::error::RecvError, mpsc}, }; -use crate::server::p2p::messages::{ - PeerInfo, ShareChainSyncRequest, ShareChainSyncResponse, 
ValidateBlockRequest, - ValidateBlockResult, + +use crate::{ + server::{ + config, + p2p::{ + client, + messages, + messages::{ + PeerInfo, + ShareChainSyncRequest, + ShareChainSyncResponse, + ValidateBlockRequest, + ValidateBlockResult, + }, + peer_store::PeerStore, + Error, + LibP2PError, + ServiceClient, + ServiceClientChannels, + }, + }, + sharechain::{block::Block, ShareChain}, }; -use crate::server::p2p::peer_store::PeerStore; -use crate::sharechain::block::Block; -use crate::sharechain::ShareChain; const PEER_INFO_TOPIC: &str = "peer_info"; const BLOCK_VALIDATION_REQUESTS_TOPIC: &str = "block_validation_requests"; @@ -76,8 +102,7 @@ pub struct ServerNetworkBehaviour { /// Service is the implementation that holds every peer-to-peer related logic /// that makes sure that all the communications, syncing, broadcasting etc... are done. pub struct Service - where - S: ShareChain + Send + Sync + 'static, +where S: ShareChain + Send + Sync + 'static { swarm: Swarm, port: u16, @@ -97,8 +122,7 @@ pub struct Service } impl Service - where - S: ShareChain + Send + Sync + 'static, +where S: ShareChain + Send + Sync + 'static { /// Constructs a new Service from the provided config. /// It also instantiates libp2p swarm inside. @@ -167,64 +191,51 @@ impl Service /// Creates a new swarm from the provided config async fn new_swarm(config: &config::Config) -> Result, Error> { - let mut swarm = - libp2p::SwarmBuilder::with_existing_identity(Self::keypair(&config.p2p_service).await?) - .with_tokio() - .with_tcp( - tcp::Config::default(), - noise::Config::new, - yamux::Config::default, - ) - .map_err(|e| Error::LibP2P(LibP2PError::Noise(e)))? 
- .with_behaviour(move |key_pair| { - // gossipsub - let message_id_fn = |message: &gossipsub::Message| { - let mut s = DefaultHasher::new(); - if let Some(soure_peer) = message.source { - soure_peer.to_bytes().hash(&mut s); - } - message.data.hash(&mut s); - gossipsub::MessageId::from(s.finish().to_string()) - }; - let gossipsub_config = gossipsub::ConfigBuilder::default() - .heartbeat_interval(Duration::from_secs(10)) - .validation_mode(gossipsub::ValidationMode::Strict) - .message_id_fn(message_id_fn) - .build() - .map_err(|msg| io::Error::new(io::ErrorKind::Other, msg))?; - let gossipsub = gossipsub::Behaviour::new( - gossipsub::MessageAuthenticity::Signed(key_pair.clone()), - gossipsub_config, - )?; - - Ok(ServerNetworkBehaviour { - gossipsub, - mdns: mdns::Behaviour::new( - mdns::Config::default(), - key_pair.public().to_peer_id(), - ) - .map_err(|e| Error::LibP2P(LibP2PError::IO(e)))?, - share_chain_sync: cbor::Behaviour::< - ShareChainSyncRequest, - ShareChainSyncResponse, - >::new( - [( - StreamProtocol::new(SHARE_CHAIN_SYNC_REQ_RESP_PROTOCOL), - request_response::ProtocolSupport::Full, - )], - request_response::Config::default(), - ), - kademlia: kad::Behaviour::new( - key_pair.public().to_peer_id(), - MemoryStore::new(key_pair.public().to_peer_id()), - ), - }) - }) - .map_err(|e| Error::LibP2P(LibP2PError::Behaviour(e.to_string())))? - .with_swarm_config(|c| { - c.with_idle_connection_timeout(config.idle_connection_timeout) + let mut swarm = libp2p::SwarmBuilder::with_existing_identity(Self::keypair(&config.p2p_service).await?) + .with_tokio() + .with_tcp(tcp::Config::default(), noise::Config::new, yamux::Config::default) + .map_err(|e| Error::LibP2P(LibP2PError::Noise(e)))? 
+ .with_behaviour(move |key_pair| { + // gossipsub + let message_id_fn = |message: &gossipsub::Message| { + let mut s = DefaultHasher::new(); + if let Some(soure_peer) = message.source { + soure_peer.to_bytes().hash(&mut s); + } + message.data.hash(&mut s); + gossipsub::MessageId::from(s.finish().to_string()) + }; + let gossipsub_config = gossipsub::ConfigBuilder::default() + .heartbeat_interval(Duration::from_secs(10)) + .validation_mode(gossipsub::ValidationMode::Strict) + .message_id_fn(message_id_fn) + .build() + .map_err(|msg| io::Error::new(io::ErrorKind::Other, msg))?; + let gossipsub = gossipsub::Behaviour::new( + gossipsub::MessageAuthenticity::Signed(key_pair.clone()), + gossipsub_config, + )?; + + Ok(ServerNetworkBehaviour { + gossipsub, + mdns: mdns::Behaviour::new(mdns::Config::default(), key_pair.public().to_peer_id()) + .map_err(|e| Error::LibP2P(LibP2PError::IO(e)))?, + share_chain_sync: cbor::Behaviour::::new( + [( + StreamProtocol::new(SHARE_CHAIN_SYNC_REQ_RESP_PROTOCOL), + request_response::ProtocolSupport::Full, + )], + request_response::Config::default(), + ), + kademlia: kad::Behaviour::new( + key_pair.public().to_peer_id(), + MemoryStore::new(key_pair.public().to_peer_id()), + ), }) - .build(); + }) + .map_err(|e| Error::LibP2P(LibP2PError::Behaviour(e.to_string())))? + .with_swarm_config(|c| c.with_idle_connection_timeout(config.idle_connection_timeout)) + .build(); swarm.behaviour_mut().kademlia.set_mode(Some(Mode::Server)); @@ -251,10 +262,7 @@ impl Service /// Handles block validation requests coming from Service clients. /// All the requests from clients are sent to [`BLOCK_VALIDATION_REQUESTS_TOPIC`]. 
- async fn handle_client_validate_block_request( - &mut self, - result: Result, - ) { + async fn handle_client_validate_block_request(&mut self, result: Result) { match result { Ok(request) => { let request_raw_result: Result, Error> = request.try_into(); @@ -266,15 +274,15 @@ impl Service ) { error!(target: LOG_TARGET, "Failed to send block validation request: {error:?}"); } - } + }, Err(error) => { error!(target: LOG_TARGET, "Failed to convert block validation request to bytes: {error:?}"); - } + }, } - } + }, Err(error) => { error!(target: LOG_TARGET, "Block validation request receive error: {error:?}"); - } + }, } } @@ -283,18 +291,16 @@ impl Service let result_raw_result: Result, Error> = result.try_into(); match result_raw_result { Ok(result_raw) => { - if let Err(error) = self - .swarm - .behaviour_mut() - .gossipsub - .publish(IdentTopic::new(Self::topic_name(BLOCK_VALIDATION_RESULTS_TOPIC)), result_raw) - { + if let Err(error) = self.swarm.behaviour_mut().gossipsub.publish( + IdentTopic::new(Self::topic_name(BLOCK_VALIDATION_RESULTS_TOPIC)), + result_raw, + ) { error!(target: LOG_TARGET, "Failed to publish block validation result: {error:?}"); } - } + }, Err(error) => { error!(target: LOG_TARGET, "Failed to convert block validation result to bytes: {error:?}"); - } + }, } } @@ -330,17 +336,17 @@ impl Service .publish(IdentTopic::new(Self::topic_name(NEW_BLOCK_TOPIC)), block_raw) .map_err(|error| Error::LibP2P(LibP2PError::Publish(error))) { - Ok(_) => {} + Ok(_) => {}, Err(error) => { error!(target: LOG_TARGET, "Failed to broadcast new block: {error:?}") - } + }, } - } + }, Err(error) => { error!(target: LOG_TARGET, "Failed to convert block to bytes: {error:?}") - } + }, } - } + }, Err(error) => error!(target: LOG_TARGET, "Failed to receive new block: {error:?}"), } } @@ -385,48 +391,46 @@ impl Service Ok(payload) => { debug!(target: LOG_TARGET, "New peer info: {peer:?} -> {payload:?}"); self.peer_store.add(peer, payload).await; - // if let Some(tip) = 
self.peer_store.tip_of_block_height().await { - // if let Ok(curr_height) = self.share_chain.tip_height().await { - // if curr_height < tip.height { - self.sync_share_chain().await; - // } - // } - // } - } + if let Some(tip) = self.peer_store.tip_of_block_height().await { + if let Ok(curr_height) = self.share_chain.tip_height().await { + if curr_height < tip.height { + self.sync_share_chain().await; + } + } + } + }, Err(error) => { error!(target: LOG_TARGET, "Can't deserialize peer info payload: {:?}", error); - } + }, }, topic if topic == Self::topic_name(BLOCK_VALIDATION_REQUESTS_TOPIC) => { match messages::ValidateBlockRequest::try_from(message) { Ok(payload) => { debug!(target: LOG_TARGET, "Block validation request: {payload:?}"); - let validate_result = - self.share_chain.validate_block(&payload.block()).await; + let validate_result = self.share_chain.validate_block(&payload.block()).await; let mut valid = false; if let Ok(is_valid) = validate_result { valid = is_valid; } - // TODO: Generate partial schnorr signature to prove that current peer validated the block (using peer's private key and broadcast public key vie PeerInfo) + // TODO: Generate partial schnorr signature to prove that current peer validated the block + // (using peer's private key and broadcast public key vie PeerInfo) // TODO: to be able to verify at other peers. - // TODO: Validate whether new block includes all the shares (generate shares until height of new_block.height - 1) - // TODO: by generating a new block and check kernels/outputs whether they are the same or not. - // TODO: Validating new blocks version 2 would be to send a proof that was generated from the shares. 
- - let validate_result = ValidateBlockResult::new( - *self.swarm.local_peer_id(), - payload.block(), - valid, - ); + // TODO: Validate whether new block includes all the shares (generate shares until height of + // new_block.height - 1) TODO: by generating a new block and check + // kernels/outputs whether they are the same or not. TODO: Validating + // new blocks version 2 would be to send a proof that was generated from the shares. + + let validate_result = + ValidateBlockResult::new(*self.swarm.local_peer_id(), payload.block(), valid); self.send_block_validation_result(validate_result).await; - } + }, Err(error) => { error!(target: LOG_TARGET, "Can't deserialize block validation request payload: {:?}", error); - } + }, } - } + }, topic if topic == Self::topic_name(BLOCK_VALIDATION_RESULTS_TOPIC) => { match messages::ValidateBlockResult::try_from(message) { Ok(payload) => { @@ -440,12 +444,12 @@ impl Service senders_to_delete.iter().for_each(|i| { self.client_validate_block_res_txs.remove(*i); }); - } + }, Err(error) => { error!(target: LOG_TARGET, "Can't deserialize block validation request payload: {:?}", error); - } + }, } - } + }, // TODO: send a signature that proves that the actual block was coming from this peer topic if topic == Self::topic_name(NEW_BLOCK_TOPIC) => match Block::try_from(message) { Ok(payload) => { @@ -453,14 +457,14 @@ impl Service if let Err(error) = self.share_chain.submit_block(&payload).await { error!(target: LOG_TARGET, "Could not add new block to local share chain: {error:?}"); } - } + }, Err(error) => { error!(target: LOG_TARGET, "Can't deserialize broadcast block payload: {:?}", error); - } + }, }, _ => { warn!(target: LOG_TARGET, "Unknown topic {topic:?}!"); - } + }, } } @@ -482,7 +486,7 @@ impl Service { error!(target: LOG_TARGET, "Failed to send block sync response"); } - } + }, Err(error) => error!(target: LOG_TARGET, "Failed to get blocks from height: {error:?}"), } } @@ -511,15 +515,15 @@ impl Service .behaviour_mut() 
.share_chain_sync .send_request(&result.peer_id, ShareChainSyncRequest::new(tip)); - } + }, Err(error) => { error!(target: LOG_TARGET, "Failed to get latest height of share chain: {error:?}") - } + }, } - } + }, None => { error!(target: LOG_TARGET, "Failed to get peer with highest share chain height!") - } + }, } } @@ -528,26 +532,20 @@ impl Service match event { SwarmEvent::NewListenAddr { address, .. } => { info!(target: LOG_TARGET, "Listening on {address:?}"); - } + }, SwarmEvent::Behaviour(event) => match event { ServerNetworkBehaviourEvent::Mdns(mdns_event) => match mdns_event { mdns::Event::Discovered(peers) => { for (peer, addr) in peers { self.swarm.add_peer_address(peer, addr); - self.swarm - .behaviour_mut() - .gossipsub - .add_explicit_peer(&peer); + self.swarm.behaviour_mut().gossipsub.add_explicit_peer(&peer); } - } + }, mdns::Event::Expired(peers) => { for (peer, _addr) in peers { - self.swarm - .behaviour_mut() - .gossipsub - .remove_explicit_peer(&peer); + self.swarm.behaviour_mut().gossipsub.remove_explicit_peer(&peer); } - } + }, }, ServerNetworkBehaviourEvent::Gossipsub(event) => match event { gossipsub::Event::Message { @@ -556,37 +554,34 @@ impl Service propagation_source: _propagation_source, } => { self.handle_new_gossipsub_message(message).await; - } - gossipsub::Event::Subscribed { .. } => {} - gossipsub::Event::Unsubscribed { .. } => {} - gossipsub::Event::GossipsubNotSupported { .. } => {} + }, + gossipsub::Event::Subscribed { .. } => {}, + gossipsub::Event::Unsubscribed { .. } => {}, + gossipsub::Event::GossipsubNotSupported { .. 
} => {}, }, ServerNetworkBehaviourEvent::ShareChainSync(event) => match event { - request_response::Event::Message { - peer: _peer, - message, - } => match message { + request_response::Event::Message { peer: _peer, message } => match message { request_response::Message::Request { request_id: _request_id, request, channel, } => { self.handle_share_chain_sync_request(channel, request).await; - } + }, request_response::Message::Response { request_id: _request_id, response, } => { self.handle_share_chain_sync_response(response).await; - } + }, }, request_response::Event::OutboundFailure { peer, error, .. } => { error!(target: LOG_TARGET, "REQ-RES outbound failure: {peer:?} -> {error:?}"); - } + }, request_response::Event::InboundFailure { peer, error, .. } => { error!(target: LOG_TARGET, "REQ-RES inbound failure: {peer:?} -> {error:?}"); - } - request_response::Event::ResponseSent { .. } => {} + }, + request_response::Event::ResponseSent { .. } => {}, }, ServerNetworkBehaviourEvent::Kademlia(event) => match event { Event::RoutingUpdated { @@ -598,31 +593,24 @@ impl Service addresses.iter().for_each(|addr| { self.swarm.add_peer_address(peer, addr.clone()); }); - self.swarm - .behaviour_mut() - .gossipsub - .add_explicit_peer(&peer); + self.swarm.behaviour_mut().gossipsub.add_explicit_peer(&peer); if let Some(old_peer) = old_peer { - self.swarm - .behaviour_mut() - .gossipsub - .remove_explicit_peer(&old_peer); + self.swarm.behaviour_mut().gossipsub.remove_explicit_peer(&old_peer); if let Err(error) = self.client_peer_changes_tx.send(()) { error!(target: LOG_TARGET, "Failed to send peer changes trigger: {error:?}"); } } - } + }, _ => debug!(target: LOG_TARGET, "[KADEMLIA] {event:?}"), }, }, - _ => {} + _ => {}, }; } /// Main loop of the service that drives the events and libp2p swarm forward. 
async fn main_loop(&mut self) -> Result<(), Error> { - let mut publish_peer_info_interval = - tokio::time::interval(self.config.peer_info_publish_interval); + let mut publish_peer_info_interval = tokio::time::interval(self.config.peer_info_publish_interval); loop { select! { @@ -680,10 +668,7 @@ impl Service if peer_id.is_none() { return Err(Error::LibP2P(LibP2PError::MissingPeerId(seed_peer.clone()))); } - self.swarm - .behaviour_mut() - .kademlia - .add_address(&peer_id.unwrap(), addr); + self.swarm.behaviour_mut().kademlia.add_address(&peer_id.unwrap(), addr); } self.swarm diff --git a/src/server/p2p/peer_store.rs b/src/server/p2p/peer_store.rs index 4640115e..56208f60 100644 --- a/src/server/p2p/peer_store.rs +++ b/src/server/p2p/peer_store.rs @@ -1,5 +1,10 @@ -use std::sync::RwLock; -use std::time::{Duration, Instant}; +// Copyright 2024 The Tari Project +// SPDX-License-Identifier: BSD-3-Clause + +use std::{ + sync::RwLock, + time::{Duration, Instant}, +}; use libp2p::PeerId; use log::debug; @@ -76,9 +81,7 @@ impl PeerStore { /// Add a new peer to store. /// If a peer already exists, just replaces it. pub async fn add(&self, peer_id: PeerId, peer_info: PeerInfo) { - self.inner - .insert(peer_id, PeerStoreRecord::new(peer_info)) - .await; + self.inner.insert(peer_id, PeerStoreRecord::new(peer_info)).await; self.set_tip_of_block_height().await; } @@ -90,16 +93,15 @@ impl PeerStore { /// Sets the actual highest block height with peer. 
async fn set_tip_of_block_height(&self) { - if let Some((k, v)) = self.inner.iter().max_by(|(_k1, v1), (_k2, v2)| { - v1.peer_info - .current_height - .cmp(&v2.peer_info.current_height) - }) { + if let Some((k, v)) = self + .inner + .iter() + .max_by(|(_k1, v1), (_k2, v2)| v1.peer_info.current_height.cmp(&v2.peer_info.current_height)) + { // save result if let Ok(mut tip_height_opt) = self.tip_of_block_height.write() { if tip_height_opt.is_none() { - let _ = tip_height_opt - .insert(PeerStoreBlockHeightTip::new(*k, v.peer_info.current_height)); + let _ = tip_height_opt.insert(PeerStoreBlockHeightTip::new(*k, v.peer_info.current_height)); } else { let mut tip_height = tip_height_opt.unwrap(); tip_height.peer_id = *k; diff --git a/src/server/server.rs b/src/server/server.rs index dab1e8e9..45c5b088 100644 --- a/src/server/server.rs +++ b/src/server/server.rs @@ -1,17 +1,25 @@ -use std::net::{AddrParseError, SocketAddr}; -use std::str::FromStr; -use std::sync::Arc; +// Copyright 2024 The Tari Project +// SPDX-License-Identifier: BSD-3-Clause + +use std::{ + net::{AddrParseError, SocketAddr}, + str::FromStr, + sync::Arc, +}; use log::{error, info}; -use minotari_app_grpc::tari_rpc::base_node_server::BaseNodeServer; -use minotari_app_grpc::tari_rpc::sha_p2_pool_server::ShaP2PoolServer; +use minotari_app_grpc::tari_rpc::{base_node_server::BaseNodeServer, sha_p2_pool_server::ShaP2PoolServer}; use thiserror::Error; -use crate::server::{config, grpc, p2p}; -use crate::server::grpc::base_node::TariBaseNodeGrpc; -use crate::server::grpc::error::TonicError; -use crate::server::grpc::p2pool::ShaP2PoolGrpc; -use crate::sharechain::ShareChain; +use crate::{ + server::{ + config, + grpc, + grpc::{base_node::TariBaseNodeGrpc, error::TonicError, p2pool::ShaP2PoolGrpc}, + p2p, + }, + sharechain::ShareChain, +}; const LOG_TARGET: &str = "server"; @@ -27,8 +35,7 @@ pub enum Error { /// Server represents the server running all the necessary components for sha-p2pool. 
pub struct Server - where - S: ShareChain + Send + Sync + 'static, +where S: ShareChain + Send + Sync + 'static { config: config::Config, p2p_service: p2p::Service, @@ -38,8 +45,7 @@ pub struct Server // TODO: add graceful shutdown impl Server - where - S: ShareChain + Send + Sync + 'static, +where S: ShareChain + Send + Sync + 'static { pub async fn new(config: config::Config, share_chain: S) -> Result { let share_chain = Arc::new(share_chain); @@ -61,8 +67,8 @@ impl Server p2p_service.client(), share_chain.clone(), ) - .await - .map_err(Error::Grpc)?; + .await + .map_err(Error::Grpc)?; p2pool_server = Some(ShaP2PoolServer::new(p2pool_grpc_service)); } @@ -84,10 +90,7 @@ impl Server tonic::transport::Server::builder() .add_service(base_node_service) .add_service(p2pool_service) - .serve( - SocketAddr::from_str(format!("0.0.0.0:{}", grpc_port).as_str()) - .map_err(Error::AddrParse)?, - ) + .serve(SocketAddr::from_str(format!("0.0.0.0:{}", grpc_port).as_str()).map_err(Error::AddrParse)?) .await .map_err(|err| { error!(target: LOG_TARGET, "GRPC encountered an error: {:?}", err); @@ -108,12 +111,11 @@ impl Server let p2pool_grpc_service = self.p2pool_grpc_service.clone().unwrap(); let grpc_port = self.config.grpc_port; tokio::spawn(async move { - match Self::start_grpc(base_node_grpc_service, p2pool_grpc_service, grpc_port).await - { - Ok(_) => {} + match Self::start_grpc(base_node_grpc_service, p2pool_grpc_service, grpc_port).await { + Ok(_) => {}, Err(error) => { error!(target: LOG_TARGET, "GRPC Server encountered an error: {:?}", error); - } + }, } }); } diff --git a/src/sharechain/block.rs b/src/sharechain/block.rs index 0ebc3cb0..7dcbc794 100644 --- a/src/sharechain/block.rs +++ b/src/sharechain/block.rs @@ -1,10 +1,14 @@ +// Copyright 2024 The Tari Project +// SPDX-License-Identifier: BSD-3-Clause + use blake2::Blake2b; use digest::consts::U32; use serde::{Deserialize, Serialize}; -use tari_common_types::tari_address::TariAddress; -use 
tari_common_types::types::BlockHash; -use tari_core::blocks::{BlockHeader, BlocksHashDomain}; -use tari_core::consensus::DomainSeparatedConsensusHasher; +use tari_common_types::{tari_address::TariAddress, types::BlockHash}; +use tari_core::{ + blocks::{BlockHeader, BlocksHashDomain}, + consensus::DomainSeparatedConsensusHasher, +}; use tari_utilities::epoch_time::EpochTime; use crate::impl_conversions; @@ -26,11 +30,11 @@ impl Block { pub fn builder() -> BlockBuilder { BlockBuilder::new() } + pub fn generate_hash(&self) -> BlockHash { - let mut hash = - DomainSeparatedConsensusHasher::>::new("block") - .chain(&self.prev_hash) - .chain(&self.height); + let mut hash = DomainSeparatedConsensusHasher::>::new("block") + .chain(&self.prev_hash) + .chain(&self.height); if let Some(miner_wallet_address) = &self.miner_wallet_address { hash = hash.chain(&miner_wallet_address.to_hex()); @@ -42,27 +46,35 @@ impl Block { hash.finalize().into() } + pub fn timestamp(&self) -> EpochTime { self.timestamp } + pub fn prev_hash(&self) -> BlockHash { self.prev_hash } + pub fn height(&self) -> u64 { self.height } + pub fn original_block_header(&self) -> &Option { &self.original_block_header } + pub fn hash(&self) -> BlockHash { self.hash } + pub fn set_sent_to_main_chain(&mut self, sent_to_main_chain: bool) { self.sent_to_main_chain = sent_to_main_chain; } + pub fn miner_wallet_address(&self) -> &Option { &self.miner_wallet_address } + pub fn sent_to_main_chain(&self) -> bool { self.sent_to_main_chain } diff --git a/src/sharechain/error.rs b/src/sharechain/error.rs index 65d2f780..dbe77561 100644 --- a/src/sharechain/error.rs +++ b/src/sharechain/error.rs @@ -1,3 +1,6 @@ +// Copyright 2024 The Tari Project +// SPDX-License-Identifier: BSD-3-Clause + use tari_common_types::tari_address::TariAddressError; use thiserror::Error; diff --git a/src/sharechain/in_memory.rs b/src/sharechain/in_memory.rs index 4205caf2..446bc3fb 100644 --- a/src/sharechain/in_memory.rs +++ 
b/src/sharechain/in_memory.rs @@ -1,17 +1,24 @@ -use std::collections::HashMap; -use std::sync::Arc; +// Copyright 2024 The Tari Project +// SPDX-License-Identifier: BSD-3-Clause + +use std::{collections::HashMap, sync::Arc}; use async_trait::async_trait; use log::{debug, error, info, warn}; use minotari_app_grpc::tari_rpc::{NewBlockCoinbase, SubmitBlockRequest}; use tari_common_types::tari_address::TariAddress; use tari_core::blocks::BlockHeader; -use tari_utilities::epoch_time::EpochTime; -use tari_utilities::hex::Hex; +use tari_utilities::{epoch_time::EpochTime, hex::Hex}; use tokio::sync::{RwLock, RwLockWriteGuard}; -use crate::sharechain::{Block, MAX_BLOCKS_COUNT, SHARE_COUNT, ShareChain, ShareChainResult}; -use crate::sharechain::error::{BlockConvertError, Error}; +use crate::sharechain::{ + error::{BlockConvertError, Error}, + Block, + ShareChain, + ShareChainResult, + MAX_BLOCKS_COUNT, + SHARE_COUNT, +}; const LOG_TARGET: &str = "in_memory_share_chain"; @@ -108,7 +115,6 @@ impl InMemoryShareChain { } } - if blocks.len() >= self.max_blocks_count { let diff = blocks.len() - self.max_blocks_count; blocks.drain(0..diff); @@ -129,8 +135,7 @@ impl InMemoryShareChain { impl ShareChain for InMemoryShareChain { async fn submit_block(&self, block: &Block) -> ShareChainResult<()> { let mut blocks_write_lock = self.blocks.write().await; - self.submit_block_with_lock(&mut blocks_write_lock, block, false) - .await + self.submit_block_with_lock(&mut blocks_write_lock, block, false).await } async fn submit_blocks(&self, blocks: Vec, sync: bool) -> ShareChainResult<()> { @@ -138,7 +143,8 @@ impl ShareChain for InMemoryShareChain { let last_block = blocks_write_lock.last(); if (sync && last_block.is_none()) || - (sync && last_block.is_some() && !blocks.is_empty() && last_block.unwrap().height() < blocks[0].height()) { + (sync && last_block.is_some() && !blocks.is_empty() && last_block.unwrap().height() < blocks[0].height()) + { blocks_write_lock.clear(); } @@ -201,8 +207,7 
@@ impl ShareChain for InMemoryShareChain { .with_height(last_block.height() + 1) .with_original_block_header(origin_block_header) .with_miner_wallet_address( - TariAddress::from_hex(request.wallet_payment_address.as_str()) - .map_err(Error::TariAddress)?, + TariAddress::from_hex(request.wallet_payment_address.as_str()).map_err(Error::TariAddress)?, ) .build()) } diff --git a/src/sharechain/mod.rs b/src/sharechain/mod.rs index 91832d1d..f81a27a6 100644 --- a/src/sharechain/mod.rs +++ b/src/sharechain/mod.rs @@ -1,8 +1,10 @@ +// Copyright 2024 The Tari Project +// SPDX-License-Identifier: BSD-3-Clause + use async_trait::async_trait; use minotari_app_grpc::tari_rpc::{NewBlockCoinbase, SubmitBlockRequest}; -use crate::sharechain::block::Block; -use crate::sharechain::error::Error; +use crate::sharechain::{block::Block, error::Error}; pub const MAX_BLOCKS_COUNT: usize = 80;