Merged
Changes from 4 commits
37 changes: 29 additions & 8 deletions Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion Cargo.toml
@@ -18,7 +18,7 @@ bytes = { version = "1", features = ["serde"] }
derive_more = { version = "2.0.1", features = ["from", "try_from", "into", "debug", "display", "deref", "deref_mut"] }
futures-lite = "2.6.0"
quinn = { package = "iroh-quinn", version = "0.14.0" }
n0-future = "0.1.2"
n0-future = "0.2.0"
n0-snafu = "0.2.0"
range-collections = { version = "0.4.6", features = ["serde"] }
redb = { version = "=2.4" }
179 changes: 12 additions & 167 deletions src/api/downloader.rs
@@ -4,26 +4,26 @@ use std::{
fmt::Debug,
future::{Future, IntoFuture},
io,
ops::Deref,
sync::Arc,
time::{Duration, SystemTime},
};

use anyhow::bail;
use genawaiter::sync::Gen;
use iroh::{endpoint::Connection, Endpoint, NodeId};
use iroh::{Endpoint, NodeId};
use irpc::{channel::mpsc, rpc_requests};
use n0_future::{future, stream, BufferedStreamExt, Stream, StreamExt};
use rand::seq::SliceRandom;
use serde::{de::Error, Deserialize, Serialize};
use tokio::{sync::Mutex, task::JoinSet};
use tokio_util::time::FutureExt;
use tracing::{info, instrument::Instrument, warn};
use tokio::task::JoinSet;
use tracing::instrument::Instrument;

use super::{remote::GetConnection, Store};
use super::Store;
use crate::{
protocol::{GetManyRequest, GetRequest},
util::sink::{Drain, IrpcSenderRefSink, Sink, TokioMpscSenderSink},
util::{
connection_pool::ConnectionPool,
sink::{Drain, IrpcSenderRefSink, Sink, TokioMpscSenderSink},
},
BlobFormat, Hash, HashAndFormat,
};

Expand Down Expand Up @@ -69,7 +69,7 @@ impl DownloaderActor {
fn new(store: Store, endpoint: Endpoint) -> Self {
Self {
store,
pool: ConnectionPool::new(endpoint, crate::ALPN.to_vec()),
pool: ConnectionPool::new(endpoint, crate::ALPN, Default::default()),
tasks: JoinSet::new(),
running: HashSet::new(),
}
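
The constructor call changes from the old inline pool's `ConnectionPool::new(endpoint, crate::ALPN.to_vec())` to the new shared pool in `src/util/connection_pool.rs`, which now also takes an options value (defaulted here). That module is not part of this diff, so the sketch below only mirrors what the call sites imply; the names `Options` and `ConnectionRef`, the `&'static [u8]` ALPN parameter, and the `anyhow::Result` error type are assumptions for illustration, not the module's confirmed API.

```rust
// Rough shape of the new pool as implied by its call sites in this PR; an
// assumed sketch only, see src/util/connection_pool.rs for the real definitions.
use std::ops::Deref;

use iroh::{endpoint::Connection, Endpoint, NodeId};

#[derive(Debug, Default)]
pub struct Options; // pool policy (timeouts, limits, ...); fields unknown from this diff

#[derive(Debug, Clone)]
pub struct ConnectionPool; // holds the endpoint, the ALPN and per-node slots

/// Cheap handle returned by `connect` that keeps the pooled connection alive
/// and dereferences to it.
pub struct ConnectionRef(Connection);

impl Deref for ConnectionRef {
    type Target = Connection;
    fn deref(&self) -> &Connection {
        &self.0
    }
}

impl ConnectionPool {
    pub fn new(endpoint: Endpoint, alpn: &'static [u8], options: Options) -> Self {
        let _ = (endpoint, alpn, options);
        Self
    }

    /// Reuse a cached connection to `node` or dial it according to the pool's policy.
    pub async fn connect(&self, node: NodeId) -> anyhow::Result<ConnectionRef> {
        let _ = node;
        anyhow::bail!("sketch only; see src/util/connection_pool.rs")
    }
}
```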
@@ -414,90 +414,6 @@ async fn split_request<'a>(
})
}

#[derive(Debug)]
struct ConnectionPoolInner {
alpn: Vec<u8>,
endpoint: Endpoint,
connections: Mutex<HashMap<NodeId, Arc<Mutex<SlotState>>>>,
retry_delay: Duration,
connect_timeout: Duration,
}

#[derive(Debug, Clone)]
struct ConnectionPool(Arc<ConnectionPoolInner>);

#[derive(Debug, Default)]
enum SlotState {
#[default]
Initial,
Connected(Connection),
AttemptFailed(SystemTime),
#[allow(dead_code)]
Evil(String),
}

impl ConnectionPool {
fn new(endpoint: Endpoint, alpn: Vec<u8>) -> Self {
Self(
ConnectionPoolInner {
endpoint,
alpn,
connections: Default::default(),
retry_delay: Duration::from_secs(5),
connect_timeout: Duration::from_secs(2),
}
.into(),
)
}

pub fn alpn(&self) -> &[u8] {
&self.0.alpn
}

pub fn endpoint(&self) -> &Endpoint {
&self.0.endpoint
}

pub fn retry_delay(&self) -> Duration {
self.0.retry_delay
}

fn dial(&self, id: NodeId) -> DialNode {
DialNode {
pool: self.clone(),
id,
}
}

#[allow(dead_code)]
async fn mark_evil(&self, id: NodeId, reason: String) {
let slot = self
.0
.connections
.lock()
.await
.entry(id)
.or_default()
.clone();
let mut t = slot.lock().await;
*t = SlotState::Evil(reason)
}

#[allow(dead_code)]
async fn mark_closed(&self, id: NodeId) {
let slot = self
.0
.connections
.lock()
.await
.entry(id)
.or_default()
.clone();
let mut t = slot.lock().await;
*t = SlotState::Initial
}
}

/// Execute a get request sequentially for multiple providers.
///
/// It will try each provider in order
@@ -526,13 +442,13 @@ async fn execute_get(
request: request.clone(),
})
.await?;
let mut conn = pool.dial(provider);
let conn = pool.connect(provider);
let local = remote.local_for_request(request.clone()).await?;
if local.is_complete() {
return Ok(());
}
let local_bytes = local.local_bytes();
let Ok(conn) = conn.connection().await else {
let Ok(conn) = conn.await else {
progress
.send(DownloadProgessItem::ProviderFailed {
id: provider,
@@ -543,7 +459,7 @@
};
match remote
.execute_get_sink(
conn,
&conn,
local.missing(),
(&mut progress).with_map(move |x| DownloadProgessItem::Progress(x + local_bytes)),
)
@@ -571,77 +487,6 @@ async fn execute_get(
bail!("Unable to download {}", request.hash);
}

#[derive(Debug, Clone)]
struct DialNode {
pool: ConnectionPool,
id: NodeId,
}

impl DialNode {
async fn connection_impl(&self) -> anyhow::Result<Connection> {
info!("Getting connection for node {}", self.id);
let slot = self
.pool
.0
.connections
.lock()
.await
.entry(self.id)
.or_default()
.clone();
info!("Dialing node {}", self.id);
let mut guard = slot.lock().await;
match guard.deref() {
SlotState::Connected(conn) => {
return Ok(conn.clone());
}
SlotState::AttemptFailed(time) => {
let elapsed = time.elapsed().unwrap_or_default();
if elapsed <= self.pool.retry_delay() {
bail!(
"Connection attempt failed {} seconds ago",
elapsed.as_secs_f64()
);
}
}
SlotState::Evil(reason) => {
bail!("Node is banned due to evil behavior: {reason}");
}
SlotState::Initial => {}
}
let res = self
.pool
.endpoint()
.connect(self.id, self.pool.alpn())
.timeout(self.pool.0.connect_timeout)
.await;
match res {
Ok(Ok(conn)) => {
info!("Connected to node {}", self.id);
*guard = SlotState::Connected(conn.clone());
Ok(conn)
}
Ok(Err(e)) => {
warn!("Failed to connect to node {}: {}", self.id, e);
*guard = SlotState::AttemptFailed(SystemTime::now());
Err(e.into())
}
Err(e) => {
warn!("Failed to connect to node {}: {}", self.id, e);
*guard = SlotState::AttemptFailed(SystemTime::now());
bail!("Failed to connect to node: {}", e);
}
}
}
}

impl GetConnection for DialNode {
fn connection(&mut self) -> impl Future<Output = Result<Connection, anyhow::Error>> + '_ {
let this = self.clone();
async move { this.connection_impl().await }
}
}
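
With the inline pool and `DialNode` above deleted, the per-provider flow in `execute_get` reduces to awaiting `pool.connect(provider)`, skipping the provider on failure, and handing a `&Connection` to `execute_get_sink`. A self-contained sketch of that sequential-providers pattern, where `connect` and `fetch` are stand-in closures for `ConnectionPool::connect` and `Remote::execute_get_sink` (both assumptions so the example compiles on its own):

```rust
use std::future::Future;

use iroh::{endpoint::Connection, NodeId};

/// Try each provider in order until one can serve the request, mirroring the
/// loop in `execute_get`. `connect` stands in for `ConnectionPool::connect` and
/// `fetch` for the actual transfer (`Remote::execute_get_sink` in this crate).
async fn try_providers<C, CF, F, FF>(
    providers: &[NodeId],
    mut connect: C,
    mut fetch: F,
) -> anyhow::Result<()>
where
    C: FnMut(NodeId) -> CF,
    CF: Future<Output = anyhow::Result<Connection>>,
    F: FnMut(Connection) -> FF,
    FF: Future<Output = anyhow::Result<()>>,
{
    for &provider in providers {
        // A failed dial is not fatal: the real code reports it as
        // DownloadProgessItem::ProviderFailed and moves on to the next provider.
        let Ok(conn) = connect(provider).await else {
            continue;
        };
        // Connection is a cheap reference-counted handle, so passing it by value
        // here is equivalent to the `&conn` the real code hands to execute_get_sink.
        if fetch(conn).await.is_ok() {
            return Ok(());
        }
    }
    anyhow::bail!("unable to download from any of the providers")
}
```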

/// Trait for pluggable content discovery strategies.
pub trait ContentDiscovery: Debug + Send + Sync + 'static {
fn find_providers(&self, hash: HashAndFormat) -> n0_future::stream::Boxed<NodeId>;
10 changes: 6 additions & 4 deletions src/api/remote.rs
@@ -518,7 +518,7 @@ impl Remote {
.connection()
.await
.map_err(|e| LocalFailureSnafu.into_error(e.into()))?;
let stats = self.execute_get_sink(conn, request, progress).await?;
let stats = self.execute_get_sink(&conn, request, progress).await?;
Ok(stats)
}

Expand Down Expand Up @@ -637,7 +637,7 @@ impl Remote {
.with_map_err(io::Error::other);
let this = self.clone();
let fut = async move {
let res = this.execute_get_sink(conn, request, sink).await.into();
let res = this.execute_get_sink(&conn, request, sink).await.into();
tx2.send(res).await.ok();
};
GetProgress {
@@ -656,13 +656,15 @@
/// This will return the stats of the download.
pub(crate) async fn execute_get_sink(
&self,
conn: Connection,
conn: &Connection,
request: GetRequest,
mut progress: impl Sink<u64, Error = io::Error>,
) -> GetResult<Stats> {
let store = self.store();
let root = request.hash;
let start = crate::get::fsm::start(conn, request, Default::default());
// Cloning the connection here is fine: the clone is just another handle, and the
// original Connection or ConnectionRef stays alive for the duration of the operation.
let start = crate::get::fsm::start(conn.clone(), request, Default::default());
let connected = start.next().await?;
trace!("Getting header");
// read the header
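
Taking `conn: &Connection` works because iroh's `Connection` (re-exported from iroh-quinn) is a reference-counted handle: `clone()` is cheap and the clone drives the same underlying QUIC connection, which is what the comment above the `crate::get::fsm::start` call relies on. A small sketch of that borrowing pattern, with `run_request` as a hypothetical stand-in for the work done around `fsm::start`:

```rust
use iroh::endpoint::Connection;

// A helper can take `&Connection`, clone the cheap handle it needs to own, and
// leave the caller's connection untouched and reusable for further requests.
async fn run_request(conn: &Connection) -> anyhow::Result<()> {
    // stand-in for crate::get::fsm::start(conn.clone(), request, Default::default()):
    // the state machine owns its own clone of the handle for the duration of the transfer
    let owned = conn.clone();
    let _ = owned;
    Ok(())
}

async fn caller(conn: Connection) -> anyhow::Result<()> {
    run_request(&conn).await?;
    // the same connection is still alive and can serve the next request
    run_request(&conn).await?;
    Ok(())
}
```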
2 changes: 2 additions & 0 deletions src/util.rs
@@ -4,7 +4,9 @@ use bao_tree::{io::round_up_to_chunks, ChunkNum, ChunkRanges};
use range_collections::{range_set::RangeSetEntry, RangeSet2};

pub mod channel;
pub(crate) mod connection_pool;
pub(crate) mod temp_tag;

pub mod serde {
// Module that handles io::Error serialization/deserialization
pub mod io_error_serde {