make connection pool size configurable and add throughput logging
technillogue committed Jul 29, 2023
1 parent 47107e9 commit 59caeec
Showing 2 changed files with 18 additions and 6 deletions.
9 changes: 7 additions & 2 deletions storage/src/backend/connection.rs
@@ -10,7 +10,7 @@ use std::str::FromStr;
 use std::sync::atomic::{AtomicBool, AtomicI16, AtomicU8, Ordering};
 use std::sync::Arc;
 use std::time::{Duration, Instant, SystemTime, UNIX_EPOCH};
-use std::{fmt, thread};
+use std::{env, fmt, thread};
 
 use log::{max_level, Level};
 
@@ -626,14 +626,19 @@ impl Connection {
         } else {
             None
         };
+        // get pool size from envvar
+        let pool_max_idle_per_host = match env::var("REGISTRY_CLIENT_POOL_MAX_IDLE_PER_HOST") {
+            Ok(val) => val.parse::<usize>().unwrap_or(20),
+            Err(_) => 20,
+        };
 
         let mut cb = Client::builder()
             .timeout(timeout)
             .connect_timeout(connect_timeout)
             .redirect(Policy::none())
             .use_rustls_tls()
             .tcp_keepalive(Some(Duration::from_secs(5 * 60)))
-            .pool_max_idle_per_host(20);
+            .pool_max_idle_per_host(pool_max_idle_per_host);
 
         if config.skip_verify {
             cb = cb.danger_accept_invalid_certs(true);
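
As a rough sketch (not part of the commit), the env-var lookup above can be exercised in isolation; the helper name pool_size_from_env is hypothetical, but the fallback behavior matches the match expression in the diff:

    use std::env;

    /// Mirror of the commit's lookup: read REGISTRY_CLIENT_POOL_MAX_IDLE_PER_HOST,
    /// falling back to the previous hard-coded default of 20 when the variable is
    /// unset or does not parse as a usize.
    fn pool_size_from_env() -> usize {
        env::var("REGISTRY_CLIENT_POOL_MAX_IDLE_PER_HOST")
            .ok()
            .and_then(|val| val.parse::<usize>().ok())
            .unwrap_or(20)
    }

    fn main() {
        // e.g. launching with REGISTRY_CLIENT_POOL_MAX_IDLE_PER_HOST=64 lets reqwest
        // keep up to 64 idle connections per registry host instead of 20
        println!("pool_max_idle_per_host = {}", pool_size_from_env());
    }
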
15 changes: 11 additions & 4 deletions storage/src/cache/mod.rs
@@ -270,13 +270,17 @@ pub trait BlobCache: Send + Sync {
             )));
         }
         let duration = Instant::now().duration_since(start).as_millis();
+        let duration_s = duration as f64 / 1000.0;
+        let throughput_mbps = blob_size as f64 / duration_s / 1_000_000.0;
+
         debug!(
-            "read_chunks_from_backend: {} {} {} bytes at {}, duration {}ms",
+            "read_chunks_from_backend: {} {} {} bytes at {}, duration {}ms, throughput {:.4}Mbps",
             std::thread::current().name().unwrap_or_default(),
             if prefetch { "prefetch" } else { "fetch" },
             blob_size,
             blob_offset,
-            duration
+            duration,
+            throughput_mbps
         );
 
         let chunks = chunks.iter().map(|v| v.as_ref()).collect();
@@ -328,12 +332,15 @@ pub trait BlobCache: Send + Sync {
         }
 
         let duration = Instant::now().duration_since(start).as_millis();
+        let duration_s = duration as f64 / 1000.0;
+        let throughput_mbps = chunk.compressed_size() as f64 / duration_s / 1_000_000.0;
         debug!(
-            "read_chunk_from_backend: {} {} bytes at {}, duration {}ms",
+            "read_chunk_from_backend: {} {} bytes at {}, duration {}ms, throughput {:.4}Mbps",
             std::thread::current().name().unwrap_or_default(),
             chunk.compressed_size(),
             chunk.compressed_offset(),
-            duration
+            duration,
+            throughput_mbps
         );
         self.validate_chunk_data(chunk, buffer, false)
             .map_err(|e| {
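
For reference, a tiny standalone sketch of the throughput math used in both debug! calls above: the logged value is bytes transferred, divided by elapsed seconds, divided by 1_000_000. The helper name and the 8_000_000-byte / 250 ms figures are illustrative, not from the commit:

    /// Hypothetical helper reproducing the logged figure:
    /// bytes / elapsed seconds / 1_000_000.
    fn throughput(bytes: u64, duration_ms: u128) -> f64 {
        let duration_s = duration_ms as f64 / 1000.0;
        bytes as f64 / duration_s / 1_000_000.0
    }

    fn main() {
        // an 8 MB blob fetched in 250 ms would be logged as "throughput 32.0000Mbps"
        println!("throughput {:.4}Mbps", throughput(8_000_000, 250));
    }
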
