Skip to content

Commit c352913

Browse files
authored
feat: reduce log verbosity by changing info logs to debug (#1496)
* style(grpc): change info log to debug log

Signed-off-by: Gaius <gaius.qi@gmail.com>

* feat: reduce log verbosity by changing info logs to debug

Changes log levels from `info` to `debug` for verbose operational logs
to reduce noise in production environments. This includes:

- Backend operations (HDFS head/get requests)
- Parent selector operations (register/unregister, weight updates)
- Piece collector sync operations
- gRPC request/response logging
- Proxy request routing decisions
- Scheduler announcements
- Stream message transmission confirmations

Also removes redundant log messages that don't provide additional value:

- Duplicate request logging in handlers
- Metadata initialization confirmations
- Content/piece length debug prints

These changes maintain important milestone logs at `info` level while
moving routine operational details to `debug` level for better log
clarity in production deployments.

Signed-off-by: Gaius <gaius.qi@gmail.com>

---------

Signed-off-by: Gaius <gaius.qi@gmail.com>
1 parent 118416e commit c352913

File tree

14 files changed

+83
-129
lines changed

14 files changed

+83
-129
lines changed

dragonfly-client-backend/src/hdfs.rs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@ use opendal::{layers::TimeoutLayer, Operator};
2121
use percent_encoding::percent_decode_str;
2222
use std::time::Duration;
2323
use tokio_util::io::StreamReader;
24-
use tracing::{error, info, instrument};
24+
use tracing::{debug, error, instrument};
2525
use url::Url;
2626

2727
/// HDFS_SCHEME is the scheme of the HDFS.
@@ -90,7 +90,7 @@ impl super::Backend for Hdfs {
9090
/// head gets the header of the request.
9191
#[instrument(skip_all)]
9292
async fn head(&self, request: super::HeadRequest) -> ClientResult<super::HeadResponse> {
93-
info!(
93+
debug!(
9494
"head request {} {}: {:?}",
9595
request.task_id, request.url, request.http_header
9696
);
@@ -154,7 +154,7 @@ impl super::Backend for Hdfs {
154154
}))
155155
})?;
156156

157-
info!(
157+
debug!(
158158
"head response {} {}: {}",
159159
request.task_id,
160160
request.url,
@@ -177,7 +177,7 @@ impl super::Backend for Hdfs {
177177
&self,
178178
request: super::GetRequest,
179179
) -> ClientResult<super::GetResponse<super::Body>> {
180-
info!(
180+
debug!(
181181
"get request {} {}: {:?}",
182182
request.piece_id, request.url, request.http_header
183183
);

dragonfly-client-storage/src/storage_engine/rocksdb.rs

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -132,7 +132,6 @@ impl RocksdbStorageEngine {
132132
// Open rocksdb.
133133
let db =
134134
rocksdb::DB::open_cf_with_opts(&options, &dir, cfs).or_err(ErrorType::StorageError)?;
135-
info!("metadata initialized directory: {:?}", dir);
136135

137136
Ok(Self { inner: db })
138137
}

dragonfly-client-util/src/request/selector.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -114,7 +114,7 @@ impl SeedPeerSelector {
114114
async fn refresh(&self) -> Result<()> {
115115
// Only one refresh can be running at a time.
116116
let Ok(_guard) = self.mutex.try_lock() else {
117-
info!("refresh is already running");
117+
debug!("refresh is already running");
118118
return Ok(());
119119
};
120120

dragonfly-client/src/dynconfig/mod.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -115,7 +115,7 @@ impl Dynconfig {
115115
pub async fn refresh(&self) -> Result<()> {
116116
// Only one refresh can be running at a time.
117117
let Ok(_guard) = self.mutex.try_lock() else {
118-
info!("refresh is already running");
118+
debug!("refresh is already running");
119119
return Ok(());
120120
};
121121

dragonfly-client/src/grpc/dfdaemon_download.rs

Lines changed: 0 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -292,7 +292,6 @@ impl DfdaemonDownload for DfdaemonDownloadServerHandler {
292292
"remote_ip",
293293
download.remote_ip.clone().unwrap_or_default().as_str(),
294294
);
295-
info!("download task in download server");
296295

297296
// Download task started.
298297
info!("download task started: {:?}", download);
@@ -342,12 +341,6 @@ impl DfdaemonDownload for DfdaemonDownloadServerHandler {
342341
task
343342
}
344343
};
345-
info!(
346-
"content length {:?}, piece length {:?}",
347-
task.content_length(),
348-
task.piece_length()
349-
);
350-
351344
Span::current().record("content_length", task.content_length().unwrap_or_default());
352345

353346
// Update the actual content length, actual piece length and actual
@@ -635,7 +628,6 @@ impl DfdaemonDownload for DfdaemonDownloadServerHandler {
635628

636629
// If prefetch flag is true, prefetch the full task.
637630
if download.prefetch {
638-
info!("try to prefetch task");
639631
match self.task.prefetch_task_started(task_id.as_str()).await {
640632
Ok(_) => {
641633
info!("prefetch task started");
@@ -943,7 +935,6 @@ impl DfdaemonDownload for DfdaemonDownloadServerHandler {
943935
"remote_ip",
944936
request.remote_ip.clone().unwrap_or_default().as_str(),
945937
);
946-
info!("download persistent cache task in download server");
947938

948939
// Download task started.
949940
info!("download persistent cache task started: {:?}", request);
@@ -996,13 +987,6 @@ impl DfdaemonDownload for DfdaemonDownloadServerHandler {
996987
task
997988
}
998989
};
999-
1000-
info!(
1001-
"content length {}, piece length {}",
1002-
task.content_length(),
1003-
task.piece_length()
1004-
);
1005-
1006990
Span::current().record("content_length", task.content_length());
1007991

1008992
// Initialize stream channel.
@@ -1212,7 +1196,6 @@ impl DfdaemonDownload for DfdaemonDownloadServerHandler {
12121196
error!("generate persistent cache task id: {}", err);
12131197
Status::invalid_argument(err.to_string())
12141198
})?;
1215-
info!("generate persistent cache task id: {}", task_id);
12161199

12171200
// Generate the host id.
12181201
let host_id = self.task.id_generator.host_id();

dragonfly-client/src/grpc/dfdaemon_upload.rs

Lines changed: 0 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -289,7 +289,6 @@ impl DfdaemonUpload for DfdaemonUploadServerHandler {
289289
"remote_ip",
290290
download.remote_ip.clone().unwrap_or_default().as_str(),
291291
);
292-
info!("download task in upload server");
293292

294293
// Download task started.
295294
info!("download task started: {:?}", download);
@@ -339,13 +338,6 @@ impl DfdaemonUpload for DfdaemonUploadServerHandler {
339338
task
340339
}
341340
};
342-
343-
info!(
344-
"content length {:?}, piece length {:?}",
345-
task.content_length(),
346-
task.piece_length()
347-
);
348-
349341
Span::current().record("content_length", task.content_length().unwrap_or_default());
350342

351343
// Update the actual content length, actual piece length and actual
@@ -609,7 +601,6 @@ impl DfdaemonUpload for DfdaemonUploadServerHandler {
609601

610602
// If prefetch flag is true, prefetch the full task.
611603
if download.prefetch {
612-
info!("try to prefetch task");
613604
match self.task.prefetch_task_started(task_id.as_str()).await {
614605
Ok(_) => {
615606
info!("prefetch task started");
@@ -1059,7 +1050,6 @@ impl DfdaemonUpload for DfdaemonUploadServerHandler {
10591050
Span::current().record("remote_host_id", remote_host_id.as_str());
10601051
Span::current().record("task_id", task_id.as_str());
10611052
Span::current().record("piece_id", piece_id.as_str());
1062-
info!("download piece content in upload server");
10631053

10641054
// Get the piece metadata from the local storage.
10651055
let piece = self
@@ -1257,7 +1247,6 @@ impl DfdaemonUpload for DfdaemonUploadServerHandler {
12571247
"remote_ip",
12581248
request.remote_ip.clone().unwrap_or_default().as_str(),
12591249
);
1260-
info!("download persistent cache task in download server");
12611250

12621251
// Download task started.
12631252
info!("download persistent cache task started: {:?}", request);
@@ -1310,13 +1299,6 @@ impl DfdaemonUpload for DfdaemonUploadServerHandler {
13101299
task
13111300
}
13121301
};
1313-
1314-
info!(
1315-
"content length {}, piece length {}",
1316-
task.content_length(),
1317-
task.piece_length()
1318-
);
1319-
13201302
Span::current().record("content_length", task.content_length());
13211303

13221304
// Initialize stream channel.
@@ -1810,7 +1792,6 @@ impl DfdaemonUpload for DfdaemonUploadServerHandler {
18101792
Span::current().record("remote_host_id", remote_host_id.as_str());
18111793
Span::current().record("task_id", task_id.as_str());
18121794
Span::current().record("piece_id", piece_id.as_str());
1813-
info!("download persistent cache piece in upload server");
18141795

18151796
// Get the piece metadata from the local storage.
18161797
let piece = self

dragonfly-client/src/grpc/mod.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@ use dragonfly_client_metric::{
2222
use std::path::PathBuf;
2323
use std::time::Duration;
2424
use tonic::Request;
25-
use tracing::{error, info, instrument, Instrument};
25+
use tracing::{debug, error, info, instrument, Instrument};
2626

2727
pub mod dfdaemon_download;
2828
pub mod dfdaemon_upload;
@@ -115,7 +115,7 @@ pub async fn prefetch_task(
115115
let mut out_stream = response.into_inner();
116116
loop {
117117
match out_stream.message().await {
118-
Ok(Some(_)) => info!("prefetch piece finished"),
118+
Ok(Some(_)) => debug!("prefetch piece finished"),
119119
Ok(None) => {
120120
info!("prefetch task finished");
121121
return;

dragonfly-client/src/grpc/scheduler.rs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,7 @@ use tokio::sync::RwLock;
3737
use tokio::task::JoinSet;
3838
use tonic::service::interceptor::InterceptedService;
3939
use tonic::transport::Channel;
40-
use tracing::{error, info, instrument, Instrument};
40+
use tracing::{debug, error, info, instrument, Instrument};
4141
use url::Url;
4242

4343
use super::interceptor::InjectTracingInterceptor;
@@ -176,7 +176,7 @@ impl SchedulerClient {
176176
addr: SocketAddr,
177177
request: tonic::Request<AnnounceHostRequest>,
178178
) -> Result<()> {
179-
info!("announce host to {}", addr);
179+
debug!("announce host to {}", addr);
180180

181181
// Connect to the scheduler.
182182
let channel = Channel::from_shared(format!("http://{}", addr))
@@ -551,7 +551,7 @@ impl SchedulerClient {
551551
.zip(available_schedulers_clone.iter())
552552
.all(|(a, b)| a == b)
553553
{
554-
info!(
554+
debug!(
555555
"available schedulers is not changed: {:?}",
556556
data_available_schedulers_clone
557557
.iter()

dragonfly-client/src/proxy/mod.rs

Lines changed: 17 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -358,8 +358,6 @@ pub async fn http_handler(
358358
dfdaemon_download_client: DfdaemonDownloadClient,
359359
registry_cert: Arc<Option<Vec<CertificateDer<'static>>>>,
360360
) -> ClientResult<Response> {
361-
info!("handle HTTP request: {:?}", request);
362-
363361
// Authenticate the request with the basic auth.
364362
if let Some(basic_auth) = config.proxy.server.basic_auth.as_ref() {
365363
match basic_auth.credentials().verify(request.headers()) {
@@ -390,9 +388,8 @@ pub async fn http_handler(
390388
request_uri.to_string().as_str(),
391389
) {
392390
info!(
393-
"proxy HTTP request via dfdaemon by rule config for method: {}, uri: {}",
394-
request.method(),
395-
request_uri
391+
"proxy HTTP request via dfdaemon by rule config: {:?}",
392+
request
396393
);
397394
return proxy_via_dfdaemon(
398395
config,
@@ -409,9 +406,8 @@ pub async fn http_handler(
409406
// dfdaemon.
410407
if header::get_use_p2p(request.headers()) {
411408
info!(
412-
"proxy HTTP request via dfdaemon by X-Dragonfly-Use-P2P header for method: {}, uri: {}",
413-
request.method(),
414-
request_uri
409+
"proxy HTTP request via dfdaemon by X-Dragonfly-Use-P2P header: {:?}",
410+
request
415411
);
416412
return proxy_via_dfdaemon(
417413
config,
@@ -426,17 +422,15 @@ pub async fn http_handler(
426422

427423
if request.uri().scheme().cloned() == Some(http::uri::Scheme::HTTPS) {
428424
info!(
429-
"proxy HTTPS request directly to remote server for method: {}, uri: {}",
430-
request.method(),
431-
request.uri()
425+
"proxy HTTPS request directly to remote server: {:?}",
426+
request
432427
);
433428
return proxy_via_https(request, registry_cert).await;
434429
}
435430

436431
info!(
437-
"proxy HTTP request directly to remote server for method: {}, uri: {}",
438-
request.method(),
439-
request.uri()
432+
"proxy HTTP request directly to remote server: {:?}",
433+
request
440434
);
441435
return proxy_via_http(request).await;
442436
}
@@ -623,15 +617,13 @@ pub async fn upgraded_handler(
623617
}
624618

625619
// If find the matching rule, proxy the request via the dfdaemon.
626-
let request_uri = request.uri();
627620
if let Some(rule) = find_matching_rule(
628621
config.proxy.rules.as_deref(),
629-
request_uri.to_string().as_str(),
622+
request.uri().to_string().as_str(),
630623
) {
631624
info!(
632-
"proxy HTTPS request via dfdaemon by rule config for method: {}, uri: {}",
633-
request.method(),
634-
request_uri
625+
"proxy HTTPS request via dfdaemon by rule config: {:?}",
626+
request,
635627
);
636628
return proxy_via_dfdaemon(
637629
config,
@@ -648,9 +640,8 @@ pub async fn upgraded_handler(
648640
// dfdaemon.
649641
if header::get_use_p2p(request.headers()) {
650642
info!(
651-
"proxy HTTP request via dfdaemon by X-Dragonfly-Use-P2P header for method: {}, uri: {}",
652-
request.method(),
653-
request_uri
643+
"proxy HTTP request via dfdaemon by X-Dragonfly-Use-P2P header: {:?}",
644+
request,
654645
);
655646
return proxy_via_dfdaemon(
656647
config,
@@ -665,17 +656,15 @@ pub async fn upgraded_handler(
665656

666657
if request.uri().scheme().cloned() == Some(http::uri::Scheme::HTTPS) {
667658
info!(
668-
"proxy HTTPS request directly to remote server for method: {}, uri: {}",
669-
request.method(),
670-
request.uri()
659+
"proxy HTTPS request directly to remote server: {:?}",
660+
request,
671661
);
672662
return proxy_via_https(request, registry_cert).await;
673663
}
674664

675665
info!(
676-
"proxy HTTP request directly to remote server for method: {}, uri: {}",
677-
request.method(),
678-
request.uri()
666+
"proxy HTTP request directly to remote server: {:?}",
667+
request,
679668
);
680669
return proxy_via_http(request).await;
681670
}

0 commit comments

Comments (0)