
Commit

feat: add retry mechanism when reading blob metadata
When reading the blob size from blob metadata, retry the read from the remote backend if an error occurs.
The maximum number of retries is set to 3.
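
For context, here is a minimal, self-contained sketch of the same retry pattern; the helper name read_with_retry, its closure argument, and the eprintln! logging are illustrative assumptions, while the actual change calls reader.read_all() and warn! inside an immediately invoked closure:

    use std::io::{Error, ErrorKind};

    /// Illustrative helper: retry a fallible read up to `max_retries` times,
    /// returning the last error once the retries are exhausted.
    fn read_with_retry<F>(mut read_once: F, max_retries: u32) -> Result<usize, Error>
    where
        F: FnMut() -> Result<usize, Error>,
    {
        let mut retry_count = max_retries;
        loop {
            match read_once() {
                Ok(size) => return Ok(size),
                Err(e) => {
                    if retry_count > 0 {
                        // The real code logs with warn!(); eprintln! keeps the sketch dependency-free.
                        eprintln!("read failed: {}, {} retries left", e, retry_count);
                        retry_count -= 1;
                        continue;
                    }
                    return Err(Error::new(
                        ErrorKind::Other,
                        format!("read failed after {} retries: {}", max_retries, e),
                    ));
                }
            }
        }
    }

    fn main() {
        // Hypothetical usage: a reader that always fails, so all retries are consumed.
        let result = read_with_retry(|| Err(Error::new(ErrorKind::Other, "backend unavailable")), 3);
        assert!(result.is_err());
    }

The committed code below wraps the same loop in an immediately invoked closure so that ? can propagate the final error while the success path returns the number of bytes read.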

Signed-off-by: Yadong Ding <[email protected]>
Desiki-high committed Sep 18, 2024
1 parent 7fc9ede commit 0f123e8
Showing 1 changed file with 29 additions and 9 deletions.
38 changes: 29 additions & 9 deletions storage/src/meta/mod.rs
@@ -753,15 +753,35 @@ impl BlobCompressionContextInfo {
         let expected_raw_size = (compressed_size + BLOB_CCT_HEADER_SIZE) as usize;
         let mut raw_data = alloc_buf(expected_raw_size);

-        let read_size = reader
-            .read_all(&mut raw_data, blob_info.meta_ci_offset())
-            .map_err(|e| {
-                eio!(format!(
-                    "failed to read metadata for blob {} from backend, {}",
-                    blob_info.blob_id(),
-                    e
-                ))
-            })?;
+        let read_size = (|| {
+            // Maximum number of retries.
+            let mut retry_count = 3;
+
+            loop {
+                match reader.read_all(&mut raw_data, blob_info.meta_ci_offset()) {
+                    Ok(size) => return Ok(size),
+                    Err(e) => {
+                        // On a backend error, retry until retry_count is exhausted.
+                        if retry_count > 0 {
+                            warn!(
+                                "failed to read metadata for blob {} from backend, {}, retry read metadata",
+                                blob_info.blob_id(),
+                                e
+                            );
+                            retry_count -= 1;
+                            continue;
+                        }
+
+                        return Err(eio!(format!(
+                            "failed to read metadata for blob {} from backend, {}",
+                            blob_info.blob_id(),
+                            e
+                        )));
+                    }
+                }
+            }
+        })()?;

         if read_size != expected_raw_size {
             return Err(eio!(format!(
                 "failed to read metadata for blob {} from backend, compressor {}, got {} bytes, expect {} bytes",
