Remove -v/--print-progress option
The detailed progress report depended on async downloading, which has
been removed, and the current reporting is just a single-line print;
that is not worth keeping a command-line option for.
pothos committed Jan 17, 2024
1 parent c266c9c commit 339648e
Showing 2 changed files with 11 additions and 49 deletions.
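
For context, the library's `download_and_hash` entry point now takes only a client, a URL, an output path, and the two optional expected hashes. The sketch below shows what a caller looks like after this change; the URL, the output path, and the use of the blocking `reqwest` client are assumptions for illustration, not code from this commit.

```rust
// Illustrative only: a hypothetical caller of the simplified API.
use std::path::Path;
use std::str::FromStr;

use reqwest::blocking::Client; // assumed blocking client, matching the removal of async downloading
use url::Url;

fn fetch_example() -> anyhow::Result<()> {
    let client = Client::new();
    // Hypothetical URL and output path.
    let url = Url::from_str("https://example.invalid/flatcar/oem.gz")?;
    let path = Path::new("/tmp/oem.gz");

    // No more sixth `print_progress: bool` argument; the function now always
    // prints its single "writing to <path>" line itself.
    ue_rs::download_and_hash(&client, url, path, None, None)?;
    Ok(())
}
```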
26 changes: 6 additions & 20 deletions src/bin/download_sysext.rs
@@ -98,7 +98,7 @@ impl<'a> Package<'a> {
         Ok(())
     }

-    fn download(&mut self, into_dir: &Path, client: &Client, print_progress: bool) -> Result<()> {
+    fn download(&mut self, into_dir: &Path, client: &Client) -> Result<()> {
         // FIXME: use _range_start for completing downloads
         let _range_start = match self.status {
             PackageStatus::ToDownload => 0,
@@ -115,7 +115,6 @@ impl<'a> Package<'a> {
             &path,
             self.hash_sha256.clone(),
             self.hash_sha1.clone(),
-            print_progress,
         ) {
             Ok(ok) => ok,
             Err(err) => {
@@ -251,12 +250,12 @@ fn get_pkgs_to_download<'a>(resp: &'a omaha::Response, glob_set: &GlobSet)
 }

 // Read data from remote URL into File
-fn fetch_url_to_file<'a, U>(path: &'a Path, input_url: U, client: &'a Client, print_progress: bool) -> Result<Package<'a>>
+fn fetch_url_to_file<'a, U>(path: &'a Path, input_url: U, client: &'a Client) -> Result<Package<'a>>
 where
     U: reqwest::IntoUrl + From<U> + std::clone::Clone + std::fmt::Debug,
     Url: From<U>,
 {
-    let r = ue_rs::download_and_hash(client, input_url.clone(), path, None, None, print_progress).context(format!("unable to download data(url {:?})", input_url))?;
+    let r = ue_rs::download_and_hash(client, input_url.clone(), path, None, None).context(format!("unable to download data(url {:?})", input_url))?;

     Ok(Package {
         name: Cow::Borrowed(path.file_name().unwrap_or(OsStr::new("fakepackage")).to_str().unwrap_or("fakepackage")),
@@ -268,10 +267,10 @@ where
     })
 }

-fn do_download_verify(pkg: &mut Package<'_>, output_dir: &Path, unverified_dir: &Path, pubkey_file: &str, client: &Client, print_progress: bool) -> Result<()> {
+fn do_download_verify(pkg: &mut Package<'_>, output_dir: &Path, unverified_dir: &Path, pubkey_file: &str, client: &Client) -> Result<()> {
     pkg.check_download(unverified_dir)?;

-    pkg.download(unverified_dir, client, print_progress).context(format!("unable to download \"{:?}\"", pkg.name))?;
+    pkg.download(unverified_dir, client).context(format!("unable to download \"{:?}\"", pkg.name))?;

     // Unverified payload is stored in e.g. "output_dir/.unverified/oem.gz".
     // Verified payload is stored in e.g. "output_dir/oem.raw".
@@ -311,10 +310,6 @@ struct Args {
     /// may be specified multiple times.
     #[argh(option, short = 'm')]
     image_match: Vec<String>,
-
-    /// report download progress
-    #[argh(switch, short = 'v')]
-    print_progress: bool,
 }

 impl Args {
@@ -387,15 +382,13 @@ fn main() -> Result<(), Box<dyn Error>> {
             &temp_payload_path,
             Url::from_str(url.as_str()).context(anyhow!("failed to convert into url ({:?})", url))?,
             &client,
-            args.print_progress,
         )?;
         do_download_verify(
             &mut pkg_fake,
             output_dir,
             unverified_dir.as_path(),
             args.pubkey_file.as_str(),
             &client,
-            args.print_progress,
         )?;

         // verify only a fake package, early exit and skip the rest.
@@ -422,14 +415,7 @@ fn main() -> Result<(), Box<dyn Error>> {
     ////

     for pkg in pkgs_to_dl.iter_mut() {
-        do_download_verify(
-            pkg,
-            output_dir,
-            unverified_dir.as_path(),
-            args.pubkey_file.as_str(),
-            &client,
-            args.print_progress,
-        )?;
+        do_download_verify(pkg, output_dir, unverified_dir.as_path(), args.pubkey_file.as_str(), &client)?;
     }

     // clean up data
34 changes: 5 additions & 29 deletions src/download.rs
@@ -59,14 +59,7 @@ pub fn hash_on_disk<T: omaha::HashAlgo>(path: &Path, maxlen: Option<usize>) -> R
     Ok(omaha::Hash::from_bytes(Box::new(hasher).finalize()))
 }

-fn do_download_and_hash<U>(
-    client: &Client,
-    url: U,
-    path: &Path,
-    expected_sha256: Option<omaha::Hash<omaha::Sha256>>,
-    expected_sha1: Option<omaha::Hash<omaha::Sha1>>,
-    print_progress: bool,
-) -> Result<DownloadResult>
+fn do_download_and_hash<U>(client: &Client, url: U, path: &Path, expected_sha256: Option<omaha::Hash<omaha::Sha256>>, expected_sha1: Option<omaha::Hash<omaha::Sha1>>) -> Result<DownloadResult>
 where
     U: reqwest::IntoUrl + Clone,
     Url: From<U>,
@@ -96,9 +89,8 @@ where
         }
     }

-    if print_progress {
-        println!("writing to {}", path.display());
-    }
+    println!("writing to {}", path.display());
+
     let mut file = File::create(path).context(format!("failed to create path ({:?})", path.display()))?;
     res.copy_to(&mut file)?;

@@ -126,29 +118,13 @@ where
     })
 }

-pub fn download_and_hash<U>(
-    client: &Client,
-    url: U,
-    path: &Path,
-    expected_sha256: Option<omaha::Hash<omaha::Sha256>>,
-    expected_sha1: Option<omaha::Hash<omaha::Sha1>>,
-    print_progress: bool,
-) -> Result<DownloadResult>
+pub fn download_and_hash<U>(client: &Client, url: U, path: &Path, expected_sha256: Option<omaha::Hash<omaha::Sha256>>, expected_sha1: Option<omaha::Hash<omaha::Sha1>>) -> Result<DownloadResult>
 where
     U: reqwest::IntoUrl + Clone,
     Url: From<U>,
 {
     crate::retry_loop(
-        || {
-            do_download_and_hash(
-                client,
-                url.clone(),
-                path,
-                expected_sha256.clone(),
-                expected_sha1.clone(),
-                print_progress,
-            )
-        },
+        || do_download_and_hash(client, url.clone(), path, expected_sha256.clone(), expected_sha1.clone()),
         MAX_DOWNLOAD_RETRY,
     )
 }

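The `crate::retry_loop` helper that wraps `do_download_and_hash` is not part of this diff. As a rough sketch of the call shape used above (a closure returning `Result` plus a maximum number of attempts), a minimal retry helper might look like the following; this is a hypothetical illustration, and the actual helper in the repository may differ, for example by logging failures or sleeping between attempts.

```rust
// Hypothetical sketch of a retry helper matching the call shape seen above;
// not the actual implementation from this repository.
fn retry_loop<F, T, E>(mut attempt: F, max_tries: u64) -> Result<T, E>
where
    F: FnMut() -> Result<T, E>,
{
    let mut tries = 0u64;
    loop {
        match attempt() {
            // Return the first successful result.
            Ok(value) => return Ok(value),
            Err(err) => {
                tries += 1;
                // Give up and surface the last error once the budget is exhausted.
                if tries >= max_tries {
                    return Err(err);
                }
            }
        }
    }
}
```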