
Commit

Remove failure crate from non-test code
charlespierce committed May 6, 2020
1 parent 5728f8b commit 24dbe23
Showing 73 changed files with 810 additions and 794 deletions.
48 changes: 31 additions & 17 deletions Cargo.lock

Some generated files are not rendered by default.

2 changes: 0 additions & 2 deletions Cargo.toml
@@ -27,8 +27,6 @@ serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0.37"
lazy_static = "1.3.0"
log = { version = "0.4", features = ["std"] }
-volta-fail = { path = "crates/volta-fail" }
-volta-fail-derive = { path = "crates/volta-fail-derive" }
semver = { git = "https://github.com/mikrostew/semver", branch = "new-parser" }
structopt = "0.2.14"
cfg-if = "0.1"
3 changes: 1 addition & 2 deletions crates/archive/Cargo.toml
@@ -10,10 +10,9 @@ tar = "0.4.13"
zip_rs = { version = "0.2.6", package = "zip" }
reqwest = "0.9.9"
tee = "0.1.0"
failure = "0.1.1"
failure_derive = "0.1.1"
fs-utils = { path = "../fs-utils" }
progress-read = { path = "../progress-read" }
verbatim = "0.1"
cfg-if = "0.1"
hyperx = "0.15.2"
+thiserror = "1.0.16"
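
Note: the archive crate swaps failure and failure_derive for a single thiserror dependency. As a hedged illustration of the pattern (not code from this commit; HttpFailure is an invented name), thiserror's #[error("...")] attribute supplies the Display impl that #[fail(display = "...")] used to provide, so no separate derive crate is needed:

use thiserror::Error;

// Hypothetical stand-in for an error type migrated off the failure crate.
// #[derive(Error)] implements std::error::Error, and #[error("...")] supplies
// the Display impl that #[fail(display = "...")] provided before.
#[derive(Error, Debug)]
#[error("HTTP failure ({0})")]
pub struct HttpFailure(pub u16);

fn main() {
    // Prints "HTTP failure (404)" via the generated Display impl.
    println!("{}", HttpFailure(404));
}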
45 changes: 30 additions & 15 deletions crates/archive/src/lib.rs
@@ -1,22 +1,37 @@
//! This crate provides types for fetching and unpacking compressed
//! archives in tarball or zip format.
+use std::fs::File;
+use std::path::Path;
+
+use thiserror::Error;
+
mod tarball;
mod zip;

-use failure::Fail;
-
-#[derive(Fail, Debug)]
-#[fail(display = "HTTP failure ({})", code)]
-pub struct HttpError {
-pub code: ::reqwest::StatusCode,
-}
-
pub use crate::tarball::Tarball;
pub use crate::zip::Zip;

-use std::fs::File;
-use std::path::Path;
+/// Error type for this crate
+#[derive(Error, Debug)]
+pub enum ArchiveError {
+#[error("HTTP failure ({0})")]
+HttpError(::reqwest::StatusCode),
+
+#[error("HTTP header '{0}' not found")]
+MissingHeaderError(String),
+
+#[error("unexpected content length in HTTP response: {0}")]
+UnexpectedContentLengthError(u64),
+
+#[error("{0}")]
+IoError(#[from] std::io::Error),
+
+#[error("{0}")]
+ReqwestError(#[from] reqwest::Error),
+
+#[error("{0}")]
+ZipError(#[from] zip_rs::result::ZipError),
+}

/// Metadata describing whether an archive comes from a local or remote origin.
#[derive(Copy, Clone)]
@@ -34,7 +49,7 @@ pub trait Archive {
self: Box<Self>,
dest: &Path,
progress: &mut dyn FnMut(&(), usize),
-) -> Result<(), failure::Error>;
+) -> Result<(), ArchiveError>;

fn origin(&self) -> Origin;
}
@@ -45,7 +60,7 @@ cfg_if::cfg_if! {
///
/// On Windows, the preferred format is zip. On Unixes, the preferred format
/// is tarball.
-pub fn load_native(source: File) -> Result<Box<dyn Archive>, failure::Error> {
+pub fn load_native(source: File) -> Result<Box<dyn Archive>, ArchiveError> {
Tarball::load(source)
}

@@ -54,15 +69,15 @@ cfg_if::cfg_if! {
///
/// On Windows, the preferred format is zip. On Unixes, the preferred format
/// is tarball.
-pub fn fetch_native(url: &str, cache_file: &Path) -> Result<Box<dyn Archive>, failure::Error> {
+pub fn fetch_native(url: &str, cache_file: &Path) -> Result<Box<dyn Archive>, ArchiveError> {
Tarball::fetch(url, cache_file)
}
} else if #[cfg(windows)] {
/// Load an archive in the native OS-preferred format from the specified file.
///
/// On Windows, the preferred format is zip. On Unixes, the preferred format
/// is tarball.
-pub fn load_native(source: File) -> Result<Box<dyn Archive>, failure::Error> {
+pub fn load_native(source: File) -> Result<Box<dyn Archive>, ArchiveError> {
Zip::load(source)
}

@@ -71,7 +86,7 @@ cfg_if::cfg_if! {
///
/// On Windows, the preferred format is zip. On Unixes, the preferred format
/// is tarball.
-pub fn fetch_native(url: &str, cache_file: &Path) -> Result<Box<dyn Archive>, failure::Error> {
+pub fn fetch_native(url: &str, cache_file: &Path) -> Result<Box<dyn Archive>, ArchiveError> {
Zip::fetch(url, cache_file)
}
} else {
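
Note: a minimal sketch (not from the commit; SketchError and open_archive are invented names) of why the #[from] variants on ArchiveError matter: each one generates a From impl, so the ? operator keeps converting underlying errors such as std::io::Error into the crate's error type, much as it previously converted them into failure::Error.

use std::fs::File;
use std::io;

use thiserror::Error;

// Simplified stand-in for ArchiveError, keeping only an io variant.
#[derive(Error, Debug)]
pub enum SketchError {
    #[error("{0}")]
    Io(#[from] io::Error),
}

fn open_archive(path: &str) -> Result<File, SketchError> {
    // The generated From<io::Error> impl lets `?` do the conversion.
    Ok(File::open(path)?)
}

fn main() {
    if let Err(err) = open_archive("does-not-exist.tar.gz") {
        // Display text comes from the #[error("{0}")] attribute.
        eprintln!("{}", err);
    }
}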
51 changes: 11 additions & 40 deletions crates/archive/src/tarball.rs
@@ -5,7 +5,7 @@ use std::fs::File;
use std::io::{Read, Seek, SeekFrom};
use std::path::Path;

-use failure::{self, Fail};
+use super::{Archive, ArchiveError, Origin};
use flate2::read::GzDecoder;
use fs_utils::ensure_containing_dir_exists;
use hyperx::header::{
@@ -15,9 +15,6 @@ use progress_read::ProgressRead;
use reqwest::Response;
use tee::TeeReader;

-use super::Archive;
-use super::Origin;

/// A Node installation tarball.
pub struct Tarball {
compressed_size: u64,
@@ -30,31 +27,20 @@ pub struct Tarball {
origin: Origin,
}

-#[derive(Fail, Debug)]
-#[fail(display = "HTTP header '{}' not found", header)]
-struct MissingHeaderError {
-header: String,
-}

/// Determines the length of an HTTP response's content in bytes, using
/// the HTTP `"Content-Length"` header.
-fn content_length(response: &Response) -> Result<u64, failure::Error> {
+fn content_length(response: &Response) -> Result<u64, ArchiveError> {
response
.headers()
.decode::<ContentLength>()
.ok()
.map(|v| v.0)
-.ok_or_else(|| {
-MissingHeaderError {
-header: String::from("Content-Length"),
-}
-.into()
-})
+.ok_or_else(|| ArchiveError::MissingHeaderError(String::from("Content-Length")))
}

impl Tarball {
/// Loads a tarball from the specified file.
-pub fn load(mut source: File) -> Result<Box<dyn Archive>, failure::Error> {
+pub fn load(mut source: File) -> Result<Box<dyn Archive>, ArchiveError> {
let uncompressed_size = load_uncompressed_size(&mut source);
let compressed_size = source.metadata()?.len();
Ok(Box::new(Tarball {
@@ -68,14 +54,11 @@ impl Tarball {
/// Initiate fetching of a tarball from the given URL, returning a
/// tarball that can be streamed (and that tees its data to a local
/// file as it streams).
-pub fn fetch(url: &str, cache_file: &Path) -> Result<Box<dyn Archive>, failure::Error> {
+pub fn fetch(url: &str, cache_file: &Path) -> Result<Box<dyn Archive>, ArchiveError> {
let response = reqwest::get(url)?;

if !response.status().is_success() {
-return Err(super::HttpError {
-code: response.status(),
-}
-.into());
+return Err(ArchiveError::HttpError(response.status()));
}

let compressed_size = content_length(&response)?;
@@ -109,7 +92,7 @@ impl Archive for Tarball {
self: Box<Self>,
dest: &Path,
progress: &mut dyn FnMut(&(), usize),
-) -> Result<(), failure::Error> {
+) -> Result<(), ArchiveError> {
let decoded = GzDecoder::new(self.data);
let mut tarball = tar::Archive::new(ProgressRead::new(decoded, (), progress));
tarball.unpack(dest)?;
@@ -140,17 +123,11 @@ fn unpack_isize(packed: [u8; 4]) -> u64 {
unpacked32 as u64
}

-#[derive(Fail, Debug)]
-#[fail(display = "unexpected content length in HTTP response: {}", length)]
-struct UnexpectedContentLengthError {
-length: u64,
-}

/// Fetches just the `isize` field (the field that indicates the uncompressed size)
/// of a gzip file from a URL. This makes two round-trips to the server but avoids
/// downloading the entire gzip file. For very small files it's unlikely to be
/// more efficient than simply downloading the entire file up front.
-fn fetch_isize(url: &str, len: u64) -> Result<[u8; 4], failure::Error> {
+fn fetch_isize(url: &str, len: u64) -> Result<[u8; 4], ArchiveError> {
let client = reqwest::Client::new();
let range_header = Range::Bytes(vec![ByteRangeSpec::FromTo(len - 4, len - 1)]);
let mut response = client
@@ -159,19 +136,13 @@ fn fetch_isize(url: &str, len: u64) -> Result<[u8; 4], failure::Error> {
.send()?;

if !response.status().is_success() {
-return Err(super::HttpError {
-code: response.status(),
-}
-.into());
+return Err(ArchiveError::HttpError(response.status()));
}

let actual_length = content_length(&response)?;

if actual_length != 4 {
-return Err(UnexpectedContentLengthError {
-length: actual_length,
-}
-.into());
+return Err(ArchiveError::UnexpectedContentLengthError(actual_length));
}

let mut buf = [0; 4];
@@ -181,7 +152,7 @@ fn fetch_isize(url: &str, len: u64) -> Result<[u8; 4], failure::Error> {

/// Loads the `isize` field (the field that indicates the uncompressed size)
/// of a gzip file from disk.
-fn load_isize(file: &mut File) -> Result<[u8; 4], failure::Error> {
+fn load_isize(file: &mut File) -> Result<[u8; 4], ArchiveError> {
file.seek(SeekFrom::End(-4))?;
let mut buf = [0; 4];
file.read_exact(&mut buf)?;
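Note: with the concrete ArchiveError type flowing out of tarball.rs, callers can match on individual variants instead of downcasting a boxed failure::Error. A hedged usage sketch, assuming the crate is depended on under the package name archive and that the variants are as shown in lib.rs above:

use std::path::Path;

// Hypothetical caller; not part of this commit.
fn download(url: &str, cache: &Path) {
    match archive::fetch_native(url, cache) {
        Ok(_archive) => println!("download started"),
        Err(archive::ArchiveError::HttpError(status)) => {
            eprintln!("server responded with {}", status);
        }
        Err(archive::ArchiveError::MissingHeaderError(name)) => {
            eprintln!("response was missing the '{}' header", name);
        }
        // Remaining variants (IO, reqwest, zip, content length) fall back to
        // their Display output, which thiserror generates from #[error(...)].
        Err(other) => eprintln!("download failed: {}", other),
    }
}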
