Placate clippy

type: reform
Casey Rodarmor 2021-05-02 20:40:01 -07:00
parent 61bbd3bad5
commit 452486a782
GPG Key ID: 556186B153EC6FE0
23 changed files with 106 additions and 119 deletions


@@ -79,13 +79,13 @@ jobs:
        cargo clippy --version
    - name: Build
-     run: cargo build --all --verbose
+     run: cargo build --all
    - name: Test
-     run: cargo test --all --verbose
+     run: cargo test --all
    - name: Clippy
-     run: cargo clippy --all
+     run: cargo clippy --all-targets --all-features
    - name: Lint
      if: matrix.os == 'macos-latest'


@@ -99,10 +99,7 @@ impl Changelog {
   #[throws]
   pub(crate) fn render(&self, book: bool) -> String {
-    let mut lines: Vec<String> = Vec::new();
-    lines.push("Changelog".into());
-    lines.push("=========".into());
+    let mut lines: Vec<String> = vec!["Changelog".into(), "=========".into()];
     for release in &self.releases {
       lines.push("".into());


@@ -12,8 +12,7 @@ impl<R: Row> Table<R> {
 impl<R: Row> Display for Table<R> {
   fn fmt(&self, f: &mut Formatter) -> fmt::Result {
-    let mut rows = Vec::new();
-    rows.push(R::header().to_vec());
+    let mut rows = vec![R::header().to_vec()];
     for row in &self.rows {
       rows.push(row.entries());


@@ -1 +0,0 @@
-../../target/gen/book/changelog.md


@@ -33,7 +33,7 @@ test:
   cargo test --all
 clippy:
-  cargo clippy --all
+  cargo clippy --all-targets --all-features
 fmt:
   cargo +nightly fmt --all


@@ -5,8 +5,8 @@ error_on_unformatted = true
 format_code_in_doc_comments = true
 format_macro_bodies = true
 format_strings = true
+imports_granularity = "Crate"
 max_width = 100
-merge_imports = true
 newline_style = "Unix"
 normalize_comments = true
 reorder_impl_items = true


@@ -1,3 +1,5 @@
+#![allow(clippy::unwrap_used)]
 use crate::common::*;
 use std::io::BufWriter;
@@ -39,7 +41,7 @@ impl Bench for HasherBench {
     while written < TEMPFILE_BYTES {
       rand::thread_rng().fill_bytes(&mut bytes);
-      writer.write(&bytes).unwrap();
+      writer.write_all(&bytes).unwrap();
       written += bytes.len().into_u64();
     }
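
The `write` → `write_all` change addresses clippy's `unused_io_amount` lint: `std::io::Write::write` may write only part of the buffer and returns how many bytes it actually wrote, so discarding that count can silently drop data, whereas `write_all` keeps writing until the whole buffer is written or an error occurs. A minimal standalone sketch of the difference (not code from this repository):

use std::io::Write;

fn main() -> std::io::Result<()> {
  let data = b"some bytes";
  let mut sink: Vec<u8> = Vec::new();

  // `write` is allowed to perform a short write; the returned count must be checked.
  let written = sink.write(data)?;
  assert!(written <= data.len());

  // `write_all` retries until every byte has been written.
  sink.write_all(data)?;
  Ok(())
}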


@@ -214,7 +214,7 @@ mod tests {
       ("1kib", KI),
       ("1KiB", KI),
       ("12kib", 12 * KI),
-      ("1.5mib", 1 * MI + 512 * KI),
+      ("1.5mib", MI + 512 * KI),
     ];
     for (text, value) in CASES {


@@ -22,11 +22,11 @@ impl FileError {
     let metadata = match path.metadata() {
       Ok(metadata) => metadata,
       Err(error) => {
-        if error.kind() == io::ErrorKind::NotFound {
-          return Err(FileError::Missing);
+        return Err(if error.kind() == io::ErrorKind::NotFound {
+          FileError::Missing
         } else {
-          return Err(FileError::Io(error));
-        }
+          FileError::Io(error)
+        })
       }
     };


@@ -56,11 +56,7 @@ impl FilePath {
   #[cfg(test)]
   pub(crate) fn from_components(components: &[&str]) -> FilePath {
-    let components: Vec<String> = components
-      .iter()
-      .cloned()
-      .map(|component| component.to_owned())
-      .collect();
+    let components: Vec<String> = components.iter().cloned().map(ToOwned::to_owned).collect();
     assert!(!components.is_empty());
     FilePath { components }
   }


@@ -62,9 +62,9 @@ impl From<Sha1Digest> for Infohash {
   }
 }
-impl Into<Sha1Digest> for Infohash {
-  fn into(self) -> Sha1Digest {
-    self.inner
+impl From<Infohash> for Sha1Digest {
+  fn from(infohash: Infohash) -> Sha1Digest {
+    infohash.inner
   }
 }
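
This `Into` impl (and the `Shell` one further down) is rewritten as a `From` impl to satisfy clippy's `from_over_into` lint: the standard library's blanket `impl<T, U> Into<U> for T where U: From<T>` means a `From` impl provides the matching `Into` for free, while the reverse is not true. A small sketch of the pattern, using made-up `Wrapper`/`Inner` types rather than the types in this crate:

struct Inner(u64);
struct Wrapper {
  inner: Inner,
}

// Implement `From`; the corresponding `Into` comes from the blanket impl.
impl From<Wrapper> for Inner {
  fn from(wrapper: Wrapper) -> Self {
    wrapper.inner
  }
}

fn main() {
  let inner: Inner = Wrapper { inner: Inner(7) }.into(); // works via the blanket impl
  let other = Inner::from(Wrapper { inner: Inner(8) });
  println!("{} {}", inner.0, other.0);
}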


@@ -1,6 +1,7 @@
 #![deny(clippy::all, clippy::pedantic, clippy::restriction)]
 #![allow(
   clippy::blanket_clippy_restriction_lints,
+  clippy::create_dir,
   clippy::else_if_without_else,
   clippy::enum_glob_use,
   clippy::float_arithmetic,
@@ -13,6 +14,7 @@
   clippy::map_unwrap_or,
   clippy::missing_docs_in_private_items,
   clippy::missing_inline_in_public_items,
+  clippy::module_name_repetitions,
   clippy::needless_lifetimes,
   clippy::needless_pass_by_value,
   clippy::non_ascii_literal,
@@ -24,6 +26,18 @@
   clippy::wildcard_enum_match_arm,
   clippy::wildcard_imports
 )]
+#![cfg_attr(
+  any(test),
+  allow(
+    clippy::blacklisted_name,
+    clippy::expect_fun_call,
+    clippy::expect_used,
+    clippy::panic,
+    clippy::panic_in_result_fn,
+    clippy::unwrap_in_result,
+    clippy::unwrap_used
+  )
+)]
 #[cfg(test)]
 #[macro_use]


@@ -86,7 +86,7 @@ impl MagnetLink {
   }
   fn parse(text: &str) -> Result<Self, MagnetLinkParseError> {
-    let url = Url::parse(&text).context(magnet_link_parse_error::URL)?;
+    let url = Url::parse(&text).context(magnet_link_parse_error::Url)?;
     if url.scheme() != "magnet" {
       return Err(MagnetLinkParseError::Scheme {
@@ -105,7 +105,7 @@ impl MagnetLink {
       }
       let buf = hex::decode(infohash).context(magnet_link_parse_error::HexParse {
-        text: infohash.to_string(),
+        text: infohash.to_owned(),
       })?;
       link = Some(MagnetLink::with_infohash(
@@ -270,7 +270,7 @@ mod tests {
     assert_matches!(e, Error::MagnetLinkParse {
       text,
-      source: MagnetLinkParseError::URL { .. },
+      source: MagnetLinkParseError::Url { .. },
     } if text == link);
   }
@@ -307,7 +307,7 @@ mod tests {
       text,
       source: MagnetLinkParseError::HexParse {
         text: ih,
-        source: _,
+        ..
       }} if text == link && infohash == ih);
   }
@@ -335,7 +335,7 @@ mod tests {
       text,
       source: MagnetLinkParseError::TrackerAddress {
         text: addr,
-        source: _,
+        ..
       },
     } if text == link && addr == bad_addr);
   }
@@ -352,7 +352,7 @@ mod tests {
       text,
       source: MagnetLinkParseError::PeerAddress {
         text: addr,
-        source: _,
+        ..
       }
     } if text == link && addr == bad_addr
     );


@@ -28,5 +28,5 @@ pub(crate) enum MagnetLinkParseError {
     source: url::ParseError,
   },
   #[snafu(display("Failed to parse URL: {}", source))]
-  URL { source: url::ParseError },
+  Url { source: url::ParseError },
 }


@@ -14,9 +14,9 @@ impl Md5Digest {
     let mut bytes: [u8; 16] = [0; 16];
-    for n in 0..16 {
+    for (n, byte) in bytes.iter_mut().enumerate() {
       let i = n * 2;
-      bytes[n] = u8::from_str_radix(&hex[i..i + 2], 16).unwrap();
+      *byte = u8::from_str_radix(&hex[i..i + 2], 16).unwrap();
     }
     Self { bytes }
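
Indexing `bytes[n]` inside `for n in 0..16` trips clippy's `needless_range_loop`; iterating with `iter_mut().enumerate()` borrows each element directly while keeping the index for slicing the hex input. A standalone sketch of the same decoding loop (assumes a valid 32-character hex string, not this crate's API):

fn parse_hex_16(hex: &str) -> [u8; 16] {
  assert_eq!(hex.len(), 32);
  let mut bytes = [0u8; 16];
  for (n, byte) in bytes.iter_mut().enumerate() {
    let i = n * 2;
    // Each pair of hex characters becomes one byte.
    *byte = u8::from_str_radix(&hex[i..i + 2], 16).unwrap();
  }
  bytes
}

fn main() {
  let digest = parse_hex_16("000102030405060708090a0b0c0d0e0f");
  assert_eq!(digest[15], 0x0f);
}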


@@ -44,14 +44,14 @@ impl Shell {
   }
 }
-impl Into<clap::Shell> for Shell {
-  fn into(self) -> clap::Shell {
-    match self {
-      Self::Bash => clap::Shell::Bash,
-      Self::Fish => clap::Shell::Fish,
-      Self::Zsh => clap::Shell::Zsh,
-      Self::Powershell => clap::Shell::PowerShell,
-      Self::Elvish => clap::Shell::Elvish,
+impl From<Shell> for clap::Shell {
+  fn from(shell: Shell) -> Self {
+    match shell {
+      Shell::Bash => clap::Shell::Bash,
+      Shell::Fish => clap::Shell::Fish,
+      Shell::Zsh => clap::Shell::Zsh,
+      Shell::Powershell => clap::Shell::PowerShell,
+      Shell::Elvish => clap::Shell::Elvish,
     }
   }
 }


@@ -387,7 +387,7 @@ impl Create {
     let metainfo = Metainfo {
       comment: self.comment,
-      encoding: Some(consts::ENCODING_UTF8.to_string()),
+      encoding: Some(consts::ENCODING_UTF8.to_owned()),
       announce: self.announce.map(|url| url.to_string()),
       announce_list: if announce_list.is_empty() {
         None
@@ -1832,7 +1832,7 @@ Content Size 9 bytes
     assert_matches!(
       metainfo.info.mode,
-      Mode::Multiple { files } if files.len() == 0
+      Mode::Multiple { files } if files.is_empty()
     );
     assert_eq!(metainfo.info.pieces, PieceList::new());
     Ok(())
@@ -2044,7 +2044,7 @@ Content Size 9 bytes
   }
   #[test]
-  fn skip_hidden_attribute_dir_contents() -> Result<()> {
+  fn skip_hidden_attribute_dir_contents() {
     let mut env = test_env! {
       args: [
         "torrent",
@@ -2065,7 +2065,7 @@ Content Size 9 bytes
     #[cfg(target_os = "windows")]
     {
       env.write("foo/bar/baz", "baz");
-      let path = env.resolve("foo/bar")?;
+      let path = env.resolve("foo/bar").unwrap();
       Command::new("attrib")
         .arg("+h")
         .arg(&path)
@@ -2080,7 +2080,6 @@ Content Size 9 bytes
       Mode::Multiple { files } if files.is_empty()
     );
     assert_eq!(metainfo.info.pieces, PieceList::new());
-    Ok(())
   }
   #[test]
@@ -2886,7 +2885,7 @@ Content Size 9 bytes
   }
   #[test]
-  fn create_messages_path() -> Result<()> {
+  fn create_messages_path() {
     let mut env = test_env! {
       args: [
         "torrent",
@@ -2901,20 +2900,17 @@ Content Size 9 bytes
       }
     };
-    let want = format!(
-      "[1/3] \u{1F9FF} Searching `foo` for files…\n[2/3] \u{1F9EE} Hashing pieces…\n[3/3] \
-       \u{1F4BE} Writing metainfo to `foo.torrent`\n\u{2728}\u{2728} Done! \u{2728}\u{2728}\n",
-    );
+    let want = "[1/3] \u{1F9FF} Searching `foo` for files…\n[2/3] \u{1F9EE} Hashing \
+                pieces\n[3/3] \u{1F4BE} Writing metainfo to `foo.torrent`\n\u{2728}\u{2728} \
+                Done! \u{2728}\u{2728}\n";
     env.assert_ok();
     assert_eq!(env.err(), want);
-    Ok(())
   }
   #[test]
-  fn create_messages_subdir() -> Result<()> {
+  fn create_messages_subdir() {
     let mut env = test_env! {
       args: [
         "torrent",
@@ -2940,12 +2936,10 @@ Content Size 9 bytes
     env.assert_ok();
     assert_eq!(env.err(), want);
-    Ok(())
   }
   #[test]
-  fn create_messages_dot() -> Result<()> {
+  fn create_messages_dot() {
     let mut env = test_env! {
       args: [
         "torrent",
@@ -2975,12 +2969,10 @@ Content Size 9 bytes
     );
     assert_eq!(env.err(), want);
-    Ok(())
   }
   #[test]
-  fn create_messages_dot_dot() -> Result<()> {
+  fn create_messages_dot_dot() {
     let mut env = test_env! {
       args: [
         "torrent",
@@ -3011,12 +3003,10 @@ Content Size 9 bytes
     );
     assert_eq!(env.err(), want);
-    Ok(())
   }
   #[test]
-  fn create_messages_absolute() -> Result<()> {
+  fn create_messages_absolute() {
     let dir = TempDir::new().unwrap();
     let input = dir.path().join("foo");
@@ -3051,12 +3041,10 @@ Content Size 9 bytes
     );
     assert_eq!(env.err(), want);
-    Ok(())
   }
   #[test]
-  fn create_messages_stdio() -> Result<()> {
+  fn create_messages_stdio() {
     let dir = TempDir::new().unwrap();
     let input = dir.path().join("foo");
@@ -3097,15 +3085,11 @@ Content Size 9 bytes
       }
     );
-    let want = format!(
-      "[1/3] \u{1F9FF} Creating single-file torrent from standard input…\n[2/3] \u{1F9EE} Hashing \
-       pieces\n[3/3] \u{1F4BE} Writing metainfo to standard output\n\u{2728}\u{2728} Done! \
-       \u{2728}\u{2728}\n",
-    );
+    let want = "[1/3] \u{1F9FF} Creating single-file torrent from standard input…\n[2/3] \
+                \u{1F9EE} Hashing pieces\n[3/3] \u{1F4BE} Writing metainfo to standard \
+                output\n\u{2728}\u{2728} Done! \u{2728}\u{2728}\n";
     assert_eq!(env.err(), want);
-    Ok(())
   }
   #[test]


@@ -158,6 +158,8 @@ mod tests {
   #[test]
   fn no_announce_flag() {
+    const INFO: &str = "d6:lengthi0e4:name3:foo12:piece lengthi1e6:pieces0:e";
     let mut env = test_env! {
       args: [
         "torrent",
@@ -172,8 +174,6 @@ mod tests {
     env.assert_ok();
-    const INFO: &str = "d6:lengthi0e4:name3:foo12:piece lengthi1e6:pieces0:e";
     let infohash = Sha1Digest::from_data(INFO.as_bytes());
     assert_eq!(
@@ -184,6 +184,8 @@ mod tests {
   #[test]
   fn no_announce_positional() {
+    const INFO: &str = "d6:lengthi0e4:name3:foo12:piece lengthi1e6:pieces0:e";
     let mut env = test_env! {
       args: [
         "torrent",
@@ -197,8 +199,6 @@ mod tests {
     env.assert_ok();
-    const INFO: &str = "d6:lengthi0e4:name3:foo12:piece lengthi1e6:pieces0:e";
     let infohash = Sha1Digest::from_data(INFO.as_bytes());
     assert_eq!(
@@ -209,6 +209,8 @@ mod tests {
   #[test]
   fn with_announce() {
+    const INFO: &str = "d6:lengthi0e4:name3:foo12:piece lengthi1e6:pieces0:e";
     let mut env = test_env! {
       args: [
         "torrent",
@@ -226,8 +228,6 @@ mod tests {
     env.assert_ok();
-    const INFO: &str = "d6:lengthi0e4:name3:foo12:piece lengthi1e6:pieces0:e";
     let infohash = Sha1Digest::from_data(INFO.as_bytes());
     assert_eq!(
@@ -241,6 +241,8 @@ mod tests {
   #[test]
   fn unique_trackers() {
+    const INFO: &str = "d6:lengthi0e4:name3:foo12:piece lengthi1e6:pieces0:e";
     let mut env = test_env! {
       args: [
         "torrent",
@@ -259,8 +261,6 @@ mod tests {
     env.assert_ok();
-    const INFO: &str = "d6:lengthi0e4:name3:foo12:piece lengthi1e6:pieces0:e";
     let infohash = Sha1Digest::from_data(INFO.as_bytes());
     assert_eq!(
@@ -271,8 +271,11 @@ mod tests {
       ),
     );
   }
   #[test]
   fn with_peer() {
+    const INFO: &str = "d6:lengthi0e4:name3:foo12:piece lengthi1e6:pieces0:e";
     let mut env = test_env! {
       args: [
         "torrent",
@@ -292,8 +295,6 @@ mod tests {
     env.assert_ok();
-    const INFO: &str = "d6:lengthi0e4:name3:foo12:piece lengthi1e6:pieces0:e";
     let infohash = Sha1Digest::from_data(INFO.as_bytes());
     assert_eq!(
@@ -307,6 +308,8 @@ mod tests {
   #[test]
   fn with_indices() {
+    const INFO: &str = "d6:lengthi0e4:name3:foo12:piece lengthi1e6:pieces0:e";
     let mut env = test_env! {
       args: [
         "torrent",
@@ -328,8 +331,6 @@ mod tests {
     env.assert_ok();
-    const INFO: &str = "d6:lengthi0e4:name3:foo12:piece lengthi1e6:pieces0:e";
     let infohash = Sha1Digest::from_data(INFO.as_bytes());
     assert_eq!(
@@ -343,6 +344,8 @@ mod tests {
   #[test]
   fn infohash_correct_with_nonstandard_info_dict() {
+    const INFO: &str = "d1:ai0e6:lengthi0e4:name3:foo12:piece lengthi1e6:pieces0:e";
     let mut env = test_env! {
       args: [
         "torrent",
@@ -357,8 +360,6 @@ mod tests {
     env.assert_ok();
-    const INFO: &str = "d1:ai0e6:lengthi0e4:name3:foo12:piece lengthi1e6:pieces0:e";
     let infohash = Sha1Digest::from_data(INFO.as_bytes());
     assert_eq!(
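
Each `const INFO` declaration moves from below `env.assert_ok()` to the top of its test, which reads like a fix for clippy's pedantic `items_after_statements` lint: items declared after executable statements are flagged because they are easy to misread as being created at that point in the flow. A toy illustration with made-up names:

fn after_statements() -> u32 {
  let x = 1;
  const OFFSET: u32 = 41; // clippy::items_after_statements fires here when enabled
  x + OFFSET
}

fn items_first() -> u32 {
  const OFFSET: u32 = 41; // declaring the item first keeps the lint quiet
  let x = 1;
  x + OFFSET
}

fn main() {
  assert_eq!(after_statements(), items_first());
}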


@@ -11,14 +11,12 @@ pub(crate) struct PieceLength {}
 #[allow(clippy::unused_self)]
 impl PieceLength {
   pub(crate) fn run(self, env: &mut Env) -> Result<(), Error> {
-    let mut rows: Vec<(String, String, String, String)> = Vec::new();
-    rows.push((
+    let mut rows: Vec<(String, String, String, String)> = vec![(
       "Content".into(),
       "Piece Length".into(),
       "Count".into(),
       "Piece List Size".into(),
-    ));
+    )];
     for i in 14..51 {
       let content_size = Bytes::from(1u64 << i);


@@ -462,6 +462,17 @@ mod tests {
   #[test]
   fn output_color() -> Result<()> {
+    fn error(path: &str, message: &str) -> String {
+      let style = Style::active();
+      format!(
+        "{}{}:{} {}",
+        style.message().prefix(),
+        path,
+        style.message().suffix(),
+        message,
+      )
+    }
     let mut create_env = test_env! {
       args: [
         "torrent",
@@ -519,17 +530,6 @@ mod tests {
     let style = Style::active();
-    fn error(path: &str, message: &str) -> String {
-      let style = Style::active();
-      format!(
-        "{}{}:{} {}",
-        style.message().prefix(),
-        path,
-        style.message().suffix(),
-        message,
-      )
-    }
     let want = [
       &format!(
         "{} \u{1F4BE} {}",
@@ -671,10 +671,8 @@ mod tests {
     verify_env.assert_ok();
-    let want = format!(
-      "[1/2] \u{1F4BE} Loading metainfo from standard input…\n[2/2] \u{1F9EE} Verifying pieces \
-       from `foo`\n\u{2728}\u{2728} Verification succeeded! \u{2728}\u{2728}\n",
-    );
+    let want = "[1/2] \u{1F4BE} Loading metainfo from standard input…\n[2/2] \u{1F9EE} Verifying \
+                pieces from `foo`\n\u{2728}\u{2728} Verification succeeded! \u{2728}\u{2728}\n";
     assert_eq!(verify_env.err(), want);


@@ -66,11 +66,10 @@ impl TestEnvBuilder {
     let tempdir = self.tempdir.unwrap_or_else(|| tempfile::tempdir().unwrap());
-    let current_dir = if let Some(current_dir) = self.current_dir {
-      tempdir.path().join(current_dir)
-    } else {
-      tempdir.path().to_owned()
-    };
+    let current_dir = self.current_dir.map_or_else(
+      || tempdir.path().to_owned(),
+      |current_dir| tempdir.path().join(current_dir),
+    );
     let out_stream = OutputStream::new(
       Box::new(out.clone()),
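
Replacing the `if let Some(..) { .. } else { .. }` with `Option::map_or_else` is likely prompted by clippy's `option_if_let_else` lint: the first closure produces the `None` value, the second maps the `Some` value. A small standalone sketch of the same pattern, with illustrative paths rather than this builder's fields:

use std::path::{Path, PathBuf};

// Resolve an optional relative directory against a base path.
fn resolve(base: &Path, relative: Option<&str>) -> PathBuf {
  relative.map_or_else(|| base.to_owned(), |dir| base.join(dir))
}

fn main() {
  let base = Path::new("/tmp/work");
  assert_eq!(resolve(base, None), PathBuf::from("/tmp/work"));
  assert_eq!(resolve(base, Some("sub")), PathBuf::from("/tmp/work/sub"));
}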


@@ -177,7 +177,7 @@ impl TorrentSummary {
   fn torrent_summary_data(&self) -> TorrentSummaryJson {
     let (file_count, files) = match &self.metainfo.info.mode {
-      Mode::Single { .. } => (1, vec![self.metainfo.info.name.to_string()]),
+      Mode::Single { .. } => (1, vec![self.metainfo.info.name.to_owned()]),
       Mode::Multiple { files } => (
         files.len(),
         files
@@ -197,7 +197,7 @@ impl TorrentSummary {
     };
     TorrentSummaryJson {
-      name: self.metainfo.info.name.to_string(),
+      name: self.metainfo.info.name.to_owned(),
       comment: self.metainfo.comment.clone(),
       creation_date: self.metainfo.creation_date,
       created_by: self.metainfo.created_by.clone(),


@@ -36,17 +36,17 @@ impl<'a> Verifier<'a> {
     base: &'a Path,
     progress_bar: Option<ProgressBar>,
   ) -> Result<Status> {
-    Self::new(metainfo, base, progress_bar)?.verify_metainfo()
+    Ok(Self::new(metainfo, base, progress_bar)?.verify_metainfo())
   }
-  fn verify_metainfo(mut self) -> Result<Status> {
+  fn verify_metainfo(mut self) -> Status {
     match &self.metainfo.info.mode {
       Mode::Single { length, md5sum } => {
         self.hash(&self.base).ok();
         let error = FileError::verify(&self.base, *length, *md5sum).err();
         let pieces = self.finish();
-        Ok(Status::single(pieces, error))
+        Status::single(pieces, error)
       }
       Mode::Multiple { files } => {
         let mut status = Vec::new();
@@ -65,7 +65,7 @@ impl<'a> Verifier<'a> {
         let pieces = self.finish();
-        Ok(Status::multiple(pieces, status))
+        Status::multiple(pieces, status)
       }
     }
   }
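
Dropping the `Result` wrapper from `verify_metainfo` looks like a response to clippy's `unnecessary_wraps` lint, which flags functions that always return `Ok(..)`: the infallible function returns the plain value, and the one caller that can actually fail wraps it. A minimal standalone sketch of the before/after shape:

// Before: the function can never fail, yet advertises a Result.
fn status_wrapped() -> Result<u32, std::num::ParseIntError> {
  Ok(42)
}

// After: return the value directly and let a genuinely fallible caller wrap it.
fn status() -> u32 {
  42
}

fn run(input: &str) -> Result<u32, std::num::ParseIntError> {
  let offset: u32 = input.parse()?; // the part that can actually fail
  Ok(status() + offset)
}

fn main() {
  assert_eq!(status_wrapped(), Ok(42));
  assert_eq!(run("8"), Ok(50));
}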